-rw-r--r--  context/data/scite/context/lexers/data/scite-context-data-context.lua  2
-rw-r--r--  context/data/scite/context/lexers/data/scite-context-data-tex.lua  4
-rw-r--r--  context/data/scite/context/scite-context-data-context.properties  81
-rw-r--r--  context/data/scite/context/scite-context-data-tex.properties  120
-rw-r--r--  doc/context/documents/general/manuals/luatex.pdf  bin 1009576 -> 1017376 bytes
-rw-r--r--  doc/context/sources/general/manuals/luatex/luatex-enhancements.tex  27
-rw-r--r--  doc/context/sources/general/manuals/luatex/luatex-fonts.tex  49
-rw-r--r--  doc/context/sources/general/manuals/luatex/luatex-introduction.tex  102
-rw-r--r--  doc/context/sources/general/manuals/luatex/luatex-languages.tex  20
-rw-r--r--  doc/context/sources/general/manuals/luatex/luatex-libraries.tex  305
-rw-r--r--  doc/context/sources/general/manuals/luatex/luatex-lua.tex  94
-rw-r--r--  doc/context/sources/general/manuals/luatex/luatex-math.tex  85
-rw-r--r--  doc/context/sources/general/manuals/luatex/luatex-modifications.tex  133
-rw-r--r--  doc/context/sources/general/manuals/luatex/luatex-nodes.tex  60
-rw-r--r--  doc/context/sources/general/manuals/luatex/luatex-style.tex  2
-rw-r--r--  doc/context/sources/general/manuals/luatex/luatex-titlepage.tex  12
-rw-r--r--  doc/context/sources/general/manuals/luatex/luatex.tex  7
-rw-r--r--  scripts/context/lua/mtx-fonts.lua  24
-rw-r--r--  scripts/context/lua/mtxrun.lua  39
-rw-r--r--  scripts/context/stubs/mswin/mtxrun.lua  39
-rw-r--r--  scripts/context/stubs/unix/mtxrun  39
-rw-r--r--  scripts/context/stubs/win64/mtxrun.lua  39
-rw-r--r--  tex/context/base/context-version.pdf  bin 4172 -> 4157 bytes
-rw-r--r--  tex/context/base/mkii/colo-xwi.mkii  2
-rw-r--r--  tex/context/base/mkii/mult-de.mkii  1
-rw-r--r--  tex/context/base/mkii/mult-en.mkii  1
-rw-r--r--  tex/context/base/mkii/mult-fr.mkii  1
-rw-r--r--  tex/context/base/mkii/mult-it.mkii  1
-rw-r--r--  tex/context/base/mkii/mult-nl.mkii  1
-rw-r--r--  tex/context/base/mkii/mult-pe.mkii  1
-rw-r--r--  tex/context/base/mkii/mult-ro.mkii  1
-rw-r--r--  tex/context/base/mkiv/back-exp.lua  14
-rw-r--r--  tex/context/base/mkiv/back-exp.mkiv  4
-rw-r--r--  tex/context/base/mkiv/blob-ini.lua  53
-rw-r--r--  tex/context/base/mkiv/colo-imp-crayola.mkiv  254
-rw-r--r--  tex/context/base/mkiv/colo-imp-dem.mkiv  19
-rw-r--r--  tex/context/base/mkiv/colo-imp-ema.mkiv  136
-rw-r--r--  tex/context/base/mkiv/colo-imp-rgb.mkiv  68
-rw-r--r--  tex/context/base/mkiv/colo-imp-x11.mkiv  226
-rw-r--r--  tex/context/base/mkiv/colo-imp-xwi.mkiv  12
-rw-r--r--  tex/context/base/mkiv/colo-ini.lua  2
-rw-r--r--  tex/context/base/mkiv/colo-ini.mkiv  171
-rw-r--r--  tex/context/base/mkiv/colo-run.lua  56
-rw-r--r--  tex/context/base/mkiv/colo-run.mkiv  81
-rw-r--r--  tex/context/base/mkiv/cont-new.mkiv  2
-rw-r--r--  tex/context/base/mkiv/context-performance.tex  23
-rw-r--r--  tex/context/base/mkiv/context-todo.tex  39
-rw-r--r--  tex/context/base/mkiv/context.mkiv  4
-rw-r--r--  tex/context/base/mkiv/core-con.lua  56
-rw-r--r--  tex/context/base/mkiv/core-con.mkiv  12
-rw-r--r--  tex/context/base/mkiv/font-ext.lua  1
-rw-r--r--  tex/context/base/mkiv/font-gbn.lua (renamed from tex/generic/context/luatex/luatex-fonts-cbk.lua)  110
-rw-r--r--  tex/context/base/mkiv/font-lib.mkvi  40
-rw-r--r--  tex/context/base/mkiv/font-nod.lua  55
-rw-r--r--  tex/context/base/mkiv/font-osd.lua  720
-rw-r--r--  tex/context/base/mkiv/font-ota.lua  97
-rw-r--r--  tex/context/base/mkiv/font-otj.lua  241
-rw-r--r--  tex/context/base/mkiv/font-otl.lua  3
-rw-r--r--  tex/context/base/mkiv/font-otr.lua  19
-rw-r--r--  tex/context/base/mkiv/font-ots.lua  1813
-rw-r--r--  tex/context/base/mkiv/font-pre.mkiv  11
-rw-r--r--  tex/context/base/mkiv/font-sol.lua  1
-rw-r--r--  tex/context/base/mkiv/font-syn.lua  4
-rw-r--r--  tex/context/base/mkiv/font-xtx.lua (renamed from tex/generic/context/luatex/luatex-fonts-def.lua)  0
-rw-r--r--  tex/context/base/mkiv/lang-dis.lua  19
-rw-r--r--  tex/context/base/mkiv/lang-hyp.lua  45
-rw-r--r--  tex/context/base/mkiv/lang-hyp.mkiv  2
-rw-r--r--  tex/context/base/mkiv/lang-rep.lua  34
-rw-r--r--  tex/context/base/mkiv/lang-wrd.lua  6
-rw-r--r--  tex/context/base/mkiv/lpdf-ini.lua  74
-rw-r--r--  tex/context/base/mkiv/lxml-ini.mkiv  17
-rw-r--r--  tex/context/base/mkiv/lxml-tab.lua  36
-rw-r--r--  tex/context/base/mkiv/math-ini.mkiv  2
-rw-r--r--  tex/context/base/mkiv/math-noa.lua  18
-rw-r--r--  tex/context/base/mkiv/mult-def.lua  3
-rw-r--r--  tex/context/base/mkiv/mult-low.lua  1
-rw-r--r--  tex/context/base/mkiv/mult-prm.lua  6
-rw-r--r--  tex/context/base/mkiv/node-fnt.lua  228
-rw-r--r--  tex/context/base/mkiv/node-ini.lua  4
-rw-r--r--  tex/context/base/mkiv/node-ltp.lua  108
-rw-r--r--  tex/context/base/mkiv/node-met.lua  161
-rw-r--r--  tex/context/base/mkiv/node-nut.lua  60
-rw-r--r--  tex/context/base/mkiv/node-pro.lua  9
-rw-r--r--  tex/context/base/mkiv/node-rul.lua  1
-rw-r--r--  tex/context/base/mkiv/node-shp.lua  10
-rw-r--r--  tex/context/base/mkiv/node-tra.lua  12
-rw-r--r--  tex/context/base/mkiv/publ-ini.mkiv  4
-rw-r--r--  tex/context/base/mkiv/scrp-cjk.lua  22
-rw-r--r--  tex/context/base/mkiv/scrp-eth.lua  1
-rw-r--r--  tex/context/base/mkiv/scrp-ini.lua  86
-rw-r--r--  tex/context/base/mkiv/spac-ali.mkiv  67
-rw-r--r--  tex/context/base/mkiv/spac-chr.lua  45
-rw-r--r--  tex/context/base/mkiv/spac-hor.mkiv  7
-rw-r--r--  tex/context/base/mkiv/spac-ver.lua  9
-rw-r--r--  tex/context/base/mkiv/spac-ver.mkiv  27
-rw-r--r--  tex/context/base/mkiv/status-files.pdf  bin 9119 -> 8977 bytes
-rw-r--r--  tex/context/base/mkiv/status-lua.pdf  bin 266042 -> 266913 bytes
-rw-r--r--  tex/context/base/mkiv/strc-itm.mkvi  2
-rw-r--r--  tex/context/base/mkiv/strc-lst.mkvi  33
-rw-r--r--  tex/context/base/mkiv/strc-not.mkvi  1
-rw-r--r--  tex/context/base/mkiv/syst-aux.lua  50
-rw-r--r--  tex/context/base/mkiv/syst-aux.mkiv  11
-rw-r--r--  tex/context/base/mkiv/syst-ini.mkiv  63
-rw-r--r--  tex/context/base/mkiv/tabl-tbl.mkiv  2
-rw-r--r--  tex/context/base/mkiv/tabl-xtb.mkvi  2
-rw-r--r--  tex/context/base/mkiv/trac-vis.lua  36
-rw-r--r--  tex/context/base/mkiv/typo-brk.lua  14
-rw-r--r--  tex/context/base/mkiv/typo-cap.lua  1
-rw-r--r--  tex/context/base/mkiv/typo-dha.lua  13
-rw-r--r--  tex/context/base/mkiv/typo-drp.lua  2
-rw-r--r--  tex/context/base/mkiv/typo-dua.lua  5
-rw-r--r--  tex/context/base/mkiv/typo-dub.lua  5
-rw-r--r--  tex/context/base/mkiv/typo-itc.lua  170
-rw-r--r--  tex/context/base/mkiv/typo-krn.lua  41
-rw-r--r--  tex/context/base/mkiv/typo-lin.lua  1
-rw-r--r--  tex/context/base/mkiv/typo-rep.lua  6
-rw-r--r--  tex/context/base/mkiv/typo-spa.lua  8
-rw-r--r--  tex/context/base/mkiv/typo-sus.lua  7
-rw-r--r--  tex/context/base/mkiv/typo-tal.lua  11
-rw-r--r--  tex/context/fonts/mkiv/type-imp-lato.mkiv  4
-rw-r--r--  tex/context/fonts/mkiv/type-imp-texgyre.mkiv  2
-rw-r--r--  tex/context/interface/common/keys-cs.xml  1
-rw-r--r--  tex/context/interface/common/keys-de.xml  1
-rw-r--r--  tex/context/interface/common/keys-en.xml  1
-rw-r--r--  tex/context/interface/common/keys-fr.xml  1
-rw-r--r--  tex/context/interface/common/keys-it.xml  1
-rw-r--r--  tex/context/interface/common/keys-nl.xml  1
-rw-r--r--  tex/context/interface/common/keys-pe.xml  1
-rw-r--r--  tex/context/interface/common/keys-ro.xml  1
-rw-r--r--  tex/context/modules/mkiv/m-json.mkiv  2
-rw-r--r--  tex/context/modules/mkiv/m-visual.mkiv  2
-rw-r--r--  tex/context/modules/mkiv/x-set-11.mkiv  191
-rw-r--r--  tex/generic/context/luatex/luatex-basics-chr.lua  758
-rw-r--r--  tex/generic/context/luatex/luatex-basics-gen.lua  38
-rw-r--r--  tex/generic/context/luatex/luatex-basics-nod.lua  38
-rw-r--r--  tex/generic/context/luatex/luatex-basics-prepare.tex  90
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-lua.lua  33
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua  17839
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-ota.lua  451
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-syn.lua  13
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-tfm.lua  38
-rw-r--r--  tex/generic/context/luatex/luatex-fonts.lua  115
-rw-r--r--  tex/generic/context/luatex/luatex-pdf.tex  63
-rw-r--r--  tex/generic/context/luatex/luatex-test.tex  13
144 files changed, 17592 insertions, 9353 deletions
diff --git a/context/data/scite/context/lexers/data/scite-context-data-context.lua b/context/data/scite/context/lexers/data/scite-context-data-context.lua
index ec08c551f..5de36e439 100644
--- a/context/data/scite/context/lexers/data/scite-context-data-context.lua
+++ b/context/data/scite/context/lexers/data/scite-context-data-context.lua
@@ -1,4 +1,4 @@
return {
["constants"]={ "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive", "plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred", "plustwohundred", "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", "maxcardminusone", "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint", "points", "halfpoint", "zeroskip", "zeromuskip", "onemuskip", "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi", "normalpagebox", "endoflinetoken", "outputnewlinechar", "emptytoks", "empty", "undefined", "voidbox", "emptybox", "emptyvbox", "emptyhbox", "bigskipamount", "medskipamount", "smallskipamount", "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion", "luatexengine", "pdftexengine", "xetexengine", "unknownengine", "activecatcode", "bgroup", "egroup", "endline", "conditionaltrue", "conditionalfalse", "attributeunsetvalue", "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle", "inicatcodes", "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes", "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes", "xmlcatcodes", "ctdcatcodes", "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode", "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode", "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode", "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode", "spaceasciicode", "hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode", "colonasciicode", "backslashasciicode", "circumflexasciicode", "underscoreasciicode", "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode", "lessthanasciicode", "morethanasciicode", "doublecommentsignal", "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode", "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode", "primeasciicode", "hyphenasciicode", "activemathcharcode", "activetabtoken", "activeformfeedtoken", "activeendoflinetoken", "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode", "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode", "vtopgroupcode", "aligngroupcode", "noaligngroupcode", "outputgroupcode", "mathgroupcode", "discretionarygroupcode", "insertgroupcode", "vcentergroupcode", "mathchoicegroupcode", "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "vadjustgroupcode", "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode", "adjustnodecode", "ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode", "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode", "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode", "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", "eofifcode", "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode", "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink", "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint", "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace", "mathsupdisplay", 
"mathsupnormal", "mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight", "muquad", "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset", "doifmode", "doifelsemode", "doifmodeelse", "doifnotmode", "startmodeset", "stopmodeset", "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifelseallmodes", "doifallmodeselse", "doifnotallmodes", "startenvironment", "stopenvironment", "environment", "startcomponent", "stopcomponent", "component", "startproduct", "stopproduct", "product", "startproject", "stopproject", "project", "starttext", "stoptext", "startnotext", "stopnotext", "startdocument", "stopdocument", "documentvariable", "setupdocument", "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule", "setupmodule", "currentmoduleparameter", "moduleparameter", "everystarttext", "everystoptext", "startTEXpage", "stopTEXpage", "enablemode", "disablemode", "preventmode", "definemode", "globalenablemode", "globaldisablemode", "globalpreventmode", "pushmode", "popmode", "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix", "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode", "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode", "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode", "constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument", "continueifinputfile", "luastringsep", "!!bs", "!!es", "lefttorightmark", "righttoleftmark", "breakablethinspace", "nobreakspace", "nonbreakablespace", "narrownobreakspace", "zerowidthnobreakspace", "ideographicspace", "ideographichalffillspace", "twoperemspace", "threeperemspace", "fourperemspace", "fiveperemspace", "sixperemspace", "figurespace", "punctuationspace", "hairspace", "zerowidthspace", "zerowidthnonjoiner", "zerowidthjoiner", "zwnj", "zwj", "optionalspace", "asciispacechar", "Ux", "eUx", "Umathaccents" },
- ["helpers"]={ "startsetups", "stopsetups", "startxmlsetups", "stopxmlsetups", "startluasetups", "stopluasetups", "starttexsetups", "stoptexsetups", "startrawsetups", "stoprawsetups", "startlocalsetups", "stoplocalsetups", "starttexdefinition", "stoptexdefinition", "starttexcode", "stoptexcode", "startcontextcode", "stopcontextcode", "startcontextdefinitioncode", "stopcontextdefinitioncode", "texdefinition", "doifelsesetups", "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "fastsetup", "doifelsecommandhandler", "doifcommandhandlerelse", "doifnotcommandhandler", "doifcommandhandler", "newmode", "setmode", "resetmode", "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode", "booleanmodevalue", "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", "newlanguage", "newfamily", "newfam", "newhelp", "then", "begcsname", "strippedcsname", "checkedstrippedcsname", "firstargumentfalse", "firstargumenttrue", "secondargumentfalse", "secondargumenttrue", "thirdargumentfalse", "thirdargumenttrue", "fourthargumentfalse", "fourthargumenttrue", "fifthargumentfalse", "fifthsargumenttrue", "sixthargumentfalse", "sixtsargumenttrue", "doglobal", "dodoglobal", "redoglobal", "resetglobal", "donothing", "dontcomplain", "forgetall", "donetrue", "donefalse", "foundtrue", "foundfalse", "inlineordisplaymath", "indisplaymath", "forcedisplaymath", "startforceddisplaymath", "stopforceddisplaymath", "reqno", "mathortext", "htdp", "unvoidbox", "hfilll", "vfilll", "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha", "currentcatcodetable", "defaultcatcodetable", "catcodetablename", "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", "pushcatcodetable", "popcatcodetable", "restorecatcodes", "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledmbox", "ruledhpack", "ruledvpack", "ruledtpack", "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", "ruledpenalty", "filledhboxb", "filledhboxr", "filledhboxg", "filledhboxc", "filledhboxm", "filledhboxy", "filledhboxk", "scratchcounter", "globalscratchcounter", "scratchdimen", "globalscratchdimen", "scratchskip", "globalscratchskip", "scratchmuskip", "globalscratchmuskip", "scratchtoks", "globalscratchtoks", "scratchbox", "globalscratchbox", "normalbaselineskip", "normallineskip", "normallineskiplimit", "availablehsize", "localhsize", "setlocalhsize", "distributedhsize", "hsizefraction", "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", "flushnextbox", "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", "scratchhsize", "scratchvsize", "scratchxoffset", "scratchyoffset", "scratchhoffset", "scratchvoffset", "scratchxposition", "scratchyposition", "scratchtopoffset", "scratchbottomoffset", "scratchleftoffset", "scratchrightoffset", "scratchcounterone", "scratchcountertwo", "scratchcounterthree", "scratchdimenone", "scratchdimentwo", 
"scratchdimenthree", "scratchskipone", "scratchskiptwo", "scratchskipthree", "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree", "scratchtoksone", "scratchtokstwo", "scratchtoksthree", "scratchboxone", "scratchboxtwo", "scratchboxthree", "scratchnx", "scratchny", "scratchmx", "scratchmy", "scratchunicode", "scratchmin", "scratchmax", "scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip", "doif", "doifnot", "doifelse", "doifinset", "doifnotinset", "doifelseinset", "doifinsetelse", "doifelsenextchar", "doifnextcharelse", "doifelsenextoptional", "doifnextoptionalelse", "doifelsenextoptionalcs", "doifnextoptionalcselse", "doifelsefastoptionalcheck", "doiffastoptionalcheckelse", "doifelsenextbgroup", "doifnextbgroupelse", "doifelsenextbgroupcs", "doifnextbgroupcselse", "doifelsenextparenthesis", "doifnextparenthesiselse", "doifelseundefined", "doifundefinedelse", "doifelsedefined", "doifdefinedelse", "doifundefined", "doifdefined", "doifelsevalue", "doifvalue", "doifnotvalue", "doifnothing", "doifsomething", "doifelsenothing", "doifnothingelse", "doifelsesomething", "doifsomethingelse", "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", "doifvaluenothingelse", "doifelsedimension", "doifdimensionelse", "doifelsenumber", "doifnumberelse", "doifnumber", "doifnotnumber", "doifelsecommon", "doifcommonelse", "doifcommon", "doifnotcommon", "doifinstring", "doifnotinstring", "doifelseinstring", "doifinstringelse", "doifelseassignment", "doifassignmentelse", "docheckassignment", "tracingall", "tracingnone", "loggingall", "removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace", "nbsp", "obeyspaces", "obeylines", "obeyedspace", "obeyedline", "obeyedtab", "obeyedpage", "normalspace", "executeifdefined", "singleexpandafter", "doubleexpandafter", "tripleexpandafter", "dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces", "removepunctuation", "wait", "writestatus", "define", "defineexpandable", "redefine", "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "freezemeasure", "measure", "measured", "installcorenamespace", "getvalue", "getuvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", "resetvalue", "undefinevalue", "ignorevalue", "setuvalue", "setuevalue", "setugvalue", "setuxvalue", "globallet", "glet", "udef", "ugdef", "uedef", "uxdef", "checked", "unique", "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters", "getdummyparameters", "dummyparameter", "directdummyparameter", "setdummyparameter", "letdummyparameter", "usedummystyleandcolor", "usedummystyleparameter", "usedummycolorparameter", "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect", "firstofoneargument", "firstoftwoarguments", "secondoftwoarguments", "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", 
"fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", "firstofoneunexpanded", "firstoftwounexpanded", "secondoftwounexpanded", "firstofthreeunexpanded", "secondofthreeunexpanded", "thirdofthreeunexpanded", "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments", "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith", "newconstant", "setnewconstant", "setconstant", "setconstantvalue", "newconditional", "settrue", "setfalse", "settruevalue", "setfalsevalue", "newmacro", "setnewmacro", "newfraction", "newsignal", "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "doquintupleargument", "dosixtupleargument", "doseventupleargument", "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", "permitspacesbetweengroups", "dontpermitspacesbetweengroups", "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", "modulonumber", "dividenumber", "getfirstcharacter", "doifelsefirstchar", "doiffirstcharelse", "startnointerference", "stopnointerference", "twodigits", "threedigits", "leftorright", "offinterlineskip", "oninterlineskip", "nointerlineskip", "strut", "halfstrut", "quarterstrut", "depthstrut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight", "ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing", "ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing", "opordspacing", "opopspacing", "opbinspacing", "oprelspacing", "opopenspacing", "opclosespacing", "oppunctspacing", "opinnerspacing", "binordspacing", "binopspacing", "binbinspacing", "binrelspacing", "binopenspacing", "binclosespacing", "binpunctspacing", "bininnerspacing", "relordspacing", "relopspacing", "relbinspacing", "relrelspacing", "relopenspacing", "relclosespacing", "relpunctspacing", "relinnerspacing", "openordspacing", "openopspacing", "openbinspacing", "openrelspacing", "openopenspacing", "openclosespacing", "openpunctspacing", "openinnerspacing", "closeordspacing", "closeopspacing", "closebinspacing", "closerelspacing", "closeopenspacing", "closeclosespacing", "closepunctspacing", "closeinnerspacing", "punctordspacing", "punctopspacing", "punctbinspacing", "punctrelspacing", "punctopenspacing", "punctclosespacing", "punctpunctspacing", "punctinnerspacing", "innerordspacing", "inneropspacing", "innerbinspacing", "innerrelspacing", "inneropenspacing", "innerclosespacing", "innerpunctspacing", "innerinnerspacing", "normalreqno", "startimath", "stopimath", "normalstartimath", "normalstopimath", "startdmath", "stopdmath", "normalstartdmath", "normalstopdmath", "normalsuperscript", "normalsubscript", "uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette", "mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox", "mathtext", "setmathsmalltextbox", "setmathtextbox", "pushmathstyle", "popmathstyle", "triggerdisplaystyle", 
"triggertextstyle", "triggerscriptstyle", "triggerscriptscriptstyle", "triggeruncrampedstyle", "triggercrampedstyle", "triggersmallstyle", "triggeruncrampedsmallstyle", "triggercrampedsmallstyle", "triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle", "luaexpr", "expelsedoif", "expdoif", "expdoifnot", "expdoifelsecommon", "expdoifcommonelse", "expdoifelseinset", "expdoifinsetelse", "ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport", "ctxlua", "luacode", "lateluacode", "directluacode", "registerctxluafile", "ctxloadluafile", "luaversion", "luamajorversion", "luaminorversion", "ctxluacode", "luaconditional", "luaexpanded", "startluaparameterset", "stopluaparameterset", "luaparameterset", "definenamedlua", "obeylualines", "obeyluatokens", "startluacode", "stopluacode", "startlua", "stoplua", "startctxfunction", "stopctxfunction", "ctxfunction", "startctxfunctiondefinition", "stopctxfunctiondefinition", "installctxfunction", "cldprocessfile", "cldloadfile", "cldcontext", "cldcommand", "carryoverpar", "assumelongusagecs", "Umathbotaccent", "righttolefthbox", "lefttorighthbox", "righttoleftvbox", "lefttorightvbox", "righttoleftvtop", "lefttorightvtop", "rtlhbox", "ltrhbox", "rtlvbox", "ltrvbox", "rtlvtop", "ltrvtop", "autodirhbox", "autodirvbox", "autodirvtop", "leftorrighthbox", "leftorrightvbox", "leftorrightvtop", "lefttoright", "righttoleft", "synchronizelayoutdirection", "synchronizedisplaydirection", "synchronizeinlinedirection", "lesshyphens", "morehyphens", "nohyphens", "dohyphens", "Ucheckedstartdisplaymath", "Ucheckedstopdisplaymath", "break", "nobreak", "allowbreak", "goodbreak", "nospace", "nospacing", "dospacing", "naturalhbox", "naturalvbox", "naturalhpack", "naturalvpack", "frule" },
+ ["helpers"]={ "startsetups", "stopsetups", "startxmlsetups", "stopxmlsetups", "startluasetups", "stopluasetups", "starttexsetups", "stoptexsetups", "startrawsetups", "stoprawsetups", "startlocalsetups", "stoplocalsetups", "starttexdefinition", "stoptexdefinition", "starttexcode", "stoptexcode", "startcontextcode", "stopcontextcode", "startcontextdefinitioncode", "stopcontextdefinitioncode", "texdefinition", "doifelsesetups", "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "fastsetup", "doifelsecommandhandler", "doifcommandhandlerelse", "doifnotcommandhandler", "doifcommandhandler", "newmode", "setmode", "resetmode", "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode", "booleanmodevalue", "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", "newlanguage", "newfamily", "newfam", "newhelp", "then", "begcsname", "strippedcsname", "checkedstrippedcsname", "firstargumentfalse", "firstargumenttrue", "secondargumentfalse", "secondargumenttrue", "thirdargumentfalse", "thirdargumenttrue", "fourthargumentfalse", "fourthargumenttrue", "fifthargumentfalse", "fifthsargumenttrue", "sixthargumentfalse", "sixtsargumenttrue", "doglobal", "dodoglobal", "redoglobal", "resetglobal", "donothing", "dontcomplain", "forgetall", "donetrue", "donefalse", "foundtrue", "foundfalse", "inlineordisplaymath", "indisplaymath", "forcedisplaymath", "startforceddisplaymath", "stopforceddisplaymath", "reqno", "mathortext", "htdp", "unvoidbox", "hfilll", "vfilll", "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha", "currentcatcodetable", "defaultcatcodetable", "catcodetablename", "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", "pushcatcodetable", "popcatcodetable", "restorecatcodes", "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledmbox", "ruledhpack", "ruledvpack", "ruledtpack", "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", "ruledpenalty", "filledhboxb", "filledhboxr", "filledhboxg", "filledhboxc", "filledhboxm", "filledhboxy", "filledhboxk", "scratchcounter", "globalscratchcounter", "scratchdimen", "globalscratchdimen", "scratchskip", "globalscratchskip", "scratchmuskip", "globalscratchmuskip", "scratchtoks", "globalscratchtoks", "scratchbox", "globalscratchbox", "normalbaselineskip", "normallineskip", "normallineskiplimit", "availablehsize", "localhsize", "setlocalhsize", "distributedhsize", "hsizefraction", "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", "flushnextbox", "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", "scratchhsize", "scratchvsize", "scratchxoffset", "scratchyoffset", "scratchhoffset", "scratchvoffset", "scratchxposition", "scratchyposition", "scratchtopoffset", "scratchbottomoffset", "scratchleftoffset", "scratchrightoffset", "scratchcounterone", "scratchcountertwo", "scratchcounterthree", "scratchdimenone", "scratchdimentwo", 
"scratchdimenthree", "scratchskipone", "scratchskiptwo", "scratchskipthree", "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree", "scratchtoksone", "scratchtokstwo", "scratchtoksthree", "scratchboxone", "scratchboxtwo", "scratchboxthree", "scratchnx", "scratchny", "scratchmx", "scratchmy", "scratchunicode", "scratchmin", "scratchmax", "scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip", "doif", "doifnot", "doifelse", "doifinset", "doifnotinset", "doifelseinset", "doifinsetelse", "doifelsenextchar", "doifnextcharelse", "doifelsenextoptional", "doifnextoptionalelse", "doifelsenextoptionalcs", "doifnextoptionalcselse", "doifelsefastoptionalcheck", "doiffastoptionalcheckelse", "doifelsenextbgroup", "doifnextbgroupelse", "doifelsenextbgroupcs", "doifnextbgroupcselse", "doifelsenextparenthesis", "doifnextparenthesiselse", "doifelseundefined", "doifundefinedelse", "doifelsedefined", "doifdefinedelse", "doifundefined", "doifdefined", "doifelsevalue", "doifvalue", "doifnotvalue", "doifnothing", "doifsomething", "doifelsenothing", "doifnothingelse", "doifelsesomething", "doifsomethingelse", "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", "doifvaluenothingelse", "doifelsedimension", "doifdimensionelse", "doifelsenumber", "doifnumberelse", "doifnumber", "doifnotnumber", "doifelsecommon", "doifcommonelse", "doifcommon", "doifnotcommon", "doifinstring", "doifnotinstring", "doifelseinstring", "doifinstringelse", "doifelseassignment", "doifassignmentelse", "docheckassignment", "tracingall", "tracingnone", "loggingall", "removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace", "nbsp", "obeyspaces", "obeylines", "obeyedspace", "obeyedline", "obeyedtab", "obeyedpage", "normalspace", "executeifdefined", "singleexpandafter", "doubleexpandafter", "tripleexpandafter", "dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces", "removepunctuation", "wait", "writestatus", "define", "defineexpandable", "redefine", "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "freezemeasure", "measure", "measured", "installcorenamespace", "getvalue", "getuvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", "resetvalue", "undefinevalue", "ignorevalue", "setuvalue", "setuevalue", "setugvalue", "setuxvalue", "globallet", "glet", "udef", "ugdef", "uedef", "uxdef", "checked", "unique", "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters", "getdummyparameters", "dummyparameter", "directdummyparameter", "setdummyparameter", "letdummyparameter", "usedummystyleandcolor", "usedummystyleparameter", "usedummycolorparameter", "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect", "firstofoneargument", "firstoftwoarguments", "secondoftwoarguments", "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", 
"fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", "firstofoneunexpanded", "firstoftwounexpanded", "secondoftwounexpanded", "firstofthreeunexpanded", "secondofthreeunexpanded", "thirdofthreeunexpanded", "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments", "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith", "newconstant", "setnewconstant", "setconstant", "setconstantvalue", "newconditional", "settrue", "setfalse", "settruevalue", "setfalsevalue", "newmacro", "setnewmacro", "newfraction", "newsignal", "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "doquintupleargument", "dosixtupleargument", "doseventupleargument", "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", "permitspacesbetweengroups", "dontpermitspacesbetweengroups", "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", "modulonumber", "dividenumber", "getfirstcharacter", "doifelsefirstchar", "doiffirstcharelse", "startnointerference", "stopnointerference", "twodigits", "threedigits", "leftorright", "offinterlineskip", "oninterlineskip", "nointerlineskip", "strut", "halfstrut", "quarterstrut", "depthstrut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight", "leftboundary", "rightboundary", "signalcharacter", "ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing", "ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing", "opordspacing", "opopspacing", "opbinspacing", "oprelspacing", "opopenspacing", "opclosespacing", "oppunctspacing", "opinnerspacing", "binordspacing", "binopspacing", "binbinspacing", "binrelspacing", "binopenspacing", "binclosespacing", "binpunctspacing", "bininnerspacing", "relordspacing", "relopspacing", "relbinspacing", "relrelspacing", "relopenspacing", "relclosespacing", "relpunctspacing", "relinnerspacing", "openordspacing", "openopspacing", "openbinspacing", "openrelspacing", "openopenspacing", "openclosespacing", "openpunctspacing", "openinnerspacing", "closeordspacing", "closeopspacing", "closebinspacing", "closerelspacing", "closeopenspacing", "closeclosespacing", "closepunctspacing", "closeinnerspacing", "punctordspacing", "punctopspacing", "punctbinspacing", "punctrelspacing", "punctopenspacing", "punctclosespacing", "punctpunctspacing", "punctinnerspacing", "innerordspacing", "inneropspacing", "innerbinspacing", "innerrelspacing", "inneropenspacing", "innerclosespacing", "innerpunctspacing", "innerinnerspacing", "normalreqno", "startimath", "stopimath", "normalstartimath", "normalstopimath", "startdmath", "stopdmath", "normalstartdmath", "normalstopdmath", "normalsuperscript", "normalsubscript", "uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette", "mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox", "mathtext", "setmathsmalltextbox", "setmathtextbox", 
"pushmathstyle", "popmathstyle", "triggerdisplaystyle", "triggertextstyle", "triggerscriptstyle", "triggerscriptscriptstyle", "triggeruncrampedstyle", "triggercrampedstyle", "triggersmallstyle", "triggeruncrampedsmallstyle", "triggercrampedsmallstyle", "triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle", "luaexpr", "expelsedoif", "expdoif", "expdoifnot", "expdoifelsecommon", "expdoifcommonelse", "expdoifelseinset", "expdoifinsetelse", "ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport", "ctxlua", "luacode", "lateluacode", "directluacode", "registerctxluafile", "ctxloadluafile", "luaversion", "luamajorversion", "luaminorversion", "ctxluacode", "luaconditional", "luaexpanded", "startluaparameterset", "stopluaparameterset", "luaparameterset", "definenamedlua", "obeylualines", "obeyluatokens", "startluacode", "stopluacode", "startlua", "stoplua", "startctxfunction", "stopctxfunction", "ctxfunction", "startctxfunctiondefinition", "stopctxfunctiondefinition", "installctxfunction", "cldprocessfile", "cldloadfile", "cldcontext", "cldcommand", "carryoverpar", "assumelongusagecs", "Umathbotaccent", "righttolefthbox", "lefttorighthbox", "righttoleftvbox", "lefttorightvbox", "righttoleftvtop", "lefttorightvtop", "rtlhbox", "ltrhbox", "rtlvbox", "ltrvbox", "rtlvtop", "ltrvtop", "autodirhbox", "autodirvbox", "autodirvtop", "leftorrighthbox", "leftorrightvbox", "leftorrightvtop", "lefttoright", "righttoleft", "synchronizelayoutdirection", "synchronizedisplaydirection", "synchronizeinlinedirection", "lesshyphens", "morehyphens", "nohyphens", "dohyphens", "Ucheckedstartdisplaymath", "Ucheckedstopdisplaymath", "break", "nobreak", "allowbreak", "goodbreak", "nospace", "nospacing", "dospacing", "naturalhbox", "naturalvbox", "naturalhpack", "naturalvpack", "frule" },
}
\ No newline at end of file
diff --git a/context/data/scite/context/lexers/data/scite-context-data-tex.lua b/context/data/scite/context/lexers/data/scite-context-data-tex.lua
index 887272423..fbc5d0269 100644
--- a/context/data/scite/context/lexers/data/scite-context-data-tex.lua
+++ b/context/data/scite/context/lexers/data/scite-context-data-tex.lua
@@ -3,7 +3,7 @@ return {
["etex"]={ "botmarks", "clubpenalties", "currentgrouplevel", "currentgrouptype", "currentifbranch", "currentiflevel", "currentiftype", "detokenize", "dimexpr", "displaywidowpenalties", "eTeXVersion", "eTeXminorversion", "eTeXrevision", "eTeXversion", "everyeof", "firstmarks", "fontchardp", "fontcharht", "fontcharic", "fontcharwd", "glueexpr", "glueshrink", "glueshrinkorder", "gluestretch", "gluestretchorder", "gluetomu", "ifcsname", "ifdefined", "iffontchar", "interactionmode", "interlinepenalties", "lastlinefit", "lastnodetype", "marks", "muexpr", "mutoglue", "numexpr", "pagediscards", "parshapedimen", "parshapeindent", "parshapelength", "predisplaydirection", "protected", "readline", "savinghyphcodes", "savingvdiscards", "scantokens", "showgroups", "showifs", "showtokens", "splitbotmarks", "splitdiscards", "splitfirstmarks", "topmarks", "tracingassigns", "tracinggroups", "tracingifs", "tracingnesting", "tracingscantokens", "unexpanded", "unless", "widowpenalties" },
["luatex"]={ "Uchar", "Udelcode", "Udelcodenum", "Udelimiter", "Udelimiterover", "Udelimiterunder", "Uhextensible", "Uleft", "Umathaccent", "Umathaxis", "Umathbinbinspacing", "Umathbinclosespacing", "Umathbininnerspacing", "Umathbinopenspacing", "Umathbinopspacing", "Umathbinordspacing", "Umathbinpunctspacing", "Umathbinrelspacing", "Umathchar", "Umathcharclass", "Umathchardef", "Umathcharfam", "Umathcharnum", "Umathcharnumdef", "Umathcharslot", "Umathclosebinspacing", "Umathcloseclosespacing", "Umathcloseinnerspacing", "Umathcloseopenspacing", "Umathcloseopspacing", "Umathcloseordspacing", "Umathclosepunctspacing", "Umathcloserelspacing", "Umathcode", "Umathcodenum", "Umathcodenumdef", "Umathconnectoroverlapmin", "Umathfractiondelsize", "Umathfractiondenomdown", "Umathfractiondenomvgap", "Umathfractionnumup", "Umathfractionnumvgap", "Umathfractionrule", "Umathinnerbinspacing", "Umathinnerclosespacing", "Umathinnerinnerspacing", "Umathinneropenspacing", "Umathinneropspacing", "Umathinnerordspacing", "Umathinnerpunctspacing", "Umathinnerrelspacing", "Umathlimitabovebgap", "Umathlimitabovekern", "Umathlimitabovevgap", "Umathlimitbelowbgap", "Umathlimitbelowkern", "Umathlimitbelowvgap", "Umathopbinspacing", "Umathopclosespacing", "Umathopenbinspacing", "Umathopenclosespacing", "Umathopeninnerspacing", "Umathopenopenspacing", "Umathopenopspacing", "Umathopenordspacing", "Umathopenpunctspacing", "Umathopenrelspacing", "Umathoperatorsize", "Umathopinnerspacing", "Umathopopenspacing", "Umathopopspacing", "Umathopordspacing", "Umathoppunctspacing", "Umathoprelspacing", "Umathordbinspacing", "Umathordclosespacing", "Umathordinnerspacing", "Umathordopenspacing", "Umathordopspacing", "Umathordordspacing", "Umathordpunctspacing", "Umathordrelspacing", "Umathoverbarkern", "Umathoverbarrule", "Umathoverbarvgap", "Umathoverdelimiterbgap", "Umathoverdelimitervgap", "Umathpunctbinspacing", "Umathpunctclosespacing", "Umathpunctinnerspacing", "Umathpunctopenspacing", "Umathpunctopspacing", "Umathpunctordspacing", "Umathpunctpunctspacing", "Umathpunctrelspacing", "Umathquad", "Umathradicaldegreeafter", "Umathradicaldegreebefore", "Umathradicaldegreeraise", "Umathradicalkern", "Umathradicalrule", "Umathradicalvgap", "Umathrelbinspacing", "Umathrelclosespacing", "Umathrelinnerspacing", "Umathrelopenspacing", "Umathrelopspacing", "Umathrelordspacing", "Umathrelpunctspacing", "Umathrelrelspacing", "Umathskewedfractionhgap", "Umathskewedfractionvgap", "Umathspaceafterscript", "Umathstackdenomdown", "Umathstacknumup", "Umathstackvgap", "Umathsubshiftdown", "Umathsubshiftdrop", "Umathsubsupshiftdown", "Umathsubsupvgap", "Umathsubtopmax", "Umathsupbottommin", "Umathsupshiftdrop", "Umathsupshiftup", "Umathsupsubbottommax", "Umathunderbarkern", "Umathunderbarrule", "Umathunderbarvgap", "Umathunderdelimiterbgap", "Umathunderdelimitervgap", "Umiddle", "Uoverdelimiter", "Uradical", "Uright", "Uroot", "Uskewed", "Uskewedwithdelims", "Ustack", "Ustartdisplaymath", "Ustartmath", "Ustopdisplaymath", "Ustopmath", "Usubscript", "Usuperscript", "Uunderdelimiter", "Uvextensible", "adjustspacing", "alignmark", "aligntab", "attribute", "attributedef", "bodydir", "boxdir", "catcodetable", "clearmarks", "copyfont", "crampeddisplaystyle", "crampedscriptscriptstyle", "crampedscriptstyle", "crampedtextstyle", "draftmode", "dviextension", "dvifeedback", "dvivariable", "efcode", "hjcode", "firstvalidlanguage", "fontid", "formatname", "gleaders", "hyphenationmin", "ifabsdim", "ifabsnum", "ifprimitive", "ignoreligaturesinfont", 
"initcatcodetable", "insertht", "lastsavedboxresourceindex", "lastsavedimageresourceindex", "lastsavedimageresourcepages", "lastxpos", "lastypos", "latelua", "leftghost", "leftmarginkern", "letcharcode", "letterspacefont", "localbrokenpenalty", "localinterlinepenalty", "localleftbox", "localrightbox", "lpcode", "luaescapestring", "luatexbanner", "luatexrevision", "luatexversion", "luafunction", "mathdir", "mathdisplayskipmode", "matheqnogapstep", "mathoption", "mathscriptsmode", "mathstyle", "mathsurroundskip", "nokerns", "nohrule", "noligs", "nospaces", "novrule", "normaldeviate", "outputbox", "outputmode", "pagedir", "pageheight", "pagebottomoffset", "pageleftoffset", "pagerightoffset", "pagetopoffset", "pagewidth", "pardir", "pdfextension", "pdffeedback", "pdfvariable", "postexhyphenchar", "posthyphenchar", "preexhyphenchar", "prehyphenchar", "primitive", "protrudechars", "pxdimen", "randomseed", "rightghost", "rightmarginkern", "rpcode", "saveboxresource", "savecatcodetable", "saveimageresource", "savepos", "scantextokens", "setfontid", "setrandomseed", "suppressfontnotfounderror", "suppressifcsnameerror", "suppresslongerror", "suppressoutererror", "suppressmathparerror", "synctex", "tagcode", "textdir", "tracingfonts", "uniformdeviate", "useboxresource", "useimageresource", "vpack", "hpack", "tpack", "csstring", "begincsname", "lastnamedcs", "toksapp", "tokspre", "etoksapp", "etokspre" },
["omega"]={ "OmegaVersion", "bodydir", "chardp", "charht", "charit", "charwd", "leftghost", "localbrokenpenalty", "localinterlinepenalty", "localleftbox", "localrightbox", "mathdir", "odelcode", "odelimiter", "omathaccent", "omathchar", "omathchardef", "omathcode", "oradical", "pagedir", "pageheight", "pagewidth", "pardir", "rightghost", "textdir" },
- ["pdftex"]={ "efcode", "expanded", "ifincsname", "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "leftmarginkern", "letterspacefont", "lpcode", "pdfadjustspacing", "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit", "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate", "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode", "pdfeachlineheight", "pdfeachlinedepth", "pdfendlink", "pdfendthread", "pdffirstlineheight", "pdffontattr", "pdffontexpand", "pdffontname", "pdffontobjnum", "pdffontsize", "pdfxformmargin", "pdfgamma", "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin", "pdfignoreddimen", "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor", "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts", "pdfignoreunknownimages", "pdfinclusionerrorlevel", "pdfignoreunknownimages", "pdfinfo", "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink", "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagepages", "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral", "pdfmapfile", "pdfmapline", "pdfminorversion", "pdfnames", "pdfnoligatures", "pdfnormaldeviate", "pdfobj", "pdfobjcompresslevel", "pdfoutline", "pdfoutput", "pdfpageattr", "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources", "pdfpagesattr", "pdfpagewidth", "pdfpkmode", "pdfpkresolution", "pdfpkfixeddpi", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen", "pdfrandomseed", "pdfrefobj", "pdfrefxform", "pdfrefximage", "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos", "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread", "pdftexbanner", "pdftexrevision", "pdftexversion", "pdfthread", "pdfthreadmargin", "pdftracingfonts", "pdftrailer", "pdfuniformdeviate", "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr", "pdfxformname", "pdfxformresources", "pdfximage", "quitvmode", "rightmarginkern", "rpcode", "tagcode" },
- ["tex"]={ "-", "/", "AlephVersion", "Alephminorversion", "Alephrevision", "Alephversion", "OmegaVersion", "Omegaminorversion", "Omegarevision", "Omegaversion", "Udelcode", "Udelcodenum", "Udelimiter", "Udelimiterover", "Udelimiterunder", "Uhextensible", "Uleft", "Umathaccent", "Umathaxis", "Umathbinbinspacing", "Umathbinclosespacing", "Umathbininnerspacing", "Umathbinopenspacing", "Umathbinopspacing", "Umathbinordspacing", "Umathbinpunctspacing", "Umathbinrelspacing", "Umathchar", "Umathcharclass", "Umathchardef", "Umathcharfam", "Umathcharnum", "Umathcharnumdef", "Umathcharslot", "Umathclosebinspacing", "Umathcloseclosespacing", "Umathcloseinnerspacing", "Umathcloseopenspacing", "Umathcloseopspacing", "Umathcloseordspacing", "Umathclosepunctspacing", "Umathcloserelspacing", "Umathcode", "Umathcodenum", "Umathcodenumdef", "Umathconnectoroverlapmin", "Umathfractiondelsize", "Umathfractiondenomdown", "Umathfractiondenomvgap", "Umathfractionnumup", "Umathfractionnumvgap", "Umathfractionrule", "Umathinnerbinspacing", "Umathinnerclosespacing", "Umathinnerinnerspacing", "Umathinneropenspacing", "Umathinneropspacing", "Umathinnerordspacing", "Umathinnerpunctspacing", "Umathinnerrelspacing", "Umathlimitabovebgap", "Umathlimitabovekern", "Umathlimitabovevgap", "Umathlimitbelowbgap", "Umathlimitbelowkern", "Umathlimitbelowvgap", "Umathopbinspacing", "Umathopclosespacing", "Umathopenbinspacing", "Umathopenclosespacing", "Umathopeninnerspacing", "Umathopenopenspacing", "Umathopenopspacing", "Umathopenordspacing", "Umathopenpunctspacing", "Umathopenrelspacing", "Umathoperatorsize", "Umathopinnerspacing", "Umathopopenspacing", "Umathopopspacing", "Umathopordspacing", "Umathoppunctspacing", "Umathoprelspacing", "Umathordbinspacing", "Umathordclosespacing", "Umathordinnerspacing", "Umathordopenspacing", "Umathordopspacing", "Umathordordspacing", "Umathordpunctspacing", "Umathordrelspacing", "Umathoverbarkern", "Umathoverbarrule", "Umathoverbarvgap", "Umathoverdelimiterbgap", "Umathoverdelimitervgap", "Umathpunctbinspacing", "Umathpunctclosespacing", "Umathpunctinnerspacing", "Umathpunctopenspacing", "Umathpunctopspacing", "Umathpunctordspacing", "Umathpunctpunctspacing", "Umathpunctrelspacing", "Umathquad", "Umathradicaldegreeafter", "Umathradicaldegreebefore", "Umathradicaldegreeraise", "Umathradicalkern", "Umathradicalrule", "Umathradicalvgap", "Umathrelbinspacing", "Umathrelclosespacing", "Umathrelinnerspacing", "Umathrelopenspacing", "Umathrelopspacing", "Umathrelordspacing", "Umathrelpunctspacing", "Umathrelrelspacing", "Umathskewedfractionhgap", "Umathskewedfractionvgap", "Umathspaceafterscript", "Umathstackdenomdown", "Umathstacknumup", "Umathstackvgap", "Umathsubshiftdown", "Umathsubshiftdrop", "Umathsubsupshiftdown", "Umathsubsupvgap", "Umathsubtopmax", "Umathsupbottommin", "Umathsupshiftdrop", "Umathsupshiftup", "Umathsupsubbottommax", "Umathunderbarkern", "Umathunderbarrule", "Umathunderbarvgap", "Umathunderdelimiterbgap", "Umathunderdelimitervgap", "Umiddle", "Uoverdelimiter", "Uradical", "Uright", "Uroot", "Uskewed", "Uskewedwithdelims", "Ustack", "Ustartdisplaymath", "Ustartmath", "Ustopdisplaymath", "Ustopmath", "Usubscript", "Usuperscript", "Uunderdelimiter", "Uvextensible", "above", "abovedisplayshortskip", "abovedisplayskip", "abovewithdelims", "accent", "adjdemerits", "advance", "afterassignment", "aftergroup", "alignmark", "aligntab", "atop", "atopwithdelims", "attribute", "attributedef", "badness", "baselineskip", "batchmode", "begingroup", "belowdisplayshortskip", 
"belowdisplayskip", "binoppenalty", "bodydir", "botmark", "botmarks", "boundary", "box", "boxdir", "boxmaxdepth", "brokenpenalty", "catcode", "catcodetable", "char", "chardef", "cleaders", "clearmarks", "closein", "closeout", "clubpenalties", "clubpenalty", "copy", "copyfont", "count", "countdef", "cr", "crampeddisplaystyle", "crampedscriptscriptstyle", "crampedscriptstyle", "crampedtextstyle", "crcr", "csname", "currentgrouplevel", "currentgrouptype", "currentifbranch", "currentiflevel", "currentiftype", "day", "deadcycles", "def", "defaulthyphenchar", "defaultskewchar", "delcode", "delimiter", "delimiterfactor", "delimitershortfall", "detokenize", "dimen", "dimendef", "dimexpr", "directlua", "discretionary", "displayindent", "displaylimits", "displaystyle", "displaywidowpenalties", "displaywidowpenalty", "displaywidth", "divide", "doublehyphendemerits", "dp", "dump", "dviextension", "dvifeedback", "dvivariable", "eTeXVersion", "eTeXminorversion", "eTeXrevision", "eTeXversion", "edef", "efcode", "hjcode", "else", "emergencystretch", "end", "endcsname", "endgroup", "endinput", "endlinechar", "eqno", "errhelp", "errmessage", "errorcontextlines", "errorstopmode", "escapechar", "everycr", "everydisplay", "everyeof", "everyhbox", "everyjob", "everymath", "everypar", "everyvbox", "exhyphenchar", "exhyphenpenalty", "expandafter", "expanded", "expandglyphsinfont", "fam", "fi", "finalhyphendemerits", "firstmark", "firstmarks", "floatingpenalty", "font", "fontchardp", "fontcharht", "fontcharic", "fontcharwd", "fontdimen", "firstvalidlanguage", "fontid", "fontname", "formatname", "futurelet", "gdef", "gleaders", "global", "globaldefs", "glueexpr", "glueshrink", "glueshrinkorder", "gluestretch", "gluestretchorder", "gluetomu", "halign", "hangafter", "hangindent", "hbadness", "hbox", "hfil", "hfill", "hfilneg", "hfuzz", "hoffset", "holdinginserts", "hrule", "hsize", "hskip", "hss", "ht", "hyphenation", "hyphenchar", "hyphenpenalty", "if", "ifabsdim", "ifabsnum", "ifcase", "ifcat", "ifcsname", "ifdefined", "ifdim", "ifeof", "iffalse", "iffontchar", "ifhbox", "ifhmode", "ifincsname", "ifinner", "ifmmode", "ifnum", "ifodd", "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "ifprimitive", "iftrue", "ifvbox", "ifvmode", "ifvoid", "ifx", "ignoreligaturesinfont", "ignorespaces", "immediate", "indent", "initcatcodetable", "input", "inputlineno", "insert", "insertpenalties", "interactionmode", "interlinepenalties", "interlinepenalty", "jobname", "kern", "language", "lastbox", "lastkern", "lastlinefit", "lastnodetype", "lastpenalty", "lastsavedboxresourceindex", "lastsavedimageresourceindex", "lastsavedimageresourcepages", "lastskip", "lastxpos", "lastypos", "latelua", "lccode", "leaders", "left", "leftghost", "lefthyphenmin", "leftmarginkern", "leftskip", "leqno", "let", "letcharcode", "letterspacefont", "limits", "linepenalty", "lineskip", "lineskiplimit", "localbrokenpenalty", "localinterlinepenalty", "localleftbox", "localrightbox", "long", "looseness", "lower", "lowercase", "lpcode", "luaescapestring", "luatexbanner", "luatexrevision", "luatexversion", "mag", "mark", "marks", "mathaccent", "mathbin", "mathchar", "mathchardef", "mathchoice", "mathclose", "mathcode", "mathdir", "mathdisplayskipmode", "matheqnogapstep", "mathinner", "mathop", "mathopen", "mathoption", "mathord", "mathpunct", "mathrel", "mathscriptsmode", "mathstyle", "mathsurroundskip", "mathsurround", "maxdeadcycles", "maxdepth", "meaning", "medmuskip", "message", "middle", "mkern", "month", "moveleft", "moveright", "mskip", "muexpr", 
"multiply", "muskip", "muskipdef", "mutoglue", "newlinechar", "noalign", "noboundary", "noexpand", "noindent", "nokerns", "nohrule", "noligs", "nospaces", "novrule", "nolimits", "nolocaldirs", "nolocalwhatsits", "nonscript", "nonstopmode", "normaldeviate", "nulldelimiterspace", "nullfont", "number", "numexpr", "odelcode", "odelimiter", "omathaccent", "omathchar", "omathchardef", "omathcode", "omit", "openin", "openout", "or", "oradical", "outer", "output", "outputbox", "outputpenalty", "over", "overfullrule", "overline", "overwithdelims", "pagebottomoffset", "pagedepth", "pagedir", "pagediscards", "pagefilllstretch", "pagefillstretch", "pagefilstretch", "pagegoal", "pageheight", "pageleftoffset", "pagerightoffset", "pageshrink", "pagestretch", "pagetopoffset", "pagetotal", "pagewidth", "par", "pardir", "parfillskip", "parindent", "parshape", "parshapedimen", "parshapeindent", "parshapelength", "parskip", "patterns", "pausing", "pdfadjustspacing", "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit", "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate", "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode", "pdfeachlineheight", "pdfeachlinedepth", "pdfendlink", "pdfendthread", "pdfextension", "pdfvariable", "pdffirstlineheight", "pdffontattr", "pdffontexpand", "pdffontname", "pdffontobjnum", "pdffontsize", "pdfxformmargin", "pdfgamma", "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin", "pdfignoreddimen", "pdfimageaddfilename", "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor", "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts", "pdfinclusionerrorlevel", "pdfignoreunknownimages", "pdfinfo", "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink", "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagepages", "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral", "pdfmapfile", "pdfmapline", "pdfminorversion", "pdfnames", "pdfnoligatures", "pdfnormaldeviate", "pdfobj", "pdfobjcompresslevel", "pdfoutline", "pdfoutput", "pdfpageattr", "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources", "pdfpagesattr", "pdfpagewidth", "pdfpkmode", "pdfpkresolution", "pdfpkfixeddpi", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen", "pdfrandomseed", "pdfrefobj", "pdfrefxform", "pdfrefximage", "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos", "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread", "pdftexbanner", "pdftexrevision", "pdftexversion", "pdfthread", "pdfthreadmargin", "pdftracingfonts", "pdftrailer", "pdfuniformdeviate", "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr", "pdfxformname", "pdfxformresources", "pdfximage", "penalty", "postdisplaypenalty", "postexhyphenchar", "posthyphenchar", "predisplaydirection", "predisplaypenalty", "predisplaysize", "preexhyphenchar", "prehyphenchar", "pretolerance", "prevdepth", "prevgraf", "primitive", "protected", "pxdimen", "quitvmode", "radical", "raise", "randomseed", "read", "readline", "relax", "relpenalty", "right", "rightghost", "righthyphenmin", "rightmarginkern", "rightskip", "romannumeral", "rpcode", "saveboxresource", "saveimageresource", "savepos", "savecatcodetable", "savinghyphcodes", "savingvdiscards", "scantextokens", "scantokens", "scriptfont", "scriptscriptfont", "scriptscriptstyle", "scriptspace", "scriptstyle", "scrollmode", "setbox", "setfontid", "setlanguage", "setrandomseed", "sfcode", "shipout", "show", "showbox", "showboxbreadth", "showboxdepth", "showgroups", "showifs", "showlists", "showthe", "showtokens", "skewchar", 
"skip", "skipdef", "spacefactor", "spaceskip", "span", "special", "splitbotmark", "splitbotmarks", "splitdiscards", "splitfirstmark", "splitfirstmarks", "splitmaxdepth", "splittopskip", "string", "suppressfontnotfounderror", "suppressifcsnameerror", "suppresslongerror", "suppressoutererror", "synctex", "tabskip", "tagcode", "textdir", "textfont", "textstyle", "the", "thickmuskip", "thinmuskip", "time", "toks", "toksdef", "tolerance", "topmark", "topmarks", "topskip", "tracingassigns", "tracingcommands", "tracingfonts", "tracinggroups", "tracingifs", "tracinglostchars", "tracingmacros", "tracingnesting", "tracingonline", "tracingoutput", "tracingpages", "tracingparagraphs", "tracingrestores", "tracingscantokens", "tracingstats", "uccode", "uchyph", "underline", "unexpanded", "unhbox", "unhcopy", "uniformdeviate", "unkern", "unless", "unpenalty", "unskip", "unvbox", "unvcopy", "uppercase", "useboxresource", "useimageresource", "vadjust", "valign", "vbadness", "vbox", "vcenter", "vfil", "vfill", "vfilneg", "vfuzz", "voffset", "vrule", "vsize", "vskip", "vsplit", "vss", "vtop", "wd", "widowpenalties", "widowpenalty", "write", "xdef", "xleaders", "xspaceskip", "year", "vpack", "hpack", "tpack", "csstring", "begincsname", "lastnamedcs", "toksapp", "tokspre", "etoksapp", "etokspre" },
+ ["pdftex"]={ "efcode", "expanded", "ifincsname", "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "leftmarginkern", "letterspacefont", "lpcode", "pdfadjustspacing", "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit", "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate", "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode", "pdfeachlineheight", "pdfeachlinedepth", "pdfendlink", "pdfendthread", "pdffirstlineheight", "pdffontattr", "pdffontexpand", "pdffontname", "pdffontobjnum", "pdffontsize", "pdfxformmargin", "pdfgamma", "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin", "pdfignoreddimen", "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor", "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts", "pdfignoreunknownimages", "pdfinclusionerrorlevel", "pdfignoreunknownimages", "pdfinfo", "pdfinfoid", "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink", "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagepages", "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral", "pdfmapfile", "pdfmapline", "pdfminorversion", "pdfnames", "pdfnoligatures", "pdfnormaldeviate", "pdfobj", "pdfobjcompresslevel", "pdfoutline", "pdfoutput", "pdfpageattr", "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources", "pdfpagesattr", "pdfpagewidth", "pdfpkmode", "pdfpkresolution", "pdfpkfixeddpi", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen", "pdfrandomseed", "pdfrefobj", "pdfrefxform", "pdfrefximage", "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos", "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread", "pdftexbanner", "pdftexrevision", "pdftexversion", "pdfsuppressptexinfo", "pdfsuppressoptionalinfo", "pdfthread", "pdfthreadmargin", "pdftracingfonts", "pdftrailer", "pdfuniformdeviate", "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr", "pdfxformname", "pdfxformresources", "pdfximage", "quitvmode", "rightmarginkern", "rpcode", "tagcode" },
+ ["tex"]={ "-", "/", "AlephVersion", "Alephminorversion", "Alephrevision", "Alephversion", "OmegaVersion", "Omegaminorversion", "Omegarevision", "Omegaversion", "Udelcode", "Udelcodenum", "Udelimiter", "Udelimiterover", "Udelimiterunder", "Uhextensible", "Uleft", "Umathaccent", "Umathaxis", "Umathbinbinspacing", "Umathbinclosespacing", "Umathbininnerspacing", "Umathbinopenspacing", "Umathbinopspacing", "Umathbinordspacing", "Umathbinpunctspacing", "Umathbinrelspacing", "Umathchar", "Umathcharclass", "Umathchardef", "Umathcharfam", "Umathcharnum", "Umathcharnumdef", "Umathcharslot", "Umathclosebinspacing", "Umathcloseclosespacing", "Umathcloseinnerspacing", "Umathcloseopenspacing", "Umathcloseopspacing", "Umathcloseordspacing", "Umathclosepunctspacing", "Umathcloserelspacing", "Umathcode", "Umathcodenum", "Umathcodenumdef", "Umathconnectoroverlapmin", "Umathfractiondelsize", "Umathfractiondenomdown", "Umathfractiondenomvgap", "Umathfractionnumup", "Umathfractionnumvgap", "Umathfractionrule", "Umathinnerbinspacing", "Umathinnerclosespacing", "Umathinnerinnerspacing", "Umathinneropenspacing", "Umathinneropspacing", "Umathinnerordspacing", "Umathinnerpunctspacing", "Umathinnerrelspacing", "Umathlimitabovebgap", "Umathlimitabovekern", "Umathlimitabovevgap", "Umathlimitbelowbgap", "Umathlimitbelowkern", "Umathlimitbelowvgap", "Umathopbinspacing", "Umathopclosespacing", "Umathopenbinspacing", "Umathopenclosespacing", "Umathopeninnerspacing", "Umathopenopenspacing", "Umathopenopspacing", "Umathopenordspacing", "Umathopenpunctspacing", "Umathopenrelspacing", "Umathoperatorsize", "Umathopinnerspacing", "Umathopopenspacing", "Umathopopspacing", "Umathopordspacing", "Umathoppunctspacing", "Umathoprelspacing", "Umathordbinspacing", "Umathordclosespacing", "Umathordinnerspacing", "Umathordopenspacing", "Umathordopspacing", "Umathordordspacing", "Umathordpunctspacing", "Umathordrelspacing", "Umathoverbarkern", "Umathoverbarrule", "Umathoverbarvgap", "Umathoverdelimiterbgap", "Umathoverdelimitervgap", "Umathpunctbinspacing", "Umathpunctclosespacing", "Umathpunctinnerspacing", "Umathpunctopenspacing", "Umathpunctopspacing", "Umathpunctordspacing", "Umathpunctpunctspacing", "Umathpunctrelspacing", "Umathquad", "Umathradicaldegreeafter", "Umathradicaldegreebefore", "Umathradicaldegreeraise", "Umathradicalkern", "Umathradicalrule", "Umathradicalvgap", "Umathrelbinspacing", "Umathrelclosespacing", "Umathrelinnerspacing", "Umathrelopenspacing", "Umathrelopspacing", "Umathrelordspacing", "Umathrelpunctspacing", "Umathrelrelspacing", "Umathskewedfractionhgap", "Umathskewedfractionvgap", "Umathspaceafterscript", "Umathstackdenomdown", "Umathstacknumup", "Umathstackvgap", "Umathsubshiftdown", "Umathsubshiftdrop", "Umathsubsupshiftdown", "Umathsubsupvgap", "Umathsubtopmax", "Umathsupbottommin", "Umathsupshiftdrop", "Umathsupshiftup", "Umathsupsubbottommax", "Umathunderbarkern", "Umathunderbarrule", "Umathunderbarvgap", "Umathunderdelimiterbgap", "Umathunderdelimitervgap", "Umiddle", "Uoverdelimiter", "Uradical", "Uright", "Uroot", "Uskewed", "Uskewedwithdelims", "Ustack", "Ustartdisplaymath", "Ustartmath", "Ustopdisplaymath", "Ustopmath", "Usubscript", "Usuperscript", "Uunderdelimiter", "Uvextensible", "above", "abovedisplayshortskip", "abovedisplayskip", "abovewithdelims", "accent", "adjdemerits", "advance", "afterassignment", "aftergroup", "alignmark", "aligntab", "atop", "atopwithdelims", "attribute", "attributedef", "badness", "baselineskip", "batchmode", "begingroup", "belowdisplayshortskip", 
"belowdisplayskip", "binoppenalty", "bodydir", "botmark", "botmarks", "boundary", "box", "boxdir", "boxmaxdepth", "brokenpenalty", "catcode", "catcodetable", "char", "chardef", "cleaders", "clearmarks", "closein", "closeout", "clubpenalties", "clubpenalty", "copy", "copyfont", "count", "countdef", "cr", "crampeddisplaystyle", "crampedscriptscriptstyle", "crampedscriptstyle", "crampedtextstyle", "crcr", "csname", "currentgrouplevel", "currentgrouptype", "currentifbranch", "currentiflevel", "currentiftype", "day", "deadcycles", "def", "defaulthyphenchar", "defaultskewchar", "delcode", "delimiter", "delimiterfactor", "delimitershortfall", "detokenize", "dimen", "dimendef", "dimexpr", "directlua", "discretionary", "displayindent", "displaylimits", "displaystyle", "displaywidowpenalties", "displaywidowpenalty", "displaywidth", "divide", "doublehyphendemerits", "dp", "dump", "dviextension", "dvifeedback", "dvivariable", "eTeXVersion", "eTeXminorversion", "eTeXrevision", "eTeXversion", "edef", "efcode", "hjcode", "else", "emergencystretch", "end", "endcsname", "endgroup", "endinput", "endlinechar", "eqno", "errhelp", "errmessage", "errorcontextlines", "errorstopmode", "escapechar", "everycr", "everydisplay", "everyeof", "everyhbox", "everyjob", "everymath", "everypar", "everyvbox", "exhyphenchar", "exhyphenpenalty", "expandafter", "expanded", "expandglyphsinfont", "fam", "fi", "finalhyphendemerits", "firstmark", "firstmarks", "floatingpenalty", "font", "fontchardp", "fontcharht", "fontcharic", "fontcharwd", "fontdimen", "firstvalidlanguage", "fontid", "fontname", "formatname", "futurelet", "gdef", "gleaders", "global", "globaldefs", "glueexpr", "glueshrink", "glueshrinkorder", "gluestretch", "gluestretchorder", "gluetomu", "halign", "hangafter", "hangindent", "hbadness", "hbox", "hfil", "hfill", "hfilneg", "hfuzz", "hoffset", "holdinginserts", "hrule", "hsize", "hskip", "hss", "ht", "hyphenation", "hyphenchar", "hyphenpenalty", "if", "ifabsdim", "ifabsnum", "ifcase", "ifcat", "ifcsname", "ifdefined", "ifdim", "ifeof", "iffalse", "iffontchar", "ifhbox", "ifhmode", "ifincsname", "ifinner", "ifmmode", "ifnum", "ifodd", "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "ifprimitive", "iftrue", "ifvbox", "ifvmode", "ifvoid", "ifx", "ignoreligaturesinfont", "ignorespaces", "immediate", "indent", "initcatcodetable", "input", "inputlineno", "insert", "insertpenalties", "interactionmode", "interlinepenalties", "interlinepenalty", "jobname", "kern", "language", "lastbox", "lastkern", "lastlinefit", "lastnodetype", "lastpenalty", "lastsavedboxresourceindex", "lastsavedimageresourceindex", "lastsavedimageresourcepages", "lastskip", "lastxpos", "lastypos", "latelua", "lccode", "leaders", "left", "leftghost", "lefthyphenmin", "leftmarginkern", "leftskip", "leqno", "let", "letcharcode", "letterspacefont", "limits", "linepenalty", "lineskip", "lineskiplimit", "localbrokenpenalty", "localinterlinepenalty", "localleftbox", "localrightbox", "long", "looseness", "lower", "lowercase", "lpcode", "luaescapestring", "luatexbanner", "luatexrevision", "luatexversion", "mag", "mark", "marks", "mathaccent", "mathbin", "mathchar", "mathchardef", "mathchoice", "mathclose", "mathcode", "mathdir", "mathdisplayskipmode", "matheqnogapstep", "mathinner", "mathop", "mathopen", "mathoption", "mathord", "mathpunct", "mathrel", "mathscriptsmode", "mathstyle", "mathsurroundskip", "mathsurround", "maxdeadcycles", "maxdepth", "meaning", "medmuskip", "message", "middle", "mkern", "month", "moveleft", "moveright", "mskip", "muexpr", 
"multiply", "muskip", "muskipdef", "mutoglue", "newlinechar", "noalign", "noboundary", "noexpand", "noindent", "nokerns", "nohrule", "noligs", "nospaces", "novrule", "nolimits", "nolocaldirs", "nolocalwhatsits", "nonscript", "nonstopmode", "normaldeviate", "nulldelimiterspace", "nullfont", "number", "numexpr", "odelcode", "odelimiter", "omathaccent", "omathchar", "omathchardef", "omathcode", "omit", "openin", "openout", "or", "oradical", "outer", "output", "outputbox", "outputpenalty", "over", "overfullrule", "overline", "overwithdelims", "pagebottomoffset", "pagedepth", "pagedir", "pagediscards", "pagefilllstretch", "pagefillstretch", "pagefilstretch", "pagegoal", "pageheight", "pageleftoffset", "pagerightoffset", "pageshrink", "pagestretch", "pagetopoffset", "pagetotal", "pagewidth", "par", "pardir", "parfillskip", "parindent", "parshape", "parshapedimen", "parshapeindent", "parshapelength", "parskip", "patterns", "pausing", "pdfadjustspacing", "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit", "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate", "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode", "pdfeachlineheight", "pdfeachlinedepth", "pdfendlink", "pdfendthread", "pdfextension", "pdfvariable", "pdffirstlineheight", "pdffontattr", "pdffontexpand", "pdffontname", "pdffontobjnum", "pdffontsize", "pdfxformmargin", "pdfgamma", "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin", "pdfignoreddimen", "pdfimageaddfilename", "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor", "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts", "pdfinclusionerrorlevel", "pdfignoreunknownimages", "pdfinfo", "pdfinfoid", "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink", "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagepages", "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral", "pdfmapfile", "pdfmapline", "pdfminorversion", "pdfnames", "pdfnoligatures", "pdfnormaldeviate", "pdfobj", "pdfobjcompresslevel", "pdfoutline", "pdfoutput", "pdfpageattr", "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources", "pdfpagesattr", "pdfpagewidth", "pdfpkmode", "pdfpkresolution", "pdfpkfixeddpi", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen", "pdfrandomseed", "pdfrefobj", "pdfrefxform", "pdfrefximage", "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos", "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread", "pdftexbanner", "pdftexrevision", "pdftexversion", "pdfsuppressptexinfo", "pdfsuppressoptionalinfo", "pdfthread", "pdfthreadmargin", "pdftracingfonts", "pdftrailer", "pdfuniformdeviate", "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr", "pdfxformname", "pdfxformresources", "pdfximage", "penalty", "postdisplaypenalty", "postexhyphenchar", "posthyphenchar", "predisplaydirection", "predisplaypenalty", "predisplaysize", "preexhyphenchar", "prehyphenchar", "pretolerance", "prevdepth", "prevgraf", "primitive", "protected", "pxdimen", "quitvmode", "radical", "raise", "randomseed", "read", "readline", "relax", "relpenalty", "right", "rightghost", "righthyphenmin", "rightmarginkern", "rightskip", "romannumeral", "rpcode", "saveboxresource", "saveimageresource", "savepos", "savecatcodetable", "savinghyphcodes", "savingvdiscards", "scantextokens", "scantokens", "scriptfont", "scriptscriptfont", "scriptscriptstyle", "scriptspace", "scriptstyle", "scrollmode", "setbox", "setfontid", "setlanguage", "setrandomseed", "sfcode", "shipout", "show", "showbox", "showboxbreadth", "showboxdepth", "showgroups", 
"showifs", "showlists", "showthe", "showtokens", "skewchar", "skip", "skipdef", "spacefactor", "spaceskip", "span", "special", "splitbotmark", "splitbotmarks", "splitdiscards", "splitfirstmark", "splitfirstmarks", "splitmaxdepth", "splittopskip", "string", "suppressfontnotfounderror", "suppressifcsnameerror", "suppresslongerror", "suppressoutererror", "synctex", "tabskip", "tagcode", "textdir", "textfont", "textstyle", "the", "thickmuskip", "thinmuskip", "time", "toks", "toksdef", "tolerance", "topmark", "topmarks", "topskip", "tracingassigns", "tracingcommands", "tracingfonts", "tracinggroups", "tracingifs", "tracinglostchars", "tracingmacros", "tracingnesting", "tracingonline", "tracingoutput", "tracingpages", "tracingparagraphs", "tracingrestores", "tracingscantokens", "tracingstats", "uccode", "uchyph", "underline", "unexpanded", "unhbox", "unhcopy", "uniformdeviate", "unkern", "unless", "unpenalty", "unskip", "unvbox", "unvcopy", "uppercase", "useboxresource", "useimageresource", "vadjust", "valign", "vbadness", "vbox", "vcenter", "vfil", "vfill", "vfilneg", "vfuzz", "voffset", "vrule", "vsize", "vskip", "vsplit", "vss", "vtop", "wd", "widowpenalties", "widowpenalty", "write", "xdef", "xleaders", "xspaceskip", "year", "vpack", "hpack", "tpack", "csstring", "begincsname", "lastnamedcs", "toksapp", "tokspre", "etoksapp", "etokspre" },
["xetex"]={ "XeTeXversion" },
} \ No newline at end of file
diff --git a/context/data/scite/context/scite-context-data-context.properties b/context/data/scite/context/scite-context-data-context.properties
index 9d592d15b..fdf2f64c9 100644
--- a/context/data/scite/context/scite-context-data-context.properties
+++ b/context/data/scite/context/scite-context-data-context.properties
@@ -169,44 +169,45 @@ doiffirstcharelse startnointerference stopnointerference twodigits threedigits \
leftorright offinterlineskip oninterlineskip nointerlineskip strut \
halfstrut quarterstrut depthstrut setstrut strutbox \
strutht strutdp strutwd struthtdp begstrut \
-endstrut lineheight ordordspacing ordopspacing ordbinspacing \
-ordrelspacing ordopenspacing ordclosespacing ordpunctspacing ordinnerspacing \
-opordspacing opopspacing opbinspacing oprelspacing opopenspacing \
-opclosespacing oppunctspacing opinnerspacing binordspacing binopspacing \
-binbinspacing binrelspacing binopenspacing binclosespacing binpunctspacing \
-bininnerspacing relordspacing relopspacing relbinspacing relrelspacing \
-relopenspacing relclosespacing relpunctspacing relinnerspacing openordspacing \
-openopspacing openbinspacing openrelspacing openopenspacing openclosespacing \
-openpunctspacing openinnerspacing closeordspacing closeopspacing closebinspacing \
-closerelspacing closeopenspacing closeclosespacing closepunctspacing closeinnerspacing \
-punctordspacing punctopspacing punctbinspacing punctrelspacing punctopenspacing \
-punctclosespacing punctpunctspacing punctinnerspacing innerordspacing inneropspacing \
-innerbinspacing innerrelspacing inneropenspacing innerclosespacing innerpunctspacing \
-innerinnerspacing normalreqno startimath stopimath normalstartimath \
-normalstopimath startdmath stopdmath normalstartdmath normalstopdmath \
-normalsuperscript normalsubscript uncramped cramped triggermathstyle \
-mathstylefont mathsmallstylefont mathstyleface mathsmallstyleface mathstylecommand \
-mathpalette mathstylehbox mathstylevbox mathstylevcenter mathstylevcenteredhbox \
-mathstylevcenteredvbox mathtext setmathsmalltextbox setmathtextbox pushmathstyle \
-popmathstyle triggerdisplaystyle triggertextstyle triggerscriptstyle triggerscriptscriptstyle \
-triggeruncrampedstyle triggercrampedstyle triggersmallstyle triggeruncrampedsmallstyle triggercrampedsmallstyle \
-triggerbigstyle triggeruncrampedbigstyle triggercrampedbigstyle luaexpr expelsedoif \
-expdoif expdoifnot expdoifelsecommon expdoifcommonelse expdoifelseinset \
-expdoifinsetelse ctxdirectlua ctxlatelua ctxsprint ctxwrite \
-ctxcommand ctxdirectcommand ctxlatecommand ctxreport ctxlua \
-luacode lateluacode directluacode registerctxluafile ctxloadluafile \
-luaversion luamajorversion luaminorversion ctxluacode luaconditional \
-luaexpanded startluaparameterset stopluaparameterset luaparameterset definenamedlua \
-obeylualines obeyluatokens startluacode stopluacode startlua \
-stoplua startctxfunction stopctxfunction ctxfunction startctxfunctiondefinition \
-stopctxfunctiondefinition installctxfunction cldprocessfile cldloadfile cldcontext \
-cldcommand carryoverpar assumelongusagecs Umathbotaccent righttolefthbox \
-lefttorighthbox righttoleftvbox lefttorightvbox righttoleftvtop lefttorightvtop \
-rtlhbox ltrhbox rtlvbox ltrvbox rtlvtop \
-ltrvtop autodirhbox autodirvbox autodirvtop leftorrighthbox \
-leftorrightvbox leftorrightvtop lefttoright righttoleft synchronizelayoutdirection \
-synchronizedisplaydirection synchronizeinlinedirection lesshyphens morehyphens nohyphens \
-dohyphens Ucheckedstartdisplaymath Ucheckedstopdisplaymath break nobreak \
-allowbreak goodbreak nospace nospacing dospacing \
-naturalhbox naturalvbox naturalhpack naturalvpack frule
+endstrut lineheight leftboundary rightboundary signalcharacter \
+ordordspacing ordopspacing ordbinspacing ordrelspacing ordopenspacing \
+ordclosespacing ordpunctspacing ordinnerspacing opordspacing opopspacing \
+opbinspacing oprelspacing opopenspacing opclosespacing oppunctspacing \
+opinnerspacing binordspacing binopspacing binbinspacing binrelspacing \
+binopenspacing binclosespacing binpunctspacing bininnerspacing relordspacing \
+relopspacing relbinspacing relrelspacing relopenspacing relclosespacing \
+relpunctspacing relinnerspacing openordspacing openopspacing openbinspacing \
+openrelspacing openopenspacing openclosespacing openpunctspacing openinnerspacing \
+closeordspacing closeopspacing closebinspacing closerelspacing closeopenspacing \
+closeclosespacing closepunctspacing closeinnerspacing punctordspacing punctopspacing \
+punctbinspacing punctrelspacing punctopenspacing punctclosespacing punctpunctspacing \
+punctinnerspacing innerordspacing inneropspacing innerbinspacing innerrelspacing \
+inneropenspacing innerclosespacing innerpunctspacing innerinnerspacing normalreqno \
+startimath stopimath normalstartimath normalstopimath startdmath \
+stopdmath normalstartdmath normalstopdmath normalsuperscript normalsubscript \
+uncramped cramped triggermathstyle mathstylefont mathsmallstylefont \
+mathstyleface mathsmallstyleface mathstylecommand mathpalette mathstylehbox \
+mathstylevbox mathstylevcenter mathstylevcenteredhbox mathstylevcenteredvbox mathtext \
+setmathsmalltextbox setmathtextbox pushmathstyle popmathstyle triggerdisplaystyle \
+triggertextstyle triggerscriptstyle triggerscriptscriptstyle triggeruncrampedstyle triggercrampedstyle \
+triggersmallstyle triggeruncrampedsmallstyle triggercrampedsmallstyle triggerbigstyle triggeruncrampedbigstyle \
+triggercrampedbigstyle luaexpr expelsedoif expdoif expdoifnot \
+expdoifelsecommon expdoifcommonelse expdoifelseinset expdoifinsetelse ctxdirectlua \
+ctxlatelua ctxsprint ctxwrite ctxcommand ctxdirectcommand \
+ctxlatecommand ctxreport ctxlua luacode lateluacode \
+directluacode registerctxluafile ctxloadluafile luaversion luamajorversion \
+luaminorversion ctxluacode luaconditional luaexpanded startluaparameterset \
+stopluaparameterset luaparameterset definenamedlua obeylualines obeyluatokens \
+startluacode stopluacode startlua stoplua startctxfunction \
+stopctxfunction ctxfunction startctxfunctiondefinition stopctxfunctiondefinition installctxfunction \
+cldprocessfile cldloadfile cldcontext cldcommand carryoverpar \
+assumelongusagecs Umathbotaccent righttolefthbox lefttorighthbox righttoleftvbox \
+lefttorightvbox righttoleftvtop lefttorightvtop rtlhbox ltrhbox \
+rtlvbox ltrvbox rtlvtop ltrvtop autodirhbox \
+autodirvbox autodirvtop leftorrighthbox leftorrightvbox leftorrightvtop \
+lefttoright righttoleft synchronizelayoutdirection synchronizedisplaydirection synchronizeinlinedirection \
+lesshyphens morehyphens nohyphens dohyphens Ucheckedstartdisplaymath \
+Ucheckedstopdisplaymath break nobreak allowbreak goodbreak \
+nospace nospacing dospacing naturalhbox naturalvbox \
+naturalhpack naturalvpack frule
diff --git a/context/data/scite/context/scite-context-data-tex.properties b/context/data/scite/context/scite-context-data-tex.properties
index f7893084e..2cdcde6ab 100644
--- a/context/data/scite/context/scite-context-data-tex.properties
+++ b/context/data/scite/context/scite-context-data-tex.properties
@@ -91,20 +91,21 @@ pdfendthread pdffirstlineheight pdffontattr pdffontexpand pdffontname \
pdffontobjnum pdffontsize pdfxformmargin pdfgamma pdfgentounicode \
pdfglyphtounicode pdfhorigin pdfignoreddimen pdfimageapplygamma pdfimagegamma \
pdfimagehicolor pdfimageresolution pdfincludechars pdfinclusioncopyfonts pdfignoreunknownimages \
-pdfinclusionerrorlevel pdfignoreunknownimages pdfinfo pdfinsertht pdflastannot \
-pdflastlinedepth pdflastlink pdflastobj pdflastxform pdflastximage \
-pdflastximagepages pdflastxpos pdflastypos pdflinkmargin pdfliteral \
-pdfmapfile pdfmapline pdfminorversion pdfnames pdfnoligatures \
-pdfnormaldeviate pdfobj pdfobjcompresslevel pdfoutline pdfoutput \
-pdfpageattr pdfpagebox pdfpageheight pdfpageref pdfpageresources \
-pdfpagesattr pdfpagewidth pdfpkmode pdfpkresolution pdfpkfixeddpi \
-pdfprimitive pdfprotrudechars pdfpxdimen pdfrandomseed pdfrefobj \
-pdfrefxform pdfrefximage pdfreplacefont pdfrestore pdfretval \
-pdfsave pdfsavepos pdfsetmatrix pdfsetrandomseed pdfstartlink \
-pdfstartthread pdftexbanner pdftexrevision pdftexversion pdfthread \
-pdfthreadmargin pdftracingfonts pdftrailer pdfuniformdeviate pdfuniqueresname \
-pdfvorigin pdfxform pdfxformattr pdfxformname pdfxformresources \
-pdfximage quitvmode rightmarginkern rpcode tagcode
+pdfinclusionerrorlevel pdfignoreunknownimages pdfinfo pdfinfoid pdfinsertht \
+pdflastannot pdflastlinedepth pdflastlink pdflastobj pdflastxform \
+pdflastximage pdflastximagepages pdflastxpos pdflastypos pdflinkmargin \
+pdfliteral pdfmapfile pdfmapline pdfminorversion pdfnames \
+pdfnoligatures pdfnormaldeviate pdfobj pdfobjcompresslevel pdfoutline \
+pdfoutput pdfpageattr pdfpagebox pdfpageheight pdfpageref \
+pdfpageresources pdfpagesattr pdfpagewidth pdfpkmode pdfpkresolution \
+pdfpkfixeddpi pdfprimitive pdfprotrudechars pdfpxdimen pdfrandomseed \
+pdfrefobj pdfrefxform pdfrefximage pdfreplacefont pdfrestore \
+pdfretval pdfsave pdfsavepos pdfsetmatrix pdfsetrandomseed \
+pdfstartlink pdfstartthread pdftexbanner pdftexrevision pdftexversion \
+pdfsuppressptexinfo pdfsuppressoptionalinfo pdfthread pdfthreadmargin pdftracingfonts \
+pdftrailer pdfuniformdeviate pdfuniqueresname pdfvorigin pdfxform \
+pdfxformattr pdfxformname pdfxformresources pdfximage quitvmode \
+rightmarginkern rpcode tagcode
keywordclass.tex.tex=\
- / AlephVersion Alephminorversion \
@@ -216,51 +217,52 @@ pdffirstlineheight pdffontattr pdffontexpand pdffontname pdffontobjnum \
pdffontsize pdfxformmargin pdfgamma pdfgentounicode pdfglyphtounicode \
pdfhorigin pdfignoreddimen pdfimageaddfilename pdfimageapplygamma pdfimagegamma \
pdfimagehicolor pdfimageresolution pdfincludechars pdfinclusioncopyfonts pdfinclusionerrorlevel \
-pdfignoreunknownimages pdfinfo pdfinsertht pdflastannot pdflastlinedepth \
-pdflastlink pdflastobj pdflastxform pdflastximage pdflastximagepages \
-pdflastxpos pdflastypos pdflinkmargin pdfliteral pdfmapfile \
-pdfmapline pdfminorversion pdfnames pdfnoligatures pdfnormaldeviate \
-pdfobj pdfobjcompresslevel pdfoutline pdfoutput pdfpageattr \
-pdfpagebox pdfpageheight pdfpageref pdfpageresources pdfpagesattr \
-pdfpagewidth pdfpkmode pdfpkresolution pdfpkfixeddpi pdfprimitive \
-pdfprotrudechars pdfpxdimen pdfrandomseed pdfrefobj pdfrefxform \
-pdfrefximage pdfreplacefont pdfrestore pdfretval pdfsave \
-pdfsavepos pdfsetmatrix pdfsetrandomseed pdfstartlink pdfstartthread \
-pdftexbanner pdftexrevision pdftexversion pdfthread pdfthreadmargin \
-pdftracingfonts pdftrailer pdfuniformdeviate pdfuniqueresname pdfvorigin \
-pdfxform pdfxformattr pdfxformname pdfxformresources pdfximage \
-penalty postdisplaypenalty postexhyphenchar posthyphenchar predisplaydirection \
-predisplaypenalty predisplaysize preexhyphenchar prehyphenchar pretolerance \
-prevdepth prevgraf primitive protected pxdimen \
-quitvmode radical raise randomseed read \
-readline relax relpenalty right rightghost \
-righthyphenmin rightmarginkern rightskip romannumeral rpcode \
-saveboxresource saveimageresource savepos savecatcodetable savinghyphcodes \
-savingvdiscards scantextokens scantokens scriptfont scriptscriptfont \
-scriptscriptstyle scriptspace scriptstyle scrollmode setbox \
-setfontid setlanguage setrandomseed sfcode shipout \
-show showbox showboxbreadth showboxdepth showgroups \
-showifs showlists showthe showtokens skewchar \
-skip skipdef spacefactor spaceskip span \
-special splitbotmark splitbotmarks splitdiscards splitfirstmark \
-splitfirstmarks splitmaxdepth splittopskip string suppressfontnotfounderror \
-suppressifcsnameerror suppresslongerror suppressoutererror synctex tabskip \
-tagcode textdir textfont textstyle the \
-thickmuskip thinmuskip time toks toksdef \
-tolerance topmark topmarks topskip tracingassigns \
-tracingcommands tracingfonts tracinggroups tracingifs tracinglostchars \
-tracingmacros tracingnesting tracingonline tracingoutput tracingpages \
-tracingparagraphs tracingrestores tracingscantokens tracingstats uccode \
-uchyph underline unexpanded unhbox unhcopy \
-uniformdeviate unkern unless unpenalty unskip \
-unvbox unvcopy uppercase useboxresource useimageresource \
-vadjust valign vbadness vbox vcenter \
-vfil vfill vfilneg vfuzz voffset \
-vrule vsize vskip vsplit vss \
-vtop wd widowpenalties widowpenalty write \
-xdef xleaders xspaceskip year vpack \
-hpack tpack csstring begincsname lastnamedcs \
-toksapp tokspre etoksapp etokspre
+pdfignoreunknownimages pdfinfo pdfinfoid pdfinsertht pdflastannot \
+pdflastlinedepth pdflastlink pdflastobj pdflastxform pdflastximage \
+pdflastximagepages pdflastxpos pdflastypos pdflinkmargin pdfliteral \
+pdfmapfile pdfmapline pdfminorversion pdfnames pdfnoligatures \
+pdfnormaldeviate pdfobj pdfobjcompresslevel pdfoutline pdfoutput \
+pdfpageattr pdfpagebox pdfpageheight pdfpageref pdfpageresources \
+pdfpagesattr pdfpagewidth pdfpkmode pdfpkresolution pdfpkfixeddpi \
+pdfprimitive pdfprotrudechars pdfpxdimen pdfrandomseed pdfrefobj \
+pdfrefxform pdfrefximage pdfreplacefont pdfrestore pdfretval \
+pdfsave pdfsavepos pdfsetmatrix pdfsetrandomseed pdfstartlink \
+pdfstartthread pdftexbanner pdftexrevision pdftexversion pdfsuppressptexinfo \
+pdfsuppressoptionalinfo pdfthread pdfthreadmargin pdftracingfonts pdftrailer \
+pdfuniformdeviate pdfuniqueresname pdfvorigin pdfxform pdfxformattr \
+pdfxformname pdfxformresources pdfximage penalty postdisplaypenalty \
+postexhyphenchar posthyphenchar predisplaydirection predisplaypenalty predisplaysize \
+preexhyphenchar prehyphenchar pretolerance prevdepth prevgraf \
+primitive protected pxdimen quitvmode radical \
+raise randomseed read readline relax \
+relpenalty right rightghost righthyphenmin rightmarginkern \
+rightskip romannumeral rpcode saveboxresource saveimageresource \
+savepos savecatcodetable savinghyphcodes savingvdiscards scantextokens \
+scantokens scriptfont scriptscriptfont scriptscriptstyle scriptspace \
+scriptstyle scrollmode setbox setfontid setlanguage \
+setrandomseed sfcode shipout show showbox \
+showboxbreadth showboxdepth showgroups showifs showlists \
+showthe showtokens skewchar skip skipdef \
+spacefactor spaceskip span special splitbotmark \
+splitbotmarks splitdiscards splitfirstmark splitfirstmarks splitmaxdepth \
+splittopskip string suppressfontnotfounderror suppressifcsnameerror suppresslongerror \
+suppressoutererror synctex tabskip tagcode textdir \
+textfont textstyle the thickmuskip thinmuskip \
+time toks toksdef tolerance topmark \
+topmarks topskip tracingassigns tracingcommands tracingfonts \
+tracinggroups tracingifs tracinglostchars tracingmacros tracingnesting \
+tracingonline tracingoutput tracingpages tracingparagraphs tracingrestores \
+tracingscantokens tracingstats uccode uchyph underline \
+unexpanded unhbox unhcopy uniformdeviate unkern \
+unless unpenalty unskip unvbox unvcopy \
+uppercase useboxresource useimageresource vadjust valign \
+vbadness vbox vcenter vfil vfill \
+vfilneg vfuzz voffset vrule vsize \
+vskip vsplit vss vtop wd \
+widowpenalties widowpenalty write xdef xleaders \
+xspaceskip year vpack hpack tpack \
+csstring begincsname lastnamedcs toksapp tokspre \
+etoksapp etokspre
keywordclass.tex.xetex=\
XeTeXversion
diff --git a/doc/context/documents/general/manuals/luatex.pdf b/doc/context/documents/general/manuals/luatex.pdf
index ba3599e63..81a7c42b9 100644
--- a/doc/context/documents/general/manuals/luatex.pdf
+++ b/doc/context/documents/general/manuals/luatex.pdf
Binary files differ
diff --git a/doc/context/sources/general/manuals/luatex/luatex-enhancements.tex b/doc/context/sources/general/manuals/luatex/luatex-enhancements.tex
index 19d99b74f..37d99a84d 100644
--- a/doc/context/sources/general/manuals/luatex/luatex-enhancements.tex
+++ b/doc/context/sources/general/manuals/luatex/luatex-enhancements.tex
@@ -1,3 +1,5 @@
+% language=uk
+
\environment luatex-style
\environment luatex-logos
@@ -32,17 +34,15 @@ it may be needed to put these assignments before the above line:
\catcode `\}=2
\stoptyping
-More fine-grained primitives control is possible, you can look up the details in
+More fine|-|grained control over the primitives is possible; you can look up the details in
\in {section} [luaprimitives]. For simplicity's sake, this manual assumes that you
have executed the \type {\directlua} command as given above.
-The startup behavior documented above is considered stable in the sense that
-there will not be backward|-|incompatible changes any more. However, we can
-decide to promite some primitives to the \LUATEX\ namespace. For instance, after
-version 0.80.1 we promoted some rather generic \PDFTEX\ primitives to core
-\LUATEX\ ones, and the ones inherited frome \ALEPH\ (\OMEGA) are also promoted.
-Effectively this means that we now have the \type {tex}, \type {etex}, \type
-{luatex} and \type {pdftex} (sub)sets left.
+The startup behaviour documented above is considered stable in the sense that
+there will not be backward|-|incompatible changes any more. We have promoted some
+rather generic \PDFTEX\ primitives to core \LUATEX\ ones, and the ones inherited
+from \ALEPH\ (\OMEGA) are also promoted. Effectively this means that we now have
+the \type {tex}, \type {etex}, \type {luatex} and \type {pdftex} (sub)sets left.
\section{Version information}
@@ -327,7 +327,7 @@ can break up \LUATEX\ pretty bad. If you are not careful while working with the
node list interface, you may even end up with assertion errors from within the
\TEX\ portion of the executable.
-The behavior documented in the above subsection is considered stable in the sense
+The behaviour documented in the above subsection is considered stable in the sense
that there will not be backward-incompatible changes any more.
\subsection{\type {\latelua}}
@@ -478,7 +478,7 @@ preambles.
Catcode tables are a new feature that allows you to switch to a predefined
catcode regime in a single statement. You can have a practically unlimited number
of different tables. This subsystem is backward compatible: if you never use the
-following commands, your document will not notice any difference in behavior
+following commands, your document will not notice any difference in behaviour
compared to traditional \TEX. The contents of each catcode table are independent
of any other catcode table, and their contents are stored in and retrieved from
the format file.
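For example, a macro package could capture its preferred regime once and switch
back to it later in a single statement; a minimal sketch (the table numbers 7 and
8 are arbitrary, assumed values):

\starttyping
\initcatcodetable 7   % fill table 7 with the ini-TeX default catcodes
\savecatcodetable 8   % snapshot the catcodes that are active right now
% ... temporary catcode changes ...
\catcodetable 8       % switch back to the saved regime
\stoptyping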
@@ -565,8 +565,9 @@ If this new integer parameter is non|-|zero, then \LUATEX\ will not complain
about non-expandable commands appearing in the middle of a \type {\ifcsname}
expansion. Instead, it will keep getting expanded tokens from the input until it
encounters an \type {\endcsname} command. Use with care! This command is
-experimental: if the input expansion is unbalanced wrt. \type {\csname} \ldots
-\type {\endcsname} pairs, the \LUATEX\ process may hang indefinitely.
+experimental: if the input expansion is unbalanced with respect to \type
+{\csname} \ldots \type {\endcsname} pairs, the \LUATEX\ process may hang
+indefinitely.
\subsection{\type {\suppressoutererror}}
@@ -597,7 +598,7 @@ a $
\section{\type {\matheqnogapstep}}
By default \TEX\ will add one quad between the equation and the number. This is
-hardcoded. A new primitive can control this:
+hard coded. A new primitive can control this:
\startsyntax
\matheqnogapstep = 1000
diff --git a/doc/context/sources/general/manuals/luatex/luatex-fonts.tex b/doc/context/sources/general/manuals/luatex/luatex-fonts.tex
index 937e99c91..745d28c74 100644
--- a/doc/context/sources/general/manuals/luatex/luatex-fonts.tex
+++ b/doc/context/sources/general/manuals/luatex/luatex-fonts.tex
@@ -1,3 +1,5 @@
+% language=uk
+
\environment luatex-style
\environment luatex-logos
@@ -54,7 +56,7 @@ The top|-|level keys in the table are as follows:
\NC \NR
\NC psname \NC no \NC no \NC yes \NC string \NC
actual (\POSTSCRIPT) name (this is the PS fontname in the incoming font
- source, also used as fontname identifier in the \PDF\ output, new in 0.43)
+ source, also used as fontname identifier in the \PDF\ output)
\NC \NR
\NC fullname \NC no \NC no \NC yes \NC string \NC
output font name, used as a fallback in the \PDF\ output
@@ -110,16 +112,15 @@ The top|-|level keys in the table are as follows:
the \type {\pdffontattr}
\NC \NR
\NC cache \NC no \NC no \NC yes \NC string \NC
- this key controls caching of the lua table on the \type {tex} end. \type {yes}:
- use a reference to the table that is passed to \LUATEX\ (this is the
- default). \type {no}: don't store the table reference, don't cache any lua
- data for this font. \type {renew}: don't store the table reference, but save a
- reference to the table that is created at the first access to one of its
- fields in font.fonts. (new in 0.40.0, before that caching was always
- \type {yes}). Note: the saved reference is thread-local, so be careful when
- you are using coroutines: an error will be thrown if the table has been
- cached in one thread, but you reference it from another thread ($\approx$
- coroutine)
+ this key controls caching of the \LUA\ table on the \type {tex} end. \type
+ {yes}: use a reference to the table that is passed to \LUATEX\ (this is the
+ default). \type {no}: don't store the table reference, don't cache any \LUA\
+ data for this font. \type {renew}: don't store the table reference, but save
+ a reference to the table that is created at the first access to one of its
+ fields in font.fonts. Note: the saved reference is thread-local, so be
+ careful when you are using coroutines: an error will be thrown if the table
+ has been cached in one thread, but you reference it from another thread
+ ($\approx$ coroutine)
\NC \NR
\NC nomath \NC no \NC no \NC yes \NC boolean\NC
this key allows a minor speedup for text fonts. if it is present and true,
@@ -241,11 +242,11 @@ The following top|-|level keys can be present inside a character hash:
\NC left_protruding \NC no \NC no \NC maybe \NC number \NC character's \type {\lpcode} \NC\NR
\NC right_protruding \NC no \NC no \NC maybe \NC number \NC character's \type {\rpcode} \NC\NR
\NC expansion_factor \NC no \NC no \NC maybe \NC number \NC character's \type {\efcode} \NC\NR
-\NC tounicode \NC no \NC no \NC maybe \NC string \NC character's Unicode equivalent(s), in UTF-16BE hexadecimal format\NC\NR
-\NC next \NC no \NC yes \NC yes \NC number \NC the \quote{next larger} character index \NC\NR
+\NC tounicode \NC no \NC no \NC maybe \NC string \NC character's \UNICODE\ equivalent(s), in \UTF|-|16BE hexadecimal format \NC\NR
+\NC next \NC no \NC yes \NC yes \NC number \NC the \quote {next larger} character index \NC\NR
\NC extensible \NC no \NC yes \NC yes \NC table \NC the constituent parts of an extensible recipe \NC\NR
-\NC vert_variants \NC no \NC no \NC yes \NC table \NC constituent parts of a vertical variant set\NC \NR
-\NC horiz_variants \NC no \NC no \NC yes \NC table \NC constituent parts of a horizontal variant set\NC \NR
+\NC vert_variants \NC no \NC no \NC yes \NC table \NC constituent parts of a vertical variant set \NC \NR
+\NC horiz_variants \NC no \NC no \NC yes \NC table \NC constituent parts of a horizontal variant set \NC \NR
\NC kerns \NC no \NC yes \NC yes \NC table \NC kerning information \NC\NR
\NC ligatures \NC no \NC yes \NC yes \NC table \NC ligaturing information \NC\NR
\NC commands \NC yes \NC no \NC yes \NC array \NC virtual font commands \NC\NR
@@ -271,13 +272,13 @@ entry for the \PDF\ font (or font subset) based on the character|-|level \type
{tounicode} strings, where they are available. If a character does not have a
sensible \UNICODE\ equivalent, do not provide a string either (no empty strings).
-If the font-level \type {tounicode} is not set, then \LUATEX\ will build up \type
+If the font level \type {tounicode} is not set, then \LUATEX\ will build up \type
{/ToUnicode} based on the \TEX\ code points you used, and any character-level
-\type {tounicodes} will be ignored. {\it At the moment, the string format is
-exactly the format that is expected by Adobe \CMAP\ files (\UTF-16BE in
-hexadecimal encoding), minus the enclosing angle brackets. This may change in the
-future.} Small example: the \type {tounicode} for a \type {fi} ligature would be
-\type {00660069}.
+\type {tounicodes} will be ignored. The string format is exactly the format that
+is expected by Adobe \CMAP\ files (\UTF-16BE in hexadecimal encoding), minus the
+enclosing angle brackets. Small example: the \type {tounicode} for a \type {fi}
+ligature would be \type {00660069}. When you pass a number the conversion will be
+done for you.
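A sketch of how such an entry could look inside the character hash (the glyph
slot and the width below are made-up, assumed values):

\starttyping
characters = {
    [0xFB01] = {                -- assumed slot for the "fi" ligature
        width     = 400000,     -- made-up width in scaled points
        tounicode = "00660069", -- UTF-16BE hex for U+0066 U+0069
    },
}
\stoptyping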
The presence of \type {extensible} will overrule \type {next}, if that is also
present. It in turn can be overruled by \type {vert_variants}.
@@ -301,7 +302,7 @@ Each of those components is itself a hash of up to five keys:
\NC extender \NC number \NC One (1) if this part is repeatable, zero (0) otherwise. \NC \NR
\NC start \NC number \NC Maximum overlap at the starting side (in scaled points). \NC \NR
\NC end \NC number \NC Maximum overlap at the ending side (in scaled points). \NC \NR
-\NC advance \NC number \NC Total advance width of this item (can be zero or missing,
+\NC advance \NC number \NC The total advance width of this item (can be zero or missing,
then the natural size of the glyph for character \type {component}
is used). \NC \NR
\stoptabulate
@@ -390,7 +391,7 @@ and \TRUETYPE\ fonts loaded via \LUA. For \TYPEONE\ fonts, you have to set \type
supported at all.
If no special care is needed, \LUATEX\ currently falls back to the
-mapfile|-|based solution used by \PDFTEX\ and \DVIPS. This behavior will be
+mapfile|-|based solution used by \PDFTEX\ and \DVIPS. This behaviour will be
removed in the future, when the existing code becomes integrated in the new
subsystem.
@@ -495,7 +496,7 @@ fonts = {
says that the first referenced font (index 1) in this virtual font is \type
{ptrmr8a} loaded at 10pt, and the second is \type {psyr} loaded at a little over
-9pt. The third one is previously defined font that is known to \LUATEX\ as fontid
+9pt. The third one is a previously defined font that is known to \LUATEX\ as font id
\quote {38}.
The array index numbers are used by the character command definitions that are
diff --git a/doc/context/sources/general/manuals/luatex/luatex-introduction.tex b/doc/context/sources/general/manuals/luatex/luatex-introduction.tex
index 5fcc96546..ad03970a5 100644
--- a/doc/context/sources/general/manuals/luatex/luatex-introduction.tex
+++ b/doc/context/sources/general/manuals/luatex/luatex-introduction.tex
@@ -1,3 +1,5 @@
+% language=uk
+
\environment luatex-style
\environment luatex-logos
@@ -6,40 +8,36 @@
\startchapter[title=Introduction]
This book will eventually become the reference manual of \LUATEX. At the moment,
-it simply reports the behavior of the executable matching the snapshot or beta
+it simply reports the behaviour of the executable matching the snapshot or beta
release date in the title page. We don't claim it is complete and we assume that
the reader knows about \TEX\ as described in \quotation {The \TEX\ Book}, the
\quotation {\ETEX\ manual}, the \quotation {\PDFTEX\ manual}, etc. Additional
reference material is published in journals of user groups and \CONTEXT\ related
documentation.
-Features may come and go. The current version of \LUATEX\ can be used for
-production (in fact it is used in production by the authors) but users cannot
-depend on complete stability, nor on functionality staying the same. This means
-that when you update your binary, you also need to check if something fundamental
-has changed. Normally this is communicated in articles or messages to a mailing
-list. We're still not at version 1 but when we reach that state the interface
-will be stable. Of course we then can decide to move towards version 2 with
-different properties.
-
-This means that occasionally you can encounter functionality not described here.
-Just consider that experimental then. They are either a playground or are being
-tested in real situations first before being finalized. And we can equally well
-kick them out again. When they stay we will descibe them in the manual or more
-extensively in articles.
+Successive versions brought new functionality, more control, some cleanup of
+internals, and experimental features evolved into stable ones or were dropped.
+Already quite early \LUATEX\ could be used for production and it was used in
+production by the authors. Later versions sometimes demanded an adaptation of
+the \LUA\ interfacing, but the concepts were unchanged. The current version can
+be considered stable in functionality and there will be no fundamental changes
+between 0.90 and 1.00. Of course we can then decide to move towards version 2.00
+with different properties.
Don't expect \LUATEX\ to behave the same as \PDFTEX ! Although the core
-functionality of that 8 bit engine is present, \LUATEX\ can behave different due
-to its wide (32 bit) characters, many registers and large memory support. There
-is native \UTF\ input, support for large (more that 8 bit) fonts, and the math
-machinery is tuned for \OPENTYPE\ math. Ther ei ssupport for directional
+functionality of that 8 bit engine was the starting point, it has been combined with
+the directional support of \OMEGA\ (\ALEPH). But \LUATEX\ can behave differently
+due to its wide (32 bit) characters, many registers and large memory support.
+There is native \UTF\ input, support for large (more than 8 bit) fonts, and the
+math machinery is tuned for \OPENTYPE\ math. There is support for directional
typesetting too. The log output can differ from other engines and will likely
differ more as we move forward. When you run plain \TEX\ for sure \LUATEX\ runs
-slower than \PDFTEX\ but when you run for instance \CONTEXT\ it might be faster. But
-in any case: 32 bit all||over combined with more features has a price.
+slower than \PDFTEX\ but when you run for instance \CONTEXT\ \MKIV\ it might be
+faster on more complex documents. But in any case: 32 bit all||over combined with
+more features has a price.
\LUATEX\ consists of a number of interrelated but (still) distinguishable parts.
-The organization of the source code is adapted so that it cna glue all these
+The organization of the source code is adapted so that it can glue all these
components together. We continue cleaning up side effects of the accumulated
code in \TEX\ engines (especially code that is not needed any longer).
@@ -47,53 +45,69 @@ code in \TEX\ engines (especially code that is not needed any longer).
\startitem
Most of \PDFTEX\ version 1.40.9, converted to C (with patches from later
releases). Some experimental features have been removed and some utility
- macros are not inherited as their functionality can be done in \LUA. We
- still use the \type {\pdf*} primitive namespace.
+ macros are not inherited as their functionality can be done in \LUA. The
+ number of backend interface commands has been reduced to a few. The
+ extensions are separated from the core (which we keep close to the
+ original \TEX\ core). Some mechanisms like expansion and protrusion can
+        behave differently from the original due to some cleanup and optimization.
+ Some whatsit based functionality (image support and reusable content)
+ is now core functionality.
\stopitem
\startitem
The direction model and some other bits from \ALEPH\ RC4 (derived from
- \OMEGA) is included. The related primitives are part of core \LUATEX.
+ \OMEGA) is included. The related primitives are part of core \LUATEX\ but
+ at the node level directional support is no longer based on so called
+ whatsits but on real nodes. In fact, whatsits are now only used for
+ backend specific extensions.
\stopitem
\startitem
- We currently use \LUA\ 5.2.*. At some point we might decide to move to
- 5.3.* but that is yet to be decided.
- \stopitem
- \startitem
- There are few \LUA\ libraries that we consider part of the core \LUA\
- machinery.
+ Neither \ALEPH's I/O translation processes, nor tcx files, nor \ENCTEX\
+        can be used; these encoding|-|related functions are superseded by a
+ \LUA|-|based solution (reader callbacks). In a similar fashion all file
+ \IO\ can be intercepted.
\stopitem
\startitem
+ We currently use \LUA\ 5.2.*. At some point we might decide to move to
+        5.3.* but that is yet to be decided. There are a few \LUA\ libraries that
+ we consider part of the core \LUA\ machinery, for instance \type {lpeg}.
There are additional \LUA\ libraries that interface to the internals of
\TEX.
\stopitem
\startitem
There are various \TEX\ extensions but only those that cannot be done
- using the \LUA\ interfaces.
+ using the \LUA\ interfaces. The math machinery often has two code paths:
+ one traditional and the other more suitable for wide \OPENTYPE\ fonts.
\stopitem
\startitem
The fontloader uses parts of \FONTFORGE\ 2008.11.17 combined with
- additionaL code specific for usage in a \TEX\ engine.
+        additional code specific for usage in a \TEX\ engine. We try to minimize
+        specific font support to what \TEX\ needs: character references and
+ dimensions and delegate everything else to \LUA. That way we keep \TEX\
+ open for extensions without touching the core.
\stopitem
\startitem
- the \METAPOST\ library
+        The \METAPOST\ library is an integral part of \LUATEX. This gives \TEX\ some
+        graphical capabilities using a relatively high|-|speed graphical subsystem.
+ Again \LUA\ is used as glue between the frontend and backend. Further
+ development of \METAPOST\ is closely related to \LUATEX.
\stopitem
\stopitemize
-Neither \ALEPH's I/O translation processes, nor tcx files, nor \ENCTEX\ can be
-used, these encoding|-|related functions are superseded by a \LUA|-|based
-solution (reader callbacks). Most of the \PDFTEX\ backend is available but via a
-bit different interface.
+The yearly \TEXLIVE\ version is the stable version; any version in between is to
+be considered beta. The beta releases are normally available via the \CONTEXT\
+distribution channels (the garden and the so|-|called minimals).
+
+\blank[1*big]
-The yearly \TEXLIVE\ version is the stable version, any version between them is
-considered beta. Keep in mind that new (or changed) features also need to be
-reflected in the macro package that you use.
+Hans Hagen, Hartmut Henkel, \crlf
+Taco Hoekwater \& Luigi Scarso
\blank[3*big]
\starttabulate
-\NC \LUATEX \EQ Version \number\luatexversion.\luatexrevision \NC \NR
-\NC \CONTEXT \EQ \contextversion \NC \NR
-\NC timestamp \EQ \currentdate \NC \NR
+\NC Version \EQ \currentdate \NC \NR
+\NC \LUATEX \EQ Snapshot \number\luatexversion.\luatexrevision \NC \NR
+\NC \CONTEXT \EQ \contextversion \NC \NR
\stoptabulate
\stopchapter
diff --git a/doc/context/sources/general/manuals/luatex/luatex-languages.tex b/doc/context/sources/general/manuals/luatex/luatex-languages.tex
index 35ffd945e..ad73a4d31 100644
--- a/doc/context/sources/general/manuals/luatex/luatex-languages.tex
+++ b/doc/context/sources/general/manuals/luatex/luatex-languages.tex
@@ -1,3 +1,5 @@
+% language=uk
+
\environment luatex-style
\environment luatex-logos
@@ -59,8 +61,8 @@ stored in the same place as other nodes like boxes and kerns and glues.
In \LUATEX, these two types are merged into one, somewhat larger structure called
a \type {glyph_node}. Besides having the old character, font, and component
-fields, and the new special fields like \quote {attr}
-(see~\in{section}[glyphnodes]), these nodes also contain:
+fields, and the new special fields like \quote {attr} (see~\in {section}
+[glyphnodes]), these nodes also contain:
\startitemize
@@ -153,7 +155,7 @@ but rejected because in practice the current approach is sufficient and it would
not be compatible anyway.
Beware: the values are always saved in the format, independent of the setting
-of \type {\savinghyphcodes} at the mnoment the format is dumped.
+of \type {\savinghyphcodes} at the moment the format is dumped.
\section{The main control loop}
@@ -173,10 +175,10 @@ achieve the same effect.
This change of meaning did happen with \type {\char}, that now generates \quote
{glyph} nodes with a character subtype. In traditional \TEX\ there was a strong
-relationship betwene the 8|-|bit input encoding, hyphenation and glyph staken
+relationship between the 8|-|bit input encoding, hyphenation and glyphs taken
from a font. In \LUATEX\ we have \UTF\ input, and in most cases this maps
directly to a character in a font, apart from glyph replacement in the font
-engine. If you want to access arbitrary glyphs in a font directly you can alwasy
+engine. If you want to access arbitrary glyphs in a font directly you can always
use \LUA\ to do so, because fonts are available as \LUA\ tables.
Second, all the results of processing in math mode eventually become nodes with
@@ -207,7 +209,7 @@ control loop.
The only use \LUATEX\ has for \type {\hyphenchar} is at the check whether a word
should be considered for hyphenation at all. If the \type {\hyphenchar} of the font
attached to the first character node in a word is negative, then hyphenation of
-that word is abandoned immediately. {\bf This behavior is added for backward
+that word is abandoned immediately. {\bf This behaviour is added for backward
compatibility only, and the use of \type {\hyphenchar=-1} as a means of
preventing hyphenation should not be used in new \LUATEX\ documents.}
@@ -294,7 +296,7 @@ speed gain would be lost if it had to interpret command sequences while doing so
It is possible to specify extra hyphenation points in compound words by using
\type {{-}{}{-}} for the explicit hyphen character (replace \type {-} by the
actual explicit hyphen character if needed). For example, this matches the word
+\quote {multi|-|word|-|boundaries} and allows an extra break in between \quote
+\quote {multi|-|word|-|boundaries} and allows an extra break inbetween \quote
{boun} and \quote {daries}:
\starttyping
@@ -391,14 +393,14 @@ word boundary).
All languages start out with \type {\prehyphenchar=`\-}, \type {\posthyphenchar=0},
\type {\preexhyphenchar=0} and \type {\postexhyphenchar=0}. When you assign the
values of one of these four parameters, you are actually changing the settings
-for the current \type {\language}, this behavior is compatible with \type {\patterns}
+for the current \type {\language}; this behaviour is compatible with \type {\patterns}
and \type {\hyphenation}.
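A minimal sketch (the language number is an arbitrary, assumed value); because
the assignments are bound to the current language, one normally selects it first:

\starttyping
\language=2          % some previously set up language (assumed)
\prehyphenchar=`\-   % character typeset in front of the break point
\posthyphenchar=0    % nothing is carried over to the next line
\preexhyphenchar=0   % the same pair, but for explicit hyphens
\postexhyphenchar=0
\stoptyping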
\LUATEX\ also hyphenates the first word in a paragraph. Words can be up to 256
characters long (up from 64 in \TEX82). Longer words generate an error right now,
but eventually either the limitation will be removed or perhaps it will become
possible to silently ignore the excess characters (this is what happens in
-\TEX82, but there the behavior cannot be controlled).
+\TEX82, but there the behaviour cannot be controlled).
If you are using the \LUA\ function \type {lang.hyphenate}, you should be aware
that this function expects to receive a list of \quote {character} nodes. It will
diff --git a/doc/context/sources/general/manuals/luatex/luatex-libraries.tex b/doc/context/sources/general/manuals/luatex/luatex-libraries.tex
index e03d875f9..71d14040b 100644
--- a/doc/context/sources/general/manuals/luatex/luatex-libraries.tex
+++ b/doc/context/sources/general/manuals/luatex/luatex-libraries.tex
@@ -1,3 +1,5 @@
+% language=uk
+
\environment luatex-style
\environment luatex-logos
@@ -31,7 +33,7 @@ This library has functions that register, find and list callbacks. Callbacks are
\LUA\ functions that are called in well defined places. There are two kinds of
callbacks: those that mix with existing functionality, and those that (when
enabled) replace functionality. In most cases the second category is expected to
-behave similar to the built in functiontionality because in a next step specific
+behave similarly to the built|-|in functionality because in a next step specific
data is expected. For instance, you can replace the hyphenation routine. The
function gets a list that can be hyphenated (or not). The final list should be
valid and is (normally) used for constructing a paragraph. Another function can
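As an illustration of the \quote {replace} category, here is a minimal sketch
that registers a replacement for the hyphenation routine; it simply delegates to
the standard hyphenator so that the resulting list stays valid:

\starttyping
callback.register("hyphenate", function(head, tail)
    -- a real replacement would insert its own discretionary nodes here;
    -- this sketch just runs the built-in hyphenator on the given list
    lang.hyphenate(head, tail)
end)
\stoptyping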
@@ -63,8 +65,6 @@ whatever it would execute by default (when no callback function is registered at
all). Be warned: this may cause all sorts of grief unless you know {\em exactly}
what you are doing!
-Currently, callbacks are not dumped into the format file.
-
\startfunctioncall
<table> info = callback.list()
\stopfunctioncall
@@ -80,7 +80,7 @@ If the callback is not set, \type {callback.find} returns \type {nil}.
\subsection{File discovery callbacks}
-The behavior documented in this subsection is considered stable in the sense that
+The behaviour documented in this subsection is considered stable in the sense that
there will not be backward|-|incompatible changes any more.
\subsubsection{\type {find_read_file} and \type {find_write_file}}
@@ -220,7 +220,7 @@ Your callback function should have the following conventions:
\stopfunctioncall
The \type {asked_name} is an image file. Your return value is used to open a file
-from the harddisk, so make sure you return something that is considered the name
+from the hard disk, so make sure you return something that is considered the name
of a valid file by your operating system.
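A minimal sketch of such a \type {find_image_file} callback; resolving the name
through the \type {kpse} library is an assumption here, not a requirement:

\starttyping
callback.register("find_image_file", function(asked_name)
    -- "graphic/figure" is the kpathsea file type used for graphics
    return kpse.find_file(asked_name, "graphic/figure") or asked_name
end)
\stoptyping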
\subsection[iocallback]{File reading callbacks}
@@ -353,9 +353,8 @@ end
\stopfunctioncall
If you return \type {nil}, \LUATEX\ will pretend like your callback never
-happened. You can gain a small amount of processing time from that.
-
-This callback does not replace any internal code.
+happened. You can gain a small amount of processing time from that. This callback
+does not replace any internal code.
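A minimal sketch of such a callback (the marker being replaced is made up for the
example): the function receives each input line as a string and returns the
possibly adjusted line.

\starttyping
callback.register("process_input_buffer", function(line)
    -- expand a hypothetical shorthand before TeX sees the line
    return (string.gsub(line, "%-%-date%-%-", os.date("%Y-%m-%d")))
end)
\stoptyping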
\subsubsection{\type {process_output_buffer}}
@@ -371,9 +370,8 @@ end
\stopfunctioncall
If you return \type {nil}, \LUATEX\ will pretend like your callback never
-happened. You can gain a small amount of processing time from that.
-
-This callback does not replace any internal code.
+happened. You can gain a small amount of processing time from that. This callback
+does not replace any internal code.
\subsubsection{\type {process_jobname}}
@@ -389,40 +387,8 @@ end
The only argument is the actual job name; you should not use \type {tex.jobname}
inside this function or infinite recursion may occur. If you return \type {nil},
-\LUATEX\ will pretend your callback never happened.
-
-This callback does not replace any internal code.
-
-% \subsubsection{\type {token_filter}}
-%
-% This callback allows you to replace the way \LUATEX\ fetches lexical tokens.
-%
-% \startfunctioncall
-% function()
-% return <table> token
-% end
-% \stopfunctioncall
-%
-% The calling convention for this callback is a bit more complicated than for most
-% other callbacks. The function should either return a \LUA\ table representing a
-% valid to|-|be|-|processed token or tokenlist, or something else like \type {nil}
-% or an empty table.
-%
-% If your \LUA\ function does not return a table representing a valid token, it
-% will be immediately called again, until it eventually does return a useful token
-% or tokenlist (or until you reset the callback value to nil). See the description
-% of \type {token} for some handy functions to be used in conjunction with this
-% callback.
-%
-% If your function returns a single usable token, then that token will be processed
-% by \LUATEX\ immediately. If the function returns a token list (a table consisting
-% of a list of consecutive token tables), then that list will be pushed to the
-% input stack at a completely new token list level, with its token type set to
-% \quote {inserted}. In either case, the returned token(s) will not be fed back
-% into the callback function.
-%
-% Setting this callback to \type {false} has no effect (because otherwise nothing
-% would happen, forever).
+\LUATEX\ will pretend your callback never happened. This callback does not
+replace any internal code.
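A small sketch (the suffix is arbitrary): the callback gets the current job name
and the string it returns is then used as the job name.

\starttyping
callback.register("process_jobname", function(jobname)
    -- returning nil keeps the original name
    return jobname .. "-draft"
end)
\stoptyping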
\subsection{Node list processing callbacks}
@@ -522,7 +488,7 @@ three things:
\startitemize
\startitem
- boolean \type {true} signals succesful processing
+ boolean \type {true} signals successful processing
\stopitem
\startitem
\type {<node>} signals that the \quote {head} node should be replaced by the
@@ -793,7 +759,7 @@ end
\stopfunctioncall
This callback replaces the code that prints \LUATEX's banner. Note that for
-successful use, this callback has to be set in the lua initialization script,
+successful use, this callback has to be set in the \LUA\ initialization script,
otherwise it will be seen only after the run has already started.
\subsubsection{\type {stop_run}}
@@ -836,37 +802,7 @@ end
This callback is run from inside the \TEX\ error function, and the idea is to
allow you to do some extra reporting on top of what \TEX\ already does (none of
the normal actions are removed). You may find some of the values in the \type
-{status} table useful.
-
-This callback does not replace any internal code.
-
-\iffalse % this has been retracted for the moment
-
- \startitemize
-
- \sym{message}
-
- is the formal error message \TEX\ has given to the user. (the line after the
- \type {'!'}).
-
- \sym{indicator}
-
- is either a filename (when it is a string) or a location indicator (a number)
- that can mean lots of different things like a token list id or a \type {\read}
- number.
-
- \sym{lineno}
-
- is the current line number.
- \stopitemize
-
- This is an investigative item for 'testing the water' only. The final goal is the
- total replacement of \TEX's error handling routines, but that needs lots of
- adjustments in the web source because \TEX\ deals with errors in a somewhat
- haphazard fashion. This is why the exact definition of \type {indicator} is not
- given here.
-
-\fi
+{status} table useful. This callback does not replace any internal code.
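A sketch of such extra reporting, using two fields from the \type {status} table
that are documented later in this chapter:

\starttyping
callback.register("show_error_hook", function()
    texio.write_nl("log", string.format("extra report: error near %s:%s",
        tostring(status.filename), tostring(status.linenumber)))
end)
\stoptyping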
\subsubsection{\type {show_error_message}}
@@ -885,7 +821,7 @@ function()
end
\stopfunctioncall
-This callback replaces the code that prints the extra lua error message.
+This callback replaces the code that prints the extra \LUA\ error message.
\subsubsection{\type {start_file}}
@@ -937,7 +873,7 @@ function(shippingout)
end
\stopfunctioncall
-This callback is called after the pdf page stream has been assembled and before
+This callback is called after the \PDF\ page stream has been assembled and before
the page object gets finalized.
\subsection{Font-related callbacks}
@@ -970,7 +906,7 @@ The \type {id} is the internal number assigned to the font.
The internal structure of the \type {font} table that is to be returned is
explained in \in {chapter} [fonts]. That table is saved internally, so you can
put extra fields in the table for your later \LUA\ code to use. Alternatively,
-retval can be a previously defined fontid. This is useful if a previous
+\type {retval} can be a previously defined fontid. This is useful if a previous
definition can be reused instead of creating a whole new font structure.
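A sketch of such reuse, assuming the callback indeed receives the assigned id as
its third argument (only plain \TFM\ loading is shown and the cache key is an
arbitrary convention):

\starttyping
local known = { } -- maps "name @ size" onto an already assigned font id

callback.register("define_font", function(name, size, id)
    local key = name .. " @ " .. size
    if known[key] then
        return known[key]            -- reuse the earlier definition
    end
    known[key] = id                  -- remember the id this definition gets
    return font.read_tfm(name, size) -- plain tfm loading as a fallback
end)
\stoptyping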
Setting this callback to \type {false} is pointless as it will prevent font
@@ -978,12 +914,12 @@ loading completely but will nevertheless generate errors.
\section{The \type {epdf} library}
-The \type {epdf} library provides Lua bindings to many \PDF\ access functions
-that are defined by the poppler pdf viewer library (written in C$+{}+$ by
+The \type {epdf} library provides \LUA\ bindings to many \PDF\ access functions
+that are defined by the poppler \PDF\ viewer library (written in C$+{}+$ by
Kristian H\o gsberg, based on xpdf by Derek Noonburg). Within \LUATEX\ (and
\PDFTEX), xpdf functionality has been used for a long time to embed \PDF\ files.
The \type {epdf} library allows one to scrutinize an external \PDF\ file. It
-gives access to its document structure, e.g., catalog, cross-reference table,
+gives access to its document structure: catalog, cross|-|reference table,
individual pages, objects, annotations, info, and metadata. The \LUATEX\ team is
evaluating the possibility of reducing the binding to a set of basic low|-|level
\PDF\ primitives and delegating the complete set of functions to an external shared
@@ -1452,10 +1388,10 @@ case the given length is (at most) the string length.
The returned object can be used in the \type {img} library instead of a filename.
Both the memory stream and its use in the image library are experimental and can
change. In case you wonder where this can be used: when you use the swiglib
-library for graphic magick, it can return such a userdata object. This permits
-conversion in memory and passing the result directly to the backend. This might
-save some runtime in one|-|pass workflows. This feature is currently not meant
-for production.
+library for \type {graphicmagick}, it can return such a userdata object. This
+permits conversion in memory and passing the result directly to the backend. This
+might save some runtime in one|-|pass workflows. This feature is currently not
+meant for production.
\section{The \type {font} library}
@@ -1521,7 +1457,7 @@ font.setfont(<number> n, <table> f)
\stopfunctioncall
Note that at the moment, each access to the \type {font.fonts} or call to \type
-{font.getfont} creates a lua table for the whole font. This process can be quite
+{font.getfont} creates a \LUA\ table for the whole font. This process can be quite
slow. In a later version of \LUATEX, this interface will change (it will start
using userdata objects instead of actual tables).
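For example, a quick (and for the reason just given relatively expensive) peek at
the current font:

\starttyping
local f = font.getfont(font.current()) -- builds a Lua table for the whole font
if f then
    texio.write_nl(string.format("current font: %s at %s sp",
        f.name or "?", f.size or 0))
end
\stoptyping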
@@ -1611,7 +1547,7 @@ of consecutive numbers: in some cases there can be holes in the sequence.
\stopfunctioncall
This function returns either \type {nil}, or a \type {table}, or an array of
-small tables (in the case of a TrueType collection). The returned table(s) will
+small tables (in the case of a \TRUETYPE\ collection). The returned table(s) will
contain some fairly interesting information items from the font(s) defined by the
file:
@@ -1825,11 +1761,11 @@ fontloader.close(f)
\stoptyping
In this case, the \LUATEX\ memory requirement stays below 100MB on the test
-computer, while the internal stucture generated by \type {to_table()} needs more
+computer, while the internal structure generated by \type {to_table()} needs more
than 2GB of memory (the font itself is 6.9MB in disk size).
Only the top|-|level font, the subfont table entries, and the glyphs are virtual
-objects, everything else still produces normal lua values and tables.
+objects, everything else still produces normal \LUA\ values and tables.
If you want to know the valid fields in a font or glyph structure, call the \type
{fields} function on an object of a particular type (either glyph or font):
@@ -3130,9 +3066,8 @@ Test if an (absolute) file name is a readable file.
The return value is the actual absolute filename you should use, because the disk
name is not always the same as the requested name, due to aliases and
-system|-|specific handling under e.g.\ \MSDOS.
-
-Returns \type {nil} if the file does not exist or is not readable.
+system|-|specific handling under e.g.\ \MSDOS. Returns \type {nil} if the file
+does not exist or is not readable.
\subsection{\type {expand_path}}
@@ -3372,7 +3307,7 @@ lua.name[<number> n] = <string> s
<string> s = lua.name[<number> n]
\stopfunctioncall
-If you want to unset a lua name, you can assign \type {nil} to it.
+If you want to unset a \LUA\ name, you can assign \type {nil} to it.
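A small sketch (the label is arbitrary; it is assumed here that the name is mainly
used in reporting):

\starttyping
lua.name[2] = "my setup code" -- a label used when this chunk is reported on
lua.name[2] = nil             -- and this unsets it again
\stoptyping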
\section{The \type {mplib} library}
@@ -3420,7 +3355,7 @@ with:
\type {pfb}, \type {enc} \NC \NR
\stoptabulate
-Return either the full pathname of the found file, or \type {nil} if the file
+Return either the full path name of the found file, or \type {nil} if the file
cannot be found.
Note that the new version of \MPLIB\ no longer uses binary mem files, so the way
@@ -3466,7 +3401,7 @@ Generally speaking, it is best to keep your chunks small, but beware that all
chunks have to obey proper syntax, as if each of them were a small file. For
instance, you cannot split a single statement over multiple chunks.
-In contrast with the normal standalone \type {mpost} command, there is {\em no}
+In contrast with the normal stand alone \type {mpost} command, there is {\em no}
implied \quote{input} at the start of the first chunk.
\subsection{\type {mp:finish}}
@@ -3523,7 +3458,7 @@ you can call:
\NC copy_objects \NC function \NC returns a deep copy of the array of graphic
objects in this \type {fig} \NC \NR
\NC filename \NC function \NC the filename this \type {fig}'s \POSTSCRIPT\
- output would have written to in standalone
+ output would have written to in stand alone
mode \NC \NR
\NC width \NC function \NC the \type {fontcharwd} value \NC \NR
\NC height \NC function \NC the \type {fontcharht} value \NC \NR
@@ -4044,7 +3979,7 @@ is also possible:
This calling method takes glue settings into account and is especially useful for
finding the actual width of a sublist of nodes that are already boxed, for
-example in code like this, which prints the width of the space inbetween the
+example in code like this, which prints the width of the space in between the
\type {a} and \type {b} as it would be if \type {\box0} was used as-is:
\starttyping
@@ -4119,7 +4054,7 @@ This function also accept string \type {id}'s.
<node> t = node.traverse(<node> n)
\stopfunctioncall
-This is a lua iterator that loops over the node list that starts at \type {n}.
+This is a \LUA\ iterator that loops over the node list that starts at \type {n}.
Typically code looks like this:
\starttyping
@@ -4266,7 +4201,7 @@ tail (both \type {n} and \type {m} can change into a new ligature).
<node> h, <node> t, <boolean> success = node.kerning(<node> n, <node> m)
\stopfunctioncall
-Apply \TEX|-|style kerning to the specified nodelist. The tail node \type {m} is
+Apply \TEX|-|style kerning to the specified node list. The tail node \type {m} is
optional. The two returned nodes \type {h} and \type {t} are the head and tail
(either one of these can be an inserted kern node, because special kernings with
word boundaries are possible).
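A sketch of a typical call, wrapped in a helper so that the possibly new head gets
passed back:

\starttyping
local function addkerns(head)
    local h, t, ok = node.kerning(head) -- the tail argument is optional
    return h -- h can differ from head when a kern was inserted up front
end
\stoptyping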
@@ -4457,7 +4392,7 @@ The counterpart of this function returns two values.
{pdf.getxformmargin}}
These function can be used to set and retrieve the margins that are added to the
-natural boundingboxes of the respective objects.
+natural bounding boxes of the respective objects.
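For instance (the value is in scaled points, so \type {tex.sp} is used for the
conversion):

\starttyping
pdf.setxformmargin(tex.sp("1bp")) -- enlarge the bounding box of saved boxes a bit
texio.write_nl("xform margin: " .. pdf.getxformmargin() .. " sp")
\stoptyping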
\subsection{\type {pdf.h}, \type {pdf.v}}
@@ -4742,11 +4677,11 @@ iteration down to properly process any other, embedded XObjects.
Of course, this is not a very useful example in practice, but for the purpose of
demonstrating \type {pdfscanner}, it is just long enough. It makes use of only
one \type {scanner} method: \type {scanner:pop()}. That function pops the top
-operand of the internal stack, and returns a lua table where the object at index
+operand of the internal stack, and returns a \LUA\ table where the object at index
one is a string representing the type of the operand, and object two is its
value.
-The list of possible operand types and associated lua value types is:
+The list of possible operand types and associated \LUA\ value types is:
\starttabulate[|lT|p|]
\NC integer \NC <number> \NC \NR
@@ -4768,7 +4703,7 @@ In case of \type {string}, please bear in mind that PDF actually supports
different types of strings (with different encodings) in different parts of the
PDF document, so you may need to reencode some of the results; \type {pdfscanner}
always outputs the byte stream without reencoding anything. \type {pdfscanner}
-does not differentiate between literal strings and hexidecimal strings (the
+does not differentiate between literal strings and hexadecimal strings (the
hexadecimal values are decoded), and it treats the stream data for inline images
as a string that is the single operand for \type {EI}.
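A fragment of such an operator table, meant to be passed to \type {pdfscanner.scan}
as in the example above; this sketch only looks at the \type {Tj} (show text)
operator:

\starttyping
local operatortable = {
    Tj = function(scanner, info)
        local operand = scanner:pop() -- { <type>, <value> }
        if operand and operand[1] == "string" then
            print("shown text: " .. operand[2])
        end
    end,
}
\stoptyping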
@@ -4873,8 +4808,8 @@ The current list is:
\NC filename \NC name of the current input file \NC \NR
\NC inputid \NC numeric id of the current input \NC \NR
\NC linenumber \NC location in the current input file \NC \NR
-\NC lasterrorstring \NC last tex error string \NC \NR
-\NC lastluaerrorstring \NC last lua error string \NC \NR
+\NC lasterrorstring \NC last \TEX\ error string \NC \NR
+\NC lastluaerrorstring \NC last \LUA\ error string \NC \NR
\NC lastwarningtag       \NC last warning tag, normally an indication of in what part \NC \NR
\NC lastwarningstring    \NC last warning string \NC \NR
\NC lasterrorcontext \NC last error context string (with newlines) \NC \NR
@@ -4885,8 +4820,8 @@ The current list is:
\NC callbacks \NC total number of executed callbacks so far \NC \NR
\NC indirect_callbacks \NC number of those that were themselves
a result of other callbacks (e.g. file readers) \NC \NR
-\NC luatex_version \NC the luatex version number \NC \NR
-\NC luatex_revision \NC the luatex revision string \NC \NR
+\NC luatex_version \NC the \LUATEX\ version number \NC \NR
+\NC luatex_revision \NC the \LUATEX\ revision string \NC \NR
\NC ini_version \NC \type {true} if this is an \INITEX\ run \NC \NR
\NC shell_escape \NC \type {0} means disabled, \type {1} is restricted and
\type {2} means anything is permitted \NC \NR
@@ -5704,7 +5639,7 @@ makes it suitable for use as a partial line input mechanism:
\starttyping
before\directlua{tex.sprint("\\relax")tex.sprint(" inbetween")}after
\stoptyping
- the space before \type {inbetween} will be gobbled as a result of the \quote
+ the space before \type {in between} will be gobbled as a result of the \quote
{normal} scanning of \type {\relax}.
\stopitem
\stopitemize
@@ -5910,16 +5845,6 @@ will define \type {\LuaTeXformatname} with the same intrinsic meaning as the
documented primitive \type {\formatname}, provided that the control sequence \type
{\LuaTeXformatname} is currently undefined.
-% Second example:
-%
-% \starttyping
-% tex.enableprimitives('Omega',tex.extraprimitives ('omega'))
-% \stoptyping
-%
-% will define a whole series of csnames like \type {\Omegatextdir}, \type
-% {\Omegapardir}, etc., but it will stick with \type {\OmegaVersion} instead of
-% creating the doubly-prefixed \type {\OmegaOmegaVersion}.
-
When \LUATEX\ is run with \type {--ini} only the \TEX82 primitives and \type
{\directlua} are available, so no extra primitives {\bf at all}.
@@ -5970,9 +5895,9 @@ end
\NC luatex \NC \ctxlua{document.showprimitives('luatex') } \NC \NR
\stoptabulate
-Note that \type {'luatex'} does not contain \type {directlua}, as that
-isconsidered to be a core primitive, along with all the \TEX82 primitives, so it
-is part of the list that is returned from \type {'core'}.
+Note that \type {'luatex'} does not contain \type {directlua}, as that is
+considered to be a core primitive, along with all the \TEX82 primitives, so it is
+part of the list that is returned from \type {'core'}.
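A quick check from the \LUA\ end (just a sketch):

\starttyping
for _, name in ipairs(tex.extraprimitives("core")) do
    if name == "directlua" then
        texio.write_nl("directlua is indeed part of 'core'")
    end
end
\stoptyping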
% \type {'umath'} is a subset of \type {'luatex'} that covers the Unicode math
% primitives as it might be desired to handle the prefixing of that subset
@@ -6138,7 +6063,7 @@ the executable after loading and executing the startup file.
shell_escape_commands \NC string \NC \NC Comma-separated list of command
names that may be executed by \type {\write18} even if \type {shell_escape}
is set to \type {'p'}. Do {\it not\/} use spaces around commas, separate any
- required command arguments by using a space, and use the ASCII double quote
+ required command arguments by using a space, and use the \ASCII\ double quote
(\type {"}) for any needed argument or path quoting
\NC \NR
@@ -6172,12 +6097,12 @@ the executable after loading and executing the startup file.
\NC \NR
\NC formatname \NC string \NC
\NC
- if no format name was given on the commandline, this key will be tested first
+ if no format name was given on the command line, this key will be tested first
instead of simply quitting
\NC \NR
\NC jobname \NC string \NC
\NC
- if no input file name was given on the commandline, this key will be tested
+ if no input file name was given on the command line, this key will be tested
first instead of simply giving up
\NC \NR
\stoptabulate
@@ -6225,132 +6150,6 @@ string if you only want to move to the next line.
You can disable \type {^^} escaping of control characters by passing a value of
zero.
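For completeness, a small sketch of the calls discussed here (assuming the escape
toggle is \type {texio.setescape}):

\starttyping
texio.write_nl("term and log", "a message on a fresh line")
texio.write("term", " ... and some more on that same line")
texio.setescape(0) -- control characters are now written as-is
\stoptyping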
-% \section[luatokens]{The \type {oldtoken} library (obsolete)}
-%
-% {\em Nota Bene: This library will disappear soon. It is replaced by the \type
-% {token} library, that used to be called \type {newroken}.}
-%
-% The \type {token} table contains interface functions to \TEX's handling of
-% tokens. These functions are most useful when combined with the \type
-% {token_filter} callback, but they could be used standalone as well.
-%
-% A token is represented in \LUA\ as a small table. For the moment, this table
-% consists of three numeric entries:
-%
-% \starttabulate[|l|l|p|]
-% \NC \bf index \NC \bf meaning \NC \bf description \NC \NR
-% \NC 1 \NC command code \NC this is a value between~$0$ and~$130$ (approximately)\NC \NR
-% \NC 2 \NC command modifier \NC this is a value between~$0$ and~$2^{21}$ \NC \NR
-% \NC 3 \NC control sequence id \NC for commands that are not the result of control
-% sequences, like letters and characters, it is zero,
-% otherwise, it is a number pointing into the \quote
-% {equivalence table} \NC \NR
-% \stoptabulate
-%
-% \subsection{\type {oldtoken.get_next}}
-%
-% \startfunctioncall
-% token t = oldtoken.get_next()
-% \stopfunctioncall
-%
-% This fetches the next input token from the current input source, without
-% expansion.
-%
-% \subsection{\type {oldtoken.is_expandable}}
-%
-% \startfunctioncall
-% <boolean> b = oldtoken.is_expandable(<token> t)
-% \stopfunctioncall
-%
-% This tests if the token \type {t} could be expanded.
-%
-% \subsection{\type {oldtoken.expand}}
-%
-% \startfunctioncall
-% oldtoken.expand(<token> t)
-% \stopfunctioncall
-%
-% If a token is expandable, this will expand one level of it, so that the first
-% token of the expansion will now be the next token to be read by \type
-% {oldtoken.get_next()}.
-%
-% \subsection{\type {oldtoken.is_activechar}}
-%
-% \startfunctioncall
-% <boolean> b = oldtoken.is_activechar(<token> t)
-% \stopfunctioncall
-%
-% This is a special test that is sometimes handy. Discovering whether some control
-% sequence is the result of an active character turned out to be very hard
-% otherwise.
-%
-% \subsection{\type {oldtoken.create}}
-%
-% \startfunctioncall
-% token t = oldtoken.create(<string> csname)
-% token t = oldtoken.create(<number> charcode)
-% token t = oldtoken.create(<number> charcode, <number> catcode)
-% \stopfunctioncall
-%
-% This is the token factory. If you feed it a string, then it is the name of a
-% control sequence (without leading backslash), and it will be looked up in the
-% equivalence table.
-%
-% If you feed it number, then this is assumed to be an input character, and an
-% optional second number gives its category code. This means it is possible to
-% overrule a character's category code, with a few exceptions: the category codes~0
-% (escape), 9~(ignored), 13~(active), 14~(comment), and 15 (invalid) cannot occur
-% inside a token. The values~0, 9, 14 and~15 are therefore illegal as input to
-% \type {oldtoken.create()}, and active characters will be resolved immediately.
-%
-% Note: unknown string sequences and never defined active characters will result in
-% a token representing an \quote {undefined control sequence} with a near|-|random
-% name. It is {\em not} possible to define brand new control sequences using
-% \type {oldtoken.create}!
-%
-% \subsection{\type {oldtoken.command_name}}
-%
-% \startfunctioncall
-% <string> commandname = oldtoken.command_name(<token> t)
-% \stopfunctioncall
-%
-% This returns the name associated with the \quote {command} value of the token in
-% \LUATEX. There is not always a direct connection between these names and
-% primitives. For instance, all \type {\ifxxx} tests are grouped under \type
-% {if_test}, and the \quote {command modifier} defines which test is to be run.
-%
-% \subsection{\type {oldtoken.command_id}}
-%
-% \startfunctioncall
-% <number> i = oldtoken.command_id(<string> commandname)
-% \stopfunctioncall
-%
-% This returns a number that is the inverse operation of the previous command, to
-% be used as the first item in a token table.
-%
-% \subsection{\type {oldtoken.csname_name}}
-%
-% \startfunctioncall
-% <string> csname = oldtoken.csname_name(<token> t)
-% \stopfunctioncall
-%
-% This returns the name associated with the \quote {equivalence table} value of the
-% token in \LUATEX. It returns the string value of the command used to create the
-% current token, or an empty string if there is no associated control sequence.
-%
-% Keep in mind that there are potentially two control sequences that return the
-% same csname string: single character control sequences and active characters have
-% the same \quote {name}.
-%
-% \subsection{\type {oldtoken.csname_id}}
-%
-% \startfunctioncall
-% <number> i = oldtoken.csname_id(<string> csname)
-% \stopfunctioncall
-%
-% This returns a number that is the inverse operation of the previous command, to
-% be used as the third item in a token table.
-
\subsection{The \type {token} library}
The current \type {token} library will be replaced by a new one that is more
diff --git a/doc/context/sources/general/manuals/luatex/luatex-lua.tex b/doc/context/sources/general/manuals/luatex/luatex-lua.tex
index 86ddc17e8..5bf4f3475 100644
--- a/doc/context/sources/general/manuals/luatex/luatex-lua.tex
+++ b/doc/context/sources/general/manuals/luatex/luatex-lua.tex
@@ -1,3 +1,5 @@
+% language=uk
+
\environment luatex-style
\environment luatex-logos
@@ -26,11 +28,11 @@ interpreter:
\stopitemize
In this mode, it will set \LUA's \type {arg[0]} to the found script name, pushing
-preceding options in negative values and the rest of the commandline in the
+preceding options in negative values and the rest of the command line in the
positive values, just like the \LUA\ interpreter.
\LUATEX\ will exit immediately after executing the specified \LUA\ script and is,
-in effect, a somewhat bulky standalone \LUA\ interpreter with a bunch of extra
+in effect, a somewhat bulky stand alone \LUA\ interpreter with a bunch of extra
preloaded libraries.
\subsection{\LUATEX\ as a \LUA\ byte compiler}
@@ -38,18 +40,18 @@ preloaded libraries.
There are two situations that make \LUATEX\ behave like the \LUA\ byte compiler:
\startitemize[packed]
-\startitem if a \type {--luaconly} option is given on the commandline, or \stopitem
+\startitem if a \type {--luaconly} option is given on the command line, or \stopitem
\startitem if the executable is named \type {texluac} \stopitem
\stopitemize
-In this mode, \LUATEX\ is exactly like \type {luac} from the standalone \LUA\
+In this mode, \LUATEX\ is exactly like \type {luac} from the stand alone \LUA\
distribution, except that it does not have the \type {-l} switch, and that it
accepts (but ignores) the \type {--luaconly} switch.
\subsection{Other commandline processing}
-When the \LUATEX\ executable starts, it looks for the \type {--lua} commandline
-option. If there is no \type {--lua} option, the commandline is interpreted in a
+When the \LUATEX\ executable starts, it looks for the \type {--lua} command line
+option. If there is no \type {--lua} option, the command line is interpreted in a
similar fashion as in traditional \PDFTEX\ and \ALEPH. Some options are accepted
but have no consequence. The following command|-|line options are understood:
@@ -59,7 +61,7 @@ but have no consequence. The following command|-|line options are understood:
\NC --safer \NC disable easily exploitable \LUA\ commands \NC\NR
\NC --nosocket \NC disable the \LUA\ socket library \NC\NR
\NC --help \NC display help and exit \NC\NR
-\NC --ini \NC be iniluatex, for dumping formats \NC\NR
+\NC --ini \NC be \type {iniluatex}, for dumping formats \NC\NR
\NC --interaction=STRING \NC set interaction mode: \type {batchmode}, \type {nonstopmode}
\type {scrollmode} or \type {errorstopmode} \NC \NR
\NC --halt-on-error \NC stop processing at the first error\NC \NR
@@ -129,16 +131,16 @@ any other web2c-based typesetting engine, except that \LUATEX\ has a few extra
switches.
If the \type {--lua} option is present, \LUATEX\ will enter an alternative mode
-of commandline processing in comparison to the standard web2c programs.
+of command line processing in comparison to the standard web2c programs.
In this mode, a small series of actions is taken in order. First, it will parse
-the commandline as usual, but it will only interpret a small subset of the
+the command line as usual, but it will only interpret a small subset of the
options immediately: \type {--safer}, \type {--nosocket}, \type
{--[no-]shell-escape}, \type {--enable-write18}, \type {--disable-write18}, \type
{--shell-restricted}, \type {--help}, \type {--version}, and \type {--credits}.
Now it searches for the requested \LUA\ initialization script. If it cannot be
-found using the actual name given on the commandline, a second attempt is made by
+found using the actual name given on the command line, a second attempt is made by
prepending the value of the environment variable \type {LUATEXDIR}, if that
variable is defined in the environment.
@@ -156,10 +158,10 @@ Then it checks the various safety switches. You can use those to disable some
Furthermore, it disables loading of compiled \LUA\ libraries and it makes \type
{io.open()} fail on files that are opened for anything besides reading.
-When \LUATEX\ starts it set the locale to a neutral value. If for some reason you use
-\type {os.locale}, you need to make sire you nil it afterwards because otherise it
-can interfere with code that for instance generates dates. You can nil the
-locale with
+When \LUATEX\ starts it sets the locale to a neutral value. If for some reason you
+use \type {os.locale}, you need to make sure you \type {nil} it afterwards
+because otherwise it can interfere with code that for instance generates dates.
+You can nil the locale with
\starttyping
os.setlocale(nil,nil)
@@ -174,15 +176,15 @@ make \type {io.popen()}, \type {os.execute}, \type {os.exec} and \type {os.spawn
adhere to the requested option.
Next the initialization script is loaded and executed. From within the script,
-the entire commandline is available in the \LUA\ table \type {arg}, beginning with
+the entire command line is available in the \LUA\ table \type {arg}, beginning with
\type {arg[0]}, containing the name of the executable. As a consequence, the warning
about unrecognized options is suppressed.
-Commandline processing happens very early on. So early, in fact, that none of
+Command line processing happens very early on. So early, in fact, that none of
\TEX's initializations have taken place yet. For that reason, the tables that
deal with typesetting, like \type {tex}, \type {token}, \type {node} and
\type {pdf}, are off|-|limits during the execution of the startup file (they
-are nilled). Special care is taken that \type {texio.write} and \type
+are \type {nil}'d). Special care is taken that \type {texio.write} and \type
{texio.write_nl} function properly, so that you can at least report your actions
to the log file when (and if) it eventually becomes opened (note that \TEX\ does
not even know its \type {\jobname} yet at this point). See \in {chapter} [libraries]
@@ -196,17 +198,17 @@ should not store anything in variables or within tables with these four global
names, as they will be overwritten completely.
We recommend you use the startup file only for your own \TEX|-|independent
-initializations (if you need any), to parse the commandline, set values in the
+initializations (if you need any), to parse the command line, set values in the
\type {texconfig} table, and register the callbacks you need.
-\LUATEX\ allows some of the commandline options to be overridden by reading
+\LUATEX\ allows some of the command line options to be overridden by reading
values from the \type {texconfig} table at the end of script execution (see the
description of the \type {texconfig} table later on in this document for more
details on which ones exactly).
Unless the \type {texconfig} table tells \LUATEX\ not to initialize \KPATHSEA\
at all (set \type {texconfig.kpse_init} to \type {false} for that), \LUATEX\
-acts on some more commandline options after the initialization script is
+acts on some more command line options after the initialization script is
finished: in order to initialize the built|-|in \KPATHSEA\ library properly,
\LUATEX\ needs to know the correct program name to use, and for that it needs to
check \type {--progname}, or \type {--ini} and \type {--fmt}, if \type
@@ -270,7 +272,7 @@ In keeping with the other \TEX|-|like programs in \TEXLIVE, the two \LUA\ functi
{shell_escape} and|/|or \type {shell_escape_commands} in account. Whenever
\LUATEX\ is run with the assumed intention to typeset a document (and by that we
mean that it is called as \type {luatex}, as opposed to \type {texlua}, and that
-the commandline option \type {--luaonly} was not given), it will only run the
+the command line option \type {--luaonly} was not given), it will only run the
four functions above if the matching \type {texmf.cnf} variable(s) or their \type
{texconfig} (see \in {section} [texconfig]) counterparts allow execution of the
requested system command. In \quote {script interpreter} runs of \LUATEX, these
@@ -370,7 +372,7 @@ The \type {os} library has a few extra functions and variables:
The set of consecutive values starting at integer~1 in the table are the
arguments that are passed on to the command (the value at index~1 becomes
\type {arg[0]}). The command is searched for in the execution path, so there
- is normally no need to pass on a fully qualified pathname.
+ is normally no need to pass on a fully qualified path name.
If the argument is a string, then it is automatically converted into a table
by splitting on whitespace. In this case, it is impossible for the command
@@ -483,7 +485,7 @@ LC_NUMERIC=C
\section {\LUA\ modules}
-The implied use of the built|-|in Lua modules in this section is deprecated. If
+The implied use of the built|-|in \LUA\ modules in this section is deprecated. If
you want to use one of these libraries, please start your source file with a
proper \type {require} line. At some point \LUATEX\ will switch to loading these
modules on demand.
@@ -494,42 +496,40 @@ Some modules that are normally external to \LUA\ are statically linked in with
\startitemize
\startitem
- \type {slnunicode}, from the \type {Selene} libraries, \hyphenatedurl
- {http://luaforge.net/projects/sln}. (version 1.1) This library has been
- slightly extended so that the \type {unicode.utf8.*} functions also accept the
- first 256 values of plane~18. This is the range \LUATEX\ uses for raw binary
- output, as explained above.
+ \type {slnunicode}, from the \type {selene} libraries, \hyphenatedurl
+ {http://luaforge.net/projects/sln}. This library has been slightly extended
+ so that the \type {unicode.utf8.*} functions also accept the first 256 values
+ of plane~18. This is the range \LUATEX\ uses for raw binary output, as
+ explained above.
\stopitem
\startitem
- \type {luazip}, from the kepler project,
- \hyphenatedurl{http://www.keplerproject.org/luazip/}. (version 1.2.1, but
- patched for compilation with \LUA\ 5.2)
+ \type {luazip}, from the kepler project, \hyphenatedurl
+ {http://www.keplerproject.org/luazip/}.
\stopitem
\startitem
\type {luafilesystem}, also from the kepler project, \hyphenatedurl
- {http://www.keplerproject.org/luafilesystem/}. (version 1.5.0)
+ {http://www.keplerproject.org/luafilesystem/}.
\stopitem
\startitem
\type {lpeg}, by Roberto Ierusalimschy, \hyphenatedurl
- {http://www.inf.puc-rio.br/~roberto/lpeg/lpeg.html}. (version 0.10.2) This
- library is not \UNICODE|-|aware, but interprets strings on a
- byte|-|per|-|byte basis. This mainly means that \type {lpeg.S} cannot be
- used with \UTF\ characters encoded in more than two bytes, and thus \type
- {lpeg.S} will look for one of those two bytes when matching, not the
- combination of the two. The same is true for \type {lpeg.R}, although the
- latter will display an error message if used with multibyte characters.
- Therefore \type {lpeg.R('aä')} results in the message \type {bad argument
- #1 to 'R' (range must have two characters)}, since to \type {lpeg}, \type {ä}
- is two 'characters' (bytes), so \type {aä} totals three. In practice this is
- no real issue.
+ {http://www.inf.puc-rio.br/~roberto/lpeg/lpeg.html}. This library is not
+ \UNICODE|-|aware, but interprets strings on a byte|-|per|-|byte basis. This
+ mainly means that \type {lpeg.S} cannot be used with \UTF\ characters encoded
+ in more than two bytes, and thus \type {lpeg.S} will look for one of those
+ two bytes when matching, not the combination of the two. The same is true for
+ \type {lpeg.R}, although the latter will display an error message if used
+ with multibyte characters. Therefore \type {lpeg.R('aä')} results in the
+ message \type {bad argument #1 to 'R' (range must have two characters)},
+ since to \type {lpeg}, \type {ä} is two 'characters' (bytes), so \type {aä}
+ totals three. In practice this is no real issue.
\stopitem
\startitem
\type {lzlib}, by Tiago Dionizio, \hyphenatedurl
- {http://luaforge.net/projects/lzlib/}. (version 0.2)
+ {http://luaforge.net/projects/lzlib/}.
\stopitem
\startitem
@@ -539,9 +539,9 @@ Some modules that are normally external to \LUA\ are statically linked in with
\startitem
\type {luasocket}, by Diego Nehab \hyphenatedurl
- {http://w3.impa.br/~diego/software/luasocket/} (version 2.0.2). The \type
- {.lua} support modules from \type {luasocket} are also preloaded inside the
- executable, there are no external file dependencies.
+ {http://w3.impa.br/~diego/software/luasocket/}. The \type {.lua} support
+ modules from \type {luasocket} are also preloaded inside the executable,
+ there are no external file dependencies.
\stopitem
\stopitemize
diff --git a/doc/context/sources/general/manuals/luatex/luatex-math.tex b/doc/context/sources/general/manuals/luatex/luatex-math.tex
index 5447835db..d28b4e6b5 100644
--- a/doc/context/sources/general/manuals/luatex/luatex-math.tex
+++ b/doc/context/sources/general/manuals/luatex/luatex-math.tex
@@ -1,3 +1,5 @@
+% language=uk
+
\environment luatex-style
\environment luatex-logos
@@ -189,7 +191,7 @@ are described as follows:
In \LUATEX\ one can set the styles in more detail which means that you sometimes
have to set both normal and cramped styles to get the effect you want. If we
-force styles in the scriptr using \type {\scriptstyle} and \type {\crampedscriptstyle}
+force styles in the script using \type {\scriptstyle} and \type {\crampedscriptstyle}
we get this:
\startbuffer[demo]
@@ -238,7 +240,7 @@ has resulted in many more parameters than were accessible before.
\starttabulate
\NC \bf primitive name \NC \bf description \NC \NR
-\NC \type {\Umathquad} \NC the width of 18mu's \NC \NR
+\NC \type {\Umathquad} \NC the width of 18 mu's \NC \NR
\NC \type {\Umathaxis} \NC height of the vertical center axis of
the math formula above the baseline \NC \NR
\NC \type {\Umathoperatorsize} \NC minimum size of large operators in display mode \NC \NR
@@ -306,13 +308,14 @@ needed.
\section{Skips around display math}
-The injection of \type {\abovedisplayskip} and \type {\belowdisplayskip} is not symmetrical. An
-above one is always inserted, also when zero, but the below is only inserted when larger than
-zero. Especially the later mkes it sometimes hard to fully control spacing. Therefore \LUATEX\
-comes with a new directive: \type {\mathdisplayskipmode}. The following values apply:
+The injection of \type {\abovedisplayskip} and \type {\belowdisplayskip} is not
+symmetrical. An above one is always inserted, also when zero, but the below is
+only inserted when larger than zero. Especially the latter makes it sometimes hard
+to fully control spacing. Therefore \LUATEX\ comes with a new directive: \type
+{\mathdisplayskipmode}. The following values apply:
\starttabulate
-\NC 0 \NC normal tex behaviour: always above, only below when larger than zero \NC \NR
+\NC 0 \NC normal \TEX\ behaviour: always above, only below when larger than zero \NC \NR
\NC 1 \NC always \NC \NR
\NC 2 \NC only when not zero \NC \NR
\NC 3 \NC never, not even when not zero \NC \NR
@@ -425,24 +428,24 @@ that assumes that an oldstyle \TEX\ font is used. Also, they do not set \type
{\Umathradicaldegreeraise}. These are then automatically initialized to
$5/18$quad, $-10/18$quad, and 60.
-Note 3: If tfm fonts are used, then the \type {\Umathradicalvgap} is not set until
-the first time \LUATEX\ has to typeset a formula because this needs parameters
-from both family2 and family3. This provides a partial backward compatibility
-with \TEX82, but that compatibility is only partial: once the \type
+Note 3: If \TFM\ fonts are used, then the \type {\Umathradicalvgap} is not set
+until the first time \LUATEX\ has to typeset a formula because this needs
+parameters from both family~2 and family~3. This provides a partial backward
+compatibility with \TEX82, but that compatibility is only partial: once the \type
{\Umathradicalvgap} is set, it will not be recalculated any more.
-Note 4: (also if tfm fonts are used) A similar situation arises wrt. \type
-{\Umathspaceafterscript}: it is not set until the first time \LUATEX\ has to
-typeset a formula. This provides some backward compatibility with \TEX82. But
-once the \type {\Umathspaceafterscript} is set, \type {\scriptspace} will never be
-looked at again.
+Note 4: When \TFM\ fonts are used a similar situation arises with respect to
+\type {\Umathspaceafterscript}: it is not set until the first time \LUATEX\ has
+to typeset a formula. This provides some backward compatibility with \TEX82. But
+once the \type {\Umathspaceafterscript} is set, \type {\scriptspace} will never
+be looked at again.
-Note 5: Tfm fonts set \type {\Umathconnectoroverlapmin} to zero because \TEX82\
-always stacks extensibles without any overlap.
+Note 5: Traditional \TFM\ fonts set \type {\Umathconnectoroverlapmin} to zero
+because \TEX82\ always stacks extensibles without any overlap.
-Note 6: The \type {\Umathoperatorsize} is only used in \type {\displaystyle}, and is
-only set in \OPENTYPE\ fonts. In \TFM\ font mode, it is artificially set to one
-scaled point more than the initial attempt's size, so that always the \quote
+Note 6: The \type {\Umathoperatorsize} is only used in \type {\displaystyle}, and
+is only set in \OPENTYPE\ fonts. In \TFM\ font mode, it is artificially set to
+one scaled point more than the initial attempt's size, so that always the \quote
{first next} will be tried, just like in \TEX82.
Note 7: The \type {\Umathradicaldegreeraise} is a special case because it is the
@@ -450,12 +453,12 @@ only parameter that is expressed in a percentage instead of as a number of scale
points.
Note 8: \type {SubscriptShiftDownWithSuperscript} does not actually exist in the
-\quote {standard} Opentype Math font Cambria, but it is useful enough to be
+\quote {standard} \OPENTYPE\ math font Cambria, but it is useful enough to be
added.
Note 9: \type {FractionDelimiterDisplayStyleSize} and \type
-{FractionDelimiterSize} do not actually exist in the \quote {standard} Opentype
-Math font Cambria, but were useful enough to be added.
+{FractionDelimiterSize} do not actually exist in the \quote {standard} \OPENTYPE\
+math font Cambria, but were useful enough to be added.
\section{Math spacing setting}
@@ -540,8 +543,8 @@ like this:
\Umathopordspacing\displaystyle=4mu plus 2mu
\stoptyping
-They are all initialized by initex to the values mentioned in the table in
-Chapter~18 of the \TEX book.
+They are all initialized by \type {initex} to the values mentioned in the table
+in Chapter~18 of the \TEX book.
Note 1: for ease of use as well as for backward compatibility, \type
{\thinmuskip}, \type {\medmuskip} and \type {\thickmuskip} are treated
@@ -587,14 +590,8 @@ followed by its italic correction is used instead.
The vertical placement of a top accent depends on the \type {x_height} of the
font of the accentee (as explained in the \TEX book), but if that value turns out
-to be zero and the font had a MathConstants table, then \type {AccentBaseHeight}
-is used instead.
-
-% there is no bot_accent in opentype math
-%
-% If a math bottom accent has to be placed, the \type {bot_accent} value is checked
-% instead of \type {top_accent}. Because bottom accents do not exist in \TEX82, the
-% \type {\skewchar} kern is ignored.
+to be zero and the font had a \type {MathConstants} table, then \type
+{AccentBaseHeight} is used instead.
The vertical placement of a bottom accent is straight below the accentee, no
correction takes place.
@@ -603,7 +600,7 @@ Possible locations are \type {top}, \type {bottom}, \type {both} and \type
{center}. When no location is given \type {top} is assumed. An additional
parameter \type {fraction} can be specified followed by a number; a value of for
instance 1200 means that the criterion is 1.2 times the width of the nucleus. The
-fraction only aplies to the stepwise selected shapes and is mostly meant for the
+fraction only applies to the stepwise selected shapes and is mostly meant for the
\type {overlay} location. It also works for the other locations but then it
concerns the width.
@@ -624,15 +621,15 @@ The placement of the degree is controlled by the math parameters \type
\section{Math kerning in super- and subscripts}
-The character fields in a lua-loaded OpenType math font can have a \quote
+The character fields in a \LUA|-|loaded \OPENTYPE\ math font can have a \quote
{mathkern} table. The format of this table is the same as the \quote {mathkern}
table that is returned by the \type {fontloader} library, except that all height
and kern values have to be specified in actual scaled points.
When a super- or subscript has to be placed next to a math item, \LUATEX\ checks
whether the super- or subscript and the nucleus are both simple character items.
-If they are, and if the fonts of both character imtes are OpenType fonts (as
-opposed to legacy \TEX\ fonts), then \LUATEX\ will use the OpenType MATH
+If they are, and if the fonts of both character items are \OPENTYPE\ fonts (as
+opposed to legacy \TEX\ fonts), then \LUATEX\ will use the \OPENTYPE\ math
algorithm for deciding on the horizontal placement of the super- or subscript.
This works as follows:
@@ -656,12 +653,12 @@ This works as follows:
For each of these two locations:
\startitemize
\startitem
- find the mathkern value at this height for the base (for a subscript
+ find the math kern value at this height for the base (for a subscript
placement, this is the bottom_right corner, for a superscript
placement the top_right corner)
\stopitem
\startitem
- find the mathkern value at this height for the script (for a
+ find the math kern value at this height for the script (for a
subscript placement, this is the top_left corner, for a superscript
placement the bottom_left corner)
\stopitem
@@ -676,9 +673,9 @@ This works as follows:
\stopitem
\stopitemize
-The mathkern value at a specific height is the kern value that is specified by the
+The math kern value at a specific height is the kern value that is specified by the
next higher height and kern pair, or the highest one in the character (if there is no
-value high enough in the character), or simply zero (if the character has no mathkern
+value high enough in the character), or simply zero (if the character has no math kern
pairs at all).
\section{Scripts on horizontally extensible items like arrows}
@@ -895,7 +892,7 @@ of \type {\mathsurround}, we can remain compatible.
Normally you will force delimiters to certain sizes by putting an empty box or
rule next to it. The resulting delimiter will either be a character from the
stepwise size range or an extensible. The latter can be quite differently
-positioned that the characters as it depends on the fit aas well as the fact if
+positioned than the characters as it depends on the fit as well as on whether
the used characters in the font have depth or height. Commands like (plain \TEX
s) \type {\big} need to use this feature. In \LUATEX\ we provide a bit more control
by three variants that support optional parameters \type {height}, \type
@@ -923,7 +920,7 @@ by three variants that supporting optional parameters \type {height}, \type
The keyword \type {exact} can be used as directive that the real dimensions
should be applied when the criteria can't be met which can happen when we're
-still stepping through the succesively larger variants. When no dimensions are
+still stepping through the successively larger variants. When no dimensions are
given the \type {noaxis} command can be used to prevent shifting over the axis.
You can influence the final class with the keyword \type {class} which will
diff --git a/doc/context/sources/general/manuals/luatex/luatex-modifications.tex b/doc/context/sources/general/manuals/luatex/luatex-modifications.tex
index f0bbe2a26..81d90253f 100644
--- a/doc/context/sources/general/manuals/luatex/luatex-modifications.tex
+++ b/doc/context/sources/general/manuals/luatex/luatex-modifications.tex
@@ -1,3 +1,5 @@
+% language=uk
+
\environment luatex-style
\environment luatex-logos
@@ -11,8 +13,8 @@
The first version of \LUATEX\ only had a few extra primitives and it was largely
the same as \PDFTEX. Then we merged substantial parts of \ALEPH\ into the code
-and got more primitives. When we got more stable teh decision was made to clean
-up the rather hybrid nature of the program. This means that some primnitives have
+and got more primitives. When we got more stable the decision was made to clean
+up the rather hybrid nature of the program. This means that some primitives have
been promoted to core primitives, often with a different name, and that others
were removed. This made it possible to start cleaning up the code base. We will
describe most in following paragraphs.
@@ -69,7 +71,7 @@ most still comes from the original. But we divert a bit.
\startsubsection[title=Changes from \ETEX\ 2.2]
Being the de facto standard extension of course we provide the \ETEX\
-functionality, but with a few small adaptions.
+functionality, but with a few small adaptations.
\startitemize
@@ -132,7 +134,7 @@ which in turn triggered renaming primitives.
\stopitem
\startitem
- A number of \quote {pdftex primitives} have been removed as they can be
+ A number of \quote {\PDFTEX\ primitives} have been removed as they can be
implemented using \LUA:
\start \raggedright
@@ -181,7 +183,7 @@ which in turn triggered renaming primitives.
\startitem
The current version of \LUATEX\ no longer replaces and|/|or merges fonts in
- embedded pdf files with fonts of the enveloping \PDF\ document. This
+ embedded \PDF\ files with fonts of the enveloping \PDF\ document. This
regression may be temporary, depending on what the rewritten font backend will
look like.
\stopitem
@@ -213,7 +215,7 @@ which in turn triggered renaming primitives.
\startitem
When \type {\adjustspacing} has value~2, hz optimization will be applied to
- glyphs and kerns. When the valus is~3, only glyphs will be treated. A value
+ glyphs and kerns. When the value is~3, only glyphs will be treated. A value
smaller than~2 disables this feature.
\stopitem
@@ -270,7 +272,7 @@ which in turn triggered renaming primitives.
One change involves the so called xforms and ximages. In \PDFTEX\ these are
implemented as so called whatsits. But contrary to other whatsits they have
dimensions that need to be taken into account when for instance calculating
-optimal linebreaks. In \LUATEX\ these are now promoted to normal nodes, which
+optimal line breaks. In \LUATEX\ these are now promoted to normal nodes, which
simplifies code that needs those dimensions.
Another reason for promotion is that these are useful concepts. Backends can
@@ -293,6 +295,35 @@ typical backend specific ones. The index that gets returned is to be considered
as \quote {just a number} and although it still has the same meaning (object
related) as before, you should not depend on that.
+The protrusion detection mechanism is enhanced a bit to enable more complex
+situations. When protrusion characters are identified some nodes are skipped:
+
+\startitemize[packed]
+\startitem zero glue \stopitem
+\startitem penalties \stopitem
+\startitem empty discretionaries \stopitem
+\startitem normal zero kerns \stopitem
+\startitem rules with zero dimensions \stopitem
+\startitem math nodes with a surround of zero \stopitem
+\startitem dir nodes \stopitem
+\startitem empty horizontal lists \stopitem
+\startitem local par nodes \stopitem
+\startitem inserts, marks and adjusts \stopitem
+\startitem boundaries \stopitem
+\startitem whatsits \stopitem
+\stopitemize
+
+Because this may not be enough, you can also use a boundary node to make the next
+node be ignored. When the boundary value is~1 or~3, the next node will be
+ignored in the test when locating a left boundary condition. When the value is~2
+or~3, the previous node will be ignored when locating a right boundary condition
+(the search goes from right to left). This permits protrusion combined with for
+instance content moved into the margin:
+
+\starttyping
+\boundary1\llap{!\quad}«Who needs protrusion?»
+\stoptyping
+
\stopsubsection
\startsubsection[title=Changes from \ALEPH\ RC4]
@@ -464,7 +495,7 @@ others promoted to core \LUATEX\ primitives. That is only part of the story. In
order to separate the backend specific primitives in the code these commands are
now replaced by only a few. In traditional \TEX\ we only had the \DVI\ backend
but now we have two: \DVI\ and \PDF. Additional functionality is implemented as
-\quote {extensions} in \TEX speak. By seperating more strickly we are able to
+\quote {extensions} in \TEX\ speak. By separating more strictly we are able to
keep the core (frontend) clean and stable. If for some reason an extra backend
option is needed, it can be implemented without touching the core. The three
\PDF\ backend related primitives are
@@ -484,33 +515,35 @@ has already be the case right from the start. If you want the traditional \PDFTE
primitives (for as far their functionality is still around) you now can do this:
\starttyping
-\protected\def\pdfliteral {\pdfextension literal}
-\protected\def\pdfcolorstack {\pdfextension colorstack}
-\protected\def\pdfsetmatrix {\pdfextension setmatrix}
-\protected\def\pdfsave {\pdfextension save\relax}
-\protected\def\pdfrestore {\pdfextension restore\relax}
-\protected\def\pdfobj {\pdfextension obj }
-\protected\def\pdfrefobj {\pdfextension refobj }
-\protected\def\pdfannot {\pdfextension annot }
-\protected\def\pdfstartlink {\pdfextension startlink }
-\protected\def\pdfendlink {\pdfextension endlink\relax}
-\protected\def\pdfoutline {\pdfextension outline }
-\protected\def\pdfdest {\pdfextension dest }
-\protected\def\pdfthread {\pdfextension thread }
-\protected\def\pdfstartthread {\pdfextension startthread }
-\protected\def\pdfendthread {\pdfextension endthread\relax}
-\protected\def\pdfinfo {\pdfextension info }
-\protected\def\pdfcatalog {\pdfextension catalog }
-\protected\def\pdfnames {\pdfextension names }
-\protected\def\pdfincludechars {\pdfextension includechars }
-\protected\def\pdffontattr {\pdfextension fontattr }
-\protected\def\pdfmapfile {\pdfextension mapfile }
-\protected\def\pdfmapline {\pdfextension mapline }
-\protected\def\pdftrailer {\pdfextension trailer }
-\protected\def\pdfglyphtounicode{\pdfextension glyphtounicode }
+\protected\def\pdfliteral {\pdfextension literal}
+\protected\def\pdfcolorstack {\pdfextension colorstack}
+\protected\def\pdfsetmatrix {\pdfextension setmatrix}
+\protected\def\pdfsave {\pdfextension save\relax}
+\protected\def\pdfrestore {\pdfextension restore\relax}
+\protected\def\pdfobj {\pdfextension obj }
+\protected\def\pdfrefobj {\pdfextension refobj }
+\protected\def\pdfannot {\pdfextension annot }
+\protected\def\pdfstartlink {\pdfextension startlink }
+\protected\def\pdfendlink {\pdfextension endlink\relax}
+\protected\def\pdfoutline {\pdfextension outline }
+\protected\def\pdfdest {\pdfextension dest }
+\protected\def\pdfthread {\pdfextension thread }
+\protected\def\pdfstartthread {\pdfextension startthread }
+\protected\def\pdfendthread {\pdfextension endthread\relax}
+\protected\def\pdfinfo {\pdfextension info }
+\protected\def\pdfcatalog {\pdfextension catalog }
+\protected\def\pdfnames {\pdfextension names }
+\protected\def\pdfincludechars {\pdfextension includechars }
+\protected\def\pdffontattr {\pdfextension fontattr }
+\protected\def\pdfmapfile {\pdfextension mapfile }
+\protected\def\pdfmapline {\pdfextension mapline }
+\protected\def\pdftrailer {\pdfextension trailer }
+\protected\def\pdfglyphtounicode {\pdfextension glyphtounicode }
+\protected\def\pdfsuppressoptionalinfo{\pdfextension suppressoptionalinfo }
+\protected\def\pdfinfoid {\pdfextension infoid }
\stoptyping
-The introspective primitives can bve defines as:
+The introspective primitives can be defined as:
\starttyping
\def\pdftexversion {\numexpr\pdffeedback version\relax}
@@ -673,6 +706,38 @@ The backend is derived from \PDFTEX\ so the same syntax applies. However, the
checking takes place so when this is used it had better be a valid (flushed)
object.
+In order to be (more or less) compatible with \PDFTEX\ we also support the
+option to suppress some info:
+
+\starttyping
+\pdfvariable suppressoptionalinfo \numexpr
+ 0
+ + 1 % PTEX.FullBanner
+ + 2 % PTEX.FileName
+ + 4 % PTEX.PageNumber
+ + 8 % PTEX.InfoDict
+ + 16 % Creator
+ + 32 % CreationDate
+ + 64 % ModDate
+ + 128 % Producer
+ + 256 % Trapped
+ + 512 % ID
+\relax
+\stoptyping
+
+In addition you can overload the trailer id, but we don't do any checking on
+validity, so you have to pass a valid array. The following is like the ones
+normally generated by the engine:
+
+\starttyping
+\pdfvariable trailerid {[
+ <FA052949448907805BA83C1E78896398>
+ <FA052949448907805BA83C1E78896398>
+]}
+\stoptyping
+
+So, you even need to include the brackets!
+
\stopsubsection
\stopsection
@@ -763,7 +828,7 @@ All of the internal code is changed in such a way that if one of the \type
basically the same convention as the callback: a single read into a buffer big
enough to hold the entire file contents. While this uses more memory than the
previous code (that mostly used \type {getc} calls), it can be quite a bit faster
-(depending on your I/O subsystem).
+(depending on your \IO\ subsystem).
\stopsubsection
diff --git a/doc/context/sources/general/manuals/luatex/luatex-nodes.tex b/doc/context/sources/general/manuals/luatex/luatex-nodes.tex
index 8ffa9507b..9b67b1f61 100644
--- a/doc/context/sources/general/manuals/luatex/luatex-nodes.tex
+++ b/doc/context/sources/general/manuals/luatex/luatex-nodes.tex
@@ -1,3 +1,5 @@
+% language=uk
+
\environment luatex-style
\environment luatex-logos
@@ -138,9 +140,9 @@ Id: \showid{hlist}
A warning: never assign a node list to the \type {head} field unless you are sure
its internal link structure is correct, otherwise an error may result.
-Note: the new field name \type {head} was introduced in 0.65 to replace the old
-name \type {list}. Use of the name \type {list} is now deprecated, but it will
-stay available until at least version 0.80.
+Note: the field names \type {head} and \type {list} are both valid. Sometimes it
+makes more sense to refer to a list by \type {head}, sometimes \type {list} reads
+better.
\subsubsection{vlist nodes}
@@ -177,7 +179,7 @@ image.
backend specific). \NC \NR
\stoptabulate
-The subtypes 1 and~2 replace the xform and ximage whatsits and in nodelists they
+The subtypes 1 and~2 replace the xform and ximage whatsits and in node lists they
behave like rules of subtype~0 when it comes to dimensions. Subtype~3 only has
dimensions.
@@ -345,15 +347,21 @@ The exact meanings of the subtypes are as follows:
\NC 13 \NC \type {\spaceskip} \NC \NR
\NC 14 \NC \type {\xspaceskip} \NC \NR
\NC 15 \NC \type {\parfillskip} \NC \NR
-\NC 16 \NC \type {\thinmuskip} \NC \NR
-\NC 17 \NC \type {\medmuskip} \NC \NR
-\NC 18 \NC \type {\thickmuskip} \NC \NR
+\NC 16 \NC \type {\mathsurroundskip} \NC \NR
+\NC 17 \NC \type {\thinmuskip} \NC \NR
+\NC 18 \NC \type {\medmuskip} \NC \NR
+\NC 19 \NC \type {\thickmuskip} \NC \NR
+\NC 98 \NC \type {conditional math skip} \NC \NR
+\NC 99 \NC \type {muglue} \NC \NR
\NC 100 \NC \type {\leaders} \NC \NR
\NC 101 \NC \type {\cleaders} \NC \NR
\NC 102 \NC \type {\xleaders} \NC \NR
\NC 103 \NC \type {\gleaders} \NC \NR
\stoptabulate
+A regular word space also results in a \type {spaceskip} subtype (this used to be
+a \type {userskip} with subtype zero).
+
For convenience we provide access to the spec fields directly so that you can
avoid the spec lookup. So, the following fields can also be queried or set. When
you set a field and no spec is set, a spec will automatically be created.
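A minimal sketch of such direct access, assuming glue nodes that expose the spec
fields as listed here (\type {tex.sp} just converts a dimension into scaled
points):

\starttyping
local g = node.new("glue")    -- a fresh glue node, no spec assigned yet
g.width   = tex.sp("10pt")    -- setting a field creates the spec on demand
g.stretch = tex.sp("2pt")
g.shrink  = tex.sp("1pt")
print(g.width, g.stretch, g.shrink)
node.free(g)
\stoptyping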
@@ -452,9 +460,15 @@ replaced them by expansion factors that travel with glyph nodes. Apart from a
cleaner approach this is also a step towards a better separation between front-
and backend.
-The \type {is_char} function checks if a node is a glyphnode with a subtype still
+The \type {is_char} function checks if a node is a glyph node with a subtype still
less than 256. This function can be used to determine if applying font logic to a
-glyph node makes sense.
+glyph node makes sense. The value \type {nil} gets returned when the node is not
+a glyph, a character number is returned if the node is still tagged as a character,
+and \type {false} gets returned otherwise. When \type {nil} is returned, the id is
+also returned. The \type {is_glyph} variant doesn't check for a subtype being less
+than 256, so it returns either the character value or \type {nil} plus the id. These
+helpers are not always faster than separate calls but they sometimes permit more
+readable tests.
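A small sketch of how the returned values can be used; here \type {head} is
assumed to be a node list obtained elsewhere:

\starttyping
for n in node.traverse(head) do
    local c, id = node.is_char(n)
    if c then
        -- still a character (subtype < 256): font logic makes sense here
    elseif c == false then
        -- a glyph that has already been turned into something else
    else
        -- c is nil: not a glyph at all, and id tells us what it is
    end
    -- node.is_glyph(n) skips the subtype check: it returns the character
    -- value, or nil plus the id
end
\stoptyping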
\subsubsection{margin_kern nodes}
@@ -546,8 +560,8 @@ version as an extension to the \type {small_char}.
\subsubsection{Math core nodes}
First, there are the objects (the \TEX book calls them \quote {atoms}) that are
-associated with the simple math objects: Ord, Op, Bin, Rel, Open, Close, Punct,
-Inner, Over, Under, Vcent. These all have the same fields, and they are combined
+associated with the simple math objects: ord, op, bin, rel, open, close, punct,
+inner, over, under, vcent. These all have the same fields, and they are combined
into a single node type with separate subtypes for differentiation.
\subsubsubsection{simple nodes}
@@ -817,8 +831,8 @@ Id: \showid{dir}
\NC attr \NC \syntax{<node>} \NC \NC \NR
\NC dir \NC string \NC the direction (but see below) \NC \NR
\NC level \NC number \NC nesting level of this direction whatsit \NC \NR
-\NC dvi_ptr \NC number \NC a saved dvi buffer byte offset \NC \NR
-\NC dir_h \NC number \NC a saved dvi position \NC \NR
+\NC dvi_ptr \NC number \NC a saved \DVI\ buffer byte offset \NC \NR
+\NC dir_h \NC number \NC a saved \DVI\ position \NC \NR
\stoptabulate
A note on \type {dir} strings. Direction specifiers are three|-|letter
@@ -931,7 +945,7 @@ Id: \showid{whatsit,pdf_dest}
\NC width \NC number \NC \NC \NR
\NC height \NC number \NC \NC \NR
\NC depth \NC number \NC \NC \NR
-\NC named_id \NC number \NC is the dest_id a string value? \NC \NR
+\NC named_id \NC number \NC is the \type {dest_id} a string value? \NC \NR
\NC dest_id \NC number \NC the destination id \NC \NR
\NC \NC string \NC the destination name \NC \NR
\NC dest_type \NC number \NC type of destination \NC \NR
@@ -969,7 +983,7 @@ Id: \showid{whatsit,pdf_thread}
\NC width \NC number \NC \NC \NR
\NC height \NC number \NC \NC \NR
\NC depth \NC number \NC \NC \NR
-\NC named_id \NC number \NC is the tread_id a string value? \NC \NR
+\NC named_id \NC number \NC is \type {tread_id} a string value? \NC \NR
\NC tread_id \NC number \NC the thread id \NC \NR
\NC \NC string \NC the thread name \NC \NR
\NC thread_attr \NC number \NC extra thread information \NC \NR
@@ -986,7 +1000,7 @@ Id: \showid{whatsit,pdf_start_thread}
\NC width \NC number \NC \NC \NR
\NC height \NC number \NC \NC \NR
\NC depth \NC number \NC \NC \NR
-\NC named_id \NC number \NC is the tread_id a string value? \NC \NR
+\NC named_id \NC number \NC is \type {tread_id} a string value? \NC \NR
\NC tread_id \NC number \NC the thread id \NC \NR
\NC \NC string \NC the thread name \NC \NR
\NC thread_attr \NC number \NC extra thread information \NC \NR
@@ -1022,7 +1036,7 @@ Id: \showid{whatsit,late_lua}
\NC attr \NC \syntax{<node>} \NC \NC \NR
\NC data \NC string \NC data to execute \NC \NR
\NC string \NC string \NC data to execute \NC \NR
-\NC name \NC string \NC the name to use for lua error reporting \NC \NR
+\NC name \NC string \NC the name to use for \LUA\ error reporting \NC \NR
\stoptabulate
The difference between \type {data} and \type {string} is that on assignment, the
@@ -1161,11 +1175,11 @@ If performance matters you can use a function instead:
\starttabulate[|T|p|]
\NC getnext \NC parsing nodelist always involves this one \NC \NR
-\NC getprev \NC used less but is logical companion to getnext \NC \NR
+\NC getprev \NC used less but is logical companion to \type {getnext} \NC \NR
\NC getboth \NC returns the next and prev pointer of a node \NC \NR
\NC getid \NC consulted a lot \NC \NR
\NC getsubtype \NC consulted less but also a topper \NC \NR
-\NC getfont \NC used a lot in otf handling (glyph nodes are consulted a lot) \NC \NR
+\NC getfont \NC used a lot in \OPENTYPE\ handling (glyph nodes are consulted a lot) \NC \NR
\NC getchar \NC idem and also in other places \NC \NR
\NC getdisc \NC returns the \type {pre}, \type {post} and \type {replace} fields and
optionally when true is passed also the tail fields. \NC \NR
@@ -1228,6 +1242,7 @@ summarized this:
\NC \type {insert_after} \NC \yes \NC \yes \NC \NR
\NC \type {insert_before} \NC \yes \NC \yes \NC \NR
\NC \type {is_char} \NC \yes \NC \yes \NC \NR
+\NC \type {is_glyph} \NC \yes \NC \yes \NC \NR
\NC \type {is_direct} \NC \nop \NC \yes \NC \NR
\NC \type {is_node} \NC \yes \NC \yes \NC \NR
\NC \type {kerning} \NC \yes \NC \yes \NC \NR
@@ -1281,6 +1296,13 @@ taken for providing meta information about nodes. Note: The getters do only basi
checking for valid keys. You should just stick to the keys mentioned in the
sections that describe node properties.
+Some nodes have indirect references. For instance a math character refers to a
+family instead of a font. In that case we provide a virtual font field as an
+accessor. So, \type {getfont} and \type {.font} can be used on them. The same is
+true for the \type {width}, \type {height} and \type {depth} of glue nodes. These
+actually access the spec node properties, and here we can set as well as get the
+values.
+
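A hedged sketch of such a virtual accessor for a math character; it assumes that
family~0 has a font assigned, as it has in any normal format (the glue case works
the same way as the direct spec access shown before):

\starttyping
local m = node.new("math_char")
m.fam  = 0            -- the node itself only stores a family number ...
m.char = 0x41
local f = m.font      -- ... but the virtual font field resolves it for us
node.free(m)
\stoptyping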
\stopchapter
\stopcomponent
diff --git a/doc/context/sources/general/manuals/luatex/luatex-style.tex b/doc/context/sources/general/manuals/luatex/luatex-style.tex
index 90479adf1..1c79b7695 100644
--- a/doc/context/sources/general/manuals/luatex/luatex-style.tex
+++ b/doc/context/sources/general/manuals/luatex/luatex-style.tex
@@ -197,7 +197,7 @@
luaorbitfactor := 1 ;
picture p ; p := lualogo xsized (3PaperWidth/5) ;
- draw p shifted center Page shifted (0,-ypart center ulcorner p) ;
+ draw p shifted center Page shifted (0,-.5ypart center ulcorner p) ;
StopPage ;
\stopuseMPgraphic
diff --git a/doc/context/sources/general/manuals/luatex/luatex-titlepage.tex b/doc/context/sources/general/manuals/luatex/luatex-titlepage.tex
index cf40b8eb8..20c22160d 100644
--- a/doc/context/sources/general/manuals/luatex/luatex-titlepage.tex
+++ b/doc/context/sources/general/manuals/luatex/luatex-titlepage.tex
@@ -23,17 +23,17 @@
frame=off]
{Lua\TeX\\Reference}
- \definedfont[Bold*default at 24pt] \setupinterlinespace
+ \definedfont[Bold*default at 18pt] \setupinterlinespace
\setlayerframed
[page]
- [preset=middletop,
- voffset=.35\paperheight]
- [align=middle,
+ [preset=rightbottom,
+ offset=.01\paperheight]
+ [align=flushright,
foregroundcolor=blue,
frame=off]
- {\doifsomething{\documentvariable{snapshot}}{snapshot \documentvariable{snapshot}}%
- \doifsomething{\documentvariable{beta}} {beta \documentvariable{beta}}}
+ {\currentdate[month,space,year]\par
+ Version \documentvariable{version}}
\stopstandardmakeup
diff --git a/doc/context/sources/general/manuals/luatex/luatex.tex b/doc/context/sources/general/manuals/luatex/luatex.tex
index 6640e126e..5dea07094 100644
--- a/doc/context/sources/general/manuals/luatex/luatex.tex
+++ b/doc/context/sources/general/manuals/luatex/luatex.tex
@@ -3,18 +3,13 @@
% \tex vs \type vs \syntax vs. \luatex
% \em \it \/
-% \enabledirectives[nodes.basepass*]
-
-% \unprotect
-% \protect
-
\environment luatex-style
\environment luatex-logos
\dontcomplain
\startdocument
- [beta=0.89.0]
+ [version=0.90.0]
\component luatex-titlepage
diff --git a/scripts/context/lua/mtx-fonts.lua b/scripts/context/lua/mtx-fonts.lua
index f25e198bc..79c8cf65e 100644
--- a/scripts/context/lua/mtx-fonts.lua
+++ b/scripts/context/lua/mtx-fonts.lua
@@ -10,6 +10,10 @@ local getargument = environment.getargument
local setargument = environment.setargument
local givenfiles = environment.files
+local suffix, addsuffix, removesuffix, replacesuffix = file.suffix, file.addsuffix, file.removesuffix, file.replacesuffix
+local nameonly, basename, joinpath, collapsepath = file.nameonly, file.basename, file.join, file.collapsepath
+local lower = string.lower
+
local otfversion = 2.819
local otlversion = 3.013
@@ -168,7 +172,7 @@ function fonts.names.simple(alsotypeone)
local simpleversion = 1.001
local simplelist = { "ttf", "otf", "ttc", "dfont", alsotypeone and "afm" or nil }
local name = "luatex-fonts-names.lua"
- local path = file.collapsepath(caches.getwritablepath("..","..","generic","fonts","data"))
+ local path = collapsepath(caches.getwritablepath("..","..","generic","fonts","data"))
fonts.names.filters.list = simplelist
fonts.names.version = simpleversion -- this number is the same as in font-dum.lua
report("generating font database for 'luatex-fonts' version %s",fonts.names.version)
@@ -186,7 +190,7 @@ function fonts.names.simple(alsotypeone)
local format = simplelist[i]
for tag, index in next, data.mappings[format] do
local s = specifications[index]
- simplemappings[tag] = { s.rawname, s.filename, s.subfont }
+ simplemappings[tag] = { s.rawname or nameonly(s.filename), s.filename, s.subfont }
end
end
if environment.arguments.nocache then
@@ -195,7 +199,7 @@ function fonts.names.simple(alsotypeone)
dir.mkdirs(path)
if lfs.isdir(path) then
report("saving names on cache path %a",path)
- name = file.join(path,name)
+ name = joinpath(path,name)
else
report("invalid cache path %a",path)
end
@@ -424,14 +428,14 @@ function scripts.fonts.justload()
end
function scripts.fonts.unpack()
- local name = file.removesuffix(file.basename(givenfiles[1] or ""))
+ local name = removesuffix(basename(givenfiles[1] or ""))
if name and name ~= "" then
local cacheid = getargument("cache") or "otl"
local cache = containers.define("fonts", cacheid, otlversion, true) -- cache is temp
local cleanname = containers.cleanname(name)
local data = containers.read(cache,cleanname)
if data then
- local savename = file.addsuffix(cleanname .. "-unpacked","tma")
+ local savename = addsuffix(cleanname .. "-unpacked","tma")
report("fontsave, saving data in %s",savename)
if data.creator == "context mkiv" then
fonts.handlers.otf.readers.unpack(data)
@@ -452,9 +456,9 @@ function scripts.fonts.save()
if fontblob then
if fontblob.validation_state and table.contains(fontblob.validation_state,"bad_ps_fontname") then
report("ignoring bad fontname for %a",name)
- savename = file.nameonly(name) .. "-bad-ps-name"
+ savename = nameonly(name) .. "-bad-ps-name"
end
- savename = file.addsuffix(string.lower(savename),"lua")
+ savename = addsuffix(lower(savename),"lua")
report("fontsave, saving data in %a",savename)
table.tofile(savename,fontloader.to_table(fontblob),"return")
fontloader.close(fontblob)
@@ -463,7 +467,7 @@ function scripts.fonts.save()
if name and name ~= "" then
local filename = resolvers.findfile(name) -- maybe also search for opentype
if filename and filename ~= "" then
- local suffix = string.lower(file.suffix(filename))
+ local suffix = lower(suffix(filename))
if suffix == 'ttf' or suffix == 'otf' or suffix == 'ttc' or suffix == "dfont" then
local fontinfo = fontloader.info(filename)
if fontinfo then
@@ -496,13 +500,13 @@ function scripts.fonts.convert() -- new save
if name and name ~= "" then
local filename = resolvers.findfile(name) -- maybe also search for opentype
if filename and filename ~= "" then
- local suffix = string.lower(file.suffix(filename))
+ local suffix = lower(suffix(filename))
if suffix == 'ttf' or suffix == 'otf' or suffix == 'ttc' then
local data = fonts.handlers.otf.readers.loadfont(filename,sub)
if data then
fonts.handlers.otf.readers.compact(data)
fonts.handlers.otf.readers.rehash(data,getargument("names") and "names" or "unicodes")
- local savename = file.replacesuffix(string.lower(data.metadata.fullname or filename),"lua")
+ local savename = replacesuffix(lower(data.metadata.fullname or filename),"lua")
table.save(savename,data)
report("font: %a saved as %a",filename,savename)
else
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index d72be372e..176598941 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -9989,7 +9989,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
--- original size: 56627, stripped down to: 35669
+-- original size: 56973, stripped down to: 35872
if not modules then modules={} end modules ['lxml-tab']={
version=1.001,
@@ -10006,6 +10006,7 @@ local xml=xml
local concat,remove,insert=table.concat,table.remove,table.insert
local type,next,setmetatable,getmetatable,tonumber,rawset=type,next,setmetatable,getmetatable,tonumber,rawset
local lower,find,match,gsub=string.lower,string.find,string.match,string.gsub
+local sort=table.sort
local utfchar=utf.char
local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
local P,S,R,C,V,C,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.C,lpeg.Cs
@@ -10950,22 +10951,34 @@ local function verbose_element(e,handlers,escape)
local ats=eat and next(eat) and {}
if ats then
local n=0
- for k,v in next,eat do
+ for k in next,eat do
n=n+1
- ats[n]=f_attribute(k,escaped(v))
+ ats[n]=k
+ end
+ if n==1 then
+ local k=ats[1]
+ ats=f_attribute(k,escaped(eat[k]))
+ else
+ sort(ats)
+ for i=1,n do
+ local k=ats[i]
+ ats[i]=f_attribute(k,escaped(eat[k]))
+ end
+ ats=concat(ats," ")
end
end
if ern and trace_entities and ern~=ens then
ens=ern
end
+ local n=edt and #edt
if ens~="" then
- if edt and #edt>0 then
+ if n and n>0 then
if ats then
- handle("<",ens,":",etg," ",concat(ats," "),">")
+ handle("<",ens,":",etg," ",ats,">")
else
handle("<",ens,":",etg,">")
end
- for i=1,#edt do
+ for i=1,n do
local e=edt[i]
if type(e)=="string" then
handle(escaped(e))
@@ -10976,19 +10989,19 @@ local function verbose_element(e,handlers,escape)
handle("</",ens,":",etg,">")
else
if ats then
- handle("<",ens,":",etg," ",concat(ats," "),"/>")
+ handle("<",ens,":",etg," ",ats,"/>")
else
handle("<",ens,":",etg,"/>")
end
end
else
- if edt and #edt>0 then
+ if n and n>0 then
if ats then
- handle("<",etg," ",concat(ats," "),">")
+ handle("<",etg," ",ats,">")
else
handle("<",etg,">")
end
- for i=1,#edt do
+ for i=1,n do
local e=edt[i]
if type(e)=="string" then
handle(escaped(e))
@@ -10999,7 +11012,7 @@ local function verbose_element(e,handlers,escape)
handle("</",etg,">")
else
if ats then
- handle("<",etg," ",concat(ats," "),"/>")
+ handle("<",etg," ",ats,"/>")
else
handle("<",etg,"/>")
end
@@ -18722,8 +18735,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sac.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 796212
--- stripped bytes : 288762
+-- original bytes : 796558
+-- stripped bytes : 288905
-- end library merge
diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua
index d72be372e..176598941 100644
--- a/scripts/context/stubs/mswin/mtxrun.lua
+++ b/scripts/context/stubs/mswin/mtxrun.lua
@@ -9989,7 +9989,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
--- original size: 56627, stripped down to: 35669
+-- original size: 56973, stripped down to: 35872
if not modules then modules={} end modules ['lxml-tab']={
version=1.001,
@@ -10006,6 +10006,7 @@ local xml=xml
local concat,remove,insert=table.concat,table.remove,table.insert
local type,next,setmetatable,getmetatable,tonumber,rawset=type,next,setmetatable,getmetatable,tonumber,rawset
local lower,find,match,gsub=string.lower,string.find,string.match,string.gsub
+local sort=table.sort
local utfchar=utf.char
local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
local P,S,R,C,V,C,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.C,lpeg.Cs
@@ -10950,22 +10951,34 @@ local function verbose_element(e,handlers,escape)
local ats=eat and next(eat) and {}
if ats then
local n=0
- for k,v in next,eat do
+ for k in next,eat do
n=n+1
- ats[n]=f_attribute(k,escaped(v))
+ ats[n]=k
+ end
+ if n==1 then
+ local k=ats[1]
+ ats=f_attribute(k,escaped(eat[k]))
+ else
+ sort(ats)
+ for i=1,n do
+ local k=ats[i]
+ ats[i]=f_attribute(k,escaped(eat[k]))
+ end
+ ats=concat(ats," ")
end
end
if ern and trace_entities and ern~=ens then
ens=ern
end
+ local n=edt and #edt
if ens~="" then
- if edt and #edt>0 then
+ if n and n>0 then
if ats then
- handle("<",ens,":",etg," ",concat(ats," "),">")
+ handle("<",ens,":",etg," ",ats,">")
else
handle("<",ens,":",etg,">")
end
- for i=1,#edt do
+ for i=1,n do
local e=edt[i]
if type(e)=="string" then
handle(escaped(e))
@@ -10976,19 +10989,19 @@ local function verbose_element(e,handlers,escape)
handle("</",ens,":",etg,">")
else
if ats then
- handle("<",ens,":",etg," ",concat(ats," "),"/>")
+ handle("<",ens,":",etg," ",ats,"/>")
else
handle("<",ens,":",etg,"/>")
end
end
else
- if edt and #edt>0 then
+ if n and n>0 then
if ats then
- handle("<",etg," ",concat(ats," "),">")
+ handle("<",etg," ",ats,">")
else
handle("<",etg,">")
end
- for i=1,#edt do
+ for i=1,n do
local e=edt[i]
if type(e)=="string" then
handle(escaped(e))
@@ -10999,7 +11012,7 @@ local function verbose_element(e,handlers,escape)
handle("</",etg,">")
else
if ats then
- handle("<",etg," ",concat(ats," "),"/>")
+ handle("<",etg," ",ats,"/>")
else
handle("<",etg,"/>")
end
@@ -18722,8 +18735,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sac.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 796212
--- stripped bytes : 288762
+-- original bytes : 796558
+-- stripped bytes : 288905
-- end library merge
diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun
index d72be372e..176598941 100644
--- a/scripts/context/stubs/unix/mtxrun
+++ b/scripts/context/stubs/unix/mtxrun
@@ -9989,7 +9989,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
--- original size: 56627, stripped down to: 35669
+-- original size: 56973, stripped down to: 35872
if not modules then modules={} end modules ['lxml-tab']={
version=1.001,
@@ -10006,6 +10006,7 @@ local xml=xml
local concat,remove,insert=table.concat,table.remove,table.insert
local type,next,setmetatable,getmetatable,tonumber,rawset=type,next,setmetatable,getmetatable,tonumber,rawset
local lower,find,match,gsub=string.lower,string.find,string.match,string.gsub
+local sort=table.sort
local utfchar=utf.char
local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
local P,S,R,C,V,C,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.C,lpeg.Cs
@@ -10950,22 +10951,34 @@ local function verbose_element(e,handlers,escape)
local ats=eat and next(eat) and {}
if ats then
local n=0
- for k,v in next,eat do
+ for k in next,eat do
n=n+1
- ats[n]=f_attribute(k,escaped(v))
+ ats[n]=k
+ end
+ if n==1 then
+ local k=ats[1]
+ ats=f_attribute(k,escaped(eat[k]))
+ else
+ sort(ats)
+ for i=1,n do
+ local k=ats[i]
+ ats[i]=f_attribute(k,escaped(eat[k]))
+ end
+ ats=concat(ats," ")
end
end
if ern and trace_entities and ern~=ens then
ens=ern
end
+ local n=edt and #edt
if ens~="" then
- if edt and #edt>0 then
+ if n and n>0 then
if ats then
- handle("<",ens,":",etg," ",concat(ats," "),">")
+ handle("<",ens,":",etg," ",ats,">")
else
handle("<",ens,":",etg,">")
end
- for i=1,#edt do
+ for i=1,n do
local e=edt[i]
if type(e)=="string" then
handle(escaped(e))
@@ -10976,19 +10989,19 @@ local function verbose_element(e,handlers,escape)
handle("</",ens,":",etg,">")
else
if ats then
- handle("<",ens,":",etg," ",concat(ats," "),"/>")
+ handle("<",ens,":",etg," ",ats,"/>")
else
handle("<",ens,":",etg,"/>")
end
end
else
- if edt and #edt>0 then
+ if n and n>0 then
if ats then
- handle("<",etg," ",concat(ats," "),">")
+ handle("<",etg," ",ats,">")
else
handle("<",etg,">")
end
- for i=1,#edt do
+ for i=1,n do
local e=edt[i]
if type(e)=="string" then
handle(escaped(e))
@@ -10999,7 +11012,7 @@ local function verbose_element(e,handlers,escape)
handle("</",etg,">")
else
if ats then
- handle("<",etg," ",concat(ats," "),"/>")
+ handle("<",etg," ",ats,"/>")
else
handle("<",etg,"/>")
end
@@ -18722,8 +18735,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sac.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 796212
--- stripped bytes : 288762
+-- original bytes : 796558
+-- stripped bytes : 288905
-- end library merge
diff --git a/scripts/context/stubs/win64/mtxrun.lua b/scripts/context/stubs/win64/mtxrun.lua
index d72be372e..176598941 100644
--- a/scripts/context/stubs/win64/mtxrun.lua
+++ b/scripts/context/stubs/win64/mtxrun.lua
@@ -9989,7 +9989,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
--- original size: 56627, stripped down to: 35669
+-- original size: 56973, stripped down to: 35872
if not modules then modules={} end modules ['lxml-tab']={
version=1.001,
@@ -10006,6 +10006,7 @@ local xml=xml
local concat,remove,insert=table.concat,table.remove,table.insert
local type,next,setmetatable,getmetatable,tonumber,rawset=type,next,setmetatable,getmetatable,tonumber,rawset
local lower,find,match,gsub=string.lower,string.find,string.match,string.gsub
+local sort=table.sort
local utfchar=utf.char
local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
local P,S,R,C,V,C,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.C,lpeg.Cs
@@ -10950,22 +10951,34 @@ local function verbose_element(e,handlers,escape)
local ats=eat and next(eat) and {}
if ats then
local n=0
- for k,v in next,eat do
+ for k in next,eat do
n=n+1
- ats[n]=f_attribute(k,escaped(v))
+ ats[n]=k
+ end
+ if n==1 then
+ local k=ats[1]
+ ats=f_attribute(k,escaped(eat[k]))
+ else
+ sort(ats)
+ for i=1,n do
+ local k=ats[i]
+ ats[i]=f_attribute(k,escaped(eat[k]))
+ end
+ ats=concat(ats," ")
end
end
if ern and trace_entities and ern~=ens then
ens=ern
end
+ local n=edt and #edt
if ens~="" then
- if edt and #edt>0 then
+ if n and n>0 then
if ats then
- handle("<",ens,":",etg," ",concat(ats," "),">")
+ handle("<",ens,":",etg," ",ats,">")
else
handle("<",ens,":",etg,">")
end
- for i=1,#edt do
+ for i=1,n do
local e=edt[i]
if type(e)=="string" then
handle(escaped(e))
@@ -10976,19 +10989,19 @@ local function verbose_element(e,handlers,escape)
handle("</",ens,":",etg,">")
else
if ats then
- handle("<",ens,":",etg," ",concat(ats," "),"/>")
+ handle("<",ens,":",etg," ",ats,"/>")
else
handle("<",ens,":",etg,"/>")
end
end
else
- if edt and #edt>0 then
+ if n and n>0 then
if ats then
- handle("<",etg," ",concat(ats," "),">")
+ handle("<",etg," ",ats,">")
else
handle("<",etg,">")
end
- for i=1,#edt do
+ for i=1,n do
local e=edt[i]
if type(e)=="string" then
handle(escaped(e))
@@ -10999,7 +11012,7 @@ local function verbose_element(e,handlers,escape)
handle("</",etg,">")
else
if ats then
- handle("<",etg," ",concat(ats," "),"/>")
+ handle("<",etg," ",ats,"/>")
else
handle("<",etg,"/>")
end
@@ -18722,8 +18735,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sac.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 796212
--- stripped bytes : 288762
+-- original bytes : 796558
+-- stripped bytes : 288905
-- end library merge
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf
index 99c955885..e00a93a64 100644
--- a/tex/context/base/context-version.pdf
+++ b/tex/context/base/context-version.pdf
Binary files differ
diff --git a/tex/context/base/mkii/colo-xwi.mkii b/tex/context/base/mkii/colo-xwi.mkii
index 13d04759e..48ffe234e 100644
--- a/tex/context/base/mkii/colo-xwi.mkii
+++ b/tex/context/base/mkii/colo-xwi.mkii
@@ -128,7 +128,7 @@
\definecolor [snow] [r=1.00,g=0.98,b=0.98]
\definecolor [springgreen] [r=0.00,g=1.00,b=0.50]
\definecolor [steelblue] [r=0.27,g=0.51,b=0.71]
-\definecolor [tan ] [r=0.82,g=0.71,b=0.55]
+\definecolor [tancolor] [r=0.82,g=0.71,b=0.55] % we don't want it to clash
\definecolor [thistle] [r=0.85,g=0.75,b=0.85]
\definecolor [tomato] [r=1.00,g=0.39,b=0.28]
\definecolor [turquoise] [r=0.25,g=0.88,b=0.82]
diff --git a/tex/context/base/mkii/mult-de.mkii b/tex/context/base/mkii/mult-de.mkii
index b85a88918..e0099f4e7 100644
--- a/tex/context/base/mkii/mult-de.mkii
+++ b/tex/context/base/mkii/mult-de.mkii
@@ -1084,6 +1084,7 @@
\setinterfaceconstant{resources}{resources}
\setinterfaceconstant{reverse}{reverse}
\setinterfaceconstant{right}{rechts}
+\setinterfaceconstant{rightchars}{rightchars}
\setinterfaceconstant{rightcolor}{rechterfarbe}
\setinterfaceconstant{rightcompoundhyphen}{rightcompoundhyphen}
\setinterfaceconstant{rightedge}{rechtekante}
diff --git a/tex/context/base/mkii/mult-en.mkii b/tex/context/base/mkii/mult-en.mkii
index 969b9c475..116868cc9 100644
--- a/tex/context/base/mkii/mult-en.mkii
+++ b/tex/context/base/mkii/mult-en.mkii
@@ -1084,6 +1084,7 @@
\setinterfaceconstant{resources}{resources}
\setinterfaceconstant{reverse}{reverse}
\setinterfaceconstant{right}{right}
+\setinterfaceconstant{rightchars}{rightchars}
\setinterfaceconstant{rightcolor}{rightcolor}
\setinterfaceconstant{rightcompoundhyphen}{rightcompoundhyphen}
\setinterfaceconstant{rightedge}{rightedge}
diff --git a/tex/context/base/mkii/mult-fr.mkii b/tex/context/base/mkii/mult-fr.mkii
index fd848c789..44251d727 100644
--- a/tex/context/base/mkii/mult-fr.mkii
+++ b/tex/context/base/mkii/mult-fr.mkii
@@ -1084,6 +1084,7 @@
\setinterfaceconstant{resources}{resources}
\setinterfaceconstant{reverse}{inverse}
\setinterfaceconstant{right}{droite}
+\setinterfaceconstant{rightchars}{rightchars}
\setinterfaceconstant{rightcolor}{couleurdroite}
\setinterfaceconstant{rightcompoundhyphen}{rightcompoundhyphen}
\setinterfaceconstant{rightedge}{borddroit}
diff --git a/tex/context/base/mkii/mult-it.mkii b/tex/context/base/mkii/mult-it.mkii
index 7fa59db62..e6de935e6 100644
--- a/tex/context/base/mkii/mult-it.mkii
+++ b/tex/context/base/mkii/mult-it.mkii
@@ -1084,6 +1084,7 @@
\setinterfaceconstant{resources}{resources}
\setinterfaceconstant{reverse}{invertito}
\setinterfaceconstant{right}{destra}
+\setinterfaceconstant{rightchars}{rightchars}
\setinterfaceconstant{rightcolor}{coloredestra}
\setinterfaceconstant{rightcompoundhyphen}{rightcompoundhyphen}
\setinterfaceconstant{rightedge}{bordodestro}
diff --git a/tex/context/base/mkii/mult-nl.mkii b/tex/context/base/mkii/mult-nl.mkii
index 375bbe835..8c2063ea8 100644
--- a/tex/context/base/mkii/mult-nl.mkii
+++ b/tex/context/base/mkii/mult-nl.mkii
@@ -1084,6 +1084,7 @@
\setinterfaceconstant{resources}{resources}
\setinterfaceconstant{reverse}{omgekeerd}
\setinterfaceconstant{right}{rechts}
+\setinterfaceconstant{rightchars}{rightchars}
\setinterfaceconstant{rightcolor}{rechterkleur}
\setinterfaceconstant{rightcompoundhyphen}{rechterkoppelteken}
\setinterfaceconstant{rightedge}{rechterrand}
diff --git a/tex/context/base/mkii/mult-pe.mkii b/tex/context/base/mkii/mult-pe.mkii
index f889726a8..a058f9ad3 100644
--- a/tex/context/base/mkii/mult-pe.mkii
+++ b/tex/context/base/mkii/mult-pe.mkii
@@ -1084,6 +1084,7 @@
\setinterfaceconstant{resources}{resources}
\setinterfaceconstant{reverse}{برعکس}
\setinterfaceconstant{right}{راست}
+\setinterfaceconstant{rightchars}{rightchars}
\setinterfaceconstant{rightcolor}{رنگ‌راست}
\setinterfaceconstant{rightcompoundhyphen}{rightcompoundhyphen}
\setinterfaceconstant{rightedge}{لبه‌راست}
diff --git a/tex/context/base/mkii/mult-ro.mkii b/tex/context/base/mkii/mult-ro.mkii
index d8d731a05..062230226 100644
--- a/tex/context/base/mkii/mult-ro.mkii
+++ b/tex/context/base/mkii/mult-ro.mkii
@@ -1084,6 +1084,7 @@
\setinterfaceconstant{resources}{resources}
\setinterfaceconstant{reverse}{reverse}
\setinterfaceconstant{right}{dreapta}
+\setinterfaceconstant{rightchars}{rightchars}
\setinterfaceconstant{rightcolor}{culoaredreapta}
\setinterfaceconstant{rightcompoundhyphen}{rightcompoundhyphen}
\setinterfaceconstant{rightedge}{borduradreapta}
diff --git a/tex/context/base/mkiv/back-exp.lua b/tex/context/base/mkiv/back-exp.lua
index 5390911bb..681996d48 100644
--- a/tex/context/base/mkiv/back-exp.lua
+++ b/tex/context/base/mkiv/back-exp.lua
@@ -116,13 +116,13 @@ local tonut = nuts.tonut
local getnext = nuts.getnext
local getsubtype = nuts.getsubtype
local getfont = nuts.getfont
-local getchar = nuts.getchar
+local getdisc = nuts.getdisc
local getlist = nuts.getlist
local getid = nuts.getid
local getfield = nuts.getfield
local getattr = nuts.getattr
-
local setattr = nuts.setattr
+local isglyph = nuts.isglyph
local traverse_id = nuts.traverse_id
local traverse_nodes = nuts.traverse
@@ -2535,8 +2535,8 @@ end
local function collectresults(head,list,pat,pap) -- is last used (we also have currentattribute)
local p
for n in traverse_nodes(head) do
- local id = getid(n) -- 14: image, 8: literal (mp)
- if id == glyph_code then
+ local c, id = isglyph(n) -- 14: image, 8: literal (mp)
+ if c then
local at = getattr(n,a_tagged) or pat
if not at then
-- we need to tag the pagebody stuff as being valid skippable
@@ -2545,7 +2545,6 @@ local function collectresults(head,list,pat,pap) -- is last used (we also have c
else
-- we could add tonunicodes for ligatures (todo)
local components = getfield(n,"components")
- local c = getchar(n)
if components and (not characterdata[c] or overloads[c]) then -- we lose data
collectresults(components,nil,at) -- this assumes that components have the same attribute as the glyph ... we should be more tolerant (see math)
else
@@ -2640,14 +2639,13 @@ local function collectresults(head,list,pat,pap) -- is last used (we also have c
end
end
elseif id == disc_code then -- probably too late
+ local pre, post, replace = getdisc(n)
if keephyphens then
- local pre = getfield(n,"pre")
- if pre and not getnext(pre) and getid(pre) == glyph_code and getchar(pre) == hyphencode then
+ if pre and not getnext(pre) and isglyph(pre) == hyphencode then
nofcurrentcontent = nofcurrentcontent + 1
currentcontent[nofcurrentcontent] = hyphen
end
end
- local replace = getfield(n,"replace")
if replace then
collectresults(replace,nil)
end
diff --git a/tex/context/base/mkiv/back-exp.mkiv b/tex/context/base/mkiv/back-exp.mkiv
index 6a8177faf..48f4d3c48 100644
--- a/tex/context/base/mkiv/back-exp.mkiv
+++ b/tex/context/base/mkiv/back-exp.mkiv
@@ -89,14 +89,14 @@
\fi}%
% brrr, we need to tag empty cells (unless we start numbering)
\unexpanded\def\dotagTABLEsignal
- {\char\zerocount}%
+ {\signalcharacter}%
\to \everyenableelements
\appendtoks
\unexpanded\def\dotagtabulatecell
{\iftrialtypesetting\else\clf_settagtabulatecell\c_tabl_tabulate_align\fi}%
\unexpanded\def\dotagtabulatesignal
- {\dontleavehmode\char\zerocount\ignorespaces}%
+ {\dontleavehmode\signalcharacter\ignorespaces}%
\to \everyenableelements
\appendtoks
diff --git a/tex/context/base/mkiv/blob-ini.lua b/tex/context/base/mkiv/blob-ini.lua
index b837250ce..106c10f4f 100644
--- a/tex/context/base/mkiv/blob-ini.lua
+++ b/tex/context/base/mkiv/blob-ini.lua
@@ -6,16 +6,10 @@ if not modules then modules = { } end modules ['blob-ini'] = {
license = "see context related readme files"
}
--- Experimental ... names and functionality will change ... just a
--- place to collect code, so:
---
--- DON'T USE THESE FUNCTIONS AS THEY WILL CHANGE!
---
--- This module is just a playground. Occasionally we need to typeset
--- at the lua and and this is one method. In principle we can construct
--- pages this way too which sometimes makes sense in dumb cases. Actually,
--- if one only needs this, one does not really need tex, okay maybe the
--- parbuilder but that one can be simplified as well then.
+-- This module is just a playground. Occasionally we need to typeset at the Lua end and
+-- this is one method. In principle we can construct pages this way too which sometimes
+-- makes sense in dumb cases. Actually, if one only needs this, one does not really need
+-- tex, okay maybe the parbuilder but that one can be simplified as well then.
-- set fonts, attributes
-- rest already done in packers etc
@@ -41,21 +35,18 @@ local write_node = node.write
local typesetters = nodes.typesetters
local tonodes = typesetters.tonodes
local tohpack = typesetters.tohpack
-local tohpackfast = typesetters.tohpackfast
local tovpack = typesetters.tovpack
-local tovpackfast = typesetters.tovpackfast
local implement = interfaces.implement
-blobs = blobs or { }
-
-- provide copies here (nicer for manuals)
+blobs = blobs or { }
+local blobs = blobs
+
blobs.tonodes = tonodes
blobs.tohpack = tohpack
-blobs.tohpackfast = tohpackfast
blobs.tovpack = tovpack
-blobs.tovpackfast = tovpackfast
-- end of helpers
@@ -146,21 +137,21 @@ end
-- blob.paragraph
-- blob.page
---~ local lineblob = {
---~ type = "line",
---~ head = false,
---~ tail = false,
---~ pack = false,
---~ properties = { },
---~ end
-
---~ local parblob = {
---~ type = "line",
---~ head = false,
---~ tail = false,
---~ pack = false,
---~ properties = { },
---~ end
+-- local lineblob = {
+-- type = "line",
+-- head = false,
+-- tail = false,
+-- pack = false,
+-- properties = { },
+-- end
+
+-- local parblob = {
+-- type = "line",
+-- head = false,
+-- tail = false,
+-- pack = false,
+-- properties = { },
+-- end
-- for the moment here:
diff --git a/tex/context/base/mkiv/colo-imp-crayola.mkiv b/tex/context/base/mkiv/colo-imp-crayola.mkiv
new file mode 100644
index 000000000..dbae02d5a
--- /dev/null
+++ b/tex/context/base/mkiv/colo-imp-crayola.mkiv
@@ -0,0 +1,254 @@
+%D \module
+%D [ file=colo-imp-crayola
+%D version=2016.03.21,
+%D title=\CONTEXT\ Color Macros,
+%D subtitle=Crayola,
+%D author=Alan Braslau]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA, See mreadme.pdf for
+%C details.
+
+%D Crayola (Binney & Smith) crayon colors.
+
+\startprotectedcolors
+
+\definecolor [Red] [h=ED0A3F] % produced 1903–present
+\definecolor [Maroon] [h=C32148] % produced 1949–present, known as "dark red", 1949–1958.
+\definecolor [Scarlet] [h=FD0E35] % produced 1998–present, known as "torch red", 1998.
+\definecolor [BrickRed] [h=C62D42] % produced 1958–present
+\definecolor [EnglishVermilion] [h=CC474B] % also spelled "vermillion", produced 1903–1935.
+\definecolor [MadderLake] [h=CC3336] % produced 1903–1935
+\definecolor [PermanentGeraniumLake] [h=E12C2C] % produced 1903–circa 1910
+\definecolor [MaximumRed] [h=D92121] % part of the munsell line, 1926–1944
+\definecolor [IndianRed] [h=B94E48] % produced 1903–present, known as "chestnut" since 1999.
+\definecolor [OrangeRed] [h=FF5349] % produced 1958–1990
+\definecolor [SunsetOrange] [h=FE4C40] % produced 1997–present
+\definecolor [Bittersweet] [h=FE6F5E] % produced 1958–present
+\definecolor [DarkVenetianRed] [h=B33B24] % "venetian red, dark" on labels, produced 1903–circa 1910.
+\definecolor [VenetianRed] [h=CC553D] % produced 1903–1944
+\definecolor [LightVenetianRed] [h=E6735C] % "venetian red, light" on labels, produced 1903–circa 1910.
+\definecolor [VividTangerine] [h=FF9980] % produced 1990–present
+\definecolor [MiddleRed] [h=E58E73] % part of the munsell line, 1926–1944.
+\definecolor [BurntOrange] [h=FF7F49] % produced 1958–present
+\definecolor [RedOrange] [h=FF681F] % produced 1930–present
+\definecolor [Orange] [h=FF8833] % produced 1903–present
+\definecolor [MacaroniandCheese] [h=FFB97B] % produced 1993–present, also found as "macaroni & cheese" and "macaroni-n-cheese".
+\definecolor [MiddleYellowRed] [h=ECB176] % part of the munsell line, 1926–1944, same color as "medium orange" (1949–1958).
+\definecolor [MangoTango] [h=E77200] % produced 2003–present
+\definecolor [YellowOrange] [h=FFAE42] % produced 1930–present
+\definecolor [MaximumYellowRed] [h=F2BA49] % part of the munsell line, 1926–1944.
+\definecolor [BananaMania] [h=FBE7B2] % produced 1998–present
+\definecolor [Maize] [h=F2C649] % produced 1903–1990, known as "gold ochre", 1903–1958, "golden ochre" on some labels.
+\definecolor [OrangeYellow] [h=F8D568] % produced 1958–1990.
+\definecolor [Goldenrod] [h=FCD667] % produced 1903–present, known as "medium chrome yellow" (1903–?) and "medium yellow" (1903–1958).
+\definecolor [Dandelion] [h=FED85D] % produced 1990–present
+\definecolor [Yellow] [h=FBE870] % produced 1903–present
+\definecolor [GreenYellow] [h=F1E788] % produced 1958–present
+\definecolor [MiddleYellow] [h=FFEB00] % part of the munsell line, 1926–1944.
+\definecolor [OliveGreen] [h=B5B35C] % produced 1903–present
+\definecolor [SpringGreen] [h=ECEBBD] % produced 1958–present
+\definecolor [MaximumYellow] [h=FAFA37] % part of the munsell line, 1926–1944.
+\definecolor [Canary] [h=FFFF99] % produced 1998–present
+\definecolor [LemonYellow] [h=FFFF9F] % produced 1903–1990, also known as "light chrome yellow" (on labels "chrome yellow, light") or "light yellow", 1903–1958, on labels "chrome yellow, light."
+\definecolor [MaximumGreenYellow] [h=D9E650] % part of the munsell line, 1926–1944.
+\definecolor [MiddleGreenYellow] [h=ACBF60] % part of the munsell line, 1926–1944.
+\definecolor [Inchworm] [h=AFE313] % produced 1993–present
+\definecolor [LightChromeGreen] [h=BEE64B] % "chrome green, light" on labels, produced 1903–1935, same color as "light green" (1903–1935).
+\definecolor [YellowGreen] [h=C5E17A] % produced 1930–present
+\definecolor [MaximumGreen] [h=5E8C31] % part of the munsell line, 1926–1944.
+\definecolor [Asparagus] [h=7BA05B] % produced 1993–present
+\definecolor [GrannySmithApple] [h=9DE093] % produced 1993–present
+\definecolor [Fern] [h=63B76C] % produced 1998–present
+\definecolor [MiddleGreen] [h=4D8C57] % part of the munsell line, 1926–1944.
+\definecolor [Green] [h=3AA655] % produced 1903–present
+\definecolor [MediumChromeGreen] [h=6CA67C] % "chrome green, medium" on labels, produced 1903–1939, same color as "medium green" (1903–1939).
+\definecolor [ForestGreen] [h=5FA777] % produced 1949–present, known as "dark green", 1949–1958.
+\definecolor [SeaGreen] [h=93DFB8] % produced 1949–present, known as "light green", 1949–1958.
+\definecolor [Shamrock] [h=33CC99] % produced 1993–present
+\definecolor [MountainMeadow] [h=1AB385] % produced 1998–present
+\definecolor [JungleGreen] [h=29AB87] % produced 1990–present
+\definecolor [CaribbeanGreen] [h=00CC99] % produced 1997–present
+\definecolor [TropicalRainForest] [h=00755E] % produced 1993–present
+\definecolor [MiddleBlueGreen] [h=8DD9CC] % part of the munsell line, 1926–1944.
+\definecolor [PineGreen] [h=01786F] % produced 1903–1949, 1958–present, known as "dark chrome green" ("chrome green, dark" on labels) or "dark green", 1903–1949.
+\definecolor [MaximumBlueGreen] [h=30BFBF] % part of the munsell line, 1926–1944.
+\definecolor [RobinsEggBlue] [h=00CCCC] % produced 1993–present
+\definecolor [TealBlue] [h=008080] % produced 1990–2003.
+\definecolor [LightBlue] [h=8FD8D8] % produced only in 1958.
+\definecolor [Aquamarine] [h=95E0E8] % produced 1949–present, known as "light turquoise blue", 1949–1958.
+\definecolor [TurquoiseBlue] [h=6CDAE7] % produced 1935–present, available only in bulk, 1935–1949.
+\definecolor [OuterSpace] [h=2D383A] % produced 1998–present
+\definecolor [SkyBlue] [h=76D7EA] % produced 1958–present
+\definecolor [MiddleBlue] [h=7ED4E6] % part of the munsell line, 1926–1944.
+\definecolor [BlueGreen] [h=0095B7] % produced 1949–present, known as "middle blue-green", 1949–1958.
+\definecolor [PacificBlue] [h=009DC4] % produced 1993–present
+\definecolor [Cerulean] [h=02A4D3] % produced 1990–present
+\definecolor [MaximumBlue] [h=47ABCC] % produced 1926–1958, part of the munsell line, 1926–1944, also known as "blue-green", 1930–1958.
+\definecolor [BlueI] [h=4997D0] % produced 1903–1958, known as "celestial blue", 1935–1949, and "azure blue", 1949–1958.
+\definecolor [CeruleanBlue] [h=339ACC] % produced 1949–1958.
+\definecolor [Cornflower] [h=93CCEA] % produced 1958–present
+\definecolor [GreenBlue] [h=2887C8] % produced 1958–1990.
+\definecolor [MidnightBlue] [h=00468C] % produced 1903–present, known as "prussian blue", 1903–1958.
+\definecolor [NavyBlue] [h=0066CC] % produced 1958–present
+\definecolor [Denim] [h=1560BD] % produced 1993–present
+\definecolor [BlueIII] [h=0066FF] % produced 1949–present
+\definecolor [CadetBlue] [h=A9B2C3] % produced 1958–present
+\definecolor [Periwinkle] [h=C3CDE6] % produced 1958–present
+\definecolor [BlueII] [h=4570E6] % produced 1935–1958, known as "medium blue", 1949–1958.
+\definecolor [WildBlueYonder] [h=7A89B8] % produced 2003–present
+\definecolor [Indigo] [h=4F69C6] % produced 1999–present
+\definecolor [Manatee] [h=8D90A1] % produced 1998–present
+\definecolor [CobaltBlue] [h=8C90C8] % produced 1903–1958.
+\definecolor [CelestialBlue] [h=7070CC] % produced 1903–circa 1910.
+\definecolor [BlueBell] [h=9999CC] % produced 1998–present
+\definecolor [MaximumBluePurple] [h=ACACE6] % part of the munsell line, 1926–1944.
+\definecolor [VioletBlue] [h=766EC8] % produced 1930–1990, known as blue-violet, 1930–1958.
+\definecolor [BlueViolet] [h=6456B7] % produced 1949–present, known as "violet" 1949–1958.
+\definecolor [UltramarineBlue] [h=3F26BF] % produced 1903–1944.
+\definecolor [MiddleBluePurple] [h=8B72BE] % part of the munsell line, 1926–1944.
+\definecolor [PurpleHeart] [h=652DC1] % produced 1998–present
+\definecolor [RoyalPurple] [h=6B3FA0] % produced 1990–present
+\definecolor [VioletII] [h=8359A3] % produced 1930–1949, 1958–present, on labels "violet (purple)".
+\definecolor [MediumViolet] [h=8F47B3] % produced 1949–1958.
+\definecolor [Wisteria] [h=C9A0DC] % produced 1993–present
+\definecolor [LavenderI] [h=BF8FCC] % produced 1949–1958.
+\definecolor [VividViolet] [h=803790] % produced 1997–present
+\definecolor [MaximumPurple] [h=733380] % part of the munsell line, 1926–1944.
+\definecolor [PurpleMountainsMajesty] [h=D6AEDD] % produced 1993–present, also found as "purple mountain majesty" and "purple mountain's majesty."
+\definecolor [Fuchsia] [h=C154C1] % produced 1990–present
+\definecolor [PinkFlamingo] [h=FC74FD] % produced 1997–present
+\definecolor [VioletI] [h=732E6C] % produced 1903–1930, also known as "purple" (1903–circa 1914).
+\definecolor [BrilliantRose] [h=E667CE] % produced 1949–1958.
+\definecolor [Orchid] [h=E29CD2] % produced 1949–present, known as "medium red-violet", 1949–1958.
+\definecolor [Plum] [h=8E3179] % produced 1958–present
+\definecolor [MediumRose] [h=D96CBE] % produced 1949–1958.
+\definecolor [Thistle] [h=EBB0D7] % produced 1949–1999, known as "light magenta", 1949–1958.
+\definecolor [Mulberry] [h=C8509B] % produced 1958–2003.
+\definecolor [RedViolet] [h=BB3385] % produced 1930–present
+\definecolor [MiddlePurple] [h=D982B5] % part of the munsell line, 1926–1944.
+\definecolor [MaximumRedPurple] [h=A63A79] % part of the munsell line, 1926–1944.
+\definecolor [JazzberryJam] [h=A50B5E] % produced 2003–present
+\definecolor [Eggplant] [h=614051] % produced 1998–present
+\definecolor [Magenta] [h=F653A6] % produced 1903–present, same color as "permanent magenta" (1903–?).
+\definecolor [Cerise] [h=DA3287] % produced 1993–present
+\definecolor [WildStrawberry] [h=FF3399] % produced 1990–present
+\definecolor [LavenderII] [h=FBAED2] % produced 1958–present
+\definecolor [CottonCandy] [h=FFB7D5] % produced 1998–present
+\definecolor [CarnationPink] [h=FFA6C9] % produced 1903–present, known as "rose pink" (1903–1958) and "pink" (1903–1917).
+\definecolor [VioletRed] [h=F7468A] % produced 1958–present
+\definecolor [Razzmatazz] [h=E30B5C] % produced 1993–present
+\definecolor [PigPink] [h=FDD7E4] % produced 1998–present, also called "piggy pink."
+\definecolor [Carmine] [h=E62E6B] % produced 1935–1958, known as "carmine red", 1949–1958.
+\definecolor [Blush] [h=DB5079] % produced 1998–present, known as "cranberry", 1998–2005.
+\definecolor [TickleMePink] [h=FC80A5] % produced 1993–present
+\definecolor [Mauvelous] [h=F091A9] % produced 1993–present
+\definecolor [Salmon] [h=FF91A4] % produced 1949–present
+\definecolor [MiddleRedPurple] [h=A55353] % part of the munsell line, 1926–1944.
+\definecolor [Mahogany] [h=CA3435] % produced 1949–present
+\definecolor [Melon] [h=FEBAAD] % produced 1958–present
+\definecolor [PinkSherbert] [h=F7A38E] % produced 1998–present, known as "brink pink", 1998–2005.
+\definecolor [BurntSienna] [h=E97451] % produced 1903–present
+\definecolor [Brown] [h=AF593E] % produced 1935–present
+\definecolor [Sepia] [h=9E5B40] % produced 1935–1944, 1958–present, available only in bulk, 1935–1939.
+\definecolor [FuzzyWuzzy] [h=87421F] % produced 1998–present, known as "fuzzy wuzzy brown", 1998–2005.
+\definecolor [Beaver] [h=926F5B] % produced 1998–present
+\definecolor [Tumbleweed] [h=DEA681] % produced 1993–present
+\definecolor [RawSienna] [h=D27D46] % produced 1958–present
+\definecolor [VanDykeBrown] [h=664228] % produced 1903–1935, same color as "brown" (1903–1935)
+\definecolor [Tan] [h=D99A6C] % produced 1958–present
+\definecolor [DesertSand] [h=EDC9AF] % produced 1998–present
+\definecolor [Peach] [h=FFCBA4] % produced 1903–present, known as "flesh tint" (1903–1949), "flesh" (1949–1956, 1958–1962), and "pink beige" (1956–1958)
+\definecolor [BurntUmber] [h=805533] % produced 1903–1944
+\definecolor [Apricot] [h=FDD5B1] % produced 1958–present
+\definecolor [Almond] [h=EED9C4] % produced 1998–present
+\definecolor [RawUmber] [h=665233] % produced 1903–1990
+\definecolor [Shadow] [h=837050] % produced 1998–present
+\definecolor [RawSiennaI] [h=E6BC5C] % produced 1903–circa 1910
+\definecolor [Timberwolf] [h=D9D6CF] % produced 1993–present
+\definecolor [GoldI] [h=92926E] % metallic, swatch represents nominal hue only, produced 1903–1944, available only in bulk after 1915
+\definecolor [GoldII] [h=E6BE8A] % metallic, swatch represents nominal hue only, produced 1953–present, available only in bulk, 1953–1956
+\definecolor [Silver] [h=C9C0BB] % metallic, swatch represents nominal hue only, produced 1903–present, available only in bulk, 1915–1944
+\definecolor [Copper] [h=DA8A67] % metallic, swatch represents nominal hue only, produced 1903–1915, 1958–present
+\definecolor [AntiqueBrass] [h=C88A65] % metallic, swatch represents nominal hue only, produced 1998–present
+\definecolor [Black] [h=000000] % produced 1903–present
+\definecolor [CharcoalGray] [h=736A62] % produced 1903–1910
+\definecolor [Gray] [h=8B8680] % as "middle grey", part of the munsell line, 1926–1944, spelled "grey" on labels, but "gray" on boxes, also called "neutral grey", 1930–1956
+\definecolor [BlueGray] [h=C8C8CD] % produced 1958–1990
+\definecolor [White] [h=FFFFFF] % produced 1903–present
+\definecolor [RadicalRed] [h=FF355E] % introduced in 1990
+\definecolor [WildWatermelon] [h=FD5B78] % same color as "ultra red" (1972–1990)
+\definecolor [OutrageousOrange] [h=FF6037] % same color as "ultra orange" (1972–1990)
+\definecolor [AtomicTangerine] [h=FF9966] % same color as "ultra yellow" (1972–1990)
+\definecolor [NeonCarrot] [h=FF9933] % introduced in 1990
+\definecolor [Sunglow] [h=FFCC33] % introduced in 1990
+\definecolor [LaserLemon] [h=FFFF66] % same color as "chartreuse" (1972–1990)
+\definecolor [UnmellowYellow] [h=FFFF66] % introduced in 1990
+\definecolor [ElectricLime] [h=CCFF00] % introduced in 1990
+\definecolor [ScreaminGreen] [h=66FF66] % same color as "ultra green" (1972–1990)
+\definecolor [MagicMint] [h=AAF0D1] % produced 1990–2003
+\definecolor [BlizzardBlue] [h=50BFE6] % same color as "ultra blue" (1972–1990), discontinued in 2003
+\definecolor [ShockingPink] [h=FF6EFF] % same color as "ultra pink" (1972–1990)
+\definecolor [RazzleDazzleRose] [h=EE34D2] % same color as "hot magenta" (1972–1990)
+\definecolor [HotMagenta] [h=FF00CC] % introduced in 1990
+\definecolor [PurplePizzazz] [h=FF00CC] % introduced in 1990
+\definecolor [AztecGold] [h=C39953]
+\definecolor [BurnishedBrown] [h=A17A74]
+\definecolor [CeruleanFrost] [h=6D9BC3]
+\definecolor [CinnamonSatin] [h=CD607E]
+\definecolor [CopperPenny] [h=AD6F69]
+\definecolor [CosmicCobalt] [h=2E2D88]
+\definecolor [GlossyGrape] [h=AB92B3]
+\definecolor [GraniteGray] [h=676767]
+\definecolor [GreenSheen] [h=6EAEA1]
+\definecolor [LilacLuster] [h=AE98AA]
+\definecolor [MistyMoss] [h=BBB477]
+\definecolor [MysticMaroon] [h=AD4379]
+\definecolor [PearlyPurple] [h=B768A2]
+\definecolor [PewterBlue] [h=8BA8B7]
+\definecolor [PolishedPine] [h=5DA493]
+\definecolor [QuickSilver] [h=A6A6A6]
+\definecolor [RoseDust] [h=9E5E6F]
+\definecolor [RustyRed] [h=DA2C43]
+\definecolor [ShadowBlue] [h=778BA5]
+\definecolor [ShinyShamrock] [h=5FA778]
+\definecolor [SteelTeal] [h=5F8A8B]
+\definecolor [SugarPlum] [h=914E75]
+\definecolor [TwilightLavender] [h=8A496B]
+\definecolor [WintergreenDream] [h=56887D]
+\definecolor [Amethyst] [h=64609A]
+\definecolor [Citrine] [h=933709]
+\definecolor [Emerald] [h=14A989]
+\definecolor [Jade] [h=469A84]
+\definecolor [Jasper] [h=D05340]
+\definecolor [LapisLazuli] [h=436CB9]
+\definecolor [Malachite] [h=469496]
+\definecolor [Moonstone] [h=3AA8C1]
+\definecolor [Onyx] [h=353839]
+\definecolor [Peridot] [h=ABAD48]
+\definecolor [PinkPearl] [h=B07080]
+\definecolor [RoseQuartz] [h=BD559C]
+\definecolor [Ruby] [h=AA4069]
+\definecolor [Sapphire] [h=2D5DA1]
+\definecolor [SmokeyTopaz] [h=832A0D]
+\definecolor [TigersEye] [h=B56917]
+\definecolor [AquaPearl] [h=5FBED7]
+\definecolor [BlackCoralPearl] [h=54626F]
+\definecolor [CaribbeanGreenPearl] [h=6ADA8E]
+\definecolor [CulturedPearl] [h=F5F5F5]
+\definecolor [KeyLimePearl] [h=E8F48C]
+\definecolor [MandarinPearl] [h=F37A48]
+\definecolor [MidnightPearl] [h=702670]
+\definecolor [MysticPearl] [h=D65282]
+\definecolor [OceanBluePearl] [h=4F42B5]
+\definecolor [OceanGreenPearl] [h=48BF91]
+\definecolor [OrchidPearl] [h=7B4259]
+\definecolor [RosePearl] [h=F03865]
+\definecolor [SalmonPearl] [h=F1444A]
+\definecolor [SunnyPearl] [h=F2F27A]
+\definecolor [SunsetPearl] [h=F1CC79]
+\definecolor [TurquoisePearl] [h=3BBCD0]
+
+\stopprotectedcolors
+
+\endinput
diff --git a/tex/context/base/mkiv/colo-imp-dem.mkiv b/tex/context/base/mkiv/colo-imp-dem.mkiv
index 5b794c551..7744a216e 100644
--- a/tex/context/base/mkiv/colo-imp-dem.mkiv
+++ b/tex/context/base/mkiv/colo-imp-dem.mkiv
@@ -11,8 +11,9 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D Like colors, we first define the english colorgroups. These
-%D colorgroups are tuned for distinctive gray scale printing.
+%D Like colors, we first define the english colorgroups. These colorgroups are
+%D tuned for distinctive gray scale printing. The yellow range is not really
+%D all yellow but is at least visible.
\definecolorgroup
[gray]
@@ -200,6 +201,20 @@
\definecolorgroup [giallo*] [yellow*]
\stopinterface
+\startinterface french
+ \definecolorgroup [gris] [gray]
+ \definecolorgroup [rouge] [red]
+ \definecolorgroup [vert] [green]
+ \definecolorgroup [bleu] [blue]
+ \definecolorgroup [cyan] [cyan]
+ \definecolorgroup [magenta] [magenta]
+ \definecolorgroup [jaune] [yellow]
+ \definecolorgroup [rouge*] [red*]
+ \definecolorgroup [vert*] [green*]
+ \definecolorgroup [bleu*] [blue*]
+ \definecolorgroup [jaune*] [yellow*]
+\stopinterface
+
%D The next set of color palets is quite language independent.
%D These palets are meant as examples.
diff --git a/tex/context/base/mkiv/colo-imp-ema.mkiv b/tex/context/base/mkiv/colo-imp-ema.mkiv
index 11d76f342..ff1db60fb 100644
--- a/tex/context/base/mkiv/colo-imp-ema.mkiv
+++ b/tex/context/base/mkiv/colo-imp-ema.mkiv
@@ -8,7 +8,7 @@
%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
% author : Peter Rolf <peter.rolf@arcor.de>
@@ -17,28 +17,30 @@
%
% USAGE:
%
-% - select emacs menu: Edit -> Text Properties -> Display
+% - select emacs menu: Edit -> Text Properties -> Display
% Colors, if you don't know what I'm talking about
-% - copy this file to your /texmf/tex/context/user directory
+% - copy this file to your /texmf/tex/context/user directory
% and add "\input colo-emacs" to your tex-file
%
% IMPORTANT NOTES:
%
-% - all color names are written as ONE word in lowercase
-% letters (no redundancy as in rgb.txt) so if you want to
-% use the color "Light Sky Blue"/"light sky blue" it's new
+% - all color names are written as ONE word in lowercase
+% letters (no redundancy as in rgb.txt) so if you want to
+% use the color "Light Sky Blue"/"light sky blue" it's new
% name is lightskyblue
% - the grayX values [X=0..100] can only be used with \color
% [grayX]
% - use it at your own risk :)
%
% HINT:
-%
-% I only include this file (\showcolor [ema]) until I've
-% found the colors I want. Copy and paste the color
-% definitions to a seperate file, to decrease compilation
-% time (540 color definitions).
-
+%
+% I only include this file (\showcolor [ema]) until I've
+% found the colors I want. Copy and paste the color
+% definitions to a separate file, to decrease compilation
+% time (540 color definitions).
+
+\startprotectedcolors
+
\definecolor [snow] [r=1,g=.980392,b=.980392]
\definecolor [ghostwhite] [r=.972549,g=.972549,b=1]
\definecolor [whitesmoke] [s=.960784]
@@ -481,110 +483,18 @@
\definecolor [thistle2] [r=.933333,g=.823529,b=.933333]
\definecolor [thistle3] [r=.803922,g=.709804,b=.803922]
\definecolor [thistle4] [r=.545098,g=.482353,b=.545098]
-\definecolor [gray0] [black]
-\definecolor [gray1] [s=.01]
-\definecolor [gray2] [s=.02]
-\definecolor [gray3] [s=.03]
-\definecolor [gray4] [s=.04]
-\definecolor [gray5] [s=.05]
-\definecolor [gray6] [s=.06]
-\definecolor [gray7] [s=.07]
-\definecolor [gray8] [s=.08]
-\definecolor [gray9] [s=.09]
-\definecolor [gray10] [s=.1]
-\definecolor [gray11] [s=.11]
-\definecolor [gray12] [s=.12]
-\definecolor [gray13] [s=.13]
-\definecolor [gray14] [s=.14]
-\definecolor [gray15] [s=.15]
-\definecolor [gray16] [s=.16]
-\definecolor [gray17] [s=.17]
-\definecolor [gray18] [s=.18]
-\definecolor [gray19] [s=.19]
-\definecolor [gray20] [s=.2]
-\definecolor [gray21] [s=.21]
-\definecolor [gray22] [s=.22]
-\definecolor [gray23] [s=.23]
-\definecolor [gray24] [s=.24]
-\definecolor [gray25] [s=.25]
-\definecolor [gray26] [s=.26]
-\definecolor [gray27] [s=.27]
-\definecolor [gray28] [s=.28]
-\definecolor [gray29] [s=.29]
-\definecolor [gray30] [s=.3]
-\definecolor [gray31] [s=.31]
-\definecolor [gray32] [s=.32]
-\definecolor [gray33] [s=.33]
-\definecolor [gray34] [s=.34]
-\definecolor [gray35] [s=.35]
-\definecolor [gray36] [s=.36]
-\definecolor [gray37] [s=.37]
-\definecolor [gray38] [s=.38]
-\definecolor [gray39] [s=.39]
-\definecolor [gray40] [s=.4]
-\definecolor [gray41] [s=.41]
-\definecolor [gray42] [s=.42]
-\definecolor [gray43] [s=.43]
-\definecolor [gray44] [s=.44]
-\definecolor [gray45] [s=.45]
-\definecolor [gray46] [s=.46]
-\definecolor [gray47] [s=.47]
-\definecolor [gray48] [s=.48]
-\definecolor [gray49] [s=.49]
-\definecolor [gray50] [s=.5]
-\definecolor [gray51] [s=.51]
-\definecolor [gray52] [s=.52]
-\definecolor [gray53] [s=.53]
-\definecolor [gray54] [s=.54]
-\definecolor [gray55] [s=.55]
-\definecolor [gray56] [s=.56]
-\definecolor [gray57] [s=.57]
-\definecolor [gray58] [s=.58]
-\definecolor [gray59] [s=.59]
-\definecolor [gray60] [s=.6]
-\definecolor [gray61] [s=.61]
-\definecolor [gray62] [s=.62]
-\definecolor [gray63] [s=.63]
-\definecolor [gray64] [s=.64]
-\definecolor [gray65] [s=.65]
-\definecolor [gray66] [s=.66]
-\definecolor [gray67] [s=.67]
-\definecolor [gray68] [s=.68]
-\definecolor [gray69] [s=.69]
-\definecolor [gray70] [s=.7]
-\definecolor [gray71] [s=.71]
-\definecolor [gray72] [s=.72]
-\definecolor [gray73] [s=.73]
-\definecolor [gray74] [s=.74]
-\definecolor [gray75] [s=.75]
-\definecolor [gray76] [s=.76]
-\definecolor [gray77] [s=.77]
-\definecolor [gray78] [s=.78]
-\definecolor [gray79] [s=.79]
-\definecolor [gray80] [s=.8]
-\definecolor [gray81] [s=.81]
-\definecolor [gray82] [s=.82]
-\definecolor [gray83] [s=.83]
-\definecolor [gray84] [s=.84]
-\definecolor [gray85] [s=.85]
-\definecolor [gray86] [s=.86]
-\definecolor [gray87] [s=.87]
-\definecolor [gray88] [s=.88]
-\definecolor [gray89] [s=.89]
-\definecolor [gray90] [s=.9]
-\definecolor [gray91] [s=.91]
-\definecolor [gray92] [s=.92]
-\definecolor [gray93] [s=.93]
-\definecolor [gray94] [s=.94]
-\definecolor [gray95] [s=.95]
-\definecolor [gray96] [s=.96]
-\definecolor [gray97] [s=.97]
-\definecolor [gray98] [s=.98]
-\definecolor [gray99] [s=.99]
-\definecolor [gray100] [white]
+
+\dorecurse{100}{
+ \definecolor [gray#1] [s=\cldcontext{#1/100}]
+}
+
\definecolor [darkgray] [s=.662745]
\definecolor [darkblue] [b=.545098]
\definecolor [darkcyan] [g=.545098,b=.545098]
\definecolor [darkmagenta] [r=.545098,b=.545098]
\definecolor [darkred] [r=.545098]
\definecolor [lightgreen] [r=.564706,g=.933333,b=.564706]
+
+\stopprotectedcolors
+
+\endinput
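The hundred explicit grayN definitions are now generated with a \dorecurse loop whose step value is computed via \cldcontext, and the whole file is wrapped in \startprotectedcolors. The same idiom can be reused for other stepped scales; a sketch with made-up names (myredN is an assumption for illustration):

    \usecolors[ema]

    \startprotectedcolors
        \dorecurse{20}{%
            \definecolor[myred#1][r=\cldcontext{#1/20}]%
        }
    \stopprotectedcolors

    \starttext
        \color[gray35] {a generated gray}\quad
        \color[myred15]{three quarter red}
    \stoptext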
diff --git a/tex/context/base/mkiv/colo-imp-rgb.mkiv b/tex/context/base/mkiv/colo-imp-rgb.mkiv
index d7b691fcc..58b2ca42c 100644
--- a/tex/context/base/mkiv/colo-imp-rgb.mkiv
+++ b/tex/context/base/mkiv/colo-imp-rgb.mkiv
@@ -38,6 +38,9 @@
\definecolor [lightred] [r=1, g=0, b=0]
\definecolor [lightgreen] [r=0, g=1, b=0]
\definecolor [lightblue] [r=0, g=0, b=1]
+\definecolor [lightcyan] [r=0, g=1, b=1]
+\definecolor [lightmagenta] [r=1, g=0, b=1]
+\definecolor [lightyellow] [r=1, g=1, b=0]
\definecolor [middlered] [r=.8, g=0, b=0]
\definecolor [middlegreen] [r=0, g=.8, b=0]
@@ -87,12 +90,15 @@
\definecolor [donkercyaan] [darkcyan]
\definecolor [middelcyaan] [middlecyan]
+ \definecolor [lichtcyaan] [lightcyan]
\definecolor [donkermagenta] [darkmagenta]
\definecolor [middelmagenta] [middlemagenta]
+ \definecolor [lichtmagenta] [lightmagenta]
\definecolor [donkergeel] [darkyellow]
\definecolor [middelgeel] [middleyellow]
+ \definecolor [lichtgeel] [lightyellow]
\definecolor [donkergrijs] [darkgray]
\definecolor [middengrijs] [middlegray]
@@ -128,12 +134,15 @@
\definecolor [dunkelcyan] [darkcyan]
\definecolor [mittelcyan] [middlecyan]
+ \definecolor [hellcyan] [lightcyan]
\definecolor [dunkelmagenta] [darkmagenta]
\definecolor [mittelmagenta] [middlemagenta]
+ \definecolor [hellmagenta] [lightmagenta]
\definecolor [dunkelgelb] [darkyellow]
\definecolor [mittelgelb] [middleyellow]
+ \definecolor [hellgelb] [lightyellow]
\definecolor [dunkelgrau] [darkgray]
\definecolor [mittelgrau] [middlegray]
@@ -168,12 +177,15 @@
\definecolor [tmaveazurova] [darkcyan]
\definecolor [stredneazurova] [middlecyan]
+ \definecolor [svelteazurova] [lightcyan]
\definecolor [tmavefialova] [darkmagenta]
\definecolor [strednefialova] [middlemagenta]
+ \definecolor [sveltefialova] [lightmagenta]
\definecolor [tmavezluta] [darkyellow]
\definecolor [strednezluta] [middleyellow]
+ \definecolor [sveltezluta] [lightyellow]
\definecolor [tmaveseda] [darkgray]
\definecolor [stredneseda] [middlegray]
@@ -210,14 +222,16 @@
\definecolor [bluchiaro] [lightblue]
\definecolor [azzurroscuro] [darkcyan]
- \definecolor [azzurrochiaro] [middlecyan]
+ \definecolor [azzurromedio] [middlecyan]
+ \definecolor [azzurrochiaro] [lightcyan]
\definecolor [cremisiscuro] [darkmagenta]
- \definecolor [cremisichiaro] [middlemagenta]
-
+ \definecolor [cremisimedio] [middlemagenta]
+ \definecolor [cremisichiaro] [lightmagenta]
\definecolor [gialloscuro] [darkyellow]
\definecolor [giallomedio] [middleyellow]
+ \definecolor [giallochiaro] [lightyellow]
\definecolor [grigioscuro] [darkgray]
\definecolor [grigiomedio] [middlegray]
@@ -225,6 +239,50 @@
\stopinterface
+\startinterface french
+
+ \definecolor [rouge] [red]
+ \definecolor [vert] [green]
+ \definecolor [bleu] [blue]
+
+ \definecolor [cyan] [cyan]
+ \definecolor [magenta] [magenta]
+ \definecolor [jaune] [yellow]
+
+ \definecolor [blanche] [white]
+ \definecolor [gris] [gray]
+ \definecolor [noir] [black]
+
+ \definecolor [rougefoncé] [darkred]
+ \definecolor [rougemoyen] [middlered]
+ \definecolor [rougeclair] [lightred]
+
+ \definecolor [vertfoncé] [darkgreen]
+ \definecolor [vertmoyen] [middlegreen]
+ \definecolor [vertclair] [lightgreen]
+
+ \definecolor [bleufoncé] [darkblue]
+ \definecolor [bleumoyen] [middleblue]
+ \definecolor [bleuclair] [lightblue]
+
+ \definecolor [cyanfoncé] [darkcyan]
+ \definecolor [cyanmoyen] [middlecyan]
+ \definecolor [cyanclair] [lightcyan]
+
+ \definecolor [magentafoncé] [darkmagenta]
+ \definecolor [magentamoyen] [middlemagenta]
+ \definecolor [magentaclair] [lightmagenta]
+
+ \definecolor [jaunefoncé] [darkyellow]
+ \definecolor [jaunemoyen] [middleyellow]
+ \definecolor [jauneclair] [lightyellow]
+
+ \definecolor [grisfoncé] [darkgray]
+ \definecolor [grismoyen] [middlegray]
+ \definecolor [grisclair] [lightgray]
+
+\stopinterface
+
\startinterface romanian
\definecolor [rosu] [red]
@@ -253,12 +311,15 @@
\definecolor [cianinchis] [darkcyan]
\definecolor [cianmediu] [middlecyan]
+ \definecolor [ciandeschis] [lightcyan]
\definecolor [magentainchis] [darkmagenta]
\definecolor [magentamediu] [middlemagenta]
+ \definecolor [magentadeschis] [lightmagenta]
\definecolor [galbeninchis] [darkyellow]
\definecolor [galbenmediu] [middleyellow]
+ \definecolor [galbendeschis] [lightyellow]
\definecolor [griinchis] [darkgray]
\definecolor [grimediu] [middlegray]
@@ -269,6 +330,7 @@
%D Bonus (needed for FO test):
\definecolor [orange] [r=1, g=.5]
+\definecolor [lightorange] [r=1, g=.5]
\definecolor [middleorange] [r=.6,g=.3]
\definecolor [darkorange] [r=.4,g=.2]
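The rgb set now has a light variant for the secondary colors plus a matching lightorange entry; the french block, like the other language blocks, only kicks in when that user interface is active. A small sketch using the new english names (document wrapper assumed):

    \usecolors[rgb]

    \starttext
        \color[lightcyan]   {light cyan}\quad
        \color[lightmagenta]{light magenta}\quad
        \color[lightyellow] {light yellow}\quad
        \color[lightorange] {light orange}
    \stoptext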
diff --git a/tex/context/base/mkiv/colo-imp-x11.mkiv b/tex/context/base/mkiv/colo-imp-x11.mkiv
index 45d3aac62..a83466e8d 100644
--- a/tex/context/base/mkiv/colo-imp-x11.mkiv
+++ b/tex/context/base/mkiv/colo-imp-x11.mkiv
@@ -1,5 +1,5 @@
%D \module
-%D [ file=colo-x11,
+%D [ file=colo-imp-x11,
%D version=2009.11.13,
%D title=\CONTEXT\ Color Macros,
%D subtitle=X11,
@@ -11,9 +11,7 @@
%D Standard X11 rgb colors (from \type {/usr/share/X11/rgb.txt}):
-\doifnotmode{mkiv} {
- \input colo-hex.mkii
-}
+\startprotectedcolors
\definecolor [snow] [h=fffafa]
\definecolor [ghostwhite] [h=f8f8ff]
@@ -43,20 +41,14 @@
\definecolor [white] [s=1]
\definecolor [black] [s=0]
\definecolor [darkslategray] [h=2f4f4f]
-\definecolor [darkslategrey] [darkslategray]
\definecolor [dimgray] [s=0.41]
-\definecolor [dimgrey] [dimgray]
\definecolor [slategray] [h=708090]
-\definecolor [slategrey] [slategray]
\definecolor [lightslategray] [h=778899]
-\definecolor [lightslategrey] [lightslategray]
\definecolor [gray] [s=0.75]
-\definecolor [grey] [gray]
\definecolor [lightgrey] [s=0.83]
-\definecolor [lightgray] [lightgrey]
\definecolor [midnightblue] [h=191970]
\definecolor [navy] [h=000080]
-\definecolor [navyblue] [navy]
+\definecolor [navyblue] [h=000080]
\definecolor [cornflowerblue] [h=6495ed]
\definecolor [darkslateblue] [h=483d8b]
\definecolor [slateblue] [h=6a5acd]
@@ -464,214 +456,20 @@
\definecolor [thistle2] [h=eed2ee]
\definecolor [thistle3] [h=cdb5cd]
\definecolor [thistle4] [h=8b7b8b]
-\definecolor [gray0] [s=0.00]
-\definecolor [grey0] [gray0]
-\definecolor [gray1] [s=0.01]
-\definecolor [grey1] [gray1]
-\definecolor [gray2] [s=0.02]
-\definecolor [grey2] [gray2]
-\definecolor [gray3] [s=0.03]
-\definecolor [grey3] [gray3]
-\definecolor [gray4] [s=0.04]
-\definecolor [grey4] [gray4]
-\definecolor [gray5] [s=0.05]
-\definecolor [grey5] [gray5]
-\definecolor [gray6] [s=0.06]
-\definecolor [grey6] [gray6]
-\definecolor [gray7] [s=0.07]
-\definecolor [grey7] [gray7]
-\definecolor [gray8] [s=0.08]
-\definecolor [grey8] [gray8]
-\definecolor [gray9] [s=0.09]
-\definecolor [grey9] [gray9]
-\definecolor [gray10] [s=0.10]
-\definecolor [grey10] [gray10]
-\definecolor [gray11] [s=0.11]
-\definecolor [grey11] [gray11]
-\definecolor [gray12] [s=0.12]
-\definecolor [grey12] [gray12]
-\definecolor [gray13] [s=0.13]
-\definecolor [grey13] [gray13]
-\definecolor [gray14] [s=0.14]
-\definecolor [grey14] [gray14]
-\definecolor [gray15] [s=0.15]
-\definecolor [grey15] [gray15]
-\definecolor [gray16] [s=0.16]
-\definecolor [grey16] [gray16]
-\definecolor [gray17] [s=0.17]
-\definecolor [grey17] [gray17]
-\definecolor [gray18] [s=0.18]
-\definecolor [grey18] [gray18]
-\definecolor [gray19] [s=0.19]
-\definecolor [grey19] [gray19]
-\definecolor [gray20] [s=0.20]
-\definecolor [grey20] [gray20]
-\definecolor [gray21] [s=0.21]
-\definecolor [grey21] [gray21]
-\definecolor [gray22] [s=0.22]
-\definecolor [grey22] [gray22]
-\definecolor [gray23] [s=0.23]
-\definecolor [grey23] [gray23]
-\definecolor [gray24] [s=0.24]
-\definecolor [grey24] [gray24]
-\definecolor [gray25] [s=0.25]
-\definecolor [grey25] [gray25]
-\definecolor [gray26] [s=0.26]
-\definecolor [grey26] [gray26]
-\definecolor [gray27] [s=0.27]
-\definecolor [grey27] [gray27]
-\definecolor [gray28] [s=0.28]
-\definecolor [grey28] [gray28]
-\definecolor [gray29] [s=0.29]
-\definecolor [grey29] [gray29]
-\definecolor [gray30] [s=0.30]
-\definecolor [grey30] [gray30]
-\definecolor [gray31] [s=0.31]
-\definecolor [grey31] [gray31]
-\definecolor [gray32] [s=0.32]
-\definecolor [grey32] [gray32]
-\definecolor [gray33] [s=0.33]
-\definecolor [grey33] [gray33]
-\definecolor [gray34] [s=0.34]
-\definecolor [grey34] [gray34]
-\definecolor [gray35] [s=0.35]
-\definecolor [grey35] [gray35]
-\definecolor [gray36] [s=0.36]
-\definecolor [grey36] [gray36]
-\definecolor [gray37] [s=0.37]
-\definecolor [grey37] [gray37]
-\definecolor [gray38] [s=0.38]
-\definecolor [grey38] [gray38]
-\definecolor [gray39] [s=0.39]
-\definecolor [grey39] [gray39]
-\definecolor [gray40] [s=0.40]
-\definecolor [grey40] [gray40]
-\definecolor [gray41] [s=0.41]
-\definecolor [grey41] [gray41]
-\definecolor [gray42] [s=0.42]
-\definecolor [grey42] [gray42]
-\definecolor [gray43] [s=0.43]
-\definecolor [grey43] [gray43]
-\definecolor [gray44] [s=0.44]
-\definecolor [grey44] [gray44]
-\definecolor [gray45] [s=0.45]
-\definecolor [grey45] [gray45]
-\definecolor [gray46] [s=0.46]
-\definecolor [grey46] [gray46]
-\definecolor [gray47] [s=0.47]
-\definecolor [grey47] [gray47]
-\definecolor [gray48] [s=0.48]
-\definecolor [grey48] [gray48]
-\definecolor [gray49] [s=0.49]
-\definecolor [grey49] [gray49]
-\definecolor [gray50] [s=0.50]
-\definecolor [grey50] [gray50]
-\definecolor [gray51] [s=0.51]
-\definecolor [grey51] [gray51]
-\definecolor [gray52] [s=0.52]
-\definecolor [grey52] [gray52]
-\definecolor [gray53] [s=0.53]
-\definecolor [grey53] [gray53]
-\definecolor [gray54] [s=0.54]
-\definecolor [grey54] [gray54]
-\definecolor [gray55] [s=0.55]
-\definecolor [grey55] [gray55]
-\definecolor [gray56] [s=0.56]
-\definecolor [grey56] [gray56]
-\definecolor [gray57] [s=0.57]
-\definecolor [grey57] [gray57]
-\definecolor [gray58] [s=0.58]
-\definecolor [grey58] [gray58]
-\definecolor [gray59] [s=0.59]
-\definecolor [grey59] [gray59]
-\definecolor [gray60] [s=0.60]
-\definecolor [grey60] [gray60]
-\definecolor [gray61] [s=0.61]
-\definecolor [grey61] [gray61]
-\definecolor [gray62] [s=0.62]
-\definecolor [grey62] [gray62]
-\definecolor [gray63] [s=0.63]
-\definecolor [grey63] [gray63]
-\definecolor [gray64] [s=0.64]
-\definecolor [grey64] [gray64]
-\definecolor [gray65] [s=0.65]
-\definecolor [grey65] [gray65]
-\definecolor [gray66] [s=0.66]
-\definecolor [grey66] [gray66]
-\definecolor [gray67] [s=0.67]
-\definecolor [grey67] [gray67]
-\definecolor [gray68] [s=0.68]
-\definecolor [grey68] [gray68]
-\definecolor [gray69] [s=0.69]
-\definecolor [grey69] [gray69]
-\definecolor [gray70] [s=0.70]
-\definecolor [grey70] [gray70]
-\definecolor [gray71] [s=0.71]
-\definecolor [grey71] [gray71]
-\definecolor [gray72] [s=0.72]
-\definecolor [grey72] [gray72]
-\definecolor [gray73] [s=0.73]
-\definecolor [grey73] [gray73]
-\definecolor [gray74] [s=0.74]
-\definecolor [grey74] [gray74]
-\definecolor [gray75] [s=0.75]
-\definecolor [grey75] [gray75]
-\definecolor [gray76] [s=0.76]
-\definecolor [grey76] [gray76]
-\definecolor [gray77] [s=0.77]
-\definecolor [grey77] [gray77]
-\definecolor [gray78] [s=0.78]
-\definecolor [grey78] [gray78]
-\definecolor [gray79] [s=0.79]
-\definecolor [grey79] [gray79]
-\definecolor [gray80] [s=0.80]
-\definecolor [grey80] [gray80]
-\definecolor [gray81] [s=0.81]
-\definecolor [grey81] [gray81]
-\definecolor [gray82] [s=0.82]
-\definecolor [grey82] [gray82]
-\definecolor [gray83] [s=0.83]
-\definecolor [grey83] [gray83]
-\definecolor [gray84] [s=0.84]
-\definecolor [grey84] [gray84]
-\definecolor [gray85] [s=0.85]
-\definecolor [grey85] [gray85]
-\definecolor [gray86] [s=0.86]
-\definecolor [grey86] [gray86]
-\definecolor [gray87] [s=0.87]
-\definecolor [grey87] [gray87]
-\definecolor [gray88] [s=0.88]
-\definecolor [grey88] [gray88]
-\definecolor [gray89] [s=0.89]
-\definecolor [grey89] [gray89]
-\definecolor [gray90] [s=0.90]
-\definecolor [grey90] [gray90]
-\definecolor [gray91] [s=0.91]
-\definecolor [grey91] [gray91]
-\definecolor [gray92] [s=0.92]
-\definecolor [grey92] [gray92]
-\definecolor [gray93] [s=0.93]
-\definecolor [grey93] [gray93]
-\definecolor [gray94] [s=0.94]
-\definecolor [grey94] [gray94]
-\definecolor [gray95] [s=0.95]
-\definecolor [grey95] [gray95]
-\definecolor [gray96] [s=0.96]
-\definecolor [grey96] [gray96]
-\definecolor [gray97] [s=0.97]
-\definecolor [grey97] [gray97]
-\definecolor [gray98] [s=0.98]
-\definecolor [grey98] [gray98]
-\definecolor [gray99] [s=0.99]
-\definecolor [grey99] [gray99]
-\definecolor [gray100] [s=1.00]
-\definecolor [grey100] [gray100]
+
+% A lot of useless grays:
+
+\dorecurse{100}{
+ \definecolor [gray#1] [s=\cldcontext{#1/100}]
+}
+
\definecolor [darkgrey] [s=0.66]
-\definecolor [darkgray] [darkgrey]
\definecolor [darkblue] [h=00008b]
\definecolor [darkcyan] [h=008b8b]
\definecolor [darkmagenta] [h=8b008b]
\definecolor [darkred] [h=8b0000]
\definecolor [lightgreen] [h=90ee90]
+\stopprotectedcolors
+
\endinput
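Note that the grey* spelling aliases are gone from the x11 set, while the grayN range is generated by the same \dorecurse loop as in the emacs set. A hedged sketch of restoring the alternative spelling locally when an existing document still depends on it (the synonym definitions are an assumption, not something the patch provides):

    \usecolors[x11]

    \definecolor[grey40][gray40]   % local synonym for the dropped alias
    \definecolor[grey]  [gray]

    \starttext
        \color[gray40]{forty percent gray}\quad
        \color[grey40]{the same via the synonym}
    \stoptext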
diff --git a/tex/context/base/mkiv/colo-imp-xwi.mkiv b/tex/context/base/mkiv/colo-imp-xwi.mkiv
index 557e9c57c..57fe7965b 100644
--- a/tex/context/base/mkiv/colo-imp-xwi.mkiv
+++ b/tex/context/base/mkiv/colo-imp-xwi.mkiv
@@ -8,11 +8,13 @@
%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
%C
%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
%D I've forgotten where I got these definitions from, but maybe
-%D they can be of use.
+%D they can be of use.
+
+\startprotectedcolors
\definecolor [aliceblue] [r=0.94,g=0.97,b=1.00]
\definecolor [antiquewhite] [r=0.98,g=0.92,b=0.84]
@@ -127,7 +129,7 @@
\definecolor [snow] [r=1.00,g=0.98,b=0.98]
\definecolor [springgreen] [r=0.00,g=1.00,b=0.50]
\definecolor [steelblue] [r=0.27,g=0.51,b=0.71]
-\definecolor [tan ] [r=0.82,g=0.71,b=0.55]
+\definecolor [x11tan] [r=0.82,g=0.71,b=0.55]
\definecolor [thistle] [r=0.85,g=0.75,b=0.85]
\definecolor [tomato] [r=1.00,g=0.39,b=0.28]
\definecolor [turquoise] [r=0.25,g=0.88,b=0.82]
@@ -139,4 +141,6 @@
\definecolor [yellow] [r=1.00,g=1.00,b=0.00]
\definecolor [yellowgreen] [r=0.60,g=0.80,b=0.20]
+\stopprotectedcolors
+
\endinput
diff --git a/tex/context/base/mkiv/colo-ini.lua b/tex/context/base/mkiv/colo-ini.lua
index 495a09809..1a055242b 100644
--- a/tex/context/base/mkiv/colo-ini.lua
+++ b/tex/context/base/mkiv/colo-ini.lua
@@ -362,7 +362,7 @@ local function defineprocesscolor(name,str,global,freeze) -- still inconsistent
definecolor(name, register_color(name,'rgb', tonumber(r) or 0, tonumber(g) or 0, tonumber(b) or 0), global)
else
local c, m, y, k = settings.c, settings.m, settings.y, settings.k
- if c or m or y or b then
+ if c or m or y or k then
definecolor(name, register_color(name,'cmyk',tonumber(c) or 0, tonumber(m) or 0, tonumber(y) or 0, tonumber(k) or 0), global)
else
local h, s, v = settings.h, settings.s, settings.v
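The one-character fix above (y or b becoming y or k) matters for specifications that only set the black component: assuming this is the code path taken by a key/value \definecolor, a k-only spec now ends up in the cmyk branch instead of falling through to the hsv check. A small sketch with made-up color names:

    \definecolor[inkgray] [k=.35]               % k only, now registered as cmyk
    \definecolor[deepblue][c=1,m=.6,y=0,k=.2]

    \starttext
        \color[inkgray]{gray via cmyk}\quad\color[deepblue]{a deep cmyk blue}
    \stoptext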
diff --git a/tex/context/base/mkiv/colo-ini.mkiv b/tex/context/base/mkiv/colo-ini.mkiv
index d8267becc..26208edd4 100644
--- a/tex/context/base/mkiv/colo-ini.mkiv
+++ b/tex/context/base/mkiv/colo-ini.mkiv
@@ -363,26 +363,73 @@
%D This saves us some typing in for instance the modules that
%D deal with pretty verbatim typesetting.
-\let\m_colo_palets_tmp\empty
+\installcorenamespace{paletlist}
+\installcorenamespace{paletsize}
+
+\let\m_colo_palet\relax
+\let\c_colo_palet\relax
+
+\def\colo_palet_allocate#1%
+ {\expandafter\let \csname\??paletlist#1\endcsname\empty
+ \expandafter\newcount\csname\??paletsize#1\endcsname}
+
+\def\colo_palet_prepare#1%
+ {\edef\colo_palet_name{#1}%
+ \ifcsname\??paletlist\colo_palet_name\endcsname\else
+ \colo_palet_allocate\colo_palet_name
+ \fi
+ \edef\m_colo_palet{\begincsname\??paletlist\colo_palet_name\endcsname}%
+ \expandafter\let\expandafter\c_colo_palet\csname\??paletsize\colo_palet_name\endcsname}
+
+\def\colo_palet_extend#1%
+ {\addtocommalist{#1}\m_colo_palet
+ \expandafter\let\csname\??paletlist\colo_palet_name\endcsname\m_colo_palet
+ \advance\c_colo_palet\plusone}
+
+\unexpanded\def\doifelsecolorpalet#1%
+ {\ifcsname\??paletlist#1\endcsname
+ \expandafter\firstoftwoarguments
+ \else
+ \expandafter\secondoftwoarguments
+ \fi}
+
+\let\paletlist\empty
+\let\paletsize\!!zerocount
+
+\unexpanded\def\getpaletlist[#1]%
+ {\edef\paletlist{\begincsname\??paletlist#1\endcsname}}
+
+\unexpanded\def\getpaletsize[#1]%
+ {\ifcsname\??paletsize#1\endcsname
+ \edef\paletsize{\the\lastnamedcs}%
+ \else
+ \let\paletsize\!!zerocount
+ \fi}
\unexpanded\def\definepalet
- {\dodoubleargument\colo_palets_define}
-
-\unexpanded\def\colo_palets_define[#1][#2]% todo
- {\doifelseassignment{#2}
- {%\colo_helpers_show_message\m!colors6{#1}%
- \let\m_colo_palets_tmp\empty
- \setevalue{\??colorpaletspecification#1}{#2}%
- \processcommalist[#2]{\colo_palets_define_one{#1}}%
- \letvalue{\??colorpalet#1}\m_colo_palets_tmp
- \let\m_colo_palets_tmp\empty}
+ {\dotripleempty\colo_palets_define}
+
+\def\colo_palets_define
+ {\ifthirdargument
+ \expandafter\colo_palets_define_b
+ \else
+ \expandafter\colo_palets_define_a
+ \fi}
+
+\unexpanded\def\colo_palets_define_a[#1][#2][#3]%
+ {\colo_palet_prepare{#1}%
+ \doifelseassignment{#2}
+ {\setevalue{\??colorpaletspecification#1}{#2}%
+ \processcommalist[#2]{\colo_palets_define_one{#1}}}
{\ifcsname\??colorpaletspecification#2\endcsname
- %\normalexpanded{\colo_palets_define[#1][\csname\??colorpaletspecification#2\endcsname]}%
\normalexpanded{\colo_palets_define[#1][\lastnamedcs]}%
\fi}}
-% \def\colo_palets_define_one#1#2% get rid of { } in #2
-% {\colo_palets_define_two{#1}[#2]}%
+\unexpanded\def\colo_palets_define_b[#1][#2][#3]%
+ {\colo_palet_prepare{#1}%
+ \setevalue{\??colorpaletspecification#1}{#2}%
+ \colo_palet_extend{#2}%
+ \colo_palets_define_assign{#1}{#2}{#3}}
\def\colo_palets_define_one#1#2% get rid of { }
{\doifelseassignment{#2} % catch empty entries
@@ -390,21 +437,14 @@
{\colo_palets_define_three{#1}{#2}}}
\def\colo_palets_define_two#1[#2=#3]%
- {\edef\m_colo_palets_tmp{\ifx\m_colo_palets_tmp\empty\else\m_colo_palets_tmp,\fi#2}%
+ {\colo_palet_extend{#2}%
\colo_palets_define_set{#1}{#2}{#3}}%
\def\colo_palets_define_three#1#2%
{\ifcsname\??colorpaletspecification#2\endcsname
- %\processcommacommand[\csname\??colorpaletspecification#2\endcsname]{\colo_palets_define_one{#1}}%
\processcommacommand[\lastnamedcs]{\colo_palets_define_one{#1}}%
\fi}
-\let\paletsize\!!zerocount
-
-\unexpanded\def\getpaletsize[#1]% only works for valid k=v definitions
- {\getcommacommandsize[\csname\??colorpaletspecification#1\endcsname]%
- \edef\paletsize{\number\commalistsize}}
-
%D Instead of refering to colors, one can also directly specify
%D a color:
%D
@@ -431,7 +471,7 @@
\ifx\currentcolorpalet\empty
% seems to be a reset
\let\currentcolorprefix\empty
- \else\ifcsname\??colorpalet\currentcolorpalet\endcsname
+ \else\ifcsname\??paletlist\currentcolorpalet\endcsname
\edef\currentcolorprefix{#1:}%
\else
\colo_helpers_show_message\m!colors7\currentcolorpalet
@@ -510,13 +550,11 @@
%D
%D These speak for themselves. See \type {colo-ext} for usage.
-\def\negatecolorcomponent#1% #1 = \macro
+\unexpanded\def\negatecolorcomponent#1% #1 = \macro
{\scratchdimen\onepoint\advance\scratchdimen-#1\onepoint
\ifdim\scratchdimen<\zeropoint\scratchdimen\zeropoint\fi
\edef#1{\withoutpt\the\scratchdimen}}
-\let\negatedcolorcomponent\firstofoneargument
-
\def\negatedcolorcomponent#1%
{\ifdim\dimexpr\onepoint-#1\onepoint\relax<\zeropoint
\!!zerocount
@@ -524,7 +562,7 @@
\expandafter\withoutpt\the\dimexpr\onepoint-#1\onepoint\relax
\fi}
-\def\negatecolorcomponent#1% #1 = \macro
+\unexpanded\def\negatecolorcomponent#1% #1 = \macro
{\edef#1{\negatedcolorcomponent{#1}}}
%D \macros
@@ -989,15 +1027,15 @@
\def\colo_helpers_inherited_direct_ts#1{\ifcsname\??transparencysetter #1\endcsname\lastnamedcs\fi}
\def\colo_helpers_inherited_direct_ta#1{\ifcsname\??transparencyattribute#1\endcsname\lastnamedcs\else\!!zerocount\fi}
-%def\colo_helpers_inherited_palet_ca#1#2{\csname\??colorattribute \ifcsname\??colorattribute \??colorpalet#1:#2\endcsname\??colorpalet#1:#2\fi\endcsname}
-%def\colo_helpers_inherited_palet_cs#1#2{\csname\??colorsetter \ifcsname\??colorsetter \??colorpalet#1:#2\endcsname\??colorpalet#1:#2\fi\endcsname}
-%def\colo_helpers_inherited_palet_ta#1#2{\csname\??transparencyattribute\ifcsname\??transparencyattribute\??colorpalet#1:#2\endcsname\??colorpalet#1:#2\fi\endcsname}
-%def\colo_helpers_inherited_palet_ts#1#2{\csname\??transparencysetter \ifcsname\??transparencysetter \??colorpalet#1:#2\endcsname\??colorpalet#1:#2\fi\endcsname}
-
-\def\colo_helpers_inherited_palet_ca#1#2{\ifcsname\??colorattribute \??colorpalet#1:#2\endcsname\lastnamedcs\fi}
-\def\colo_helpers_inherited_palet_cs#1#2{\ifcsname\??colorsetter \??colorpalet#1:#2\endcsname\lastnamedcs\else\!!zerocount\fi}
-\def\colo_helpers_inherited_palet_ta#1#2{\ifcsname\??transparencyattribute\??colorpalet#1:#2\endcsname\lastnamedcs\fi}
-\def\colo_helpers_inherited_palet_ts#1#2{\ifcsname\??transparencysetter \??colorpalet#1:#2\endcsname\lastnamedcs\else\!!zerocount\fi}
+% %def\colo_helpers_inherited_palet_cs#1#2{\csname\??colorsetter \ifcsname\??colorsetter \??colorpalet#1:#2\endcsname\??colorpalet#1:#2\fi\endcsname}
+% %def\colo_helpers_inherited_palet_ca#1#2{\csname\??colorattribute \ifcsname\??colorattribute \??colorpalet#1:#2\endcsname\??colorpalet#1:#2\fi\endcsname}
+% %def\colo_helpers_inherited_palet_ts#1#2{\csname\??transparencysetter \ifcsname\??transparencysetter \??colorpalet#1:#2\endcsname\??colorpalet#1:#2\fi\endcsname}
+% %def\colo_helpers_inherited_palet_ta#1#2{\csname\??transparencyattribute\ifcsname\??transparencyattribute\??colorpalet#1:#2\endcsname\??colorpalet#1:#2\fi\endcsname}
+%
+% \def\colo_helpers_inherited_palet_cs#1#2{\ifcsname\??colorsetter \??colorpalet#1:#2\endcsname\lastnamedcs\fi}
+% \def\colo_helpers_inherited_palet_ca#1#2{\ifcsname\??colorattribute \??colorpalet#1:#2\endcsname\lastnamedcs\else\!!zerocount\fi}
+% \def\colo_helpers_inherited_palet_ts#1#2{\ifcsname\??transparencysetter \??colorpalet#1:#2\endcsname\lastnamedcs}
+% \def\colo_helpers_inherited_palet_ta#1#2{\ifcsname\??transparencyattribute\??colorpalet#1:#2\endcsname\lastnamedcs\else\!!zerocount\fi}
\let\colo_helpers_set_value\setvalue
@@ -1009,25 +1047,56 @@
\fi
\to \everysetupcolors
+% \def\colo_palets_define_set#1#2#3%
+% {\doifelseassignment{#3}% \definepalet[test][xx={y=.4}]
+% {\definecolor[\??colorpalet#1:#2][#3]%
+% \colo_helpers_set_value{\??colorsetter #1:#2}{\colo_helpers_inherited_palet_ca{#1}{#2}}%
+% \colo_helpers_set_value{\??colorattribute #1:#2}{\colo_helpers_inherited_palet_cs{#1}{#2}}%
+% \colo_helpers_set_value{\??transparencysetter #1:#2}{\colo_helpers_inherited_palet_ta{#1}{#2}}%
+% \colo_helpers_set_value{\??transparencyattribute#1:#2}{\colo_helpers_inherited_palet_ts{#1}{#2}}}
+% {\ifcsname\??colorsetter#3\endcsname % \definepalet[test][xx=green]
+% \colo_helpers_set_value{\??colorsetter #1:#2}{\colo_helpers_inherited_direct_cs{#3}}%
+% \colo_helpers_set_value{\??colorattribute #1:#2}{\colo_helpers_inherited_direct_ca{#3}}%
+% \colo_helpers_set_value{\??transparencysetter #1:#2}{\colo_helpers_inherited_direct_ts{#3}}%
+% \colo_helpers_set_value{\??transparencyattribute#1:#2}{\colo_helpers_inherited_direct_ta{#3}}%
+% \else
+% % not entered when making format
+% \localundefine{\??colorsetter #1:#2}%
+% \localundefine{\??colorattribute #1:#2}%
+% \localundefine{\??transparencysetter #1:#2}%
+% \localundefine{\??transparencyattribute#1:#2}%
+% \fi}}
+
\def\colo_palets_define_set#1#2#3%
{\doifelseassignment{#3}% \definepalet[test][xx={y=.4}]
- {\definecolor[\??colorpalet#1:#2][#3]%
- \colo_helpers_set_value{\??colorsetter #1:#2}{\colo_helpers_inherited_palet_ca{#1}{#2}}%
- \colo_helpers_set_value{\??colorattribute #1:#2}{\colo_helpers_inherited_palet_cs{#1}{#2}}%
- \colo_helpers_set_value{\??transparencysetter #1:#2}{\colo_helpers_inherited_palet_ta{#1}{#2}}%
- \colo_helpers_set_value{\??transparencyattribute#1:#2}{\colo_helpers_inherited_palet_ts{#1}{#2}}}
- {\ifcsname\??colorsetter#3\endcsname % \definepalet[test][xx=green]
- \colo_helpers_set_value{\??colorsetter #1:#2}{\colo_helpers_inherited_direct_cs{#3}}%
- \colo_helpers_set_value{\??colorattribute #1:#2}{\colo_helpers_inherited_direct_ca{#3}}%
- \colo_helpers_set_value{\??transparencysetter #1:#2}{\colo_helpers_inherited_direct_ts{#3}}%
- \colo_helpers_set_value{\??transparencyattribute#1:#2}{\colo_helpers_inherited_direct_ta{#3}}%
+ {\colo_palets_define_assign}%
+ {\ifcsname\??colorsetter#3\endcsname
+ % \definepalet[test][xx=green]
+ \expandafter\colo_palets_define_inherit
\else
% not entered when making format
- \localundefine{\??colorsetter #1:#2}%
- \localundefine{\??colorattribute #1:#2}%
- \localundefine{\??transparencysetter #1:#2}%
- \localundefine{\??transparencyattribute#1:#2}%
- \fi}}
+ \expandafter\colo_palets_define_undefine
+ \fi}%
+ {#1}{#2}{#3}}
+
+\def\colo_palets_define_inherit#1#2#3%
+ {\colo_helpers_set_value{\??colorsetter #1:#2}{\colo_helpers_inherited_direct_cs{#3}}%
+ \colo_helpers_set_value{\??colorattribute #1:#2}{\colo_helpers_inherited_direct_ca{#3}}%
+ \colo_helpers_set_value{\??transparencysetter #1:#2}{\colo_helpers_inherited_direct_ts{#3}}%
+ \colo_helpers_set_value{\??transparencyattribute#1:#2}{\colo_helpers_inherited_direct_ta{#3}}}
+
+\def\colo_palets_define_undefine#1#2#3%
+ {\localundefine{\??colorsetter #1:#2}%
+ \localundefine{\??colorattribute #1:#2}%
+ \localundefine{\??transparencysetter #1:#2}%
+ \localundefine{\??transparencyattribute#1:#2}}
+
+\def\colo_palets_define_assign#1#2#3%
+ {\definecolor[\??colorpalet#1:#2][#3]%
+ \colo_helpers_set_value{\??colorsetter #1:#2}{\colo_helpers_inherited_direct_cs{\??colorpalet#1:#2}}%
+ \colo_helpers_set_value{\??colorattribute #1:#2}{\colo_helpers_inherited_direct_ca{\??colorpalet#1:#2}}%
+ \colo_helpers_set_value{\??transparencysetter #1:#2}{\colo_helpers_inherited_direct_ts{\??colorpalet#1:#2}}%
+ \colo_helpers_set_value{\??transparencyattribute#1:#2}{\colo_helpers_inherited_direct_ta{\??colorpalet#1:#2}}}
\setvalue{\??colorattribute currentcolor}{\the\attribute\colorattribute} % for mpcolor
\setvalue{\??transparencyattribute currentcolor}{\the\attribute\transparencyattribute} % for mpcolor
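The palet code now keeps its own list and size per palet and accepts a third argument for adding a single entry. A sketch based on the specifications mentioned in the comments above; the palet name demo and its entry names are made up:

    \definepalet
      [demo]
      [one={y=.4},
       two=green,
       three={r=.8}]

    \definepalet[demo][four][b=.7]   % new: extend an existing palet with one entry

    \getpaletsize[demo]   % \paletsize now holds the number of entries
    \getpaletlist[demo]   % \paletlist now holds the comma separated entry names

    \doifelsecolorpalet{demo}{defined}{undefined}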
diff --git a/tex/context/base/mkiv/colo-run.lua b/tex/context/base/mkiv/colo-run.lua
index 27f7c6b12..6368b3307 100644
--- a/tex/context/base/mkiv/colo-run.lua
+++ b/tex/context/base/mkiv/colo-run.lua
@@ -11,27 +11,31 @@ if not modules then modules = { } end modules ['colo-run'] = {
local colors, commands, context, utilities = colors, commands, context, utilities
-local colors= attributes.colors
+local colors = attributes.colors
+
+local private = table.tohash { "d_u_m_m_y", "maintextcolor", "themaintextcolor" }
function commands.showcolorset(name)
local set = colors.setlist(name)
context.starttabulate { "|l|l|l|l|l|l|l|" }
for i=1,#set do
local s = set[i]
- local r = { width = "4em", height = "max", depth = "max", color = s }
- context.NC()
- context.setcolormodel { "gray" }
- context.blackrule(r)
- context.NC()
- context.blackrule(r)
- context.NC()
- context.grayvalue(s)
- context.NC()
- context.colorvalue(s)
- context.NC()
- context(s)
- context.NC()
- context.NR()
+ if not private[s] then
+ local r = { width = "4em", height = "max", depth = "max", color = s }
+ context.NC()
+ context.setcolormodel { "gray" }
+ context.blackrule(r)
+ context.NC()
+ context.blackrule(r)
+ context.NC()
+ context.grayvalue(s)
+ context.NC()
+ context.colorvalue(s)
+ context.NC()
+ context(s)
+ context.NC()
+ context.NR()
+ end
end
context.stoptabulate()
end
@@ -52,16 +56,18 @@ function commands.showcolorcomponents(list)
context.TB()
for i=1,#set do
local s = set[i]
- context.NC()
- context.showcolorbar { s }
- context.NC()
- context(s)
- context.NC()
- context.transparencycomponents(s)
- context.NC()
- context.colorcomponents(s)
- context.NC()
- context.NR()
+ if not private[s] then
+ context.NC()
+ context.showcolorbar { s }
+ context.NC()
+ context(s)
+ context.NC()
+ context.transparencycomponents(s)
+ context.NC()
+ context.colorcomponents(s)
+ context.NC()
+ context.NR()
+ end
end
context.stoptabulate()
end
diff --git a/tex/context/base/mkiv/colo-run.mkiv b/tex/context/base/mkiv/colo-run.mkiv
index 1b8b87c75..37a9de190 100644
--- a/tex/context/base/mkiv/colo-run.mkiv
+++ b/tex/context/base/mkiv/colo-run.mkiv
@@ -45,32 +45,33 @@
{\dodoubleargument\colo_show_palet}
\gdef\colo_show_palet[#1][#2]%
- {\ifcsname\??colorpalet#1\endcsname
- \doifelseinset\v!vertical{#2} \colo_palets_show_vertical \colo_palets_show_horizontal [#1][#2]%
- \fi}
-
-\gdef\colo_palets_show_vertical[#1][#2]%
- {\vbox
- {\forgetall
- \setlocalhsize
- \hsize\localhsize
- \offinterlineskip
- \setuppalet[#1]
- \tabskip\zeropoint
- \def\colo_palets_show_palet##1%
- {\doifelseinset\v!number{#2}{##1\hskip.5\emwidth}{}%
- \aligntab
- \color [##1]{\vrule\s!width3\emwidth\s!height\strutht\s!depth\strutdp}%
- \graycolor[##1]{\vrule\s!width3\emwidth\s!height\strutht\s!depth\strutdp}%
- \aligntab
- \doifinset\v!value{#2}{\hskip.5\emwidth\colorvalue{##1}}%
- \crcr}
- \halign
- {\hss\alignmark\alignmark\aligntab\hss\alignmark\alignmark\hss\aligntab\alignmark\alignmark\cr
- \aligntab\doifinset\v!name{#2}{\strut#1}\aligntab\cr
- \colo_palets_process[#1]\colo_palets_show_palet\crcr}}}
-
-\gdef\colo_palets_show_horizontal[#1][#2]% todo: bTABLE etc
+ {\doifelsecolorpalet{#1}
+ {\doifelseinset\v!vertical{#2} \colo_palets_show_vertical \colo_palets_show_horizontal {#1}{#2}}%
+ {}}
+
+\gdef\colo_palets_show_vertical#1#2%
+ {\begingroup
+ \forgetall
+ \setlocalhsize
+ \hsize\localhsize
+ \offinterlineskip
+ \setuppalet[#1]
+ \tabskip\zeropoint
+ \def\colo_palets_show_palet##1%
+ {\doifelseinset\v!number{#2}{##1\hskip.5\emwidth}{}%
+ \aligntab
+ \color [##1]{\vrule\s!width3\emwidth\s!height\strutht\s!depth\strutdp}%
+ \graycolor[##1]{\vrule\s!width3\emwidth\s!height\strutht\s!depth\strutdp}%
+ \aligntab
+ \doifinset\v!value{#2}{\hskip.5\emwidth\colorvalue{##1}}%
+ \crcr}
+ \halign
+ {\hss\alignmark\alignmark\aligntab\hss\alignmark\alignmark\hss\aligntab\alignmark\alignmark\cr
+ \aligntab\doifinset\v!name{#2}{\strut#1}\aligntab\cr
+ \colo_palets_process[#1]\colo_palets_show_palet\crcr}%
+ \endgroup}
+
+\gdef\colo_palets_show_horizontal#1#2% todo: bTABLE etc
{\vbox
{\forgetall
\setlocalhsize
@@ -116,29 +117,31 @@
\crcr}}}
\gdef\colo_palets_process[#1]%
- {\expanded{\globalprocesscommalist[\getvalue{\??colorpalet#1}]}}
+ {\getpaletlist[#1]%
+ \normalexpanded{\globalprocesscommalist[\paletlist]}}
\unexpanded\gdef\comparepalet
{\dosingleargument\colo_palets_compare}
\gdef\colo_palets_compare[#1]%
- {\ifcsname\??colorpalet#1\endcsname
- \hbox
- {\colo_palets_compare_indeed\color[#1]%
- \quad
- \colo_palets_compare_indeed\graycolor[#1]}%
- \fi}
-
-\gdef\colo_palets_compare_indeed#1[#2]%
+ {\doifelsecolorpalet{#1}%
+ {\hbox
+ {\colo_palets_compare_indeed\color{#1}%
+ \quad
+ \colo_palets_compare_indeed\graycolor{#1}}}%
+ {}}
+
+\gdef\colo_palets_compare_indeed#1#2%
{\vbox
{\forgetall
\setlocalhsize
\hsize\localhsize
\offinterlineskip
\setuppalet[#2]
- \getcommacommandsize[\getvalue{\??colorpalet#2}]
+ \getpaletsize[#2]%
+ \getpaletlist[#2]%
\scratchwidth2\emwidth\relax
- \hsize\commalistsize\scratchwidth
+ \hsize\paletsize\scratchwidth
\def\colo_palets_compare##1%
{\hbox
{\setbox0\hbox
@@ -149,9 +152,9 @@
{\def\colo_palets_compare####1%
{\hbox to \scratchwidth
{\hss#1[####1]{\vrule\s!width.5\scratchwidth\s!height2.25\exheight\s!depth-.75\exheight}\hss}}%
- \processcommacommand[\getvalue{\??colorpalet#2}]\colo_palets_compare}}
+ \processcommacommand[\paletlist]\colo_palets_compare}}
\endgraf}
- \processcommacommand[\getvalue{\??colorpalet#2}]\colo_palets_compare}}
+ \processcommacommand[\paletlist]\colo_palets_compare}}
%D Groups
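The show and compare macros now go through the new palet helpers, and the Lua side skips a few private color names. A usage sketch, assuming a palet like the hypothetical demo one from the previous fragment and the usual bracketed list argument of \showcolorcomponents:

    \starttext
        \showpalet    [demo] [vertical,name,number,value]
        \comparepalet [demo]
        \showcolorcomponents [darkred,lightcyan,SteelTeal]
    \stoptext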
diff --git a/tex/context/base/mkiv/cont-new.mkiv b/tex/context/base/mkiv/cont-new.mkiv
index 91c8e6d22..33e820ee3 100644
--- a/tex/context/base/mkiv/cont-new.mkiv
+++ b/tex/context/base/mkiv/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2016.03.13 23:40}
+\newcontextversion{2016.03.26 13:05}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
diff --git a/tex/context/base/mkiv/context-performance.tex b/tex/context/base/mkiv/context-performance.tex
index c012c992c..dd3768664 100644
--- a/tex/context/base/mkiv/context-performance.tex
+++ b/tex/context/base/mkiv/context-performance.tex
@@ -4,6 +4,8 @@
\starttext
+\subject {About performance}
+
If you use \PDFTEX, \XETEX\ and \LUATEX\ you will notice a difference in speed.
Don't draw conclusions too easily from simple tests like:
@@ -30,9 +32,9 @@ So, it's best to compare speeds with a mixed content document: multiple fonts,
text and math, images, \METAPOST\ graphics, structural components, tables, etc.
On the average \PDFTEX\ is the fastest, but offering less functionality, while
-\LUATEX\ with \MKIV\ is faster than \XETEX\ with \MKII. On complex products like
-the \METAFUN\ manual or when processing complex \XML\ files a \LUATEX\ is much
-faster than a \PDFTEX\ run.
+\LUATEX\ with \MKIV\ is often faster than \XETEX\ with \MKII. On complex products
+like the \METAFUN\ manual or when processing complex \XML\ files a \LUATEX\ run
+is much faster than a \PDFTEX\ run.
There is some startup time involved which is normally not that much, and initial
font loading is also not really a burden, but of course for a few page document
@@ -40,7 +42,8 @@ it brings down the number of pages processed per second. Normalizing the input
takes a bit but applying \OPENTYPE\ font features takes much more. If you find
unacceptable bottlenecks just let me know (but better first check performance in
the other engines). Of course inefficient coding of styles (massive font switches
-where a simple one could do) are no reason for a complaint.
+where a simple one could do) is no reason for a complaint. Keep in mind:
+flexibility comes at a price.
A lot of time went into making sure that \CONTEXT\ runs efficiently on \LUATEX\
and we keep improving the performance. This is not so much an engine issue but
@@ -48,4 +51,16 @@ more one of the macro package. Of course what is true for \CONTEXT\ \MKIV\ can b
different for other macro packages but comparing with them makes no sense because
the differences in functionality.
+There are many features in \CONTEXT\ \MKIV\ that demand analysis of the text
+stream, and when a spot is found where such a feature has to be applied, some
+action takes place. Most such features don't have an impact on overall
+performance (due to the analysis) unless they are actually used. Of course font
+processing has the largest impact, but it is also the most flexible and
+extensible feature, especially when you use dynamic features. And font
+processing is always enabled. Speed has been improved over time and we're
+currently at a point where further speedups don't make much sense and would
+only make the code more complex. This is not to say that we cannot improve
+performance in the future.
+
+Hans Hagen \crlf Hasselt NL
+
\stoptext
diff --git a/tex/context/base/mkiv/context-todo.tex b/tex/context/base/mkiv/context-todo.tex
new file mode 100644
index 000000000..933b15e6f
--- /dev/null
+++ b/tex/context/base/mkiv/context-todo.tex
@@ -0,0 +1,39 @@
+% language=uk
+
+\usemodule[art-01,abr-01]
+
+\starttext
+
+\subject {On the agenda}
+
+\subsubject{\LUATEX}
+
+\startitemize
+ \startitem
+ head||tail cleanup in disc nodes (get rid of temp i.e.\ delay till linebreak)
+ \stopitem
+ \startitem
+ optimize some callback resolution (more direct)
+ \stopitem
+ \startitem
+ move glue specs into glue nodes
+ \stopitem
+\stopitemize
+
+\subsubject{\CONTEXT}
+
+\startitemize
+ \startitem
+ play with par callback and properties
+ \stopitem
+ \startitem
+ play with box attributes
+ \stopitem
+ \startitem
+ redo some of the spacing (adapt to improvements in engine)
+ \stopitem
+\stopitemize
+
+\vfill {\em Feel free to suggest additions.}
+
+\stoptext
diff --git a/tex/context/base/mkiv/context.mkiv b/tex/context/base/mkiv/context.mkiv
index dfc4d0bd1..52c346f62 100644
--- a/tex/context/base/mkiv/context.mkiv
+++ b/tex/context/base/mkiv/context.mkiv
@@ -39,7 +39,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2016.03.13 23:40}
+\edef\contextversion{2016.03.26 13:05}
\edef\contextkind {beta}
%D For those who want to use this:
@@ -268,8 +268,8 @@
%loadmarkfile{strc-reg}
\loadmkvifile{strc-lev} % experiment
-\loadmarkfile{spac-ali}
\loadmarkfile{spac-hor}
+\loadmarkfile{spac-ali}
\loadmarkfile{spac-flr}
\loadmarkfile{spac-ver}
\loadmarkfile{spac-lin}
diff --git a/tex/context/base/mkiv/core-con.lua b/tex/context/base/mkiv/core-con.lua
index 53f9734dc..bfe5357a6 100644
--- a/tex/context/base/mkiv/core-con.lua
+++ b/tex/context/base/mkiv/core-con.lua
@@ -28,8 +28,10 @@ local implement = interfaces.implement
local settings_to_array = utilities.parsers.settings_to_array
local allocate = utilities.storage.allocate
+local setmetatableindex = table.setmetatableindex
local formatters = string.formatters
local variables = interfaces.variables
+local constants = interfaces.constants
converters = converters or { }
local converters = converters
@@ -609,12 +611,23 @@ function converters.define(name,set) -- ,language)
sequences[name] = settings_to_array(set)
end
+function converters.max(name)
+ local s = sequences[name]
+ return s and #s or 0
+end
+
implement {
name = "defineconversion",
actions = converters.define,
arguments = { "string", "string" }
}
+implement {
+ name = "nofconversions",
+ actions = { converters.max, context },
+ arguments = "string",
+}
+
local function convert(method,n,language)
local converter = language and converters[method..":"..language] or converters[method]
if converter then
@@ -1137,7 +1150,7 @@ local v_WEEKDAY = upper(v_weekday)
local convert = converters.convert
-local days = { -- not variables.sunday
+local days = { -- not variables
"sunday",
"monday",
"tuesday",
@@ -1147,7 +1160,7 @@ local days = { -- not variables.sunday
"saturday",
}
-local months = { -- not variables.january
+local months = { -- not variables
"january",
"february",
"march",
@@ -1162,21 +1175,28 @@ local months = { -- not variables.january
"december",
}
+local monthmnems = { -- not variables
+ -- virtual table
+}
+
+setmetatableindex(months, function(t,k) return "unknown" end)
+setmetatableindex(days, function(t,k) return "unknown" end)
+setmetatableindex(monthmnems, function(t,k) return months[k] .. ":mnem" end)
+
local function dayname(n)
- return days[n] or "unknown"
+ return days[n]
end
local function weekdayname(day,month,year)
- return days[weekday(day,month,year)] or "unknown"
+ return days[weekday(day,month,year)]
end
local function monthname(n)
- return months[n] or "unknown"
+ return months[n]
end
local function monthmnem(n)
- local m = months[n]
- return m and (m ..":mnem") or "unknown"
+ return monthmnems[n]
end
implement {
@@ -1252,19 +1272,19 @@ local function currentdate(str,currentlanguage) -- second argument false : no la
context("%02i",year % 100)
elseif tag == v_month or tag == "m" then
if currentlanguage == false then
- context(months[month] or "unknown")
+ context(months[month])
elseif mnemonic then
- context.labeltext(monthmnem(month))
+ context.labeltext(variables[monthmnems[month]])
else
- context.labeltext(monthname(month))
+ context.labeltext(variables[months[month]])
end
elseif tag == v_MONTH then
if currentlanguage == false then
- context.WORD(months[month] or "unknown")
+ context.WORD(variables[months[month]])
elseif mnemonic then
- context.LABELTEXT(monthmnem(month))
+ context.LABELTEXT(variables[monthmnems[month]])
else
- context.LABELTEXT(monthname(month))
+ context.LABELTEXT(variables[months[month]])
end
elseif tag == "mm" then
context("%02i",month)
@@ -1272,7 +1292,7 @@ local function currentdate(str,currentlanguage) -- second argument false : no la
context(month)
elseif tag == v_day or tag == "d" then
if currentlanguage == false then
- context(days[day] or "unknown")
+ context(days[day])
else
context.convertnumber(v_day,day) -- why not direct
end
@@ -1286,16 +1306,16 @@ local function currentdate(str,currentlanguage) -- second argument false : no la
elseif tag == v_weekday or tag == "w" then
local wd = weekday(day,month,year)
if currentlanguage == false then
- context(days[wd] or "unknown")
+ context(days[wd])
else
- context.labeltext(days[wd] or "unknown")
+ context.labeltext(variables[days[wd]])
end
elseif tag == v_WEEKDAY then
local wd = weekday(day,month,year)
if currentlanguage == false then
- context.WORD(days[wd] or "unknown")
+ context.WORD(days[wd])
else
- context.LABELTEXT(days[wd] or "unknown")
+ context.LABELTEXT(variables[days[wd]])
end
elseif tag == "W" then
context(weekday(day,month,year))
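The day and month tables are now virtual (unknown entries and the mnemonic labels are produced by metatable indexing) and the label texts are looked up through the variables table. From the TeX end the usual date formatting is unchanged; a small sketch of the kind of calls that exercise this code:

    \starttext
        \currentdate
        \blank
        \currentdate[weekday,{ },day,{ },month,{ },year]
        \blank
        \currentdate[WEEKDAY,{ },MONTH]
    \stoptext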
diff --git a/tex/context/base/mkiv/core-con.mkiv b/tex/context/base/mkiv/core-con.mkiv
index b440672bb..1f6dc54d5 100644
--- a/tex/context/base/mkiv/core-con.mkiv
+++ b/tex/context/base/mkiv/core-con.mkiv
@@ -296,7 +296,7 @@
%D
%D The number of days is available in the macro \type {\numberofdays}.
-\def\doifelseleapyear#1%
+\def\doifelseleapyear#1% expandable check
{\clf_doifelseleapyear\numexpr#1\relax}
\let\doifleapyearelse\doifelseleapyear
@@ -571,7 +571,7 @@
% if we have definitions that are not matched at the lua end .. eventually we might do
% that when more shows up
-\def\doifelseconversiondefined#1%
+\def\doifelseconversiondefined#1% expandable
{\ifcsname\??conversion\currentlanguage#1\endcsname
\expandafter\firstoftwoarguments
\else\ifcsname\??conversion#1\endcsname
@@ -580,11 +580,11 @@
\doubleexpandafter\secondoftwoarguments
\fi\fi}
-\def\doifelseconversionnumber#1#2%
- {\ifcsname\??conversion#1#2\endcsname
- \expandafter\firstoftwoarguments
- \else
+\def\doifelseconversionnumber#1#2% expandable
+ {\ifnum#2>\numexpr\clf_nofconversions{#1}\relax
\expandafter\secondoftwoarguments
+ \else
+ \expandafter\firstoftwoarguments
\fi}
\let\doifconversiondefinedelse\doifelseconversiondefined
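On the \MKIV\ side \doifelseconversionnumber no longer checks for a defined csname but asks the Lua end (via clf_nofconversions) how many entries a conversion set has. A hedged sketch with a made-up set name; whether a language-specific set needs a prefixed name here is not shown:

    \defineconversion [moons] [new,waxing,full,waning]

    \starttext
        \convertnumber{moons}{2}     % waxing
        \blank
        \doifelseconversionnumber{moons}{3}{in range}{out of range}
        \blank
        \doifelseconversionnumber{moons}{9}{in range}{out of range}
        \blank
        \doifelseleapyear{2016}{2016 is a leap year}{2016 is a common year}
    \stoptext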
diff --git a/tex/context/base/mkiv/font-ext.lua b/tex/context/base/mkiv/font-ext.lua
index fc161f59a..79144aa70 100644
--- a/tex/context/base/mkiv/font-ext.lua
+++ b/tex/context/base/mkiv/font-ext.lua
@@ -1037,4 +1037,3 @@ implement {
context(getprivatenode(fontdata[currentfont()],name))
end
}
-
diff --git a/tex/generic/context/luatex/luatex-fonts-cbk.lua b/tex/context/base/mkiv/font-gbn.lua
index 9da8151de..f81c877f2 100644
--- a/tex/generic/context/luatex/luatex-fonts-cbk.lua
+++ b/tex/context/base/mkiv/font-gbn.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['luatex-fonts-cbk'] = {
+if not modules then modules = { } end modules ['font-gbn'] = {
version = 1.001,
comment = "companion to luatex-*.tex",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['luatex-fonts-cbk'] = {
license = "see context related readme files"
}
+-- generic [base|node] mode handler
+
if context then
texio.write_nl("fatal error: this module is not for context")
os.exit()
@@ -14,59 +16,75 @@ end
local fonts = fonts
local nodes = nodes
--- Fonts: (might move to node-gef.lua)
+local nuts = nodes.nuts -- context abstraction of direct nodes
-local traverse_id = node.traverse_id
-local free_node = node.free
-local remove_node = node.remove
+local traverse_id = nuts.traverse_id
+local free_node = nuts.free
+local remove_node = nuts.remove
local glyph_code = nodes.nodecodes.glyph
local disc_code = nodes.nodecodes.disc
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getprev = nuts.getprev
+local getnext = nuts.getnext
+local getdisc = nuts.getdisc
+local setchar = nuts.setchar
+local setlink = nuts.setlink
+
-- from now on we apply ligaturing and kerning here because it might interfere with complex
-- opentype discretionary handling where the base ligature pass expect some weird extra
-- pointers (which then confuse the tail slider that has some checking built in)
-local ligaturing = node.ligaturing
-local kerning = node.kerning
+local n_ligaturing = node.ligaturing
+local n_kerning = node.kerning
-local basepass = true
+local ligaturing = nuts.ligaturing
+local kerning = nuts.kerning
+
+local basemodepass = true
local function l_warning() texio.write_nl("warning: node.ligaturing called directly") l_warning = nil end
local function k_warning() texio.write_nl("warning: node.kerning called directly") k_warning = nil end
function node.ligaturing(...)
- if basepass and l_warning then
+ if basemodepass and l_warning then
l_warning()
end
- return ligaturing(...)
+ return n_ligaturing(...)
end
function node.kerning(...)
- if basepass and k_warning then
+ if basemodepass and k_warning then
k_warning()
end
- return kerning(...)
+ return n_kerning(...)
end
-function nodes.handlers.setbasepass(v)
- basepass = v
+function nodes.handlers.setbasemodepass(v)
+ basemodepass = v
end
function nodes.handlers.nodepass(head)
local fontdata = fonts.hashes.identifiers
if fontdata then
+ local nuthead = tonut(head)
local usedfonts = { }
local basefonts = { }
local prevfont = nil
local basefont = nil
local variants = nil
local redundant = nil
- for n in traverse_id(glyph_code,head) do
- local font = n.font
+ for n in traverse_id(glyph_code,nuthead) do
+ local font = getfont(n)
if font ~= prevfont then
if basefont then
- basefont[2] = n.prev
+ basefont[2] = getprev(n)
end
prevfont = font
local used = usedfonts[font]
@@ -78,13 +96,13 @@ function nodes.handlers.nodepass(head)
local processors = shared.processes
if processors and #processors > 0 then
usedfonts[font] = processors
- elseif basepass then
+ elseif basemodepass then
basefont = { n, nil }
basefonts[#basefonts+1] = basefont
end
end
local resources = tfmdata.resources
- variants = resources and resources.variants
+ variants = resources and resources.variants
variants = variants and next(variants) and variants or false
end
else
@@ -97,15 +115,15 @@ function nodes.handlers.nodepass(head)
end
end
if variants then
- local char = n.char
+ local char = getchar(n)
if char >= 0xFE00 and (char <= 0xFE0F or (char >= 0xE0100 and char <= 0xE01EF)) then
local hash = variants[char]
if hash then
- local p = n.prev
- if p and p.id == glyph_code then
- local variant = hash[p.char]
+ local p = getprev(n)
+ if p and getid(p) == glyph_code then
+ local variant = hash[getchar(p)]
if variant then
- p.char = variant
+ setchar(p,variant)
if not redundant then
redundant = { n }
else
@@ -120,15 +138,15 @@ function nodes.handlers.nodepass(head)
if redundant then
for i=1,#redundant do
local n = redundant[i]
- remove_node(head,n)
+ remove_node(nuthead,n)
free_node(n)
end
end
- for d in traverse_id(disc_code,head) do
- local r = d.replace
+ for d in traverse_id(disc_code,nuthead) do
+ local _, _, r = getdisc(d)
if r then
for n in traverse_id(glyph_code,r) do
- local font = n.font
+ local font = getfont(n)
if font ~= prevfont then
prevfont = font
local used = usedfonts[font]
@@ -155,35 +173,31 @@ function nodes.handlers.nodepass(head)
end
end
end
- if basepass and #basefonts > 0 then
+ if basemodepass and #basefonts > 0 then
for i=1,#basefonts do
local range = basefonts[i]
local start = range[1]
local stop = range[2]
- -- maybe even: if start and start ~= stop then
- if start or stop then
- local prev = nil
- local next = nil
- local front = start == head
+ if start then
+ local front = nuthead == start
+ local prev, next
if stop then
- next = stop.next
+ next = getnext(stop)
start, stop = ligaturing(start,stop)
start, stop = kerning(start,stop)
- elseif start then
- prev = start.prev
+ else
+ prev = getprev(start)
start = ligaturing(start)
start = kerning(start)
end
if prev then
- start.prev = prev
- prev.next = start
+ setlink(prev,start)
end
if next then
- stop.next = next
- next.prev = stop
+ setlink(stop,next)
end
- if front then
- head = start
+ if front and nuthead ~= start then
+ head = tonode(start)
end
end
end
@@ -195,9 +209,9 @@ function nodes.handlers.nodepass(head)
end
function nodes.handlers.basepass(head)
- if basepass then
- head = ligaturing(head)
- head = kerning(head)
+ if not basemodepass then
+ head = n_ligaturing(head)
+ head = n_kerning(head)
end
return head, true
end
@@ -211,7 +225,9 @@ function nodes.simple_font_handler(head)
if head then
head = nodepass(head)
head = injectpass(head)
- head = basepass(head)
+ if not basemodepass then
+ head = basepass(head)
+ end
protectpass(head)
return head, true
else
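This file is the generic (plain \LUATEX) handler, so the renamed toggle is meant for non-\CONTEXT\ setups; a hedged sketch, assuming the generic font loader is already loaded:

    \directlua {
        % when set to false, base mode fonts are no longer ligatured and kerned
        % per font range in the node pass; the separate base pass then applies
        % node.ligaturing and node.kerning to the whole list instead
        nodes.handlers.setbasemodepass(false)
    }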
diff --git a/tex/context/base/mkiv/font-lib.mkvi b/tex/context/base/mkiv/font-lib.mkvi
index 848fa872f..7bb042a59 100644
--- a/tex/context/base/mkiv/font-lib.mkvi
+++ b/tex/context/base/mkiv/font-lib.mkvi
@@ -47,34 +47,18 @@
\registerctxluafile{font-oti}{1.001} % otf initialization
\registerctxluafile{font-ott}{1.001} % otf tables (first)
-\iffalse % use new loader
-% \iftrue % use old loader
-
- \registerctxluafile{font-otf}{1.001} % otf main
- \registerctxluafile{font-otb}{1.001} % otf main base
- \registerctxluafile{font-inj}{1.001} % otf kern injector
- \registerctxluafile{font-ota}{1.001} % otf analyzers
- \registerctxluafile{font-otn}{1.001} % otf main node
- \registerctxluafile{font-otd}{1.001} % otf dynamics (does an overload)
- \registerctxluafile{font-otp}{1.001} % otf pack
- \registerctxluafile{font-otc}{1.001} % otf context
- \registerctxluafile{font-oth}{1.001} % otf helpers
- \registerctxluafile{font-odv}{1.001} % otf devanagari (experimental)
-
-\else
-
- \registerctxluafile{font-otl}{1.001} % otf replacement
- \registerctxluafile{font-oto}{1.001} % otb replacement
- \registerctxluafile{font-otj}{1.001} % inj replacement
- \registerctxluafile{font-oup}{1.001} % otp replacement
- \registerctxluafile{font-ota}{1.001}
- \registerctxluafile{font-ots}{1.001} % otn replacement
- \registerctxluafile{font-otd}{1.001}
- \registerctxluafile{font-otc}{1.001}
- \registerctxluafile{font-oth}{1.001}
- \registerctxluafile{font-osd}{1.001} % odv replacement
-
-\fi
+\registerctxluafile{font-otl}{1.001}
+\registerctxluafile{font-oto}{1.001}
+\registerctxluafile{font-otj}{1.001}
+\registerctxluafile{font-oup}{1.001}
+\registerctxluafile{font-ota}{1.001}
+\registerctxluafile{font-ots}{1.001}
+\registerctxluafile{font-otd}{1.001}
+\registerctxluafile{font-otc}{1.001}
+\registerctxluafile{font-oth}{1.001}
+\registerctxluafile{font-osd}{1.001}
+
+% so far
\registerctxluafile{font-pat}{1.001} % patchers
diff --git a/tex/context/base/mkiv/font-nod.lua b/tex/context/base/mkiv/font-nod.lua
index e0e0ca377..7f30b6d5c 100644
--- a/tex/context/base/mkiv/font-nod.lua
+++ b/tex/context/base/mkiv/font-nod.lua
@@ -66,6 +66,7 @@ local getsubtype = nuts.getsubtype
local getchar = nuts.getchar
local getlist = nuts.getlist
local getdisc = nuts.getdisc
+local isglyph = nuts.isglyph
local setfield = nuts.setfield
local setbox = nuts.setbox
@@ -117,14 +118,12 @@ function char_tracers.collect(head,list,tag,n)
n = n or 0
local ok, fn = false, nil
while head do
- local id = getid(head)
- if id == glyph_code then
+ local c, id = isglyph(head)
+ if c then
local f = getfont(head)
if f ~= fn then
ok, fn = false, f
end
- local c = getchar(head)
- -- local i = fontidentifiers[f].indices[c] or 0 -- zero anyway as indices is nilled
if not ok then
ok = true
n = n + 1
@@ -360,13 +359,14 @@ function step_tracers.codes(i,command,space)
local c = collection[i]
local function showchar(c)
+ local f = getfont(c)
+ local c = getchar(c)
if command then
- local f, c = getfont(c), getchar(c)
local d = fontdescriptions[f]
local d = d and d[c]
context[command](f,c,d and d.class or "")
else
- context("[%s:U+%05X]",getfont(c),getchar(c))
+ context("[%s:U+%05X]",f,c)
end
end
@@ -478,43 +478,40 @@ local threshold = 65536
local function toutf(list,result,nofresult,stopcriterium)
if list then
for n in traverse_nodes(tonut(list)) do
- local id = getid(n)
- if id == glyph_code then
+ local c, id = isglyph(n)
+ if c then
local components = getfield(n,"components")
if components then
result, nofresult = toutf(components,result,nofresult)
- else
- local c = getchar(n)
- if c > 0 then
- local fc = fontcharacters[getfont(n)]
- if fc then
- local fcc = fc[c]
- if fcc then
- local u = fcc.unicode
- if not u then
- nofresult = nofresult + 1
- result[nofresult] = utfchar(c)
- elseif type(u) == "table" then
- for i=1,#u do
- nofresult = nofresult + 1
- result[nofresult] = utfchar(u[i])
- end
- else
+ elseif c > 0 then
+ local fc = fontcharacters[getfont(n)]
+ if fc then
+ local fcc = fc[c]
+ if fcc then
+ local u = fcc.unicode
+ if not u then
+ nofresult = nofresult + 1
+ result[nofresult] = utfchar(c)
+ elseif type(u) == "table" then
+ for i=1,#u do
nofresult = nofresult + 1
- result[nofresult] = utfchar(u)
+ result[nofresult] = utfchar(u[i])
end
else
nofresult = nofresult + 1
- result[nofresult] = utfchar(c)
+ result[nofresult] = utfchar(u)
end
else
nofresult = nofresult + 1
- result[nofresult] = f_unicode(c)
+ result[nofresult] = utfchar(c)
end
else
nofresult = nofresult + 1
- result[nofresult] = f_badcode(c)
+ result[nofresult] = f_unicode(c)
end
+ else
+ nofresult = nofresult + 1
+ result[nofresult] = f_badcode(c)
end
elseif id == disc_code then
result, nofresult = toutf(getfield(n,"replace"),result,nofresult) -- needed?
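
The rewritten toutf walk resolves a glyph to text in stages: use the tounicode information from the font's character table when present (a single codepoint or a table of codepoints, as with ligatures), fall back to the character code itself, and only emit a marker when the font table or a sensible code is missing. Below is a stand-alone sketch of just that resolution step; the string.format stand-ins replace f_unicode and f_badcode, and utf8.char replaces the utf.char used in the patch.

    local utfchar = utf8.char

    -- fc : the character table of one font, c : the glyph's character code
    local function resolve(fc,c)
        if c <= 0 then
            return string.format("<bad code %d>",c)   -- f_badcode in the patch
        elseif not fc then
            return string.format("U+%05X",c)          -- f_unicode in the patch
        end
        local fcc = fc[c]
        local u   = fcc and fcc.unicode
        if not u then
            return utfchar(c)                         -- no tounicode info: keep the char
        elseif type(u) == "table" then
            local t = { }
            for i=1,#u do t[i] = utfchar(u[i]) end    -- e.g. an fi ligature -> "f" .. "i"
            return table.concat(t)
        else
            return utfchar(u)
        end
    end

    print(resolve({ [0xFB01] = { unicode = { 0x66, 0x69 } } }, 0xFB01)) --> fi
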
diff --git a/tex/context/base/mkiv/font-osd.lua b/tex/context/base/mkiv/font-osd.lua
index 23579ea48..6ff2e38b6 100644
--- a/tex/context/base/mkiv/font-osd.lua
+++ b/tex/context/base/mkiv/font-osd.lua
@@ -6,6 +6,9 @@ if not modules then modules = { } end modules ['font-osd'] = { -- script devanag
license = "see context related readme files"
}
+-- I'll optimize this one with ischar (much faster) when I see a reason (read: I need a
+-- proper test case first).
+
-- This is a version of font-odv.lua adapted to the new font loader and more
-- direct hashing. The initialization code has been adapted (more efficient). One day
-- I'll speed this up ... char swapping and properties.
@@ -53,6 +56,9 @@ if not modules then modules = { } end modules ['font-osd'] = { -- script devanag
--
-- Some data will move to char-def.lua (some day).
--
+-- By now we have yet another incrementally improved version. In the end I might rewrite the
+-- code.
+
-- Hans Hagen, PRAGMA-ADE, Hasselt NL
--
 -- We could have c_nukta, c_halant, c_ra if we know that they are never used mixed within
@@ -61,34 +67,6 @@ if not modules then modules = { } end modules ['font-osd'] = { -- script devanag
-- Matras: according to Microsoft typography specifications "up to one of each type:
-- pre-, above-, below- or post- base", but that does not seem to be right. It could
-- become an option.
---
--- The next code looks weird anyway: the "and boolean" should move inside the if
--- or we should check differently (case vs successive).
---
--- local function ms_matra(c)
--- local prebase, abovebase, belowbase, postbase = true, true, true, true
--- local n = getnext(c)
--- while n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font do
--- local char = getchar(n)
--- if not dependent_vowel[char] then
--- break
--- elseif pre_mark[char] and prebase then
--- prebase = false
--- elseif above_mark[char] and abovebase then
--- abovebase = false
--- elseif below_mark[char] and belowbase then
--- belowbase = false
--- elseif post_mark[char] and postbase then
--- postbase = false
--- else
--- return c
--- end
--- c = getnext(c)
--- end
--- return c
--- end
-
--- todo: first test for font then for subtype
local insert, imerge, copy = table.insert, table.imerge, table.copy
local next, type = next, type
@@ -116,6 +94,7 @@ local tonut = nuts.tonut
local getnext = nuts.getnext
local getprev = nuts.getprev
+local getboth = nuts.getboth
local getid = nuts.getid
local getchar = nuts.getchar
local getfont = nuts.getfont
@@ -127,6 +106,8 @@ local setchar = nuts.setchar
local getprop = nuts.getprop
local setprop = nuts.setprop
+local ischar = nuts.is_char
+
local insert_node_after = nuts.insert_after
local copy_node = nuts.copy
local free_node = nuts.free
@@ -481,7 +462,6 @@ local both_joiners_true = {
}
local sequence_reorder_matras = {
- chain = 0, -- obsolete
features = { dv01 = dev2_defaults },
flags = false_flags,
name = "dv01_reorder_matras",
@@ -497,7 +477,6 @@ local sequence_reorder_matras = {
}
local sequence_reorder_reph = {
- chain = 0, -- obsolete
features = { dv02 = dev2_defaults },
flags = false_flags,
name = "dv02_reorder_reph",
@@ -513,7 +492,6 @@ local sequence_reorder_reph = {
}
local sequence_reorder_pre_base_reordering_consonants = {
- chain = 0, -- obsolete
features = { dv03 = dev2_defaults },
flags = false_flags,
name = "dv03_reorder_pre_base_reordering_consonants",
@@ -529,7 +507,6 @@ local sequence_reorder_pre_base_reordering_consonants = {
}
local sequence_remove_joiners = {
- chain = 0, -- obsolete
features = { dv04 = deva_defaults },
flags = false_flags,
name = "dv04_remove_joiners",
@@ -799,7 +776,7 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
local lastcons = nil
local basefound = false
- if ra[getchar(start)] and halant[getchar(n)] and reph then
+ if reph and ra[getchar(start)] and halant[getchar(n)] then
-- if syllable starts with Ra + H and script has 'Reph' then exclude Reph
-- from candidates for base consonants
if n == stop then
@@ -871,7 +848,8 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
while not basefound do
-- find base consonant
- if consonant[getchar(current)] then
+ local char = getchar(current)
+ if consonant[char] then
setprop(current,a_state,s_half)
if not firstcons then
firstcons = current
@@ -879,7 +857,7 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
lastcons = current
if not base then
base = current
- elseif blwfcache[getchar(current)] then
+ elseif blwfcache[char] then
-- consonant has below-base (or post-base) form
setprop(current,a_state,s_blwf)
else
@@ -893,12 +871,14 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
if base ~= lastcons then
-- if base consonant is not last one then move halant from base consonant to last one
local np = base
- local n = getnext(base)
- if nukta[getchar(n)] then
+ local n = getnext(base)
+ local ch = getchar(n)
+ if nukta[ch] then
np = n
- n = getnext(n)
+ n = getnext(n)
+ ch = getchar(n)
end
- if halant[getchar(n)] then
+ if halant[ch] then
if lastcons ~= stop then
local ln = getnext(lastcons)
if nukta[getchar(ln)] then
@@ -938,7 +918,6 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
local nn = getnext(n)
local mn = getnext(matra)
setlink(sp,nn)
- setprev(nn,sp)
setlink(matra,start)
setlink(n,mn)
if head == start then
@@ -982,25 +961,30 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
local n = getnext(current)
local l = nil -- used ?
if c ~= stop then
- if nukta[getchar(n)] then
- c = n
- n = getnext(n)
+ local ch = getchar(n)
+ if nukta[ch] then
+ c = n
+ n = getnext(n)
+ ch = getchar(n)
end
if c ~= stop then
- if halant[getchar(n)] then
- c = n
- n = getnext(n)
+ if halant[ch] then
+ c = n
+ n = getnext(n)
+ ch = getchar(n)
end
- while c ~= stop and dependent_vowel[getchar(n)] do
- c = n
- n = getnext(n)
+ while c ~= stop and dependent_vowel[ch] do
+ c = n
+ n = getnext(n)
+ ch = getchar(n)
end
if c ~= stop then
- if vowel_modifier[getchar(n)] then
- c = n
- n = getnext(n)
+ if vowel_modifier[ch] then
+ c = n
+ n = getnext(n)
+ ch = getchar(n)
end
- if c ~= stop and stress_tone_mark[getchar(n)] then
+ if c ~= stop and stress_tone_mark[ch] then
c = n
n = getnext(n)
end
@@ -1016,8 +1000,7 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
if bp then
setnext(bp,cn)
end
- local next = getnext(cn)
- local prev = getprev(cn)
+ local prev, next = getboth(cn)
if next then
setprev(next,prev)
end
@@ -1074,12 +1057,12 @@ local function deva_reorder(head,start,stop,font,attr,nbspaces)
setlink(prev,n)
local next = getnext(b)
setlink(c,next)
- setnext(c,next)
setlink(b,current)
end
elseif cns and getnext(cns) ~= current then -- todo: optimize next
-- position below-base Ra (vattu) following the consonants on which it is placed (either the base consonant or one of the pre-base consonants)
- local cp, cnsn = getprev(current), getnext(cns)
+ local cp = getprev(current)
+ local cnsn = getnext(cns)
setlink(cp,n)
setlink(cns,current)
setlink(c,cnsn)
@@ -1132,20 +1115,26 @@ function handlers.devanagari_reorder_matras(head,start) -- no leak
local current = start -- we could cache attributes here
local startfont = getfont(start)
local startattr = getprop(start,a_syllabe)
- -- can be fast loop
- while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font and getprop(current,a_syllabe) == startattr do
+ while current do
+ local char = ischar(current,startfont)
local next = getnext(current)
- if halant[getchar(current)] and not getprop(current,a_state) then
- if next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == font and getprop(next,a_syllabe) == startattr and zw_char[getchar(next)] then
- current = next
+ if char and getprop(current,a_syllabe) == startattr then
+ if halant[char] and not getprop(current,a_state) then
+ if next then
+ local char = ischar(next,startfont)
+ if char and zw_char[char] and getprop(next,a_syllabe) == startattr then
+ current = next
+ next = getnext(current)
+ end
+ end
+                    -- can be optimized
+ local startnext = getnext(start)
+ head = remove_node(head,start)
+ setlink(start,next)
+ setlink(current,start)
+ start = startnext
+ break
end
- local startnext = getnext(start)
- head = remove_node(head,start)
- local next = getnext(current)
- setlink(start,next)
- setlink(current,start)
- start = startnext
- break
end
current = next
end
@@ -1184,54 +1173,68 @@ function handlers.devanagari_reorder_reph(head,start)
local startprev = nil
local startfont = getfont(start)
local startattr = getprop(start,a_syllabe)
- while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do --step 2
- if halant[getchar(current)] and not getprop(current,a_state) then
- local next = getnext(current)
- if next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == startfont and getprop(next,a_syllabe) == startattr and zw_char[getchar(next)] then
- current = next
+ while current do
+ local char = ischar(current,font)
+ if char and getprop(current,a_syllabe) == startattr then -- step 2
+ if halant[char] and not getprop(current,a_state) then
+ local next = getnext(current)
+ if next then
+ local nextchar = ischar(next,font)
+ if nextchar and zw_char[nextchar] and getprop(next,a_syllabe) == startattr then
+ current = next
+ next = getnext(current)
+ end
+ end
+ startnext = getnext(start)
+ head = remove_node(head,start)
+ setlink(start,next)
+ setlink(current,start)
+ start = startnext
+ startattr = getprop(start,a_syllabe)
+ break
end
- startnext = getnext(start)
- head = remove_node(head,start)
- local next = getnext(current)
- setlink(start,next)
- setlink(current,start)
- start = startnext
- startattr = getprop(start,a_syllabe)
+ current = getnext(current)
+ else
break
end
- current = getnext(current)
end
if not startnext then
current = getnext(start)
- while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do --step 4
- if getprop(current,a_state) == s_pstf then --post-base
- startnext = getnext(start)
- head = remove_node(head,start)
- local prev = getprev(current)
- setlink(prev,start)
- setlink(start,"next",current)
- start = startnext
- startattr = getprop(start,a_syllabe)
+ while current do
+ local char = ischar(current,font)
+ if char and getprop(current,a_syllabe) == startattr then -- step 4
+ if getprop(current,a_state) == s_pstf then -- post-base
+ startnext = getnext(start)
+ head = remove_node(head,start)
+ local prev = getprev(current)
+ setlink(prev,start)
+ setlink(start,current)
+ start = startnext
+ startattr = getprop(start,a_syllabe)
+ break
+ end
+ current = getnext(current)
+ else
break
end
- current = getnext(current)
end
end
- -- ToDo: determine position for reph with reordering position other than 'before postscript'
+ -- todo: determine position for reph with reordering position other than 'before postscript'
-- (required for scripts other than dev2)
-- leaks
if not startnext then
current = getnext(start)
local c = nil
- while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do --step 5
- if not c then
- local char = getchar(current)
- -- todo: combine in one
- if mark_above_below_post[char] and reorder_class[char] ~= "after subscript" then
+ while current do
+ local char = ischar(current,font)
+ if char and getprop(current,a_syllabe) == startattr then -- step 5
+ if not c and mark_above_below_post[char] and reorder_class[char] ~= "after subscript" then
c = current
end
+ current = getnext(current)
+ else
+ break
end
- current = getnext(current)
end
         -- here we can lose the old start node: maybe best to split cases
if c then
@@ -1249,9 +1252,14 @@ function handlers.devanagari_reorder_reph(head,start)
if not startnext then
current = start
local next = getnext(current)
- while next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == startfont and getprop(next,a_syllabe) == startattr do --step 6
- current = next
- next = getnext(current)
+ while next do
+ local nextchar = ischar(next,font)
+ if nextchar and getprop(next,a_syllabe) == startattr then --step 6
+ current = next
+ next = getnext(current)
+ else
+ break
+ end
end
if start ~= current then
startnext = getnext(start)
@@ -1278,56 +1286,96 @@ end
-- UNTESTED: NOT CALLED IN EXAMPLE
function handlers.devanagari_reorder_pre_base_reordering_consonants(head,start)
- local current = start
+ local current = start
local startnext = nil
local startprev = nil
local startfont = getfont(start)
local startattr = getprop(start,a_syllabe)
-- can be fast for loop + caching state
- while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do
- local next = getnext(current)
- if halant[getchar(current)] and not getprop(current,a_state) then
- if next and getid(next) == glyph_code and getsubtype(next) < 256 and getfont(next) == font and getprop(next,a_syllabe) == startattr then
- local char = getchar(next)
- if char == c_zwnj or char == c_zwj then
- current = next
+ while current do
+ local char = ischar(current,font)
+ if char and getprop(current,a_syllabe) == startattr then
+ local next = getnext(current)
+ if halant[char] and not getprop(current,a_state) then
+ if next then
+ local nextchar = ischar(next,font)
+ if nextchar and getprop(next,a_syllabe) == startattr then
+ if nextchar == c_zwnj or nextchar == c_zwj then
+ current = next
+ next = getnext(current)
+ end
+ end
end
+ startnext = getnext(start)
+ removenode(start,start)
+ setlink(start,next)
+ setlink(current,start)
+ start = startnext
+ break
end
- startnext = getnext(start)
- removenode(start,start)
- local next = getnext(current)
- setlink(start,next)
- setlink(current,start)
- start = startnext
+ current = next
+ else
break
end
- current = next
end
if not startnext then
- current = getnext(start)
+ current = getnext(start)
startattr = getprop(start,a_syllabe)
- while current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == startfont and getprop(current,a_syllabe) == startattr do
- if not consonant[getchar(current)] and getprop(current,a_state) then --main
- startnext = getnext(start)
- removenode(start,start)
- local prev = getprev(current)
- setlink(prev,"next",start)
- setlink(start,"next",current)
- start = startnext
+ while current do
+ local char = ischar(current,font)
+ if char and getprop(current,a_syllabe) == startattr then
+ if not consonant[char] and getprop(current,a_state) then -- main
+ startnext = getnext(start)
+ removenode(start,start)
+ local prev = getprev(current)
+ setlink(start,prev)
+ setlink(start,current)
+ start = startnext
+ break
+ end
+ current = getnext(current)
+ else
break
end
- current = getnext(current)
end
end
return head, start, true
end
-function handlers.devanagari_remove_joiners(head,start)
+-- function handlers.devanagari_remove_joiners(head,start,kind,lookupname,replacement)
+-- local stop = getnext(start)
+-- local font = getfont(start)
+-- while stop do
+-- local char = ischar(stop)
+-- if char and (char == c_zwnj or char == c_zwj) then
+-- stop = getnext(stop)
+-- else
+-- break
+-- end
+-- end
+-- if stop then
+-- setnext(getprev(stop))
+-- setprev(stop,getprev(start))
+-- end
+-- local prev = getprev(start)
+-- if prev then
+-- setnext(prev,stop)
+-- end
+-- if head == start then
+-- head = stop
+-- end
+-- flush_list(start)
+-- return head, stop, true
+-- end
+
+function handlers.devanagari_remove_joiners(head,start,kind,lookupname,replacement)
local stop = getnext(start)
- local startfont = getfont(start)
- while stop and getid(stop) == glyph_code and getsubtype(stop) < 256 and getfont(stop) == startfont do
- local char = getchar(stop)
- if char == c_zwnj or char == c_zwj then
+ local font = getfont(start)
+ local last = start
+ while stop do
+ local char = ischar(stop,font)
+ if char and (char == c_zwnj or char == c_zwj) then
+ last = stop
stop = getnext(stop)
else
break
@@ -1335,9 +1383,11 @@ function handlers.devanagari_remove_joiners(head,start)
end
local prev = getprev(start)
if stop then
- setnext(getprev(stop))
+ setnext(last)
+ setlink(prev,stop)
+ elseif prev then
+ setnext(prev)
end
- setlink(prev,stop)
if head == start then
head = stop
end
@@ -1624,16 +1674,16 @@ local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pa
local extra = copy_node(current)
copyinjection(extra,current)
char = tpm[1]
- setchar(current,"char",char)
- setchar(extra,"char",tpm[2])
+ setchar(current,char)
+ setchar(extra,tpm[2])
head = insert_node_after(head,current,extra)
end
--
if not moved[current] and dependent_vowel[char] then
if pre_mark[char] then -- Before first half form in the syllable
moved[current] = true
- local prev = getprev(current)
- local next = getnext(current)
+ -- can be helper to remove one node
+ local prev, next = getboth(current)
setlink(prev,next)
if current == stop then
stop = getprev(current)
@@ -1747,47 +1797,59 @@ local function analyze_next_chars_one(c,font,variant) -- skip one dependent vowe
return c
end
if variant == 1 then
- local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
- if v and nukta[getchar(n)] then
+ local v = ischar(n,font)
+ if v and nukta[v] then
n = getnext(n)
if n then
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = ischar(n,font)
end
end
if n and v then
local nn = getnext(n)
- if nn and getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font then
- local nnn = getnext(nn)
- if nnn and getid(nnn) == glyph_code and getsubtype(nnn) < 256 and getfont(nnn) == font then
- local nnc = getchar(nn)
- local nnnc = getchar(nnn)
- if nnc == c_zwj and consonant[nnnc] then
- c = nnn
- elseif (nnc == c_zwnj or nnc == c_zwj) and halant[nnnc] then
- local nnnn = getnext(nnn)
- if nnnn and getid(nnnn) == glyph_code and consonant[getchar(nnnn)] and getsubtype(nnnn) < 256 and getfont(nnnn) == font then
- c = nnnn
+ if nn then
+ local vv = ischar(nn,font)
+ if vv then
+ local nnn = getnext(nn)
+ if nnn then
+ local vvv = ischar(nnn,font)
+ if vvv then
+ if vv == c_zwj and consonant[vvv] then
+ c = nnn
+ elseif (vv == c_zwnj or vv == c_zwj) and halant[vvv] then
+ local nnnn = getnext(nnn)
+ if nnnn then
+ local vvvv = ischar(nnnn)
+ if vvvv and consonant[vvvv] then
+ c = nnnn
+ end
+ end
+ end
end
end
end
end
end
elseif variant == 2 then
- if getid(n) == glyph_code and nukta[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
+ local v = ischar(n,font)
+ if v and nukta[v] then
c = n
end
n = getnext(c)
- if n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font then
- local nn = getnext(n)
- if nn then
- local nv = getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font
- if nv and zw_char[getchar(n)] then
- n = nn
- nn = getnext(nn)
- nv = nn and getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font
- end
- if nv and halant[getchar(n)] and consonant[getchar(nn)] then
- c = nn
+ if n then
+ v = ischar(n,font)
+ if v then
+ local nn = getnext(n)
+ if nn then
+ local vv = ischar(nn,font)
+ if vv and zw_char[vv] then
+ n = nn
+ v = vv
+ nn = getnext(nn)
+ vv = nn and ischar(nn,font)
+ end
+ if vv and halant[v] and consonant[vv] then
+ c = nn
+ end
end
end
end
@@ -1797,72 +1859,66 @@ local function analyze_next_chars_one(c,font,variant) -- skip one dependent vowe
if not n then
return c
end
- local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ local v = ischar(n,font)
if not v then
return c
end
- local char = getchar(n)
- if dependent_vowel[char] then
+ if dependent_vowel[v] then
c = getnext(c)
n = getnext(c)
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = ischar(n,font)
if not v then
return c
end
- char = getchar(n)
end
- if nukta[char] then
+ if nukta[v] then
c = getnext(c)
n = getnext(c)
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = ischar(n,font)
if not v then
return c
end
- char = getchar(n)
end
- if halant[char] then
+ if halant[v] then
c = getnext(c)
n = getnext(c)
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = ischar(n,font)
if not v then
return c
end
- char = getchar(n)
end
- if vowel_modifier[char] then
+ if vowel_modifier[v] then
c = getnext(c)
n = getnext(c)
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = ischar(n,font)
if not v then
return c
end
- char = getchar(n)
end
- if stress_tone_mark[char] then
+ if stress_tone_mark[v] then
c = getnext(c)
n = getnext(c)
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = ischar(n,font)
if not v then
return c
end
- char = getchar(n)
end
- if stress_tone_mark[char] then
+ if stress_tone_mark[v] then
return n
else
return c
@@ -1874,37 +1930,56 @@ local function analyze_next_chars_two(c,font)
if not n then
return c
end
- if getid(n) == glyph_code and nukta[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
+ local v = ischar(n,font)
+ if v and nukta[v] then
c = n
end
n = c
while true do
local nn = getnext(n)
- if nn and getid(nn) == glyph_code and getsubtype(nn) < 256 and getfont(nn) == font then
- local char = getchar(nn)
- if halant[char] then
- n = nn
- local nnn = getnext(nn)
- if nnn and getid(nnn) == glyph_code and zw_char[getchar(nnn)] and getsubtype(nnn) < 256 and getfont(nnn) == font then
- n = nnn
- end
- elseif char == c_zwnj or char == c_zwj then
- -- n = nn -- not here (?)
- local nnn = getnext(nn)
- if nnn and getid(nnn) == glyph_code and halant[getchar(nnn)] and getsubtype(nnn) < 256 and getfont(nnn) == font then
- n = nnn
+ if nn then
+ local vv = ischar(nn,font)
+ if vv then
+ if halant[vv] then
+ n = nn
+ local nnn = getnext(nn)
+ if nnn then
+ local vvv = ischar(nnn,font)
+ if vvv and zw_char[vvv] then
+ n = nnn
+ end
+ end
+ elseif vv == c_zwnj or vv == c_zwj then
+ -- n = nn -- not here (?)
+ local nnn = getnext(nn)
+ if nnn then
+ local vvv = ischar(nnn,font)
+ if vvv and halant[vvv] then
+ n = nnn
+ end
+ end
+ else
+ break
end
- else
- break
- end
- local nn = getnext(n)
- if nn and getid(nn) == glyph_code and consonant[getchar(nn)] and getsubtype(nn) < 256 and getfont(nn) == font then
- n = nn
- local nnn = getnext(nn)
- if nnn and getid(nnn) == glyph_code and nukta[getchar(nnn)] and getsubtype(nnn) < 256 and getfont(nnn) == font then
- n = nnn
+ local nn = getnext(n)
+ if nn then
+ local vv = ischar(nn,font)
+ if vv and consonant[vv] then
+ n = nn
+ local nnn = getnext(nn)
+ if nnn then
+ local vvv = ischar(nnn,font)
+ if vvv and nukta[vvv] then
+ n = nnn
+ end
+ end
+ c = n
+ else
+ break
+ end
+ else
+ break
end
- c = n
else
break
end
@@ -1921,112 +1996,103 @@ local function analyze_next_chars_two(c,font)
if not n then
return c
end
- local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ local v = ischar(n,font)
if not v then
return c
end
- local char = getchar(n)
- if char == c_anudatta then
+ if v == c_anudatta then
c = n
n = getnext(c)
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = ischar(n,font)
if not v then
return c
end
- char = getchar(n)
end
- if halant[char] then
- c = getnext(c)
+ if halant[v] then
+ c = n
n = getnext(c)
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = ischar(n,font)
if not v then
return c
end
- char = getchar(n)
- if char == c_zwnj or char == c_zwj then
- c = getnext(c)
+ if v == c_zwnj or v == c_zwj then
+ c = n
n = getnext(c)
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = ischar(n,font)
if not v then
return c
end
- char = getchar(n)
end
else
-- c = ms_matra(c)
-- same as one
- if dependent_vowel[char] then
- c = getnext(c)
+ if dependent_vowel[v] then
+ c = n
n = getnext(c)
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = ischar(n,font)
if not v then
return c
end
- char = getchar(n)
end
- if nukta[char] then
- c = getnext(c)
+ if nukta[v] then
+ c = n
n = getnext(c)
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = ischar(n,font)
if not v then
return c
end
- char = getchar(n)
end
- if halant[char] then
- c = getnext(c)
+ if halant[v] then
+ c = n
n = getnext(c)
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = ischar(n,font)
if not v then
return c
end
- char = getchar(n)
end
end
-- same as one
- if vowel_modifier[char] then
- c = getnext(c)
+ if vowel_modifier[v] then
+ c = n
n = getnext(c)
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = ischar(n,font)
if not v then
return c
end
- char = getchar(n)
end
- if stress_tone_mark[char] then
- c = getnext(c)
+ if stress_tone_mark[v] then
+ c = n
n = getnext(c)
if not n then
return c
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = ischar(n,font)
if not v then
return c
end
- char = getchar(n)
end
- if stress_tone_mark[char] then
+ if stress_tone_mark[v] then
return n
else
return c
@@ -2054,29 +2120,41 @@ function methods.deva(head,font,attr)
local done = false
local nbspaces = 0
while current do
- if getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font then
+ local char = ischar(current,font)
+ if char then
done = true
local syllablestart = current
- local syllableend = nil
+ local syllableend = nil
local c = current
local n = getnext(c)
- if n and ra[getchar(c)] and getid(n) == glyph_code and halant[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
- local n = getnext(n)
- if n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font then
- c = n
+ local first = char
+ if n and ra[first] then
+ local second = ischar(n,font)
+ if second and halant[second] then
+ local n = getnext(n)
+ if n then
+ local third = ischar(n,font)
+ if third then
+ c = n
+ first = third
+ end
+ end
end
end
- local standalone = getchar(c) == c_nbsp
+ local standalone = first == c_nbsp
if standalone then
local prev = getprev(current)
- if not prev then
- -- begin of paragraph or box
- elseif getid(prev) ~= glyph_code or getsubtype(prev) >= 256 or getfont(prev) ~= font then
- -- different font or language so quite certainly a different word
- elseif not separator[getchar(prev)] then
- -- something that separates words
+ if prev then
+ local prevchar = ischar(prev,font)
+ if not prevchar then
+ -- different font or language so quite certainly a different word
+ elseif not separator[prevchar] then
+ -- something that separates words
+ else
+ standalone = false
+ end
else
- standalone = false
+ -- begin of paragraph or box
end
end
if standalone then
@@ -2091,7 +2169,6 @@ function methods.deva(head,font,attr)
-- we can delay the getsubtype(n) and getfont(n) and test for say halant first
             -- as a table access is faster than two function calls (subtype and font are
-- pseudo fields) but the code becomes messy (unless we make it a function)
- local char = getchar(current)
if consonant[char] then
-- syllable containing consonant
local prevc = true
@@ -2101,64 +2178,66 @@ function methods.deva(head,font,attr)
if not n then
break
end
- local v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ local v = ischar(n,font)
if not v then
break
end
- local c = getchar(n)
- if nukta[c] then
+ if nukta[v] then
n = getnext(n)
if not n then
break
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = ischar(n,font)
if not v then
break
end
- c = getchar(n)
end
- if halant[c] then
+ if halant[v] then
n = getnext(n)
if not n then
break
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = ischar(n,font)
if not v then
break
end
- c = getchar(n)
- if c == c_zwnj or c == c_zwj then
+ if v == c_zwnj or v == c_zwj then
n = getnext(n)
if not n then
break
end
- v = getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font
+ v = ischar(n,font)
if not v then
break
end
- c = getchar(n)
end
- if consonant[c] then
+ if consonant[v] then
prevc = true
current = n
end
end
end
local n = getnext(current)
- if n and getid(n) == glyph_code and nukta[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
- -- nukta (not specified in Microsft Devanagari OpenType specification)
- current = n
- n = getnext(current)
+ if n then
+ local v = ischar(n,font)
+ if v and nukta[v] then
+                -- nukta (not specified in Microsoft Devanagari OpenType specification)
+ current = n
+ n = getnext(current)
+ end
end
syllableend = current
current = n
if current then
- local v = getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
- if v then
- if halant[getchar(current)] then
- -- syllable containing consonant without vowels: {C + [Nukta] + H} + C + H
- local n = getnext(current)
- if n and getid(n) == glyph_code and zw_char[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
+ local v = ischar(current,font)
+ if not v then
+ -- skip
+ elseif halant[v] then
+ -- syllable containing consonant without vowels: {C + [Nukta] + H} + C + H
+ local n = getnext(current)
+ if n then
+ local v = ischar(n,font)
+ if v and zw_char[v] then
-- code collapsed, probably needs checking with intention
syllableend = n
current = getnext(n)
@@ -2167,28 +2246,24 @@ function methods.deva(head,font,attr)
current = n
end
else
- -- syllable containing consonant with vowels: {C + [Nukta] + H} + C + [M] + [VM] + [SM]
- local c = getchar(current)
- if dependent_vowel[c] then
- syllableend = current
- current = getnext(current)
- v = current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
- if v then
- c = getchar(current)
- end
- end
- if v and vowel_modifier[c] then
- syllableend = current
- current = getnext(current)
- v = current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
- if v then
- c = getchar(current)
- end
- end
- if v and stress_tone_mark[c] then
- syllableend = current
- current = getnext(current)
- end
+ syllableend = current
+ current = n
+ end
+ else
+ -- syllable containing consonant with vowels: {C + [Nukta] + H} + C + [M] + [VM] + [SM]
+ if dependent_vowel[v] then
+ syllableend = current
+ current = getnext(current)
+ v = ischar(current,font)
+ end
+ if v and vowel_modifier[v] then
+ syllableend = current
+ current = getnext(current)
+ v = ischar(current,font)
+ end
+ if v and stress_tone_mark[v] then
+ syllableend = current
+ current = getnext(current)
end
end
end
@@ -2201,18 +2276,14 @@ function methods.deva(head,font,attr)
syllableend = current
current = getnext(current)
if current then
- local v = getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
+ local v = ischar(current,font)
if v then
- local c = getchar(current)
- if vowel_modifier[c] then
+ if vowel_modifier[v] then
syllableend = current
current = getnext(current)
- v = current and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font
- if v then
- c = getchar(current)
- end
+ v = ischar(current,font)
end
- if v and stress_tone_mark[c] then
+ if v and stress_tone_mark[v] then
syllableend = current
current = getnext(current)
end
@@ -2252,19 +2323,27 @@ function methods.dev2(head,font,attr)
local syllabe = 0
local nbspaces = 0
while current do
- local syllablestart, syllableend = nil, nil
- if getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font then
+ local syllablestart = nil
+ local syllableend = nil
+ local char = ischar(current,font)
+ if char then
done = true
syllablestart = current
local c = current
local n = getnext(current)
- if n and ra[getchar(c)] and getid(n) == glyph_code and halant[getchar(n)] and getsubtype(n) < 256 and getfont(n) == font then
- local n = getnext(n)
- if n and getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font then
- c = n
+ if n and ra[char] then
+ local nextchar = ischar(n,font)
+ if nextchar and halant[nextchar] then
+ local n = getnext(n)
+ if n then
+ local nextnextchar = ischar(n,font)
+ if nextnextchar then
+ c = n
+ char = nextnextchar
+ end
+ end
end
end
- local char = getchar(c)
if independent_vowel[char] then
-- vowel-based syllable: [Ra+H]+V+[N]+[<[<ZWJ|ZWNJ>]+H+C|ZWJ+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
current = analyze_next_chars_one(c,font,1)
@@ -2276,7 +2355,7 @@ function methods.dev2(head,font,attr)
local p = getprev(current)
if not p then
-- begin of paragraph or box
- elseif getid(p) ~= glyph_code or getsubtype(p) >= 256 or getfont(p) ~= font then
+                elseif not ischar(p,font) then
-- different font or language so quite certainly a different word
elseif not separator[getchar(p)] then
-- something that separates words
@@ -2309,10 +2388,13 @@ function methods.dev2(head,font,attr)
if syllableend and syllablestart ~= syllableend then
head, current, nbspaces = dev2_reorder(head,syllablestart,syllableend,font,attr,nbspaces)
end
- if not syllableend and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font and not getprop(current,a_state) then
- local mark = mark_four[getchar(current)]
- if mark then
- head, current = inject_syntax_error(head,current,mark)
+ if not syllableend then
+ local char = ischar(current,font)
+ if char and not getprop(current,a_state) then
+ local mark = mark_four[char]
+ if mark then
+ head, current = inject_syntax_error(head,current,mark)
+ end
end
end
start = false
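
Throughout font-osd.lua the repeated test getid(n) == glyph_code and getsubtype(n) < 256 and getfont(n) == font is collapsed into a single ischar(n,font) call whose first return value doubles as the character for the subsequent table lookups. A minimal plain-table model of that contract, matching how the patched loops use it (the character for the right font, false for a glyph from another font, nil plus the id for anything else); the numeric glyph id here is assumed, only for this sketch:

    local glyph_code = 29 -- assumed id, only for this sketch

    local function ischar(n,font)
        if n.id ~= glyph_code or n.subtype >= 256 then
            return nil, n.id       -- not a processable glyph at all
        elseif font and n.font ~= font then
            return false, n.id     -- a glyph, but from another font
        else
            return n.char, n.id    -- the character, ready for halant[char] etc.
        end
    end

    local n = { id = glyph_code, subtype = 0, font = 42, char = 0x0915 } -- devanagari KA
    print(ischar(n,42))  --> 2325  29
    print(ischar(n,43))  --> false 29
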
diff --git a/tex/context/base/mkiv/font-ota.lua b/tex/context/base/mkiv/font-ota.lua
index 08f69f92a..6a3804a74 100644
--- a/tex/context/base/mkiv/font-ota.lua
+++ b/tex/context/base/mkiv/font-ota.lua
@@ -35,19 +35,20 @@ local tonut = nuts.tonut
local getfield = nuts.getfield
local getnext = nuts.getnext
local getprev = nuts.getprev
-local getid = nuts.getid
+local getprev = nuts.getprev
local getprop = nuts.getprop
local setprop = nuts.setprop
local getfont = nuts.getfont
local getsubtype = nuts.getsubtype
local getchar = nuts.getchar
+local ischar = nuts.is_char
local traverse_id = nuts.traverse_id
local traverse_node_list = nuts.traverse
local end_of_math = nuts.end_of_math
local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
+----- glyph_code = nodecodes.glyph
local disc_code = nodecodes.disc
local math_code = nodecodes.math
@@ -75,7 +76,10 @@ local s_rest = 6
local states = {
init = s_init,
medi = s_medi,
+ med2 = s_medi,
fina = s_fina,
+ fin2 = s_fina,
+ fin3 = s_fina,
isol = s_isol,
mark = s_mark,
rest = s_rest,
@@ -89,7 +93,10 @@ local states = {
local features = {
init = s_init,
medi = s_medi,
+ med2 = s_medi,
fina = s_fina,
+ fin2 = s_fina,
+ fin3 = s_fina,
isol = s_isol,
-- mark = s_mark,
-- rest = s_rest,
@@ -114,10 +121,9 @@ function analyzers.setstate(head,font)
local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
current = tonut(current)
while current do
- local id = getid(current)
- if id == glyph_code and getfont(current) == font then
+ local char, id = ischar(current,font)
+ if char and not getprop(current,a_state) then
done = true
- local char = getchar(current)
local d = descriptions[char]
if d then
if d.class == "mark" then
@@ -141,6 +147,17 @@ function analyzers.setstate(head,font)
end
first, last, n = nil, nil, 0
end
+ elseif char == false then
+ -- other font
+ if first and first == last then
+ setprop(last,a_state,s_isol)
+ elseif last then
+ setprop(last,a_state,s_fina)
+ end
+ first, last, n = nil, nil, 0
+ if id == math_code then
+ current = end_of_math(current)
+ end
elseif id == disc_code then
-- always in the middle .. it doesn't make much sense to assign a property
-- here ... we might at some point decide to flag the components when present
@@ -240,38 +257,43 @@ local mappers = {
u = s_isol, -- nonjoiner
}
-local classifiers = { } -- we can also use this trick for devanagari
-
-local first_arabic, last_arabic = characters.blockrange("arabic")
-local first_syriac, last_syriac = characters.blockrange("syriac")
-local first_mandiac, last_mandiac = characters.blockrange("mandiac")
-local first_nko, last_nko = characters.blockrange("nko")
-
-table.setmetatableindex(classifiers,function(t,k)
- local c = chardata[k]
- local v = false
- if c then
- local arabic = c.arabic
- if arabic then
- v = mappers[arabic]
- if not v then
- log.report("analyze","error in mapping arabic %C",k)
- -- error
- v = false
- end
- elseif k >= first_arabic and k <= last_arabic or k >= first_syriac and k <= last_syriac or
- k >= first_mandiac and k <= last_mandiac or k >= first_nko and k <= last_nko then
- if categories[k] == "mn" then
- v = s_mark
- else
- v = s_rest
+-- we can also use this trick for devanagari
+
+local classifiers = characters.classifiers
+
+if not classifiers then
+
+ local first_arabic, last_arabic = characters.blockrange("arabic")
+ local first_syriac, last_syriac = characters.blockrange("syriac")
+ local first_mandiac, last_mandiac = characters.blockrange("mandiac")
+ local first_nko, last_nko = characters.blockrange("nko")
+
+ classifiers = table.setmetatableindex(function(t,k)
+ local c = chardata[k]
+ local v = false
+ if c then
+ local arabic = c.arabic
+ if arabic then
+ v = mappers[arabic]
+ if not v then
+ log.report("analyze","error in mapping arabic %C",k)
+ -- error
+ v = false
+ end
+ elseif k >= first_arabic and k <= last_arabic or k >= first_syriac and k <= last_syriac or
+ k >= first_mandiac and k <= last_mandiac or k >= first_nko and k <= last_nko then
+ if categories[k] == "mn" then
+ v = s_mark
+ else
+ v = s_rest
+ end
end
- else
end
- end
- t[k] = v
- return v
-end)
+ t[k] = v
+ return v
+ end)
+
+end
function methods.arab(head,font,attr)
local first, last = nil, nil
@@ -279,10 +301,9 @@ function methods.arab(head,font,attr)
local current, done = head, false
current = tonut(current)
while current do
- local id = getid(current)
- if id == glyph_code and getfont(current) == font and getsubtype(current)<256 and not getprop(current,a_state) then
+ local char, id = ischar(current,font)
+ if char and not getprop(current,a_state) then
done = true
- local char = getchar(current)
local classifier = classifiers[char]
if not classifier then
if last then
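
When char-def.lua does not already provide characters.classifiers, the analyzer now builds the classifier table lazily: the __index handler computes the joining class of a codepoint on first use and caches it in the table. The same memoization idiom in stand-alone Lua, with table.setmetatableindex written out via setmetatable and the classification rule reduced to a toy range check:

    -- stand-in for ConTeXt's table.setmetatableindex(f)
    local function setmetatableindex(f)
        return setmetatable({ }, { __index = f })
    end

    local calls = 0
    local classifiers = setmetatableindex(function(t,k)
        calls = calls + 1
        -- toy rule: anything in the basic Arabic block is "arabic", the rest is false
        local v = (k >= 0x0600 and k <= 0x06FF) and "arabic" or false
        t[k] = v   -- cache, exactly as the patched classifier function does
        return v
    end)

    print(classifiers[0x0627], classifiers[0x0627], calls) --> arabic  arabic  1
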
diff --git a/tex/context/base/mkiv/font-otj.lua b/tex/context/base/mkiv/font-otj.lua
index 678afa64c..aae70d1f3 100644
--- a/tex/context/base/mkiv/font-otj.lua
+++ b/tex/context/base/mkiv/font-otj.lua
@@ -30,20 +30,27 @@ local next, rawget = next, rawget
local utfchar = utf.char
local fastcopy = table.fastcopy
-local trace_injections = false trackers.register("fonts.injections", function(v) trace_injections = v end)
-local trace_marks = false trackers.register("fonts.injections.marks", function(v) trace_marks = v end)
-local trace_cursive = false trackers.register("fonts.injections.cursive", function(v) trace_cursive = v end)
+local registertracker = trackers.register
+
+local trace_injections = false registertracker("fonts.injections", function(v) trace_injections = v end)
+local trace_marks = false registertracker("fonts.injections.marks", function(v) trace_marks = v end)
+local trace_cursive = false registertracker("fonts.injections.cursive", function(v) trace_cursive = v end)
+local trace_spaces = false registertracker("otf.spaces", function(v) trace_spaces = v end)
-- use_advance is just an experiment: it makes copying glyphs (instead of new_glyph) dangerous
local use_advance = false directives.register("fonts.injections.advance", function(v) use_advance = v end)
local report_injections = logs.reporter("fonts","injections")
+local report_spaces = logs.reporter("fonts","spaces")
local attributes, nodes, node = attributes, nodes, node
fonts = fonts
-local fontdata = fonts.hashes.identifiers
+local hashes = fonts.hashes
+local fontdata = hashes.identifiers
+local parameters = fonts.hashes.parameters
+local resources = fonts.hashes.resources
nodes.injections = nodes.injections or { }
local injections = nodes.injections
@@ -56,6 +63,7 @@ local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local disc_code = nodecodes.disc
local kern_code = nodecodes.kern
+local glue_code = nodecodes.glue
local nuts = nodes.nuts
local nodepool = nuts.pool
@@ -73,6 +81,9 @@ local getid = nuts.getid
local getfont = nuts.getfont
local getsubtype = nuts.getsubtype
local getchar = nuts.getchar
+local getboth = nuts.getboth
+
+local ischar = nuts.is_char
local getdisc = nuts.getdisc
local setdisc = nuts.setdisc
@@ -539,14 +550,17 @@ local function inject_kerns_only(head,where)
if trace_injections then
trace(head,"kerns")
end
- local current = head
- local prev = nil
- local next = nil
- local prevdisc = nil
- local prevglyph = nil
- local pre = nil -- saves a lookup
- local post = nil -- saves a lookup
- local replace = nil -- saves a lookup
+ local current = head
+ local prev = nil
+ local next = nil
+ local prevdisc = nil
+ local prevglyph = nil
+ local pre = nil -- saves a lookup
+ local post = nil -- saves a lookup
+ local replace = nil -- saves a lookup
+ local pretail = nil -- saves a lookup
+ local posttail = nil -- saves a lookup
+ local replacetail = nil -- saves a lookup
while current do
local id = getid(current)
local next = getnext(current)
@@ -576,7 +590,6 @@ local function inject_kerns_only(head,where)
if i then
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- local posttail = find_tail(post)
if use_advance then
setfield(post,"xadvance",leftkern)
else
@@ -592,7 +605,6 @@ local function inject_kerns_only(head,where)
if i then
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- local replacetail = find_tail(replace)
if use_advance then
setfield(replace,"xadvance",leftkern)
else
@@ -601,6 +613,16 @@ local function inject_kerns_only(head,where)
end
end
end
+ else
+ -- local i = rawget(p,"emptyinjections")
+ local i = p.emptyinjections
+ if i then
+ -- glyph|disc|glyph (special case)
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ setfield(prev,"replace",newkern(leftkern)) -- maybe also leftkern
+ end
+ end
end
if done then
setdisc(prevdisc,pre,post,replace)
@@ -611,7 +633,7 @@ local function inject_kerns_only(head,where)
prevdisc = nil
prevglyph = current
elseif id == disc_code then
- pre, post, replace = getdisc(current)
+ pre, post, replace, pretail, posttail, replacetail = getdisc(current,true)
local done = false
if pre then
-- left|pre glyphs|right
@@ -705,14 +727,17 @@ local function inject_pairs_only(head,where)
if trace_injections then
trace(head,"pairs")
end
- local current = head
- local prev = nil
- local next = nil
- local prevdisc = nil
- local prevglyph = nil
- local pre = nil -- saves a lookup
- local post = nil -- saves a lookup
- local replace = nil -- saves a lookup
+ local current = head
+ local prev = nil
+ local next = nil
+ local prevdisc = nil
+ local prevglyph = nil
+ local pre = nil -- saves a lookup
+ local post = nil -- saves a lookup
+ local replace = nil -- saves a lookup
+ local pretail = nil -- saves a lookup
+ local posttail = nil -- saves a lookup
+ local replacetail = nil -- saves a lookup
while current do
local id = getid(current)
local next = getnext(current)
@@ -741,10 +766,10 @@ local function inject_pairs_only(head,where)
local i = p.emptyinjections
if i then
-- glyph|disc|glyph (special case)
+-- is this okay?
local rightkern = i.rightkern
if rightkern and rightkern ~= 0 then
if next and getid(next) == disc_code then
- local replace = getfield(next,"replace")
if replace then
-- error, we expect an empty one
else
@@ -754,7 +779,7 @@ local function inject_pairs_only(head,where)
end
end
end
- if prevdisc and p then
+ if prevdisc then
local done = false
if post then
-- local i = rawget(p,"postinjections")
@@ -762,7 +787,6 @@ local function inject_pairs_only(head,where)
if i then
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- local posttail = find_tail(post)
insert_node_after(post,posttail,newkern(leftkern))
done = true
end
@@ -774,11 +798,19 @@ local function inject_pairs_only(head,where)
if i then
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- local replacetail = find_tail(replace)
insert_node_after(replace,replacetail,newkern(leftkern))
done = true
end
end
+ else
+ local i = p.emptyinjections
+ if i then
+-- new .. okay?
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ setfield(prev,"replace",newkern(leftkern)) -- maybe also leftkern
+ end
+ end
end
if done then
setdisc(prevdisc,pre,post,replace)
@@ -789,7 +821,7 @@ local function inject_pairs_only(head,where)
prevdisc = nil
prevglyph = current
elseif id == disc_code then
- pre, post, replace = getdisc(current)
+ pre, post, replace, pretail, posttail, replacetail = getdisc(current,true)
local done = false
if pre then
-- left|pre glyphs|right
@@ -957,20 +989,23 @@ local function inject_everything(head,where)
if trace_injections then
trace(head,"everything")
end
- local hascursives = nofregisteredcursives > 0
- local hasmarks = nofregisteredmarks > 0
+ local hascursives = nofregisteredcursives > 0
+ local hasmarks = nofregisteredmarks > 0
--
- local current = head
- local last = nil
- local font = font
- local markdata = nil
- local prev = nil
- local next = nil
- local prevdisc = nil
- local prevglyph = nil
- local pre = nil -- saves a lookup
- local post = nil -- saves a lookup
- local replace = nil -- saves a lookup
+ local current = head
+ local last = nil
+ local font = font
+ local markdata = nil
+ local prev = nil
+ local next = nil
+ local prevdisc = nil
+ local prevglyph = nil
+ local pre = nil -- saves a lookup
+ local post = nil -- saves a lookup
+ local replace = nil -- saves a lookup
+ local pretail = nil -- saves a lookup
+ local posttail = nil -- saves a lookup
+ local replacetail = nil -- saves a lookup
--
local cursiveanchor = nil
local minc = 0
@@ -1127,10 +1162,10 @@ local function inject_everything(head,where)
local i = p.emptyinjections
if i then
-- glyph|disc|glyph (special case)
+-- okay?
local rightkern = i.rightkern
if rightkern and rightkern ~= 0 then
if next and getid(next) == disc_code then
- local replace = getfield(next,"replace")
if replace then
-- error, we expect an empty one
else
@@ -1149,7 +1184,6 @@ local function inject_everything(head,where)
if i then
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- local posttail = find_tail(post)
insert_node_after(post,posttail,newkern(leftkern))
done = true
end
@@ -1161,11 +1195,17 @@ local function inject_everything(head,where)
if i then
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- local replacetail = find_tail(replace)
insert_node_after(replace,replacetail,newkern(leftkern))
done = true
end
end
+ else
+ -- local i = rawget(p,"emptyinjections")
+ local i = p.emptyinjections
+                    if i then
+                        local leftkern = i.leftkern
+                        if leftkern and leftkern ~= 0 then
+                            setfield(prev,"replace",newkern(leftkern)) -- maybe also leftkern
+                        end
+                    end
end
if done then
setdisc(prevdisc,pre,post,replace)
@@ -1189,7 +1229,7 @@ local function inject_everything(head,where)
prevdisc = nil
prevglyph = current
elseif id == disc_code then
- pre, post, replace = getdisc(current)
+ pre, post, replace, pretail, posttail, replacetail = getdisc(current,true)
local done = false
if pre then
-- left|pre glyphs|right
@@ -1368,7 +1408,118 @@ local function inject_everything(head,where)
return tonode(head), true
end
+-- space triggers
+
+local triggers = false
+
+function nodes.injections.setspacekerns(font,sequence)
+ if triggers then
+ triggers[font] = sequence
+ else
+ triggers = { [font] = sequence }
+ end
+end
+
+local function injectspaces(head)
+
+ if not triggers then
+ return head, false
+ end
+
+ local lastfont = nil
+ local spacekerns = nil
+ local leftkerns = nil
+ local rightkerns = nil
+ local factor = 0
+ local threshold = 0
+ local leftkern = false
+ local rightkern = false
+
+ local function updatefont(font,trig)
+ -- local resources = resources[font]
+ -- local spacekerns = resources.spacekerns
+ -- if spacekerns then
+ -- leftkerns = spacekerns.left
+ -- rightkerns = spacekerns.right
+ -- end
+ leftkerns = trig.left
+ rightkerns = trig.right
+ local par = parameters[font]
+ factor = par.factor
+ threshold = par.spacing.width - 1 -- get rid of rounding errors
+ lastfont = font
+ end
+
+ for n in traverse_id(glue_code,tonut(head)) do
+ local prev, next = getboth(n)
+ local prevchar = ischar(prev)
+ local nextchar = ischar(next)
+ if nextchar then
+ local font = getfont(next)
+ local trig = triggers[font]
+ if trig then
+ if lastfont ~= font then
+ updatefont(font,trig)
+ end
+ if rightkerns then
+ rightkern = rightkerns[nextchar]
+ end
+ end
+ end
+ if prevchar then
+            local font = getfont(prev)
+ local trig = triggers[font]
+ if trig then
+ if lastfont ~= font then
+ updatefont(font,trig)
+ end
+ if leftkerns then
+ leftkern = leftkerns[prevchar]
+ end
+ end
+ end
+ if leftkern then
+ local old = getfield(n,"width")
+ if old >= threshold then
+ if rightkern then
+ local new = old + (leftkern + rightkern) * factor
+ if trace_spaces then
+ report_spaces("%C [%p -> %p] %C",prevchar,old,new,nextchar)
+ end
+ setfield(n,"width",new)
+ leftkern = false
+ else
+ local new = old + leftkern * factor
+ if trace_spaces then
+ report_spaces("%C [%p -> %p]",prevchar,old,new)
+ end
+ setfield(n,"width",new)
+ end
+ end
+ leftkern = false
+ elseif rightkern then
+ local old = getfield(n,"width")
+ if old >= threshold then
+ local new = old + rightkern * factor
+ if trace_spaces then
+ report_spaces("[%p -> %p] %C",nextchar,old,new)
+ end
+ setfield(n,"width",new)
+ end
+ rightkern = false
+ end
+ end
+
+ triggers = false
+ return head, true
+end
+
+--
+
function injections.handler(head,where)
+ if triggers then
+ head = injectspaces(head)
+ end
if nofregisteredmarks > 0 or nofregisteredcursives > 0 then
return inject_everything(head,where)
elseif nofregisteredpairs > 0 then
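
The new setspacekerns/injectspaces pair lets kern-against-space lookups widen or shrink interword glue: for each glue node the characters on both sides are looked up in the registered left/right tables and the kern values, scaled by the font's factor, are added to the glue width (glue narrower than the space-width threshold is left alone). A stand-alone sketch of just that arithmetic; the kern values, factor and threshold below are invented for illustration, in the patch they come from the registered sequence and the font parameters.

    local leftkerns  = { [string.byte("V")] = -40 }  -- kern between "V" and a following space
    local rightkerns = { [string.byte("A")] = -40 }  -- kern between a space and a following "A"
    local factor     = 10                            -- font units -> scaled points (assumed)
    local threshold  = 150000                        -- roughly the interword space width

    local function adjust(width,prevchar,nextchar)
        if width < threshold then
            return width
        end
        local l = prevchar and leftkerns [prevchar] or 0
        local r = nextchar and rightkerns[nextchar] or 0
        return width + (l + r) * factor
    end

    print(adjust(200000,string.byte("V"),string.byte("A"))) --> 199200
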
diff --git a/tex/context/base/mkiv/font-otl.lua b/tex/context/base/mkiv/font-otl.lua
index 99aae33b5..0deb4bce0 100644
--- a/tex/context/base/mkiv/font-otl.lua
+++ b/tex/context/base/mkiv/font-otl.lua
@@ -53,7 +53,7 @@ local report_otf = logs.reporter("fonts","otf loading")
local fonts = fonts
local otf = fonts.handlers.otf
-otf.version = 3.015 -- beware: also sync font-mis.lua and in mtx-fonts
+otf.version = 3.016 -- beware: also sync font-mis.lua and in mtx-fonts
otf.cache = containers.define("fonts", "otl", otf.version, true)
local otfreaders = otf.readers
@@ -267,6 +267,7 @@ function otf.load(filename,sub,featurefile) -- second argument (format) is gone
otfreaders.compact(data)
otfreaders.rehash(data,"unicodes")
otfreaders.addunicodetable(data)
+ otfreaders.extend(data)
otfreaders.pack(data)
report_otf("loading done")
report_otf("saving %a in cache",filename)
diff --git a/tex/context/base/mkiv/font-otr.lua b/tex/context/base/mkiv/font-otr.lua
index 5839cbfb5..1fc338b38 100644
--- a/tex/context/base/mkiv/font-otr.lua
+++ b/tex/context/base/mkiv/font-otr.lua
@@ -2122,6 +2122,25 @@ function readers.compact(fontdata)
report("the %a helper is not yet implemented","compact")
end
+-- plug in
+
+local extenders = { }
+
+function readers.registerextender(extender)
+ extenders[#extenders+1] = extender
+end
+
+function readers.extend(fontdata)
+ for i=1,#extenders do
+ local extender = extenders[i]
+ local name = extender.name or "unknown"
+ local action = extender.action
+ if action then
+ action(fontdata)
+ end
+ end
+end
+
--
if fonts.hashes then
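
font-otr.lua gains a small plug-in point: tables registered with readers.registerextender are run over the font data by readers.extend, which otf.load in font-otl.lua now calls just before packing the cache entry. The registry itself is only a list of { name, action } tables, so it can be shown stand-alone; inside ConTeXt one would register through fonts.handlers.otf.readers.registerextender and let the loader call extend. The "demo" extender below is purely hypothetical.

    -- stand-alone re-implementation of the registry added to font-otr.lua
    local extenders = { }

    local function registerextender(extender)
        extenders[#extenders+1] = extender
    end

    local function extend(fontdata)
        for i=1,#extenders do
            local action = extenders[i].action
            if action then
                action(fontdata)  -- each extender may decorate the raw font data
            end
        end
    end

    registerextender { name = "demo extender", action = function(d) d.demoextended = true end }

    local fontdata = { }
    extend(fontdata)
    print(fontdata.demoextended) --> true
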
diff --git a/tex/context/base/mkiv/font-ots.lua b/tex/context/base/mkiv/font-ots.lua
index e2cccebe8..15de12867 100644
--- a/tex/context/base/mkiv/font-ots.lua
+++ b/tex/context/base/mkiv/font-ots.lua
@@ -6,24 +6,29 @@ if not modules then modules = { } end modules ['font-ots'] = { -- sequences
license = "see context related readme files",
}
+-- to be checked: discrun doesn't seem to do anything useful now (except run the
+-- check again) so if we need it again we'll do a zwnjrun or so
+
+-- beware, on my development machine we test a slightly more optimized version
+
-- assumptions:
--
-- cursives don't cross discretionaries
-- marks precede bases
-
+--
-- pitfalls:
--
 -- when we append to a disc field we need to set the field in order to update the tail
-
+--
-- This is a version of font-otn.lua adapted to the new font loader code. It
-- is a context version which can contain experimental code, but when we
-- have serious patches we will backport to the font-otn files. There will
-- be a generic variant too.
-
+--
-- todo: looks like we have a leak somewhere (probably in ligatures)
-- todo: copy attributes to disc
-- todo: get rid of components, better use the tounicode entry if needed (at all)
-
+--
-- we do some disc juggling where we need to keep in mind that the
-- pre, post and replace fields can have prev pointers to a nesting
-- node ... i wonder if that is still needed
@@ -94,6 +99,7 @@ results in different tables.</p>
local type, next, tonumber = type, next, tonumber
local random = math.random
local formatters = string.formatters
+local insert = table.insert
local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes
@@ -123,6 +129,7 @@ local trace_directions = false registertracker("otf.directions", function(v
local trace_kernruns = false registertracker("otf.kernruns", function(v) trace_kernruns = v end)
local trace_discruns = false registertracker("otf.discruns", function(v) trace_discruns = v end)
local trace_compruns = false registertracker("otf.compruns", function(v) trace_compruns = v end)
+local trace_testruns = false registertracker("otf.testruns", function(v) trace_testruns = v end)
local quit_on_no_replacement = true -- maybe per font
local zwnjruns = true
@@ -154,25 +161,24 @@ local tonut = nuts.tonut
local getfield = nuts.getfield
local setfield = nuts.setfield
local getnext = nuts.getnext
+local setnext = nuts.setnext
local getprev = nuts.getprev
+local setprev = nuts.setprev
local getboth = nuts.getboth
+local setboth = nuts.setboth
local getid = nuts.getid
local getattr = nuts.getattr
+local setattr = nuts.setattr
local getprop = nuts.getprop
+local setprop = nuts.setprop
local getfont = nuts.getfont
local getsubtype = nuts.getsubtype
+local setsubtype = nuts.setsubtype
local getchar = nuts.getchar
+local setchar = nuts.setchar
local getdisc = nuts.getdisc
-
-local setattr = nuts.setattr
-local setprop = nuts.setprop
local setdisc = nuts.setdisc
-local setnext = nuts.setnext
-local setprev = nuts.setprev
local setlink = nuts.setlink
-local setboth = nuts.setboth
-local setchar = nuts.setchar
-local setsubtype = nuts.setsubtype
local ischar = nuts.is_char
@@ -219,7 +225,6 @@ local privateattribute = attributes.private
-- of only some.
local a_state = privateattribute('state')
-local a_cursbase = privateattribute('cursbase') -- to be checked, probably can go
local injections = nodes.injections
local setmark = injections.setmark
@@ -243,29 +248,38 @@ local onetimemessage = fonts.loggers.onetimemessage or function() end
otf.defaultnodealternate = "none" -- first last
-local handlers = { }
-
-- We use a few global variables. The handler can be called nested but this assumes that the
-- same font is used. Nested calls are normally not needed (only for devanagari).
-local tfmdata = false
-local characters = false
-local descriptions = false
-local marks = false
-local currentfont = false
-local factor = 0
+local tfmdata = false
+local characters = false
+local descriptions = false
+local marks = false
+local currentfont = false
+local factor = 0
+local threshold = 0
+
+local sweepnode = nil
+local sweepprev = nil
+local sweepnext = nil
+local sweephead = { }
-local sweepnode = nil
-local sweepprev = nil
-local sweepnext = nil
-local sweephead = { }
+local notmatchpre = { }
+local notmatchpost = { }
+local notmatchreplace = { }
-local notmatchpre = { }
-local notmatchpost = { }
-local notmatchreplace = { }
+local handlers = { }
--- handlers .whatever(head,start, dataset,sequence,kerns, step,i,injection)
--- chainprocs.whatever(head,start,stop,dataset,sequence,currentlookup,chainindex)
+-- helper
+
+local function isspace(n)
+ if getid(n) == glue_code then
+ local w = getfield(n,"width")
+ if w >= threshold then
+ return 32
+ end
+ end
+end
-- we use this for special testing and documentation
@@ -361,99 +375,6 @@ local function copy_glyph(g) -- next and prev are untouched !
end
end
--- temp here (context) - watch out: we need to set post/pre/replace in order to update its tail
-
--- local function collapsedisc(start,next)
--- local replace1 = getfield(start,"replace")
--- local replace2 = getfield(next,"replace")
--- if replace1 and replace2 then
--- local pre2 = getfield(next,"pre")
--- local post2 = getfield(next,"post")
--- setprev(replace1,nil)
--- if pre2 then
--- local pre1 = getfield(start,"pre")
--- if pre1 then
--- flush_node_list(pre1)
--- end
--- local pre1 = copy_node_list(replace1)
--- local tail1 = find_node_tail(pre1)
--- setnext(tail1,pre2)
--- setprev(pre2,tail1)
--- setfield(start,"pre",pre1)
--- setfield(next,"pre",nil)
--- else
--- setfield(start,"pre",nil)
--- end
--- if post2 then
--- local post1 = getfield(start,"post")
--- if post1 then
--- flush_node_list(post1)
--- end
--- setfield(start,"post",post2)
--- else
--- setfield(start,"post",nil)
--- end
--- local tail1 = find_node_tail(replace1)
--- setnext(tail1,replace2)
--- setprev(replace2,tail1)
--- setfield(start,"replace",replace1)
--- setfield(next,"replace",nil)
--- --
--- local nextnext = getnext(next)
--- setprev(nextnext,start)
--- setnext(start,nextnext)
--- free_node(next)
--- else
--- -- maybe remove it
--- end
--- end
-
--- local function prependdisc(first,last,prev)
--- local prev = prev or getprev(first)
--- local pre = getfield(last,"pre")
--- local replace = getfield(last,"replace")
--- local rs = getfield(first,"replace")
--- local ps = copy_node_list(rs)
--- local rt = ps and find_node_tail(rs)
--- local pt = rs and find_node_tail(ps)
--- if pre then
--- setprev(pre,pt)
--- setnext(pt,pre)
--- end
--- if replace then
--- setprev(replace,rt)
--- setnext(rt,replace)
--- end
--- setfield(last,"pre",ps)
--- setfield(last,"replace",rs)
--- setfield(first,"replace",nil)
--- free_node(first)
--- setprev(last,prev)
--- setnext(prev,last)
--- return prev -- if nil then last is head
--- end
-
--- local function prependglyph(first,last,prev)
--- local prev = prev or getprev(first)
--- local pre = getfield(last,"pre")
--- local replace = getfield(last,"replace")
--- local rs = first
--- local ps = copy_node(first)
--- if pre then
--- setprev(pre,ps)
--- setnext(ps,pre)
--- end
--- if replace then
--- setprev(replace,rs)
--- setnext(rs,replace)
--- end
--- setfield(last,"pre",ps)
--- setfield(last,"replace",rs)
--- setprev(last,prev)
--- setnext(prev,last)
--- return prev -- if nil then last is head
--- end
-
local function flattendisk(head,disc)
local _, _, replace, _, _, replacetail = getdisc(disc,true)
setfield(disc,"replace",nil)
@@ -566,8 +487,8 @@ local function toligature(head,start,stop,char,dataset,sequence,markflag,discfou
-- needs testing (side effects):
local components = getfield(start,"components")
if components then
--- we get a double free .. needs checking
--- flush_node_list(components)
+ -- we get a double free .. needs checking
+ -- flush_node_list(components)
end
--
local prev = getprev(start)
@@ -582,7 +503,7 @@ local function toligature(head,start,stop,char,dataset,sequence,markflag,discfou
resetinjection(base)
setchar(base,char)
setsubtype(base,ligature_code)
- setfield(base,"components",comp) -- start can have components .. do we need to flush?
+ setfield(base,"components",comp) -- start can have components ... do we need to flush?
if prev then
setnext(prev,base)
end
@@ -619,17 +540,21 @@ local function toligature(head,start,stop,char,dataset,sequence,markflag,discfou
-- we can have one accent as part of a lookup and another following
-- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari was added)
local start = getnext(current)
- while start and getid(start) == glyph_code do
- local char = getchar(start)
- if marks[char] then
- setligaindex(start,baseindex + getligaindex(start,componentindex))
- if trace_marks then
- logwarning("%s: set mark %s, gets index %s",pref(dataset,sequence),gref(char),getligaindex(start))
+ while start do
+ local char = ischar(start)
+ if char then
+ if marks[char] then
+ setligaindex(start,baseindex + getligaindex(start,componentindex))
+ if trace_marks then
+ logwarning("%s: set mark %s, gets index %s",pref(dataset,sequence),gref(char),getligaindex(start))
+ end
+ start = getnext(start)
+ else
+ break
end
else
break
end
- start = getnext(start)
end
else
-- discfound ... forget about marks .. probably no scripts that hyphenate and have marks
@@ -775,9 +700,9 @@ function handlers.gsub_ligature(head,start,dataset,sequence,ligature)
local startchar = getchar(start)
if marks[startchar] then
while current do
- local ch = ischar(current,currentfont)
- if ch then
- local lg = ligature[ch]
+ local char = ischar(current,currentfont)
+ if char then
+ local lg = ligature[char]
if lg then
stop = current
ligature = lg
@@ -809,30 +734,27 @@ function handlers.gsub_ligature(head,start,dataset,sequence,ligature)
local discfound = false
local lastdisc = nil
while current do
- local id = getid(current)
- -- weird test here
- if id == glyph_code then -- not needed
- local char = ischar(current,currentfont)
- if char then
- if skipmark and marks[char] then
- current = getnext(current)
- else -- ligature is a tree
- local lg = ligature[char] -- can there be multiple in a row? maybe in a bad font
- if lg then
- if not discfound and lastdisc then
- discfound = lastdisc
- lastdisc = nil
- end
- stop = current -- needed for fake so outside then
- ligature = lg
- current = getnext(current)
- else
- break
+ local char, id = ischar(current,currentfont)
+ if char then
+ if skipmark and marks[char] then
+ current = getnext(current)
+ else -- ligature is a tree
+ local lg = ligature[char] -- can there be multiple in a row? maybe in a bad font
+ if lg then
+ if not discfound and lastdisc then
+ discfound = lastdisc
+ lastdisc = nil
end
+ stop = current -- needed for fake so outside then
+ ligature = lg
+ current = getnext(current)
+ else
+ break
end
- else
- break
end
+ elseif char == false then
+ -- kind of weird
+ break
elseif id == disc_code then
lastdisc = current
current = getnext(current)
@@ -866,8 +788,6 @@ function handlers.gsub_ligature(head,start,dataset,sequence,ligature)
return head, start, false, discfound
end
--- todo: have this one directly (all are pair now)
-
function handlers.gpos_single(head,start,dataset,sequence,kerns,rlmode,step,i,injection)
local startchar = getchar(start)
if step.format == "pair" then
@@ -938,6 +858,8 @@ function handlers.gpos_pair(head,start,dataset,sequence,kerns,rlmode,step,i,inje
end
done = true
break
+ else -- can't happen
+ break
end
else
break
@@ -952,8 +874,6 @@ end
we need to explicitly test for basechar, baselig and basemark entries.</p>
--ldx]]--
--- can we share with chains if we have a stop == nil ?
-
function handlers.gpos_mark2base(head,start,dataset,sequence,markanchors,rlmode)
local markchar = getchar(start)
if marks[markchar] then
@@ -962,7 +882,7 @@ function handlers.gpos_mark2base(head,start,dataset,sequence,markanchors,rlmode)
local basechar = ischar(base,currentfont)
if basechar then
if marks[basechar] then
- while true do
+ while base do
base = getprev(base)
if base then
basechar = ischar(base,currentfont)
@@ -1006,8 +926,6 @@ function handlers.gpos_mark2base(head,start,dataset,sequence,markanchors,rlmode)
return head, start, false
end
--- ONCE CHECK HERE?
-
function handlers.gpos_mark2ligature(head,start,dataset,sequence,markanchors,rlmode)
local markchar = getchar(start)
if marks[markchar] then
@@ -1016,7 +934,7 @@ function handlers.gpos_mark2ligature(head,start,dataset,sequence,markanchors,rlm
local basechar = ischar(base,currentfont)
if basechar then
if marks[basechar] then
- while true do
+ while base do
base = getprev(base)
if base then
basechar = ischar(base,currentfont)
@@ -1110,49 +1028,41 @@ function handlers.gpos_mark2mark(head,start,dataset,sequence,markanchors,rlmode)
end
function handlers.gpos_cursive(head,start,dataset,sequence,exitanchors,rlmode,step,i) -- to be checked
- local alreadydone = cursonce and getprop(start,a_cursbase)
- if not alreadydone then
- local done = false
- local startchar = getchar(start)
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(dataset,sequence),gref(startchar))
- end
- else
- local nxt = getnext(start)
- while not done and nxt do
- local nextchar = ischar(nxt,currentfont)
- if not nextchar then
- break
- elseif marks[nextchar] then
- -- should not happen (maybe warning)
- nxt = getnext(nxt)
- else
- local exit = exitanchors[3]
- if exit then
- local entry = exitanchors[1][nextchar]
+ local done = false
+ local startchar = getchar(start)
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(dataset,sequence),gref(startchar))
+ end
+ else
+ local nxt = getnext(start)
+ while not done and nxt do
+ local nextchar = ischar(nxt,currentfont)
+ if not nextchar then
+ break
+ elseif marks[nextchar] then
+ -- should not happen (maybe warning)
+ nxt = getnext(nxt)
+ else
+ local exit = exitanchors[3]
+ if exit then
+ local entry = exitanchors[1][nextchar]
+ if entry then
+ entry = entry[2]
if entry then
- entry = entry[2]
- if entry then
- local dx, dy, bound = setcursive(start,nxt,factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in %s mode",pref(dataset,sequence),gref(startchar),gref(nextchar),dx,dy,anchor,bound,mref(rlmode))
- end
- done = true
+ local dx, dy, bound = setcursive(start,nxt,factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in %s mode",pref(dataset,sequence),gref(startchar),gref(nextchar),dx,dy,anchor,bound,mref(rlmode))
end
+ done = true
end
end
- break
end
+ break
end
end
- return head, start, done
- else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(dataset,sequence),gref(getchar(start)),alreadydone)
- end
- return head, start, false
end
+ return head, start, done
end
--[[ldx--
@@ -1221,36 +1131,6 @@ single lookup case. The efficiency of the replacements can be improved by deleti
as little as needed but that would also make the code even more messy.</p>
--ldx]]--
--- local function delete_till_stop(head,start,stop,ignoremarks) -- keeps start
--- local n = 1
--- if start == stop then
--- -- done
--- elseif ignoremarks then
--- repeat -- start x x m x x stop => start m
--- local next = getnext(start)
--- if not marks[getchar(next)] then
--- local components = getfield(next,"components")
--- if components then -- probably not needed
--- flush_node_list(components)
--- end
--- head = delete_node(head,next)
--- end
--- n = n + 1
--- until next == stop
--- else -- start x x x stop => start
--- repeat
--- local next = getnext(start)
--- local components = getfield(next,"components")
--- if components then -- probably not needed
--- flush_node_list(components)
--- end
--- head = delete_node(head,next)
--- n = n + 1
--- until next == stop
--- end
--- return head, n
--- end
-
--[[ldx--
<p>Here we replace start by a single variant.</p>
--ldx]]--
@@ -1271,8 +1151,8 @@ function chainprocs.gsub_single(head,start,stop,dataset,sequence,currentlookup,c
end
local current = start
while current do
- if getid(current) == glyph_code then
- local currentchar = getchar(current)
+ local currentchar = ischar(current)
+ if currentchar then
local replacement = steps[1].coverage[currentchar]
if not replacement or replacement == "" then
if trace_bugs then
@@ -1286,6 +1166,9 @@ function chainprocs.gsub_single(head,start,stop,dataset,sequence,currentlookup,c
setchar(current,replacement)
end
return head, start, true
+ elseif currentchar == false then
+ -- can't happen
+ break
elseif current == stop then
break
else
@@ -1343,8 +1226,8 @@ function chainprocs.gsub_alternate(head,start,stop,dataset,sequence,currentlooku
local value = what == true and tfmdata.shared.features[kind] or what
local current = start
while current do
- if getid(current) == glyph_code then -- is this check needed?
- local currentchar = getchar(current)
+ local currentchar = ischar(current)
+ if currentchar then
local alternatives = steps[1].coverage[currentchar]
if alternatives then
local choice, comment = get_alternative_glyph(current,alternatives,value)
@@ -1361,6 +1244,9 @@ function chainprocs.gsub_alternate(head,start,stop,dataset,sequence,currentlooku
end
end
return head, start, true
+ elseif currentchar == false then
+ -- can't happen
+ break
elseif current == stop then
break
else
@@ -1567,7 +1453,7 @@ function chainprocs.gpos_mark2base(head,start,stop,dataset,sequence,currentlooku
local basechar = ischar(base,currentfont)
if basechar then
if marks[basechar] then
- while true do
+ while base do
base = getprev(base)
if base then
local basechar = ischar(base,currentfont)
@@ -1631,7 +1517,7 @@ function chainprocs.gpos_mark2ligature(head,start,stop,dataset,sequence,currentl
local basechar = ischar(base,currentfont)
if basechar then
if marks[basechar] then
- while true do
+ while base do
base = getprev(base)
if base then
local basechar = ischar(base,currentfont)
@@ -1742,56 +1628,52 @@ function chainprocs.gpos_cursive(head,start,stop,dataset,sequence,currentlookup,
if nofsteps > 1 then
reportmoresteps(dataset,sequence)
end
- local alreadydone = cursonce and getprop(start,a_cursbase) -- also mkmk?
- if not alreadydone then
- local startchar = getchar(start)
- local exitanchors = steps[1].coverage[startchar] -- always 1 step
- if exitanchors then
- local done = false
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(dataset,sequence),gref(startchar))
- end
- else
- local nxt = getnext(start)
- while not done and nxt do
- local nextchar = ischar(nxt,currentfont)
- if not nextchar then
- break
- elseif marks[nextchar] then
- -- should not happen (maybe warning)
- nxt = getnext(nxt)
- else
- local exit = exitanchors[3]
- if exit then
- local entry = exitanchors[1][nextchar]
+ local startchar = getchar(start)
+ local exitanchors = steps[1].coverage[startchar] -- always 1 step
+ if exitanchors then
+ local done = false
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(dataset,sequence),gref(startchar))
+ end
+ else
+ local nxt = getnext(start)
+ while not done and nxt do
+ local nextchar = ischar(nxt,currentfont)
+ if not nextchar then
+ break
+ elseif marks[nextchar] then
+ -- should not happen (maybe warning)
+ nxt = getnext(nxt)
+ else
+ local exit = exitanchors[3]
+ if exit then
+ local entry = exitanchors[1][nextchar]
+ if entry then
+ entry = entry[2]
if entry then
- entry = entry[2]
- if entry then
- local dx, dy, bound = setcursive(start,nxt,factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in %s mode",pref(dataset,sequence),gref(startchar),gref(nextchar),dx,dy,anchor,bound,mref(rlmode))
- end
- done = true
- break
+ local dx, dy, bound = setcursive(start,nxt,factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in %s mode",pref(dataset,sequence),gref(startchar),gref(nextchar),dx,dy,anchor,bound,mref(rlmode))
end
+ done = true
+ break
end
- elseif trace_bugs then
- onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
end
- break
+ elseif trace_bugs then
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
end
+ break
end
end
- return head, start, done
- else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(dataset,sequence),gref(getchar(start)),alreadydone)
- end
- return head, start, false
end
+ return head, start, done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(dataset,sequence),gref(getchar(start)),alreadydone)
+ end
+ return head, start, false
end
- return head, start, false
end
-- what pointer to return, spec says stop
@@ -2178,6 +2060,7 @@ local noflags = { false, false, false, false }
local function handle_contextchain(head,start,dataset,sequence,contexts,rlmode)
local sweepnode = sweepnode
local sweeptype = sweeptype
+ local currentfont = currentfont
local diskseen = false
local checkdisc = getprev(head)
local flags = sequence.flags or noflags
@@ -2198,7 +2081,9 @@ local function handle_contextchain(head,start,dataset,sequence,contexts,rlmode)
if s == 1 then
-- never happens
local char = ischar(current,currentfont)
- match = char and seq[1][char]
+ if char then
+ match = seq[1][char]
+ end
else
-- maybe we need a better space check (maybe check for glue or category or combination)
-- we cannot optimize for n=2 because there can be disc nodes
@@ -2222,33 +2107,22 @@ local function handle_contextchain(head,start,dataset,sequence,contexts,rlmode)
sweeptype = nil
end
if last then
- local id = getid(last)
- if id == glyph_code then
- local char = ischar(last,currentfont)
- if char then
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class or "base"
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(dataset,sequence,char,ck,class)
- end
+ local char, id = ischar(last,currentfont)
+ if char then
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class or "base"
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(dataset,sequence,char,ck,class)
+ end
+ last = getnext(last)
+ elseif seq[n][char] then
+ if n < l then
last = getnext(last)
- elseif seq[n][char] then
- if n < l then
- last = getnext(last)
- end
- n = n + 1
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpre[discfound]
- else
- match = false
- end
- break
end
+ n = n + 1
else
if discfound then
notmatchreplace[discfound] = true
@@ -2267,6 +2141,15 @@ local function handle_contextchain(head,start,dataset,sequence,contexts,rlmode)
end
break
end
+ last = getnext(last)
+ elseif char == false then
+ if discfound then
+ notmatchreplace[discfound] = true
+ match = not notmatchpre[discfound]
+ else
+ match = false
+ end
+ break
elseif id == disc_code then
diskseen = true
discfound = last
@@ -2336,29 +2219,18 @@ local function handle_contextchain(head,start,dataset,sequence,contexts,rlmode)
local n = f - 1
while n >= 1 do
if prev then
- local id = getid(prev)
- if id == glyph_code then
- local char = ischar(prev,currentfont)
- if char then
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(dataset,sequence,char,ck,class)
- end
- elseif seq[n][char] then
- n = n -1
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpost[discfound]
- else
- match = false
- end
- break
+ local char, id = ischar(prev,currentfont)
+ if char then
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(dataset,sequence,char,ck,class)
end
+ elseif seq[n][char] then
+ n = n -1
else
if discfound then
notmatchreplace[discfound] = true
@@ -2377,6 +2249,15 @@ local function handle_contextchain(head,start,dataset,sequence,contexts,rlmode)
end
break
end
+ prev = getprev(prev)
+ elseif char == false then
+ if discfound then
+ notmatchreplace[discfound] = true
+ match = not notmatchpost[discfound]
+ else
+ match = false
+ end
+ break
elseif id == disc_code then
-- the special case: f i where i becomes dottless i ..
diskseen = true
@@ -2439,7 +2320,7 @@ local function handle_contextchain(head,start,dataset,sequence,contexts,rlmode)
-- skip 'm
end
elseif seq[n][32] then
- n = n -1
+ n = n - 1
else
match = false
break
@@ -2474,29 +2355,18 @@ local function handle_contextchain(head,start,dataset,sequence,contexts,rlmode)
local n = l + 1
while n <= s do
if current then
- local id = getid(current)
- if id == glyph_code then
- local char = ischar(current,currentfont)
- if char then
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(dataset,sequence,char,ck,class)
- end
- elseif seq[n][char] then
- n = n + 1
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpre[discfound]
- else
- match = false
- end
- break
+ local char, id = ischar(current,currentfont)
+ if char then
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(dataset,sequence,char,ck,class)
end
+ elseif seq[n][char] then
+ n = n + 1
else
if discfound then
notmatchreplace[discfound] = true
@@ -2515,6 +2385,15 @@ local function handle_contextchain(head,start,dataset,sequence,contexts,rlmode)
end
break
end
+ current = getnext(current)
+ elseif char == false then
+ if discfound then
+ notmatchreplace[discfound] = true
+ match = not notmatchpre[discfound]
+ else
+ match = false
+ end
+ break
elseif id == disc_code then
diskseen = true
discfound = current
@@ -2572,6 +2451,7 @@ local function handle_contextchain(head,start,dataset,sequence,contexts,rlmode)
current = getnext(current)
elseif seq[n][32] then
n = n + 1
+current = getnext(current)
else
match = false
break
@@ -2619,7 +2499,7 @@ local function handle_contextchain(head,start,dataset,sequence,contexts,rlmode)
local i = 1
while start and true do
if skipped then
- while true do -- todo: use properties
+ while start do -- todo: use properties
local char = getchar(start)
local ccd = descriptions[char]
if ccd then
@@ -2739,11 +2619,11 @@ end
local logwarning = report_process
-local function report_missing_cache(dataset,sequence)
+local function report_missing_coverage(dataset,sequence)
local t = missing[currentfont]
if not t[sequence] then
t[sequence] = true
- logwarning("missing cache for feature %a, lookup %a, type %a, font %a, name %a",
+ logwarning("missing coverage for feature %a, lookup %a, type %a, font %a, name %a",
dataset[4],sequence.name,sequence.type,currentfont,tfmdata.properties.fullname)
end
end
@@ -2836,20 +2716,23 @@ function otf.dataset(tfmdata,font) -- generic variant, overloaded in context
return rl
end
--- assumptions:
---
--- * languages that use complex disc nodes
+local function report_disc(what,disc)
+ report_run("%s: %s > %s",what,disc,languages.serializediscretionary(disc))
+end
-local function kernrun(disc,run)
+local function kernrun(disc,k_run,font,attr,...)
--
-- we catch <font 1><disc font 2>
--
if trace_kernruns then
- report_run("kern") -- will be more detailed
+ report_disc("kern")
end
--
local prev, next = getboth(disc)
--
+ local nextstart = next
+ local done = false
+ --
local pre, post, replace, pretail, posttail, replacetail = getdisc(disc,true)
--
local prevmarks = prev
@@ -2858,7 +2741,7 @@ local function kernrun(disc,run)
-- has happened but then it should be in the disc so basically this test indicates an error)
--
while prevmarks do
- local char = ischar(prevmarks,currentfont)
+ local char = ischar(prevmarks,font)
if char and marks[char] then
prevmarks = getprev(prevmarks)
else
@@ -2866,68 +2749,77 @@ local function kernrun(disc,run)
end
end
--
- if prev and (pre or replace) and not ischar(prev,currentfont) then
+ if prev and (pre or replace) and not ischar(prev,font) then
prev = false
end
- if next and (post or replace) and not ischar(next,currentfont) then
+ if next and (post or replace) and not ischar(next,font) then
next = false
end
--
if pre then
- run(pre,"injections")
+ if k_run(pre,"injections",nil,font,attr,...) then
+ done = true
+ end
if prev then
local nest = getprev(pre)
setlink(prev,pre)
- run(prevmarks,"preinjections",pre) -- getnext(pre))
+ if k_run(prevmarks,"preinjections",pre,font,attr,...) then -- getnext(pre))
+ done = true
+ end
setprev(pre,nest)
setnext(prev,disc)
end
end
--
if post then
- run(post,"injections")
+ if k_run(post,"injections",nil,font,attr,...) then
+ done = true
+ end
if next then
setlink(posttail,next)
- run(posttail,"postinjections",next)
+ if k_run(posttail,"postinjections",next,font,attr,...) then
+ done = true
+ end
setnext(posttail,nil)
setprev(next,disc)
end
end
--
if replace then
- run(replace,"injections")
+ if k_run(replace,"injections",nil,font,attr,...) then
+ done = true
+ end
if prev then
local nest = getprev(replace)
setlink(prev,replace)
- run(prevmarks,"replaceinjections",replace) -- getnext(replace))
+ if k_run(prevmarks,"replaceinjections",replace,font,attr,...) then -- getnext(replace))
+ done = true
+ end
setprev(replace,nest)
setnext(prev,disc)
end
if next then
setlink(replacetail,next)
- run(replacetail,"replaceinjections",next)
+ if k_run(replacetail,"replaceinjections",next,font,attr,...) then
+ done = true
+ end
setnext(replacetail,nil)
setprev(next,disc)
end
elseif prev and next then
setlink(prev,next)
- run(prevmarks,"emptyinjections",next)
+ if k_run(prevmarks,"emptyinjections",next,font,attr,...) then
+ done = true
+ end
setlink(prev,disc)
setlink(disc,next)
end
+ return nextstart, done
end
--- the if new test might be dangerous as luatex will check / set some tail stuff
--- in a temp node
-
-local function checkdisc(str,d) -- only used when debugging
- local pre, post, replace = getdisc(d)
- report_check("%s : [%s][%s][%s]",str,nodes.toutf(pre),nodes.toutf(post),nodes.toutf(replace))
-end
-
-local function comprun(disc,run)
+local function comprun(disc,c_run,...)
if trace_compruns then
- report_run("comp: %s",languages.serializediscretionary(disc))
+ report_disc("comp")
end
--
local pre, post, replace = getdisc(disc)
@@ -2935,8 +2827,8 @@ local function comprun(disc,run)
--
if pre then
sweepnode = disc
- sweeptype = "pre" -- in alternative code preinjections is used (also used then for proeprties, saves a variable)
- local new, done = run(pre)
+ sweeptype = "pre" -- in alternative code preinjections is uc_c_sed (also used then for proeprties, saves a variable)
+ local new, done = c_run(pre,...)
if done then
pre = new
renewed = true
@@ -2946,7 +2838,7 @@ local function comprun(disc,run)
if post then
sweepnode = disc
sweeptype = "post"
- local new, done = run(post)
+ local new, done = c_run(post,...)
if done then
post = new
renewed = true
@@ -2956,106 +2848,446 @@ local function comprun(disc,run)
if replace then
sweepnode = disc
sweeptype = "replace"
- local new, done = run(replace)
+ local new, done = c_run(replace,...)
if done then
replace = new
renewed = true
end
end
+ --
sweepnode = nil
sweeptype = nil
if renewed then
setdisc(disc,pre,post,replace)
end
+ --
+ return getnext(disc), renewed
end
-local function testrun(disc,trun,crun) -- use helper
- local next = getnext(disc)
- if next then
- local _, _, replace, _, _, tail = getdisc(disc,true)
- if replace then
- local prev = getprev(disc)
- if prev then
- -- only look ahead
- -- local nest = getprev(replace)
- setlink(tail,next)
- if trun(replace,next) then
- setfield(disc,"replace",nil) -- beware, side effects of nest so first
- setlink(prev,replace)
- setlink(tail,next)
- setboth(disc,nil,nil)
- flush_node_list(disc)
- return replace -- restart
- else
- setnext(tail,nil)
- setprev(next,disc)
+local function testrun(disc,t_run,c_run,...)
+ if trace_testruns then
+ report_disc("test")
+ end
+ local prev, next = getboth(disc)
+ if not next then
+ -- weird discretionary
+ return
+ end
+ local pre, post, replace, pretail, posttail, replacetail = getdisc(disc,true)
+ local done = false
+ if replace and prev then
+ -- only look ahead
+ -- local nest = getprev(replace)
+ setlink(replacetail,next)
+ if t_run(replace,next,...) then
+ setfield(disc,"replace",nil) -- beware, side effects of nest so first
+ setlink(prev,replace)
+ setlink(replacetail,next)
+ setboth(disc)
+ flush_node_list(disc)
+ return replace, true -- restart .. tricky !
+ else
+ setnext(replacetail)
+ setprev(next,disc)
+ end
+ -- pre, post, replace, pretail, posttail, replacetail = getdisc(disc)
+ end
+ --
+ -- like comprun
+ --
+ local renewed = false
+ --
+ if pre then
+ sweepnode = disc
+ sweeptype = "pre"
+ local new, ok = c_run(pre,...)
+ if ok then
+ pre = new
+ renewed = true
+ end
+ end
+ --
+ if post then
+ sweepnode = disc
+ sweeptype = "post"
+ local new, ok = c_run(post,...)
+ if ok then
+ post = new
+ renewed = true
+ end
+ end
+ --
+ if replace then
+ sweepnode = disc
+ sweeptype = "replace"
+ local new, ok = c_run(replace,...)
+ if ok then
+ replace = new
+ renewed = true
+ end
+ end
+ --
+ sweepnode = nil
+ sweeptype = nil
+ if renewed then
+ setdisc(disc,pre,post,replace)
+ return next, true
+ else
+ return next, done
+ end
+end
+
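
kernrun, comprun and testrun are the three discretionary dispatchers used in the main loop further down: gpos features go through kernrun, gsub_ligature through testrun (which first tries the lookup across the replace text), and everything else through comprun. The single-step dispatch as it appears later in this patch (the multi-step calls are analogous):

    if gpossing then
        start, ok = kernrun(start,k_run_single, font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
    elseif typ == "gsub_ligature" then
        start, ok = testrun(start,t_run_single,c_run_single,font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
    else
        start, ok = comprun(start,c_run_single, font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
    end
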
+-- A discrun happens when we have a zwnj. We're gpossing so it is unlikely that
+-- there has been a match changing the character. Now, as we check again here
+-- the question is: why do we do this ... needs checking as drun seems useless
+-- ... maybe that code can go away
+
+-- local function discrun(disc,drun,krun)
+-- local prev, next = getboth(disc)
+-- if trace_discruns then
+-- report_disc("disc")
+-- end
+-- if next and prev then
+-- setnext(prev,next)
+-- -- setprev(next,prev)
+-- drun(prev)
+-- setnext(prev,disc)
+-- -- setprev(next,disc)
+-- end
+-- --
+-- if krun then -- currently always false
+-- local pre = getfield(disc,"pre")
+-- if not pre then
+-- -- go on
+-- elseif prev then
+-- local nest = getprev(pre)
+-- setlink(prev,pre)
+-- krun(prev,"preinjections")
+-- setprev(pre,nest)
+-- setnext(prev,disc)
+-- else
+-- krun(pre,"preinjections")
+-- end
+-- end
+-- return next
+-- end
+
+-- We can make some assumptions with respect to discretionaries. First of all it is very
+-- unlikely that some of the analysis related attributes applies. Then we can also assume
+-- that the ConTeXt specific dynamic attribute is different, although we do use explicit
+-- discretionaries (maybe we need to tag those some day). So, at least for now, we don't
+-- have the following test in the sub runs:
+--
+-- -- local a = getattr(start,0)
+-- -- if a then
+-- -- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+-- -- else
+-- -- a = not attribute or getprop(start,a_state) == attribute
+-- -- end
+-- -- if a then
+--
+-- but use this instead:
+--
+-- -- local a = getattr(start,0)
+-- -- if not a or (a == attr) then
+--
+-- and even that one is probably not needed.
+
+local nesting = 0
+
+local function c_run_single(head,font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
+ local done = false
+ local start = sweephead[head]
+ if start then
+ sweephead[head] = nil
+ else
+ start = head
+ end
+ while start do
+ local char = ischar(start,font)
+ if char then
+ local a = getattr(start,0)
+ if not a or (a == attr) then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ local ok
+ head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,1)
+ if ok then
+ done = true
+ end
+ end
+ if start then
+ start = getnext(start)
end
else
- -- weird case
+ start = getnext(start)
end
+ elseif char == false then
+ return head, done
else
- -- no need
+ -- weird
+ start = getnext(start)
end
- else
- -- weird case
end
- comprun(disc,crun)
- return next
+ return head, done
end
-local function discrun(disc,drun,krun)
- local prev, next = getboth(disc)
- if trace_discruns then
- report_run("disc") -- will be more detailed
+local function t_run_single(start,stop,font,attr,lookupcache)
+ while start ~= stop do
+ local char = ischar(start,font)
+ if char then
+ local a = getattr(start,0)
+ if not a or (a == attr) then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then -- hm, hyphens can match (tlig) so we need to really check
+ -- if we need more than ligatures we can outline the code and use functions
+ local s = getnext(start)
+ local l = nil
+ while s do
+ local lg = lookupmatch[getchar(s)]
+ if lg then
+ l = lg
+ s = getnext(s)
+ else
+ break
+ end
+ end
+ if l and l.ligature then
+ return true
+ end
+ end
+ end
+ start = getnext(start)
+ else
+ break
+ end
end
- if next and prev then
- setnext(prev,next)
- -- setprev(next,prev)
- drun(prev)
- setnext(prev,disc)
- -- setprev(next,disc)
+end
+
+-- local function d_run_single(prev,font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
+-- local a = getattr(prev,0)
+-- if not a or (a == attr) then
+-- local char = ischar(prev) -- can be disc
+-- if char then
+-- local lookupmatch = lookupcache[char]
+-- if lookupmatch then
+-- local h, d, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,1)
+-- if ok then
+-- done = true
+-- success = true
+-- end
+-- end
+-- end
+-- end
+-- end
+
+local function k_run_single(sub,injection,last,font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
+ local a = getattr(sub,0)
+ if not a or (a == attr) then
+ for n in traverse_nodes(sub) do -- only gpos
+ if n == last then
+ break
+ end
+ local char = ischar(n)
+ if char then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ local h, d, ok = handler(sub,n,dataset,sequence,lookupmatch,rlmode,step,1,injection)
+ if ok then
+ return true
+ end
+ end
+ end
+ end
end
- --
- local pre = getfield(disc,"pre")
- if not pre then
- -- go on
- elseif prev then
- local nest = getprev(pre)
- setlink(prev,pre)
- krun(prev,"preinjections")
- setprev(pre,nest)
- setnext(prev,disc)
+end
+
+local function c_run_multiple(head,font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
+ local done = false
+ local start = sweephead[head]
+ if start then
+ sweephead[head] = nil
else
- krun(pre,"preinjections")
+ start = head
+ end
+ while start do
+ local char = ischar(start,font)
+ if char then
+ local a = getattr(start,0)
+ if not a or (a == attr) then
+ for i=1,nofsteps do
+ local step = steps[i]
+ local lookupcache = step.coverage
+ if lookupcache then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i)
+ if ok then
+ done = true
+ break
+ elseif not start then
+ -- don't ask why ... shouldn't happen
+ break
+ end
+ end
+ else
+ report_missing_coverage(dataset,sequence)
+ end
+ end
+ if start then
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
+ end
+ elseif char == false then
+ -- whatever glyph
+ return head, done
+ else
+ -- very unlikely
+ start = getnext(start)
+ end
+ end
+ return head, done
+end
+
+local function t_run_multiple(start,stop,font,attr,steps,nofsteps)
+ while start ~= stop do
+ local char = ischar(start,font)
+ if char then
+ local a = getattr(start,0)
+ if not a or (a == attr) then
+ for i=1,nofsteps do
+ local step = steps[i]
+ local lookupcache = step.coverage
+ if lookupcache then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ -- if we need more than ligatures we can outline the code and use functions
+ local s = getnext(start)
+ local l = nil
+ while s do
+ local lg = lookupmatch[getchar(s)]
+ if lg then
+ l = lg
+ s = getnext(s)
+ else
+ break
+ end
+ end
+ if l and l.ligature then
+ return true
+ end
+ end
+ else
+ report_missing_coverage(dataset,sequence)
+ end
+ end
+ end
+ start = getnext(start)
+ else
+ break
+ end
end
- return next
end
--- todo: maybe run lr and rl stretches
+-- local function d_run_multiple(prev,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
+-- local a = getattr(prev,0)
+-- if not a or (a == attr) then
+-- local char = ischar(prev) -- can be disc
+-- if char then
+-- for i=1,nofsteps do
+-- local step = steps[i]
+-- local lookupcache = step.coverage
+-- if lookupcache then
+-- local lookupmatch = lookupcache[char]
+-- if lookupmatch then
+-- -- we could move all code inline but that makes things even more unreadable
+-- local h, d, ok = handler(head,prev,dataset,sequence,lookupmatch,rlmode,step,i)
+-- if ok then
+-- done = true
+-- break
+-- end
+-- end
+-- else
+-- report_missing_coverage(dataset,sequence)
+-- end
+-- end
+-- end
+-- end
+-- end
+
+local function k_run_multiple(sub,injection,last,font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
+ local a = getattr(sub,0)
+ if not a or (a == attr) then
+ for n in traverse_nodes(sub) do -- only gpos
+ if n == last then
+ break
+ end
+ local char = ischar(n)
+ if char then
+ for i=1,nofsteps do
+ local step = steps[i]
+ local lookupcache = step.coverage
+ if lookupcache then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ local h, d, ok = handler(sub,n,dataset,sequence,lookupmatch,rlmode,step,i,injection)
+ if ok then
+ return true
+ end
+ end
+ else
+ report_missing_coverage(dataset,sequence)
+ end
+ end
+ end
+ end
+ end
+end
+
+-- to be checked: nowadays we probably can assume properly matched directions
+-- so maybe we no longer need a stack
local function txtdirstate(start,stack,top,rlparmode)
local dir = getfield(start,"dir")
+ local new = 1
if dir == "+TRT" then
top = top + 1
stack[top] = dir
- return top, -1
+ new = -1
elseif dir == "+TLT" then
top = top + 1
stack[top] = dir
- return top, 1
- end
- if dir == "-TRT" or dir == "-TLT" then
+ elseif dir == "-TRT" or dir == "-TLT" then
top = top - 1
- if dir == "+TRT" then
- return top, -1
- else
- return top, 1
+ if stack[top] == "+TRT" then
+ new = -1
end
+ else
+ new = rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, level %a",dir,mref(rlparmode),mref(new),topstack)
end
- return top, rlparmode
+ return getnext(start), top, new
end
-local nesting = 0
+local function pardirstate(start)
+ local dir = getfield(start,"dir")
+ local new = 0
+ if dir == "TLT" then
+ new = 1
+ elseif dir == "TRT" then
+ new = -1
+ end
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a",dir,mref(new))
+ end
+ return getnext(start), new, new
+end
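
txtdirstate and pardirstate replace the inline direction bookkeeping that used to live in the main loop; note that the pop branch now consults stack[top] instead of re-testing dir (which can never be "+TRT" inside that branch). A standalone sketch of the same state machine run on plain strings instead of dir nodes, only to show the resulting rlmode values; nothing here is part of the patch:

    local stack, top, rlparmode, rlmode = { }, 0, 0, 0

    local function step(dir)
        if dir == "+TLT" or dir == "+TRT" then
            top = top + 1
            stack[top] = dir
            rlmode = dir == "+TRT" and -1 or 1
        elseif dir == "-TLT" or dir == "-TRT" then
            top = top - 1
            rlmode = stack[top] == "+TRT" and -1 or 1
        else
            rlmode = rlparmode   -- anything else falls back to the paragraph mode
        end
    end

    for _, d in ipairs { "+TLT", "+TRT", "-TRT", "-TLT" } do
        step(d)
        print(d, rlmode)         -- prints 1, -1, 1, 1
    end
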
local function featuresprocessor(head,font,attr)
@@ -3075,6 +3307,7 @@ local function featuresprocessor(head,font,attr)
characters = tfmdata.characters
marks = tfmdata.resources.marks
factor = tfmdata.parameters.factor
+ threshold = tfmdata.parameters.spacing.width or 65536*10
elseif currentfont ~= font then
@@ -3095,7 +3328,7 @@ local function featuresprocessor(head,font,attr)
local done = false
local datasets = otf.dataset(tfmdata,font,attr)
- local dirstack = { } -- could move outside function
+ local dirstack = { } -- could move outside function but we can have local runs
sweephead = { }
@@ -3110,8 +3343,6 @@ local function featuresprocessor(head,font,attr)
-- We don't go to the next node if a disc node is created so that we can then treat
-- the pre, post and replace. It's a bit of a hack but works out ok for most cases.
- -- there can be less subtype and attr checking in the comprun etc helpers
-
for s=1,#datasets do
local dataset = datasets[s]
----- featurevalue = dataset[1] -- todo: pass to function instead of using a global
@@ -3121,25 +3352,30 @@ local function featuresprocessor(head,font,attr)
local topstack = 0
local success = false
local typ = sequence.type
- local gpossing = typ == "gpos_single" or typ == "gpos_pair" -- store in dataset
+ local gpossing = typ == "gpos_single" or typ == "gpos_pair" -- store in dataset
local handler = handlers[typ]
local steps = sequence.steps
local nofsteps = sequence.nofsteps
- if typ == "gsub_reversecontextchain" then -- chain < 0
+ if not steps then
+ -- this permits injection, watch the different arguments
+ local h, d, ok = handler(head,start,dataset,sequence,nil,nil,nil,0,font,attr)
+ if ok then
+ success = true
+ if h then
+ head = h
+ end
+ if d then
+ start = d
+ end
+ end
+ elseif typ == "gsub_reversecontextchain" then
-- this is a limited case, no special treatments like 'init' etc
- -- we need to get rid of this slide! probably no longer needed in latest luatex
- local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
+ local start = find_node_tail(head)
while start do
- local id = getid(start)
local char = ischar(start,font)
if char then
local a = getattr(start,0)
- if a then
- a = a == attr
- else
- a = true
- end
- if a then
+ if not a or (a == attr) then
for i=1,nofsteps do
local step = steps[i]
local lookupcache = step.coverage
@@ -3147,16 +3383,20 @@ local function featuresprocessor(head,font,attr)
local lookupmatch = lookupcache[char]
if lookupmatch then
-- todo: disc?
- head, start, success = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i)
- if success then
+ local ok
+ head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i)
+ if ok then
+ success = true
break
end
end
else
- report_missing_cache(dataset,sequence)
+ report_missing_coverage(dataset,sequence)
end
end
- if start then start = getprev(start) end
+ if start then
+ start = getprev(start)
+ end
else
start = getprev(start)
end
@@ -3168,230 +3408,63 @@ local function featuresprocessor(head,font,attr)
local start = head -- local ?
rlmode = 0 -- to be checked ?
if nofsteps == 1 then -- happens often
+
local step = steps[1]
local lookupcache = step.coverage
- if not lookupcache then -- also check for empty cache
- report_missing_cache(dataset,sequence)
+ if not lookupcache then
+ -- can't happen, no check in loop either
+ report_missing_coverage(dataset,sequence)
else
- local function c_run(head) -- no need to check for 256 and attr probably also the same
- local done = false
- local start = sweephead[head]
- if start then
- sweephead[head] = nil
- else
- start = head
- end
- while start do
- local id = getid(start)
- if id ~= glyph_code then
- -- very unlikely (if so we could use ischar)
- start = getnext(start)
- else
- local char = ischar(start,font)
- if char then
- local a = getattr(start,0)
- -- if a then
- -- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- -- else
- -- a = not attribute or getprop(start,a_state) == attribute
- -- end
- -- if a then
- if not a or (a == attr) then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- sequence kan weg
- local ok
- head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,1)
- if ok then
- done = true
- end
- end
- if start then start = getnext(start) end
- else
- start = getnext(start)
- end
- else
- return head, false
- end
- end
- end
- if done then
- success = true -- needed in this subrun?
- end
- return head, done
- end
-
- local function t_run(start,stop)
- while start ~= stop do
- local id = getid(start)
- local char = ischar(start,font)
- if char then
- local a = getattr(start,0)
- -- if a then
- -- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- -- else
- -- a = not attribute or getprop(start,a_state) == attribute
- -- end
- -- if a then
- if not a or (a == attr) then
- local lookupmatch = lookupcache[char]
- if lookupmatch then -- hm, hyphens can match (tlig) so we need to really check
- -- if we need more than ligatures we can outline the code and use functions
- local s = getnext(start)
- local l = nil
- while s do
- local lg = lookupmatch[getchar(s)]
- if lg then
- l = lg
- s = getnext(s)
- else
- break
- end
- end
- if l and l.ligature then
- return true
- end
- end
- end
- start = getnext(start)
+ while start do
+ local char, id = ischar(start,font)
+ if char then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
else
- break
- end
- end
- end
-
- local function d_run(prev) -- we can assume that prev and next are glyphs
- local a = getattr(prev,0)
- -- if a then
- -- a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute)
- -- else
- -- a = not attribute or getprop(prev,a_state) == attribute
- -- end
- -- if a then
- if not a or (a == attr) then
- local lookupmatch = lookupcache[getchar(prev)]
- if lookupmatch then
- -- sequence kan weg
- local h, d, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,1)
- if ok then
- done = true
- success = true
- end
+ a = not attribute or getprop(start,a_state) == attribute
end
- end
- end
-
- local function k_run(sub,injection,last)
- local a = getattr(sub,0)
- -- if a then
- -- a = (a == attr) and (not attribute or getprop(sub,a_state) == attribute)
- -- else
- -- a = not attribute or getprop(sub,a_state) == attribute
- -- end
- -- if a then
- if not a or (a == attr) then
- -- sequence kan weg
- for n in traverse_nodes(sub) do -- only gpos
- if n == last then
- break
- end
- local id = getid(n)
- if id == glyph_code then
- local lookupmatch = lookupcache[getchar(n)]
- if lookupmatch then
- local h, d, ok = handler(sub,n,dataset,sequence,lookupmatch,rlmode,step,1,injection)
- if ok then
- done = true
- success = true
- end
+ if a then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ local ok
+ head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,1)
+ if ok then
+ success = true
+ -- elseif gpossing and zwnjruns and char == zwnj then
+ -- discrun(start,d_run,font,attr,lookupcache)
end
- else
- -- message
+ -- elseif gpossing and zwnjruns and char == zwnj then
+ -- discrun(start,d_run,font,attr,lookupcache)
end
- end
- end
- end
-
- while start do
- local id = getid(start)
- if id == glyph_code then
- local char = ischar(start,font)
- if char then
- local a = getattr(start,0)
- if a then
- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- else
- a = not attribute or getprop(start,a_state) == attribute
- end
- if a then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- sequence kan weg
- local ok
- head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,1)
- if ok then
- success = true
- elseif gpossing and zwnjruns and char == zwnj then
- discrun(start,d_run)
- end
- elseif gpossing and zwnjruns and char == zwnj then
- discrun(start,d_run)
- end
- if start then start = getnext(start) end
- else
- start = getnext(start)
+ if start then
+ start = getnext(start)
end
else
- start = getnext(start)
+ start = getnext(start)
end
+ elseif char == false then
+ -- whatever glyph
+ start = getnext(start)
elseif id == disc_code then
+ local ok
if gpossing then
- kernrun(start,k_run)
- start = getnext(start)
+ start, ok = kernrun(start,k_run_single, font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
elseif typ == "gsub_ligature" then
- start = testrun(start,t_run,c_run)
+ start, ok = testrun(start,t_run_single,c_run_single,font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
else
- comprun(start,c_run)
- start = getnext(start)
+ start, ok = comprun(start,c_run_single, font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
+ end
+ if ok then
+ success = true
end
elseif id == math_code then
start = getnext(end_of_math(start))
elseif id == dir_code then
- local dir = getfield(start,"dir")
- if dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- rlmode = 1
- elseif dir == "+TRT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- rlmode = -1
- elseif dir == "-TLT" or dir == "-TRT" then
- topstack = topstack - 1
- rlmode = dirstack[topstack] == "+TRT" and -1 or 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,mref(rlparmode),mref(rlmode),topstack,mref(newdir))
- end
- start = getnext(start)
+ start, topstack, rlmode = txtdirstate(start,dirstack,topstack,rlparmode)
elseif id == localpar_code then
- local dir = getfield(start,"dir")
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
- else
- rlparmode = 0
- end
- -- one might wonder if the par dir should be looked at, so we might as well drop the next line
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,mref(rlparmode),mref(rlmode))
- end
- start = getnext(start)
+ start, rlparmode, rlmode = pardirstate(start)
else
start = getnext(start)
end
@@ -3400,280 +3473,74 @@ local function featuresprocessor(head,font,attr)
else
- local function c_run(head)
- local done = false
- local start = sweephead[head]
- if start then
- sweephead[head] = nil
- else
- start = head
- end
- while start do
- local id = getid(start)
- if id ~= glyph_code then
- -- very unlikely
- start = getnext(start)
+ while start do
+ local char, id = ischar(start,font)
+ if char then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
else
- local char = ischar(start,font)
- if char then
- local a = getattr(start,0)
- -- if a then
- -- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- -- else
- -- a = not attribute or getprop(start,a_state) == attribute
- -- end
- -- if a then
- if not a or (a == attr) then
- for i=1,nofsteps do
- local step = steps[i]
- local lookupcache = step.coverage
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i)
- if ok then
- done = true
- break
- elseif not start then
- -- don't ask why ... shouldn't happen
- break
- end
- end
- else
- report_missing_cache(dataset,sequence)
- end
- end
- if start then start = getnext(start) end
- else
- start = getnext(start)
- end
- else
- return head, false
- end
+ a = not attribute or getprop(start,a_state) == attribute
end
- end
- if done then
- success = true
- end
- return head, done
- end
-
- local function d_run(prev)
- local a = getattr(prev,0)
- -- if a then
- -- a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute)
- -- else
- -- a = not attribute or getprop(prev,a_state) == attribute
- -- end
- -- if a then
- if not a or (a == attr) then
- -- brr prev can be disc
- local char = getchar(prev)
- for i=1,nofsteps do
- local step = steps[i]
- local lookupcache = step.coverage
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local h, d, ok = handler(head,prev,dataset,sequence,lookupmatch,rlmode,step,i)
- if ok then
- done = true
- break
- end
- end
- else
- report_missing_cache(dataset,sequence)
- end
- end
- end
- end
-
- local function k_run(sub,injection,last)
- local a = getattr(sub,0)
- -- if a then
- -- a = (a == attr) and (not attribute or getprop(sub,a_state) == attribute)
- -- else
- -- a = not attribute or getprop(sub,a_state) == attribute
- -- end
- -- if a then
- if not a or (a == attr) then
- for n in traverse_nodes(sub) do -- only gpos
- if n == last then
- break
- end
- local id = getid(n)
- if id == glyph_code then
- local char = getchar(n)
- for i=1,nofsteps do
- local step = steps[i]
- local lookupcache = step.coverage
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- local h, d, ok = handler(head,n,dataset,sequence,lookupmatch,step,rlmode,i,injection)
- if ok then
- done = true
- break
- end
- end
- else
- report_missing_cache(dataset,sequence)
- end
- end
- else
- -- message
- end
- end
- end
- end
-
- local function t_run(start,stop)
- while start ~= stop do
- local id = getid(start)
- local char = ischar(start,font)
- if char then
- local a = getattr(start,0)
- -- if a then
- -- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- -- else
- -- a = not attribute or getprop(start,a_state) == attribute
- -- end
- -- if a then
- if not a or (a == attr) then
- for i=1,nofsteps do
- local step = steps[i]
- local lookupcache = step.coverage
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- if we need more than ligatures we can outline the code and use functions
- local s = getnext(start)
- local l = nil
- while s do
- local lg = lookupmatch[getchar(s)]
- if lg then
- l = lg
- s = getnext(s)
- else
- break
- end
- end
- if l and l.ligature then
- return true
- end
+ if a then
+ for i=1,nofsteps do
+ local step = steps[i]
+ local lookupcache = step.coverage
+ if lookupcache then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i)
+ if ok then
+ success = true
+ break
+ elseif not start then
+ -- don't ask why ... shouldn't happen
+ break
+ -- elseif gpossing and zwnjruns and char == zwnj then
+ -- discrun(start,d_run,font,attr,steps,nofsteps)
end
- else
- report_missing_cache(dataset,sequence)
+ -- elseif gpossing and zwnjruns and char == zwnj then
+ -- discrun(start,d_run,font,attr,steps,nofsteps)
end
+ else
+ report_missing_coverage(dataset,sequence)
end
end
- start = getnext(start)
- else
- break
- end
- end
- end
-
- while start do
- local id = getid(start)
- if id == glyph_code then
- local char = ischar(start,font)
- if char then
- local a = getattr(start,0)
- if a then
- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- else
- a = not attribute or getprop(start,a_state) == attribute
- end
- if a then
- for i=1,nofsteps do
- local step = steps[i]
- local lookupcache = step.coverage
- if lookupcache then
- -- local char = getchar(start)
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i)
- if ok then
- success = true
- break
- elseif not start then
- -- don't ask why ... shouldn't happen
- break
- elseif gpossing and zwnjruns and char == zwnj then
- discrun(start,d_run)
- end
- elseif gpossing and zwnjruns and char == zwnj then
- discrun(start,d_run)
- end
- else
- report_missing_cache(dataset,sequence)
- end
- end
- if start then start = getnext(start) end
- else
+ if start then
start = getnext(start)
end
else
start = getnext(start)
end
+ elseif char == false then
+ start = getnext(start)
elseif id == disc_code then
+ local ok
if gpossing then
- kernrun(start,k_run)
- start = getnext(start)
+ start, ok = kernrun(start,k_run_multiple, font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
elseif typ == "gsub_ligature" then
- start = testrun(start,t_run,c_run)
+ start, ok = testrun(start,t_run_multiple,c_run_multiple,font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
else
- comprun(start,c_run)
- start = getnext(start)
+ start, ok = comprun(start,c_run_multiple, font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
+ end
+ if ok then
+ success = true
end
elseif id == math_code then
start = getnext(end_of_math(start))
elseif id == dir_code then
- local dir = getfield(start,"dir")
- if dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- rlmode = 1
- elseif dir == "+TRT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- rlmode = -1
- elseif dir == "-TLT" or dir == "-TRT" then
- topstack = topstack - 1
- rlmode = dirstack[topstack] == "+TRT" and -1 or 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,mref(rlparmode),mref(rlmode),topstack,mref(newdir))
- end
- start = getnext(start)
+ start, topstack, rlmode = txtdirstate(start,dirstack,topstack,rlparmode)
elseif id == localpar_code then
- local dir = getfield(start,"dir")
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
- else
- rlparmode = 0
- end
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,mref(rlparmode),mref(rlmode))
- end
- start = getnext(start)
+ start, rlparmode, rlmode = pardirstate(start)
else
start = getnext(start)
end
end
end
end
+
if success then
done = true
end
@@ -3711,3 +3578,129 @@ registerotffeature {
-- This can be used for extra handlers, but should be used with care!
otf.handlers = handlers -- used in devanagari
+
+-- We implement one here:
+
+local setspacekerns = nodes.injections.setspacekerns if not setspacekerns then os.exit() end
+
+function otf.handlers.trigger_space_kerns(head,start,dataset,sequence,_,_,_,_,font,attr)
+ -- if not setspacekerns then
+ -- setspacekerns = nodes.injections.setspacekerns
+ -- end
+ setspacekerns(font,sequence)
+ return head, start, true
+end
+
+local function hasspacekerns(data)
+ local sequences = data.resources.sequences
+ for i=1,#sequences do
+ local sequence = sequences[i]
+ local steps = sequence.steps
+ if steps and sequence.features.kern then
+ for i=1,#steps do
+ local coverage = steps[i].coverage
+ if not coverage then
+ -- maybe an issue
+ elseif coverage[32] then
+ return true
+ else
+ for k, v in next, coverage do
+ if v[32] then
+ return true
+ end
+ end
+ end
+ end
+ end
+ end
+ return false
+end
+
+otf.readers.registerextender {
+ name = "spacekerns",
+ action = function(data)
+ data.properties.hasspacekerns = hasspacekerns(data)
+ end
+}
+
+local function spaceinitializer(tfmdata,value) -- attr
+ local resources = tfmdata.resources
+ local spacekerns = resources and resources.spacekerns
+ if spacekerns == nil then
+ local properties = tfmdata.properties
+ if properties and properties.hasspacekerns then
+ local sequences = resources.sequences
+ local left = { }
+ local right = { }
+ local last = 0
+ local feat = nil
+ for i=1,#sequences do
+ local sequence = sequences[i]
+ local steps = sequence.steps
+ if steps then
+ local kern = sequence.features.kern
+ if kern then
+ feat = feat or kern -- or maybe merge
+ for i=1,#steps do
+ local step = steps[i]
+ local coverage = step.coverage
+ if coverage then
+ local kerns = coverage[32]
+ if kerns then
+ for k, v in next, kerns do
+ right[k] = v
+ end
+ end
+ for k, v in next, coverage do
+ local kern = v[32]
+ if kern then
+ left[k] = kern
+ end
+ end
+ end
+ end
+ last = i
+ end
+ else
+ -- no steps ... needed for old one ... we could use the basekerns
+ -- instead
+ end
+ end
+ left = next(left) and left or false
+ right = next(right) and right or false
+ if left or right then
+ spacekerns = {
+ left = left,
+ right = right,
+ }
+ if last > 0 then
+ local triggersequence = {
+ features = { kern = feat or { dflt = { dflt = true, } } },
+ flags = noflags,
+ name = "trigger_space_kerns",
+ order = { "kern" },
+ type = "trigger_space_kerns",
+ left = left,
+ right = right,
+ }
+ insert(sequences,last,triggersequence)
+ end
+ else
+ spacekerns = false
+ end
+ else
+ spacekerns = false
+ end
+ resources.spacekerns = spacekerns
+ end
+ return spacekerns
+end
+
+registerotffeature {
+ name = "spacekern",
+ description = "space kern injection",
+ default = true,
+ initializers = {
+ node = spaceinitializer,
+ },
+}
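
The spacekern code above harvests GPOS kern pairs that involve the space glyph (slot 32): pairs where the space comes first feed the right table, pairs where the space comes second feed the left table, and a trigger sequence later hands both to the injection code. Below is a minimal standalone sketch of just that collection step, using a hand-made coverage table instead of real font data; the coverage[first][second] = kern layout is an assumption read off this hunk.

local coverage = {
    [32]  = { [65] = -20, [86] = -35 }, -- space followed by A or V
    [86]  = { [32] = -30 },             -- V followed by space
    [102] = { [32] = -10 },             -- f followed by space
}

local left, right = { }, { }

-- kerns with the space as first glyph go into the right table
local afterspace = coverage[32]
if afterspace then
    for second, kern in next, afterspace do
        right[second] = kern
    end
end

-- kerns with the space as second glyph go into the left table
for first, seconds in next, coverage do
    local kern = seconds[32]
    if kern then
        left[first] = kern
    end
end

for k, v in next, left  do print("left",  k, v) end -- glyphs kerned before a space
for k, v in next, right do print("right", k, v) end -- glyphs kerned after a space
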
diff --git a/tex/context/base/mkiv/font-pre.mkiv b/tex/context/base/mkiv/font-pre.mkiv
index 17adc5290..584bd1779 100644
--- a/tex/context/base/mkiv/font-pre.mkiv
+++ b/tex/context/base/mkiv/font-pre.mkiv
@@ -541,6 +541,7 @@
\definefontstyle [\v!teletype,\v!type,\v!mono] [\s!tt]
\definefontstyle [\v!handwritten] [\s!hw]
\definefontstyle [\v!calligraphic] [\s!cg]
+\definefontstyle [\v!math,\v!mathematics] [\s!mm]
\definefontalternative[\s!tf]
\definefontalternative[\s!bf]
@@ -558,7 +559,6 @@
\definealternativestyle [\v!bold] [\bf] []
\definealternativestyle [\v!type] [\tt] []
\definealternativestyle [\v!mono] [\tt] []
-\definealternativestyle [\v!monobold] [\tt\bf] []
\definealternativestyle [\v!slanted] [\sl] []
\definealternativestyle [\v!italic] [\it] []
\definealternativestyle [\v!boldslanted,\v!slantedbold] [\bs] []
@@ -567,7 +567,7 @@
\definealternativestyle [\v!small,\v!smallnormal] [\setsmallbodyfont\tf] []
\definealternativestyle [\v!smallbold] [\setsmallbodyfont\bf] []
\definealternativestyle [\v!smalltype] [\setsmallbodyfont\tt] []
-\definealternativestyle [\v!smallslanted] [\setsmallbodyfont\sl] []
+\definealternativestyle [\v!smallitalic,\v!smallslanted] [\setsmallbodyfont\sl] []
\definealternativestyle [\v!smallboldslanted,\v!smallslantedbold] [\setsmallbodyfont\bs] []
\definealternativestyle [\v!smallbolditalic,\v!smallitalicbold] [\setsmallbodyfont\bi] []
@@ -586,6 +586,13 @@
\definealternativestyle [\v!mononormal] [\tt\tf] []
\definealternativestyle [\v!monobold] [\tt\bf] []
+% For Alan:
+
+\definealternativestyle
+ [\v!camel]
+ [{\setcharactercasing[\v!camel]}]
+ [{\setcharactercasing[\v!camel]}]
+
% % maybe we need interface neutral as well (for use in cld):
%
% \letcscsname\mediaeval \csname\v!mediaeval \endcsname
diff --git a/tex/context/base/mkiv/font-sol.lua b/tex/context/base/mkiv/font-sol.lua
index eca51cd61..8d45552a5 100644
--- a/tex/context/base/mkiv/font-sol.lua
+++ b/tex/context/base/mkiv/font-sol.lua
@@ -63,7 +63,6 @@ local getid = nuts.getid
local getattr = nuts.getattr
local getfont = nuts.getfont
local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
local getlist = nuts.getlist
local setfield = nuts.setfield
diff --git a/tex/context/base/mkiv/font-syn.lua b/tex/context/base/mkiv/font-syn.lua
index f750f92e9..dc090a1d9 100644
--- a/tex/context/base/mkiv/font-syn.lua
+++ b/tex/context/base/mkiv/font-syn.lua
@@ -2041,6 +2041,10 @@ function names.resolvespec(askedname,sub) -- overloads previous definition
end
end
+function fonts.names.ignoredfile(filename) -- only supported in mkiv
+ return false -- will be overloaded
+end
+
-- We could generate typescripts with designsize info from the name database but
-- it's not worth the trouble as font names remain a mess: for instance how do we
-- identify a font? Names, families, subfamilies or whatever snippet can contain
diff --git a/tex/generic/context/luatex/luatex-fonts-def.lua b/tex/context/base/mkiv/font-xtx.lua
index 494ac00a9..494ac00a9 100644
--- a/tex/generic/context/luatex/luatex-fonts-def.lua
+++ b/tex/context/base/mkiv/font-xtx.lua
diff --git a/tex/context/base/mkiv/lang-dis.lua b/tex/context/base/mkiv/lang-dis.lua
index ab62cc479..84d9b2d5b 100644
--- a/tex/context/base/mkiv/lang-dis.lua
+++ b/tex/context/base/mkiv/lang-dis.lua
@@ -29,6 +29,7 @@ local setsubtype = nuts.setsubtype
local getchar = nuts.getchar
local getdisc = nuts.getdisc
local setdisc = nuts.setdisc
+local isglyph = nuts.isglyph
local copy_node = nuts.copy
local free_node = nuts.free
@@ -59,13 +60,19 @@ local expanders = {
-- \-
local pre, post, replace = getdisc(d)
local done = false
- if pre and getid(pre) == glyph_code and getchar(pre) <= 0 then
- done = true
- pre = nil
+ if pre then
+ local char = isglyph(pre)
+ if char and char <= 0 then
+ done = true
+ pre = nil
+ end
end
- if post and getid(post) == glyph_code and getchar(post) <= 0 then
- done = true
- post = nil
+ if post then
+ local char = isglyph(post)
+ if char and char <= 0 then
+ done = true
+ post = nil
+ end
end
if done then
setdisc(d,pre,post,replace,discretionary_code,tex.exhyphenpenalty)
diff --git a/tex/context/base/mkiv/lang-hyp.lua b/tex/context/base/mkiv/lang-hyp.lua
index dad0f5232..65337143d 100644
--- a/tex/context/base/mkiv/lang-hyp.lua
+++ b/tex/context/base/mkiv/lang-hyp.lua
@@ -8,6 +8,8 @@ if not modules then modules = { } end modules ['lang-hyp'] = {
-- todo: hyphenate over range if needed
+-- setattr: helper for full attr
+
-- to be considered: reset dictionary.hyphenated when a pattern is added
-- or maybe an explicit reset of the cache
@@ -610,16 +612,17 @@ if context then
local new_disc = nodepool.disc
local new_glyph = nodepool.glyph
+ local new_penalty = nodepool.penalty
local getfield = nuts.getfield
local getfont = nuts.getfont
- local getchar = nuts.getchar
local getid = nuts.getid
local getattr = nuts.getattr
local getnext = nuts.getnext
local getprev = nuts.getprev
local getsubtype = nuts.getsubtype
local getlist = nuts.getlist
+ local isglyph = nuts.isglyph
local setfield = nuts.setfield
local setchar = nuts.setchar
@@ -638,6 +641,7 @@ if context then
local variables = interfaces.variables
local v_reset = variables.reset
local v_yes = variables.yes
+ local v_word = variables.word
local v_all = variables.all
local settings_to_array = utilities.parsers.settings_to_array
@@ -652,6 +656,8 @@ if context then
local a_hyphenation = attributes.private("hyphenation")
+ local interwordpenalty = 5000
+
function traditional.loadpatterns(language)
return dictionaries[language]
end
@@ -828,9 +834,10 @@ if context then
local rightedge = featureset.rightedge
local leftchar = somehyphenchar(featureset.leftchar)
local rightchar = somehyphenchar(featureset.rightchar)
- --
- joinerchars = joinerchars == v_yes and defaultjoiners or joinerchars
- hyphenchars = hyphenchars == v_yes and defaulthyphens or hyphenchars
+ local rightchars = featureset.rightchars
+ rightchars = rightchars == v_word and true or tonumber(rightchars)
+ joinerchars = joinerchars == v_yes and defaultjoiners or joinerchars
+ hyphenchars = hyphenchars == v_yes and defaulthyphens or hyphenchars
-- not yet ok: extrachars have to be ignored so it cannot be all)
featureset.extrachars = makeset(joinerchars or "",extrachars or "")
featureset.hyphenchars = makeset(hyphenchars or "")
@@ -839,6 +846,7 @@ if context then
featureset.charmin = charmin and charmin > 0 and charmin or nil
featureset.leftcharmin = leftcharmin and leftcharmin > 0 and leftcharmin or nil
featureset.rightcharmin = rightcharmin and rightcharmin > 0 and rightcharmin or nil
+ featureset.rightchars = rightchars
featureset.leftchar = leftchar
featureset.rightchar = rightchar
featureset.strict = rightedge == 'tex'
@@ -883,6 +891,7 @@ if context then
{ "characters" },
{ "hyphens" },
{ "joiners" },
+ { "rightchars" },
{ "rightwordmin", "integer" },
{ "charmin", "integer" },
{ "leftcharmin", "integer" },
@@ -980,6 +989,7 @@ if context then
local rightcharmin = nil
----- leftwordmin = nil
local rightwordmin = nil
+ local rightchars = nil
local leftchar = nil
local rightchar = nil
local attr = nil
@@ -1000,6 +1010,16 @@ if context then
starttiming(traditional)
+ local function insertpenalty()
+ local p = new_penalty(interwordpenalty)
+ setfield(p,"attr",getfield(last,"attr"))
+ if trace_visualize then
+ nuts.setvisual(p,"penalty")
+ end
+ last = getprev(last)
+ first, last = insert_after(first,last,p)
+ end
+
local function synchronizefeatureset(a)
local f = a and featuresets[a]
if f then
@@ -1013,6 +1033,7 @@ if context then
leftchar = f.leftchar
rightchar = f.rightchar
strict = f.strict and strictids
+ rightchars = f.rightchars
if rightwordmin and rightwordmin > 0 and lastwordlast ~= rightwordmin then
-- so we can change mid paragraph but it's kind of unpredictable then
if not tail then
@@ -1020,16 +1041,25 @@ if context then
end
last = tail
local inword = false
+ local count = 0
while last and rightwordmin > 0 do
local id = getid(last)
if id == glyph_code then
+ count = count + 1
inword = true
if trace_visualize then
- setcolor(last,"darkred")
+ setcolor(last,"darkgreen")
end
elseif inword then
inword = false
rightwordmin = rightwordmin - 1
+ if rightchars == true then
+ if rightwordmin > 0 then
+ insertpenalty()
+ end
+ elseif rightchars and count <= rightchars then
+ insertpenalty()
+ end
end
last = getprev(last)
end
@@ -1283,9 +1313,8 @@ if context then
-- for extensions.
while current and current ~= last do -- and current
- local id = getid(current)
- if id == glyph_code then
- local code = getchar(current)
+ local code, id = isglyph(current)
+ if code then
local lang = getfield(current,"lang")
if lang ~= language then
if dictionary and size > charmin and leftmin + rightmin <= size then
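
The rightchars handling added above boils down to a small decision while walking backwards over the paragraph: insert an interword penalty at a word boundary either unconditionally as long as protected word boundaries remain (rightchars given as "word", i.e. true) or only while the number of characters counted so far stays within a numeric rightchars. A distilled, plain-Lua version of that test follows; names are taken from the hunk and the actual penalty insertion is left out.

local function wantpenalty(rightchars, rightwordmin, count)
    if rightchars == true then -- "word": protect whole last words
        return rightwordmin > 0
    elseif rightchars then     -- numeric: protect the last n characters
        return count <= rightchars
    else
        return false
    end
end

print(wantpenalty(true, 2, 7)) -- true : still within the protected words
print(wantpenalty(5,    2, 7)) -- false: more than 5 characters from the end
print(wantpenalty(5,    2, 3)) -- true : within the last 5 characters
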
diff --git a/tex/context/base/mkiv/lang-hyp.mkiv b/tex/context/base/mkiv/lang-hyp.mkiv
index e3f032c6b..ca9113386 100644
--- a/tex/context/base/mkiv/lang-hyp.mkiv
+++ b/tex/context/base/mkiv/lang-hyp.mkiv
@@ -107,6 +107,7 @@
\letdummyparameter\s!righthyphenchar\zerocount
\letdummyparameter\c!alternative\empty
\letdummyparameter\c!rightedge\empty
+ \letdummyparameter\c!rightchars\empty
\getdummyparameters[#2]%
\clf_definehyphenationfeatures
{#1}%
@@ -115,6 +116,7 @@
hyphens {\dummyparameter\c!hyphens}%
joiners {\dummyparameter\c!joiners}%
rightwordmin \numexpr\dummyparameter\c!rightwords\relax
+ rightchars {\dummyparameter\c!rightchars}%
charmin \numexpr\dummyparameter\s!hyphenmin\relax
leftcharmin \numexpr\dummyparameter\s!lefthyphenmin\relax
rightcharmin \numexpr\dummyparameter\s!righthyphenmin\relax
diff --git a/tex/context/base/mkiv/lang-rep.lua b/tex/context/base/mkiv/lang-rep.lua
index ec82b7b19..28f2e5d50 100644
--- a/tex/context/base/mkiv/lang-rep.lua
+++ b/tex/context/base/mkiv/lang-rep.lua
@@ -49,6 +49,7 @@ local getprev = nuts.getprev
local getattr = nuts.getattr
local getid = nuts.getid
local getchar = nuts.getchar
+local isglyph = nuts.isglyph
local setfield = nuts.setfield
local setattr = nuts.setattr
@@ -146,23 +147,28 @@ local function hit(a,head)
local current = getnext(head)
local lastrun = false
local lastfinal = false
- while current and getid(current) == glyph_code do
- local newroot = root[getchar(current)]
- if not newroot then
- return lastrun, lastfinal
- else
- local final = newroot.final
- if final then
- if trace_detail then
- report_replacement("hitting word %a, replacement %a",final.word,final.replacement)
- end
- lastrun = current
- lastfinal = final
+ while current do
+ local char = isglyph(current)
+ if char then
+ local newroot = root[char]
+ if not newroot then
+ return lastrun, lastfinal
else
- root = newroot
+ local final = newroot.final
+ if final then
+ if trace_detail then
+ report_replacement("hitting word %a, replacement %a",final.word,final.replacement)
+ end
+ lastrun = current
+ lastfinal = final
+ else
+ root = newroot
+ end
end
+ current = getnext(current)
+ else
+ break
end
- current = getnext(current)
end
if lastrun then
return lastrun, lastfinal
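
The rewritten hit() above walks a replacement trie keyed by character codes, remembers the last node that carries a final record (the matched word plus its replacement) and stops at the first non-glyph or unknown character. Here is a toy version over plain tables; the trie shape is assumed from this hunk.

local byte = string.byte

local function walk(root, codes)
    local lastfinal = false
    for i = 1, #codes do
        local newroot = root[codes[i]]
        if not newroot then
            return lastfinal
        end
        local final = newroot.final
        if final then
            lastfinal = final -- remember the match, as hit() does
        else
            root = newroot
        end
    end
    return lastfinal
end

-- trie encoding the single replacement "foo" -> "bar"
local trie = {
    [byte("f")] = {
        [byte("o")] = {
            [byte("o")] = { final = { word = "foo", replacement = "bar" } },
        },
    },
}

local hit = walk(trie, { byte("f"), byte("o"), byte("o"), byte("x") })
print(hit and hit.replacement) -- bar
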
diff --git a/tex/context/base/mkiv/lang-wrd.lua b/tex/context/base/mkiv/lang-wrd.lua
index 7d625fa9e..38e6187af 100644
--- a/tex/context/base/mkiv/lang-wrd.lua
+++ b/tex/context/base/mkiv/lang-wrd.lua
@@ -38,6 +38,7 @@ local getid = nuts.getid
local getsubtype = nuts.getsubtype
local getchar = nuts.getchar
local setattr = nuts.setattr
+local isglyph = nuts.isglyph
local traverse_nodes = nuts.traverse
local traverse_ids = nuts.traverse_id
@@ -158,8 +159,8 @@ local function mark_words(head,whenfound) -- can be optimized and shared
-- we haven't done the fonts yet so we have characters (otherwise
-- we'd have to use the tounicodes)
while current do
- local id = getid(current)
- if id == glyph_code then
+ local code, id = isglyph(current)
+ if code then
local a = getfield(current,"lang")
if a then
if a ~= language then
@@ -172,7 +173,6 @@ local function mark_words(head,whenfound) -- can be optimized and shared
action()
language = a
end
- local code = getchar(current)
local data = chardata[code]
if is_letter[data.category] then
n = n + 1
diff --git a/tex/context/base/mkiv/lpdf-ini.lua b/tex/context/base/mkiv/lpdf-ini.lua
index e2a85bccf..0d96abb7d 100644
--- a/tex/context/base/mkiv/lpdf-ini.lua
+++ b/tex/context/base/mkiv/lpdf-ini.lua
@@ -12,6 +12,7 @@ local setmetatable, getmetatable, type, next, tostring, tonumber, rawset = setme
local char, byte, format, gsub, concat, match, sub, gmatch = string.char, string.byte, string.format, string.gsub, table.concat, string.match, string.sub, string.gmatch
local utfchar, utfbyte, utfvalues = utf.char, utf.byte, utf.values
local sind, cosd, floor, max, min = math.sind, math.cosd, math.floor, math.max, math.min
+local sort = table.sort
local lpegmatch, P, C, R, S, Cc, Cs = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc, lpeg.Cs
local formatters = string.formatters
local isboolean = string.is_boolean
@@ -369,34 +370,85 @@ local f_tonumber = formatters["%F"]
local tostring_a, tostring_d
+-- tostring_d = function(t,contentonly,key)
+-- if next(t) then
+-- local r, rn = { }, 0
+-- for k, v in next, t do
+-- -- for k, v in sortedhash(t) do -- can be an option
+-- rn = rn + 1
+-- local tv = type(v)
+-- if tv == "string" then
+-- r[rn] = f_key_value(k,toeight(v))
+-- elseif tv == "number" then
+-- r[rn] = f_key_number(k,v)
+-- -- elseif tv == "unicode" then -- can't happen
+-- -- r[rn] = f_key_value(k,tosixteen(v))
+-- elseif tv == "table" then
+-- local mv = getmetatable(v)
+-- if mv and mv.__lpdftype then
+-- -- if v == t then
+-- -- report_objects("ignoring circular reference in dictionary")
+-- -- r[rn] = f_key_null(k)
+-- -- else
+-- r[rn] = f_key_value(k,tostring(v))
+-- -- end
+-- elseif v[1] then
+-- r[rn] = f_key_value(k,tostring_a(v))
+-- else
+-- r[rn] = f_key_value(k,tostring_d(v))
+-- end
+-- else
+-- r[rn] = f_key_value(k,tostring(v))
+-- end
+-- end
+-- if contentonly then
+-- return concat(r," ")
+-- elseif key then
+-- return f_key_dictionary(key,r)
+-- else
+-- return f_dictionary(r)
+-- end
+-- elseif contentonly then
+-- return ""
+-- else
+-- return "<< >>"
+-- end
+-- end
+
tostring_d = function(t,contentonly,key)
if next(t) then
- local r, rn = { }, 0
- for k, v in next, t do
- rn = rn + 1
+ local r, n = { }, 0
+ for k in next, t do
+ n = n + 1
+ r[n] = k
+ end
+ sort(r)
+ for i=1,n do
+ local k = r[i]
+ local v = t[k]
local tv = type(v)
if tv == "string" then
- r[rn] = f_key_value(k,toeight(v))
+ r[i] = f_key_value(k,toeight(v))
elseif tv == "number" then
- r[rn] = f_key_number(k,v)
+ r[i] = f_key_number(k,v)
-- elseif tv == "unicode" then -- can't happen
- -- r[rn] = f_key_value(k,tosixteen(v))
+ -- r[i] = f_key_value(k,tosixteen(v))
elseif tv == "table" then
local mv = getmetatable(v)
if mv and mv.__lpdftype then
-- if v == t then
-- report_objects("ignoring circular reference in dictionary")
- -- r[rn] = f_key_null(k)
+ -- r[i] = f_key_null(k)
-- else
- r[rn] = f_key_value(k,tostring(v))
+ r[i] = f_key_value(k,tostring(v))
-- end
elseif v[1] then
- r[rn] = f_key_value(k,tostring_a(v))
+ r[i] = f_key_value(k,tostring_a(v))
else
- r[rn] = f_key_value(k,tostring_d(v))
+ r[i] = f_key_value(k,tostring_d(v))
end
else
- r[rn] = f_key_value(k,tostring(v))
+ r[i] = f_key_value(k,tostring(v))
end
end
if contentonly then
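
The rewritten tostring_d above makes PDF dictionary output deterministic by first collecting the keys, sorting them, and then overwriting the same array with the rendered key/value pairs. The same idiom in isolation, with a trivial stand-in for the f_key_* formatters:

local sort, concat = table.sort, table.concat

local function serialize(t)
    local r, n = { }, 0
    for k in next, t do
        n = n + 1
        r[n] = k
    end
    sort(r) -- deterministic key order across runs
    for i = 1, n do
        local k = r[i]
        r[i] = "/" .. k .. " " .. tostring(t[k])
    end
    return "<< " .. concat(r, " ") .. " >>"
end

print(serialize { Type = "/Page", Rotate = 0, MediaBox = "[0 0 595 842]" })
-- << /MediaBox [0 0 595 842] /Rotate 0 /Type /Page >>
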
diff --git a/tex/context/base/mkiv/lxml-ini.mkiv b/tex/context/base/mkiv/lxml-ini.mkiv
index 1770d087c..6fa14ddfc 100644
--- a/tex/context/base/mkiv/lxml-ini.mkiv
+++ b/tex/context/base/mkiv/lxml-ini.mkiv
@@ -45,15 +45,12 @@
% aliased
\let\xmlall \clf_xmlall
-\let\xmllastmatch \clf_xmllastmatch
\let\xmlatt \clf_xmlatt
-\let\xmllastatt \clf_xmllastatt
\let\xmlattdef \clf_xmlattdef
\let\xmlattribute \clf_xmlattribute
\let\xmlattributedef \clf_xmlattributedef
\let\xmlchainatt \clf_xmlchainatt
\let\xmlchainattdef \clf_xmlchainattdef
-\let\xmlrefatt \clf_xmlrefatt
\let\xmlchecknamespace \clf_xmlchecknamespace
\let\xmlcommand \clf_xmlcommand
\let\xmlcontext \clf_xmlcontext
@@ -71,7 +68,9 @@
\let\xmlflush \clf_xmlflush
\let\xmlflushcontext \clf_xmlflushcontext
\let\xmlflushlinewise \clf_xmlflushlinewise
+\let\xmlflushpure \clf_xmlflushpure
\let\xmlflushspacewise \clf_xmlflushspacewise
+\let\xmlflushtext \clf_xmlflushtext
\let\xmlfunction \clf_xmlfunction
\let\xmlinclude \clf_xmlinclude
\let\xmlincludeoptions \clf_xmlincludeoptions
@@ -79,18 +78,22 @@
\let\xmlinclusions \clf_xmlinclusions
\let\xmlbadinclusions \clf_xmlbadinclusions
\let\xmlindex \clf_xmlindex
-\let\xmlposition \clf_xmlindex
\let\xmlinlineverbatim \clf_xmlinlineverbatim
\let\xmllast \clf_xmllast
+\let\xmllastatt \clf_xmllastatt
+\let\xmllastmatch \clf_xmllastmatch
\let\xmlloaddirectives \clf_xmlloaddirectives
\let\xmlmain \clf_xmlmain
\let\xmlmatch \clf_xmlmatch
-\let\xmlpath \clf_xmlpath
\let\xmlname \clf_xmlname
\let\xmlnamespace \clf_xmlnamespace
\let\xmlnonspace \clf_xmlnonspace
+\let\xmlpath \clf_xmlpath
\let\xmlpos \clf_xmlpos
+\let\xmlposition \clf_xmlindex
+\let\xmlpure \clf_xmlpure
\let\xmlraw \clf_xmlraw
+\let\xmlrefatt \clf_xmlrefatt
\let\xmlregisterns \clf_xmlregisterns % document
\let\xmlremapname \clf_xmlremapname % element
\let\xmlremapnamespace \clf_xmlremapnamespace % document
@@ -104,16 +107,12 @@
\let\xmlstrippednolines \clf_xmlstrippednolines
\let\xmltag \clf_xmltag
\let\xmltext \clf_xmltext
-\let\xmlpure \clf_xmlpure
\let\xmltobuffer \clf_xmltobuffer % id pattern name
\let\xmltobufferverbose \clf_xmltobufferverbose % id pattern name
\let\xmltofile \clf_xmltofile % id pattern filename
\let\xmltoparameters \clf_xmltoparameters
\let\xmlverbatim \clf_xmlverbatim
-\let\xmlflushtext \clf_xmlflushtext
-\let\xmlflushpure \clf_xmlflushpure
-
\unexpanded\def\xmlinfo #1{\hbox{\ttxx[\clf_xmlname{#1}]}}
\unexpanded\def\xmlshow #1{\startpacked\ttx\xmlverbatim{#1}\stoppacked}
diff --git a/tex/context/base/mkiv/lxml-tab.lua b/tex/context/base/mkiv/lxml-tab.lua
index b03c7eb43..60d6262c7 100644
--- a/tex/context/base/mkiv/lxml-tab.lua
+++ b/tex/context/base/mkiv/lxml-tab.lua
@@ -36,6 +36,7 @@ local xml = xml
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber, rawset = type, next, setmetatable, getmetatable, tonumber, rawset
local lower, find, match, gsub = string.lower, string.find, string.match, string.gsub
+local sort = table.sort
local utfchar = utf.char
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -1316,29 +1317,44 @@ and then handle the lot.</p>
local f_attribute = formatters['%s=%q']
+-- we could reuse ats
+
local function verbose_element(e,handlers,escape) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
+ -- we now sort attributes
local n = 0
- for k,v in next, eat do
+ for k in next, eat do
n = n + 1
- ats[n] = f_attribute(k,escaped(v))
+ ats[n] = k
+ end
+ if n == 1 then
+ local k = ats[1]
+ ats = f_attribute(k,escaped(eat[k]))
+ else
+ sort(ats)
+ for i=1,n do
+ local k = ats[i]
+ ats[i] = f_attribute(k,escaped(eat[k]))
+ end
+ ats = concat(ats," ")
end
end
if ern and trace_entities and ern ~= ens then
ens = ern
end
+ local n = edt and #edt
if ens ~= "" then
- if edt and #edt > 0 then
+ if n and n > 0 then
if ats then
- handle("<",ens,":",etg," ",concat(ats," "),">")
+ handle("<",ens,":",etg," ",ats,">")
else
handle("<",ens,":",etg,">")
end
- for i=1,#edt do
+ for i=1,n do
local e = edt[i]
if type(e) == "string" then
handle(escaped(e))
@@ -1349,19 +1365,19 @@ local function verbose_element(e,handlers,escape) -- options
handle("</",ens,":",etg,">")
else
if ats then
- handle("<",ens,":",etg," ",concat(ats," "),"/>")
+ handle("<",ens,":",etg," ",ats,"/>")
else
handle("<",ens,":",etg,"/>")
end
end
else
- if edt and #edt > 0 then
+ if n and n > 0 then
if ats then
- handle("<",etg," ",concat(ats," "),">")
+ handle("<",etg," ",ats,">")
else
handle("<",etg,">")
end
- for i=1,#edt do
+ for i=1,n do
local e = edt[i]
if type(e) == "string" then
handle(escaped(e)) -- option: hexify escaped entities
@@ -1372,7 +1388,7 @@ local function verbose_element(e,handlers,escape) -- options
handle("</",etg,">")
else
if ats then
- handle("<",etg," ",concat(ats," "),"/>")
+ handle("<",etg," ",ats,"/>")
else
handle("<",etg,"/>")
end
diff --git a/tex/context/base/mkiv/math-ini.mkiv b/tex/context/base/mkiv/math-ini.mkiv
index d20278c9b..8627436b7 100644
--- a/tex/context/base/mkiv/math-ini.mkiv
+++ b/tex/context/base/mkiv/math-ini.mkiv
@@ -1331,7 +1331,7 @@
% \setupmathematics
% [\v!autopunctuation=\v!no]
%
-% \def\math_punctuation_next{\ifx\nexttoken\blankspace\char\zerocount\fi}
+% \def\math_punctuation_next{\ifx\nexttoken\blankspace\signalcharacter\fi}
%
% \unexpanded\def\math_punctuation_comma {\textcomma \futurelet\nexttoken\math_punctuation_next}
% \unexpanded\def\math_punctuation_period{\textperiod\futurelet\nexttoken\math_punctuation_next}
diff --git a/tex/context/base/mkiv/math-noa.lua b/tex/context/base/mkiv/math-noa.lua
index 878b1e9a1..d3409e01b 100644
--- a/tex/context/base/mkiv/math-noa.lua
+++ b/tex/context/base/mkiv/math-noa.lua
@@ -170,9 +170,6 @@ local math_style = nodecodes.style -- attr style
local math_choice = nodecodes.choice -- attr display text script scriptscript
local math_fence = nodecodes.fence -- attr subtype
-local hlist_code = nodecodes.hlist
-local glyph_code = nodecodes.glyph
-
local left_fence_code = fencecodes.left
local middle_fence_code = fencecodes.middle
local right_fence_code = fencecodes.right
@@ -198,8 +195,8 @@ local function process(start,what,n,parent)
report_processing("%w%S, class %a",n*2,nutstring(start),noadcodes[getsubtype(start)])
elseif id == math_char then
local char = getchar(start)
+ local font = getfont(start)
local fam = getfield(start,"fam")
- local font = font_of_family(fam)
report_processing("%w%S, family %a, font %a, char %a, shape %c",n*2,nutstring(start),fam,font,char,char)
else
report_processing("%w%S",n*2,nutstring(start))
@@ -531,8 +528,6 @@ do
local function checked(pointer)
local char = getchar(pointer)
- -- local fam = getfield(pointer,"fam")
- -- local font = font_of_family(fam)
local font = getfont(pointer)
local data = fontcharacters[font]
if not data[char] then
@@ -556,8 +551,6 @@ do
local g = getattr(pointer,a_mathgreek) or 0
local a = getattr(pointer,a_mathalphabet) or 0
local char = getchar(pointer)
- -- local fam = getfield(pointer,"fam")
- -- local font = font_of_family(fam)
local font = getfont(pointer)
local characters = fontcharacters[font]
if a > 0 or g > 0 then
@@ -648,8 +641,6 @@ processors.render[math_char] = function(pointer)
if renderset then
local newchar = renderset[char]
if newchar then
- -- local fam = getfield(pointer,"fam")
- -- local font = font_of_family(fam)
local font = getfont(pointer)
local characters = fontcharacters[font]
if characters and characters[newchar] then
@@ -1110,7 +1101,6 @@ alternate[math_char] = function(pointer)
local a = getattr(pointer,a_mathalternate)
if a and a > 0 then
setattr(pointer,a_mathalternate,0)
- -- local tfmdata = fontdata[font_of_family(getfield(pointer,"fam"))]
local tfmdata = fontdata[getfont(pointer)]
local resources = tfmdata.resources -- was tfmdata.shared
if resources then
@@ -1239,7 +1229,6 @@ italics[math_char] = function(pointer,what,n,parent)
local method = getattr(pointer,a_mathitalics)
if method and method > 0 and method < 100 then
local char = getchar(pointer)
- -- local font = font_of_family(getfield(pointer,"fam")) -- todo: table
local font = getfont(pointer)
local correction, visual = getcorrection(method,font,char)
if correction and correction ~= 0 then
@@ -1357,7 +1346,7 @@ local validpair = {
}
local function movesubscript(parent,current_nucleus,current_char)
- local prev = getfield(parent,"prev")
+ local prev = getprev(parent)
if prev and getid(prev) == math_noad then
if not getfield(prev,"sup") and not getfield(prev,"sub") then
-- {f} {'}_n => f_n^'
@@ -1407,8 +1396,6 @@ local function collapsepair(pointer,what,n,parent,nested) -- todo: switch to tur
if getid(next_nucleus) == math_char then
local newchar = mathpair[next_char]
if newchar then
- -- local fam = getfield(current_nucleus,"fam")
- -- local id = font_of_family(fam)
local id = getfont(current_nucleus)
local characters = fontcharacters[id]
if characters and characters[newchar] then
@@ -1487,7 +1474,6 @@ variants[math_char] = function(pointer,what,n,parent) -- also set export value
local nucleus = getfield(next,"nucleus")
if nucleus and getid(nucleus) == math_char and getchar(nucleus) == selector then
local variant
- -- local tfmdata = fontdata[font_of_family(getfield(pointer,"fam"))] -- we can also have a famdata
local tfmdata = fontdata[getfont(pointer)]
local mathvariants = tfmdata.resources.variants -- and variantdata
if mathvariants then
diff --git a/tex/context/base/mkiv/mult-def.lua b/tex/context/base/mkiv/mult-def.lua
index 7a6268876..47f6518c4 100644
--- a/tex/context/base/mkiv/mult-def.lua
+++ b/tex/context/base/mkiv/mult-def.lua
@@ -10134,6 +10134,9 @@ return {
["rightwords"]={
["en"]="rightwords",
},
+ ["rightchars"]={
+ ["en"]="rightchars",
+ },
["roffset"]={
["en"]="roffset",
},
diff --git a/tex/context/base/mkiv/mult-low.lua b/tex/context/base/mkiv/mult-low.lua
index 9bf001352..bea7d6fee 100644
--- a/tex/context/base/mkiv/mult-low.lua
+++ b/tex/context/base/mkiv/mult-low.lua
@@ -338,6 +338,7 @@ return {
"offinterlineskip", "oninterlineskip", "nointerlineskip",
--
"strut", "halfstrut", "quarterstrut", "depthstrut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight",
+ "leftboundary", "rightboundary", "signalcharacter",
--
"ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing",
"ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing",
diff --git a/tex/context/base/mkiv/mult-prm.lua b/tex/context/base/mkiv/mult-prm.lua
index 931ffb007..4ecc006d0 100644
--- a/tex/context/base/mkiv/mult-prm.lua
+++ b/tex/context/base/mkiv/mult-prm.lua
@@ -420,6 +420,7 @@ return {
"pdfinclusionerrorlevel",
"pdfignoreunknownimages",
"pdfinfo",
+ "pdfinfoid",
"pdfinsertht",
"pdflastannot",
"pdflastlinedepth",
@@ -471,6 +472,8 @@ return {
"pdftexbanner",
"pdftexrevision",
"pdftexversion",
+ "pdfsuppressptexinfo",
+ "pdfsuppressoptionalinfo",
"pdfthread",
"pdfthreadmargin",
"pdftracingfonts",
@@ -1041,6 +1044,7 @@ return {
"pdfinclusionerrorlevel",
"pdfignoreunknownimages",
"pdfinfo",
+ "pdfinfoid",
"pdfinsertht",
"pdflastannot",
"pdflastlinedepth",
@@ -1092,6 +1096,8 @@ return {
"pdftexbanner",
"pdftexrevision",
"pdftexversion",
+ "pdfsuppressptexinfo",
+ "pdfsuppressoptionalinfo",
"pdfthread",
"pdfthreadmargin",
"pdftracingfonts",
diff --git a/tex/context/base/mkiv/node-fnt.lua b/tex/context/base/mkiv/node-fnt.lua
index 5e6d8f3d3..76273cfd6 100644
--- a/tex/context/base/mkiv/node-fnt.lua
+++ b/tex/context/base/mkiv/node-fnt.lua
@@ -44,6 +44,7 @@ local getid = nuts.getid
local getfont = nuts.getfont
local getsubtype = nuts.getsubtype
local getchar = nuts.getchar
+local getdisc = nuts.getdisc
local getnext = nuts.getnext
local getprev = nuts.getprev
local getfield = nuts.getfield
@@ -52,6 +53,9 @@ local setchar = nuts.setchar
local setlink = nuts.setlink
local setfield = nuts.setfield
+local isglyph = nuts.isglyph -- unchecked
+local ischar = nuts.ischar -- checked
+
local traverse_id = nuts.traverse_id
local traverse_char = nuts.traverse_char
local delete_node = nuts.delete
@@ -174,11 +178,11 @@ function handlers.characters(head)
report_fonts()
local n = tonut(head)
while n do
- local id = getid(n)
- if id == glyph_code then
+ local char, id = isglyph(n)
+ if char then
local font = getfont(n)
local attr = getattr(n,0) or 0
- report_fonts("font %03i, dynamic %03i, glyph %C",font,attr,getchar(n))
+ report_fonts("font %03i, dynamic %03i, glyph %C",font,attr,char)
elseif id == disc_code then
report_fonts("[disc] %s",nodes.listtoutf(n,true,false,n))
else
@@ -244,8 +248,8 @@ function handlers.characters(head)
local hash = variants[char]
if hash then
local p = getprev(n)
- if p and getid(p) == glyph_code then
- local char = getchar(p)
+ if p then
+ local char = ischar(p) -- checked
local variant = hash[char]
if variant then
if trace_variants then
@@ -285,8 +289,8 @@ function handlers.characters(head)
-- local prevattr = 0
for d in traverse_id(disc_code,nuthead) do
- -- we could use first_glyph
- local r = getfield(d,"replace") -- good enough
+ -- we could use first_glyph, only doing replace is good enough
+ local _, _, r = getdisc(d)
if r then
for n in traverse_char(r) do
local font = getfont(n)
@@ -414,18 +418,18 @@ function handlers.characters(head)
end
else
-- multiple fonts
- local front = nuthead == start
for i=1,b do
local range = basefonts[i]
local start = range[1]
local stop = range[2]
- if (start or stop) and (start ~= stop) then
+ if start then
+ local front = nuthead == start
local prev, next
if stop then
next = getnext(stop)
start, stop = ligaturing(start,stop)
start, stop = kerning(start,stop)
- elseif start then -- safeguard
+ else
prev = getprev(start)
start = ligaturing(start)
start = kerning(start)
@@ -436,18 +440,10 @@ function handlers.characters(head)
if next then
setlink(stop,next)
end
- if front then
- nuthead = start
- front = nil -- we assume a proper sequence
+ if front and nuthead ~= start then
+ head = tonode(nuthead)
end
end
- if front then
- -- shouldn't happen
- nuthead = start
- end
- end
- if front then
- head = tonode(nuthead)
end
end
stoptiming(nodes)
@@ -457,198 +453,6 @@ function handlers.characters(head)
return head, true
end
--- local formatters = string.formatters
-
--- local function make(processors,font,attribute)
--- _G.__temp = processors
--- local t = { }
--- for i=1,#processors do
--- if processors[i] then
--- t[#t+1] = formatters["local p_%s = _G.__temp[%s]"](i,i)
--- end
--- end
--- t[#t+1] = "return function(head,done)"
--- if #processors == 1 then
--- t[#t+1] = formatters["return p_%s(head,%s,%s)"](1,font,attribute or 0)
--- else
--- for i=1,#processors do
--- if processors[i] then
--- t[#t+1] = formatters["local h,d=p_%s(head,%s,%s) if d then head=h or head done=true end"](i,font,attribute or 0)
--- end
--- end
--- t[#t+1] = "return head, done"
--- end
--- t[#t+1] = "end"
--- t = concat(t,"\n")
--- t = load(t)(processors)
--- _G.__temp = nil
--- return t
--- end
-
--- setmetatableindex(fontprocesses, function(t,font)
--- local tfmdata = fontdata[font]
--- local shared = tfmdata.shared -- we need to check shared, only when same features
--- local processes = shared and shared.processes
--- if processes and #processes > 0 then
--- processes = make(processes,font,0)
--- t[font] = processes
--- return processes
--- else
--- t[font] = false
--- return false
--- end
--- end)
-
--- setmetatableindex(setfontdynamics, function(t,font)
--- local tfmdata = fontdata[font]
--- local shared = tfmdata.shared
--- local f = shared and shared.dynamics and otf.setdynamics or false
--- if f then
--- local v = { }
--- t[font] = v
--- setmetatableindex(v,function(t,k)
--- local v = f(font,k)
--- v = make(v,font,k)
--- t[k] = v
--- return v
--- end)
--- return v
--- else
--- t[font] = false
--- return false
--- end
--- end)
---
--- -- TODO: basepasses!
---
--- function handlers.characters(head)
--- -- either next or not, but definitely no already processed list
--- starttiming(nodes)
--- local usedfonts, attrfonts
--- local a, u, prevfont, prevattr, done = 0, 0, nil, 0, false
--- if trace_fontrun then
--- run = run + 1
--- report_fonts()
--- report_fonts("checking node list, run %s",run)
--- report_fonts()
--- local n = head
--- while n do
--- local id = n.id
--- if id == glyph_code then
--- local font = n.font
--- local attr = n[0] or 0
--- report_fonts("font %03i, dynamic %03i, glyph %s",font,attr,utf.char(n.char))
--- else
--- report_fonts("[%s]",nodecodes[n.id])
--- end
--- n = n.next
--- end
--- end
--- for n in traverse_id(glyph_code,head) do
--- -- if n.subtype<256 then -- all are 1
--- local font = n.font
--- local attr = n[0] or 0 -- zero attribute is reserved for fonts in context
--- if font ~= prevfont or attr ~= prevattr then
--- if attr > 0 then
--- if not attrfonts then
--- attrfonts = {
--- [font] = {
--- [attr] = setfontdynamics[font][attr]
--- }
--- }
--- a = 1
--- else
--- local used = attrfonts[font]
--- if not used then
--- attrfonts[font] = {
--- [attr] = setfontdynamics[font][attr]
--- }
--- a = a + 1
--- elseif not used[attr] then
--- used[attr] = setfontdynamics[font][attr]
--- a = a + 1
--- end
--- end
--- else
--- if not usedfonts then
--- local fp = fontprocesses[font]
--- if fp then
--- usedfonts = {
--- [font] = fp
--- }
--- u = 1
--- end
--- else
--- local used = usedfonts[font]
--- if not used then
--- local fp = fontprocesses[font]
--- if fp then
--- usedfonts[font] = fp
--- u = u + 1
--- end
--- end
--- end
--- end
--- prevfont = font
--- prevattr = attr
--- variants = fontvariants[font]
--- end
--- if variants then
--- local char = getchar(n)
--- if char >= 0xFE00 and (char <= 0xFE0F or (char >= 0xE0100 and char <= 0xE01EF)) then
--- local hash = variants[char]
--- if hash then
--- local p = getprev(n)
--- if p and getid(p) == glyph_code then
--- local variant = hash[getchar(p)]
--- if variant then
--- setchar(p,variant)
--- delete_node(nuthead,n)
--- end
--- end
--- end
--- end
--- end
--- end
--- -- end
--- end
--- if trace_fontrun then
--- report_fonts()
--- report_fonts("statics : %s",(u > 0 and concat(keys(usedfonts)," ")) or "none")
--- report_fonts("dynamics: %s",(a > 0 and concat(keys(attrfonts)," ")) or "none")
--- report_fonts()
--- end
--- if not usedfonts then
--- -- skip
--- elseif u == 1 then
--- local font, processors = next(usedfonts)
--- head, done = processors(head,done)
--- else
--- for font, processors in next, usedfonts do
--- head, done = processors(head,done)
--- end
--- end
--- if not attrfonts then
--- -- skip
--- elseif a == 1 then
--- local font, dynamics = next(attrfonts)
--- for attribute, processors in next, dynamics do
--- head, done = processors(head,done)
--- end
--- else
--- for font, dynamics in next, attrfonts do
--- for attribute, processors in next, dynamics do
--- head, done = processors(head,done)
--- end
--- end
--- end
--- stoptiming(nodes)
--- if trace_characters then
--- nodes.report(head,done)
--- end
--- return head, true
--- end
-
local d_protect_glyphs = nuts.protect_glyphs
local d_unprotect_glyphs = nuts.unprotect_glyphs
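
Many of the node-fnt.lua changes above, like similar ones elsewhere in this commit, replace the separate getid/getchar pair with a single isglyph call, which returns the character for glyph nodes and nil plus the id otherwise. A small traversal sketch of that idiom, assuming a ConTeXt MkIV run where the nuts aliases introduced in this commit are available:

local nuts      = nodes.nuts
local isglyph   = nuts.isglyph -- unchecked: answers for any glyph node
local getnext   = nuts.getnext
local disc_code = nodes.nodecodes.disc

local function countglyphs(head)
    local glyphs, discs = 0, 0
    local n = head
    while n do
        local char, id = isglyph(n)
        if char then
            glyphs = glyphs + 1
        elseif id == disc_code then
            discs = discs + 1
        end
        n = getnext(n)
    end
    return glyphs, discs
end
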
diff --git a/tex/context/base/mkiv/node-ini.lua b/tex/context/base/mkiv/node-ini.lua
index 9a8337e4e..f311bb811 100644
--- a/tex/context/base/mkiv/node-ini.lua
+++ b/tex/context/base/mkiv/node-ini.lua
@@ -53,6 +53,10 @@ into the <l n='tex'/> engine, but this is a not so natural extension.</p>
also ignore the empty nodes. [This is obsolete!]</p>
--ldx]]--
+-- local gf = node.direct.getfield
+-- local n = table.setmetatableindex("number")
+-- function node.direct.getfield(a,b) n[b] = n[b] + 1 print(b,n[b]) return gf(a,b) end
+
nodes = nodes or { }
local nodes = nodes
nodes.handlers = nodes.handlers or { }
diff --git a/tex/context/base/mkiv/node-ltp.lua b/tex/context/base/mkiv/node-ltp.lua
index 5b5cc5c08..95efbcb9c 100644
--- a/tex/context/base/mkiv/node-ltp.lua
+++ b/tex/context/base/mkiv/node-ltp.lua
@@ -203,9 +203,12 @@ local getboth = nuts.getboth
local getlist = nuts.getlist
local getfont = nuts.getfont
local getchar = nuts.getchar
+local getdisc = nuts.getdisc
local getattr = nuts.getattr
local getdisc = nuts.getdisc
+local isglyph = nuts.isglyph
+
local setfield = nuts.setfield
local setlink = nuts.setlink
local setlist = nuts.setlist
@@ -486,20 +489,23 @@ end)
local function kern_stretch_shrink(p,d)
local left = getprev(p)
- if left and getid(left) == glyph_code then -- how about disc nodes?
- local data = expansions[getfont(left)][getchar(left)]
- if data then
- local stretch = data.stretch
- local shrink = data.shrink
- if stretch ~= 0 then
- -- stretch = data.factor * (d * stretch - d)
- stretch = data.factor * d * (stretch - 1)
- end
- if shrink ~= 0 then
- -- shrink = data.factor * (d * shrink - d)
- shrink = data.factor * d * (shrink - 1)
+ if left then
+ local char = isglyph(left)
+ if char then
+ local data = expansions[getfont(left)][char]
+ if data then
+ local stretch = data.stretch
+ local shrink = data.shrink
+ if stretch ~= 0 then
+ -- stretch = data.factor * (d * stretch - d)
+ stretch = data.factor * d * (stretch - 1)
+ end
+ if shrink ~= 0 then
+ -- shrink = data.factor * (d * shrink - d)
+ shrink = data.factor * d * (shrink - 1)
+ end
+ return stretch, shrink
end
- return stretch, shrink
end
end
return 0, 0
@@ -694,8 +700,8 @@ local function add_to_width(line_break_dir,checked_expansion,s) -- split into tw
local adjust_stretch = 0
local adjust_shrink = 0
while s do
- local id = getid(s)
- if id == glyph_code then
+ local char, id = isglyph(s)
+ if char then
if is_rotated[line_break_dir] then -- can be shared
size = size + getfield(s,"height") + getfield(s,"depth")
else
@@ -704,7 +710,7 @@ local function add_to_width(line_break_dir,checked_expansion,s) -- split into tw
if checked_expansion then
local data = checked_expansion[getfont(s)]
if data then
- data = data[getchar(s)]
+ data = data[char]
if data then
adjust_stretch = adjust_stretch + data.glyphstretch
adjust_shrink = adjust_shrink + data.glyphshrink
@@ -1200,7 +1206,7 @@ local function post_line_break(par)
local prevlast = getprev(lastnode)
local nextlast = getnext(lastnode)
local subtype = getsubtype(lastnode)
- local pre, post, replace = getdisc(lastnode)
+ local pre, post, replace, pretail, posttail, replacetail = getdisc(lastnode)
if subtype == second_disc_code then
if not (getid(prevlast) == disc_code and getsubtype(prevlast) == first_disc_code) then
report_parbuilders('unsupported disc at location %a',3)
@@ -1210,12 +1216,11 @@ local function post_line_break(par)
pre = nil -- signal
end
if replace then
- local n = find_tail(replace)
setlink(prevlast,replace)
- setlink(n,lastnode)
+ setlink(replacetail,lastnode)
replace = nil -- signal
end
- setdisc(pre,post,replace)
+ setdisc(lastnode,pre,post,replace)
local pre, post, replace = getdisc(prevlast)
if pre then
flush_nodelist(pre)
@@ -1234,20 +1239,18 @@ local function post_line_break(par)
end
setsubtype(nextlast,regular_disc_code)
setfield(nextlast,"replace",post)
- setfield(lastnode,"post")
+ setfield(lastnode,"post") -- nil
end
if replace then
flush_nodelist(replace)
end
if pre then
- local n = find_tail(pre)
setlink(prevlast,pre)
- setlink(n,lastnode)
+ setlink(pretail,lastnode)
end
if post then
- local n = find_tail(post)
setlink(lastnode,post)
- setlink(n,nextlast)
+ setlink(posttail,nextlast)
post_disc_break = true
end
setdisc(lastnode) -- nil, nil, nil
@@ -1882,8 +1885,13 @@ local function try_break(pi, break_type, par, first_p, current, checked_expansio
local b = r.break_node
local l = b and b.cur_break or first_p
local o = current and getprev(current)
- if current and getid(current) == disc_code and getfield(current,"pre") then
- o = find_tail(getfield(current,"pre"))
+ if current and getid(current) == disc_code then
+ local pre, _, _, pretail = getdisc(current)
+ if pre then
+ o = pretail
+ else
+ o = find_protchar_right(l,o)
+ end
else
o = find_protchar_right(l,o)
end
@@ -2176,8 +2184,8 @@ function constructors.methods.basic(head,d)
trialcount = 0
while current and p_active ~= n_active do
- local id = getid(current)
- if id == glyph_code then
+ local char, id = isglyph(current)
+ if char then
if is_rotated[par.line_break_dir] then
active_width.size = active_width.size + getfield(current,"height") + getfield(current,"depth")
else
@@ -2185,14 +2193,14 @@ function constructors.methods.basic(head,d)
end
if checked_expansion then
local currentfont = getfont(current)
- local data= checked_expansion[currentfont]
+ local data = checked_expansion[currentfont]
if data then
if currentfont ~= lastfont then
fontexps = checked_expansion[currentfont] -- a bit redundant for the par line packer
lastfont = currentfont
end
if fontexps then
- local expansion = fontexps[getchar(current)]
+ local expansion = fontexps[char]
if expansion then
active_width.adjust_stretch = active_width.adjust_stretch + expansion.glyphstretch
active_width.adjust_shrink = active_width.adjust_shrink + expansion.glyphshrink
@@ -2238,7 +2246,7 @@ function constructors.methods.basic(head,d)
-- 0.81 :
-- local actual_pen = getfield(current,"penalty")
--
- local pre = getfield(current,"pre")
+ local pre, post, replace = getdisc(current)
if not pre then -- trivial pre-break
disc_width.size = 0
if checked_expansion then
@@ -2297,7 +2305,6 @@ function constructors.methods.basic(head,d)
end
end
end
- local replace = getfield(current,"replace")
if replace then
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,replace)
active_width.size = active_width.size + size
@@ -2453,8 +2460,8 @@ local verbose = false -- true
local function short_display(target,a,font_in_short_display)
while a do
- local id = getid(a)
- if id == glyph_code then
+ local char, id = isglyph(a)
+ if char then
local font = getfont(a)
if font ~= font_in_short_display then
write(target,tex.fontidentifier(font) .. ' ')
@@ -2463,11 +2470,12 @@ local function short_display(target,a,font_in_short_display)
if getsubtype(a) == ligature_code then
font_in_short_display = short_display(target,getfield(a,"components"),font_in_short_display)
else
- write(target,utfchar(getchar(a)))
+ write(target,utfchar(char))
end
elseif id == disc_code then
- font_in_short_display = short_display(target,getfield(a,"pre"),font_in_short_display)
- font_in_short_display = short_display(target,getfield(a,"post"),font_in_short_display)
+ local pre, post, replace = getdisc(a)
+ font_in_short_display = short_display(target,pre,font_in_short_display)
+ font_in_short_display = short_display(target,post,font_in_short_display)
elseif verbose then
write(target,format("[%s]",nodecodes[id]))
elseif id == rule_code then
@@ -2822,8 +2830,8 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
local function process(current) -- called nested in disc replace
while current do
- local id = getid(current)
- if id == glyph_code then
+ local char, id = isglyph(current)
+ if char then
if cal_expand_ratio then
local currentfont = getfont(current)
if currentfont ~= lastfont then
@@ -2831,7 +2839,7 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
lastfont = currentfont
end
if fontexps then
- local expansion = fontexps[getchar(current)]
+ local expansion = fontexps[char]
if expansion then
font_stretch = font_stretch + expansion.glyphstretch
font_shrink = font_shrink + expansion.glyphshrink
@@ -3004,15 +3012,16 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
local fontexps, lastfont
for i=1,expansion_index do
- local g = expansion_stack[i]
- local e
- if getid(g) == glyph_code then
+ local g = expansion_stack[i]
+ local e = 0
+ local char = isglyph(g)
+ if char then
local currentfont = getfont(g)
if currentfont ~= lastfont then
fontexps = expansions[currentfont]
lastfont = currentfont
end
- local data = fontexps[getchar(g)]
+ local data = fontexps[char]
if trace_expansion then
setnodecolor(g,"hz:positive")
end
@@ -3060,15 +3069,16 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
local fontexps, lastfont
for i=1,expansion_index do
- local g = expansion_stack[i]
- local e
- if getid(g) == glyph_code then
+ local g = expansion_stack[i]
+ local e = 0
+ local char = isglyph(g)
+ if char then
local currentfont = getfont(g)
if currentfont ~= lastfont then
fontexps = expansions[currentfont]
lastfont = currentfont
end
- local data = fontexps[getchar(g)]
+ local data = fontexps[char]
if trace_expansion then
setnodecolor(g,"hz:negative")
end
diff --git a/tex/context/base/mkiv/node-met.lua b/tex/context/base/mkiv/node-met.lua
index 5c6e18bd3..a5ad15c80 100644
--- a/tex/context/base/mkiv/node-met.lua
+++ b/tex/context/base/mkiv/node-met.lua
@@ -127,32 +127,70 @@ nodes.tonut = function(n) return n end
local getfield = node.getfield
local setfield = node.setfield
-local getattr = node.get_attribute or node.has_attribute or getfield
+local getattr = node.get_attribute
local setattr = setfield
-local getnext = node.getnext or function(n) return getfield(n,"next") end
-local getprev = node.getprev or function(n) return getfield(n,"prev") end
-local getid = node.getid or function(n) return getfield(n,"id") end
-local getchar = node.getchar or function(n) return getfield(n,"char") end
-local getfont = node.getfont or function(n) return getfield(n,"font") end
-local getsubtype = node.getsubtype or function(n) return getfield(n,"subtype") end
-local getlist = node.getlist or function(n) return getfield(n,"list") end
-local getleader = node.getleader or function(n) return getfield(n,"leader") end
-
-nodes.getfield = getfield
-nodes.getattr = getattr
-
-nodes.setfield = setfield
-nodes.setattr = setattr
-
-nodes.getnext = getnext
-nodes.getprev = getprev
-nodes.getid = getid
-nodes.getchar = getchar
-nodes.getfont = getfont
-nodes.getsubtype = getsubtype
-nodes.getlist = getlist
-nodes.getleader = getleader
+local n_getid = node.getid
+local n_getlist = node.getlist
+local n_getnext = node.getnext
+local n_getprev = node.getprev
+local n_getchar = node.getchar
+local n_getfont = node.getfont
+local n_getsubtype = node.getsubtype
+local n_setfield = node.setfield
+local n_getfield = node.getfield
+local n_setattr = node.setattr
+local n_getattr = node.getattr
+local n_getdisc = node.getdisc
+local n_getleader = node.getleader
+
+local n_setnext = node.setnext or
+ function(c,next)
+ setfield(c,"next",n)
+ end
+local n_setprev = node.setprev or
+ function(c,prev)
+ setfield(c,"prev",p)
+ end
+local n_setlink = node.setlink or
+ function(c1,c2)
+ if c1 then setfield(c1,"next",c2) end
+ if c2 then setfield(c2,"prev",c1) end
+ end
+local n_setboth = node.setboth or
+ function(c,p,n)
+ setfield(c,"prev",p)
+ setfield(c,"next",n)
+ end
+
+node.setnext = n_setnext
+node.setprev = n_setprev
+node.setlink = n_setlink
+node.setboth = n_setboth
+
+nodes.getfield = n_getfield
+nodes.setfield = n_setfield
+nodes.getattr = n_getattr
+nodes.setattr = n_setattr
+
+nodes.getnext = n_getnext
+nodes.getprev = n_getprev
+nodes.getid = n_getid
+nodes.getchar = n_getchar
+nodes.getfont = n_getfont
+nodes.getsubtype = n_getsubtype
+nodes.getlist = n_getlist
+nodes.getleader = n_getleader
+nodes.getdisc = n_getdisc
+-----.getpre = node.getpre or function(n) local h, _, _, t = n_getdisc(n,true) return h, t end
+-----.getpost = node.getpost or function(n) local _, h, _, _, t = n_getdisc(n,true) return h, t end
+-----.getreplace = node.getreplace or function(n) local _, _, h, _, _, t = n_getdisc(n,true) return h, t end
+
+nodes.is_char = node.is_char
+nodes.ischar = node.is_char
+
+nodes.is_glyph = node.is_glyph
+nodes.isglyph = node.is_glyph
nodes.getbox = node.getbox or tex.getbox
nodes.setbox = node.setbox or tex.setbox
@@ -160,12 +198,6 @@ nodes.getskip = node.getskip or tex.get
local n_new_node = nodes.new
local n_free_node = nodes.free
-local n_setfield = nodes.setfield
-local n_getfield = nodes.getfield
-local n_getnext = nodes.getnext
-local n_getprev = nodes.getprev
-local n_getid = nodes.getid
-local n_getlist = nodes.getlist
local n_copy_node = nodes.copy
local n_copy_list = nodes.copy_list
local n_find_tail = nodes.tail
@@ -184,8 +216,7 @@ local function remove(head,current,free_too)
n_free_node(t)
t = nil
else
- n_setfield(t,"next",nil)
- n_setfield(t,"prev",nil)
+ n_setboth(t)
end
return head, current, t
end
@@ -210,12 +241,10 @@ function nodes.replace(head,current,new) -- no head returned if false
local prev = n_getprev(current)
local next = n_getnext(current)
if next then
- n_setfield(new,"next",next)
- n_setfield(next,"prev",new)
+ n_setlink(new,next)
end
if prev then
- n_setfield(new,"prev",prev)
- n_setfield(prev,"next",new)
+ n_setlink(prev,new)
end
if head then
if head == current then
@@ -270,8 +299,7 @@ function nodes.linked(...)
local next = select(i,...)
if next then
if head then
- n_setfield(last,"next",next)
- n_setfield(next,"prev",last)
+ n_setlink(last,next)
else
head = next
end
@@ -287,8 +315,7 @@ function nodes.concat(list) -- consider tail instead of slide
local li = list[i]
if li then
if head then
- n_setfield(tail,"next",li)
- n_setfield(li,"prev",tail)
+ n_setlink(tail,li)
else
head = li
end
@@ -431,8 +458,7 @@ metatable.__concat = function(n1,n2) -- todo: accept nut on one end
return n2 -- or n2 * 2
else
local tail = n_find_tail(n1)
- n_setfield(tail,"next",n2)
- n_setfield(n2,"prev",tail)
+ n_setlink(tail,n2)
return n1
end
end
@@ -449,26 +475,22 @@ metatable.__mul = function(n,multiplier)
local h = n_copy_list(n)
if head then
local t = n_find_tail(h)
- n_setfield(t,"next",head)
- n_setfield(head,"prev",t)
+ n_setlink(t,head)
end
head = h
end
local t = n_find_tail(n)
- n_setfield(t,"next",head)
- n_setfield(head,"prev",t)
+ n_setlink(t,head)
else
local head
for i=2,multiplier do
local c = n_copy_node(n)
if head then
- n_setfield(c,"next",head)
- n_setfield(head,"prev",c)
+ n_setlink(c,head)
end
head = c
end
- n_setfield(n,"next",head)
- n_setfield(head,"prev",n)
+ n_setlink(n,head)
end
return n
end
@@ -477,7 +499,7 @@ metatable.__sub = function(first,second)
if type(second) == "number" then
local tail = n_find_tail(first)
for i=1,second do
- local prev = n_getfield(tail,"prev")
+ local prev = n_getprev(tail)
n_free_node(tail) -- can become flushlist/flushnode
if prev then
tail = prev
@@ -486,7 +508,7 @@ metatable.__sub = function(first,second)
end
end
if tail then
- n_setfield(tail,"next",nil)
+ n_setnext(tail)
return first
else
return nil
@@ -497,15 +519,12 @@ metatable.__sub = function(first,second)
local prev = n_getprev(firsttail)
if prev then
local secondtail = n_find_tail(second)
- n_setfield(secondtail,"next",firsttail)
- n_setfield(firsttail,"prev",ltail)
- n_setfield(prev,"next",second)
- n_setfield(second,"prev",prev)
+ n_setlink(secondtail,firsttail)
+ n_setlink(prev,second)
return first
else
local secondtail = n_find_tail(second)
- n_setfield(secondtail,"next",first)
- n_setfield(first,"prev",ltail)
+ n_setlink(secondtail,first)
return second
end
end
@@ -524,7 +543,7 @@ metatable.__add = function(first,second)
end
end
if head then
- n_setfield(head,"prev",nil)
+ n_setprev(head)
return head
else
return nil
@@ -534,13 +553,10 @@ metatable.__add = function(first,second)
local next = n_getnext(first)
if next then
local secondtail = n_find_tail(second)
- n_setfield(first,"next",second)
- n_setfield(second,"prev",first)
- n_setfield(secondtail,"next",next)
- n_setfield(next,"prev",secondtail)
+ n_setlink(first,second)
+ n_setlink(secondtail,next)
else
- n_setfield(first,"next",second)
- n_setfield(second,"prev",first)
+ n_setlink(first,second)
end
return first
end
@@ -562,7 +578,7 @@ end
metatable.__pow = function(n,multiplier)
local tail = n
local head = nil
- if getnext(n) then
+ if n_getnext(n) then
if multiplier == 1 then
head = n_copy_list(n)
else
@@ -570,8 +586,7 @@ metatable.__pow = function(n,multiplier)
local h = n_copy_list(n)
if head then
local t = n_find_tail(h)
- n_setfield(t,"next",head)
- n_setfield(head,"prev",t)
+ n_setlink(t,head)
end
head = h
end
@@ -583,8 +598,7 @@ metatable.__pow = function(n,multiplier)
for i=2,multiplier do
local c = n_copy_node(n)
if head then
- n_setfield(head,"next",c)
- n_setfield(c,"prev",head)
+ n_setlink(head,c)
end
head = c
end
@@ -600,13 +614,12 @@ metatable.__unm = function(head)
local current = n_getnext(head)
while current do
local next = n_getnext(current)
- n_setfield(first,"prev",current)
- n_setfield(current,"next",first)
+ n_setlink(current,first)
first = current
current = next
end
- n_setfield(first,"prev",nil)
- n_setfield(last,"next",nil)
+ n_setprev(first)
+ n_setnext(last)
return first
end
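
The node-met.lua rewrites above rely on a clearing convention: calling the new setters without link arguments resets those fields, which is how n_setboth(t) and n_setnext(tail) replace explicit setfield(...,nil) pairs. A tiny illustration, assuming a LuaTeX run in which node-met.lua has installed the node level setters (natively or via the fallbacks defined above):

local glyph_id = node.id("glyph")
local a, b     = node.new(glyph_id), node.new(glyph_id)

node.setlink(a, b) -- a.next = b and b.prev = a in one call
node.setnext(a)    -- clears a.next
node.setboth(b)    -- clears b.prev and b.next

node.flush_node(a)
node.flush_node(b)
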
diff --git a/tex/context/base/mkiv/node-nut.lua b/tex/context/base/mkiv/node-nut.lua
index d82a21443..200cbce5b 100644
--- a/tex/context/base/mkiv/node-nut.lua
+++ b/tex/context/base/mkiv/node-nut.lua
@@ -142,9 +142,7 @@ nuts.getleader = direct.getleader
-- end
-- end
--- track("getsubtype")
-
--- local dgf = direct.getfield function nuts.getlist(n) return dgf(n,"list") end
+-- track("getfield")
-- setters
@@ -268,7 +266,20 @@ if not direct.setlist then
end
+-- if not direct.getpre then
+--
+-- local getfield = nuts.getfield
+--
+-- function direct.getpre (n) local h, _, _, t = getdisc(n,true) return h, t end
+-- function direct.getpost (n) local _, h, _, _, t = getdisc(n,true) return h, t end
+-- function direct.getreplace(n) local _, _, h, _, _, t = getdisc(n,true) return h, t end
+--
+-- end
+
nuts.getdisc = direct.getdisc
+----.getpre = direct.getpre
+----.getpost = direct.getpost
+----.getreplace = direct.getreplace
nuts.setdisc = direct.setdisc
nuts.setchar = direct.setchar
nuts.setnext = direct.setnext
@@ -279,7 +290,50 @@ nuts.setlink = direct.setlink
nuts.setlist = direct.setlist
nuts.setleader = direct.setleader
nuts.setsubtype = direct.setsubtype
+
+if not direct.is_glyph then
+
+ local getchar = direct.getchar
+ local getid = direct.getid
+ local getfont = direct.getfont
+ local getsubtype = direct.getsubtype
+
+ local glyph_code = nodes.nodecodes.glyph
+
+ function direct.is_glyph(n,f)
+ local id = getid(n)
+ if id == glyph_code then
+ if not f or getfont(n) == f then
+ return getchar(n)
+ else
+ return false
+ end
+ else
+ return nil, id
+ end
+ end
+
+ function direct.is_char(n,f)
+ local id = getid(n)
+ if id == glyph_code then
+ if getsubtype(n) >= 256 then
+ return false
+ elseif not f or getfont(n) == f then
+ return getchar(n)
+ else
+ return false
+ end
+ else
+ return nil, id
+ end
+ end
+
+end
+
nuts.is_char = direct.is_char
+nuts.ischar = direct.is_char
+nuts.is_glyph = direct.is_glyph
+nuts.isglyph = direct.is_glyph
local d_remove_node = direct.remove
local d_free_node = direct.free
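
The is_glyph/is_char fallbacks above are what let the remaining hunks in this commit replace the getid plus getchar pairs with a single isglyph call: for a glyph it yields the character, for anything else nil plus the node id. A minimal sketch of that idiom, assuming a ConTeXt run where nodes.nuts is loaded; head is a hypothetical node list in direct ("nut") form:

    -- count the character glyphs in a nut list with the isglyph idiom
    local nuts    = nodes.nuts
    local isglyph = nuts.isglyph      -- native when the engine provides it, else the fallback above
    local getnext = nuts.getnext

    local function count_glyphs(head)      -- head: hypothetical nut list
        local n       = 0
        local current = head
        while current do
            local char, id = isglyph(current)  -- char for glyphs, nil plus id otherwise
            if char then
                n = n + 1
            end
            current = getnext(current)
        end
        return n
    end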
diff --git a/tex/context/base/mkiv/node-pro.lua b/tex/context/base/mkiv/node-pro.lua
index d6a8e9fe3..36670eed2 100644
--- a/tex/context/base/mkiv/node-pro.lua
+++ b/tex/context/base/mkiv/node-pro.lua
@@ -35,8 +35,7 @@ local actions = tasks.actions("processors")
do
local tonut = nuts.tonut
- local getid = nuts.getid
- local getchar = nuts.getchar
+ local isglyph = nuts.isglyph
local getnext = nuts.getnext
local n = 0
@@ -45,9 +44,9 @@ do
local t, n, h = { }, 0, tonut(head)
while h do
n = n + 1
- local id = getid(h)
- if id == glyph_code then -- todo: disc etc
- t[n] = utfchar(getchar(h))
+ local char, id = isglyph(h)
+ if char then -- todo: disc etc
+ t[n] = utfchar(char)
else
t[n] = "[]"
end
diff --git a/tex/context/base/mkiv/node-rul.lua b/tex/context/base/mkiv/node-rul.lua
index 6d6c5fd02..e0649e99b 100644
--- a/tex/context/base/mkiv/node-rul.lua
+++ b/tex/context/base/mkiv/node-rul.lua
@@ -30,7 +30,6 @@ local getattr = nuts.getattr
local setattr = nuts.setattr
local getfont = nuts.getfont
local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
local getlist = nuts.getlist
local setlist = nuts.setlist
diff --git a/tex/context/base/mkiv/node-shp.lua b/tex/context/base/mkiv/node-shp.lua
index 19b99b12c..4e4c88c03 100644
--- a/tex/context/base/mkiv/node-shp.lua
+++ b/tex/context/base/mkiv/node-shp.lua
@@ -40,7 +40,6 @@ local tonode = nuts.tonode
local free_node = nuts.free
local remove_node = nuts.remove
local traverse_nodes = nuts.traverse
-local find_tail = nuts.tail
local getfield = nuts.getfield
local setfield = nuts.setfield
@@ -48,8 +47,9 @@ local setsetlink = nuts.setlink
local setsetprev = nuts.setprev
local setsetnext = nuts.setnext
local getid = nuts.getid
+local getdisc = nuts.getdisc
+local getboth = nuts.getboth
local getnext = nuts.getnext
-local getprev = nuts.getprev
local getlist = nuts.getlist
local getsubtype = nuts.getsubtype
@@ -78,11 +78,9 @@ local function cleanup_redundant(head) -- better name is: flatten_page
local id = getid(start)
if id == disc_code then
if getsubtype(start) == fulldisc_code then
- local replace = getfield(start,"replace")
+ local _, _, replace, _, _, tail = getdisc(start,true)
if replace then
- local prev = getprev(start)
- local next = getnext(start)
- local tail = find_tail(replace)
+ local prev, next = getboth(start)
setfield(start,"replace",nil)
if start == head then
remove_node(head,start,true)
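
The rewrite above leans on node.direct.getdisc: called plainly it returns the pre, post and replace sublists of a discretionary, and with a second argument of true it also returns their tails, which is what makes the separate find_tail call redundant. A minimal sketch, with d a hypothetical disc node in direct form:

    -- minimal sketch (LuaTeX only): fetch a disc node's sublists plus their tails
    local getdisc = node.direct.getdisc

    local function replacement_of(d)      -- d: hypothetical disc node
        local pre, post, replace, pretail, posttail, replacetail = getdisc(d,true)
        -- the same information the removed getfield/find_tail combination produced
        return replace, replacetail
    end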
diff --git a/tex/context/base/mkiv/node-tra.lua b/tex/context/base/mkiv/node-tra.lua
index e68228fe8..eb1832947 100644
--- a/tex/context/base/mkiv/node-tra.lua
+++ b/tex/context/base/mkiv/node-tra.lua
@@ -46,8 +46,8 @@ local getchar = nuts.getchar
local getsubtype = nuts.getsubtype
local getlist = nuts.getlist
local getdisc = nuts.getdisc
-
local setattr = nuts.setattr
+local isglyph = nuts.isglyph
local flush_list = nuts.flush_list
local count_nodes = nuts.count
@@ -129,9 +129,8 @@ local function tosequence(start,stop,compact)
stop = stop and tonut(stop)
local t = { }
while start do
- local id = getid(start)
- if id == glyph_code then
- local c = getchar(start)
+ local c, id = isglyph(start)
+ if c then
if compact then
local components = getfield(start,"components")
if components then
@@ -293,9 +292,8 @@ nodes.showsimplelist = function(h,depth) showsimplelist(h,depth,0) end
local function listtoutf(h,joiner,textonly,last,nodisc)
local w = { }
while h do
- local id = getid(h)
- if id == glyph_code then -- always true
- local c = getchar(h)
+ local c, id = isglyph(h)
+ if c then
w[#w+1] = c >= 0 and utfchar(c) or formatters["<%i>"](c)
if joiner then
w[#w+1] = joiner
diff --git a/tex/context/base/mkiv/publ-ini.mkiv b/tex/context/base/mkiv/publ-ini.mkiv
index 782f73e0c..cf51b831a 100644
--- a/tex/context/base/mkiv/publ-ini.mkiv
+++ b/tex/context/base/mkiv/publ-ini.mkiv
@@ -1386,8 +1386,8 @@
\unexpanded\def\citation {\doifelsenextoptionalcs\btxlistcitation \btxdirectlistcite}
\unexpanded\def\nocitation{\doifelsenextoptionalcs\btxhiddencitation\btxdirecthiddencite}
-\let\citation \listcitation \let\cite \citation
-\let\nocitation\nocitation \let\nocite\nocitation
+\let\cite \citation
+\let\nocite\nocitation
\unexpanded\def\publ_entry_citation {\doifelsenextoptionalcs\btxlistcitation \btxdirectlistcite}
\unexpanded\def\publ_entry_nocitation{\doifelsenextoptionalcs\btxhiddencitation\btxdirecthiddencite}
diff --git a/tex/context/base/mkiv/scrp-cjk.lua b/tex/context/base/mkiv/scrp-cjk.lua
index 1d8191008..77c58b18a 100644
--- a/tex/context/base/mkiv/scrp-cjk.lua
+++ b/tex/context/base/mkiv/scrp-cjk.lua
@@ -47,6 +47,8 @@ local skipcodes = nodes.skipcodes
local glyph_code = nodecodes.glyph
local glue_code = nodecodes.glue
local userskip_code = skipcodes.userskip
+local spaceskip_code = skipcodes.spaceskip
+local xspaceskip_code = skipcodes.xspaceskip
local a_scriptstatus = attributes.private('scriptstatus')
local a_scriptinjection = attributes.private('scriptinjection')
@@ -954,15 +956,19 @@ local function process(head,first,last)
or pcjk == "half_width_close" or ncjk == "half_width_open" then -- extra compared to korean
previous = "start"
else -- if head ~= first then
- if id == glue_code and getsubtype(first) == userskip_code then -- also scriptstatus check?
- -- for the moment no distinction possible between space and userskip
- local w = getfield(first,"width")
- local s = spacedata[getfont(p)]
- if w == s then -- could be option
- if trace_details then
- trace_detail_between(p,n,"space removed")
+ if id == glue_code then
+ -- also scriptstatus check?
+ local subtype = getsubtype(first)
+ if subtype == userskip_code or subtype == spaceskip_code or subtype == xspaceskip_code then
+ -- for the moment no distinction possible between space and userskip
+ local w = getfield(first,"width")
+ local s = spacedata[getfont(p)]
+ if w == s then -- could be option
+ if trace_details then
+ trace_detail_between(p,n,"space removed")
+ end
+ remove_node(head,first,true)
end
- remove_node(head,first,true)
end
end
previous = pcjk
diff --git a/tex/context/base/mkiv/scrp-eth.lua b/tex/context/base/mkiv/scrp-eth.lua
index 8ecbce522..43cb2ff6a 100644
--- a/tex/context/base/mkiv/scrp-eth.lua
+++ b/tex/context/base/mkiv/scrp-eth.lua
@@ -13,7 +13,6 @@ local nuts = nodes.nuts
local getnext = nuts.getnext
local getfont = nuts.getfont
-local getchar = nuts.getchar
local getid = nuts.getid
local getattr = nuts.getattr
diff --git a/tex/context/base/mkiv/scrp-ini.lua b/tex/context/base/mkiv/scrp-ini.lua
index 3c3517542..c35f99038 100644
--- a/tex/context/base/mkiv/scrp-ini.lua
+++ b/tex/context/base/mkiv/scrp-ini.lua
@@ -57,10 +57,12 @@ local getfont = nuts.getfont
local getid = nuts.getid
local getattr = nuts.getattr
local setattr = nuts.setattr
+local isglyph = nuts.isglyph
local insert_node_after = nuts.insert_after
local first_glyph = nuts.first_glyph
local traverse_id = nuts.traverse_id
+local traverse_char = nuts.traverse_char
local nodepool = nuts.pool
@@ -453,8 +455,8 @@ function scripts.injectors.handler(head)
local last_a, normal_process, lastfont, originals = nil, nil, nil, nil
local done, first, last, ok = false, nil, nil, false
while start do
- local id = getid(start)
- if id == glyph_code then
+ local char, id = isglyph(start)
+ if char then
local a = getattr(start,a_scriptinjection)
if a then
if a ~= last_a then
@@ -478,21 +480,20 @@ function scripts.injectors.handler(head)
end
if normal_process then
-- wrong: originals are indices !
- local f = getfont(start)
- if f ~= lastfont then
- originals = fontdata[f].resources
+ local font = getfont(start)
+ if font ~= lastfont then
+ originals = fontdata[font].resources
if resources then
originals = resources.originals
else
originals = nil -- can't happen
end
- lastfont = f
+ lastfont = font
end
- local c = getchar(start)
if originals and type(originals) == "number" then
- c = originals[c] or c
+ char = originals[char] or char
end
- local h = hash[c]
+ local h = hash[char]
if h then
setattr(start,a_scriptstatus,categorytonumber[h])
if not first then
@@ -697,26 +698,55 @@ end)
local categories = characters.categories or { }
+-- local function hit(root,head)
+-- local current = getnext(head)
+-- local lastrun = false
+-- local lastfinal = false
+-- while current and getid(current) == glyph_code do
+-- local char = getchar(current)
+-- local newroot = root[char]
+-- if newroot then
+-- local final = newroot.final
+-- if final then
+-- lastrun = current
+-- lastfinal = final
+-- end
+-- root = newroot
+-- elseif categories[char] == "mn" then
+-- -- continue
+-- else
+-- return lastrun, lastfinal
+-- end
+-- current = getnext(current)
+-- end
+-- if lastrun then
+-- return lastrun, lastfinal
+-- end
+-- end
+
local function hit(root,head)
local current = getnext(head)
local lastrun = false
local lastfinal = false
- while current and getid(current) == glyph_code do
- local char = getchar(current)
- local newroot = root[char]
- if newroot then
- local final = newroot.final
- if final then
- lastrun = current
- lastfinal = final
+ while current do
+ local char = isglyph(current)
+ if char then
+ local newroot = root[char]
+ if newroot then
+ local final = newroot.final
+ if final then
+ lastrun = current
+ lastfinal = final
+ end
+ root = newroot
+ elseif categories[char] == "mn" then
+ -- continue
+ else
+ return lastrun, lastfinal
end
- root = newroot
- elseif categories[char] == "mn" then
- -- continue
else
- return lastrun, lastfinal
+ break
end
- current = getnext(current)
end
if lastrun then
return lastrun, lastfinal
@@ -746,9 +776,11 @@ function splitters.handler(head) -- todo: also first_glyph test
local last, final = hit(root,current)
if last then
local next = getnext(last)
- if next and getid(next) == glyph_code then
- local nextchar = getchar(next)
- if tree[nextchar] then
+ if next then
+ local nextchar = isglyph(next)
+ if not nextchar then
+ -- we're done
+ elseif tree[nextchar] then
if trace_splitdetail then
if type(final) == "string" then
report_splitting("advance %s processing between <%s> and <%c>","with",final,nextchar)
@@ -886,11 +918,11 @@ setmetatableindex(cache_nop,function(t,k) local v = { } t[k] = v return v end)
-- playing nice
function autofontfeature.handler(head)
- for n in traverse_id(glyph_code,tonut(head)) do
+ for n in traverse_char(tonut(head)) do
-- if getattr(n,a_scriptinjection) then
-- -- already tagged by script feature, maybe some day adapt
-- else
- local char = getchar(n)
+ local char = getchar(n)
local script = otfscripts[char]
if script then
local dynamic = getattr(n,0) or 0
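
The autofontfeature handler above now iterates with traverse_char instead of traverse_id(glyph_code,...), so only character glyphs reach the loop body and no separate id check is needed. A minimal sketch of that iterator, assuming nodes.nuts is loaded; head is a hypothetical nut list:

    -- collect the characters of a nut list with traverse_char
    local nuts          = nodes.nuts
    local traverse_char = nuts.traverse_char
    local getchar       = nuts.getchar

    local function collect_chars(head)    -- head: hypothetical nut list
        local t = { }
        for n in traverse_char(head) do   -- character glyph nodes only
            t[#t+1] = getchar(n)
        end
        return t
    end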
diff --git a/tex/context/base/mkiv/spac-ali.mkiv b/tex/context/base/mkiv/spac-ali.mkiv
index e896b3910..e7d4137a0 100644
--- a/tex/context/base/mkiv/spac-ali.mkiv
+++ b/tex/context/base/mkiv/spac-ali.mkiv
@@ -288,6 +288,7 @@
\newconstant\c_spac_align_state_horizontal
\newconstant\c_spac_align_state_broad
+\newconstant\c_spac_align_state_par_fill
\def\v_spac_align_fill_amount {\plusone fil}
\def\v_spac_align_fill_amount_negative {\minusone fil}
@@ -472,7 +473,8 @@
\spac_align_set_horizontal_centered_last_line
\or
\parfillskip\zeropoint
- \fi}
+ \fi
+ \relax}
% Page spacing:
@@ -542,6 +544,7 @@
\c_spac_align_state_vertical \zerocount
\c_spac_align_state_direction \zerocount % what is default ?
\c_spac_align_state_page \zerocount
+ \c_spac_align_state_par_fill \zerocount
\ifcsname\??aligncommand\m_spac_align_asked\endcsname
\lastnamedcs
\else
@@ -555,6 +558,7 @@
\spac_align_flush_vertical
\spac_align_flush_direction
\spac_align_flush_page
+ \spac_align_flush_parfill
}}% kept, nice for tracing
\edef\raggedcommand {\the\t_spac_align_collected }%
\edef\updateraggedskips{\spac_align_flush_horizontal}%
@@ -631,7 +635,7 @@
\unexpanded\def\installalign#1#2% beware: commands must be unexpandable!
{\ifcsname\??aligncommand#1\endcsname \else
- \setvalue{\??aligncommand#1}{\t_spac_align_collected\expandafter{\the\t_spac_align_collected#2}}%
+ \setvalue{\??aligncommand#1}{\toksapp\t_spac_align_collected{#2}}%
\fi}
% beware, toks stuff and states are set at a differt time, so installalign is
@@ -639,7 +643,7 @@
%
% \setvalue{\??aligncommand whatever}%
% {\c_spac_align_state_horizontal\plushundred
-% \t_spac_align_collected\expandafter{\the\t_spac_align_collected .....}}
+% \toksapp\t_spac_align_collected{.....}}
%
% this one could deal with both
%
@@ -648,7 +652,7 @@
% \setvalue{\??aligncommand#1}%
% {\spac_align_set_horizontal_none
% \c_spac_align_state_horizontal\plushundred % don't set
-% \t_spac_align_collected\expandafter{\the\t_spac_align_collected#2}}%
+% \toksapp\t_spac_align_collected{#2}}%
% \fi}
%
% \installalignoption
@@ -665,7 +669,7 @@
\setvalue{\??aligncommand\v!height }{\c_spac_align_state_page \plustwo }
\setvalue{\??aligncommand\v!line }{\c_spac_align_state_page \plusthree
% this will become another keyword (undocumented anyway)
- \t_spac_align_collected\expandafter{\the\t_spac_align_collected\settrue\raggedonelinerstate}}
+ \toksapp\t_spac_align_collected{\settrue\raggedonelinerstate}}
\setvalue{\??aligncommand\v!high }{\c_spac_align_state_vertical \plusthree}
\setvalue{\??aligncommand\v!low }{\c_spac_align_state_vertical \plustwo }
@@ -702,23 +706,42 @@
\c_spac_align_state_broad \plusone
\c_spac_align_state_horizontal\plustwo }
-\setvalue{\??aligncommand\v!lesshyphenation}{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\lesshyphens}}
-\setvalue{\??aligncommand\v!morehyphenation}{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\morehyphens}}
-
-\setvalue{\??aligncommand\v!hanging }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\font_protruding_enable }}
-\setvalue{\??aligncommand\v!nothanging }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\font_protruding_disable}}
-\setvalue{\??aligncommand\v!hz }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\font_expansion_enable }}
-\setvalue{\??aligncommand\v!fullhz }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\font_expansion_enable_k}}
-\setvalue{\??aligncommand\v!nohz }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\font_expansion_disable }}
-%setvalue{\??aligncommand\v!spacing }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\normalspacing\zerocount}} % not yet
-%setvalue{\??aligncommand\v!nospacing }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\normalspacing\plusone}} % not yet
-\setvalue{\??aligncommand\v!hyphenated }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\dohyphens}}
-\setvalue{\??aligncommand\v!nothyphenated }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\nohyphens}}
-
-\setvalue{\??aligncommand\v!tolerant }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\spac_align_set_tolerant}}
-\setvalue{\??aligncommand\v!verytolerant }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\spac_align_set_very_tolerant}}
-\setvalue{\??aligncommand\v!stretch }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\spac_align_set_stretch}}
-\setvalue{\??aligncommand\v!extremestretch }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\spac_align_set_extreme_stretch}}
+\setvalue{\??aligncommand\v!lesshyphenation}{\toksapp\t_spac_align_collected{\lesshyphens}}
+\setvalue{\??aligncommand\v!morehyphenation}{\toksapp\t_spac_align_collected{\morehyphens}}
+
+\setvalue{\??aligncommand\v!hanging }{\toksapp\t_spac_align_collected{\font_protruding_enable }}
+\setvalue{\??aligncommand\v!nothanging }{\toksapp\t_spac_align_collected{\font_protruding_disable}}
+\setvalue{\??aligncommand\v!hz }{\toksapp\t_spac_align_collected{\font_expansion_enable }}
+\setvalue{\??aligncommand\v!fullhz }{\toksapp\t_spac_align_collected{\font_expansion_enable_k}}
+\setvalue{\??aligncommand\v!nohz }{\toksapp\t_spac_align_collected{\font_expansion_disable }}
+%setvalue{\??aligncommand\v!spacing }{\toksapp\t_spac_align_collected{\normalspacing\zerocount}} % not yet
+%setvalue{\??aligncommand\v!nospacing }{\toksapp\t_spac_align_collected{\normalspacing\plusone}} % not yet
+\setvalue{\??aligncommand\v!hyphenated }{\toksapp\t_spac_align_collected{\dohyphens}}
+\setvalue{\??aligncommand\v!nothyphenated }{\toksapp\t_spac_align_collected{\nohyphens}}
+
+\setvalue{\??aligncommand\v!tolerant }{\toksapp\t_spac_align_collected{\spac_align_set_tolerant}}
+\setvalue{\??aligncommand\v!verytolerant }{\toksapp\t_spac_align_collected{\spac_align_set_very_tolerant}}
+\setvalue{\??aligncommand\v!stretch }{\toksapp\t_spac_align_collected{\spac_align_set_stretch}}
+\setvalue{\??aligncommand\v!extremestretch }{\toksapp\t_spac_align_collected{\spac_align_set_extreme_stretch}}
+
+\setvalue{\??aligncommand \v!final}{\c_spac_align_state_par_fill\plusone}
+\setvalue{\??aligncommand2*\v!final}{\c_spac_align_state_par_fill\plustwo} % hardcoded multiplier
+\setvalue{\??aligncommand3*\v!final}{\c_spac_align_state_par_fill\plusthree}
+\setvalue{\??aligncommand4*\v!final}{\c_spac_align_state_par_fill\plusfour}
+
+\definehspace [\v!final] [\emspaceamount]
+
+\def\spac_align_flush_parfill
+ {\ifcase\c_spac_align_state_par_fill\else
+ \spac_align_flush_parfill_indeed{\number\c_spac_align_state_par_fill}%
+ \fi}
+
+\unexpanded\def\spac_align_flush_parfill_indeed#1%
+ {\parfillskip
+ #1\directhspaceamount\v!final
+ % plus \dimexpr\availablehsize-#1\directhspaceamount\v!final\relax
+ plus 1fill
+ \relax}
%D For Wolfgang:
diff --git a/tex/context/base/mkiv/spac-chr.lua b/tex/context/base/mkiv/spac-chr.lua
index 3eb348586..97b32c366 100644
--- a/tex/context/base/mkiv/spac-chr.lua
+++ b/tex/context/base/mkiv/spac-chr.lua
@@ -34,12 +34,12 @@ local getfield = nuts.getfield
local setfield = nuts.setfield
local getnext = nuts.getnext
local getprev = nuts.getprev
-local getid = nuts.getid
local getattr = nuts.getattr
local setattr = nuts.setattr
local getfont = nuts.getfont
local getchar = nuts.getchar
local setsubtype = nuts.setsubtype
+local isglyph = nuts.isglyph
local setcolor = nodes.tracers.colors.set
@@ -181,32 +181,42 @@ local methods = {
[0x001F] = function(head,current)
local next = getnext(current)
- if next and getid(next) == glyph_code then
- local char = getchar(next)
- head, current = remove_node(head,current,true)
- if not is_punctuation[char] then
- local p = fontparameters[getfont(next)]
- head, current = insert_node_before(head,current,new_glue(p.space,p.space_stretch,p.space_shrink))
+ if next then
+ local char = isglyph(next)
+ if char then
+ head, current = remove_node(head,current,true)
+ if not is_punctuation[char] then
+ local p = fontparameters[getfont(next)]
+ head, current = insert_node_before(head,current,new_glue(p.space,p.space_stretch,p.space_shrink))
+ end
end
end
end,
[0x00A0] = function(head,current) -- nbsp
local next = getnext(current)
- if next and getid(next) == glyph_code then
- local char = getchar(next)
- if char == 0x200C or char == 0x200D then -- nzwj zwj
+ if next then
+ local char = isglyph(next)
+ if not char then
+ -- move on
+ elseif char == 0x200C or char == 0x200D then -- nzwj zwj
next = getnext(next)
- if next and nbsphash[getchar(next)] then
- return false
+ if next then
+ char = isglyph(next)
+ if char and nbsphash[char] then
+ return false
+ end
end
elseif nbsphash[char] then
return false
end
end
local prev = getprev(current)
- if prev and getid(prev) == glyph_code and nbsphash[getchar(prev)] then
- return false
+ if prev then
+ local char = isglyph(prev)
+ if char and nbsphash[char] then
+ return false
+ end
end
return nbsp(head,current)
end,
@@ -280,10 +290,9 @@ function characters.handler(head) -- todo: use traverse_id
local current = head
local done = false
while current do
- local id = getid(current)
- if id == glyph_code then
- local next = getnext(current)
- local char = getchar(current)
+ local char, id = isglyph(current)
+ if char then
+ local next = getnext(current)
local method = methods[char]
if method then
if trace_characters then
diff --git a/tex/context/base/mkiv/spac-hor.mkiv b/tex/context/base/mkiv/spac-hor.mkiv
index 1ccd5ac9f..39ebcfa26 100644
--- a/tex/context/base/mkiv/spac-hor.mkiv
+++ b/tex/context/base/mkiv/spac-hor.mkiv
@@ -972,9 +972,10 @@
\fi}
\def\hspaceamount#1#2%
- {\csname\??hspace
- \ifcsname\??hspace#1:#2\endcsname#1:#2\else:\v!none\fi
- \endcsname}
+ {\dimexpr\ifcsname\??hspace#1:#2\endcsname\lastnamedcs\else\zeropoint\fi\relax}
+
+\def\directhspaceamount#1%
+ {\dimexpr\ifcsname\??hspace :#1\endcsname\lastnamedcs\else\zeropoint\fi\relax}
% no installhspace here (this is already an old command)
diff --git a/tex/context/base/mkiv/spac-ver.lua b/tex/context/base/mkiv/spac-ver.lua
index 3095bbbd7..5d040db6f 100644
--- a/tex/context/base/mkiv/spac-ver.lua
+++ b/tex/context/base/mkiv/spac-ver.lua
@@ -1306,7 +1306,7 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
end
if glue_data then
if force_glue then
- if trace then trace_done("flushed due to " .. why,glue_data) end
+ if trace then trace_done("flushed due to forced " .. why,glue_data) end
head = forced_skip(head,current,getfield(glue_data,"width") or 0,"before",trace)
free_node(glue_data)
else
@@ -1320,9 +1320,12 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
-- end
local w = getfield(glue_data,"width")
if w ~= 0 then
- if trace then trace_done("flushed due to " .. why,glue_data) end
+ if trace then trace_done("flushed due to non zero " .. why,glue_data) end
head = insert_node_before(head,current,glue_data)
- else -- i really need to clean this up
+ elseif getfield(glue_data,"stretch") ~= 0 or getfield(glue_data,"shrink") ~= 0 then
+ if trace then trace_done("flushed due to stretch/shrink in" .. why,glue_data) end
+ head = insert_node_before(head,current,glue_data)
+ else
-- report_vspacing("needs checking (%s): %p",skipcodes[getsubtype(glue_data)],w)
free_node(glue_data)
end
diff --git a/tex/context/base/mkiv/spac-ver.mkiv b/tex/context/base/mkiv/spac-ver.mkiv
index f2ddb16ea..4fe1012e9 100644
--- a/tex/context/base/mkiv/spac-ver.mkiv
+++ b/tex/context/base/mkiv/spac-ver.mkiv
@@ -1139,6 +1139,12 @@
% when enabled, sigstruts will remove themselves if nothing
% goes inbetween
+%D For practical reasons we define some boundary characters here.
+
+\unexpanded\def\leftboundary {\boundary\plusone}
+\unexpanded\def\rightboundary {\boundary\plustwo}
+\unexpanded\def\signalcharacter{\boundary\plusone\char\zerocount\boundary\plustwo} % not the same as strut signals
+
\newsignal\strutsignal \setfalse\sigstruts
\unexpanded\def\begstrut
@@ -1162,9 +1168,17 @@
\hskip-\strutsignal
\hskip\strutsignal}
+% \def\spac_struts_beg_normal
+% {\strut
+% \penalty\plustenthousand
+% \hskip\zeropoint}
+
\def\spac_struts_beg_normal
- {\strut
+ {\boundary\plusone
+ \strut
+ %\boundary\plusone
\penalty\plustenthousand
+ %\boundary\plusone
\hskip\zeropoint}
\unexpanded\def\endstrut
@@ -1195,11 +1209,20 @@
\strut
\fi}
+% \def\spac_struts_end_normal
+% {\removeunwantedspaces
+% \penalty\plustenthousand
+% \hskip\zeropoint
+% \strut}
+
\def\spac_struts_end_normal
{\removeunwantedspaces
\penalty\plustenthousand
+ %\boundary\plustwo
\hskip\zeropoint
- \strut}
+ %\boundary\plustwo
+ \strut
+ \boundary\plustwo}
% unsave:
%
diff --git a/tex/context/base/mkiv/status-files.pdf b/tex/context/base/mkiv/status-files.pdf
index d83845a80..941bdeb48 100644
--- a/tex/context/base/mkiv/status-files.pdf
+++ b/tex/context/base/mkiv/status-files.pdf
Binary files differ
diff --git a/tex/context/base/mkiv/status-lua.pdf b/tex/context/base/mkiv/status-lua.pdf
index 59447fcec..a70cf80b6 100644
--- a/tex/context/base/mkiv/status-lua.pdf
+++ b/tex/context/base/mkiv/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/mkiv/strc-itm.mkvi b/tex/context/base/mkiv/strc-itm.mkvi
index e43aeae45..506aec940 100644
--- a/tex/context/base/mkiv/strc-itm.mkvi
+++ b/tex/context/base/mkiv/strc-itm.mkvi
@@ -844,7 +844,7 @@
\ifinsidecolumns\else\ifcase\c_strc_itemgroups_column_depth
\global\c_strc_itemgroups_column_depth\c_strc_itemgroups_nesting % global ?
\strc_itemgroups_before_command
- %\strc_itemgroups_tag_start_group
+ \strc_itemgroups_tag_start_group
\strc_itemgroups_start_columns
\fi\fi
\fi
diff --git a/tex/context/base/mkiv/strc-lst.mkvi b/tex/context/base/mkiv/strc-lst.mkvi
index 19757aa61..e7ed0f1f2 100644
--- a/tex/context/base/mkiv/strc-lst.mkvi
+++ b/tex/context/base/mkiv/strc-lst.mkvi
@@ -116,6 +116,22 @@
% userdata : location=none
% simple : location=here
+\installcorenamespace {listlocations}
+
+\unexpanded\def\doifelseinlistentry#1%
+ {\ifcsname\??listlocations#1\endcsname
+ \ifnum\lastnamedcs=\structurelistlocation\relax
+ \doubleexpandafter\firstoftwoarguments
+ \else
+ \doubleexpandafter\secondoftwoarguments
+ \fi
+ \else
+ \expandafter\secondoftwoarguments
+ \fi}
+
+\unexpanded\def\doifelseincurrentlistentry
+ {\doifelseinlistentry\currentlist}
+
\unexpanded\def\structurelistinject
{\dotripleempty\strc_lists_inject}
@@ -150,6 +166,7 @@
userdata {\detokenize\expandafter{\normalexpanded{#userdata}}}
\relax
\edef\currentlistnumber{\the\scratchcounter}%
+\setxvalue{\??listlocations\currentlist}{\nextinternalreference}%
\ifx\p_location\v!here
% this branch injects nodes !
\strc_lists_inject_enhance{\currentlistnumber}{\nextinternalreference}%
@@ -381,8 +398,8 @@
\def\rawstructurelistuservariable#name%
{\clf_listuserdata{\currentlist}\currentlistindex{#name}}
-\unexpanded\def\structurelistfirst {\structurelistuservariable\s!first } % s!
-\unexpanded\def\structurelistsecond{\structurelistuservariable\s!second} % s!
+\unexpanded\def\structurelistfirst {\structurelistuservariable\s!first } % s!
+\unexpanded\def\structurelistsecond {\structurelistuservariable\s!second} % s!
\def\rawstructurelistfirst {\rawstructurelistuservariable\s!first } % s! % was \unexpanded
\def\rawstructurelistsecond{\rawstructurelistuservariable\s!second} % s! % was \unexpanded
@@ -718,10 +735,10 @@
%
% \installlistfiller\v!space
% {\hskip.25\emwidth\relax}
-%
-% \setuplistalternative
-% [\c!command=\strictlistparameter\c!command,
-% \c!symbol=.]
+
+\setuplistalternative
+ [\c!command=\strictlistparameter\c!command,
+ \c!symbol=.]
\unexpanded\def\currentlistfiller
{\checkedfiller{\listalternativeparameter\c!filler}}
@@ -918,12 +935,16 @@
\unexpanded\def\strc_lists_apply_renderingsetup
{\the\t_lists_every_renderingsetup
+ % now we group
+ \begingroup
\ifx\m_strc_list_alternative\empty
\edef\currentlistalternative{\listparameter\c!alternative}%
\else
\let\currentlistalternative\m_strc_list_alternative
\fi
\directsetup{\listalternativeparameter\c!renderingsetup}\relax
+ \endgroup
+ % till here, where we reset locals
\the\t_lists_every_renderingcleanup}
% todo: provide packager via attributes
diff --git a/tex/context/base/mkiv/strc-not.mkvi b/tex/context/base/mkiv/strc-not.mkvi
index e1107ff99..20bff7683 100644
--- a/tex/context/base/mkiv/strc-not.mkvi
+++ b/tex/context/base/mkiv/strc-not.mkvi
@@ -303,6 +303,7 @@
\startsetups[\??constructionrenderings:\v!note]
\noindent
+ \leftboundary % experiment, to be done in more places
\llap{\box\constructionheadbox\hskip\constructionparameter\c!distance}%
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
diff --git a/tex/context/base/mkiv/syst-aux.lua b/tex/context/base/mkiv/syst-aux.lua
index 0e7b2c2b6..98b92cef3 100644
--- a/tex/context/base/mkiv/syst-aux.lua
+++ b/tex/context/base/mkiv/syst-aux.lua
@@ -438,39 +438,77 @@ local function doifnotinset(a,b)
firstofoneargument()
end
-interfaces.implement {
+implement {
name = "doifelsecommon",
actions = doifelsecommon,
arguments = { "string", "string" },
}
-interfaces.implement {
+implement {
name = "doifcommon",
actions = doifcommon,
arguments = { "string", "string" },
}
-interfaces.implement {
+implement {
name = "doifnotcommon",
actions = doifnotcommon,
arguments = { "string", "string" },
}
-interfaces.implement {
+implement {
name = "doifelseinset",
actions = doifelseinset,
arguments = { "string", "string" },
}
-interfaces.implement {
+implement {
name = "doifinset",
actions = doifinset,
arguments = { "string", "string" },
}
-interfaces.implement {
+implement {
name = "doifnotinset",
actions = doifnotinset,
arguments = { "string", "string" },
}
+-- implement {
+-- name = "stringcompare",
+-- arguments = { "string", "string" },
+-- actions = function(a,b)
+-- context((a == b and 0) or (a > b and 1) or -1)
+-- end
+-- }
+--
+-- implement {
+-- name = "doifelsestringafter",
+-- arguments = { "string", "string" },
+-- actions = function(a,b)
+-- ctx_doifelse((a == b and 0) or (a > b and 1) or -1)
+-- end
+-- }
+--
+-- implement {
+-- name = "doifelsestringbefore",
+-- arguments = { "string", "string" },
+-- actions = function(a,b)
+-- ctx_doifelse((a == b and 0) or (a < b and -1) or 1)
+-- end
+-- }
+
+-- implement { -- not faster than addtocommalist
+-- name = "additemtolist", -- unique
+-- arguments = { "string", "string" },
+-- actions = function(l,s)
+-- if l == "" or s == l then
+-- -- s = s
+-- elseif find("," .. l .. ",","," .. s .. ",") then
+-- s = l
+-- else
+-- s = l .. "," .. s
+-- end
+-- context(s)
+-- end
+-- }
diff --git a/tex/context/base/mkiv/syst-aux.mkiv b/tex/context/base/mkiv/syst-aux.mkiv
index 8f8ba2024..825d18636 100644
--- a/tex/context/base/mkiv/syst-aux.mkiv
+++ b/tex/context/base/mkiv/syst-aux.mkiv
@@ -7148,7 +7148,7 @@
{\csname#1\ifcsname#1#2\endcsname#2\else#3\fi\endcsname}
%D Signal. Some fonts have a char0 rendering so we need to make sure that it
-%D is not set in the font!
+%D is not set in the font! (This will be overloaded.)
\unexpanded\def\signalcharacter{\char\zerocount} % \zwj
@@ -7357,6 +7357,15 @@
%def\ntimes#1#2{\ifnum#2>\zerocount#1\ntimes{#1}{\numexpr#2-\plusone\relax}\fi} % 1.72
\def\ntimes#1#2{\clf_ntimes{#1}\numexpr#2\relax} % 0.33
+%D Not that useful:
+
+% \unexpanded\def\doifelsestringbefore{\clf_doifelsestringbefore}
+% \unexpanded\def\doifelsestringafter {\clf_doifelsestringafter}
+
+% Could have been useful, but it is not faster than addtocommalist and it is also an expansion mess:
+%
+% \def\additemtolist#1#2{\normalexpanded{\noexpand\clf_additemtolist{#1}{#2}}}
+
\protect \endinput
% \edef\choicetokenyes{+}
diff --git a/tex/context/base/mkiv/syst-ini.mkiv b/tex/context/base/mkiv/syst-ini.mkiv
index e3053adfb..fa8fd1162 100644
--- a/tex/context/base/mkiv/syst-ini.mkiv
+++ b/tex/context/base/mkiv/syst-ini.mkiv
@@ -957,30 +957,45 @@
% module after which the official interfaces have to be used. This is needed for
% modules not made by ctx developers.
-\normalprotected\def\pdfliteral {\pdfextension literal }
-\normalprotected\def\pdfcolorstack {\pdfextension colorstack }
-\normalprotected\def\pdfsetmatrix {\pdfextension setmatrix }
-\normalprotected\def\pdfsave {\pdfextension save\relax}
-\normalprotected\def\pdfrestore {\pdfextension restore\relax}
-\normalprotected\def\pdfobj {\pdfextension obj }
-\normalprotected\def\pdfrefobj {\pdfextension refobj }
-\normalprotected\def\pdfannot {\pdfextension annot }
-\normalprotected\def\pdfstartlink {\pdfextension startlink }
-\normalprotected\def\pdfendlink {\pdfextension endlink\relax}
-\normalprotected\def\pdfoutline {\pdfextension outline }
-\normalprotected\def\pdfdest {\pdfextension dest }
-\normalprotected\def\pdfthread {\pdfextension thread }
-\normalprotected\def\pdfstartthread {\pdfextension startthread }
-\normalprotected\def\pdfendthread {\pdfextension endthread\relax}
-\normalprotected\def\pdfinfo {\pdfextension info }
-\normalprotected\def\pdfcatalog {\pdfextension catalog }
-\normalprotected\def\pdfnames {\pdfextension names }
-\normalprotected\def\pdfincludechars {\pdfextension includechars }
-\normalprotected\def\pdffontattr {\pdfextension fontattr }
-\normalprotected\def\pdfmapfile {\pdfextension mapfile }
-\normalprotected\def\pdfmapline {\pdfextension mapline }
-\normalprotected\def\pdftrailer {\pdfextension trailer }
-\normalprotected\def\pdfglyphtounicode {\pdfextension glyphtounicode }
+\normalprotected\def\pdfliteral {\pdfextension literal }
+\normalprotected\def\pdfcolorstack {\pdfextension colorstack }
+\normalprotected\def\pdfsetmatrix {\pdfextension setmatrix }
+\normalprotected\def\pdfsave {\pdfextension save\relax}
+\normalprotected\def\pdfrestore {\pdfextension restore\relax}
+\normalprotected\def\pdfobj {\pdfextension obj }
+\normalprotected\def\pdfrefobj {\pdfextension refobj }
+\normalprotected\def\pdfannot {\pdfextension annot }
+\normalprotected\def\pdfstartlink {\pdfextension startlink }
+\normalprotected\def\pdfendlink {\pdfextension endlink\relax}
+\normalprotected\def\pdfoutline {\pdfextension outline }
+\normalprotected\def\pdfdest {\pdfextension dest }
+\normalprotected\def\pdfthread {\pdfextension thread }
+\normalprotected\def\pdfstartthread {\pdfextension startthread }
+\normalprotected\def\pdfendthread {\pdfextension endthread\relax}
+\normalprotected\def\pdfinfo {\pdfextension info }
+\normalprotected\def\pdfcatalog {\pdfextension catalog }
+\normalprotected\def\pdfnames {\pdfextension names }
+\normalprotected\def\pdfincludechars {\pdfextension includechars }
+\normalprotected\def\pdffontattr {\pdfextension fontattr }
+\normalprotected\def\pdfmapfile {\pdfextension mapfile }
+\normalprotected\def\pdfmapline {\pdfextension mapline }
+\normalprotected\def\pdftrailer {\pdfextension trailer }
+\normalprotected\def\pdfglyphtounicode {\pdfextension glyphtounicode }
+\normalprotected\gdef\pdfsuppressoptionalinfo {\pdfextension suppressoptionalinfo }
+\global \let\pdfsuppressptexinfo \pdfsuppressoptionalinfo
+\normalprotected\gdef\pdfinfoid {\pdfextension infoid }
+\normalprotected\gdef\pdfinfoomitdate {\pdfextension suppressoptionalinfo \numexpr32+64\relax}
+
+% \chardef\pdfnofullbanner = 1
+% \chardef\pdfnofilename = 2
+% \chardef\pdfnopagenumber = 4
+% \chardef\pdfnoinfodict = 8
+% \chardef\pdfnocreator = 16
+% \chardef\pdfnocreationdate = 32
+% \chardef\pdfnomoddate = 64
+% \chardef\pdfnoproducer = 128
+% \chardef\pdfnotrapped = 256
+% \chardef\pdfnoid = 512
\def\pdftexversion {\numexpr\pdffeedback version}
\def\pdftexrevision {\pdffeedback revision}
diff --git a/tex/context/base/mkiv/tabl-tbl.mkiv b/tex/context/base/mkiv/tabl-tbl.mkiv
index 23d8002d6..58e6b7108 100644
--- a/tex/context/base/mkiv/tabl-tbl.mkiv
+++ b/tex/context/base/mkiv/tabl-tbl.mkiv
@@ -1650,8 +1650,6 @@
\global\c_tabl_tabulate_max_colorcolumn\c_tabl_tabulate_column
\fi
\setxvalue{\??tabulatecolor\the\c_tabl_tabulate_column}{#1}%
- %\attribute\alignbackgroundattribute\plusone
- %\dousecolorparameter{#1}\char\zerocount\strut % hack
\hbox \thealignbackgroundcolorattr{#1}{}% pack ?
\endgroup}
diff --git a/tex/context/base/mkiv/tabl-xtb.mkvi b/tex/context/base/mkiv/tabl-xtb.mkvi
index 06a5318e1..851b6e80f 100644
--- a/tex/context/base/mkiv/tabl-xtb.mkvi
+++ b/tex/context/base/mkiv/tabl-xtb.mkvi
@@ -86,7 +86,7 @@
\numexpr\tablecellcolumns\relax
\numexpr\raggedstatus\relax}%
\def\dotagxtablesignal
- {\char\zerocount}% not used
+ {\signalcharacter}% not used
\to \everyenableelements
\newdimen\d_tabl_x_width
diff --git a/tex/context/base/mkiv/trac-vis.lua b/tex/context/base/mkiv/trac-vis.lua
index f77a10364..061cef8ba 100644
--- a/tex/context/base/mkiv/trac-vis.lua
+++ b/tex/context/base/mkiv/trac-vis.lua
@@ -293,6 +293,10 @@ function nuts.setvisual(n,mode)
setattr(n,a_visual,setvisual(mode,getattr(n,a_visual),true))
end
+function nuts.copyvisual(n,m)
+ setattr(n,a_visual,getattr(m,a_visual))
+end
+
function visualizers.setvisual(n)
texsetattribute(a_visual,setvisual(n,texgetattribute(a_visual)))
end
@@ -309,6 +313,10 @@ for mode, value in next, modes do
trackers.register(formatters["visualizers.%s"](mode), function(v) set(mode,v) end)
end
+local raisepenalties = false
+
+directives.register("visualizers.raisepenalties",function(v) raisepenalties = v end)
+
local fraction = 10
trackers .register("visualizers.reset", function(v) set("reset", v) end)
@@ -828,6 +836,8 @@ local function ruledpenalty(head,current,vertical)
info = copy_list(info)
if vertical then
info = vpack_nodes(info)
+ elseif raisepenalties then
+ setfield(info,"shift",-65536*4)
end
head, current = insert_node_before(head,current,info)
return head, getnext(current)
@@ -973,21 +983,21 @@ local function visualize(head,vertical,forced,parent)
return head
end
-local function freed(cache)
- local n = 0
- for k, v in next, cache do
- free_node_list(v)
- n = n + 1
- end
- if n == 0 then
- return 0, cache
- else
- return n, { }
- end
-end
-
do
+ local function freed(cache)
+ local n = 0
+ for k, v in next, cache do
+ free_node_list(v)
+ n = n + 1
+ end
+ if n == 0 then
+ return 0, cache
+ else
+ return n, { }
+ end
+ end
+
local function cleanup()
local hf, nw, nb, ng_v, ng_h, np_v, np_h, nk_v, nk_h
nf, f_cache = freed(f_cache)
diff --git a/tex/context/base/mkiv/typo-brk.lua b/tex/context/base/mkiv/typo-brk.lua
index 2f97f1869..9bccf91ea 100644
--- a/tex/context/base/mkiv/typo-brk.lua
+++ b/tex/context/base/mkiv/typo-brk.lua
@@ -29,11 +29,11 @@ local getnext = nuts.getnext
local getprev = nuts.getprev
local getboth = nuts.getboth
local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
local getfont = nuts.getfont
local getid = nuts.getid
local getfield = nuts.getfield
local getattr = nuts.getattr
+local isglyph = nuts.isglyph
local setfield = nuts.setfield
local setattr = nuts.setattr
@@ -209,8 +209,8 @@ function breakpoints.handler(head)
local map = nil
local current = nead
while current do
- local id = getid(current)
- if id == glyph_code then
+ local char, id = isglyph(current)
+ if char then
local a = getattr(current,a_breakpoints)
if a and a > 0 then
if a ~= attr then
@@ -223,7 +223,7 @@ function breakpoints.handler(head)
attr = a
end
if map then
- local cmap = map[getchar(current)]
+ local cmap = map[char]
if cmap then
-- for now we collect but when found ok we can move the handler here
-- although it saves nothing in terms of performance
@@ -291,9 +291,9 @@ function breakpoints.handler(head)
local cright = 0
local next = getnext(current)
while next and nright ~= cright do
- local id = getid(next)
- if id == glyph_code then
- if cright == 1 and cmap[getchar(next)] then
+ local char, id = isglyph(next)
+ if char then
+ if cright == 1 and cmap[char] then
-- let's not make it too messy
break
end
diff --git a/tex/context/base/mkiv/typo-cap.lua b/tex/context/base/mkiv/typo-cap.lua
index 0eae3b2d9..ec6326042 100644
--- a/tex/context/base/mkiv/typo-cap.lua
+++ b/tex/context/base/mkiv/typo-cap.lua
@@ -51,7 +51,6 @@ local disc_code = nodecodes.disc
local math_code = nodecodes.math
local kerning_code = kerncodes.kerning
-local userskip_code = skipcodes.userskip
local tasks = nodes.tasks
diff --git a/tex/context/base/mkiv/typo-dha.lua b/tex/context/base/mkiv/typo-dha.lua
index 30984a26c..b1ce6ffbf 100644
--- a/tex/context/base/mkiv/typo-dha.lua
+++ b/tex/context/base/mkiv/typo-dha.lua
@@ -53,13 +53,13 @@ local nutstring = nuts.tostring
local getnext = nuts.getnext
local getprev = nuts.getprev
local getfont = nuts.getfont
-local getchar = nuts.getchar
local getid = nuts.getid
local getsubtype = nuts.getsubtype
local getlist = nuts.getlist
local getfield = nuts.getfield
local getattr = nuts.getattr
local getprop = nuts.getprop
+local isglyph = nuts.isglyph -- or ischar
local setfield = nuts.setfield
local setprop = nuts.setprop
@@ -124,9 +124,8 @@ end
local function nextisright(current)
current = getnext(current)
- local id = getid(current)
+ local character, id = isglyph(current)
if id == glyph_code then
- local character = getchar(current)
local direction = chardirections[character]
return direction == "r" or direction == "al" or direction == "an"
end
@@ -134,9 +133,8 @@ end
local function previsright(current)
current = getprev(current)
- local id = getid(current)
+ local character, id = isglyph(current)
if id == glyph_code then
- local character = getchar(current)
local direction = chardirections[character]
return direction == "r" or direction == "al" or direction == "an"
end
@@ -161,7 +159,7 @@ local function process(start)
local fences = { }
while current do
- local id = getid(current)
+ local character, id = isglyph(current)
local next = getnext(current)
if id == math_code then
current = getnext(end_of_math(next))
@@ -179,9 +177,8 @@ local function process(start)
prevattr = attr
end
end
- if id == glyph_code then
+ if character then
if attr and attr > 0 then
- local character = getchar(current)
if character == 0 then
-- skip signals
setprop(current,"direction",true)
diff --git a/tex/context/base/mkiv/typo-drp.lua b/tex/context/base/mkiv/typo-drp.lua
index 9d6d645d0..bddcc008e 100644
--- a/tex/context/base/mkiv/typo-drp.lua
+++ b/tex/context/base/mkiv/typo-drp.lua
@@ -9,6 +9,8 @@ if not modules then modules = { } end modules ['typo-drp'] = {
-- This ons is sensitive for order (e.g. when combined with first line
-- processing.
+-- todo: use isglyph
+
local tonumber, type, next = tonumber, type, next
local ceil = math.ceil
local settings_to_hash = utilities.parsers.settings_to_hash
diff --git a/tex/context/base/mkiv/typo-dua.lua b/tex/context/base/mkiv/typo-dua.lua
index bbb071f55..867d7f04c 100644
--- a/tex/context/base/mkiv/typo-dua.lua
+++ b/tex/context/base/mkiv/typo-dua.lua
@@ -72,12 +72,12 @@ local tonode = nuts.tonode
local nutstring = nuts.tostring
local getnext = nuts.getnext
-local getchar = nuts.getchar
local getid = nuts.getid
local getsubtype = nuts.getsubtype
local getlist = nuts.getlist
local getfield = nuts.getfield
local getprop = nuts.getprop
+local isglyph = nuts.isglyph -- or ischar
local setfield = nuts.setfield
local setprop = nuts.setprop
@@ -205,7 +205,7 @@ local function build_list(head) -- todo: store node pointer ... saves loop
local size = 0
while current do
size = size + 1
- local id = getid(current)
+ local chr, id = isglyph(current)
if getprop(current,"directions") then
local skip = 0
local last = id
@@ -226,7 +226,6 @@ local function build_list(head) -- todo: store node pointer ... saves loop
list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, skip = skip, id = id, last = last }
end
elseif id == glyph_code then
- local chr = getchar(current)
local dir = directiondata[chr]
list[size] = { char = chr, direction = dir, original = dir, level = 0 }
current = getnext(current)
diff --git a/tex/context/base/mkiv/typo-dub.lua b/tex/context/base/mkiv/typo-dub.lua
index 2ee85ad7d..e6a092568 100644
--- a/tex/context/base/mkiv/typo-dub.lua
+++ b/tex/context/base/mkiv/typo-dub.lua
@@ -60,13 +60,13 @@ local tonode = nuts.tonode
local nutstring = nuts.tostring
local getnext = nuts.getnext
-local getchar = nuts.getchar
local getid = nuts.getid
local getsubtype = nuts.getsubtype
local getlist = nuts.getlist
local getattr = nuts.getattr
local getfield = nuts.getfield
local getprop = nuts.getprop
+local isglyph = nuts.isglyph -- or ischar
local setfield = nuts.setfield
local setprop = nuts.setprop
@@ -260,7 +260,7 @@ local function build_list(head) -- todo: store node pointer ... saves loop
local size = 0
while current do
size = size + 1
- local id = getid(current)
+ local chr, id = isglyph(current)
if getprop(current,"directions") then
local skip = 0
local last = id
@@ -281,7 +281,6 @@ local function build_list(head) -- todo: store node pointer ... saves loop
list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, skip = skip, id = id, last = last }
end
elseif id == glyph_code then
- local chr = getchar(current)
local dir = directiondata[chr]
list[size] = { char = chr, direction = dir, original = dir, level = 0 }
current = getnext(current)
diff --git a/tex/context/base/mkiv/typo-itc.lua b/tex/context/base/mkiv/typo-itc.lua
index 62fea3e2e..a46f3e8a0 100644
--- a/tex/context/base/mkiv/typo-itc.lua
+++ b/tex/context/base/mkiv/typo-itc.lua
@@ -37,9 +37,12 @@ local getnext = nuts.getnext
local getid = nuts.getid
local getfont = nuts.getfont
local getchar = nuts.getchar
+local getdisc = nuts.getdisc
local getattr = nuts.getattr
local setattr = nuts.setattr
local setfield = nuts.setfield
+local setdisc = nuts.setdisc
+local isglyph = nuts.isglyph
local insert_node_after = nuts.insert_after
local delete_node = nuts.delete
@@ -173,28 +176,32 @@ function italics.handler(head)
local previtalic = 0
local previnserted = nil
- local replace = nil
- local replacechar = nil
- local replacehead = nil
- local replaceitalic = 0
- local replaceinserted = nil
+ local pre = nil
+ local pretail = nil
local post = nil
+ local posttail = nil
local postchar = nil
local posthead = nil
local postitalic = 0
local postinserted = nil
+ local replace = nil
+ local replacetail = nil
+ local replacechar = nil
+ local replacehead = nil
+ local replaceitalic = 0
+ local replaceinserted = nil
+
local current = prevhead
local done = false
local lastfont = nil
local lastattr = nil
while current do
- local id = getid(current)
- if id == glyph_code then
+ local char, id = isglyph(current)
+ if char then
local font = getfont(current)
- local char = getchar(current)
local data = italicsdata[font]
if font ~= lastfont then
if previtalic ~= 0 then
@@ -275,95 +282,84 @@ function italics.handler(head)
replaceitalic = 0
postinserted = nil
postitalic = 0
- replace = getfield(current,"replace")
+ updated = false
+ pre, post, replace, pretail, posttail, replacetail = getdisc(current)
if replace then
- local current = find_tail(replace)
- if getid(current) ~= glyph_code then
- current = getprev(current)
- end
- if current and getid(current) == glyph_code then
- local font = getfont(current)
- local char = getchar(current)
- local data = italicsdata[font]
- if data then
- local attr = forcedvariant or getattr(current,a_italics)
- if attr and attr > 0 then
- local cd = data[char]
- if not cd then
- -- this really can happen
- replaceitalic = 0
- else
- replaceitalic = cd.italic
- if not replaceitalic then
- replaceitalic = setitalicinfont(font,char) -- calculated once
- -- replaceitalic = 0
- end
- if replaceitalic ~= 0 then
- lastfont = font
- lastattr = attr
- replacechar = char
- replacehead = replace
- replace = current
+ local current = replacetail
+ while current do
+ local char, id = isglyph(current)
+ if char then
+ local font = getfont(current)
+ local data = italicsdata[font]
+ if data then
+ local attr = forcedvariant or getattr(current,a_italics)
+ if attr and attr > 0 then
+ local cd = data[char]
+ if not cd then
+ -- this really can happen
+ replaceitalic = 0
+ else
+ replaceitalic = cd.italic
+ if not replaceitalic then
+ replaceitalic = setitalicinfont(font,char) -- calculated once
+ -- replaceitalic = 0
+ end
+ if replaceitalic ~= 0 then
+ lastfont = font
+ lastattr = attr
+ replacechar = char
+ replacehead = replace
+ replace = current
+ updated = true
+ end
end
end
--- else
--- replaceitalic = 0
end
--- else
--- replaceitalic = 0
+ break
+ else
+ current = getprev(current)
end
--- else
--- replaceitalic = 0
end
--- replaceinserted = nil
--- else
--- replaceitalic = 0
--- replaceinserted = nil
end
- post = getfield(current,"post")
if post then
- local current = find_tail(post)
- if getid(current) ~= glyph_code then
- current = getprev(current)
- end
- if current and getid(current) == glyph_code then
- local font = getfont(current)
- local char = getchar(current)
- local data = italicsdata[font]
- if data then
- local attr = forcedvariant or getattr(current,a_italics)
- if attr and attr > 0 then
- local cd = data[char]
- if not cd then
- -- this really can happen
--- postitalic = 0
- else
- postitalic = cd.italic
- if not postitalic then
- postitalic = setitalicinfont(font,char) -- calculated once
- -- postitalic = 0
- end
- if postitalic ~= 0 then
- lastfont = font
- lastattr = attr
- postchar = char
- posthead = post
- post = current
+ local current = posttail
+ while current do
+ local char, id = isglyph(current)
+ if char then
+ local font = getfont(current)
+ local data = italicsdata[font]
+ if data then
+ local attr = forcedvariant or getattr(current,a_italics)
+ if attr and attr > 0 then
+ local cd = data[char]
+ if not cd then
+ -- this really can happen
+ -- postitalic = 0
+ else
+ postitalic = cd.italic
+ if not postitalic then
+ postitalic = setitalicinfont(font,char) -- calculated once
+ -- postitalic = 0
+ end
+ if postitalic ~= 0 then
+ lastfont = font
+ lastattr = attr
+ postchar = char
+ posthead = post
+ post = current
+ updated = true
+ end
end
end
--- else
--- postitalic = 0
end
--- else
--- postitalic = 0
+ break
+ else
+ current = getprev(current)
end
--- else
--- postitalic = 0
end
--- postinserted = nil
--- else
--- postitalic = 0
--- postinserted = nil
+ end
+ if updated then
+ setdisc(current,pre,post,replace)
end
elseif id == kern_code then -- how about fontkern ?
previnserted = nil
@@ -410,9 +406,9 @@ function italics.handler(head)
postinserted = nil
postitalic = 0
local next = getnext(current)
- if next and getid(next) == glyph_code then
- local char = getchar(next)
- if is_punctuation[char] then
+ if next then
+ local char, id = isglyph(next)
+ if char and is_punctuation[char] then
local kern = getprev(current)
if kern and getid(kern) == kern_code then
local glyph = getprev(kern)
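
The reworked italic handler above takes a discretionary apart with getdisc, walks the replace and post sublists backwards from their tails, and writes the fields back with setdisc only when something actually changed. A minimal sketch of that round trip; disc and tweak are hypothetical stand-ins, not names from the file:

    -- minimal sketch of the getdisc/setdisc round trip used in typo-itc.lua
    local nuts    = nodes.nuts
    local getdisc = nuts.getdisc
    local setdisc = nuts.setdisc

    local function update(disc,tweak)         -- hypothetical helper
        local pre, post, replace = getdisc(disc)
        local updated = false
        if replace then
            local r = tweak(replace)           -- tweak returns a new replace list or nil
            if r then
                replace = r
                updated = true
            end
        end
        if updated then
            setdisc(disc,pre,post,replace)     -- write back only when something changed
        end
    end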
diff --git a/tex/context/base/mkiv/typo-krn.lua b/tex/context/base/mkiv/typo-krn.lua
index ddb7abd86..a245fec41 100644
--- a/tex/context/base/mkiv/typo-krn.lua
+++ b/tex/context/base/mkiv/typo-krn.lua
@@ -7,6 +7,7 @@ if not modules then modules = { } end modules ['typo-krn'] = {
}
-- glue is still somewhat suboptimal
+-- components: better split on tounicode
local next, type, tonumber = next, type, tonumber
local utfchar = utf.char
@@ -38,6 +39,7 @@ local getfont = nuts.getfont
local getsubtype = nuts.getsubtype
local getchar = nuts.getchar
local getdisc = nuts.getdisc
+local isglyph = nuts.isglyph
local setfield = nuts.setfield
local getattr = nuts.getattr
@@ -222,7 +224,7 @@ end
-- sublists .. beware: we can have char -1
local function inject_begin(boundary,prev,keeptogether,krn,ok) -- prev is a glyph
- local id = getid(boundary)
+ local char, id = isglyph(boundary)
if id == kern_code then
if getsubtype(boundary) == kerning_code or getattr(boundary,a_fontkern) then
local inject = true
@@ -239,17 +241,16 @@ local function inject_begin(boundary,prev,keeptogether,krn,ok) -- prev is a glyp
return boundary, true
end
end
- elseif id == glyph_code then
+ elseif char then
if keeptogether and keeptogether(boundary,prev) then
-- keep 'm
else
- local charone = getchar(prev)
- if charone > 0 then
- local font = getfont(boundary)
- local chartwo = getchar(boundary)
- local data = chardata[font][charone]
- local kerns = data and data.kerns
- local kern = new_kern((kerns and kerns[chartwo] or 0) + quaddata[font]*krn)
+ local prevchar = isglyph(prev)
+ if prevchar and prevchar > 0 then
+ local font = getfont(boundary)
+ local data = chardata[font][prevchar]
+ local kerns = data and data.kerns
+ local kern = new_kern((kerns and kerns[char] or 0) + quaddata[font]*krn)
setlink(kern,boundary)
return kern, true
end
@@ -260,7 +261,7 @@ end
local function inject_end(boundary,next,keeptogether,krn,ok)
local tail = find_node_tail(boundary)
- local id = getid(tail)
+ local char, id = isglyph(tail)
if id == kern_code then
if getsubtype(tail) == kerning_code or getattr(tail,a_fontkern) then
local inject = true
@@ -277,17 +278,16 @@ local function inject_end(boundary,next,keeptogether,krn,ok)
return boundary, true
end
end
- elseif id == glyph_code then
+ elseif char then
if keeptogether and keeptogether(tail,two) then
-- keep 'm
else
- local charone = getchar(tail)
- if charone > 0 then
- local font = getfont(tail)
- local chartwo = getchar(next)
- local data = chardata[font][charone]
- local kerns = data and data.kerns
- local kern = (kerns and kerns[chartwo] or 0) + quaddata[font]*krn
+ local nextchar = isglyph(next)
+ if char > 0 then
+ local font = getfont(tail)
+ local data = chardata[font][char]
+ local kerns = data and data.kerns
+ local kern = (kerns and kerns[nextchar] or 0) + quaddata[font]*krn
insert_node_after(boundary,tail,new_kern(kern))
return boundary, true
end
@@ -303,15 +303,14 @@ local function process_list(head,keeptogether,krn,font,okay)
local kern = 0
local mark = font and markdata[font]
while start do
- local id = getid(start)
- if id == glyph_code then
+ local char, id = isglyph(start)
+ if char then
if not font then
font = getfont(start)
mark = markdata[font]
kern = quaddata[font]*krn
end
if prev then
- local char = getchar(start)
if mark[char] then
-- skip
elseif pid == kern_code then
diff --git a/tex/context/base/mkiv/typo-lin.lua b/tex/context/base/mkiv/typo-lin.lua
index e405d6c21..b794ed13c 100644
--- a/tex/context/base/mkiv/typo-lin.lua
+++ b/tex/context/base/mkiv/typo-lin.lua
@@ -66,7 +66,6 @@ local glue_code = nodecodes.glue
local kern_code = nodecodes.kern
local line_code = listcodes.line
local localpar_code = nodecodes.localpar
-local userskip_code = gluecodes.userskip
local leftskip_code = gluecodes.leftskip
local rightskip_code = gluecodes.rightskip
local parfillskip_code = gluecodes.parfillskip
diff --git a/tex/context/base/mkiv/typo-rep.lua b/tex/context/base/mkiv/typo-rep.lua
index d95eff68e..a8925a2ce 100644
--- a/tex/context/base/mkiv/typo-rep.lua
+++ b/tex/context/base/mkiv/typo-rep.lua
@@ -26,7 +26,7 @@ local tonode = nuts.tonode
local getnext = nuts.getnext
local getchar = nuts.getchar
-local getid = nuts.getid
+local isglyph = nuts.isglyph
local getattr = nuts.getattr
@@ -89,11 +89,11 @@ function nodes.handlers.stripping(head)
head = tonut(head)
local current, done = head, false
while current do
- if getid(current) == glyph_code then
+ local char, id = isglyph(current)
+ if char then
-- it's more efficient to keep track of what needs to be kept
local todo = getattr(current,a_stripping)
if todo == 1 then
- local char = getchar(current)
local what = glyphs[char]
if what then
head, current = process(what,head,current,char)
diff --git a/tex/context/base/mkiv/typo-spa.lua b/tex/context/base/mkiv/typo-spa.lua
index 519ba3f34..f00e3ae6b 100644
--- a/tex/context/base/mkiv/typo-spa.lua
+++ b/tex/context/base/mkiv/typo-spa.lua
@@ -32,11 +32,10 @@ local tonode = nuts.tonode
local getnext = nuts.getnext
local getprev = nuts.getprev
-local getchar = nuts.getchar
-local getid = nuts.getid
local getfont = nuts.getfont
local getattr = nuts.getattr
local setattr = nuts.setattr
+local isglyph = nuts.isglyph
local insert_node_before = nuts.insert_before
local insert_node_after = nuts.insert_after
@@ -84,13 +83,12 @@ function spacings.handler(head)
-- head is always begin of par (whatsit), so we have at least two prev nodes
-- penalty followed by glue
while start do
- local id = getid(start)
- if id == glyph_code then
+ local char, id = isglyph(start)
+ if char then
local attr = getattr(start,a_spacings)
if attr and attr > 0 then
local data = mapping[attr]
if data then
- local char = getchar(start)
local map = data.characters[char]
setattr(start,a_spacings,unsetvalue) -- needed?
if map then
diff --git a/tex/context/base/mkiv/typo-sus.lua b/tex/context/base/mkiv/typo-sus.lua
index 2f1949ff0..ce1933330 100644
--- a/tex/context/base/mkiv/typo-sus.lua
+++ b/tex/context/base/mkiv/typo-sus.lua
@@ -42,13 +42,13 @@ local tonut = nodes.tonut
local tonode = nodes.tonode
local getid = nuts.getid
-local getchar = nuts.getchar
local getprev = nuts.getprev
local getnext = nuts.getnext
local getfield = nuts.getfield
local getattr = nuts.getattr
local getfont = nuts.getfont
local getlist = nuts.getlist
+local isglyph = nuts.isglyph
local setfield = nuts.setfield
local setattr = nuts.setattr
@@ -166,9 +166,8 @@ function typesetters.marksuspects(head)
local lastdone = nil
while current do
if getattr(current,a_suspecting) then
- local id = getid(current)
- if id == glyph_code then
- local char = getchar(current)
+ local char, id = isglyph(current)
+ if char then
local code = categories[char]
local done = false
if punctuation[code] then
diff --git a/tex/context/base/mkiv/typo-tal.lua b/tex/context/base/mkiv/typo-tal.lua
index 2594b7298..c7efc0f28 100644
--- a/tex/context/base/mkiv/typo-tal.lua
+++ b/tex/context/base/mkiv/typo-tal.lua
@@ -43,6 +43,7 @@ local getfont = nuts.getfont
local getchar = nuts.getchar
local getfield = nuts.getfield
local getattr = nuts.getattr
+local isglyph = nuts.isglyph
local setfield = nuts.setfield
local setattr = nuts.setattr
@@ -196,9 +197,8 @@ function characteralign.handler(originalhead,where)
-- we can think of constraints
if method == v_number then
while current do
- local id = getid(current)
- if id == glyph_code then
- local char = getchar(current)
+ local char, id = isglyph(current)
+ if char then
local font = getfont(current)
-- local unicode = unicodes[font][char]
local unicode = fontcharacters[font][char].unicode or char -- ignore tables
@@ -273,9 +273,8 @@ function characteralign.handler(originalhead,where)
end
else
while current do
- local id = getid(current)
- if id == glyph_code then
- local char = getchar(current)
+ local char, id = isglyph(current)
+ if char then
local font = getfont(current)
-- local unicode = unicodes[font][char]
local unicode = fontcharacters[font][char].unicode or char -- ignore tables
diff --git a/tex/context/fonts/mkiv/type-imp-lato.mkiv b/tex/context/fonts/mkiv/type-imp-lato.mkiv
index 8fb8647fc..11a386b3f 100644
--- a/tex/context/fonts/mkiv/type-imp-lato.mkiv
+++ b/tex/context/fonts/mkiv/type-imp-lato.mkiv
@@ -26,7 +26,7 @@
\setups[\s!font:\s!fallback:\s!sans]
\definefontsynonym [\s!Sans] [\s!file:lato-reg] [\s!features=\s!default]
\definefontsynonym [\s!SansBold] [\s!file:lato-bol] [\s!features=\s!default]
- \definefontsynonym [\s!SansItalic] [\s!file:lato-reglta] [\s!features=\s!default]
+ \definefontsynonym [\s!SansItalic] [\s!file:lato-regita] [\s!features=\s!default]
\definefontsynonym [\s!SansBoldItalic] [\s!file:lato-bollta] [\s!features=\s!default]
\stoptypescript
@@ -35,7 +35,7 @@
\definefontsynonym [\s!Sans] [\s!file:lato-lig] [\s!features=\s!default]
\definefontsynonym [\s!SansBold] [\s!file:lato-reg] [\s!features=\s!default]
\definefontsynonym [\s!SansItalic] [\s!file:lato-liglta] [\s!features=\s!default]
- \definefontsynonym [\s!SansBoldItalic] [\s!file:lato-reglta] [\s!features=\s!default]
+ \definefontsynonym [\s!SansBoldItalic] [\s!file:lato-regita] [\s!features=\s!default]
\stoptypescript
\starttypescript [\s!sans] [lato-dark] [\s!name]
diff --git a/tex/context/fonts/mkiv/type-imp-texgyre.mkiv b/tex/context/fonts/mkiv/type-imp-texgyre.mkiv
index ca5d456e5..86db8c603 100644
--- a/tex/context/fonts/mkiv/type-imp-texgyre.mkiv
+++ b/tex/context/fonts/mkiv/type-imp-texgyre.mkiv
@@ -52,7 +52,7 @@
\definetypescriptprefix [n:courier] [TeXGyreCursor]
\definetypescriptprefix [n:chancery] [TeXGyreChorus]
- \starttypescript [\s!serif,\s!sans] [adventor,bonum,bookman,heros,helvetica,pagella,palatino,bonum,bookman,schola,schoolbook,termes,times]
+ \starttypescript [\s!serif,\s!sans] [adventor,bonum,bookman,heros,helvetica,pagella,palatino,schola,schoolbook,termes,times]
\definefontsynonym [\typescriptprefix{n:\typescripttwo}-Regular] [\s!file:texgyre\typescriptprefix{f:\typescripttwo}-regular] [\s!features=\s!default]
\definefontsynonym [\typescriptprefix{n:\typescripttwo}-Italic] [\s!file:texgyre\typescriptprefix{f:\typescripttwo}-italic] [\s!features=\s!default]
\definefontsynonym [\typescriptprefix{n:\typescripttwo}-Bold] [\s!file:texgyre\typescriptprefix{f:\typescripttwo}-bold] [\s!features=\s!default]
diff --git a/tex/context/interface/common/keys-cs.xml b/tex/context/interface/common/keys-cs.xml
index ea492e3d0..01504609a 100644
--- a/tex/context/interface/common/keys-cs.xml
+++ b/tex/context/interface/common/keys-cs.xml
@@ -1090,6 +1090,7 @@
<cd:constant name='resources' value='resources'/>
<cd:constant name='reverse' value='reverse'/>
<cd:constant name='right' value='vpravo'/>
+ <cd:constant name='rightchars' value='rightchars'/>
<cd:constant name='rightcolor' value='barvavpravo'/>
<cd:constant name='rightcompoundhyphen' value='rightcompoundhyphen'/>
<cd:constant name='rightedge' value='pravahrana'/>
diff --git a/tex/context/interface/common/keys-de.xml b/tex/context/interface/common/keys-de.xml
index 26c226166..97a8ef09b 100644
--- a/tex/context/interface/common/keys-de.xml
+++ b/tex/context/interface/common/keys-de.xml
@@ -1090,6 +1090,7 @@
<cd:constant name='resources' value='resources'/>
<cd:constant name='reverse' value='reverse'/>
<cd:constant name='right' value='rechts'/>
+ <cd:constant name='rightchars' value='rightchars'/>
<cd:constant name='rightcolor' value='rechterfarbe'/>
<cd:constant name='rightcompoundhyphen' value='rightcompoundhyphen'/>
<cd:constant name='rightedge' value='rechtekante'/>
diff --git a/tex/context/interface/common/keys-en.xml b/tex/context/interface/common/keys-en.xml
index 709e393a7..c22ee2820 100644
--- a/tex/context/interface/common/keys-en.xml
+++ b/tex/context/interface/common/keys-en.xml
@@ -1090,6 +1090,7 @@
<cd:constant name='resources' value='resources'/>
<cd:constant name='reverse' value='reverse'/>
<cd:constant name='right' value='right'/>
+ <cd:constant name='rightchars' value='rightchars'/>
<cd:constant name='rightcolor' value='rightcolor'/>
<cd:constant name='rightcompoundhyphen' value='rightcompoundhyphen'/>
<cd:constant name='rightedge' value='rightedge'/>
diff --git a/tex/context/interface/common/keys-fr.xml b/tex/context/interface/common/keys-fr.xml
index 442d9c91d..b03d55d6d 100644
--- a/tex/context/interface/common/keys-fr.xml
+++ b/tex/context/interface/common/keys-fr.xml
@@ -1090,6 +1090,7 @@
<cd:constant name='resources' value='resources'/>
<cd:constant name='reverse' value='inverse'/>
<cd:constant name='right' value='droite'/>
+ <cd:constant name='rightchars' value='rightchars'/>
<cd:constant name='rightcolor' value='couleurdroite'/>
<cd:constant name='rightcompoundhyphen' value='rightcompoundhyphen'/>
<cd:constant name='rightedge' value='borddroit'/>
diff --git a/tex/context/interface/common/keys-it.xml b/tex/context/interface/common/keys-it.xml
index 0860dc302..622e73f4a 100644
--- a/tex/context/interface/common/keys-it.xml
+++ b/tex/context/interface/common/keys-it.xml
@@ -1090,6 +1090,7 @@
<cd:constant name='resources' value='resources'/>
<cd:constant name='reverse' value='invertito'/>
<cd:constant name='right' value='destra'/>
+ <cd:constant name='rightchars' value='rightchars'/>
<cd:constant name='rightcolor' value='coloredestra'/>
<cd:constant name='rightcompoundhyphen' value='rightcompoundhyphen'/>
<cd:constant name='rightedge' value='bordodestro'/>
diff --git a/tex/context/interface/common/keys-nl.xml b/tex/context/interface/common/keys-nl.xml
index 50d18abb5..21040d080 100644
--- a/tex/context/interface/common/keys-nl.xml
+++ b/tex/context/interface/common/keys-nl.xml
@@ -1090,6 +1090,7 @@
<cd:constant name='resources' value='resources'/>
<cd:constant name='reverse' value='omgekeerd'/>
<cd:constant name='right' value='rechts'/>
+ <cd:constant name='rightchars' value='rightchars'/>
<cd:constant name='rightcolor' value='rechterkleur'/>
<cd:constant name='rightcompoundhyphen' value='rechterkoppelteken'/>
<cd:constant name='rightedge' value='rechterrand'/>
diff --git a/tex/context/interface/common/keys-pe.xml b/tex/context/interface/common/keys-pe.xml
index 297b22ce4..adf7c3e23 100644
--- a/tex/context/interface/common/keys-pe.xml
+++ b/tex/context/interface/common/keys-pe.xml
@@ -1090,6 +1090,7 @@
<cd:constant name='resources' value='resources'/>
<cd:constant name='reverse' value='برعکس'/>
<cd:constant name='right' value='راست'/>
+ <cd:constant name='rightchars' value='rightchars'/>
<cd:constant name='rightcolor' value='رنگ‌راست'/>
<cd:constant name='rightcompoundhyphen' value='rightcompoundhyphen'/>
<cd:constant name='rightedge' value='لبه‌راست'/>
diff --git a/tex/context/interface/common/keys-ro.xml b/tex/context/interface/common/keys-ro.xml
index 21a10c7c2..474a2ade5 100644
--- a/tex/context/interface/common/keys-ro.xml
+++ b/tex/context/interface/common/keys-ro.xml
@@ -1090,6 +1090,7 @@
<cd:constant name='resources' value='resources'/>
<cd:constant name='reverse' value='reverse'/>
<cd:constant name='right' value='dreapta'/>
+ <cd:constant name='rightchars' value='rightchars'/>
<cd:constant name='rightcolor' value='culoaredreapta'/>
<cd:constant name='rightcompoundhyphen' value='rightcompoundhyphen'/>
<cd:constant name='rightedge' value='borduradreapta'/>
diff --git a/tex/context/modules/mkiv/m-json.mkiv b/tex/context/modules/mkiv/m-json.mkiv
index 329aa0f31..095b49558 100644
--- a/tex/context/modules/mkiv/m-json.mkiv
+++ b/tex/context/modules/mkiv/m-json.mkiv
@@ -27,4 +27,6 @@
\registerctxluafile{util-jsn}{}
+% \def\u#1#2#3#4{\cldcontext{utf.char(0x#1#2#3#4)}}
+
\stopmodule
diff --git a/tex/context/modules/mkiv/m-visual.mkiv b/tex/context/modules/mkiv/m-visual.mkiv
index ee48836ed..01fae49e2 100644
--- a/tex/context/modules/mkiv/m-visual.mkiv
+++ b/tex/context/modules/mkiv/m-visual.mkiv
@@ -90,7 +90,7 @@
\def\dofakewords#1%
{\bgroup
\dorecurse{#1}
- {\iffaketrigger\char\zerocount\fi % so that e.g. line numbering works
+ {\iffaketrigger\signalcharacter\fi % so that e.g. line numbering works
\getrandomcount\scratchcounter{1}{5}%
\dorecurse\scratchcounter
{\getrandomdimen\scratchdimen{.5em}{1.25em}%
diff --git a/tex/context/modules/mkiv/x-set-11.mkiv b/tex/context/modules/mkiv/x-set-11.mkiv
index 661f3fb4f..ce14b0619 100644
--- a/tex/context/modules/mkiv/x-set-11.mkiv
+++ b/tex/context/modules/mkiv/x-set-11.mkiv
@@ -13,7 +13,7 @@
%C details.
% we can make this module a bit cleaner using more recent features
-% like sorting the xml directly
+% like sorting the xml directly .. will happen stepwise
% \startluacode
% collectgarbage("stop")
@@ -506,25 +506,25 @@
\installsetuponlycommandhandler \??interfacesetup {setup} % \??interfacesetup
-\unexpanded\def\c!setup!definereserved#1#2%
+\unexpanded\def\cmd_define_reserved#1#2%
{\setvalue{\??interfacesetupreserved#1}{#2}}
-\unexpanded\def\c!setup!reserved!#1%
+\unexpanded\def\cmd_reserved_value#1%
{\executeifdefined{\??interfacesetupreserved#1}{#1}}
-\unexpanded\def\c!setup!internal!#1%
+\unexpanded\def\cmd_internal_value#1%
{\dontleavehmode
\begingroup
\setupintfont{#1}%
\endgroup}
-\unexpanded\def\c!setup!text!#1%
+\unexpanded\def\cmd_text_value#1%
{\dontleavehmode
\begingroup
\setupvarfont{#1}%
\endgroup}
-\unexpanded\def\c!setup!command!#1%
+\unexpanded\def\cmd_command_value#1%
{{\setupvarfont{\texescape...#1}}}
\defineregister
@@ -674,36 +674,42 @@
\newconstant\kindofsetup
-\unexpanded\def\basicsetup{\kindofsetup\zerocount\doshowsetup}
-\unexpanded\def\shortsetup{\kindofsetup\plusone \doshowsetup}
-\unexpanded\def\setup {\kindofsetup\plustwo \doshowsetup}
-\unexpanded\def\showsetup {\kindofsetup\plustwo \doshowsetup}
+\unexpanded\def\basicsetup{\kindofsetup\zerocount\cmd_show_setup}
+\unexpanded\def\shortsetup{\kindofsetup\plusone \cmd_show_setup}
+\unexpanded\def\setup {\kindofsetup\plustwo \cmd_show_setup}
+\unexpanded\def\showsetup {\kindofsetup\plustwo \cmd_show_setup}
-%unexpanded\def\showsetupinlist#1#2#3{\kindofsetup\plustwo\showsetupindeed{#3}\par}
-\unexpanded\def\showsetupinlist#1#2#3{\kindofsetup\plustwo\xmlsetup{#3}{xml:setups:typeset}\par}
+\unexpanded\def\showsetupinlist#1#2#3%
+ {\kindofsetup\plustwo\xmlsetup{#3}{xml:setups:typeset}\par}
% todo: only references in lists
-\unexpanded\def\doshowsetup
- {\dosingleempty\dodoshowsetup}
+\newconditional\c_cmd_showsetup
-\def\dodoshowsetup[#1]%
+\installtextracker
+ {cmd.showsetup}
+ {\settrue\c_cmd_showsetup}
+ {\setfalse\c_cmd_showsetup}
+
+\unexpanded\def\cmd_show_setup
+ {\doifelsenextoptionalcs\cmd_show_setup_yes\cmd_show_setup_nop}
+
+\def\cmd_show_setup_yes[#1]%
{\iffirstargument
- \dododoshowsetup{#1}%
+ \cmd_show_setup_nop{#1}%
\else
- \expandafter\dododoshowsetup
+ \expandafter\cmd_show_setup_nop
\fi}
-\def\dododoshowsetup#1% this will trigger 'used'
+\def\cmd_show_setup_nop#1% this will trigger 'used'
{\registersort[texcommand][stp:x:#1]%
- \showsetupindeed{#1}}
-
-\def\showsetupindeed#1%
- {\startelement[setup][name=#1]%
+ \ifconditional\c_cmd_showsetup
+ \writestatus{setup}{#1 / \rawsynonymname{texcommand}{stp:x:#1}}%
+ \fi
+ \startelement[setup][name=#1]%
\startelement[noexport][comment={setup definition #1}]%
-% \nospaces\plusone
\xmlsetup{\rawsynonymname{texcommand}{stp:x:#1}}{xml:setups:typeset}
- % \xmlfilterlist{\loadedsetups}{/interface/command['#1' == (@type=='environment' and '\e!start' or '') .. @name]/command(xml:setups:typeset)}%
+ % \xmlfilterlist{\loadedsetups}{/interface/command['#1' == (@type=='environment' and '\e!start' or '') .. @name]/command(xml:setups:typeset)}%
\stopelement
\stopelement}
@@ -1006,9 +1012,9 @@
\startxmlsetups xml:setups:parameter
\doifmodeelse {setups-measure} {
- \c!setup!reserved!{\xmlatt{#1}{name}}\par
+ \cmd_reserved_value{\xmlatt{#1}{name}}\par
} {
- \startsecondSETUPcolumn{\c!setup!reserved!{\xmlatt{#1}{name}}}{=}
+ \startsecondSETUPcolumn{\cmd_reserved_value{\xmlatt{#1}{name}}}{=}
\ignorespaces
\xmlflush{#1}
\doifmode{interface:setup:defaults} {
@@ -1037,21 +1043,21 @@
\xmlmapvalue{setups:method}{none} {}
\startxmlsetups xml:setups:constant:value
- \c!setup!reserved!{\xmlatt{#1}{type}}
+ \cmd_reserved_value{\xmlatt{#1}{type}}
\stopxmlsetups
\startxmlsetups xml:setups:constant
\doifelsemode {setups-pass-one} {
} {
\doifsomethingelse{\xmlatt{#1}{prefix}} {
- \c!setup!reserved!{\xmllastatt}
+ \cmd_reserved_value{\xmllastatt}
\xmlmappedvalue{setups:method}{\xmlatt{#1}{method}}{none}
} {
\doif {\xmlatt{#1}{default}} {yes} {
\underbar % next needs to be {braced}
}
}
- {\c!setup!reserved!{\xmlatt{#1}{type}}}
+ {\cmd_reserved_value{\xmlatt{#1}{type}}}
\space
\ignorespaces
}
@@ -1061,7 +1067,7 @@
\doifelsemode {setups-pass-one} {
\expanded{\setupintfont{\xmlatt{#1}{value}}}\ignorespaces
} {
- \c!setup!reserved!{\xmlatt{#1}{value}}
+ \cmd_reserved_value{\xmlatt{#1}{value}}
\space
\ignorespaces
}
@@ -1069,7 +1075,7 @@
\startxmlsetups xml:setups:inherit
\secondSETUPcolumn {
- \c!setup!text!{\getmessage{setup}{inherits}}
+ \cmd_text_value{\getmessage{setup}{inherits}}
\enspace
\letterbackslash
\xmlatt{#1}{name}
@@ -1079,64 +1085,64 @@
\unexpanded\def\simpleSETUPargument#1%
{\startfirstSETUPcolumn{\showSETUPnumber}%
- \c!setup!internal!{#1}%
+ \cmd_internal_value{#1}%
\stopfirstSETUPcolumn
\blank[\v!halfline]
\ignorespaces}
-\c!setup!definereserved {cd:command} {\c!setup!internal!{\getmessage{setup}{command}}}
-\c!setup!definereserved {cd:dimension} {\c!setup!internal!{\getmessage{setup}{dimension}}}
-\c!setup!definereserved {cd:file} {\c!setup!internal!{\getmessage{setup}{file}}}
-\c!setup!definereserved {cd:buffer} {\c!setup!internal!{\getmessage{setup}{buffer}}}
-\c!setup!definereserved {cd:name} {\c!setup!internal!{\getmessage{setup}{identifier}}}
-\c!setup!definereserved {cd:character} {\c!setup!internal!{\getmessage{setup}{character}}}
-\c!setup!definereserved {cd:mark} {\c!setup!internal!{\getmessage{setup}{mark}}}
-\c!setup!definereserved {cd:number} {\c!setup!internal!{\getmessage{setup}{number}}}
-\c!setup!definereserved {cd:first} {\c!setup!internal!{\getmessage{setup}{first}}}
-\c!setup!definereserved {cd:last} {\c!setup!internal!{\getmessage{setup}{last}}}
-\c!setup!definereserved {cd:reference} {\c!setup!internal!{\getmessage{setup}{reference}}}
-\c!setup!definereserved {cd:plural} {\c!setup!internal!{\getmessage{setup}{plural}}}
-\c!setup!definereserved {cd:singular} {\c!setup!internal!{\getmessage{setup}{singular}}}
-\c!setup!definereserved {cd:text} {\c!setup!internal!{\getmessage{setup}{text}}}
-\c!setup!definereserved {cd:formula} {\c!setup!internal!{\getmessage{setup}{formula}}}
-\c!setup!definereserved {cd:file} {\c!setup!internal!{\getmessage{setup}{file}}}
-\c!setup!definereserved {cd:matrix} {\c!setup!internal!{\getmessage{setup}{matrix}}}
-\c!setup!definereserved {cd:list} {\c!setup!internal!{\getmessage{setup}{list}}}
-\c!setup!definereserved {cd:section} {\c!setup!internal!{\getmessage{setup}{section}}}
-\c!setup!definereserved {cd:language} {\c!setup!internal!{\getmessage{setup}{language}}}
-\c!setup!definereserved {cd:section} {\c!setup!internal!{\getmessage{setup}{section}}}
-\c!setup!definereserved {cd:language} {\c!setup!internal!{\getmessage{setup}{language}}}
-\c!setup!definereserved {cd:processor} {\c!setup!internal!{\getmessage{setup}{processor}}}
-\c!setup!definereserved {cd:style} {\c!setup!internal!{\getmessage{setup}{style}}}
-\c!setup!definereserved {cd:font} {\c!setup!internal!{\getmessage{setup}{font}}}
-\c!setup!definereserved {cd:character} {\c!setup!internal!{\getmessage{setup}{character}}}
-\c!setup!definereserved {cd:userdata} {\c!setup!internal!{\getmessage{setup}{userdata}}}
-\c!setup!definereserved {cd:key} {\c!setup!internal!{\getmessage{setup}{key}}}
-\c!setup!definereserved {cd:value} {\c!setup!internal!{\getmessage{setup}{value}}}
-\c!setup!definereserved {cd:color} {\c!setup!internal!{\getmessage{setup}{color}}}
-\c!setup!definereserved {cd:template} {\c!setup!internal!{\getmessage{setup}{template}}}
-\c!setup!definereserved {cd:node} {\c!setup!internal!{\getmessage{setup}{node}}}
-\c!setup!definereserved {cd:lpath} {\c!setup!internal!{\getmessage{setup}{lpath}}}
-\c!setup!definereserved {cd:setup} {\c!setup!internal!{\getmessage{setup}{setup}}}
-\c!setup!definereserved {cd:xmlsetup} {\c!setup!internal!{\getmessage{setup}{xmlsetup}}}
-\c!setup!definereserved {cd:luafunction} {\c!setup!internal!{\getmessage{setup}{luafunction}}}
-\c!setup!definereserved {cd:marking} {\c!setup!internal!{\getmessage{setup}{marking}}}
-\c!setup!definereserved {cd:sectionblock} {\c!setup!internal!{\getmessage{setup}{sectionblock}}}
-\c!setup!definereserved {cd:row} {\c!setup!internal!{\getmessage{setup}{row}}}
-\c!setup!definereserved {cd:column} {\c!setup!internal!{\getmessage{setup}{column}}}
-\c!setup!definereserved {cd:url} {\c!setup!internal!{\getmessage{setup}{url}}}
-\c!setup!definereserved {cd:true} {\c!setup!internal!{\getmessage{setup}{true}}}
-\c!setup!definereserved {cd:false} {\c!setup!internal!{\getmessage{setup}{false}}}
-\c!setup!definereserved {cd:category} {\c!setup!internal!{\getmessage{setup}{category}}}
-\c!setup!definereserved {cd:csname} {\c!setup!internal!{\getmessage{setup}{csname}}}
-\c!setup!definereserved {cd:content} {\c!setup!internal!{\getmessage{setup}{content}}}
-
-%c!setup!definereserved {cd:noargument} {\c!setup!command! {}}
-\c!setup!definereserved {cd:oneargument} {\c!setup!command! {\texthash1}}
-\c!setup!definereserved {cd:twoarguments} {\c!setup!command! {\texthash1\texthash2}}
-\c!setup!definereserved {cd:threearguments} {\c!setup!command! {\texthash1\texthash2\texthash3}}
-
-\c!setup!definereserved {cd:sign} {[-+]}
+\cmd_define_reserved {cd:command} {\cmd_internal_value{\getmessage{setup}{command}}}
+\cmd_define_reserved {cd:dimension} {\cmd_internal_value{\getmessage{setup}{dimension}}}
+\cmd_define_reserved {cd:file} {\cmd_internal_value{\getmessage{setup}{file}}}
+\cmd_define_reserved {cd:buffer} {\cmd_internal_value{\getmessage{setup}{buffer}}}
+\cmd_define_reserved {cd:name} {\cmd_internal_value{\getmessage{setup}{identifier}}}
+\cmd_define_reserved {cd:character} {\cmd_internal_value{\getmessage{setup}{character}}}
+\cmd_define_reserved {cd:mark} {\cmd_internal_value{\getmessage{setup}{mark}}}
+\cmd_define_reserved {cd:number} {\cmd_internal_value{\getmessage{setup}{number}}}
+\cmd_define_reserved {cd:first} {\cmd_internal_value{\getmessage{setup}{first}}}
+\cmd_define_reserved {cd:last} {\cmd_internal_value{\getmessage{setup}{last}}}
+\cmd_define_reserved {cd:reference} {\cmd_internal_value{\getmessage{setup}{reference}}}
+\cmd_define_reserved {cd:plural} {\cmd_internal_value{\getmessage{setup}{plural}}}
+\cmd_define_reserved {cd:singular} {\cmd_internal_value{\getmessage{setup}{singular}}}
+\cmd_define_reserved {cd:text} {\cmd_internal_value{\getmessage{setup}{text}}}
+\cmd_define_reserved {cd:formula} {\cmd_internal_value{\getmessage{setup}{formula}}}
+\cmd_define_reserved {cd:file} {\cmd_internal_value{\getmessage{setup}{file}}}
+\cmd_define_reserved {cd:matrix} {\cmd_internal_value{\getmessage{setup}{matrix}}}
+\cmd_define_reserved {cd:list} {\cmd_internal_value{\getmessage{setup}{list}}}
+\cmd_define_reserved {cd:section} {\cmd_internal_value{\getmessage{setup}{section}}}
+\cmd_define_reserved {cd:language} {\cmd_internal_value{\getmessage{setup}{language}}}
+\cmd_define_reserved {cd:section} {\cmd_internal_value{\getmessage{setup}{section}}}
+\cmd_define_reserved {cd:language} {\cmd_internal_value{\getmessage{setup}{language}}}
+\cmd_define_reserved {cd:processor} {\cmd_internal_value{\getmessage{setup}{processor}}}
+\cmd_define_reserved {cd:style} {\cmd_internal_value{\getmessage{setup}{style}}}
+\cmd_define_reserved {cd:font} {\cmd_internal_value{\getmessage{setup}{font}}}
+\cmd_define_reserved {cd:character} {\cmd_internal_value{\getmessage{setup}{character}}}
+\cmd_define_reserved {cd:userdata} {\cmd_internal_value{\getmessage{setup}{userdata}}}
+\cmd_define_reserved {cd:key} {\cmd_internal_value{\getmessage{setup}{key}}}
+\cmd_define_reserved {cd:value} {\cmd_internal_value{\getmessage{setup}{value}}}
+\cmd_define_reserved {cd:color} {\cmd_internal_value{\getmessage{setup}{color}}}
+\cmd_define_reserved {cd:template} {\cmd_internal_value{\getmessage{setup}{template}}}
+\cmd_define_reserved {cd:node} {\cmd_internal_value{\getmessage{setup}{node}}}
+\cmd_define_reserved {cd:lpath} {\cmd_internal_value{\getmessage{setup}{lpath}}}
+\cmd_define_reserved {cd:setup} {\cmd_internal_value{\getmessage{setup}{setup}}}
+\cmd_define_reserved {cd:xmlsetup} {\cmd_internal_value{\getmessage{setup}{xmlsetup}}}
+\cmd_define_reserved {cd:luafunction} {\cmd_internal_value{\getmessage{setup}{luafunction}}}
+\cmd_define_reserved {cd:marking} {\cmd_internal_value{\getmessage{setup}{marking}}}
+\cmd_define_reserved {cd:sectionblock} {\cmd_internal_value{\getmessage{setup}{sectionblock}}}
+\cmd_define_reserved {cd:row} {\cmd_internal_value{\getmessage{setup}{row}}}
+\cmd_define_reserved {cd:column} {\cmd_internal_value{\getmessage{setup}{column}}}
+\cmd_define_reserved {cd:url} {\cmd_internal_value{\getmessage{setup}{url}}}
+\cmd_define_reserved {cd:true} {\cmd_internal_value{\getmessage{setup}{true}}}
+\cmd_define_reserved {cd:false} {\cmd_internal_value{\getmessage{setup}{false}}}
+\cmd_define_reserved {cd:category} {\cmd_internal_value{\getmessage{setup}{category}}}
+\cmd_define_reserved {cd:csname} {\cmd_internal_value{\getmessage{setup}{csname}}}
+\cmd_define_reserved {cd:content} {\cmd_internal_value{\getmessage{setup}{content}}}
+
+%cmd_define_reserved {cd:noargument} {\cmd_command_value {}}
+\cmd_define_reserved {cd:oneargument} {\cmd_command_value {\texthash1}}
+\cmd_define_reserved {cd:twoarguments} {\cmd_command_value {\texthash1\texthash2}}
+\cmd_define_reserved {cd:threearguments} {\cmd_command_value {\texthash1\texthash2\texthash3}}
+
+\cmd_define_reserved {cd:sign} {[-+]}
%D Auxiliary.
@@ -1148,7 +1154,7 @@
\setbox2=\hbox to \wd0
{\hss
\raise1.25\exheight\hbox
- {\tx\ifcase\maximumSETUPargument\relax
+ {\txx\ifcase\maximumSETUPargument\relax
\or*\else\currentSETUPargument
\fi}%
\hss}%
@@ -1158,7 +1164,7 @@
\bgroup
\txx
\doif {\xmlatt{#1}{optional}} {yes}
- {\c!setup!internal!{\getmessage{setup}{optional}}}%
+ {\cmd_internal_value{\getmessage{setup}{optional}}}%
\egroup
\hss}%
\ht2\ht\strutbox
@@ -1195,6 +1201,9 @@
\unexpanded\def\setupEQsymbol % we raise the number already
{.\lower.25\exheight\hpack{=}.}
+\unexpanded\def\setupAPPLYsymbol % we raise the number already
+ {..\lower.25\exheight\hpack{=>}..}
+
\starttexdefinition unexpanded showSETUPassignmentbraces #1
\ifcase\kindofsetup
\showSETUPline{\letterleftbrace\setupEQsymbol\letterrightbrace}
@@ -1341,8 +1350,8 @@
\unexpanded\def\showSETUPapply#1%
{\showSETUP{#1}
- {[..=>..]}
- {[..,..=>..,...]}}
+ {[\setupAPPLYsymbol]}
+ {[..,\setupAPPLYsymbol,...]}}
\unexpanded\def\showSETUPtwowords#1%
{\showSETUP{#1}
@@ -1356,7 +1365,7 @@
\unexpanded\def\showSETUPcsname#1%
{\showSETUP{#1}
- {{\c!setup!command!{}}}
+ {{\cmd_command_value{}}}
{}}
\unexpanded\def\showSETUPdestination#1%
@@ -1477,7 +1486,7 @@
% official interface
\unexpanded\def\cmdinternal#1%
- {{\tttf\c!setup!reserved!{#1}}} % todo color .. highlight
+ {{\tttf\cmd_reserved_value{#1}}} % todo color .. highlight
\let\cmdbasicsetup\basicsetup
\let\cmdshortsetup\shortsetup
diff --git a/tex/generic/context/luatex/luatex-basics-chr.lua b/tex/generic/context/luatex/luatex-basics-chr.lua
new file mode 100644
index 000000000..4329256f2
--- /dev/null
+++ b/tex/generic/context/luatex/luatex-basics-chr.lua
@@ -0,0 +1,758 @@
+-- automatically generated from context data
+
+characters = characters or { }
+
+-- dummies
+
+characters.blockrange = { }
+
+-- classifiers needed for analysis
+
+characters.classifiers={
+ [1536]=4,
+ [1537]=4,
+ [1538]=4,
+ [1539]=4,
+ [1540]=4,
+ [1541]=4,
+ [1542]=6,
+ [1543]=6,
+ [1544]=4,
+ [1545]=6,
+ [1546]=6,
+ [1547]=4,
+ [1548]=6,
+ [1549]=6,
+ [1550]=6,
+ [1551]=6,
+ [1552]=5,
+ [1553]=5,
+ [1554]=5,
+ [1555]=5,
+ [1556]=5,
+ [1557]=5,
+ [1558]=5,
+ [1559]=5,
+ [1560]=5,
+ [1561]=5,
+ [1562]=5,
+ [1563]=6,
+ [1564]=6,
+ [1566]=6,
+ [1567]=6,
+ [1568]=2,
+ [1569]=4,
+ [1570]=3,
+ [1571]=3,
+ [1572]=3,
+ [1573]=3,
+ [1574]=2,
+ [1575]=3,
+ [1576]=2,
+ [1577]=3,
+ [1578]=2,
+ [1579]=2,
+ [1580]=2,
+ [1581]=2,
+ [1582]=2,
+ [1583]=3,
+ [1584]=3,
+ [1585]=3,
+ [1586]=3,
+ [1587]=2,
+ [1588]=2,
+ [1589]=2,
+ [1590]=2,
+ [1591]=2,
+ [1592]=2,
+ [1593]=2,
+ [1594]=2,
+ [1595]=2,
+ [1596]=2,
+ [1597]=2,
+ [1598]=2,
+ [1599]=2,
+ [1600]=2,
+ [1601]=2,
+ [1602]=2,
+ [1603]=2,
+ [1604]=2,
+ [1605]=2,
+ [1606]=2,
+ [1607]=2,
+ [1608]=3,
+ [1609]=2,
+ [1610]=2,
+ [1611]=5,
+ [1612]=5,
+ [1613]=5,
+ [1614]=5,
+ [1615]=5,
+ [1616]=5,
+ [1617]=5,
+ [1618]=5,
+ [1619]=5,
+ [1620]=5,
+ [1621]=5,
+ [1622]=5,
+ [1623]=5,
+ [1624]=5,
+ [1625]=5,
+ [1626]=5,
+ [1627]=5,
+ [1628]=5,
+ [1629]=5,
+ [1630]=5,
+ [1631]=5,
+ [1632]=6,
+ [1633]=6,
+ [1634]=6,
+ [1635]=6,
+ [1636]=6,
+ [1637]=6,
+ [1638]=6,
+ [1639]=6,
+ [1640]=6,
+ [1641]=6,
+ [1642]=6,
+ [1643]=6,
+ [1644]=6,
+ [1645]=6,
+ [1646]=2,
+ [1647]=2,
+ [1648]=5,
+ [1649]=3,
+ [1650]=3,
+ [1651]=3,
+ [1652]=4,
+ [1653]=3,
+ [1654]=3,
+ [1655]=3,
+ [1656]=2,
+ [1657]=2,
+ [1658]=2,
+ [1659]=2,
+ [1660]=2,
+ [1661]=2,
+ [1662]=2,
+ [1663]=2,
+ [1664]=2,
+ [1665]=2,
+ [1666]=2,
+ [1667]=2,
+ [1668]=2,
+ [1669]=2,
+ [1670]=2,
+ [1671]=2,
+ [1672]=3,
+ [1673]=3,
+ [1674]=3,
+ [1675]=3,
+ [1676]=3,
+ [1677]=3,
+ [1678]=3,
+ [1679]=3,
+ [1680]=3,
+ [1681]=3,
+ [1682]=3,
+ [1683]=3,
+ [1684]=3,
+ [1685]=3,
+ [1686]=3,
+ [1687]=3,
+ [1688]=3,
+ [1689]=3,
+ [1690]=2,
+ [1691]=2,
+ [1692]=2,
+ [1693]=2,
+ [1694]=2,
+ [1695]=2,
+ [1696]=2,
+ [1697]=2,
+ [1698]=2,
+ [1699]=2,
+ [1700]=2,
+ [1701]=2,
+ [1702]=2,
+ [1703]=2,
+ [1704]=2,
+ [1705]=2,
+ [1706]=2,
+ [1707]=2,
+ [1708]=2,
+ [1709]=2,
+ [1710]=2,
+ [1711]=2,
+ [1712]=2,
+ [1713]=2,
+ [1714]=2,
+ [1715]=2,
+ [1716]=2,
+ [1717]=2,
+ [1718]=2,
+ [1719]=2,
+ [1720]=2,
+ [1721]=2,
+ [1722]=2,
+ [1723]=2,
+ [1724]=2,
+ [1725]=2,
+ [1726]=2,
+ [1727]=2,
+ [1728]=3,
+ [1729]=2,
+ [1730]=2,
+ [1731]=3,
+ [1732]=3,
+ [1733]=3,
+ [1734]=3,
+ [1735]=3,
+ [1736]=3,
+ [1737]=3,
+ [1738]=3,
+ [1739]=3,
+ [1740]=2,
+ [1741]=3,
+ [1742]=2,
+ [1743]=3,
+ [1744]=2,
+ [1745]=2,
+ [1746]=3,
+ [1747]=3,
+ [1748]=6,
+ [1749]=3,
+ [1750]=5,
+ [1751]=5,
+ [1752]=5,
+ [1753]=5,
+ [1754]=5,
+ [1755]=5,
+ [1756]=5,
+ [1757]=4,
+ [1758]=6,
+ [1759]=5,
+ [1760]=5,
+ [1761]=5,
+ [1762]=5,
+ [1763]=5,
+ [1764]=5,
+ [1765]=6,
+ [1766]=6,
+ [1767]=5,
+ [1768]=5,
+ [1769]=6,
+ [1770]=5,
+ [1771]=5,
+ [1772]=5,
+ [1773]=5,
+ [1774]=3,
+ [1775]=3,
+ [1776]=6,
+ [1777]=6,
+ [1778]=6,
+ [1779]=6,
+ [1780]=6,
+ [1781]=6,
+ [1782]=6,
+ [1783]=6,
+ [1784]=6,
+ [1785]=6,
+ [1786]=2,
+ [1787]=2,
+ [1788]=2,
+ [1789]=6,
+ [1790]=6,
+ [1791]=2,
+ [1792]=6,
+ [1793]=6,
+ [1794]=6,
+ [1795]=6,
+ [1796]=6,
+ [1797]=6,
+ [1798]=6,
+ [1799]=6,
+ [1800]=6,
+ [1801]=6,
+ [1802]=6,
+ [1803]=6,
+ [1804]=6,
+ [1805]=6,
+ [1807]=6,
+ [1808]=3,
+ [1809]=5,
+ [1810]=2,
+ [1811]=2,
+ [1812]=2,
+ [1813]=3,
+ [1814]=3,
+ [1815]=3,
+ [1816]=3,
+ [1817]=3,
+ [1818]=2,
+ [1819]=2,
+ [1820]=2,
+ [1821]=2,
+ [1822]=3,
+ [1823]=2,
+ [1824]=2,
+ [1825]=2,
+ [1826]=2,
+ [1827]=2,
+ [1828]=2,
+ [1829]=2,
+ [1830]=2,
+ [1831]=2,
+ [1832]=3,
+ [1833]=2,
+ [1834]=3,
+ [1835]=2,
+ [1836]=3,
+ [1837]=2,
+ [1838]=2,
+ [1839]=3,
+ [1840]=5,
+ [1841]=5,
+ [1842]=5,
+ [1843]=5,
+ [1844]=5,
+ [1845]=5,
+ [1846]=5,
+ [1847]=5,
+ [1848]=5,
+ [1849]=5,
+ [1850]=5,
+ [1851]=5,
+ [1852]=5,
+ [1853]=5,
+ [1854]=5,
+ [1855]=5,
+ [1856]=5,
+ [1857]=5,
+ [1858]=5,
+ [1859]=5,
+ [1860]=5,
+ [1861]=5,
+ [1862]=5,
+ [1863]=5,
+ [1864]=5,
+ [1865]=5,
+ [1866]=5,
+ [1869]=3,
+ [1870]=2,
+ [1871]=2,
+ [1872]=2,
+ [1873]=2,
+ [1874]=2,
+ [1875]=2,
+ [1876]=2,
+ [1877]=2,
+ [1878]=2,
+ [1879]=2,
+ [1880]=2,
+ [1881]=3,
+ [1882]=3,
+ [1883]=3,
+ [1884]=2,
+ [1885]=2,
+ [1886]=2,
+ [1887]=2,
+ [1888]=2,
+ [1889]=2,
+ [1890]=2,
+ [1891]=2,
+ [1892]=2,
+ [1893]=2,
+ [1894]=2,
+ [1895]=2,
+ [1896]=2,
+ [1897]=2,
+ [1898]=2,
+ [1899]=3,
+ [1900]=3,
+ [1901]=2,
+ [1902]=2,
+ [1903]=2,
+ [1904]=2,
+ [1905]=3,
+ [1906]=2,
+ [1907]=3,
+ [1908]=3,
+ [1909]=2,
+ [1910]=2,
+ [1911]=2,
+ [1912]=3,
+ [1913]=3,
+ [1914]=2,
+ [1915]=2,
+ [1916]=2,
+ [1917]=2,
+ [1918]=2,
+ [1919]=2,
+ [1984]=6,
+ [1985]=6,
+ [1986]=6,
+ [1987]=6,
+ [1988]=6,
+ [1989]=6,
+ [1990]=6,
+ [1991]=6,
+ [1992]=6,
+ [1993]=6,
+ [1994]=2,
+ [1995]=2,
+ [1996]=2,
+ [1997]=2,
+ [1998]=2,
+ [1999]=2,
+ [2000]=2,
+ [2001]=2,
+ [2002]=2,
+ [2003]=2,
+ [2004]=2,
+ [2005]=2,
+ [2006]=2,
+ [2007]=2,
+ [2008]=2,
+ [2009]=2,
+ [2010]=2,
+ [2011]=2,
+ [2012]=2,
+ [2013]=2,
+ [2014]=2,
+ [2015]=2,
+ [2016]=2,
+ [2017]=2,
+ [2018]=2,
+ [2019]=2,
+ [2020]=2,
+ [2021]=2,
+ [2022]=2,
+ [2023]=2,
+ [2024]=2,
+ [2025]=2,
+ [2026]=2,
+ [2027]=5,
+ [2028]=5,
+ [2029]=5,
+ [2030]=5,
+ [2031]=5,
+ [2032]=5,
+ [2033]=5,
+ [2034]=5,
+ [2035]=5,
+ [2036]=6,
+ [2037]=6,
+ [2038]=6,
+ [2039]=6,
+ [2040]=6,
+ [2041]=6,
+ [2042]=2,
+ [2112]=3,
+ [2113]=2,
+ [2114]=2,
+ [2115]=2,
+ [2116]=2,
+ [2117]=2,
+ [2118]=3,
+ [2119]=3,
+ [2120]=2,
+ [2121]=3,
+ [2122]=2,
+ [2123]=2,
+ [2124]=2,
+ [2125]=2,
+ [2126]=2,
+ [2127]=2,
+ [2128]=2,
+ [2129]=2,
+ [2130]=2,
+ [2131]=2,
+ [2132]=3,
+ [2133]=2,
+ [2134]=4,
+ [2135]=4,
+ [2136]=4,
+ [2208]=2,
+ [2209]=2,
+ [2210]=2,
+ [2211]=2,
+ [2212]=2,
+ [2213]=2,
+ [2214]=2,
+ [2215]=2,
+ [2216]=2,
+ [2217]=2,
+ [2218]=3,
+ [2219]=3,
+ [2220]=3,
+ [2221]=4,
+ [2222]=3,
+ [2223]=2,
+ [2224]=2,
+ [2225]=3,
+ [2226]=3,
+ [2227]=2,
+ [2228]=2,
+ [6150]=4,
+ [6151]=2,
+ [6154]=2,
+ [6158]=4,
+ [6176]=2,
+ [6177]=2,
+ [6178]=2,
+ [6179]=2,
+ [6180]=2,
+ [6181]=2,
+ [6182]=2,
+ [6183]=2,
+ [6184]=2,
+ [6185]=2,
+ [6186]=2,
+ [6187]=2,
+ [6188]=2,
+ [6189]=2,
+ [6190]=2,
+ [6191]=2,
+ [6192]=2,
+ [6193]=2,
+ [6194]=2,
+ [6195]=2,
+ [6196]=2,
+ [6197]=2,
+ [6198]=2,
+ [6199]=2,
+ [6200]=2,
+ [6201]=2,
+ [6202]=2,
+ [6203]=2,
+ [6204]=2,
+ [6205]=2,
+ [6206]=2,
+ [6207]=2,
+ [6208]=2,
+ [6209]=2,
+ [6210]=2,
+ [6211]=2,
+ [6212]=2,
+ [6213]=2,
+ [6214]=2,
+ [6215]=2,
+ [6216]=2,
+ [6217]=2,
+ [6218]=2,
+ [6219]=2,
+ [6220]=2,
+ [6221]=2,
+ [6222]=2,
+ [6223]=2,
+ [6224]=2,
+ [6225]=2,
+ [6226]=2,
+ [6227]=2,
+ [6228]=2,
+ [6229]=2,
+ [6230]=2,
+ [6231]=2,
+ [6232]=2,
+ [6233]=2,
+ [6234]=2,
+ [6235]=2,
+ [6236]=2,
+ [6237]=2,
+ [6238]=2,
+ [6239]=2,
+ [6240]=2,
+ [6241]=2,
+ [6242]=2,
+ [6243]=2,
+ [6244]=2,
+ [6245]=2,
+ [6246]=2,
+ [6247]=2,
+ [6248]=2,
+ [6249]=2,
+ [6250]=2,
+ [6251]=2,
+ [6252]=2,
+ [6253]=2,
+ [6254]=2,
+ [6255]=2,
+ [6256]=2,
+ [6257]=2,
+ [6258]=2,
+ [6259]=2,
+ [6260]=2,
+ [6261]=2,
+ [6262]=2,
+ [6263]=2,
+ [6272]=4,
+ [6273]=4,
+ [6274]=4,
+ [6275]=4,
+ [6276]=4,
+ [6277]=4,
+ [6278]=4,
+ [6279]=2,
+ [6280]=2,
+ [6281]=2,
+ [6282]=2,
+ [6283]=2,
+ [6284]=2,
+ [6285]=2,
+ [6286]=2,
+ [6287]=2,
+ [6288]=2,
+ [6289]=2,
+ [6290]=2,
+ [6291]=2,
+ [6292]=2,
+ [6293]=2,
+ [6294]=2,
+ [6295]=2,
+ [6296]=2,
+ [6297]=2,
+ [6298]=2,
+ [6299]=2,
+ [6300]=2,
+ [6301]=2,
+ [6302]=2,
+ [6303]=2,
+ [6304]=2,
+ [6305]=2,
+ [6306]=2,
+ [6307]=2,
+ [6308]=2,
+ [6309]=2,
+ [6310]=2,
+ [6311]=2,
+ [6312]=2,
+ [6314]=2,
+ [8204]=4,
+ [8205]=2,
+ [8294]=4,
+ [8295]=4,
+ [8296]=4,
+ [8297]=4,
+ [43072]=2,
+ [43073]=2,
+ [43074]=2,
+ [43075]=2,
+ [43076]=2,
+ [43077]=2,
+ [43078]=2,
+ [43079]=2,
+ [43080]=2,
+ [43081]=2,
+ [43082]=2,
+ [43083]=2,
+ [43084]=2,
+ [43085]=2,
+ [43086]=2,
+ [43087]=2,
+ [43088]=2,
+ [43089]=2,
+ [43090]=2,
+ [43091]=2,
+ [43092]=2,
+ [43093]=2,
+ [43094]=2,
+ [43095]=2,
+ [43096]=2,
+ [43097]=2,
+ [43098]=2,
+ [43099]=2,
+ [43100]=2,
+ [43101]=2,
+ [43102]=2,
+ [43103]=2,
+ [43104]=2,
+ [43105]=2,
+ [43106]=2,
+ [43107]=2,
+ [43108]=2,
+ [43109]=2,
+ [43110]=2,
+ [43111]=2,
+ [43112]=2,
+ [43113]=2,
+ [43114]=2,
+ [43115]=2,
+ [43116]=2,
+ [43117]=2,
+ [43118]=2,
+ [43119]=2,
+ [43120]=2,
+ [43121]=2,
+ [43122]=1,
+ [43123]=4,
+ [68288]=2,
+ [68289]=2,
+ [68290]=2,
+ [68291]=2,
+ [68292]=2,
+ [68293]=3,
+ [68294]=4,
+ [68295]=3,
+ [68296]=4,
+ [68297]=3,
+ [68298]=3,
+ [68299]=4,
+ [68300]=4,
+ [68301]=1,
+ [68302]=3,
+ [68303]=3,
+ [68304]=3,
+ [68305]=3,
+ [68306]=3,
+ [68307]=2,
+ [68308]=2,
+ [68309]=2,
+ [68310]=2,
+ [68311]=1,
+ [68312]=2,
+ [68313]=2,
+ [68314]=2,
+ [68315]=2,
+ [68316]=2,
+ [68317]=3,
+ [68318]=2,
+ [68319]=2,
+ [68320]=2,
+ [68321]=3,
+ [68322]=4,
+ [68323]=4,
+ [68324]=3,
+ [68331]=2,
+ [68332]=2,
+ [68333]=2,
+ [68334]=2,
+ [68335]=3,
+ [68480]=2,
+ [68481]=3,
+ [68482]=2,
+ [68483]=3,
+ [68484]=3,
+ [68485]=3,
+ [68486]=2,
+ [68487]=2,
+ [68488]=2,
+ [68489]=3,
+ [68490]=2,
+ [68491]=2,
+ [68492]=3,
+ [68493]=2,
+ [68494]=3,
+ [68495]=3,
+ [68496]=2,
+ [68497]=3,
+ [68521]=3,
+ [68522]=3,
+ [68523]=3,
+ [68524]=3,
+ [68525]=2,
+ [68526]=2,
+ [68527]=4,
+}
+
+-- done
diff --git a/tex/generic/context/luatex/luatex-basics-gen.lua b/tex/generic/context/luatex/luatex-basics-gen.lua
index c4d653604..c298f6dd9 100644
--- a/tex/generic/context/luatex/luatex-basics-gen.lua
+++ b/tex/generic/context/luatex/luatex-basics-gen.lua
@@ -63,15 +63,19 @@ logs = {
}
callbacks = {
- register = function(n,f) return callback.register(n,f) end,
+ register = function(n,f)
+ return callback.register(n,f)
+ end,
}
-utilities = {
- storage = {
- allocate = function(t) return t or { } end,
- mark = function(t) return t or { } end,
- },
+utilities = utilities or { } utilities.storage = {
+ allocate = function(t)
+ return t or { }
+ end,
+ mark = function(t)
+ return t or { }
+ end,
}
characters = characters or {
@@ -355,12 +359,28 @@ end
--
+-- function table.setmetatableindex(t,f)
+-- if type(t) ~= "table" then
+-- f = f or t
+-- t = { }
+-- end
+-- setmetatable(t,{ __index = f })
+-- return t
+-- end
+
function table.setmetatableindex(t,f)
if type(t) ~= "table" then
- f = f or t
- t = { }
+ f, t = t, { }
+ end
+ local m = getmetatable(t)
+ if f == "table" then
+ f = function(t,k) local v = { } t[k] = v return v end
+ end
+ if m then
+ m.__index = f
+ else
+ setmetatable(t,{ __index = f })
end
- setmetatable(t,{ __index = f })
return t
end
diff --git a/tex/generic/context/luatex/luatex-basics-nod.lua b/tex/generic/context/luatex/luatex-basics-nod.lua
index 78f1b172a..e7b5ab24f 100644
--- a/tex/generic/context/luatex/luatex-basics-nod.lua
+++ b/tex/generic/context/luatex/luatex-basics-nod.lua
@@ -211,6 +211,44 @@ nuts.setlist = direct.setlist or function(n,l) setfield(n,"list",
nuts.getleader = direct.getleader
nuts.setleader = direct.setleader or function(n,l) setfield(n,"leader",l) end
+if not direct.is_glyph then
+ local getchar = direct.getchar
+ local getid = direct.getid
+ local getfont = direct.getfont
+ local glyph_code = nodes.nodecodes.glyph
+ function direct.is_glyph(n,f)
+ local id = getid(n)
+ if id == glyph_code then
+ if f and getfont(n) == f then
+ return getchar(n)
+ else
+ return false
+ end
+ else
+ return nil, id
+ end
+ end
+ function direct.is_char(n,f)
+ local id = getid(n)
+ if id == glyph_code then
+ if getsubtype(n) >= 256 then
+ return false
+ elseif f and getfont(n) == f then
+ return getchar(n)
+ else
+ return false
+ end
+ else
+ return nil, id
+ end
+ end
+end
+
+nuts.ischar = direct.is_char
+nuts.is_char = direct.is_char
+nuts.isglyph = direct.is_glyph
+nuts.is_glyph = direct.is_glyph
+
nuts.insert_before = direct.insert_before
nuts.insert_after = direct.insert_after
nuts.delete = direct.delete
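The block above installs is_glyph/is_char fallbacks for LuaTeX binaries that predate the built-in accessors; is_char additionally skips glyphs with a subtype >= 256 and, when a font id is passed, glyphs from other fonts. A sketch of the three possible outcomes, following the return values defined in this hunk (same_font_char is only for illustration):

    local ischar = nuts.ischar

    local function same_font_char(n,f)
        local char, id = ischar(n,f)
        if char then
            return char       -- an unprocessed glyph in font f
        elseif char == false then
            return nil        -- a glyph to leave alone (other font or subtype >= 256)
        else
            return nil, id    -- not a glyph; id tells which node it is
        end
    end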
diff --git a/tex/generic/context/luatex/luatex-basics-prepare.tex b/tex/generic/context/luatex/luatex-basics-prepare.tex
new file mode 100644
index 000000000..105226440
--- /dev/null
+++ b/tex/generic/context/luatex/luatex-basics-prepare.tex
@@ -0,0 +1,90 @@
+%D \module
+%D [ file=luatex-fonts-prepare,
+%D version=2006.03.18,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Generate data for generic,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This file generates a few resources needed for generic font processing. This
+%D is needed because we don't want to load the (mostly not used in generic) data
+%D files.
+
+\startluacode
+
+-- dofile("t:/sources/char-ini.lua")
+-- dofile("t:/sources/char-def.lua")
+
+local chardata = characters.data
+
+local s_init = 1 local s_rphf = 7
+local s_medi = 2 local s_half = 8
+local s_fina = 3 local s_pref = 9
+local s_isol = 4 local s_blwf = 10
+local s_mark = 5 local s_pstf = 11
+local s_rest = 6
+
+local mappers = {
+ l = s_init, -- left
+ d = s_medi, -- double
+ c = s_medi, -- joiner
+ r = s_fina, -- right
+ u = s_isol, -- nonjoiner
+}
+
+local first_arabic, last_arabic = characters.blockrange("arabic")
+local first_syriac, last_syriac = characters.blockrange("syriac")
+local first_mandiac, last_mandiac = characters.blockrange("mandiac")
+local first_nko, last_nko = characters.blockrange("nko")
+
+local classifiers = { }
+
+for k, c in next, chardata do
+ if k > 0 then
+ local c = chardata[k]
+ if c then
+ local arabic = c.arabic
+ if arabic then
+ classifiers[k] = mappers[arabic]
+ elseif k >= first_arabic and k <= last_arabic or k >= first_syriac and k <= last_syriac or
+ k >= first_mandiac and k <= last_mandiac or k >= first_nko and k <= last_nko then
+ if c.category == "mn" then
+ classifiers[k] = s_mark
+ else
+ classifiers[k] = s_rest
+ end
+ end
+ end
+ end
+end
+
+local template = string.formatters [ [[
+-- automatically generated from context data
+
+characters = characters or { }
+
+-- dummies
+
+characters.blockrange = { }
+
+-- classifiers needed for analysis
+
+%s
+
+-- done
+]] ]
+
+io.savedata("luatex-basics-chr.lua",template(
+ table.serialize(classifiers,"characters.classifiers")
+))
+
+\stopluacode
+
+\startTEXpage[offset=10pt]
+ \tttf generated file: luatex-basics-chr.lua
+\stopTEXpage
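The generated luatex-basics-chr.lua (added earlier in this patch) carries only the joining classifiers, so the generic loader can classify Arabic-script glyphs without loading the full character database. A quick check against the table as generated here, using the classes defined in the prepare file above (2 = dual joining, 3 = right joining):

    dofile("luatex-basics-chr.lua")

    local classifiers = characters.classifiers
    print(classifiers[0x0627]) -- 3: ALEF joins to the right only
    print(classifiers[0x0628]) -- 2: BEH joins on both sides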
diff --git a/tex/generic/context/luatex/luatex-fonts-lua.lua b/tex/generic/context/luatex/luatex-fonts-lua.lua
deleted file mode 100644
index ec3fe38be..000000000
--- a/tex/generic/context/luatex/luatex-fonts-lua.lua
+++ /dev/null
@@ -1,33 +0,0 @@
-if not modules then modules = { } end modules ['luatex-fonts-lua'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
-end
-
-local fonts = fonts
-fonts.formats.lua = "lua"
-
-function fonts.readers.lua(specification)
- local fullname = specification.filename or ""
- if fullname == "" then
- local forced = specification.forced or ""
- if forced ~= "" then
- fullname = specification.name .. "." .. forced
- else
- fullname = specification.name
- end
- end
- local fullname = resolvers.findfile(fullname) or ""
- if fullname ~= "" then
- local loader = loadfile(fullname)
- loader = loader and loader()
- return loader and loader(specification)
- end
-end
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index 88ea2056f..3f7f78c93 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : c:/data/develop/context/sources/luatex-fonts-merged.lua
-- parent file : c:/data/develop/context/sources/luatex-fonts.lua
--- merge date : 03/13/16 23:40:13
+-- merge date : 03/26/16 13:05:11
do -- begin closure to overcome local limits and interference
@@ -3474,6 +3474,149 @@ end -- closure
do -- begin closure to overcome local limits and interference
+if not modules then modules={} end modules ['util-fil']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local byte=string.byte
+local extract=bit32.extract
+utilities=utilities or {}
+local files={}
+utilities.files=files
+local zerobased={}
+function files.open(filename,zb)
+ local f=io.open(filename,"rb")
+ if f then
+ zerobased[f]=zb or false
+ end
+ return f
+end
+function files.close(f)
+ zerobased[f]=nil
+ f:close()
+end
+function files.size(f)
+ return f:seek("end")
+end
+function files.setposition(f,n)
+ if zerobased[f] then
+ f:seek("set",n)
+ else
+ f:seek("set",n-1)
+ end
+end
+function files.getposition(f)
+ if zerobased[f] then
+ return f:seek()
+ else
+ return f:seek()+1
+ end
+end
+function files.look(f,n,chars)
+ local p=f:seek()
+ local s=f:read(n)
+ f:seek("set",p)
+ if chars then
+ return s
+ else
+ return byte(s,1,#s)
+ end
+end
+function files.skip(f,n)
+ if n==1 then
+ f:read(n)
+ else
+ f:seek("set",f:seek()+n)
+ end
+end
+function files.readbyte(f)
+ return byte(f:read(1))
+end
+function files.readbytes(f,n)
+ return byte(f:read(n),1,n)
+end
+function files.readchar(f)
+ return f:read(1)
+end
+function files.readstring(f,n)
+ return f:read(n or 1)
+end
+function files.readinteger1(f)
+ local n=byte(f:read(1))
+ if n>=0x80 then
+ return n-0xFF-1
+ else
+ return n
+ end
+end
+files.readcardinal1=files.readbyte
+files.readcardinal=files.readcardinal1
+files.readinteger=files.readinteger1
+function files.readcardinal2(f)
+ local a,b=byte(f:read(2),1,2)
+ return 0x100*a+b
+end
+function files.readinteger2(f)
+ local a,b=byte(f:read(2),1,2)
+ local n=0x100*a+b
+ if n>=0x8000 then
+ return n-0xFFFF-1
+ else
+ return n
+ end
+end
+function files.readcardinal3(f)
+ local a,b,c=byte(f:read(3),1,3)
+ return 0x10000*a+0x100*b+c
+end
+function files.readcardinal4(f)
+ local a,b,c,d=byte(f:read(4),1,4)
+ return 0x1000000*a+0x10000*b+0x100*c+d
+end
+function files.readinteger4(f)
+ local a,b,c,d=byte(f:read(4),1,4)
+ local n=0x1000000*a+0x10000*b+0x100*c+d
+ if n>=0x80000000 then
+ return n-0xFFFFFFFF-1
+ else
+ return n
+ end
+end
+function files.readfixed4(f)
+ local a,b,c,d=byte(f:read(4),1,4)
+ local n=0x100*a+b
+ if n>=0x8000 then
+ return n-0xFFFF-1+(0x100*c+d)/0xFFFF
+ else
+ return n+(0x100*c+d)/0xFFFF
+ end
+end
+function files.read2dot14(f)
+ local a,b=byte(f:read(2),1,2)
+ local n=0x100*a+b
+ local m=extract(n,0,30)
+ if n>0x7FFF then
+ n=extract(n,30,2)
+ return m/0x4000-4
+ else
+ n=extract(n,30,2)
+ return n+m/0x4000
+ end
+end
+function files.skipshort(f,n)
+ f:read(2*(n or 1))
+end
+function files.skiplong(f,n)
+ f:read(4*(n or 1))
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
if not modules then modules={} end modules ['luat-basics-gen']={
version=1.100,
comment="companion to luatex-*.tex",
@@ -3529,13 +3672,17 @@ logs={
report=dummyfunction,
}
callbacks={
- register=function(n,f) return callback.register(n,f) end,
+ register=function(n,f)
+ return callback.register(n,f)
+ end,
}
-utilities={
- storage={
- allocate=function(t) return t or {} end,
- mark=function(t) return t or {} end,
- },
+utilities=utilities or {} utilities.storage={
+ allocate=function(t)
+ return t or {}
+ end,
+ mark=function(t)
+ return t or {}
+ end,
}
characters=characters or {
data={}
@@ -3735,10 +3882,17 @@ function caches.compile(data,luaname,lucname)
end
function table.setmetatableindex(t,f)
if type(t)~="table" then
- f=f or t
- t={}
+ f,t=t,{}
+ end
+ local m=getmetatable(t)
+ if f=="table" then
+ f=function(t,k) local v={} t[k]=v return v end
+ end
+ if m then
+ m.__index=f
+ else
+ setmetatable(t,{ __index=f })
end
- setmetatable(t,{ __index=f })
return t
end
arguments={}
@@ -4032,6 +4186,42 @@ nuts.getlist=direct.getlist
nuts.setlist=direct.setlist or function(n,l) setfield(n,"list",l) end
nuts.getleader=direct.getleader
nuts.setleader=direct.setleader or function(n,l) setfield(n,"leader",l) end
+if not direct.is_glyph then
+ local getchar=direct.getchar
+ local getid=direct.getid
+ local getfont=direct.getfont
+ local glyph_code=nodes.nodecodes.glyph
+ function direct.is_glyph(n,f)
+ local id=getid(n)
+ if id==glyph_code then
+ if f and getfont(n)==f then
+ return getchar(n)
+ else
+ return false
+ end
+ else
+ return nil,id
+ end
+ end
+ function direct.is_char(n,f)
+ local id=getid(n)
+ if id==glyph_code then
+ if getsubtype(n)>=256 then
+ return false
+ elseif f and getfont(n)==f then
+ return getchar(n)
+ else
+ return false
+ end
+ else
+ return nil,id
+ end
+ end
+end
+nuts.ischar=direct.is_char
+nuts.is_char=direct.is_char
+nuts.isglyph=direct.is_glyph
+nuts.is_glyph=direct.is_glyph
nuts.insert_before=direct.insert_before
nuts.insert_after=direct.insert_after
nuts.delete=direct.delete
@@ -4085,6 +4275,760 @@ end -- closure
do -- begin closure to overcome local limits and interference
+
+characters=characters or {}
+characters.blockrange={}
+characters.classifiers={
+ [1536]=4,
+ [1537]=4,
+ [1538]=4,
+ [1539]=4,
+ [1540]=4,
+ [1541]=4,
+ [1542]=6,
+ [1543]=6,
+ [1544]=4,
+ [1545]=6,
+ [1546]=6,
+ [1547]=4,
+ [1548]=6,
+ [1549]=6,
+ [1550]=6,
+ [1551]=6,
+ [1552]=5,
+ [1553]=5,
+ [1554]=5,
+ [1555]=5,
+ [1556]=5,
+ [1557]=5,
+ [1558]=5,
+ [1559]=5,
+ [1560]=5,
+ [1561]=5,
+ [1562]=5,
+ [1563]=6,
+ [1564]=6,
+ [1566]=6,
+ [1567]=6,
+ [1568]=2,
+ [1569]=4,
+ [1570]=3,
+ [1571]=3,
+ [1572]=3,
+ [1573]=3,
+ [1574]=2,
+ [1575]=3,
+ [1576]=2,
+ [1577]=3,
+ [1578]=2,
+ [1579]=2,
+ [1580]=2,
+ [1581]=2,
+ [1582]=2,
+ [1583]=3,
+ [1584]=3,
+ [1585]=3,
+ [1586]=3,
+ [1587]=2,
+ [1588]=2,
+ [1589]=2,
+ [1590]=2,
+ [1591]=2,
+ [1592]=2,
+ [1593]=2,
+ [1594]=2,
+ [1595]=2,
+ [1596]=2,
+ [1597]=2,
+ [1598]=2,
+ [1599]=2,
+ [1600]=2,
+ [1601]=2,
+ [1602]=2,
+ [1603]=2,
+ [1604]=2,
+ [1605]=2,
+ [1606]=2,
+ [1607]=2,
+ [1608]=3,
+ [1609]=2,
+ [1610]=2,
+ [1611]=5,
+ [1612]=5,
+ [1613]=5,
+ [1614]=5,
+ [1615]=5,
+ [1616]=5,
+ [1617]=5,
+ [1618]=5,
+ [1619]=5,
+ [1620]=5,
+ [1621]=5,
+ [1622]=5,
+ [1623]=5,
+ [1624]=5,
+ [1625]=5,
+ [1626]=5,
+ [1627]=5,
+ [1628]=5,
+ [1629]=5,
+ [1630]=5,
+ [1631]=5,
+ [1632]=6,
+ [1633]=6,
+ [1634]=6,
+ [1635]=6,
+ [1636]=6,
+ [1637]=6,
+ [1638]=6,
+ [1639]=6,
+ [1640]=6,
+ [1641]=6,
+ [1642]=6,
+ [1643]=6,
+ [1644]=6,
+ [1645]=6,
+ [1646]=2,
+ [1647]=2,
+ [1648]=5,
+ [1649]=3,
+ [1650]=3,
+ [1651]=3,
+ [1652]=4,
+ [1653]=3,
+ [1654]=3,
+ [1655]=3,
+ [1656]=2,
+ [1657]=2,
+ [1658]=2,
+ [1659]=2,
+ [1660]=2,
+ [1661]=2,
+ [1662]=2,
+ [1663]=2,
+ [1664]=2,
+ [1665]=2,
+ [1666]=2,
+ [1667]=2,
+ [1668]=2,
+ [1669]=2,
+ [1670]=2,
+ [1671]=2,
+ [1672]=3,
+ [1673]=3,
+ [1674]=3,
+ [1675]=3,
+ [1676]=3,
+ [1677]=3,
+ [1678]=3,
+ [1679]=3,
+ [1680]=3,
+ [1681]=3,
+ [1682]=3,
+ [1683]=3,
+ [1684]=3,
+ [1685]=3,
+ [1686]=3,
+ [1687]=3,
+ [1688]=3,
+ [1689]=3,
+ [1690]=2,
+ [1691]=2,
+ [1692]=2,
+ [1693]=2,
+ [1694]=2,
+ [1695]=2,
+ [1696]=2,
+ [1697]=2,
+ [1698]=2,
+ [1699]=2,
+ [1700]=2,
+ [1701]=2,
+ [1702]=2,
+ [1703]=2,
+ [1704]=2,
+ [1705]=2,
+ [1706]=2,
+ [1707]=2,
+ [1708]=2,
+ [1709]=2,
+ [1710]=2,
+ [1711]=2,
+ [1712]=2,
+ [1713]=2,
+ [1714]=2,
+ [1715]=2,
+ [1716]=2,
+ [1717]=2,
+ [1718]=2,
+ [1719]=2,
+ [1720]=2,
+ [1721]=2,
+ [1722]=2,
+ [1723]=2,
+ [1724]=2,
+ [1725]=2,
+ [1726]=2,
+ [1727]=2,
+ [1728]=3,
+ [1729]=2,
+ [1730]=2,
+ [1731]=3,
+ [1732]=3,
+ [1733]=3,
+ [1734]=3,
+ [1735]=3,
+ [1736]=3,
+ [1737]=3,
+ [1738]=3,
+ [1739]=3,
+ [1740]=2,
+ [1741]=3,
+ [1742]=2,
+ [1743]=3,
+ [1744]=2,
+ [1745]=2,
+ [1746]=3,
+ [1747]=3,
+ [1748]=6,
+ [1749]=3,
+ [1750]=5,
+ [1751]=5,
+ [1752]=5,
+ [1753]=5,
+ [1754]=5,
+ [1755]=5,
+ [1756]=5,
+ [1757]=4,
+ [1758]=6,
+ [1759]=5,
+ [1760]=5,
+ [1761]=5,
+ [1762]=5,
+ [1763]=5,
+ [1764]=5,
+ [1765]=6,
+ [1766]=6,
+ [1767]=5,
+ [1768]=5,
+ [1769]=6,
+ [1770]=5,
+ [1771]=5,
+ [1772]=5,
+ [1773]=5,
+ [1774]=3,
+ [1775]=3,
+ [1776]=6,
+ [1777]=6,
+ [1778]=6,
+ [1779]=6,
+ [1780]=6,
+ [1781]=6,
+ [1782]=6,
+ [1783]=6,
+ [1784]=6,
+ [1785]=6,
+ [1786]=2,
+ [1787]=2,
+ [1788]=2,
+ [1789]=6,
+ [1790]=6,
+ [1791]=2,
+ [1792]=6,
+ [1793]=6,
+ [1794]=6,
+ [1795]=6,
+ [1796]=6,
+ [1797]=6,
+ [1798]=6,
+ [1799]=6,
+ [1800]=6,
+ [1801]=6,
+ [1802]=6,
+ [1803]=6,
+ [1804]=6,
+ [1805]=6,
+ [1807]=6,
+ [1808]=3,
+ [1809]=5,
+ [1810]=2,
+ [1811]=2,
+ [1812]=2,
+ [1813]=3,
+ [1814]=3,
+ [1815]=3,
+ [1816]=3,
+ [1817]=3,
+ [1818]=2,
+ [1819]=2,
+ [1820]=2,
+ [1821]=2,
+ [1822]=3,
+ [1823]=2,
+ [1824]=2,
+ [1825]=2,
+ [1826]=2,
+ [1827]=2,
+ [1828]=2,
+ [1829]=2,
+ [1830]=2,
+ [1831]=2,
+ [1832]=3,
+ [1833]=2,
+ [1834]=3,
+ [1835]=2,
+ [1836]=3,
+ [1837]=2,
+ [1838]=2,
+ [1839]=3,
+ [1840]=5,
+ [1841]=5,
+ [1842]=5,
+ [1843]=5,
+ [1844]=5,
+ [1845]=5,
+ [1846]=5,
+ [1847]=5,
+ [1848]=5,
+ [1849]=5,
+ [1850]=5,
+ [1851]=5,
+ [1852]=5,
+ [1853]=5,
+ [1854]=5,
+ [1855]=5,
+ [1856]=5,
+ [1857]=5,
+ [1858]=5,
+ [1859]=5,
+ [1860]=5,
+ [1861]=5,
+ [1862]=5,
+ [1863]=5,
+ [1864]=5,
+ [1865]=5,
+ [1866]=5,
+ [1869]=3,
+ [1870]=2,
+ [1871]=2,
+ [1872]=2,
+ [1873]=2,
+ [1874]=2,
+ [1875]=2,
+ [1876]=2,
+ [1877]=2,
+ [1878]=2,
+ [1879]=2,
+ [1880]=2,
+ [1881]=3,
+ [1882]=3,
+ [1883]=3,
+ [1884]=2,
+ [1885]=2,
+ [1886]=2,
+ [1887]=2,
+ [1888]=2,
+ [1889]=2,
+ [1890]=2,
+ [1891]=2,
+ [1892]=2,
+ [1893]=2,
+ [1894]=2,
+ [1895]=2,
+ [1896]=2,
+ [1897]=2,
+ [1898]=2,
+ [1899]=3,
+ [1900]=3,
+ [1901]=2,
+ [1902]=2,
+ [1903]=2,
+ [1904]=2,
+ [1905]=3,
+ [1906]=2,
+ [1907]=3,
+ [1908]=3,
+ [1909]=2,
+ [1910]=2,
+ [1911]=2,
+ [1912]=3,
+ [1913]=3,
+ [1914]=2,
+ [1915]=2,
+ [1916]=2,
+ [1917]=2,
+ [1918]=2,
+ [1919]=2,
+ [1984]=6,
+ [1985]=6,
+ [1986]=6,
+ [1987]=6,
+ [1988]=6,
+ [1989]=6,
+ [1990]=6,
+ [1991]=6,
+ [1992]=6,
+ [1993]=6,
+ [1994]=2,
+ [1995]=2,
+ [1996]=2,
+ [1997]=2,
+ [1998]=2,
+ [1999]=2,
+ [2000]=2,
+ [2001]=2,
+ [2002]=2,
+ [2003]=2,
+ [2004]=2,
+ [2005]=2,
+ [2006]=2,
+ [2007]=2,
+ [2008]=2,
+ [2009]=2,
+ [2010]=2,
+ [2011]=2,
+ [2012]=2,
+ [2013]=2,
+ [2014]=2,
+ [2015]=2,
+ [2016]=2,
+ [2017]=2,
+ [2018]=2,
+ [2019]=2,
+ [2020]=2,
+ [2021]=2,
+ [2022]=2,
+ [2023]=2,
+ [2024]=2,
+ [2025]=2,
+ [2026]=2,
+ [2027]=5,
+ [2028]=5,
+ [2029]=5,
+ [2030]=5,
+ [2031]=5,
+ [2032]=5,
+ [2033]=5,
+ [2034]=5,
+ [2035]=5,
+ [2036]=6,
+ [2037]=6,
+ [2038]=6,
+ [2039]=6,
+ [2040]=6,
+ [2041]=6,
+ [2042]=2,
+ [2112]=3,
+ [2113]=2,
+ [2114]=2,
+ [2115]=2,
+ [2116]=2,
+ [2117]=2,
+ [2118]=3,
+ [2119]=3,
+ [2120]=2,
+ [2121]=3,
+ [2122]=2,
+ [2123]=2,
+ [2124]=2,
+ [2125]=2,
+ [2126]=2,
+ [2127]=2,
+ [2128]=2,
+ [2129]=2,
+ [2130]=2,
+ [2131]=2,
+ [2132]=3,
+ [2133]=2,
+ [2134]=4,
+ [2135]=4,
+ [2136]=4,
+ [2208]=2,
+ [2209]=2,
+ [2210]=2,
+ [2211]=2,
+ [2212]=2,
+ [2213]=2,
+ [2214]=2,
+ [2215]=2,
+ [2216]=2,
+ [2217]=2,
+ [2218]=3,
+ [2219]=3,
+ [2220]=3,
+ [2221]=4,
+ [2222]=3,
+ [2223]=2,
+ [2224]=2,
+ [2225]=3,
+ [2226]=3,
+ [2227]=2,
+ [2228]=2,
+ [6150]=4,
+ [6151]=2,
+ [6154]=2,
+ [6158]=4,
+ [6176]=2,
+ [6177]=2,
+ [6178]=2,
+ [6179]=2,
+ [6180]=2,
+ [6181]=2,
+ [6182]=2,
+ [6183]=2,
+ [6184]=2,
+ [6185]=2,
+ [6186]=2,
+ [6187]=2,
+ [6188]=2,
+ [6189]=2,
+ [6190]=2,
+ [6191]=2,
+ [6192]=2,
+ [6193]=2,
+ [6194]=2,
+ [6195]=2,
+ [6196]=2,
+ [6197]=2,
+ [6198]=2,
+ [6199]=2,
+ [6200]=2,
+ [6201]=2,
+ [6202]=2,
+ [6203]=2,
+ [6204]=2,
+ [6205]=2,
+ [6206]=2,
+ [6207]=2,
+ [6208]=2,
+ [6209]=2,
+ [6210]=2,
+ [6211]=2,
+ [6212]=2,
+ [6213]=2,
+ [6214]=2,
+ [6215]=2,
+ [6216]=2,
+ [6217]=2,
+ [6218]=2,
+ [6219]=2,
+ [6220]=2,
+ [6221]=2,
+ [6222]=2,
+ [6223]=2,
+ [6224]=2,
+ [6225]=2,
+ [6226]=2,
+ [6227]=2,
+ [6228]=2,
+ [6229]=2,
+ [6230]=2,
+ [6231]=2,
+ [6232]=2,
+ [6233]=2,
+ [6234]=2,
+ [6235]=2,
+ [6236]=2,
+ [6237]=2,
+ [6238]=2,
+ [6239]=2,
+ [6240]=2,
+ [6241]=2,
+ [6242]=2,
+ [6243]=2,
+ [6244]=2,
+ [6245]=2,
+ [6246]=2,
+ [6247]=2,
+ [6248]=2,
+ [6249]=2,
+ [6250]=2,
+ [6251]=2,
+ [6252]=2,
+ [6253]=2,
+ [6254]=2,
+ [6255]=2,
+ [6256]=2,
+ [6257]=2,
+ [6258]=2,
+ [6259]=2,
+ [6260]=2,
+ [6261]=2,
+ [6262]=2,
+ [6263]=2,
+ [6272]=4,
+ [6273]=4,
+ [6274]=4,
+ [6275]=4,
+ [6276]=4,
+ [6277]=4,
+ [6278]=4,
+ [6279]=2,
+ [6280]=2,
+ [6281]=2,
+ [6282]=2,
+ [6283]=2,
+ [6284]=2,
+ [6285]=2,
+ [6286]=2,
+ [6287]=2,
+ [6288]=2,
+ [6289]=2,
+ [6290]=2,
+ [6291]=2,
+ [6292]=2,
+ [6293]=2,
+ [6294]=2,
+ [6295]=2,
+ [6296]=2,
+ [6297]=2,
+ [6298]=2,
+ [6299]=2,
+ [6300]=2,
+ [6301]=2,
+ [6302]=2,
+ [6303]=2,
+ [6304]=2,
+ [6305]=2,
+ [6306]=2,
+ [6307]=2,
+ [6308]=2,
+ [6309]=2,
+ [6310]=2,
+ [6311]=2,
+ [6312]=2,
+ [6314]=2,
+ [8204]=4,
+ [8205]=2,
+ [8294]=4,
+ [8295]=4,
+ [8296]=4,
+ [8297]=4,
+ [43072]=2,
+ [43073]=2,
+ [43074]=2,
+ [43075]=2,
+ [43076]=2,
+ [43077]=2,
+ [43078]=2,
+ [43079]=2,
+ [43080]=2,
+ [43081]=2,
+ [43082]=2,
+ [43083]=2,
+ [43084]=2,
+ [43085]=2,
+ [43086]=2,
+ [43087]=2,
+ [43088]=2,
+ [43089]=2,
+ [43090]=2,
+ [43091]=2,
+ [43092]=2,
+ [43093]=2,
+ [43094]=2,
+ [43095]=2,
+ [43096]=2,
+ [43097]=2,
+ [43098]=2,
+ [43099]=2,
+ [43100]=2,
+ [43101]=2,
+ [43102]=2,
+ [43103]=2,
+ [43104]=2,
+ [43105]=2,
+ [43106]=2,
+ [43107]=2,
+ [43108]=2,
+ [43109]=2,
+ [43110]=2,
+ [43111]=2,
+ [43112]=2,
+ [43113]=2,
+ [43114]=2,
+ [43115]=2,
+ [43116]=2,
+ [43117]=2,
+ [43118]=2,
+ [43119]=2,
+ [43120]=2,
+ [43121]=2,
+ [43122]=1,
+ [43123]=4,
+ [68288]=2,
+ [68289]=2,
+ [68290]=2,
+ [68291]=2,
+ [68292]=2,
+ [68293]=3,
+ [68294]=4,
+ [68295]=3,
+ [68296]=4,
+ [68297]=3,
+ [68298]=3,
+ [68299]=4,
+ [68300]=4,
+ [68301]=1,
+ [68302]=3,
+ [68303]=3,
+ [68304]=3,
+ [68305]=3,
+ [68306]=3,
+ [68307]=2,
+ [68308]=2,
+ [68309]=2,
+ [68310]=2,
+ [68311]=1,
+ [68312]=2,
+ [68313]=2,
+ [68314]=2,
+ [68315]=2,
+ [68316]=2,
+ [68317]=3,
+ [68318]=2,
+ [68319]=2,
+ [68320]=2,
+ [68321]=3,
+ [68322]=4,
+ [68323]=4,
+ [68324]=3,
+ [68331]=2,
+ [68332]=2,
+ [68333]=2,
+ [68334]=2,
+ [68335]=3,
+ [68480]=2,
+ [68481]=3,
+ [68482]=2,
+ [68483]=3,
+ [68484]=3,
+ [68485]=3,
+ [68486]=2,
+ [68487]=2,
+ [68488]=2,
+ [68489]=3,
+ [68490]=2,
+ [68491]=2,
+ [68492]=3,
+ [68493]=2,
+ [68494]=3,
+ [68495]=3,
+ [68496]=2,
+ [68497]=3,
+ [68521]=3,
+ [68522]=3,
+ [68523]=3,
+ [68524]=3,
+ [68525]=2,
+ [68526]=2,
+ [68527]=4,
+}
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
if not modules then modules={} end modules ['font-ini']={
version=1.001,
comment="companion to font-ini.mkiv",
@@ -5773,10 +6717,9 @@ local fonts=fonts
fonts.names=fonts.names or {}
fonts.names.version=1.001
fonts.names.basename="luatex-fonts-names"
-fonts.names.new_to_old={}
-fonts.names.old_to_new={}
fonts.names.cache=containers.define("fonts","data",fonts.names.version,true)
-local data,loaded=nil,false
+local data=nil
+local loaded=false
local fileformats={ "lua","tex","other text files" }
function fonts.names.reportmissingbase()
texio.write("<missing font database, run: mtxrun --script fonts --reload --simple>")
@@ -7018,46 +7961,6 @@ end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules={} end modules ['luatex-fonts-tfm']={
- version=1.001,
- comment="companion to luatex-*.tex",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
- license="see context related readme files"
-}
-if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
-end
-local fonts=fonts
-local tfm={}
-fonts.handlers.tfm=tfm
-fonts.formats.tfm="type1"
-function fonts.readers.tfm(specification)
- local fullname=specification.filename or ""
- if fullname=="" then
- local forced=specification.forced or ""
- if forced~="" then
- fullname=specification.name.."."..forced
- else
- fullname=specification.name
- end
- end
- local foundname=resolvers.findbinfile(fullname,'tfm') or ""
- if foundname=="" then
- foundname=resolvers.findbinfile(fullname,'ofm') or ""
- end
- if foundname~="" then
- specification.filename=foundname
- specification.format="ofm"
- return font.read_tfm(specification.filename,specification.size)
- end
-end
-
-end -- closure
-
-do -- begin closure to overcome local limits and interference
-
if not modules then modules={} end modules ['font-oti']={
version=1.001,
comment="companion to font-ini.mkiv",
@@ -7199,1374 +8102,5600 @@ end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules={} end modules ['font-otf']={
+if not modules then modules={} end modules ['font-otr']={
version=1.001,
comment="companion to font-ini.mkiv",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local utfbyte=utf.byte
-local gmatch,gsub,find,match,lower,strip=string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
-local type,next,tonumber,tostring=type,next,tonumber,tostring
-local abs=math.abs
-local reversed,concat,insert,remove,sortedkeys=table.reversed,table.concat,table.insert,table.remove,table.sortedkeys
-local ioflush=io.flush
-local fastcopy,tohash,derivetable,copy=table.fastcopy,table.tohash,table.derive,table.copy
-local formatters=string.formatters
-local P,R,S,C,Ct,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.match
+if not characters then
+ require("char-def")
+ require("char-ini")
+end
+local next,type,unpack=next,type,unpack
+local byte,lower,char,strip,gsub=string.byte,string.lower,string.char,string.strip,string.gsub
+local bittest=bit32.btest
+local concat,remove,unpack=table.concat,table.remove,table.unpack
+local floor,mod,abs,sqrt,round=math.floor,math.mod,math.abs,math.sqrt,math.round
+local P,R,S,C,Cs,Cc,Ct,Carg,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Ct,lpeg.Carg,lpeg.Cmt
+local lpegmatch=lpeg.match
local setmetatableindex=table.setmetatableindex
-local allocate=utilities.storage.allocate
-local registertracker=trackers.register
-local registerdirective=directives.register
-local starttiming=statistics.starttiming
-local stoptiming=statistics.stoptiming
-local elapsedtime=statistics.elapsedtime
-local findbinfile=resolvers.findbinfile
-local trace_private=false registertracker("otf.private",function(v) trace_private=v end)
-local trace_subfonts=false registertracker("otf.subfonts",function(v) trace_subfonts=v end)
-local trace_loading=false registertracker("otf.loading",function(v) trace_loading=v end)
-local trace_features=false registertracker("otf.features",function(v) trace_features=v end)
-local trace_dynamics=false registertracker("otf.dynamics",function(v) trace_dynamics=v end)
-local trace_sequences=false registertracker("otf.sequences",function(v) trace_sequences=v end)
-local trace_markwidth=false registertracker("otf.markwidth",function(v) trace_markwidth=v end)
-local trace_defining=false registertracker("fonts.defining",function(v) trace_defining=v end)
-local compact_lookups=true registertracker("otf.compactlookups",function(v) compact_lookups=v end)
-local purge_names=true registertracker("otf.purgenames",function(v) purge_names=v end)
-local report_otf=logs.reporter("fonts","otf loading")
-local fonts=fonts
-local otf=fonts.handlers.otf
-otf.glists={ "gsub","gpos" }
-otf.version=2.823
-otf.cache=containers.define("fonts","otf",otf.version,true)
-local hashes=fonts.hashes
-local definers=fonts.definers
-local readers=fonts.readers
-local constructors=fonts.constructors
-local fontdata=hashes and hashes.identifiers
-local chardata=characters and characters.data
-local otffeatures=constructors.newfeatures("otf")
-local registerotffeature=otffeatures.register
-local enhancers=allocate()
-otf.enhancers=enhancers
-local patches={}
-enhancers.patches=patches
-local forceload=false
-local cleanup=0
-local packdata=true
-local syncspace=true
-local forcenotdef=false
-local includesubfonts=false
-local overloadkerns=false
-local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes
-local wildcard="*"
-local default="dflt"
-local fontloader=fontloader
-local open_font=fontloader.open
-local close_font=fontloader.close
-local font_fields=fontloader.fields
-local apply_featurefile=fontloader.apply_featurefile
-local mainfields=nil
-local glyphfields=nil
-local formats=fonts.formats
-formats.otf="opentype"
-formats.ttf="truetype"
-formats.ttc="truetype"
-formats.dfont="truetype"
-registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end)
-registerdirective("fonts.otf.loader.force",function(v) forceload=v end)
-registerdirective("fonts.otf.loader.pack",function(v) packdata=v end)
-registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end)
-registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end)
-registerdirective("fonts.otf.loader.overloadkerns",function(v) overloadkerns=v end)
-function otf.fileformat(filename)
- local leader=lower(io.loadchunk(filename,4))
- local suffix=lower(file.suffix(filename))
- if leader=="otto" then
- return formats.otf,suffix=="otf"
- elseif leader=="ttcf" then
- return formats.ttc,suffix=="ttc"
- elseif suffix=="ttc" then
- return formats.ttc,true
- elseif suffix=="dfont" then
- return formats.dfont,true
+local formatters=string.formatters
+local sortedkeys=table.sortedkeys
+local sortedhash=table.sortedhash
+local stripstring=string.strip
+local utf16_to_utf8_be=utf.utf16_to_utf8_be
+local report=logs.reporter("otf reader")
+local trace_cmap=false
+fonts=fonts or {}
+local handlers=fonts.handlers or {}
+fonts.handlers=handlers
+local otf=handlers.otf or {}
+handlers.otf=otf
+local readers=otf.readers or {}
+otf.readers=readers
+local streamreader=utilities.files
+readers.streamreader=streamreader
+local openfile=streamreader.open
+local closefile=streamreader.close
+local skipbytes=streamreader.skip
+local setposition=streamreader.setposition
+local skipshort=streamreader.skipshort
+local readbytes=streamreader.readbytes
+local readstring=streamreader.readstring
+local readbyte=streamreader.readcardinal1
+local readushort=streamreader.readcardinal2
+local readuint=streamreader.readcardinal3
+local readulong=streamreader.readcardinal4
+local readchar=streamreader.readinteger1
+local readshort=streamreader.readinteger2
+local readlong=streamreader.readinteger4
+local readfixed=streamreader.readfixed4
+local readfword=readshort
+local readufword=readushort
+local readoffset=readushort
+local read2dot14=streamreader.read2dot14
+function streamreader.readtag(f)
+ return lower(strip(readstring(f,4)))
+end
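+-- a LONGDATETIME is an 8 byte count of seconds since 1904-01-01; only the low five bytes are combined here, which is enough to tell creation and modification stamps apart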
+local function readlongdatetime(f)
+ local a,b,c,d,e,f,g,h=readbytes(f,8)
+ return 0x100000000*d+0x1000000*e+0x10000*f+0x100*g+h
+end
+local tableversion=0.004
+local privateoffset=fonts.constructors and fonts.constructors.privateoffset or 0xF0000
+readers.tableversion=tableversion
+local reportedskipped={}
+local function reportskippedtable(tag)
+ if not reportedskipped[tag] then
+ report("loading of table %a skipped (reported once only)",tag)
+ reportedskipped[tag]=true
+ end
+end
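+-- symbolic names for the standard name table identifiers 0 up to 24 (see the opentype 'name' specification)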
+local reservednames={ [0]="copyright",
+ "family",
+ "subfamily",
+ "uniqueid",
+ "fullname",
+ "version",
+ "postscriptname",
+ "trademark",
+ "manufacturer",
+ "designer",
+ "description",
+ "venderurl",
+ "designerurl",
+ "license",
+ "licenseurl",
+ "reserved",
+ "typographicfamily",
+ "typographicsubfamily",
+ "compatiblefullname",
+ "sampletext",
+ "cidfindfontname",
+ "wwsfamily",
+ "wwssubfamily",
+ "lightbackgroundpalette",
+ "darkbackgroundpalette",
+}
+local platforms={ [0]="unicode",
+ "macintosh",
+ "iso",
+ "windows",
+ "custom",
+}
+local encodings={
+ unicode={ [0]="unicode 1.0 semantics",
+ "unicode 1.1 semantics",
+ "iso/iec 10646",
+ "unicode 2.0 bmp",
+ "unicode 2.0 full",
+ "unicode variation sequences",
+ "unicode full repertoire",
+ },
+ macintosh={ [0]="roman","japanese","chinese (traditional)","korean","arabic","hebrew","greek","russian",
+ "rsymbol","devanagari","gurmukhi","gujarati","oriya","bengali","tamil","telugu","kannada",
+ "malayalam","sinhalese","burmese","khmer","thai","laotian","georgian","armenian",
+ "chinese (simplified)","tibetan","mongolian","geez","slavic","vietnamese","sindhi",
+ "uninterpreted",
+ },
+ iso={ [0]="7-bit ascii",
+ "iso 10646",
+ "iso 8859-1",
+ },
+ windows={ [0]="symbol",
+ "unicode bmp",
+ "shiftjis",
+ "prc",
+ "big5",
+ "wansung",
+ "johab",
+ "reserved 7",
+ "reserved 8",
+ "reserved 9",
+ "unicode ucs-4",
+ },
+ custom={
+ }
+}
+local decoders={
+ unicode={},
+ macintosh={},
+ iso={},
+ windows={
+ ["unicode bmp"]=utf16_to_utf8_be
+ },
+ custom={},
+}
+local languages={
+ unicode={
+ [ 0]="english",
+ },
+ macintosh={
+ [ 0]="english",
+ },
+ iso={},
+ windows={
+ [0x0409]="english - united states",
+ },
+ custom={},
+}
+local standardromanencoding={ [0]=
+ "notdef",".null","nonmarkingreturn","space","exclam","quotedbl",
+ "numbersign","dollar","percent","ampersand","quotesingle","parenleft",
+ "parenright","asterisk","plus","comma","hyphen","period","slash",
+ "zero","one","two","three","four","five","six","seven","eight",
+ "nine","colon","semicolon","less","equal","greater","question","at",
+ "A","B","C","D","E","F","G","H","I","J","K","L","M","N","O",
+ "P","Q","R","S","T","U","V","W","X","Y","Z","bracketleft",
+ "backslash","bracketright","asciicircum","underscore","grave","a","b",
+ "c","d","e","f","g","h","i","j","k","l","m","n","o","p","q",
+ "r","s","t","u","v","w","x","y","z","braceleft","bar",
+ "braceright","asciitilde","Adieresis","Aring","Ccedilla","Eacute",
+ "Ntilde","Odieresis","Udieresis","aacute","agrave","acircumflex",
+ "adieresis","atilde","aring","ccedilla","eacute","egrave",
+ "ecircumflex","edieresis","iacute","igrave","icircumflex","idieresis",
+ "ntilde","oacute","ograve","ocircumflex","odieresis","otilde","uacute",
+ "ugrave","ucircumflex","udieresis","dagger","degree","cent","sterling",
+ "section","bullet","paragraph","germandbls","registered","copyright",
+ "trademark","acute","dieresis","notequal","AE","Oslash","infinity",
+ "plusminus","lessequal","greaterequal","yen","mu","partialdiff",
+ "summation","product","pi","integral","ordfeminine","ordmasculine",
+ "Omega","ae","oslash","questiondown","exclamdown","logicalnot",
+ "radical","florin","approxequal","Delta","guillemotleft",
+ "guillemotright","ellipsis","nonbreakingspace","Agrave","Atilde",
+ "Otilde","OE","oe","endash","emdash","quotedblleft","quotedblright",
+ "quoteleft","quoteright","divide","lozenge","ydieresis","Ydieresis",
+ "fraction","currency","guilsinglleft","guilsinglright","fi","fl",
+ "daggerdbl","periodcentered","quotesinglbase","quotedblbase",
+ "perthousand","Acircumflex","Ecircumflex","Aacute","Edieresis","Egrave",
+ "Iacute","Icircumflex","Idieresis","Igrave","Oacute","Ocircumflex",
+ "apple","Ograve","Uacute","Ucircumflex","Ugrave","dotlessi",
+ "circumflex","tilde","macron","breve","dotaccent","ring","cedilla",
+ "hungarumlaut","ogonek","caron","Lslash","lslash","Scaron","scaron",
+ "Zcaron","zcaron","brokenbar","Eth","eth","Yacute","yacute","Thorn",
+ "thorn","minus","multiply","onesuperior","twosuperior","threesuperior",
+ "onehalf","onequarter","threequarters","franc","Gbreve","gbreve",
+ "Idotaccent","Scedilla","scedilla","Cacute","cacute","Ccaron","ccaron",
+ "dcroat",
+}
+local weights={
+ [100]="thin",
+ [200]="extralight",
+ [300]="light",
+ [400]="normal",
+ [500]="medium",
+ [600]="semibold",
+ [700]="bold",
+ [800]="extrabold",
+ [900]="black",
+}
+local widths={
+ [1]="ultracondensed",
+ [2]="extracondensed",
+ [3]="condensed",
+ [4]="semicondensed",
+ [5]="normal",
+ [6]="semiexpanded",
+ [7]="expanded",
+ [8]="extraexpanded",
+ [9]="ultraexpanded",
+}
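+-- unknown weight classes are rounded to the nearest hundred and mapped onto a known name (above 900 counts as black); unknown width classes simply fall back to normal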
+setmetatableindex(weights,function(t,k)
+ local r=floor((k+50)/100)*100
+ local v=(r>900 and "black") or rawget(t,r) or "normal"
+ return v
+end)
+setmetatableindex(widths,function(t,k)
+ return "normal"
+end)
+local panoseweights={
+ [ 0]="normal",
+ [ 1]="normal",
+ [ 2]="verylight",
+ [ 3]="light",
+ [ 4]="thin",
+ [ 5]="book",
+ [ 6]="medium",
+ [ 7]="demi",
+ [ 8]="bold",
+ [ 9]="heavy",
+ [10]="black",
+}
+local panosewidths={
+ [ 0]="normal",
+ [ 1]="normal",
+ [ 2]="normal",
+ [ 3]="normal",
+ [ 4]="normal",
+ [ 5]="expanded",
+ [ 6]="condensed",
+ [ 7]="veryexpanded",
+ [ 8]="verycondensed",
+ [ 9]="monospaced",
+}
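+-- the name table: all records for the unicode, windows and macintosh platforms are collected first and the strings are resolved afterwards, preferring windows unicode bmp (us english), then macintosh roman (english), then whatever else is present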
+function readers.name(f,fontdata)
+ local datatable=fontdata.tables.name
+ if datatable then
+ setposition(f,datatable.offset)
+ local format=readushort(f)
+ local nofnames=readushort(f)
+ local offset=readushort(f)
+ local namelists={
+ unicode={},
+ windows={},
+ macintosh={},
+ }
+ for i=1,nofnames do
+ local platform=platforms[readushort(f)]
+ if platform then
+ local namelist=namelists[platform]
+ if namelist then
+ local encoding=readushort(f)
+ local language=readushort(f)
+ local encodings=encodings[platform]
+ local languages=languages[platform]
+ if encodings and languages then
+ local encoding=encodings[encoding]
+ local language=languages[language]
+ if encoding and language then
+ local name=reservednames[readushort(f)]
+ if name then
+ namelist[#namelist+1]={
+ platform=platform,
+ encoding=encoding,
+ language=language,
+ name=name,
+ length=readushort(f),
+ offset=readushort(f),
+ }
+ else
+ skipshort(f,2)
+ end
+ else
+ skipshort(f,3)
+ end
+ else
+ skipshort(f,3)
+ end
+ else
+ skipshort(f,5)
+ end
+ else
+ skipshort(f,5)
+ end
+ end
+ local start=datatable.offset+offset
+ local names={}
+ local done={}
+ local function filter(platform,e,l)
+ local namelist=namelists[platform]
+ for i=1,#namelist do
+ local name=namelist[i]
+ local nametag=name.name
+ if not done[nametag] then
+ local encoding=name.encoding
+ local language=name.language
+ if (not e or encoding==e) and (not l or language==l) then
+ setposition(f,start+name.offset)
+ local content=readstring(f,name.length)
+ local decoder=decoders[platform]
+ if decoder then
+ decoder=decoder[encoding]
+ end
+ elseif sub then
+  return {
+   filename=fontdata.filename,
+   comment="there is no info for subfont "..sub,
+ content=content,
+ platform=platform,
+ encoding=encoding,
+ language=language,
+ }
+ done[nametag]=true
+ end
+ end
+ end
+ end
+ filter("windows","unicode bmp","english - united states")
+ filter("macintosh","roman","english")
+ filter("windows")
+ filter("macintosh")
+ filter("unicode")
+ fontdata.names=names
else
- return formats.ttf,suffix=="ttf"
+ fontdata.names={}
+ end
+end
+readers["os/2"]=function(f,fontdata)
+ local datatable=fontdata.tables["os/2"]
+ if datatable then
+ setposition(f,datatable.offset)
+ local version=readushort(f)
+ local windowsmetrics={
+ version=version,
+ averagewidth=readshort(f),
+ weightclass=readushort(f),
+ widthclass=readushort(f),
+ fstype=readushort(f),
+ subscriptxsize=readshort(f),
+ subscriptysize=readshort(f),
+ subscriptxoffset=readshort(f),
+ subscriptyoffset=readshort(f),
+ superscriptxsize=readshort(f),
+ superscriptysize=readshort(f),
+ superscriptxoffset=readshort(f),
+ superscriptyoffset=readshort(f),
+ strikeoutsize=readshort(f),
+ strikeoutpos=readshort(f),
+ familyclass=readshort(f),
+ panose={ readbytes(f,10) },
+ unicoderanges={ readulong(f),readulong(f),readulong(f),readulong(f) },
+ vendor=readstring(f,4),
+ fsselection=readushort(f),
+ firstcharindex=readushort(f),
+ lastcharindex=readushort(f),
+ typoascender=readshort(f),
+ typodescender=readshort(f),
+ typolinegap=readshort(f),
+ winascent=readushort(f),
+ windescent=readushort(f),
+ }
+ if version>=1 then
+ windowsmetrics.codepageranges={ readulong(f),readulong(f) }
+ end
+ if version>=3 then
+ windowsmetrics.xheight=readshort(f)
+ windowsmetrics.capheight=readshort(f)
+ windowsmetrics.defaultchar=readushort(f)
+ windowsmetrics.breakchar=readushort(f)
+ end
+ windowsmetrics.weight=windowsmetrics.weightclass and weights[windowsmetrics.weightclass]
+ windowsmetrics.width=windowsmetrics.widthclass and widths [windowsmetrics.widthclass]
+ windowsmetrics.panoseweight=panoseweights[windowsmetrics.panose[3]]
+ windowsmetrics.panosewidth=panosewidths [windowsmetrics.panose[4]]
+ fontdata.windowsmetrics=windowsmetrics
+ else
+ fontdata.windowsmetrics={}
+ end
+end
+readers.head=function(f,fontdata)
+ local datatable=fontdata.tables.head
+ if datatable then
+ setposition(f,datatable.offset)
+ local fontheader={
+ version=readfixed(f),
+ revision=readfixed(f),
+ checksum=readulong(f),
+ magic=readulong(f),
+ flags=readushort(f),
+ units=readushort(f),
+ created=readlongdatetime(f),
+ modified=readlongdatetime(f),
+ xmin=readshort(f),
+ ymin=readshort(f),
+ xmax=readshort(f),
+ ymax=readshort(f),
+ macstyle=readushort(f),
+ smallpixels=readushort(f),
+ directionhint=readshort(f),
+ indextolocformat=readshort(f),
+ glyphformat=readshort(f),
+ }
+ fontdata.fontheader=fontheader
+ else
+ fontdata.fontheader={}
+ end
+ fontdata.nofglyphs=0
+end
+readers.hhea=function(f,fontdata,specification)
+ if specification.details then
+ local datatable=fontdata.tables.hhea
+ if datatable then
+ setposition(f,datatable.offset)
+ fontdata.horizontalheader={
+ version=readfixed(f),
+ ascender=readfword(f),
+ descender=readfword(f),
+ linegap=readfword(f),
+ maxadvancewidth=readufword(f),
+ minleftsidebearing=readfword(f),
+ minrightsidebearing=readfword(f),
+ maxextent=readfword(f),
+ caretsloperise=readshort(f),
+ caretsloperun=readshort(f),
+ caretoffset=readshort(f),
+ reserved_1=readshort(f),
+ reserved_2=readshort(f),
+ reserved_3=readshort(f),
+ reserved_4=readshort(f),
+ metricdataformat=readshort(f),
+ nofhmetrics=readushort(f),
+ }
+ else
+ fontdata.horizontalheader={
+ nofhmetrics=0,
+ }
+ end
end
end
-local function otf_format(filename)
- local format,okay=otf.fileformat(filename)
- if not okay then
- report_otf("font %a is actually an %a file",filename,format)
+readers.maxp=function(f,fontdata,specification)
+ if specification.details then
+ local datatable=fontdata.tables.maxp
+ if datatable then
+ setposition(f,datatable.offset)
+ local version=readfixed(f)
+ local nofglyphs=readushort(f)
+ fontdata.nofglyphs=nofglyphs
+ if version==0.5 then
+ fontdata.maximumprofile={
+ version=version,
+ nofglyphs=nofglyphs,
+ }
+ return
+ elseif version==1.0 then
+ fontdata.maximumprofile={
+ version=version,
+ nofglyphs=nofglyphs,
+ points=readushort(f),
+ contours=readushort(f),
+ compositepoints=readushort(f),
+ compositecontours=readushort(f),
+ zones=readushort(f),
+ twilightpoints=readushort(f),
+ storage=readushort(f),
+ functiondefs=readushort(f),
+ instructiondefs=readushort(f),
+ stackelements=readushort(f),
+ sizeofinstructions=readushort(f),
+ componentelements=readushort(f),
+ componentdepth=readushort(f),
+ }
+ return
+ end
+ end
+ fontdata.maximumprofile={
+ version=version,
+ nofglyphs=0,
+ }
end
- return format
end
-local function load_featurefile(raw,featurefile)
- if featurefile and featurefile~="" then
- if trace_loading then
- report_otf("using featurefile %a",featurefile)
+readers.hmtx=function(f,fontdata,specification)
+ if specification.glyphs then
+ local datatable=fontdata.tables.hmtx
+ if datatable then
+ setposition(f,datatable.offset)
+ local nofmetrics=fontdata.horizontalheader.nofhmetrics
+ local glyphs=fontdata.glyphs
+ local nofglyphs=fontdata.nofglyphs
+ local nofrepeated=nofglyphs-nofmetrics
+ local width=0
+ local leftsidebearing=0
+ for i=0,nofmetrics-1 do
+ local glyph=glyphs[i]
+ width=readshort(f)
+ leftsidebearing=readshort(f)
+    if width~=0 then
+ glyph.width=width
+ end
+ end
+ for i=nofmetrics,nofrepeated do
+ local glyph=glyphs[i]
+ if width~=0 then
+ glyph.width=width
+ end
+ end
+ end
+ end
+end
+readers.post=function(f,fontdata,specification)
+ local datatable=fontdata.tables.post
+ if datatable then
+ setposition(f,datatable.offset)
+ local version=readfixed(f)
+ fontdata.postscript={
+ version=version,
+ italicangle=round(1000*readfixed(f))/1000,
+ underlineposition=readfword(f),
+ underlinethickness=readfword(f),
+ monospaced=readulong(f),
+ minmemtype42=readulong(f),
+ maxmemtype42=readulong(f),
+ minmemtype1=readulong(f),
+ maxmemtype1=readulong(f),
+ }
+ if not specification.glyphs then
+  elseif version==1.0 then
+   local glyphs=fontdata.glyphs
+   for index=0,#standardromanencoding do
+    glyphs[index].name=standardromanencoding[index]
+   end
+ elseif version==2.0 then
+ local glyphs=fontdata.glyphs
+ local nofglyphs=readushort(f)
+ local indices={}
+ local names={}
+ local maxnames=0
+ for i=0,nofglyphs-1 do
+ local nameindex=readushort(f)
+ if nameindex>=258 then
+ maxnames=maxnames+1
+ nameindex=nameindex-257
+ indices[nameindex]=i
+ else
+ glyphs[i].name=standardromanencoding[nameindex]
+ end
+ end
+ for i=1,maxnames do
+ local mapping=indices[i]
+ if not mapping then
+ report("quit post name fetching at %a of %a: %s",i,maxnames,"no index")
+ break
+ else
+ local length=readbyte(f)
+ if length>0 then
+ glyphs[mapping].name=readstring(f,length)
+ else
+ report("quit post name fetching at %a of %a: %s",i,maxnames,"overflow")
+ break
+ end
+ end
+ end
+ elseif version==2.5 then
+ elseif version==3.0 then
end
- apply_featurefile(raw,featurefile)
+ else
+ fontdata.postscript={}
end
end
-local function showfeatureorder(rawdata,filename)
- local sequences=rawdata.resources.sequences
- if sequences and #sequences>0 then
- if trace_loading then
- report_otf("font %a has %s sequences",filename,#sequences)
- report_otf(" ")
- end
- for nos=1,#sequences do
- local sequence=sequences[nos]
- local typ=sequence.type or "no-type"
- local name=sequence.name or "no-name"
- local subtables=sequence.subtables or { "no-subtables" }
- local features=sequence.features
- if trace_loading then
- report_otf("%3i %-15s %-20s [% t]",nos,name,typ,subtables)
+readers.cff=function(f,fontdata,specification)
+ if specification.glyphs then
+ reportskippedtable("cff")
+ end
+end
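+-- preferred cmap subtables as (platform, encoding, format) triplets, tried in this order: windows unicode bmp, windows ucs-4, unicode bmp, unicode 1.1, unicode 1.0, windows symbol and finally the format 14 variation selector subtable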
+local formatreaders={}
+local duplicatestoo=true
+local sequence={
+ { 3,1,4 },
+ { 3,10,12 },
+ { 0,3,4 },
+ { 0,1,4 },
+ { 0,0,6 },
+ { 3,0,6 },
+ { 0,5,14 },
+}
+local supported={}
+for i=1,#sequence do
+ local sp,se,sf=unpack(sequence[i])
+ local p=supported[sp]
+ if not p then
+ p={}
+ supported[sp]=p
+ end
+ local e=p[se]
+ if not e then
+ e={}
+ p[se]=e
+ end
+ e[sf]=true
+end
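+-- cmap format 4: segmented ranges over the bmp; a zero range offset maps a code onto (code+delta) mod 65536, otherwise the glyph index comes from the trailing glyph id array and the delta is applied afterwards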
+formatreaders[4]=function(f,fontdata,offset)
+ setposition(f,offset+2)
+ local length=readushort(f)
+ local language=readushort(f)
+ local nofsegments=readushort(f)/2
+ skipshort(f,3)
+ local endchars={}
+ local startchars={}
+ local deltas={}
+ local offsets={}
+ local indices={}
+ local mapping=fontdata.mapping
+ local glyphs=fontdata.glyphs
+ local duplicates=fontdata.duplicates
+ local nofdone=0
+ for i=1,nofsegments do
+ endchars[i]=readushort(f)
+ end
+ local reserved=readushort(f)
+ for i=1,nofsegments do
+ startchars[i]=readushort(f)
+ end
+ for i=1,nofsegments do
+ deltas[i]=readshort(f)
+ end
+ for i=1,nofsegments do
+ offsets[i]=readushort(f)
+ end
+ local size=(length-2*2-5*2-4*nofsegments*2)/2
+ for i=1,size-1 do
+ indices[i]=readushort(f)
+ end
+ for segment=1,nofsegments do
+ local startchar=startchars[segment]
+ local endchar=endchars[segment]
+ local offset=offsets[segment]
+ local delta=deltas[segment]
+ if startchar==0xFFFF and endchar==0xFFFF then
+ elseif startchar==0xFFFF and offset==0 then
+ elseif offset==0xFFFF then
+ elseif offset==0 then
+ if trace_cmap then
+ report("format 4.%i segment %2i from %C upto %C at index %H",1,segment,startchar,endchar,mod(startchar+delta,65536))
+ end
+ for unicode=startchar,endchar do
+ local index=mod(unicode+delta,65536)
+ if index and index>0 then
+ local glyph=glyphs[index]
+ if glyph then
+ local gu=glyph.unicode
+ if not gu then
+ glyph.unicode=unicode
+ nofdone=nofdone+1
+ elseif gu~=unicode then
+ if duplicatestoo then
+ local d=duplicates[gu]
+ if d then
+ d[unicode]=true
+ else
+ duplicates[gu]={ [unicode]=true }
+ end
+ else
+ report("duplicate case 1: %C %04i %s",unicode,index,glyphs[index].name)
+ end
+ end
+ if not mapping[index] then
+ mapping[index]=unicode
+ end
+ end
+ end
end
- if features then
- for feature,scripts in next,features do
- local tt={}
- if type(scripts)=="table" then
- for script,languages in next,scripts do
- local ttt={}
- for language,_ in next,languages do
- ttt[#ttt+1]=language
+ else
+ local shift=(segment-nofsegments+offset/2)-startchar
+ if trace_cmap then
+ report("format 4.%i segment %2i from %C upto %C at index %H",0,segment,startchar,endchar,mod(startchar+delta,65536))
+ end
+ for unicode=startchar,endchar do
+ local slot=shift+unicode
+ local index=indices[slot]
+ if index and index>0 then
+ index=mod(index+delta,65536)
+ local glyph=glyphs[index]
+ if glyph then
+ local gu=glyph.unicode
+ if not gu then
+ glyph.unicode=unicode
+ nofdone=nofdone+1
+ elseif gu~=unicode then
+ if duplicatestoo then
+ local d=duplicates[gu]
+ if d then
+ d[unicode]=true
+ else
+ duplicates[gu]={ [unicode]=true }
+ end
+ else
+ report("duplicate case 2: %C %04i %s",unicode,index,glyphs[index].name)
end
- tt[#tt+1]=formatters["[%s: % t]"](script,ttt)
end
- if trace_loading then
- report_otf(" %s: % t",feature,tt)
+ if not mapping[index] then
+ mapping[index]=unicode
end
+ end
+ end
+ end
+ end
+ end
+ return nofdone
+end
+formatreaders[6]=function(f,fontdata,offset)
+ setposition(f,offset)
+ local format=readushort(f)
+ local length=readushort(f)
+ local language=readushort(f)
+ local mapping=fontdata.mapping
+ local glyphs=fontdata.glyphs
+ local duplicates=fontdata.duplicates
+ local start=readushort(f)
+ local count=readushort(f)
+ local stop=start+count-1
+ local nofdone=0
+ if trace_cmap then
+  report("format 6 from %C to %C",start,stop)
+ end
+ for unicode=start,stop do
+ local index=readushort(f)
+ if index>0 then
+ local glyph=glyphs[index]
+ if glyph then
+ local gu=glyph.unicode
+ if not gu then
+ glyph.unicode=unicode
+ nofdone=nofdone+1
+ elseif gu~=unicode then
+ end
+ if not mapping[index] then
+ mapping[index]=unicode
+ end
+ end
+ end
+ end
+ return nofdone
+end
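+-- cmap format 12: sequential groups that map runs of 32 bit code points onto consecutive glyph indices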
+formatreaders[12]=function(f,fontdata,offset)
+ setposition(f,offset+2+2+4+4)
+ local mapping=fontdata.mapping
+ local glyphs=fontdata.glyphs
+ local duplicates=fontdata.duplicates
+ local nofgroups=readulong(f)
+ local nofdone=0
+ for i=1,nofgroups do
+ local first=readulong(f)
+ local last=readulong(f)
+ local index=readulong(f)
+ if trace_cmap then
+ report("format 12 from %C to %C",first,last)
+ end
+ for unicode=first,last do
+ local glyph=glyphs[index]
+ if glyph then
+ local gu=glyph.unicode
+ if not gu then
+ glyph.unicode=unicode
+ nofdone=nofdone+1
+ elseif gu~=unicode then
+ local d=duplicates[gu]
+ if d then
+ d[unicode]=true
+ else
+ duplicates[gu]={ [unicode]=true }
+ end
+ end
+ if not mapping[index] then
+ mapping[index]=unicode
+ end
+ end
+ index=index+1
+ end
+ end
+ return nofdone
+end
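+-- cmap format 14: unicode variation sequences; only the non-default mappings are stored, keyed by variation selector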
+formatreaders[14]=function(f,fontdata,offset)
+ if offset and offset~=0 then
+ setposition(f,offset)
+ local format=readushort(f)
+ local length=readulong(f)
+ local nofrecords=readulong(f)
+ local records={}
+ local variants={}
+ local nofdone=0
+ fontdata.variants=variants
+ for i=1,nofrecords do
+ records[i]={
+ selector=readuint(f),
+ default=readulong(f),
+ other=readulong(f),
+ }
+ end
+ for i=1,nofrecords do
+ local record=records[i]
+ local selector=record.selector
+ local default=record.default
+   local other=record.other
+ if other~=0 then
+ setposition(f,offset+other)
+ local mapping={}
+ local count=readulong(f)
+ for i=1,count do
+ mapping[readuint(f)]=readushort(f)
+ end
+ nofdone=nofdone+count
+ variants[selector]=mapping
+ end
+ end
+ return nofdone
+ else
+ return 0
+ end
+end
+local function checkcmap(f,fontdata,records,platform,encoding,format)
+ local data=records[platform]
+ if not data then
+ return 0
+ end
+ data=data[encoding]
+ if not data then
+ return 0
+ end
+ data=data[format]
+ if not data then
+ return 0
+ end
+ local reader=formatreaders[format]
+ if not reader then
+ return 0
+ end
+ local p=platforms[platform]
+ local e=encodings[p]
+ local n=reader(f,fontdata,data) or 0
+ report("cmap checked: platform %i (%s), encoding %i (%s), format %i, new unicodes %i",platform,p,encoding,e and e[encoding] or "?",format,n)
+ return n
+end
+function readers.cmap(f,fontdata,specification)
+ if specification.glyphs then
+ local datatable=fontdata.tables.cmap
+ if datatable then
+ local tableoffset=datatable.offset
+ setposition(f,tableoffset)
+ local version=readushort(f)
+ local noftables=readushort(f)
+ local records={}
+ local unicodecid=false
+ local variantcid=false
+ local variants={}
+ local duplicates=fontdata.duplicates or {}
+ fontdata.duplicates=duplicates
+ for i=1,noftables do
+ local platform=readushort(f)
+ local encoding=readushort(f)
+ local offset=readulong(f)
+ local record=records[platform]
+ if not record then
+ records[platform]={
+ [encoding]={
+ offsets={ offset },
+ formats={},
+ }
+ }
+ else
+ local subtables=record[encoding]
+ if not subtables then
+ record[encoding]={
+ offsets={ offset },
+ formats={},
+ }
else
- if trace_loading then
- report_otf(" %s: %S",feature,scripts)
+ local offsets=subtables.offsets
+ offsets[#offsets+1]=offset
+ end
+ end
+ end
+ report("found cmaps:")
+ for platform,record in sortedhash(records) do
+ local p=platforms[platform]
+ local e=encodings[p]
+ local sp=supported[platform]
+ local ps=p or "?"
+ if sp then
+ report(" platform %i: %s",platform,ps)
+ else
+ report(" platform %i: %s (unsupported)",platform,ps)
+ end
+ for encoding,subtables in sortedhash(record) do
+ local se=sp and sp[encoding]
+ local es=e and e[encoding] or "?"
+ if se then
+ report(" encoding %i: %s",encoding,es)
+ else
+ report(" encoding %i: %s (unsupported)",encoding,es)
+ end
+ local offsets=subtables.offsets
+ local formats=subtables.formats
+ for i=1,#offsets do
+ local offset=tableoffset+offsets[i]
+ setposition(f,offset)
+ formats[readushort(f)]=offset
+ end
+ record[encoding]=formats
+ local list=sortedkeys(formats)
+ for i=1,#list do
+ if not (se and se[list[i]]) then
+ list[i]=list[i].." (unsupported)"
end
end
+ report(" formats: % t",list)
+ end
+ end
+ local ok=false
+ for i=1,#sequence do
+ local sp,se,sf=unpack(sequence[i])
+ if checkcmap(f,fontdata,records,sp,se,sf)>0 then
+ ok=true
end
end
+ if not ok then
+ report("no useable unicode cmap found")
+ end
+ fontdata.cidmaps={
+ version=version,
+ noftables=noftables,
+ records=records,
+ }
+ else
+ fontdata.cidmaps={}
end
- if trace_loading then
- report_otf("\n")
+ end
+end
+function readers.loca(f,fontdata,specification)
+ if specification.glyphs then
+ reportskippedtable("loca")
+ end
+end
+function readers.glyf(f,fontdata,specification)
+ if specification.glyphs then
+ reportskippedtable("glyf")
+ end
+end
+function readers.kern(f,fontdata,specification)
+ if specification.kerns then
+ local datatable=fontdata.tables.kern
+ if datatable then
+ setposition(f,datatable.offset)
+ local version=readushort(f)
+ local noftables=readushort(f)
+ for i=1,noftables do
+ local version=readushort(f)
+ local length=readushort(f)
+ local coverage=readushort(f)
+ local format=bit32.rshift(coverage,8)
+ if format==0 then
+ local nofpairs=readushort(f)
+ local searchrange=readushort(f)
+ local entryselector=readushort(f)
+ local rangeshift=readushort(f)
+ local kerns={}
+ local glyphs=fontdata.glyphs
+ for i=1,nofpairs do
+ local left=readushort(f)
+ local right=readushort(f)
+ local kern=readfword(f)
+ local glyph=glyphs[left]
+ local kerns=glyph.kerns
+ if kerns then
+ kerns[right]=kern
+ else
+ glyph.kerns={ [right]=kern }
+ end
+ end
+ elseif format==2 then
+ report("todo: kern classes")
+ else
+ report("todo: kerns")
+ end
+ end
end
- elseif trace_loading then
- report_otf("font %a has no sequences",filename)
- end
-end
-local valid_fields=table.tohash {
- "ascent",
- "cidinfo",
- "copyright",
- "descent",
- "design_range_bottom",
- "design_range_top",
- "design_size",
- "encodingchanged",
- "extrema_bound",
- "familyname",
- "fontname",
- "fontstyle_id",
- "fontstyle_name",
- "fullname",
- "hasvmetrics",
- "horiz_base",
- "issans",
- "isserif",
- "italicangle",
- "macstyle",
- "notdef_loc",
- "onlybitmaps",
- "origname",
- "os2_version",
- "pfminfo",
- "serifcheck",
- "sfd_version",
- "strokedfont",
- "strokewidth",
- "table_version",
- "ttf_tables",
- "uni_interp",
- "uniqueid",
- "units_per_em",
- "upos",
- "use_typo_metrics",
- "uwidth",
- "validation_state",
- "version",
- "vert_base",
- "weight",
- "weight_width_slope_only",
-}
-local ordered_enhancers={
- "prepare tables",
- "prepare glyphs",
- "prepare lookups",
- "analyze glyphs",
- "analyze math",
- "reorganize lookups",
- "reorganize mark classes",
- "reorganize anchor classes",
- "reorganize glyph kerns",
- "reorganize glyph lookups",
- "reorganize glyph anchors",
- "merge kern classes",
- "reorganize features",
- "reorganize subtables",
- "check glyphs",
- "check metadata",
- "prepare tounicode",
- "check encoding",
- "add duplicates",
- "expand lookups",
- "check extra features",
- "cleanup tables",
- "compact lookups",
- "purge names",
-}
-local actions=allocate()
-local before=allocate()
-local after=allocate()
-patches.before=before
-patches.after=after
-local function enhance(name,data,filename,raw)
- local enhancer=actions[name]
- if enhancer then
- if trace_loading then
- report_otf("apply enhancement %a to file %a",name,filename)
- ioflush()
+ end
+end
+function readers.gdef(f,fontdata,specification)
+ if specification.details then
+ reportskippedtable("gdef")
+ end
+end
+function readers.gsub(f,fontdata,specification)
+ if specification.details then
+ reportskippedtable("gsub")
+ end
+end
+function readers.gpos(f,fontdata,specification)
+ if specification.details then
+ reportskippedtable("gpos")
+ end
+end
+function readers.math(f,fontdata,specification)
+ if specification.glyphs then
+ reportskippedtable("math")
+ end
+end
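+-- before shape data is cached, segment lists that occur in more than one place are stored once in data.segments and referenced by index; unpackoutlines restores the shared lists after reading from the cache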
+local function packoutlines(data,makesequence)
+ local subfonts=data.subfonts
+ if subfonts then
+ for i=1,#subfonts do
+ packoutlines(subfonts[i],makesequence)
+ end
+ return
+ end
+ local common=data.segments
+ if common then
+ return
+ end
+ local glyphs=data.glyphs
+ if not glyphs then
+ return
+ end
+ if makesequence then
+ for index=1,#glyphs do
+ local glyph=glyphs[index]
+ local segments=glyph.segments
+ if segments then
+ local sequence={}
+ local nofsequence=0
+ for i=1,#segments do
+ local segment=segments[i]
+ local nofsegment=#segment
+ nofsequence=nofsequence+1
+ sequence[nofsequence]=segment[nofsegment]
+ for i=1,nofsegment-1 do
+ nofsequence=nofsequence+1
+ sequence[nofsequence]=segment[i]
+ end
+ end
+ glyph.sequence=sequence
+ glyph.segments=nil
+ end
end
- enhancer(data,filename,raw)
else
+ local hash={}
+ local common={}
+ local reverse={}
+ local last=0
+ for index=1,#glyphs do
+ local segments=glyphs[index].segments
+ if segments then
+ for i=1,#segments do
+ local h=concat(segments[i]," ")
+ hash[h]=(hash[h] or 0)+1
+ end
+ end
+ end
+ for index=1,#glyphs do
+ local segments=glyphs[index].segments
+ if segments then
+ for i=1,#segments do
+ local segment=segments[i]
+ local h=concat(segment," ")
+ if hash[h]>1 then
+ local idx=reverse[h]
+ if not idx then
+ last=last+1
+ reverse[h]=last
+ common[last]=segment
+ idx=last
+ end
+ segments[i]=idx
+ end
+ end
+ end
+ end
+ if last>0 then
+ data.segments=common
+ end
end
end
-function enhancers.apply(data,filename,raw)
- local basename=file.basename(lower(filename))
- if trace_loading then
- report_otf("%s enhancing file %a","start",filename)
+local function unpackoutlines(data)
+ local subfonts=data.subfonts
+ if subfonts then
+ for i=1,#subfonts do
+ unpackoutlines(subfonts[i])
+ end
+ return
end
- ioflush()
- for e=1,#ordered_enhancers do
- local enhancer=ordered_enhancers[e]
- local b=before[enhancer]
- if b then
- for pattern,action in next,b do
- if find(basename,pattern) then
- action(data,filename,raw)
+ local common=data.segments
+ if not common then
+ return
+ end
+ local glyphs=data.glyphs
+ if not glyphs then
+ return
+ end
+ for index=1,#glyphs do
+ local segments=glyphs[index].segments
+ if segments then
+ for i=1,#segments do
+ local c=common[segments[i]]
+ if c then
+ segments[i]=c
end
end
end
- enhance(enhancer,data,filename,raw)
- local a=after[enhancer]
- if a then
- for pattern,action in next,a do
- if find(basename,pattern) then
- action(data,filename,raw)
+ end
+ data.segments=nil
+end
+otf.packoutlines=packoutlines
+otf.unpackoutlines=unpackoutlines
+local validutf=lpeg.patterns.validutf8
+local function getname(fontdata,key)
+ local names=fontdata.names
+ if names then
+ local value=names[key]
+ if value then
+ local content=value.content
+ return lpegmatch(validutf,content) and content or nil
+ end
+ end
+end
+local function getinfo(maindata,sub)
+ local fontdata=sub and maindata.subfonts and maindata.subfonts[sub] or maindata
+ local names=fontdata.names
+ if names then
+ local metrics=fontdata.windowsmetrics or {}
+ local postscript=fontdata.postscript or {}
+ local fontheader=fontdata.fontheader or {}
+ local cffinfo=fontdata.cffinfo or {}
+ local filename=fontdata.filename
+ local weight=getname(fontdata,"weight") or cffinfo.weight or metrics.weight
+ local width=getname(fontdata,"width") or cffinfo.width or metrics.width
+ return {
+ subfontindex=fontdata.subfontindex or sub or 0,
+ fontname=getname(fontdata,"postscriptname"),
+ fullname=getname(fontdata,"fullname"),
+ familyname=getname(fontdata,"typographicfamily") or getname(fontdata,"family"),
+ subfamily=getname(fontdata,"subfamily"),
+ modifiers=getname(fontdata,"typographicsubfamily"),
+ weight=weight and lower(weight),
+ width=width and lower(width),
+ pfmweight=metrics.weightclass or 400,
+ pfmwidth=metrics.widthclass or 5,
+ panosewidth=metrics.panosewidth,
+ panoseweight=metrics.panoseweight,
+ italicangle=postscript.italicangle or 0,
+ units=fontheader.units or 0,
+ designsize=fontdata.designsize,
+ minsize=fontdata.minsize,
+ maxsize=fontdata.maxsize,
+ monospaced=(tonumber(postscript.monospaced or 0)>0) or metrics.panosewidth=="monospaced",
+ averagewidth=metrics.averagewidth,
+ xheight=metrics.xheight,
+ ascender=metrics.typoascender,
+ descender=metrics.typodescender,
+ }
+ elseif n then
+ return {
+ filename=fontdata.filename,
+ comment="there is no info for subfont "..n,
+ }
+ else
+ return {
+ filename=fontdata.filename,
+ comment="there is no info",
+ }
+ end
+end
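+-- an sfnt file starts with a version tag followed by a table directory of (tag, checksum, offset, length) records, with offsets relative to the start of the file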
+local function loadtables(f,specification,offset)
+ if offset then
+ setposition(f,offset)
+ end
+ local tables={}
+ local basename=file.basename(specification.filename)
+ local filesize=specification.filesize
+ local filetime=specification.filetime
+ local fontdata={
+ filename=basename,
+ filesize=filesize,
+ filetime=filetime,
+ version=readstring(f,4),
+ noftables=readushort(f),
+ searchrange=readushort(f),
+ entryselector=readushort(f),
+ rangeshift=readushort(f),
+ tables=tables,
+ }
+ for i=1,fontdata.noftables do
+ local tag=lower(stripstring(readstring(f,4)))
+ local checksum=readulong(f)
+ local offset=readulong(f)
+ local length=readulong(f)
+ if offset+length>filesize then
+ report("bad %a table in file %a",tag,basename)
+ end
+ tables[tag]={
+ checksum=checksum,
+ offset=offset,
+ length=length,
+ }
+ end
+ if tables.cff then
+ fontdata.format="opentype"
+ else
+ fontdata.format="truetype"
+ end
+ return fontdata
+end
+local function prepareglyps(fontdata)
+ local glyphs=setmetatableindex(function(t,k)
+ local v={
+ index=k,
+ }
+ t[k]=v
+ return v
+ end)
+ fontdata.glyphs=glyphs
+ fontdata.mapping={}
+end
+local function readdata(f,offset,specification)
+ local fontdata=loadtables(f,specification,offset)
+ if specification.glyphs then
+ prepareglyps(fontdata)
+ end
+ readers["name"](f,fontdata,specification)
+ local askedname=specification.askedname
+ if askedname then
+ local fullname=getname(fontdata,"fullname") or ""
+ local cleanname=gsub(askedname,"[^a-zA-Z0-9]","")
+ local foundname=gsub(fullname,"[^a-zA-Z0-9]","")
+ if lower(cleanname)~=lower(foundname) then
+ return
+ end
+ end
+ readers["os/2"](f,fontdata,specification)
+ readers["head"](f,fontdata,specification)
+ readers["maxp"](f,fontdata,specification)
+ readers["hhea"](f,fontdata,specification)
+ readers["hmtx"](f,fontdata,specification)
+ readers["post"](f,fontdata,specification)
+ readers["cff" ](f,fontdata,specification)
+ readers["cmap"](f,fontdata,specification)
+ readers["loca"](f,fontdata,specification)
+ readers["glyf"](f,fontdata,specification)
+ readers["kern"](f,fontdata,specification)
+ readers["gdef"](f,fontdata,specification)
+ readers["gsub"](f,fontdata,specification)
+ readers["gpos"](f,fontdata,specification)
+ readers["math"](f,fontdata,specification)
+ fontdata.locations=nil
+ fontdata.tables=nil
+ fontdata.cidmaps=nil
+ fontdata.dictionaries=nil
+ return fontdata
+end
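+-- version tag "OTTO" means cff flavoured opentype, "true" or <0 1 0 0> means truetype outlines, and "ttcf" a truetype collection in which a subfont can be selected by number or by name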
+local function loadfontdata(specification)
+ local filename=specification.filename
+ local fileattr=lfs.attributes(filename)
+ local filesize=fileattr and fileattr.size or 0
+ local filetime=fileattr and fileattr.modification or 0
+ local f=openfile(filename,true)
+ if not f then
+ report("unable to open %a",filename)
+ elseif filesize==0 then
+ report("empty file %a",filename)
+ closefile(f)
+ else
+ specification.filesize=filesize
+ specification.filetime=filetime
+ local version=readstring(f,4)
+ local fontdata=nil
+ if version=="OTTO" or version=="true" or version=="\0\1\0\0" then
+ fontdata=readdata(f,0,specification)
+ elseif version=="ttcf" then
+ local subfont=tonumber(specification.subfont)
+ local offsets={}
+ local ttcversion=readulong(f)
+ local nofsubfonts=readulong(f)
+ for i=1,nofsubfonts do
+ offsets[i]=readulong(f)
+ end
+ if subfont then
+ if subfont>=1 and subfont<=nofsubfonts then
+ fontdata=readdata(f,offsets[subfont],specification)
+ else
+ report("no subfont %a in file %a",subfont,filename)
+ end
+ else
+ subfont=specification.subfont
+ if type(subfont)=="string" and subfont~="" then
+ specification.askedname=subfont
+ for i=1,nofsubfonts do
+ fontdata=readdata(f,offsets[i],specification)
+ if fontdata then
+ fontdata.subfontindex=i
+ report("subfont named %a has index %a",subfont,i)
+ break
+ end
+ end
+ if not fontdata then
+ report("no subfont named %a",subfont)
+ end
+ else
+ local subfonts={}
+ fontdata={
+ filename=filename,
+ filesize=filesize,
+ filetime=filetime,
+ version=version,
+ subfonts=subfonts,
+ ttcversion=ttcversion,
+ nofsubfonts=nofsubfonts,
+ }
+ for i=1,fontdata.nofsubfonts do
+ subfonts[i]=readdata(f,offsets[i],specification)
+ end
end
end
+ else
+ report("unknown version %a in file %a",version,filename)
end
- ioflush()
+ closefile(f)
+ return fontdata or {}
end
- if trace_loading then
- report_otf("%s enhancing file %a","stop",filename)
+end
+local function loadfont(specification,n)
+ if type(specification)=="string" then
+ specification={
+ filename=specification,
+ info=true,
+ details=true,
+ glyphs=true,
+ shapes=true,
+ kerns=true,
+ globalkerns=true,
+ lookups=true,
+ subfont=n or true,
+ tounicode=false,
+ }
+ end
+ if specification.shapes or specification.lookups or specification.kerns then
+ specification.glyphs=true
+ end
+ if specification.glyphs then
+ specification.details=true
+ end
+ if specification.details then
+ specification.info=true
+ end
+ local function message(str)
+ report("fatal error in file %a: %s\n%s",specification.filename,str,debug.traceback())
+ end
+ local ok,result=xpcall(loadfontdata,message,specification)
+ if ok then
+ return result
end
- ioflush()
end
-function patches.register(what,where,pattern,action)
- local pw=patches[what]
- if pw then
- local ww=pw[where]
- if ww then
- ww[pattern]=action
+function readers.loadshapes(filename,n)
+ local fontdata=loadfont {
+ filename=filename,
+ shapes=true,
+ subfont=n,
+ }
+ return fontdata and {
+ filename=filename,
+ format=fontdata.format,
+ glyphs=fontdata.glyphs,
+ units=fontdata.fontheader.units,
+ } or {
+ filename=filename,
+ format="unknown",
+ glyphs={},
+ units=0,
+ }
+end
+function readers.loadfont(filename,n)
+ local fontdata=loadfont {
+ filename=filename,
+ glyphs=true,
+ shapes=false,
+ lookups=true,
+ subfont=n,
+ }
+ if fontdata then
+ return {
+ tableversion=tableversion,
+ creator="context mkiv",
+ size=fontdata.filesize,
+ time=fontdata.filetime,
+ glyphs=fontdata.glyphs,
+ descriptions=fontdata.descriptions,
+ format=fontdata.format,
+ goodies={},
+ metadata=getinfo(fontdata,n),
+ properties={
+ hasitalics=fontdata.hasitalics or false,
+ },
+ resources={
+ filename=filename,
+ private=privateoffset,
+ duplicates=fontdata.duplicates or {},
+ features=fontdata.features or {},
+ sublookups=fontdata.sublookups or {},
+ marks=fontdata.marks or {},
+ markclasses=fontdata.markclasses or {},
+ marksets=fontdata.marksets or {},
+ sequences=fontdata.sequences or {},
+ variants=fontdata.variants,
+ version=getname(fontdata,"version"),
+ cidinfo=fontdata.cidinfo,
+ mathconstants=fontdata.mathconstants,
+ },
+ }
+ end
+end
+function readers.getinfo(filename,n,details)
+ local fontdata=loadfont {
+ filename=filename,
+ details=true,
+ }
+ if fontdata then
+ local subfonts=fontdata.subfonts
+ if not subfonts then
+ return getinfo(fontdata)
+ elseif type(n)~="number" then
+ local info={}
+ for i=1,#subfonts do
+ info[i]=getinfo(fontdata,i)
+ end
+ return info
+  elseif n>1 and n<=#subfonts then
+ return getinfo(fontdata,n)
else
- pw[where]={ [pattern]=action}
+ return {
+ filename=filename,
+ comment="there is no subfont "..n.." in this file"
+ }
end
+ else
+ return {
+ filename=filename,
+ comment="the file cannot be opened for reading",
+ }
end
end
-function patches.report(fmt,...)
- if trace_loading then
- report_otf("patching: %s",formatters[fmt](...))
+function readers.rehash(fontdata,hashmethod)
+ report("the %a helper is not yet implemented","rehash")
+end
+function readers.checkhash(fontdata)
+ report("the %a helper is not yet implemented","checkhash")
+end
+function readers.pack(fontdata,hashmethod)
+ report("the %a helper is not yet implemented","pack")
+end
+function readers.unpack(fontdata)
+ report("the %a helper is not yet implemented","unpack")
+end
+function readers.expand(fontdata)
+ report("the %a helper is not yet implemented","expand")
+end
+function readers.compact(fontdata)
+ report("the %a helper is not yet implemented","compact")
+end
+local extenders={}
+function readers.registerextender(extender)
+ extenders[#extenders+1]=extender
+end
+function readers.extend(fontdata)
+ for i=1,#extenders do
+ local extender=extenders[i]
+ local name=extender.name or "unknown"
+ local action=extender.action
+ if action then
+ action(fontdata)
+ end
end
end
-function enhancers.register(what,action)
- actions[what]=action
+if fonts.hashes then
+ local identifiers=fonts.hashes.identifiers
+ local loadshapes=readers.loadshapes
+ readers.version=0.006
+ readers.cache=containers.define("fonts","shapes",readers.version,true)
+ local function load(filename,sub)
+ local base=file.basename(filename)
+ local name=file.removesuffix(base)
+ local kind=file.suffix(filename)
+ local attr=lfs.attributes(filename)
+ local size=attr and attr.size or 0
+ local time=attr and attr.modification or 0
+ local sub=tonumber(sub)
+  if size>0 and (kind=="otf" or kind=="ttf" or kind=="ttc") then
+ local hash=containers.cleanname(base)
+ if sub then
+ hash=hash.."-"..sub
+ end
+ data=containers.read(readers.cache,hash)
+ if not data or data.time~=time or data.size~=size then
+ data=loadshapes(filename,sub)
+ if data then
+ data.size=size
+ data.format=data.format or (kind=="otf" and "opentype") or "truetype"
+ data.time=time
+ packoutlines(data)
+ containers.write(readers.cache,hash,data)
+ data=containers.read(readers.cache,hash)
+ end
+ end
+ unpackoutlines(data)
+ else
+ data={
+ filename=filename,
+ size=0,
+ time=time,
+ format="unknown",
+ units=1000,
+ glyphs={}
+ }
+ end
+ return data
+ end
+ fonts.hashes.shapes=table.setmetatableindex(function(t,k)
+ local d=identifiers[k]
+ local v=load(d.properties.filename,d.subindex)
+ t[k]=v
+ return v
+ end)
end
-function otf.load(filename,sub,featurefile)
- local base=file.basename(file.removesuffix(filename))
- local name=file.removesuffix(base)
- local attr=lfs.attributes(filename)
- local size=attr and attr.size or 0
- local time=attr and attr.modification or 0
- if featurefile then
- name=name.."@"..file.removesuffix(file.basename(featurefile))
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-cff']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local next,type,tonumber=next,type,tonumber
+local byte=string.byte
+local concat,remove=table.concat,table.remove
+local floor,abs,round,ceil=math.floor,math.abs,math.round,math.ceil
+local P,C,R,S,Cs,Ct=lpeg.P,lpeg.C,lpeg.R,lpeg.S,lpeg.Cs,lpeg.Ct
+local lpegmatch=lpeg.match
+local readers=fonts.handlers.otf.readers
+local streamreader=readers.streamreader
+local readbytes=streamreader.readbytes
+local readstring=streamreader.readstring
+local readbyte=streamreader.readcardinal1
+local readushort=streamreader.readcardinal2
+local readuint=streamreader.readcardinal3
+local readulong=streamreader.readcardinal4
+local setposition=streamreader.setposition
+local getposition=streamreader.getposition
+local setmetatableindex=table.setmetatableindex
+local trace_charstrings=false trackers.register("fonts.cff.charstrings",function(v) trace_charstrings=v end)
+local report=logs.reporter("otf reader","cff")
+local parsedictionaries
+local parsecharstring
+local parsecharstrings
+local resetcharstrings
+local parseprivates
+local defaultstrings={ [0]=
+ ".notdef","space","exclam","quotedbl","numbersign","dollar","percent",
+ "ampersand","quoteright","parenleft","parenright","asterisk","plus",
+ "comma","hyphen","period","slash","zero","one","two","three","four",
+ "five","six","seven","eight","nine","colon","semicolon","less",
+ "equal","greater","question","at","A","B","C","D","E","F","G","H",
+ "I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W",
+ "X","Y","Z","bracketleft","backslash","bracketright","asciicircum",
+ "underscore","quoteleft","a","b","c","d","e","f","g","h","i","j",
+ "k","l","m","n","o","p","q","r","s","t","u","v","w","x","y",
+ "z","braceleft","bar","braceright","asciitilde","exclamdown","cent",
+ "sterling","fraction","yen","florin","section","currency",
+ "quotesingle","quotedblleft","guillemotleft","guilsinglleft",
+ "guilsinglright","fi","fl","endash","dagger","daggerdbl",
+ "periodcentered","paragraph","bullet","quotesinglbase","quotedblbase",
+ "quotedblright","guillemotright","ellipsis","perthousand","questiondown",
+ "grave","acute","circumflex","tilde","macron","breve","dotaccent",
+ "dieresis","ring","cedilla","hungarumlaut","ogonek","caron","emdash",
+ "AE","ordfeminine","Lslash","Oslash","OE","ordmasculine","ae",
+ "dotlessi","lslash","oslash","oe","germandbls","onesuperior",
+ "logicalnot","mu","trademark","Eth","onehalf","plusminus","Thorn",
+ "onequarter","divide","brokenbar","degree","thorn","threequarters",
+ "twosuperior","registered","minus","eth","multiply","threesuperior",
+ "copyright","Aacute","Acircumflex","Adieresis","Agrave","Aring",
+ "Atilde","Ccedilla","Eacute","Ecircumflex","Edieresis","Egrave",
+ "Iacute","Icircumflex","Idieresis","Igrave","Ntilde","Oacute",
+ "Ocircumflex","Odieresis","Ograve","Otilde","Scaron","Uacute",
+ "Ucircumflex","Udieresis","Ugrave","Yacute","Ydieresis","Zcaron",
+ "aacute","acircumflex","adieresis","agrave","aring","atilde",
+ "ccedilla","eacute","ecircumflex","edieresis","egrave","iacute",
+ "icircumflex","idieresis","igrave","ntilde","oacute","ocircumflex",
+ "odieresis","ograve","otilde","scaron","uacute","ucircumflex",
+ "udieresis","ugrave","yacute","ydieresis","zcaron","exclamsmall",
+ "Hungarumlautsmall","dollaroldstyle","dollarsuperior","ampersandsmall",
+ "Acutesmall","parenleftsuperior","parenrightsuperior","twodotenleader",
+ "onedotenleader","zerooldstyle","oneoldstyle","twooldstyle",
+ "threeoldstyle","fouroldstyle","fiveoldstyle","sixoldstyle",
+ "sevenoldstyle","eightoldstyle","nineoldstyle","commasuperior",
+ "threequartersemdash","periodsuperior","questionsmall","asuperior",
+ "bsuperior","centsuperior","dsuperior","esuperior","isuperior",
+ "lsuperior","msuperior","nsuperior","osuperior","rsuperior","ssuperior",
+ "tsuperior","ff","ffi","ffl","parenleftinferior","parenrightinferior",
+ "Circumflexsmall","hyphensuperior","Gravesmall","Asmall","Bsmall",
+ "Csmall","Dsmall","Esmall","Fsmall","Gsmall","Hsmall","Ismall",
+ "Jsmall","Ksmall","Lsmall","Msmall","Nsmall","Osmall","Psmall",
+ "Qsmall","Rsmall","Ssmall","Tsmall","Usmall","Vsmall","Wsmall",
+ "Xsmall","Ysmall","Zsmall","colonmonetary","onefitted","rupiah",
+ "Tildesmall","exclamdownsmall","centoldstyle","Lslashsmall",
+ "Scaronsmall","Zcaronsmall","Dieresissmall","Brevesmall","Caronsmall",
+ "Dotaccentsmall","Macronsmall","figuredash","hypheninferior",
+ "Ogoneksmall","Ringsmall","Cedillasmall","questiondownsmall","oneeighth",
+ "threeeighths","fiveeighths","seveneighths","onethird","twothirds",
+ "zerosuperior","foursuperior","fivesuperior","sixsuperior",
+ "sevensuperior","eightsuperior","ninesuperior","zeroinferior",
+ "oneinferior","twoinferior","threeinferior","fourinferior",
+ "fiveinferior","sixinferior","seveninferior","eightinferior",
+ "nineinferior","centinferior","dollarinferior","periodinferior",
+ "commainferior","Agravesmall","Aacutesmall","Acircumflexsmall",
+ "Atildesmall","Adieresissmall","Aringsmall","AEsmall","Ccedillasmall",
+ "Egravesmall","Eacutesmall","Ecircumflexsmall","Edieresissmall",
+ "Igravesmall","Iacutesmall","Icircumflexsmall","Idieresissmall",
+ "Ethsmall","Ntildesmall","Ogravesmall","Oacutesmall","Ocircumflexsmall",
+ "Otildesmall","Odieresissmall","OEsmall","Oslashsmall","Ugravesmall",
+ "Uacutesmall","Ucircumflexsmall","Udieresissmall","Yacutesmall",
+ "Thornsmall","Ydieresissmall","001.000","001.001","001.002","001.003",
+ "Black","Bold","Book","Light","Medium","Regular","Roman","Semibold",
+}
+local cffreaders={
+ readbyte,
+ readushort,
+ readuint,
+ readulong,
+}
+local function readheader(f)
+ local offset=getposition(f)
+ local header={
+ offset=offset,
+ major=readbyte(f),
+ minor=readbyte(f),
+ size=readbyte(f),
+ osize=readbyte(f),
+ }
+ setposition(f,offset+header.size)
+ return header
+end
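+-- a cff index starts with a two byte count and a one byte offset size, followed by count+1 offsets; the offsets are turned into the lengths of the stored items here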
+local function readlengths(f)
+ local count=readushort(f)
+ if count==0 then
+ return {}
end
- if sub=="" then
- sub=false
+ local osize=readbyte(f)
+ local read=cffreaders[osize]
+ if not read then
+ report("bad offset size: %i",osize)
+ return {}
end
- local hash=name
- if sub then
- hash=hash.."-"..sub
+ local lengths={}
+ local previous=read(f)
+ for i=1,count do
+ local offset=read(f)
+ lengths[i]=offset-previous
+ previous=offset
end
- hash=containers.cleanname(hash)
- local featurefiles
- if featurefile then
- featurefiles={}
- for s in gmatch(featurefile,"[^,]+") do
- local name=resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
- if name=="" then
- report_otf("loading error, no featurefile %a",s)
- else
- local attr=lfs.attributes(name)
- featurefiles[#featurefiles+1]={
- name=name,
- size=attr and attr.size or 0,
- time=attr and attr.modification or 0,
+ return lengths
+end
+local function readfontnames(f)
+ local names=readlengths(f)
+ for i=1,#names do
+ names[i]=readstring(f,names[i])
+ end
+ return names
+end
+local function readtopdictionaries(f)
+ local dictionaries=readlengths(f)
+ for i=1,#dictionaries do
+ dictionaries[i]=readstring(f,dictionaries[i])
+ end
+ return dictionaries
+end
+local function readstrings(f)
+ local lengths=readlengths(f)
+ local strings=setmetatableindex({},defaultstrings)
+ local index=#defaultstrings
+ for i=1,#lengths do
+ index=index+1
+ strings[index]=readstring(f,lengths[i])
+ end
+ return strings
+end
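+-- dictionary parsing: operand values are collected on a stack and each operator pattern below moves the top of the stack into the result table; operators prefixed by byte 12 form the extended (and cid specific) set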
+do
+ local stack={}
+ local top=0
+ local result={}
+ local strings={}
+ local p_single=P("\00")/function()
+ result.version=strings[stack[top]] or "unset"
+ top=0
+ end+P("\01")/function()
+ result.notice=strings[stack[top]] or "unset"
+ top=0
+ end+P("\02")/function()
+ result.fullname=strings[stack[top]] or "unset"
+ top=0
+ end+P("\03")/function()
+ result.familyname=strings[stack[top]] or "unset"
+ top=0
+ end+P("\04")/function()
+ result.weight=strings[stack[top]] or "unset"
+ top=0
+ end+P("\05")/function()
+ result.fontbbox={ unpack(stack,1,4) }
+ top=0
+ end
++P("\13")/function()
+ result.uniqueid=stack[top]
+ top=0
+ end+P("\14")/function()
+ result.xuid=concat(stack,"",1,top)
+ top=0
+ end+P("\15")/function()
+ result.charset=stack[top]
+ top=0
+ end+P("\16")/function()
+ result.encoding=stack[top]
+ top=0
+ end+P("\17")/function()
+ result.charstrings=stack[top]
+ top=0
+ end+P("\18")/function()
+ result.private={
+ size=stack[top-1],
+ offset=stack[top],
+ }
+ top=0
+ end+P("\19")/function()
+ result.subroutines=stack[top]
+ end+P("\20")/function()
+ result.defaultwidthx=stack[top]
+ end+P("\21")/function()
+ result.nominalwidthx=stack[top]
+ end
+ local p_double=P("\12")*(
+ P("\00")/function()
+ result.copyright=stack[top]
+ top=0
+ end+P("\01")/function()
+ result.monospaced=stack[top]==1 and true or false
+ top=0
+ end+P("\02")/function()
+ result.italicangle=stack[top]
+ top=0
+ end+P("\03")/function()
+ result.underlineposition=stack[top]
+ top=0
+ end+P("\04")/function()
+ result.underlinethickness=stack[top]
+ top=0
+ end+P("\05")/function()
+ result.painttype=stack[top]
+ top=0
+ end+P("\06")/function()
+ result.charstringtype=stack[top]
+ top=0
+ end+P("\07")/function()
+ result.fontmatrix={ unpack(stack,1,6) }
+ top=0
+ end+P("\08")/function()
+ result.strokewidth=stack[top]
+ top=0
+ end+P("\20")/function()
+ result.syntheticbase=stack[top]
+ top=0
+ end+P("\21")/function()
+ result.postscript=strings[stack[top]] or "unset"
+ top=0
+ end+P("\22")/function()
+ result.basefontname=strings[stack[top]] or "unset"
+ top=0
+ end+P("\21")/function()
+ result.basefontblend=stack[top]
+ top=0
+ end+P("\30")/function()
+ result.cid.registry=strings[stack[top-2]] or "unset"
+ result.cid.ordering=strings[stack[top-1]] or "unset"
+ result.cid.supplement=stack[top]
+ top=0
+ end+P("\31")/function()
+ result.cid.fontversion=stack[top]
+ top=0
+ end+P("\32")/function()
+ result.cid.fontrevision=stack[top]
+ top=0
+ end+P("\33")/function()
+ result.cid.fonttype=stack[top]
+ top=0
+ end+P("\34")/function()
+ result.cid.count=stack[top]
+ top=0
+ end+P("\35")/function()
+ result.cid.uidbase=stack[top]
+ top=0
+ end+P("\36")/function()
+ result.cid.fdarray=stack[top]
+ top=0
+ end+P("\37")/function()
+ result.cid.fdselect=stack[top]
+ top=0
+ end+P("\38")/function()
+ result.cid.fontname=strings[stack[top]] or "unset"
+ top=0
+ end
+ )
+ local p_last=P("\x0F")/"0"+P("\x1F")/"1"+P("\x2F")/"2"+P("\x3F")/"3"+P("\x4F")/"4"+P("\x5F")/"5"+P("\x6F")/"6"+P("\x7F")/"7"+P("\x8F")/"8"+P("\x9F")/"9"+P("\xAF")/""+P("\xBF")/""+P("\xCF")/""+P("\xDF")/""+P("\xEF")/""+R("\xF0\xFF")/""
+ local remap={
+ ["\x00"]="00",["\x01"]="01",["\x02"]="02",["\x03"]="03",["\x04"]="04",["\x05"]="05",["\x06"]="06",["\x07"]="07",["\x08"]="08",["\x09"]="09",["\x0A"]="0.",["\x0B"]="0E",["\x0C"]="0E-",["\x0D"]="0",["\x0E"]="0-",["\x0F"]="0",
+ ["\x10"]="10",["\x11"]="11",["\x12"]="12",["\x13"]="13",["\x14"]="14",["\x15"]="15",["\x16"]="16",["\x17"]="17",["\x18"]="18",["\x19"]="19",["\x1A"]="0.",["\x1B"]="0E",["\x1C"]="0E-",["\x1D"]="0",["\x1E"]="0-",["\x1F"]="0",
+ ["\x20"]="20",["\x21"]="21",["\x22"]="22",["\x23"]="23",["\x24"]="24",["\x25"]="25",["\x26"]="26",["\x27"]="27",["\x28"]="28",["\x29"]="29",["\x2A"]="0.",["\x2B"]="0E",["\x2C"]="0E-",["\x2D"]="0",["\x2E"]="0-",["\x2F"]="0",
+ ["\x30"]="30",["\x31"]="31",["\x32"]="32",["\x33"]="33",["\x34"]="34",["\x35"]="35",["\x36"]="36",["\x37"]="37",["\x38"]="38",["\x39"]="39",["\x3A"]="0.",["\x3B"]="0E",["\x3C"]="0E-",["\x3D"]="0",["\x3E"]="0-",["\x3F"]="0",
+ ["\x40"]="40",["\x41"]="41",["\x42"]="42",["\x43"]="43",["\x44"]="44",["\x45"]="45",["\x46"]="46",["\x47"]="47",["\x48"]="48",["\x49"]="49",["\x4A"]="0.",["\x4B"]="0E",["\x4C"]="0E-",["\x4D"]="0",["\x4E"]="0-",["\x4F"]="0",
+ ["\x50"]="50",["\x51"]="51",["\x52"]="52",["\x53"]="53",["\x54"]="54",["\x55"]="55",["\x56"]="56",["\x57"]="57",["\x58"]="58",["\x59"]="59",["\x5A"]="0.",["\x5B"]="0E",["\x5C"]="0E-",["\x5D"]="0",["\x5E"]="0-",["\x5F"]="0",
+ ["\x60"]="60",["\x61"]="61",["\x62"]="62",["\x63"]="63",["\x64"]="64",["\x65"]="65",["\x66"]="66",["\x67"]="67",["\x68"]="68",["\x69"]="69",["\x6A"]="0.",["\x6B"]="0E",["\x6C"]="0E-",["\x6D"]="0",["\x6E"]="0-",["\x6F"]="0",
+ ["\x70"]="70",["\x71"]="71",["\x72"]="72",["\x73"]="73",["\x74"]="74",["\x75"]="75",["\x76"]="76",["\x77"]="77",["\x78"]="78",["\x79"]="79",["\x7A"]="0.",["\x7B"]="0E",["\x7C"]="0E-",["\x7D"]="0",["\x7E"]="0-",["\x7F"]="0",
+ ["\x80"]="80",["\x81"]="81",["\x82"]="82",["\x83"]="83",["\x84"]="84",["\x85"]="85",["\x86"]="86",["\x87"]="87",["\x88"]="88",["\x89"]="89",["\x8A"]="0.",["\x8B"]="0E",["\x8C"]="0E-",["\x8D"]="0",["\x8E"]="0-",["\x8F"]="0",
+ ["\x90"]="90",["\x91"]="91",["\x92"]="92",["\x93"]="93",["\x94"]="94",["\x95"]="95",["\x96"]="96",["\x97"]="97",["\x98"]="98",["\x99"]="99",["\x9A"]="0.",["\x9B"]="0E",["\x9C"]="0E-",["\x9D"]="0",["\x9E"]="0-",["\x9F"]="0",
+ ["\xA0"]=".0",["\xA1"]=".1",["\xA2"]=".2",["\xA3"]=".3",["\xA4"]=".4",["\xA5"]=".5",["\xA6"]=".6",["\xA7"]=".7",["\xA8"]=".8",["\xA9"]=".9",["\xAA"]="..",["\xAB"]=".E",["\xAC"]=".E-",["\xAD"]=".",["\xAE"]=".-",["\xAF"]=".",
+ ["\xB0"]="E0",["\xB1"]="E1",["\xB2"]="E2",["\xB3"]="E3",["\xB4"]="E4",["\xB5"]="E5",["\xB6"]="E6",["\xB7"]="E7",["\xB8"]="E8",["\xB9"]="E9",["\xBA"]="E.",["\xBB"]="EE",["\xBC"]="EE-",["\xBD"]="E",["\xBE"]="E-",["\xBF"]="E",
+ ["\xC0"]="E-0",["\xC1"]="E-1",["\xC2"]="E-2",["\xC3"]="E-3",["\xC4"]="E-4",["\xC5"]="E-5",["\xC6"]="E-6",["\xC7"]="E-7",["\xC8"]="E-8",["\xC9"]="E-9",["\xCA"]="E-.",["\xCB"]="E-E",["\xCC"]="E-E-",["\xCD"]="E-",["\xCE"]="E--",["\xCF"]="E-",
+ ["\xD0"]="-0",["\xD1"]="-1",["\xD2"]="-2",["\xD3"]="-3",["\xD4"]="-4",["\xD5"]="-5",["\xD6"]="-6",["\xD7"]="-7",["\xD8"]="-8",["\xD9"]="-9",["\xDA"]="-.",["\xDB"]="-E",["\xDC"]="-E-",["\xDD"]="-",["\xDE"]="--",["\xDF"]="-",
+ }
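+ -- Real numbers (operator 30) are nibble encoded; the remap table expands each
+ -- byte into the corresponding characters so that tonumber can do the final
+ -- conversion.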
+ local p_nibbles=P("\30")*Cs(((1-p_last)/remap)^0+p_last)/function(n)
+ top=top+1
+ stack[top]=tonumber(n) or 0
+ end
+ local p_byte=C(R("\32\246"))/function(b0)
+ top=top+1
+ stack[top]=byte(b0)-139
+ end
+ local p_positive=C(R("\247\250"))*C(1)/function(b0,b1)
+ top=top+1
+ stack[top]=(byte(b0)-247)*256+byte(b1)+108
+ end
+ local p_negative=C(R("\251\254"))*C(1)/function(b0,b1)
+ top=top+1
+ stack[top]=-(byte(b0)-251)*256-byte(b1)-108
+ end
+ local p_short=P("\28")*C(1)*C(1)/function(b1,b2)
+ top=top+1
+ local n=0x100*byte(b1)+byte(b2)
+ if n>=0x8000 then
+ stack[top]=n-0xFFFF-1
+ else
+ stack[top]=n
+ end
+ end
+ local p_long=P("\29")*C(1)*C(1)*C(1)*C(1)/function(b1,b2,b3,b4)
+ top=top+1
+ local n=0x1000000*byte(b1)+0x10000*byte(b2)+0x100*byte(b3)+byte(b4)
+ if n>=0x8000000 then
+ stack[top]=n-0xFFFFFFFF-1
+ else
+ stack[top]=n
+ end
+ end
+ local p_unsupported=P(1)/function(detail)
+ top=0
+ end
+ local p_dictionary=(
+ p_byte+p_positive+p_negative+p_short+p_long+p_nibbles+p_single+p_double+p_unsupported
+ )^1
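+ -- Each dictionary starts out with the defaults from the CFF specification; the
+ -- matched operators then overwrite these entries.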
+ parsedictionaries=function(data,dictionaries)
+ stack={}
+ strings=data.strings
+ for i=1,#dictionaries do
+ top=0
+ result={
+ monospaced=false,
+ italicangle=0,
+ underlineposition=-100,
+ underlinethickness=50,
+ painttype=0,
+ charstringtype=2,
+ fontmatrix={ 0.001,0,0,0.001,0,0 },
+ fontbbox={ 0,0,0,0 },
+ strokewidth=0,
+ charset=0,
+ encoding=0,
+ cid={
+ fontversion=0,
+ fontrevision=0,
+ fonttype=0,
+ count=8720,
}
+ }
+ lpegmatch(p_dictionary,dictionaries[i])
+ dictionaries[i]=result
+ end
+ result={}
+ top=0
+ stack={}
+ end
+ parseprivates=function(data,dictionaries)
+ stack={}
+ strings=data.strings
+ for i=1,#dictionaries do
+ local private=dictionaries[i].private
+ if private and private.data then
+ top=0
+ result={
+ forcebold=false,
+ languagegroup=0,
+ expansionfactor=0.06,
+ initialrandomseed=0,
+ subroutines=0,
+ defaultwidthx=0,
+ nominalwidthx=0,
+ cid={
+ },
+ }
+ lpegmatch(p_dictionary,private.data)
+ private.data=result
+ end
+ end
+ result={}
+ top=0
+ stack={}
+ end
+ local x=0
+ local y=0
+ local width=false
+ local r=0
+ local stems=0
+ local globalbias=0
+ local localbias=0
+ local globals=false
+ local locals=false
+ local depth=1
+ local xmin=0
+ local xmax=0
+ local ymin=0
+ local ymax=0
+ local checked=false
+ local keepcurve=false
+ local function showstate(where)
+ report("%w%-10s : [%s] n=%i",depth*2,where,concat(stack," ",1,top),top)
+ end
+ local function showvalue(where,value,showstack)
+ if showstack then
+ report("%w%-10s : %s : [%s] n=%i",depth*2,where,tostring(value),concat(stack," ",1,top),top)
+ else
+ report("%w%-10s : %s",depth*2,where,tostring(value))
+ end
+ end
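+ -- The path helpers keep track of the running bounding box; when keepcurve is
+ -- set the segments themselves are stored too (needed when shapes are wanted).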
+ local function moveto(x,y)
+ if keepcurve then
+ r=r+1
+ result[r]={ x,y,"m" }
+ end
+ if checked then
+ if x<xmin then xmin=x elseif x>xmax then xmax=x end
+ if y<ymin then ymin=y elseif y>ymax then ymax=y end
+ else
+ xmin=x
+ ymin=y
+ xmax=x
+ ymax=y
+ checked=true
+ end
+ end
+ local function lineto(x,y)
+ if keepcurve then
+ r=r+1
+ result[r]={ x,y,"l" }
+ end
+ if checked then
+ if x<xmin then xmin=x elseif x>xmax then xmax=x end
+ if y<ymin then ymin=y elseif y>ymax then ymax=y end
+ else
+ xmin=x
+ ymin=y
+ xmax=x
+ ymax=y
+ checked=true
+ end
+ end
+ local function curveto(x1,y1,x2,y2,x3,y3)
+ if keepcurve then
+ r=r+1
+ result[r]={ x1,y1,x2,y2,x3,y3,"c" }
+ end
+ if checked then
+ if x1<xmin then xmin=x1 elseif x1>xmax then xmax=x1 end
+ if y1<ymin then ymin=y1 elseif y1>ymax then ymax=y1 end
+ else
+ xmin=x1
+ ymin=y1
+ xmax=x1
+ ymax=y1
+ checked=true
+ end
+ if x2<xmin then xmin=x2 elseif x2>xmax then xmax=x2 end
+ if y2<ymin then ymin=y2 elseif y2>ymax then ymax=y2 end
+ if x3<xmin then xmin=x3 elseif x3>xmax then xmax=x3 end
+ if y3<ymin then ymin=y3 elseif y3>ymax then ymax=y3 end
+ end
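+ -- An odd extra operand on the first stack clearing operator is the glyph width;
+ -- a boolean width signals that the default width applies (resolved later on).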
+ local function rmoveto()
+ if top>2 then
+ if not width then
+ width=stack[1]
+ if trace_charstrings then
+ showvalue("width",width)
+ end
end
+ elseif not width then
+ width=true
end
- if #featurefiles==0 then
- featurefiles=nil
+ if trace_charstrings then
+ showstate("rmoveto")
end
+ x=x+stack[top-1]
+ y=y+stack[top]
+ top=0
+ moveto(x,y)
end
- local data=containers.read(otf.cache,hash)
- local reload=not data or data.size~=size or data.time~=time
- if forceload then
- report_otf("forced reload of %a due to hard coded flag",filename)
- reload=true
+ local function hmoveto()
+ if top>1 then
+ if not width then
+ width=stack[1]
+ if trace_charstrings then
+ showvalue("width",width)
+ end
+ end
+ elseif not width then
+ width=true
+ end
+ if trace_charstrings then
+ showstate("hmoveto")
+ end
+ x=x+stack[top]
+ top=0
+ moveto(x,y)
end
- if not reload then
- local featuredata=data.featuredata
- if featurefiles then
- if not featuredata or #featuredata~=#featurefiles then
- reload=true
- else
- for i=1,#featurefiles do
- local fi,fd=featurefiles[i],featuredata[i]
- if fi.name~=fd.name or fi.size~=fd.size or fi.time~=fd.time then
- reload=true
- break
- end
+ local function vmoveto()
+ if top>1 then
+ if not width then
+ width=stack[1]
+ if trace_charstrings then
+ showvalue("width",width)
end
end
- elseif featuredata then
- reload=true
+ elseif not width then
+ width=true
end
- if reload then
- report_otf("loading: forced reload due to changed featurefile specification %a",featurefile)
+ if trace_charstrings then
+ showstate("vmoveto")
end
- end
- if reload then
- starttiming("fontloader")
- report_otf("loading %a, hash %a",filename,hash)
- local fontdata,messages
- if sub then
- fontdata,messages=open_font(filename,sub)
+ y=y+stack[top]
+ top=0
+ moveto(x,y)
+ end
+ local function rlineto()
+ if trace_charstrings then
+ showstate("rlineto")
+ end
+ for i=1,top,2 do
+ x=x+stack[i]
+ y=y+stack[i+1]
+ lineto(x,y)
+ end
+ top=0
+ end
+ local function xlineto(swap)
+ for i=1,top do
+ if swap then
+ x=x+stack[i]
+ swap=false
+ else
+ y=y+stack[i]
+ swap=true
+ end
+ lineto(x,y)
+ end
+ top=0
+ end
+ local function hlineto()
+ if trace_charstrings then
+ showstate("hlineto")
+ end
+ xlineto(true)
+ end
+ local function vlineto()
+ if trace_charstrings then
+ showstate("vlineto")
+ end
+ xlineto(false)
+ end
+ local function rrcurveto()
+ if trace_charstrings then
+ showstate("rrcurveto")
+ end
+ for i=1,top,6 do
+ local ax=x+stack[i]
+ local ay=y+stack[i+1]
+ local bx=ax+stack[i+2]
+ local by=ay+stack[i+3]
+ x=bx+stack[i+4]
+ y=by+stack[i+5]
+ curveto(ax,ay,bx,by,x,y)
+ end
+ top=0
+ end
+ local function hhcurveto()
+ if trace_charstrings then
+ showstate("hhcurveto")
+ end
+ local s=1
+ if top%2~=0 then
+ y=y+stack[1]
+ s=2
+ end
+ for i=s,top,4 do
+ local ax=x+stack[i]
+ local ay=y
+ local bx=ax+stack[i+1]
+ local by=ay+stack[i+2]
+ x=bx+stack[i+3]
+ y=by
+ curveto(ax,ay,bx,by,x,y)
+ end
+ top=0
+ end
+ local function vvcurveto()
+ if trace_charstrings then
+ showstate("vvcurveto")
+ end
+ local s=1
+ local d=0
+ if top%2~=0 then
+ d=stack[1]
+ s=2
+ end
+ for i=s,top,4 do
+ local ax=x+d
+ local ay=y+stack[i]
+ local bx=ax+stack[i+1]
+ local by=ay+stack[i+2]
+ x=bx
+ y=by+stack[i+3]
+ curveto(ax,ay,bx,by,x,y)
+ d=0
+ end
+ top=0
+ end
+ local function xxcurveto(swap)
+ local last=top%4~=0 and stack[top]
+ if last then
+ top=top-1
+ end
+ local sw=swap
+ for i=1,top,4 do
+ local ax,ay,bx,by
+ if swap then
+ ax=x+stack[i]
+ ay=y
+ bx=ax+stack[i+1]
+ by=ay+stack[i+2]
+ y=by+stack[i+3]
+ if last and i+3==top then
+ x=bx+last
+ else
+ x=bx
+ end
+ swap=false
+ else
+ ax=x
+ ay=y+stack[i]
+ bx=ax+stack[i+1]
+ by=ay+stack[i+2]
+ x=bx+stack[i+3]
+ if last and i+3==top then
+ y=by+last
+ else
+ y=by
+ end
+ swap=true
+ end
+ curveto(ax,ay,bx,by,x,y)
+ end
+ top=0
+ end
+ local function hvcurveto()
+ if trace_charstrings then
+ showstate("hvcurveto")
+ end
+ xxcurveto(true)
+ end
+ local function vhcurveto()
+ if trace_charstrings then
+ showstate("vhcurveto")
+ end
+ xxcurveto(false)
+ end
+ local function rcurveline()
+ if trace_charstrings then
+ showstate("rcurveline")
+ end
+ for i=1,top-2,6 do
+ local ax=x+stack[i]
+ local ay=y+stack[i+1]
+ local bx=ax+stack[i+2]
+ local by=ay+stack[i+3]
+ x=bx+stack[i+4]
+ y=by+stack[i+5]
+ curveto(ax,ay,bx,by,x,y)
+ end
+ x=x+stack[top-1]
+ y=y+stack[top]
+ lineto(x,y)
+ top=0
+ end
+ local function rlinecurve()
+ if trace_charstrings then
+ showstate("rlinecurve")
+ end
+ if top>6 then
+ for i=1,top-6,2 do
+ x=x+stack[i]
+ y=y+stack[i+1]
+ lineto(x,y)
+ end
+ end
+ local ax=x+stack[top-5]
+ local ay=y+stack[top-4]
+ local bx=ax+stack[top-3]
+ local by=ay+stack[top-2]
+ x=bx+stack[top-1]
+ y=by+stack[top]
+ curveto(ax,ay,bx,by,x,y)
+ top=0
+ end
+ local function flex()
+ if trace_charstrings then
+ showstate("flex")
+ end
+ local ax=x+stack[1]
+ local ay=y+stack[2]
+ local bx=ax+stack[3]
+ local by=ay+stack[4]
+ local cx=bx+stack[5]
+ local cy=by+stack[6]
+ curveto(ax,ay,bx,by,cx,cy)
+ local dx=cx+stack[7]
+ local dy=cy+stack[8]
+ local ex=dx+stack[9]
+ local ey=dy+stack[10]
+ x=ex+stack[11]
+ y=ey+stack[12]
+ curveto(dx,dy,ex,ey,x,y)
+ top=0
+ end
+ local function hflex()
+ if trace_charstrings then
+ showstate("hflex")
+ end
+ local ax=x+stack[1]
+ local ay=y
+ local bx=ax+stack[2]
+ local by=ay+stack[3]
+ local cx=bx+stack[4]
+ local cy=by
+ curveto(ax,ay,bx,by,cx,cy)
+ local dx=cx+stack[5]
+ local dy=by
+ local ex=dx+stack[6]
+ local ey=y
+ x=ex+stack[7]
+ curveto(dx,dy,ex,ey,x,y)
+ top=0
+ end
+ local function hflex1()
+ if trace_charstrings then
+ showstate("hflex1")
+ end
+ local ax=x+stack[1]
+ local ay=y+stack[2]
+ local bx=ax+stack[3]
+ local by=ay+stack[4]
+ local cx=bx+stack[5]
+ local cy=by
+ curveto(ax,ay,bx,by,cx,cy)
+ local dx=cx+stack[6]
+ local dy=by
+ local ex=dx+stack[7]
+ local ey=dy+stack[8]
+ x=ex+stack[9]
+ curveto(dx,dy,ex,ey,x,y)
+ top=0
+ end
+ local function flex1()
+ if trace_charstrings then
+ showstate("flex1")
+ end
+ local ax=x+stack[1]
+ local ay=y+stack[2]
+ local bx=ax+stack[3]
+ local by=ay+stack[4]
+ local cx=bx+stack[5]
+ local cy=by+stack[6]
+ curveto(ax,ay,bx,by,cx,cy)
+ local dx=cx+stack[7]
+ local dy=cy+stack[8]
+ local ex=dx+stack[9]
+ local ey=dy+stack[10]
+ if abs(ex-x)>abs(ey-y) then
+ x=ex+stack[11]
else
- fontdata,messages=open_font(filename)
+ y=ey+stack[11]
end
- if fontdata then
- mainfields=mainfields or (font_fields and font_fields(fontdata))
+ curveto(dx,dy,ex,ey,x,y)
+ top=0
+ end
+ local function getstem()
+ if top==0 then
+ elseif top%2~=0 then
+ if width then
+ remove(stack,1)
+ else
+ width=remove(stack,1)
+ if trace_charstrings then
+ showvalue("width",width)
+ end
+ end
+ top=top-1
end
- if trace_loading and messages and #messages>0 then
- if type(messages)=="string" then
- report_otf("warning: %s",messages)
+ if trace_charstrings then
+ showstate("stem")
+ end
+ stems=stems+top/2
+ top=0
+ end
+ local function getmask()
+ if top==0 then
+ elseif top%2~=0 then
+ if width then
+ remove(stack,1)
else
- for m=1,#messages do
- report_otf("warning: %S",messages[m])
+ width=remove(stack,1)
+ if trace_charstrings then
+ showvalue("width",width)
end
end
+ top=top-1
+ end
+ if trace_charstrings then
+ showstate(operator==19 and "hintmask" or "cntrmask")
+ end
+ stems=stems+top/2
+ top=0
+ if stems==0 then
+ elseif stems<=8 then
+ return 1
else
- report_otf("loading done")
+ return floor((stems+7)/8)
+ end
+ end
+ local function unsupported()
+ if trace_charstrings then
+ showstate("unsupported")
+ end
+ top=0
+ end
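+ -- Operators are dispatched by number: actions covers the single byte charstring
+ -- operators, subactions the escaped (12 xx) flex variants.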
+ local actions={ [0]=unsupported,
+ getstem,
+ unsupported,
+ getstem,
+ vmoveto,
+ rlineto,
+ hlineto,
+ vlineto,
+ rrcurveto,
+ unsupported,
+ unsupported,
+ unsupported,
+ unsupported,
+ unsupported,
+ unsupported,
+ unsupported,
+ unsupported,
+ unsupported,
+ getstem,
+ getmask,
+ getmask,
+ rmoveto,
+ hmoveto,
+ getstem,
+ rcurveline,
+ rlinecurve,
+ vvcurveto,
+ hhcurveto,
+ unsupported,
+ unsupported,
+ vhcurveto,
+ hvcurveto,
+ }
+ local subactions={
+ [034]=hflex,
+ [035]=flex,
+ [036]=hflex1,
+ [037]=flex1,
+ }
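+ -- Subroutine calls are biased as prescribed by the charstring specification:
+ -- 107, 1131 or 32768 depending on the number of routines.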
+ local p_bytes=Ct((P(1)/byte)^0)
+ local function call(scope,list,bias,process)
+ local index=stack[top]+bias
+ top=top-1
+ if trace_charstrings then
+ showvalue(scope,index,true)
+ end
+ local str=list[index]
+ if str then
+ if type(str)=="string" then
+ str=lpegmatch(p_bytes,str)
+ list[index]=str
+ end
+ depth=depth+1
+ process(str)
+ depth=depth-1
+ else
+ report("unknown %s %i",scope,index)
end
- if fontdata then
- if featurefiles then
- for i=1,#featurefiles do
- load_featurefile(fontdata,featurefiles[i].name)
+ end
+ local function process(tab)
+ local i=1
+ local n=#tab
+ while i<=n do
+ local t=tab[i]
+ if t>=32 and t<=246 then
+ top=top+1
+ stack[top]=t-139
+ i=i+1
+ elseif t>=247 and t<=250 then
+ top=top+1
+ stack[top]=(t-247)*256+tab[i+1]+108
+ i=i+2
+ elseif t>=251 and t<=254 then
+ top=top+1
+ stack[top]=-(t-251)*256-tab[i+1]-108
+ i=i+2
+ elseif t==28 then
+ top=top+1
+ local n=0x100*tab[i+1]+tab[i+2]
+ if n>=0x8000 then
+ stack[top]=n-0xFFFF-1
+ else
+ stack[top]=n
+ end
+ i=i+3
+ elseif t==255 then
+ local n=0x100*tab[i+1]+tab[i+2]
+ top=top+1
+ if n>=0x8000 then
+ stack[top]=n-0xFFFF-1+(0x100*tab[i+3]+tab[i+4])/0xFFFF
+ else
+ stack[top]=n+(0x100*tab[i+3]+tab[i+4])/0xFFFF
+ end
+ i=i+5
+ elseif t==11 then
+ if trace_charstrings then
+ showstate("return")
+ end
+ return
+ elseif t==10 then
+ call("local",locals,localbias,process)
+ i=i+1
+ elseif t==14 then
+ if width then
+ elseif top>0 then
+ width=stack[1]
+ if trace_charstrings then
+ showvalue("width",width)
+ end
+ else
+ width=true
+ end
+ if trace_charstrings then
+ showstate("endchar")
end
+ return
+ elseif t==29 then
+ call("global",globals,globalbias,process)
+ i=i+1
+ elseif t==12 then
+ i=i+1
+ local t=tab[i]
+ local a=subactions[t]
+ if a then
+ a()
+ else
+ if trace_charstrings then
+ showvalue("<subaction>",t)
+ end
+ top=0
+ end
+ i=i+1
+ else
+ local a=actions[t]
+ if a then
+ local s=a()
+ if s then
+ i=i+s
+ end
+ else
+ if trace_charstrings then
+ showvalue("<action>",t)
+ end
+ top=0
+ end
+ i=i+1
end
- local unicodes={
- }
- local splitter=lpeg.splitter(" ",unicodes)
- data={
- size=size,
- time=time,
- subfont=sub,
- format=otf_format(filename),
- featuredata=featurefiles,
- resources={
- filename=resolvers.unresolve(filename),
- version=otf.version,
- creator="context mkiv",
- unicodes=unicodes,
- indices={
- },
- duplicates={
- },
- variants={
- },
- lookuptypes={},
- },
- warnings={},
- metadata={
- },
- properties={
- },
- descriptions={},
- goodies={},
- helpers={
- tounicodelist=splitter,
- tounicodetable=Ct(splitter),
- },
+ end
+ end
+ parsecharstrings=function(data,glyphs,doshapes)
+ local dictionary=data.dictionaries[1]
+ local charstrings=dictionary.charstrings
+ local charset=dictionary.charset
+ keepcurve=doshapes
+ stack={}
+ glyphs=glyphs or {}
+ strings=data.strings
+ locals=dictionary.subroutines
+ globals=data.routines
+ globalbias=#globals
+ localbias=#locals
+ globalbias=((globalbias<1240 and 107) or (globalbias<33900 and 1131) or 32768)+1
+ localbias=((localbias<1240 and 107) or (localbias<33900 and 1131) or 32768)+1
+ local nominalwidth=dictionary.private.data.nominalwidthx or 0
+ local defaultwidth=dictionary.private.data.defaultwidthx or 0
+ for i=1,#charstrings do
+ local str=charstrings[i]
+ local tab=lpegmatch(p_bytes,str)
+ local index=i-1
+ x=0
+ y=0
+ width=false
+ r=0
+ top=0
+ stems=0
+ result={}
+ xmin=0
+ xmax=0
+ ymin=0
+ ymax=0
+ checked=false
+ if trace_charstrings then
+ report("glyph: %i",index)
+ report("data: % t",tab)
+ end
+ process(tab)
+ local boundingbox={ round(xmin),round(ymin),round(xmax),round(ymax) }
+ if width==true or width==false then
+ width=defaultwidth
+ else
+ width=nominalwidth+width
+ end
+ local glyph=glyphs[index]
+ if not glyph then
+ glyphs[index]={
+ segments=doshapes~=false and result or nil,
+ boundingbox=boundingbox,
+ width=width,
+ name=charset[index],
+ }
+ else
+ glyph.segments=doshapes~=false and result or nil
+ glyph.boundingbox=boundingbox
+ if not glyph.width then
+ glyph.width=width
+ end
+ if charset and not glyph.name then
+ glyph.name=charset[index]
+ end
+ end
+ if trace_charstrings then
+ report("width: %s",tostring(width))
+ report("boundingbox: % t",boundingbox)
+ end
+ charstrings[i]=nil
+ end
+ return glyphs
+ end
+ parsecharstring=function(data,dictionary,charstring,glyphs,index,doshapes)
+ local private=dictionary.private
+ keepcurve=doshapes
+ strings=data.strings
+ locals=dictionary.subroutines or {}
+ globals=data.routines or {}
+ globalbias=#globals
+ localbias=#locals
+ globalbias=((globalbias<1240 and 107) or (globalbias<33900 and 1131) or 32768)+1
+ localbias=((localbias<1240 and 107) or (localbias<33900 and 1131) or 32768)+1
+ local nominalwidth=private and private.data.nominalwidthx or 0
+ local defaultwidth=private and private.data.defaultwidthx or 0
+ local tab=lpegmatch(p_bytes,charstring)
+ x=0
+ y=0
+ width=false
+ r=0
+ top=0
+ stems=0
+ result={}
+ xmin=0
+ xmax=0
+ ymin=0
+ ymax=0
+ checked=false
+ if trace_charstrings then
+ report("glyph: %i",index)
+ report("data: % t",tab)
+ end
+ process(tab)
+ local boundingbox={ xmin,ymin,xmax,ymax }
+ if width==true or width==false then
+ width=defaultwidth
+ else
+ width=nominalwidth+width
+ end
+ index=index-1
+ local glyph=glyphs[index]
+ if not glyph then
+ glyphs[index]={
+ segments=doshapes~=false and result or nil,
+ boundingbox=boundingbox,
+ width=width,
+ name=charset[index],
}
- report_otf("file size: %s",size)
- enhancers.apply(data,filename,fontdata)
- local packtime={}
- if packdata then
- if cleanup>0 then
- collectgarbage("collect")
+ else
+ glyph.segments=doshapes~=false and result or nil
+ glyph.boundingbox=boundingbox
+ if not glyph.width then
+ glyph.width=width
+ end
+ if charset and not glyph.name then
+ glyph.name=charset[index]
+ end
+ end
+ if trace_charstrings then
+ report("width: %s",tostring(width))
+ report("boundingbox: % t",boundingbox)
+ end
+ return charstring
+ end
+ resetcharstrings=function()
+ result={}
+ top=0
+ stack={}
+ end
+end
+local function readglobals(f,data)
+ local routines=readlengths(f)
+ for i=1,#routines do
+ routines[i]=readstring(f,routines[i])
+ end
+ data.routines=routines
+end
+local function readencodings(f,data)
+ data.encodings={}
+end
+local function readcharsets(f,data,dictionary)
+ local header=data.header
+ local strings=data.strings
+ local nofglyphs=data.nofglyphs
+ local charsetoffset=dictionary.charset
+ if charsetoffset~=0 then
+ setposition(f,header.offset+charsetoffset)
+ local format=readbyte(f)
+ local charset={ [0]=".notdef" }
+ dictionary.charset=charset
+ if format==0 then
+ for i=1,nofglyphs do
+ charset[i]=strings[readushort(f)]
+ end
+ elseif format==1 or format==2 then
+ local readcount=format==1 and readbyte or readushort
+ local i=1
+ while i<=nofglyphs do
+ local sid=readushort(f)
+ local n=readcount(f)
+ for s=sid,sid+n do
+ charset[i]=strings[s]
+ i=i+1
+ if i>nofglyphs then
+ break
+ end
end
- starttiming(packtime)
- enhance("pack",data,filename,nil)
- stoptiming(packtime)
end
- report_otf("saving %a in cache",filename)
- data=containers.write(otf.cache,hash,data)
- if cleanup>1 then
- collectgarbage("collect")
+ else
+ report("cff parser: unsupported charset format %a",format)
+ end
+ end
+end
+local function readprivates(f,data)
+ local header=data.header
+ local dictionaries=data.dictionaries
+ local private=dictionaries[1].private
+ if private then
+ setposition(f,header.offset+private.offset)
+ private.data=readstring(f,private.size)
+ end
+end
+local function readlocals(f,data,dictionary)
+ local header=data.header
+ local private=dictionary.private
+ if private then
+ local subroutineoffset=private.data.subroutines
+ if subroutineoffset~=0 then
+ setposition(f,header.offset+private.offset+subroutineoffset)
+ local subroutines=readlengths(f)
+ for i=1,#subroutines do
+ subroutines[i]=readstring(f,subroutines[i])
end
- stoptiming("fontloader")
- if elapsedtime then
- report_otf("loading, optimizing, packing and caching time %s, pack time %s",
- elapsedtime("fontloader"),packdata and elapsedtime(packtime) or 0)
+ dictionary.subroutines=subroutines
+ private.data.subroutines=nil
+ else
+ dictionary.subroutines={}
+ end
+ else
+ dictionary.subroutines={}
+ end
+end
+local function readcharstrings(f,data)
+ local header=data.header
+ local dictionaries=data.dictionaries
+ local dictionary=dictionaries[1]
+ local type=dictionary.charstringtype
+ local offset=dictionary.charstrings
+ if type==2 then
+ setposition(f,header.offset+offset)
+ local charstrings=readlengths(f)
+ local nofglyphs=#charstrings
+ for i=1,nofglyphs do
+ charstrings[i]=readstring(f,charstrings[i])
+ end
+ data.nofglyphs=nofglyphs
+ dictionary.charstrings=charstrings
+ else
+ report("unsupported charstr type %i",type)
+ data.nofglyphs=0
+ dictionary.charstrings={}
+ end
+end
+local function readcidprivates(f,data)
+ local header=data.header
+ local dictionaries=data.dictionaries[1].cid.dictionaries
+ for i=1,#dictionaries do
+ local dictionary=dictionaries[i]
+ local private=dictionary.private
+ if private then
+ setposition(f,header.offset+private.offset)
+ private.data=readstring(f,private.size)
+ end
+ end
+ parseprivates(data,dictionaries)
+end
+local function readnoselect(f,data,glyphs,doshapes)
+ local dictionaries=data.dictionaries
+ local dictionary=dictionaries[1]
+ readglobals(f,data)
+ readcharstrings(f,data)
+ readencodings(f,data)
+ readcharsets(f,data,dictionary)
+ readprivates(f,data)
+ parseprivates(data,data.dictionaries)
+ readlocals(f,data,dictionary)
+ parsecharstrings(data,glyphs,doshapes)
+ resetcharstrings()
+end
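+-- CID keyed fonts use an fdselect table to map each glyph onto a font dictionary;
+-- every selected dictionary brings along its own private data and local
+-- subroutines.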
+local function readfdselect(f,data,glyphs,doshapes)
+ local header=data.header
+ local dictionaries=data.dictionaries
+ local dictionary=dictionaries[1]
+ local cid=dictionary.cid
+ local cidselect=cid and cid.fdselect
+ readglobals(f,data)
+ readcharstrings(f,data)
+ readencodings(f,data)
+ local charstrings=dictionary.charstrings
+ local fdindex={}
+ local nofglyphs=data.nofglyphs
+ local maxindex=-1
+ setposition(f,header.offset+cidselect)
+ local format=readbyte(f)
+ if format==1 then
+ for i=0,nofglyphs do
+ local index=readbyte(f)
+ fdindex[i]=index
+ if index>maxindex then
+ maxindex=index
+ end
+ end
+ elseif format==3 then
+ local nofranges=readushort(f)
+ local first=readushort(f)
+ local index=readbyte(f)
+ while true do
+ local last=readushort(f)
+ if index>maxindex then
+ maxindex=index
end
- close_font(fontdata)
- if cleanup>3 then
- collectgarbage("collect")
+ for i=first,last do
+ fdindex[i]=index
end
- data=containers.read(otf.cache,hash)
- if cleanup>2 then
- collectgarbage("collect")
+ if last>=nofglyphs then
+ break
+ else
+ first=last+1
+ index=readbyte(f)
end
- else
- stoptiming("fontloader")
- data=nil
- report_otf("loading failed due to read error")
end
+ else
end
- if data then
- if trace_defining then
- report_otf("loading from cache using hash %a",hash)
- end
- enhance("unpack",data,filename,nil,false)
- local resources=data.resources
- local lookuptags=resources.lookuptags
- local unicodes=resources.unicodes
- if not lookuptags then
- lookuptags={}
- resources.lookuptags=lookuptags
+ if maxindex>=0 then
+ local cidarray=cid.fdarray
+ setposition(f,header.offset+cidarray)
+ local dictionaries=readlengths(f)
+ for i=1,#dictionaries do
+ dictionaries[i]=readstring(f,dictionaries[i])
+ end
+ parsedictionaries(data,dictionaries)
+ cid.dictionaries=dictionaries
+ readcidprivates(f,data)
+ for i=1,#dictionaries do
+ readlocals(f,data,dictionaries[i])
+ end
+ for i=1,#charstrings do
+ parsecharstring(data,dictionaries[fdindex[i]+1],charstrings[i],glyphs,i,doshapes)
+ end
+ resetcharstrings()
+ end
+end
+function readers.cff(f,fontdata,specification)
+ if specification.details then
+ local datatable=fontdata.tables.cff
+ if datatable then
+ local offset=datatable.offset
+ local glyphs=fontdata.glyphs
+ if not f then
+ report("invalid filehandle")
+ return
+ end
+ if offset then
+ setposition(f,offset)
+ end
+ local header=readheader(f)
+ if header.major>1 then
+ report("version mismatch")
+ return
+ end
+ local names=readfontnames(f)
+ local dictionaries=readtopdictionaries(f)
+ local strings=readstrings(f)
+ local data={
+ header=header,
+ names=names,
+ dictionaries=dictionaries,
+ strings=strings,
+ nofglyphs=fontdata.nofglyphs,
+ }
+ parsedictionaries(data,data.dictionaries)
+ local d=dictionaries[1]
+ local c=d.cid
+ fontdata.cffinfo={
+ familyname=d.familyname,
+ fullname=d.fullname,
+ boundingbox=d.fontbbox,
+ weight=d.weight,
+ italicangle=d.italicangle,
+ underlineposition=d.underlineposition,
+ underlinethickness=d.underlinethickness,
+ monospaced=d.monospaced,
+ }
+ fontdata.cidinfo=c and {
+ registry=c.registry,
+ ordering=c.ordering,
+ supplement=c.supplement,
+ }
+ if not specification.glyphs then
+ else
+ local cid=d.cid
+ if cid and cid.fdselect then
+ readfdselect(f,data,glyphs,specification.shapes or false)
+ else
+ readnoselect(f,data,glyphs,specification.shapes or false)
+ end
+ end
end
- setmetatableindex(lookuptags,function(t,k)
- local v=type(k)=="number" and ("lookup "..k) or k
- t[k]=v
- return v
- end)
- if not unicodes then
- unicodes={}
- resources.unicodes=unicodes
- setmetatableindex(unicodes,function(t,k)
- setmetatableindex(unicodes,nil)
- for u,d in next,data.descriptions do
- local n=d.name
- if n then
- t[n]=u
- else
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-ttf']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local next,type,unpack=next,type,unpack
+local bittest=bit32.btest
+local sqrt=math.sqrt
+local report=logs.reporter("otf reader","ttf")
+local readers=fonts.handlers.otf.readers
+local streamreader=readers.streamreader
+local setposition=streamreader.setposition
+local getposition=streamreader.getposition
+local skipbytes=streamreader.skip
+local readbyte=streamreader.readcardinal1
+local readushort=streamreader.readcardinal2
+local readulong=streamreader.readcardinal4
+local readchar=streamreader.readinteger1
+local readshort=streamreader.readinteger2
+local read2dot14=streamreader.read2dot14
+local function mergecomposites(glyphs,shapes)
+ local function merge(index,shape,components)
+ local contours={}
+ local nofcontours=0
+ for i=1,#components do
+ local component=components[i]
+ local subindex=component.index
+ local subshape=shapes[subindex]
+ local subcontours=subshape.contours
+ if not subcontours then
+ local subcomponents=subshape.components
+ if subcomponents then
+ subcontours=merge(subindex,subshape,subcomponents)
+ end
+ end
+ if subcontours then
+ local matrix=component.matrix
+ local xscale=matrix[1]
+ local xrotate=matrix[2]
+ local yrotate=matrix[3]
+ local yscale=matrix[4]
+ local xoffset=matrix[5]
+ local yoffset=matrix[6]
+ for i=1,#subcontours do
+ local points=subcontours[i]
+ local result={}
+ for i=1,#points do
+ local p=points[i]
+ local x=p[1]
+ local y=p[2]
+ result[i]={
+ xscale*x+xrotate*y+xoffset,
+ yscale*y+yrotate*x+yoffset,
+ p[3]
+ }
end
+ nofcontours=nofcontours+1
+ contours[nofcontours]=result
end
- return rawget(t,k)
- end)
+ else
+ report("missing contours composite %s, component %s of %s, glyph %s",index,i,#components,subindex)
+ end
end
- constructors.addcoreunicodes(unicodes)
- if applyruntimefixes then
- applyruntimefixes(filename,data)
+ shape.contours=contours
+ shape.components=nil
+ return contours
+ end
+ for index=1,#glyphs do
+ local shape=shapes[index]
+ local components=shape.components
+ if components then
+ merge(index,shape,components)
end
- enhance("add dimensions",data,filename,nil,false)
- if trace_sequences then
- showfeatureorder(data,filename)
+ end
+end
+local function readnothing(f,nofcontours)
+ return {
+ type="nothing",
+ }
+end
+local function curveto(m_x,m_y,l_x,l_y,r_x,r_y)
+ return {
+ l_x+2/3*(m_x-l_x),l_y+2/3*(m_y-l_y),
+ r_x+2/3*(m_x-r_x),r_y+2/3*(m_y-r_y),
+ r_x,r_y,"c"
+ }
+end
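+-- The curveto helper above turns a quadratic segment into a cubic one by placing
+-- the control points at two thirds of the way towards the quadratic control
+-- point; it is only used when quadratic is set to false.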
+local function contours2outlines(glyphs,shapes)
+ local quadratic=true
+ for index=1,#glyphs do
+ local glyph=glyphs[index]
+ local shape=shapes[index]
+ local contours=shape.contours
+ if contours then
+ local nofcontours=#contours
+ local segments={}
+ local nofsegments=0
+ glyph.segments=segments
+ if nofcontours>0 then
+ for i=1,nofcontours do
+ local contour=contours[i]
+ local nofcontour=#contour
+ if nofcontour>0 then
+ local first_pt=contour[1]
+ local first_on=first_pt[3]
+ if nofcontour==1 then
+ first_pt[3]="m"
+ nofsegments=nofsegments+1
+ segments[nofsegments]=first_pt
+ else
+ local first_on=first_pt[3]
+ local last_pt=contour[nofcontour]
+ local last_on=last_pt[3]
+ local start=1
+ local control_pt=false
+ if first_on then
+ start=2
+ else
+ if last_on then
+ first_pt=last_pt
+ else
+ first_pt={ (first_pt[1]+last_pt[1])/2,(first_pt[2]+last_pt[2])/2,false }
+ end
+ control_pt=first_pt
+ end
+ nofsegments=nofsegments+1
+ segments[nofsegments]={ first_pt[1],first_pt[2],"m" }
+ local previous_pt=first_pt
+ for i=start,nofcontour do
+ local current_pt=contour[i]
+ local current_on=current_pt[3]
+ local previous_on=previous_pt[3]
+ if previous_on then
+ if current_on then
+ nofsegments=nofsegments+1
+ segments[nofsegments]={ current_pt[1],current_pt[2],"l" }
+ else
+ control_pt=current_pt
+ end
+ elseif current_on then
+ local ps=segments[nofsegments]
+ nofsegments=nofsegments+1
+ if quadratic then
+ segments[nofsegments]={ control_pt[1],control_pt[2],current_pt[1],current_pt[2],"q" }
+ else
+ local p=segments[nofsegments-1] local n=#p
+ segments[nofsegments]=curveto(control_pt[1],control_pt[2],p[n-2],p[n-1],current_pt[1],current_pt[2])
+ end
+ control_pt=false
+ else
+ nofsegments=nofsegments+1
+ local halfway_x=(previous_pt[1]+current_pt[1])/2
+ local halfway_y=(previous_pt[2]+current_pt[2])/2
+ if quadratic then
+ segments[nofsegments]={ control_pt[1],control_pt[2],halfway_x,halfway_y,"q" }
+ else
+ local p=segments[nofsegments-1] local n=#p
+ segments[nofsegments]=curveto(control_pt[1],control_pt[2],p[n-2],p[n-1],halfway_x,halfway_y)
+ end
+ control_pt=current_pt
+ end
+ previous_pt=current_pt
+ end
+ if first_pt==last_pt then
+ else
+ nofsegments=nofsegments+1
+ if not control_pt then
+ segments[nofsegments]={ first_pt[1],first_pt[2],"l" }
+ elseif quadratic then
+ segments[nofsegments]={ control_pt[1],control_pt[2],first_pt[1],first_pt[2],"q" }
+ else
+ local p=last_pt local n=#p
+ segments[nofsegments]=curveto(control_pt[1],control_pt[2],p[n-2],p[n-1],first_pt[1],first_pt[2])
+ end
+ end
+ end
+ end
+ end
+ end
end
end
- return data
end
-local mt={
- __index=function(t,k)
- if k=="height" then
- local ht=t.boundingbox[4]
- return ht<0 and 0 or ht
- elseif k=="depth" then
- local dp=-t.boundingbox[2]
- return dp<0 and 0 or dp
- elseif k=="width" then
- return 0
- elseif k=="name" then
- return forcenotdef and ".notdef"
+local function readglyph(f,nofcontours)
+ local points={}
+ local endpoints={}
+ local instructions={}
+ local flags={}
+ for i=1,nofcontours do
+ endpoints[i]=readshort(f)+1
+ end
+ local nofpoints=endpoints[nofcontours]
+ local nofinstructions=readushort(f)
+ skipbytes(f,nofinstructions)
+ local i=1
+ while i<=nofpoints do
+ local flag=readbyte(f)
+ flags[i]=flag
+ if bittest(flag,0x0008) then
+ for j=1,readbyte(f) do
+ i=i+1
+ flags[i]=flag
+ end
end
+ i=i+1
end
-}
-actions["prepare tables"]=function(data,filename,raw)
- data.properties.hasitalics=false
+ local x=0
+ local y=0
+ for i=1,nofpoints do
+ local flag=flags[i]
+ local short=bittest(flag,0x0002)
+ local same=bittest(flag,0x0010)
+ if short then
+ if same then
+ x=x+readbyte(f)
+ else
+ x=x-readbyte(f)
+ end
+ elseif same then
+ else
+ x=x+readshort(f)
+ end
+ points[i]={ x,y,bittest(flag,0x0001) }
+ end
+ y=0
+ for i=1,nofpoints do
+ local flag=flags[i]
+ local short=bittest(flag,0x0004)
+ local same=bittest(flag,0x0020)
+ if short then
+ if same then
+ y=y+readbyte(f)
+ else
+ y=y-readbyte(f)
+ end
+ elseif same then
+ else
+ y=y+readshort(f)
+ end
+ points[i][2]=y
+ end
+ local first=1
+ for i=1,#endpoints do
+ local last=endpoints[i]
+ endpoints[i]={ unpack(points,first,last) }
+ first=last+1
+ end
+ return {
+ type="glyph",
+ contours=endpoints,
+ }
end
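+-- Composite glyphs reference other glyphs: the flags tell whether the arguments
+-- are words or bytes, offsets or point indices, and which scale variant
+-- (uniform, x and y, or two by two) applies.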
-actions["add dimensions"]=function(data,filename)
- if data then
- local descriptions=data.descriptions
- local resources=data.resources
- local defaultwidth=resources.defaultwidth or 0
- local defaultheight=resources.defaultheight or 0
- local defaultdepth=resources.defaultdepth or 0
- local basename=trace_markwidth and file.basename(filename)
- for _,d in next,descriptions do
- local bb,wd=d.boundingbox,d.width
- if not wd then
- d.width=defaultwidth
- elseif trace_markwidth and wd~=0 and d.class=="mark" then
- report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
+local function readcomposite(f)
+ local components={}
+ local nofcomponents=0
+ local instructions=false
+ while true do
+ local flags=readushort(f)
+ local index=readushort(f)
+ local f_xyarg=bittest(flags,0x0002)
+ local f_offset=bittest(flags,0x0800)
+ local xscale=1
+ local xrotate=0
+ local yrotate=0
+ local yscale=1
+ local xoffset=0
+ local yoffset=0
+ local base=false
+ local reference=false
+ if f_xyarg then
+ if bittest(flags,0x0001) then
+ xoffset=readshort(f)
+ yoffset=readshort(f)
+ else
+ xoffset=readchar(f)
+ yoffset=readchar(f)
end
- if bb then
- local ht=bb[4]
- local dp=-bb[2]
- if ht==0 or ht<0 then
- else
- d.height=ht
- end
- if dp==0 or dp<0 then
+ else
+ if bittest(flags,0x0001) then
+ base=readshort(f)
+ reference=readshort(f)
+ else
+ base=readchar(f)
+ reference=readchar(f)
+ end
+ end
+ if bittest(flags,0x0008) then
+ xscale=read2dot14(f)
+ yscale=xscale
+ if f_xyarg and f_offset then
+ xoffset=xoffset*xscale
+ yoffset=yoffset*yscale
+ end
+ elseif bittest(flags,0x0040) then
+ xscale=read2dot14(f)
+ yscale=read2dot14(f)
+ if f_xyarg and f_offset then
+ xoffset=xoffset*xscale
+ yoffset=yoffset*yscale
+ end
+ elseif bittest(flags,0x0080) then
+ xscale=read2dot14(f)
+ xrotate=read2dot14(f)
+ yrotate=read2dot14(f)
+ yscale=read2dot14(f)
+ if f_xyarg and f_offset then
+ xoffset=xoffset*sqrt(xscale^2+xrotate^2)
+ yoffset=yoffset*sqrt(yrotate^2+yscale^2)
+ end
+ end
+ nofcomponents=nofcomponents+1
+ components[nofcomponents]={
+ index=index,
+ usemine=bittest(flags,0x0200),
+ round=bittest(flags,0x0006),
+ base=base,
+ reference=reference,
+ matrix={ xscale,xrotate,yrotate,yscale,xoffset,yoffset },
+ }
+ if bittest(flags,0x0100) then
+ instructions=true
+ end
+ if not bittest(flags,0x0020) then
+ break
+ end
+ end
+ return {
+ type="composite",
+ components=components,
+ }
+end
+function readers.loca(f,fontdata,specification)
+ if specification.glyphs then
+ local datatable=fontdata.tables.loca
+ if datatable then
+ local offset=fontdata.tables.glyf.offset
+ local format=fontdata.fontheader.indextolocformat
+ local locations={}
+ setposition(f,datatable.offset)
+ if format==1 then
+ local nofglyphs=datatable.length/4-1-1
+ for i=0,nofglyphs do
+ locations[i]=offset+readulong(f)
+ end
+ fontdata.nofglyphs=nofglyphs
+ else
+ local nofglyphs=datatable.length/2-1-1
+ for i=0,nofglyphs do
+ locations[i]=offset+readushort(f)*2
+ end
+ fontdata.nofglyphs=nofglyphs
+ end
+ fontdata.locations=locations
+ end
+ end
+end
+function readers.glyf(f,fontdata,specification)
+ if specification.glyphs then
+ local datatable=fontdata.tables.glyf
+ if datatable then
+ local locations=fontdata.locations
+ if locations then
+ local glyphs=fontdata.glyphs
+ local nofglyphs=fontdata.nofglyphs
+ local filesize=fontdata.filesize
+ local nothing={ 0,0,0,0 }
+ local shapes={}
+ local loadshapes=specification.shapes
+ for index=0,nofglyphs do
+ local location=locations[index]
+ if location>=filesize then
+ report("discarding %s glyphs due to glyph location bug",nofglyphs-index+1)
+ fontdata.nofglyphs=index-1
+ fontdata.badfont=true
+ break
+ elseif location>0 then
+ setposition(f,location)
+ local nofcontours=readshort(f)
+ glyphs[index].boundingbox={
+ readshort(f),
+ readshort(f),
+ readshort(f),
+ readshort(f),
+ }
+ if not loadshapes then
+ elseif nofcontours==0 then
+ shapes[index]=readnothing(f,nofcontours)
+ elseif nofcontours>0 then
+ shapes[index]=readglyph(f,nofcontours)
+ else
+ shapes[index]=readcomposite(f,nofcontours)
+ end
else
- d.depth=dp
+ if loadshapes then
+ shapes[index]={}
+ end
+ glyphs[index].boundingbox=nothing
end
+ end
+ if loadshapes then
+ mergecomposites(glyphs,shapes)
+ contours2outlines(glyphs,shapes)
+ end
end
end
end
end
-local function somecopy(old)
- if old then
- local new={}
- if type(old)=="table" then
- for k,v in next,old do
- if k=="glyphs" then
- elseif type(v)=="table" then
- new[k]=somecopy(v)
- else
- new[k]=v
- end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-dsp']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local next,type=next,type
+local bittest=bit32.btest
+local rshift=bit32.rshift
+local concat=table.concat
+local lower=string.lower
+local sub=string.sub
+local strip=string.strip
+local tohash=table.tohash
+local reversed=table.reversed
+local setmetatableindex=table.setmetatableindex
+local formatters=string.formatters
+local sortedkeys=table.sortedkeys
+local sortedhash=table.sortedhash
+local report=logs.reporter("otf reader")
+local readers=fonts.handlers.otf.readers
+local streamreader=readers.streamreader
+local setposition=streamreader.setposition
+local skipbytes=streamreader.skip
+local skipshort=streamreader.skipshort
+local readushort=streamreader.readcardinal2
+local readulong=streamreader.readcardinal4
+local readshort=streamreader.readinteger2
+local readfword=readshort
+local readstring=streamreader.readstring
+local readtag=streamreader.readtag
+local gsubhandlers={}
+local gposhandlers={}
+local lookupidoffset=-1
+local classes={
+ "base",
+ "ligature",
+ "mark",
+ "component",
+}
+local gsubtypes={
+ "single",
+ "multiple",
+ "alternate",
+ "ligature",
+ "context",
+ "chainedcontext",
+ "extension",
+ "reversechainedcontextsingle",
+}
+local gpostypes={
+ "single",
+ "pair",
+ "cursive",
+ "marktobase",
+ "marktoligature",
+ "marktomark",
+ "context",
+ "chainedcontext",
+ "extension",
+}
+local chaindirections={
+ context=0,
+ chainedcontext=1,
+ reversechainedcontextsingle=-1,
+}
+local lookupnames={
+ gsub={
+ single="gsub_single",
+ multiple="gsub_multiple",
+ alternate="gsub_alternate",
+ ligature="gsub_ligature",
+ context="gsub_context",
+ chainedcontext="gsub_contextchain",
+ reversechainedcontextsingle="gsub_reversecontextchain",
+ },
+ gpos={
+ single="gpos_single",
+ pair="gpos_pair",
+ cursive="gpos_cursive",
+ marktobase="gpos_mark2base",
+ marktoligature="gpos_mark2ligature",
+ marktomark="gpos_mark2mark",
+ context="gpos_context",
+ chainedcontext="gpos_contextchain",
+ }
+}
+local lookupflags=setmetatableindex(function(t,k)
+ local v={
+ bittest(k,0x0008) and true or false,
+ bittest(k,0x0004) and true or false,
+ bittest(k,0x0002) and true or false,
+ bittest(k,0x0001) and true or false,
+ }
+ t[k]=v
+ return v
+end)
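+-- Coverage tables come in two formats: a plain list of glyph indices (1) or a
+-- set of ranges (2); in simple mode an array of glyphs is returned, otherwise a
+-- glyph to coverage index hash.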
+local function readcoverage(f,offset,simple)
+ setposition(f,offset)
+ local coverageformat=readushort(f)
+ local coverage={}
+ if coverageformat==1 then
+ local nofcoverage=readushort(f)
+ if simple then
+ for i=1,nofcoverage do
+ coverage[i]=readushort(f)
end
else
- for i=1,#mainfields do
- local k=mainfields[i]
- local v=old[k]
- if k=="glyphs" then
- elseif type(v)=="table" then
- new[k]=somecopy(v)
- else
- new[k]=v
+ for i=0,nofcoverage-1 do
+ coverage[readushort(f)]=i
+ end
+ end
+ elseif coverageformat==2 then
+ local nofranges=readushort(f)
+ local n=simple and 1 or 0
+ for i=1,nofranges do
+ local firstindex=readushort(f)
+ local lastindex=readushort(f)
+ local coverindex=readushort(f)
+ if simple then
+ for i=firstindex,lastindex do
+ coverage[n]=i
+ n=n+1
+ end
+ else
+ for i=firstindex,lastindex do
+ coverage[i]=n
+ n=n+1
end
end
end
- return new
else
- return {}
+ report("unknown coverage format %a ",coverageformat)
+ end
+ return coverage
+end
+local function readclassdef(f,offset)
+ setposition(f,offset)
+ local classdefformat=readushort(f)
+ local classdef={}
+ if classdefformat==1 then
+ local index=readushort(f)
+ local nofclassdef=readushort(f)
+ for i=1,nofclassdef do
+ classdef[index]=readushort(f)+1
+ index=index+1
+ end
+ elseif classdefformat==2 then
+ local nofranges=readushort(f)
+ local n=0
+ for i=1,nofranges do
+ local firstindex=readushort(f)
+ local lastindex=readushort(f)
+ local class=readushort(f)+1
+ for i=firstindex,lastindex do
+ classdef[i]=class
+ end
+ end
+ else
+ report("unknown classdef format %a ",classdefformat)
end
+ return classdef
end
-actions["prepare glyphs"]=function(data,filename,raw)
- local rawglyphs=raw.glyphs
- local rawsubfonts=raw.subfonts
- local rawcidinfo=raw.cidinfo
- local criterium=constructors.privateoffset
- local private=criterium
- local resources=data.resources
- local metadata=data.metadata
- local properties=data.properties
- local descriptions=data.descriptions
- local unicodes=resources.unicodes
- local indices=resources.indices
- local duplicates=resources.duplicates
- local variants=resources.variants
- local notdefindex=-1
- if rawsubfonts then
- metadata.subfonts=includesubfonts and {}
- properties.cidinfo=rawcidinfo
- if rawcidinfo.registry then
- local cidmap=fonts.cid.getmap(rawcidinfo)
- if cidmap then
- rawcidinfo.usedname=cidmap.usedname
- local nofnames=0
- local nofunicodes=0
- local cidunicodes=cidmap.unicodes
- local cidnames=cidmap.names
- local cidtotal=0
- local unique=trace_subfonts and {}
- for cidindex=1,#rawsubfonts do
- local subfont=rawsubfonts[cidindex]
- local cidglyphs=subfont.glyphs
- if includesubfonts then
- metadata.subfonts[cidindex]=somecopy(subfont)
- end
- local cidcnt=subfont.glyphcnt
- local cidmin=subfont.glyphmin
- local cidmax=subfont.glyphmax
- local notdef=(tonumber(raw.table_version) or 0)>0.4 and subfont.notdef_loc or -1
- if notdeffound==-1 and notdef>=0 then
- notdeffound=notdef
- end
- if trace_subfonts then
- local cidtot=cidmax-cidmin+1
- cidtotal=cidtotal+cidtot
- report_otf("subfont: %i, min: %i, max: %i, cnt: %i, n: %i",cidindex,cidmin,cidmax,cidtot,cidcnt)
- end
- if cidcnt>0 then
- for index=cidmin,cidmax do
- local glyph=cidglyphs[index]
- if glyph then
- if trace_subfonts then
- unique[index]=true
- end
- local unicode=glyph.unicode
- if unicode>=0x00E000 and unicode<=0x00F8FF then
- unicode=-1
- elseif unicode>=0x0F0000 and unicode<=0x0FFFFD then
- unicode=-1
- elseif unicode>=0x100000 and unicode<=0x10FFFD then
- unicode=-1
- end
- local name=glyph.name or cidnames[index]
- if not unicode or unicode==-1 then
- unicode=cidunicodes[index]
- end
- if unicode and descriptions[unicode] then
- if trace_private then
- report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode)
- end
- unicode=-1
+local function classtocoverage(defs)
+ if defs then
+ local list={}
+ for index,class in next,defs do
+ local c=list[class]
+ if c then
+ c[#c+1]=index
+ else
+ list[class]={ index }
+ end
+ end
+ return list
+ end
+end
+local function readposition(f,format)
+ if format==0 then
+ return nil
+ end
+ local x=bittest(format,0x0001) and readshort(f) or 0
+ local y=bittest(format,0x0002) and readshort(f) or 0
+ local h=bittest(format,0x0004) and readshort(f) or 0
+ local v=bittest(format,0x0008) and readshort(f) or 0
+ if x==0 and y==0 and h==0 and v==0 then
+ return nil
+ else
+ return { x,y,h,v }
+ end
+end
+local function readanchor(f,offset)
+ if not offset or offset==0 then
+ return nil
+ end
+ setposition(f,offset)
+ local format=readshort(f)
+ if format==0 then
+ report("invalid anchor format %i @ position %i",format,offset)
+ return false
+ elseif format>3 then
+ report("unsupported anchor format %i @ position %i",format,offset)
+ return false
+ end
+ return { readshort(f),readshort(f) }
+end
+local function readfirst(f,offset)
+ if offset then
+ setposition(f,offset)
+ end
+ return { readushort(f) }
+end
+local function readarray(f,offset,first)
+ if offset then
+ setposition(f,offset)
+ end
+ local n=readushort(f)
+ if first then
+ local t={ first }
+ for i=2,n do
+ t[i]=readushort(f)
+ end
+ return t,n
+ elseif n>0 then
+ local t={}
+ for i=1,n do
+ t[i]=readushort(f)
+ end
+ return t,n
+ end
+end
+local function readcoveragearray(f,offset,t,simple)
+ if not t then
+ return nil
+ end
+ local n=#t
+ if n==0 then
+ return nil
+ end
+ for i=1,n do
+ t[i]=readcoverage(f,offset+t[i],simple)
+ end
+ return t
+end
+local function covered(subset,all)
+ local used,u
+ for i=1,#subset do
+ local s=subset[i]
+ if all[s] then
+ if used then
+ u=u+1
+ used[u]=s
+ else
+ u=1
+ used={ s }
+ end
+ end
+ end
+ return used
+end
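+-- Contextual lookups come in three flavours: glyph based rules (1), class based
+-- rules (2) and coverage based rules (3); they all end up as rules with a
+-- current sequence (for chains also before and after) plus nested lookups.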
+local function unchainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,what)
+ local tableoffset=lookupoffset+offset
+ setposition(f,tableoffset)
+ local subtype=readushort(f)
+ if subtype==1 then
+ local coverage=readushort(f)
+ local subclasssets=readarray(f)
+ local rules={}
+ if subclasssets then
+ coverage=readcoverage(f,tableoffset+coverage,true)
+ for i=1,#subclasssets do
+ local offset=subclasssets[i]
+ if offset>0 then
+ local firstcoverage=coverage[i]
+ local rulesoffset=tableoffset+offset
+ local subclassrules=readarray(f,rulesoffset)
+ for rule=1,#subclassrules do
+ setposition(f,rulesoffset+subclassrules[rule])
+ local nofcurrent=readushort(f)
+ local noflookups=readushort(f)
+ local current={ { firstcoverage } }
+ for i=2,nofcurrent do
+ current[i]={ readushort(f) }
+ end
+ local lookups={}
+ for i=1,noflookups do
+ lookups[readushort(f)+1]=readushort(f)+1
+ end
+ rules[#rules+1]={
+ current=current,
+ lookups=lookups
+ }
+ end
+ end
+ end
+ else
+ report("empty subclassset in %a subtype %i","unchainedcontext",subtype)
+ end
+ return {
+ format="glyphs",
+ rules=rules,
+ }
+ elseif subtype==2 then
+ local coverage=readushort(f)
+ local currentclassdef=readushort(f)
+ local subclasssets=readarray(f)
+ local rules={}
+ if subclasssets then
+ coverage=readcoverage(f,tableoffset+coverage)
+ currentclassdef=readclassdef(f,tableoffset+currentclassdef)
+ local currentclasses=classtocoverage(currentclassdef,fontdata.glyphs)
+ for class=1,#subclasssets do
+ local offset=subclasssets[class]
+ if offset>0 then
+ local firstcoverage=currentclasses[class]
+ if firstcoverage then
+ firstcoverage=covered(firstcoverage,coverage)
+ if firstcoverage then
+ local rulesoffset=tableoffset+offset
+ local subclassrules=readarray(f,rulesoffset)
+ for rule=1,#subclassrules do
+ setposition(f,rulesoffset+subclassrules[rule])
+ local nofcurrent=readushort(f)
+ local noflookups=readushort(f)
+ local current={ firstcoverage }
+ for i=2,nofcurrent do
+ current[i]=currentclasses[readushort(f)+1]
end
- if not unicode or unicode==-1 then
- if not name then
- name=formatters["u%06X.ctx"](private)
- end
- unicode=private
- unicodes[name]=private
- if trace_private then
- report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
- end
- private=private+1
- nofnames=nofnames+1
- else
- if not name then
- name=formatters["u%06X.ctx"](unicode)
- end
- unicodes[name]=unicode
- nofunicodes=nofunicodes+1
+ local lookups={}
+ for i=1,noflookups do
+ lookups[readushort(f)+1]=readushort(f)+1
end
- indices[index]=unicode
- local description={
- boundingbox=glyph.boundingbox,
- name=name or "unknown",
- index=index,
- glyph=glyph,
+ rules[#rules+1]={
+ current=current,
+ lookups=lookups
}
- descriptions[unicode]=description
- local altuni=glyph.altuni
- if altuni then
- for i=1,#altuni do
- local a=altuni[i]
- local u=a.unicode
- if u~=unicode then
- local v=a.variant
- if v then
- local vv=variants[v]
- if vv then
- vv[u]=unicode
- else
- vv={ [u]=unicode }
- variants[v]=vv
- end
- end
- end
- end
- end
end
+ else
+ report("no coverage")
end
else
- report_otf("potential problem: no glyphs found in subfont %i",cidindex)
+ report("no coverage class")
end
end
- if trace_subfonts then
- report_otf("nofglyphs: %i, unique: %i",cidtotal,table.count(unique))
- end
- if trace_loading then
- report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes,nofnames,nofunicodes+nofnames)
- end
- elseif trace_loading then
- report_otf("unable to remap cid font, missing cid file for %a",filename)
end
- elseif trace_loading then
- report_otf("font %a has no glyphs",filename)
+ else
+ report("empty subclassset in %a subtype %i","unchainedcontext",subtype)
end
+ return {
+ format="class",
+ rules=rules,
+ }
+ elseif subtype==3 then
+ local current=readarray(f)
+ local noflookups=readushort(f)
+ local lookups={}
+ for i=1,noflookups do
+ lookups[readushort(f)+1]=readushort(f)+1
+ end
+ current=readcoveragearray(f,tableoffset,current,true)
+ return {
+ format="coverage",
+ rules={
+ {
+ current=current,
+ lookups=lookups,
+ }
+ }
+ }
else
- local cnt=raw.glyphcnt or 0
- local min=raw.glyphmin or 0
- local max=raw.glyphmax or (raw.glyphcnt-1)
- notdeffound=(tonumber(raw.table_version) or 0)>0.4 and raw.notdef_loc or -1
- if cnt>0 then
- for index=min,max do
- local glyph=rawglyphs[index]
- if glyph then
- local unicode=glyph.unicode
- local name=glyph.name
- if not unicode or unicode==-1 then
- unicode=private
- unicodes[name]=private
- if trace_private then
- report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
+ report("unsupported subtype %a in %a %s",subtype,"unchainedcontext",what)
+ end
+end
+local function chainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,what)
+ local tableoffset=lookupoffset+offset
+ setposition(f,tableoffset)
+ local subtype=readushort(f)
+ if subtype==1 then
+ local coverage=readushort(f)
+ local subclasssets=readarray(f)
+ local rules={}
+ if subclasssets then
+ coverage=readcoverage(f,tableoffset+coverage,true)
+ for i=1,#subclasssets do
+ local offset=subclasssets[i]
+ if offset>0 then
+ local firstcoverage=coverage[i]
+ local rulesoffset=tableoffset+offset
+ local subclassrules=readarray(f,rulesoffset)
+ for rule=1,#subclassrules do
+ setposition(f,rulesoffset+subclassrules[rule])
+ local nofbefore=readushort(f)
+ local before
+ if nofbefore>0 then
+ before={}
+ for i=1,nofbefore do
+ before[i]={ readushort(f) }
+ end
end
- private=private+1
- else
- if unicode>criterium then
- local taken=descriptions[unicode]
- if taken then
- if unicode>=private then
- private=unicode+1
- else
- private=private+1
- end
- descriptions[private]=taken
- unicodes[taken.name]=private
- indices[taken.index]=private
- if trace_private then
- report_otf("slot %U is moved to %U due to private in font",unicode)
- end
- else
- if unicode>=private then
- private=unicode+1
- end
+ local nofcurrent=readushort(f)
+ local current={ { firstcoverage } }
+ for i=2,nofcurrent do
+ current[i]={ readushort(f) }
+ end
+ local nofafter=readushort(f)
+ local after
+ if nofafter>0 then
+ after={}
+ for i=1,nofafter do
+ after[i]={ readushort(f) }
end
end
- unicodes[name]=unicode
- end
- indices[index]=unicode
- descriptions[unicode]={
- boundingbox=glyph.boundingbox,
- name=name,
- index=index,
- glyph=glyph,
- }
- local altuni=glyph.altuni
- if altuni then
- for i=1,#altuni do
- local a=altuni[i]
- local u=a.unicode
- if u~=unicode then
- local v=a.variant
- if v then
- local vv=variants[v]
- if vv then
- vv[u]=unicode
- else
- vv={ [u]=unicode }
- variants[v]=vv
+ local noflookups=readushort(f)
+ local lookups={}
+ for i=1,noflookups do
+ lookups[readushort(f)+1]=readushort(f)+1
+ end
+ rules[#rules+1]={
+ before=before,
+ current=current,
+ after=after,
+ lookups=lookups,
+ }
+ end
+ end
+ end
+ else
+ report("empty subclassset in %a subtype %i","chainedcontext",subtype)
+ end
+ return {
+ format="glyphs",
+ rules=rules,
+ }
+ elseif subtype==2 then
+ local coverage=readushort(f)
+ local beforeclassdef=readushort(f)
+ local currentclassdef=readushort(f)
+ local afterclassdef=readushort(f)
+ local subclasssets=readarray(f)
+ local rules={}
+ if subclasssets then
+ local coverage=readcoverage(f,tableoffset+coverage)
+ local beforeclassdef=readclassdef(f,tableoffset+beforeclassdef)
+ local currentclassdef=readclassdef(f,tableoffset+currentclassdef)
+ local afterclassdef=readclassdef(f,tableoffset+afterclassdef)
+ local beforeclasses=classtocoverage(beforeclassdef,fontdata.glyphs)
+ local currentclasses=classtocoverage(currentclassdef,fontdata.glyphs)
+ local afterclasses=classtocoverage(afterclassdef,fontdata.glyphs)
+ for class=1,#subclasssets do
+ local offset=subclasssets[class]
+ if offset>0 then
+ local firstcoverage=currentclasses[class]
+ if firstcoverage then
+ firstcoverage=covered(firstcoverage,coverage)
+ if firstcoverage then
+ local rulesoffset=tableoffset+offset
+ local subclassrules=readarray(f,rulesoffset)
+ for rule=1,#subclassrules do
+ setposition(f,rulesoffset+subclassrules[rule])
+ local nofbefore=readushort(f)
+ local before
+ if nofbefore>0 then
+ before={}
+ for i=1,nofbefore do
+ before[i]=beforeclasses[readushort(f)+1]
+ end
+ end
+ local nofcurrent=readushort(f)
+ local current={ firstcoverage }
+ for i=2,nofcurrent do
+ current[i]=currentclasses[readushort(f)+1]
+ end
+ local nofafter=readushort(f)
+ local after
+ if nofafter>0 then
+ after={}
+ for i=1,nofafter do
+ after[i]=afterclasses[readushort(f)+1]
end
end
+ local noflookups=readushort(f)
+ local lookups={}
+ for i=1,noflookups do
+ lookups[readushort(f)+1]=readushort(f)+1
+ end
+ rules[#rules+1]={
+ before=before,
+ current=current,
+ after=after,
+ lookups=lookups,
+ }
end
+ else
+ report("no coverage")
end
+ else
+ report("class is not covered")
end
- else
- report_otf("potential problem: glyph %U is used but empty",index)
end
end
else
- report_otf("potential problem: no glyphs found")
+ report("empty subclassset in %a subtype %i","chainedcontext",subtype)
+ end
+ return {
+ format="class",
+ rules=rules,
+ }
+ elseif subtype==3 then
+ local before=readarray(f)
+ local current=readarray(f)
+ local after=readarray(f)
+ local noflookups=readushort(f)
+ local lookups={}
+ for i=1,noflookups do
+ lookups[readushort(f)+1]=readushort(f)+1
+ end
+ before=readcoveragearray(f,tableoffset,before,true)
+ current=readcoveragearray(f,tableoffset,current,true)
+ after=readcoveragearray(f,tableoffset,after,true)
+ return {
+ format="coverage",
+ rules={
+ {
+ before=before,
+ current=current,
+ after=after,
+ lookups=lookups,
+ }
+ }
+ }
+ else
+ report("unsupported subtype %a in %a %s",subtype,"chainedcontext",what)
+ end
+end
+local function extension(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,types,handlers,what)
+ local tableoffset=lookupoffset+offset
+ setposition(f,tableoffset)
+ local subtype=readushort(f)
+ if subtype==1 then
+ local lookuptype=types[readushort(f)]
+ local faroffset=readulong(f)
+ local handler=handlers[lookuptype]
+ if handler then
+ return handler(f,fontdata,lookupid,tableoffset+faroffset,0,glyphs,nofglyphs),lookuptype
+ else
+ report("no handler for lookuptype %a subtype %a in %s %s",lookuptype,subtype,what,"extension")
end
+ else
+ report("unsupported subtype %a in %s %s",subtype,what,"extension")
+ end
+end
+function gsubhandlers.single(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ local tableoffset=lookupoffset+offset
+ setposition(f,tableoffset)
+ local subtype=readushort(f)
+ if subtype==1 then
+ local coverage=readushort(f)
+ local delta=readshort(f)
+ local coverage=readcoverage(f,tableoffset+coverage)
+ for index in next,coverage do
+ local newindex=index+delta
+ if index>nofglyphs or newindex>nofglyphs then
+ report("invalid index in %s format %i: %i -> %i (max %i)","single",subtype,index,newindex,nofglyphs)
+ coverage[index]=nil
+ else
+ coverage[index]=newindex
+ end
+ end
+ return {
+ coverage=coverage
+ }
+ elseif subtype==2 then
+ local coverage=readushort(f)
+ local nofreplacements=readushort(f)
+ local replacements={}
+ for i=1,nofreplacements do
+ replacements[i]=readushort(f)
+ end
+ local coverage=readcoverage(f,tableoffset+coverage)
+ for index,newindex in next,coverage do
+ newindex=newindex+1
+ if index>nofglyphs or newindex>nofglyphs then
+ report("invalid index in %s format %i: %i -> %i (max %i)","single",subtype,index,newindex,nofglyphs)
+ coverage[index]=nil
+ else
+ coverage[index]=replacements[newindex]
+ end
+ end
+ return {
+ coverage=coverage
+ }
+ else
+ report("unsupported subtype %a in %a substitution",subtype,"single")
+ end
+end
+local function sethandler(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,what)
+ local tableoffset=lookupoffset+offset
+ setposition(f,tableoffset)
+ local subtype=readushort(f)
+ if subtype==1 then
+ local coverage=readushort(f)
+ local nofsequence=readushort(f)
+ local sequences={}
+ for i=1,nofsequence do
+ sequences[i]=readushort(f)
+ end
+ for i=1,nofsequence do
+ setposition(f,tableoffset+sequences[i])
+ local n=readushort(f)
+ local s={}
+ for i=1,n do
+ s[i]=readushort(f)
+ end
+ sequences[i]=s
+ end
+ local coverage=readcoverage(f,tableoffset+coverage)
+ for index,newindex in next,coverage do
+ newindex=newindex+1
+ if index>nofglyphs or newindex>nofglyphs then
+ report("invalid index in %s format %i: %i -> %i (max %i)",what,subtype,index,newindex,nofglyphs)
+ coverage[index]=nil
+ else
+ coverage[index]=sequences[newindex]
+ end
+ end
+ return {
+ coverage=coverage
+ }
+ else
+ report("unsupported subtype %a in %a substitution",subtype,what)
+ end
+end
+function gsubhandlers.multiple(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return sethandler(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,"multiple")
+end
+function gsubhandlers.alternate(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return sethandler(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,"alternate")
+end
+function gsubhandlers.ligature(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ local tableoffset=lookupoffset+offset
+ setposition(f,tableoffset)
+ local subtype=readushort(f)
+ if subtype==1 then
+ local coverage=readushort(f)
+ local nofsets=readushort(f)
+ local ligatures={}
+ for i=1,nofsets do
+ ligatures[i]=readushort(f)
+ end
+ for i=1,nofsets do
+ local offset=lookupoffset+offset+ligatures[i]
+ setposition(f,offset)
+ local n=readushort(f)
+ local l={}
+ for i=1,n do
+ l[i]=offset+readushort(f)
+ end
+ ligatures[i]=l
+ end
+ local coverage=readcoverage(f,tableoffset+coverage)
+ for index,newindex in next,coverage do
+ local hash={}
+ local ligatures=ligatures[newindex+1]
+ for i=1,#ligatures do
+ local offset=ligatures[i]
+ setposition(f,offset)
+ local lig=readushort(f)
+ local cnt=readushort(f)
+ local hsh=hash
+ for i=2,cnt do
+ local c=readushort(f)
+ local h=hsh[c]
+ if not h then
+ h={}
+ hsh[c]=h
+ end
+ hsh=h
+ end
+ hsh.ligature=lig
+ end
+ coverage[index]=hash
+ end
+ return {
+ coverage=coverage
+ }
+ else
+ report("unsupported subtype %a in %a substitution",subtype,"ligature")
+ end
+end
+function gsubhandlers.context(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return unchainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,"substitution"),"context"
+end
+function gsubhandlers.chainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return chainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,"substitution"),"chainedcontext"
+end
+function gsubhandlers.extension(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return extension(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,gsubtypes,gsubhandlers,"substitution")
+end
+function gsubhandlers.reversechainedcontextsingle(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ local tableoffset=lookupoffset+offset
+ setposition(f,tableoffset)
+ local subtype=readushort(f)
+ if subtype==1 then
+ local current=readfirst(f)
+ local before=readarray(f)
+ local after=readarray(f)
+ local replacements=readarray(f)
+ current=readcoveragearray(f,tableoffset,current,true)
+ before=readcoveragearray(f,tableoffset,before,true)
+ after=readcoveragearray(f,tableoffset,after,true)
+ return {
+ coverage={
+ format="reversecoverage",
+ before=before,
+ current=current,
+ after=after,
+ replacements=replacements,
+ }
+ },"reversechainedcontextsingle"
+ else
+ report("unsupported subtype %a in %a substitution",subtype,"reversechainedcontextsingle")
+ end
+end
+local function readpairsets(f,tableoffset,sets,format1,format2)
+ local done={}
+ for i=1,#sets do
+ local offset=sets[i]
+ local reused=done[offset]
+ if not reused then
+ setposition(f,tableoffset+offset)
+ local n=readushort(f)
+ reused={}
+ for i=1,n do
+ reused[i]={
+ readushort(f),
+ readposition(f,format1),
+ readposition(f,format2)
+ }
+ end
+ done[offset]=reused
+ end
+ sets[i]=reused
end
- if notdeffound==-1 then
- report_otf("warning: no .notdef found in %a",filename)
- elseif notdeffound~=0 then
- report_otf("warning: .notdef found at position %a in %a",notdeffound,filename)
+ return sets
+end
+local function readpairclasssets(f,nofclasses1,nofclasses2,format1,format2)
+ local classlist1={}
+ for i=1,nofclasses1 do
+ local classlist2={}
+ classlist1[i]=classlist2
+ for j=1,nofclasses2 do
+ local one=readposition(f,format1)
+ local two=readposition(f,format2)
+ if one or two then
+ classlist2[j]={ one,two }
+ else
+ classlist2[j]=false
+ end
+ end
end
- metadata.notdef=notdeffound
- resources.private=private
+ return classlist1
end
-actions["check encoding"]=function(data,filename,raw)
- local descriptions=data.descriptions
- local resources=data.resources
- local properties=data.properties
- local unicodes=resources.unicodes
- local indices=resources.indices
- local duplicates=resources.duplicates
- local mapdata=raw.map or {}
- local unicodetoindex=mapdata and mapdata.map or {}
- local indextounicode=mapdata and mapdata.backmap or {}
- local encname=lower(data.enc_name or mapdata.enc_name or "")
- local criterium=0xFFFF
- local privateoffset=constructors.privateoffset
- if find(encname,"unicode") then
- if trace_loading then
- report_otf("checking embedded unicode map %a",encname)
+function gposhandlers.single(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ local tableoffset=lookupoffset+offset
+ setposition(f,tableoffset)
+ local subtype=readushort(f)
+ if subtype==1 then
+ local coverage=readushort(f)
+ local format=readushort(f)
+ local value=readposition(f,format)
+ local coverage=readcoverage(f,tableoffset+coverage)
+ for index,newindex in next,coverage do
+ coverage[index]=value
end
- local reported={}
- for maybeunicode,index in next,unicodetoindex do
- if descriptions[maybeunicode] then
+ return {
+ format="pair",
+ coverage=coverage
+ }
+ elseif subtype==2 then
+ local coverage=readushort(f)
+ local format=readushort(f)
+ local values={}
+ local nofvalues=readushort(f)
+ for i=1,nofvalues do
+ values[i]=readposition(f,format)
+ end
+ local coverage=readcoverage(f,tableoffset+coverage)
+ for index,newindex in next,coverage do
+ coverage[index]=values[newindex+1]
+ end
+ return {
+ format="pair",
+ coverage=coverage
+ }
+ else
+ report("unsupported subtype %a in %a positioning",subtype,"single")
+ end
+end
+function gposhandlers.pair(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ local tableoffset=lookupoffset+offset
+ setposition(f,tableoffset)
+ local subtype=readushort(f)
+ if subtype==1 then
+ local coverage=readushort(f)
+ local format1=readushort(f)
+ local format2=readushort(f)
+ local sets=readarray(f)
+ sets=readpairsets(f,tableoffset,sets,format1,format2)
+ coverage=readcoverage(f,tableoffset+coverage)
+ for index,newindex in next,coverage do
+ local set=sets[newindex+1]
+ local hash={}
+ for i=1,#set do
+ local value=set[i]
+ if value then
+ local other=value[1]
+ local first=value[2]
+ local second=value[3]
+ if first or second then
+ hash[other]={ first,second }
+ else
+ hash[other]=nil
+ end
+ end
+ end
+ coverage[index]=hash
+ end
+ return {
+ format="pair",
+ coverage=coverage
+ }
+ elseif subtype==2 then
+ local coverage=readushort(f)
+ local format1=readushort(f)
+ local format2=readushort(f)
+ local classdef1=readushort(f)
+ local classdef2=readushort(f)
+ local nofclasses1=readushort(f)
+ local nofclasses2=readushort(f)
+ local classlist=readpairclasssets(f,nofclasses1,nofclasses2,format1,format2)
+ coverage=readcoverage(f,tableoffset+coverage)
+ classdef1=readclassdef(f,tableoffset+classdef1)
+ classdef2=readclassdef(f,tableoffset+classdef2)
+ local usedcoverage={}
+ for g1,c1 in next,classdef1 do
+ if coverage[g1] then
+ local l1=classlist[c1]
+ if l1 then
+ local hash={}
+ for paired,class in next,classdef2 do
+ local offsets=l1[class]
+ if offsets then
+ local first=offsets[1]
+ local second=offsets[2]
+ if first or second then
+ hash[paired]={ first,second }
+ else
+ end
+ end
+ end
+ usedcoverage[g1]=hash
+ end
+ end
+ end
+ return {
+ format="pair",
+ coverage=usedcoverage
+ }
+ elseif subtype==3 then
+ report("yet unsupported subtype %a in %a positioning",subtype,"pair")
+ else
+ report("unsupported subtype %a in %a positioning",subtype,"pair")
+ end
+end
+function gposhandlers.cursive(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ local tableoffset=lookupoffset+offset
+ setposition(f,tableoffset)
+ local subtype=readushort(f)
+ if subtype==1 then
+ local coverage=tableoffset+readushort(f)
+ local nofrecords=readushort(f)
+ local records={}
+ for i=1,nofrecords do
+ local entry=readushort(f)
+ local exit=readushort(f)
+ records[i]={
+ entry=entry~=0 and (tableoffset+entry) or false,
+ exit=exit~=0 and (tableoffset+exit ) or false,
+ }
+ end
+ coverage=readcoverage(f,coverage)
+ for i=1,nofrecords do
+ local r=records[i]
+ records[i]={
+ 1,
+ readanchor(f,r.entry) or nil,
+ readanchor(f,r.exit ) or nil,
+ }
+ end
+ for index,newindex in next,coverage do
+ coverage[index]=records[newindex+1]
+ end
+ return {
+ coverage=coverage
+ }
+ else
+ report("unsupported subtype %a in %a positioning",subtype,"cursive")
+ end
+end
+local function handlemark(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,ligature)
+ local tableoffset=lookupoffset+offset
+ setposition(f,tableoffset)
+ local subtype=readushort(f)
+ if subtype==1 then
+ local markcoverage=tableoffset+readushort(f)
+ local basecoverage=tableoffset+readushort(f)
+ local nofclasses=readushort(f)
+ local markoffset=tableoffset+readushort(f)
+ local baseoffset=tableoffset+readushort(f)
+ local markcoverage=readcoverage(f,markcoverage)
+ local basecoverage=readcoverage(f,basecoverage,true)
+ setposition(f,markoffset)
+ local markclasses={}
+ local nofmarkclasses=readushort(f)
+ local lastanchor=fontdata.lastanchor or 0
+ local usedanchors={}
+ for i=1,nofmarkclasses do
+ local class=readushort(f)+1
+ local offset=readushort(f)
+ if offset==0 then
+ markclasses[i]=false
else
- local unicode=indices[index]
- if not unicode then
- elseif maybeunicode==unicode then
- elseif unicode>privateoffset then
+ markclasses[i]={ class,markoffset+offset }
+ end
+ usedanchors[class]=true
+ end
+ for i=1,nofmarkclasses do
+ local mc=markclasses[i]
+ if mc then
+ mc[2]=readanchor(f,mc[2])
+ end
+ end
+ setposition(f,baseoffset)
+ local nofbaserecords=readushort(f)
+ local baserecords={}
+ if ligature then
+ for i=1,nofbaserecords do
+ local offset=readushort(f)
+ if offset==0 then
+ baserecords[i]=false
else
- local d=descriptions[unicode]
- if d then
- local c=d.copies
- if c then
- c[maybeunicode]=true
- else
- d.copies={ [maybeunicode]=true }
+ baserecords[i]=baseoffset+offset
+ end
+ end
+ for i=1,nofbaserecords do
+ local recordoffset=baserecords[i]
+ if recordoffset then
+ setposition(f,recordoffset)
+ local nofcomponents=readushort(f)
+ local components={}
+ for i=1,nofcomponents do
+ local classes={}
+ for i=1,nofclasses do
+ local offset=readushort(f)
+ if offset~=0 then
+ classes[i]=recordoffset+offset
+ else
+ classes[i]=false
+ end
end
- elseif index and not reported[index] then
- report_otf("missing index %i",index)
- reported[index]=true
+ components[i]=classes
+ end
+ baserecords[i]=components
+ end
+ end
+ local baseclasses={}
+ for i=1,nofclasses do
+ baseclasses[i]={}
+ end
+ for i=1,nofbaserecords do
+ local components=baserecords[i]
+ local b=basecoverage[i]
+ if components then
+ for c=1,#components do
+ local classes=components[c]
+ if classes then
+ for i=1,nofclasses do
+ local anchor=readanchor(f,classes[i])
+ local bclass=baseclasses[i]
+ local bentry=bclass[b]
+ if bentry then
+ bentry[c]=anchor
+ else
+ bclass[b]={ [c]=anchor }
+ end
+ end
+ end
+ components[c]=classes
end
end
end
- end
- for unicode,data in next,descriptions do
- local d=data.copies
- if d then
- duplicates[unicode]=sortedkeys(d)
- data.copies=nil
+ for index,newindex in next,markcoverage do
+ markcoverage[index]=markclasses[newindex+1] or nil
end
+ return {
+ format="ligature",
+ baseclasses=baseclasses,
+ coverage=markcoverage,
+ }
+ else
+ for i=1,nofbaserecords do
+ local r={}
+ for j=1,nofclasses do
+ local offset=readushort(f)
+ if offset==0 then
+ r[j]=false
+ else
+ r[j]=baseoffset+offset
+ end
+ end
+ baserecords[i]=r
+ end
+ local baseclasses={}
+ for i=1,nofclasses do
+ baseclasses[i]={}
+ end
+ for i=1,nofbaserecords do
+ local r=baserecords[i]
+ local b=basecoverage[i]
+ for j=1,nofclasses do
+ baseclasses[j][b]=readanchor(f,r[j])
+ end
+ end
+ for index,newindex in next,markcoverage do
+ markcoverage[index]=markclasses[newindex+1] or nil
+ end
+ return {
+ format="base",
+ baseclasses=baseclasses,
+ coverage=markcoverage,
+ }
end
- elseif properties.cidinfo then
- report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
else
- report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever")
- end
- if mapdata then
- mapdata.map={}
- mapdata.backmap={}
+ report("unsupported subtype %a in %a positioning",subtype,"mark")
end
end
-actions["add duplicates"]=function(data,filename,raw)
- local descriptions=data.descriptions
- local resources=data.resources
- local properties=data.properties
- local unicodes=resources.unicodes
- local indices=resources.indices
- local duplicates=resources.duplicates
- for unicode,d in table.sortedhash(duplicates) do
- local nofduplicates=#d
- if nofduplicates>4 then
- if trace_loading then
- report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates)
+function gposhandlers.marktobase(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return handlemark(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+end
+function gposhandlers.marktoligature(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return handlemark(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,true)
+end
+function gposhandlers.marktomark(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return handlemark(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+end
+function gposhandlers.context(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return unchainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,"positioning"),"context"
+end
+function gposhandlers.chainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return chainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,"positioning"),"chainedcontext"
+end
+function gposhandlers.extension(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs)
+ return extension(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,gpostypes,gposhandlers,"positioning")
+end
+do
+ local plugins={}
+ function plugins.size(f,fontdata,tableoffset,parameters)
+ if not fontdata.designsize then
+ setposition(f,tableoffset+parameters)
+ local designsize=readushort(f)
+ if designsize>0 then
+ fontdata.designsize=designsize
+ skipshort(f,2)
+ fontdata.minsize=readushort(f)
+ fontdata.maxsize=readushort(f)
+ end
+ end
+ end
+ local function reorderfeatures(fontdata,scripts,features)
+ local scriptlangs={}
+ local featurehash={}
+ local featureorder={}
+ for script,languages in next,scripts do
+ for language,record in next,languages do
+ local hash={}
+ local list=record.featureindices
+ for k=1,#list do
+ local index=list[k]
+ local feature=features[index]
+ local lookups=feature.lookups
+ local tag=feature.tag
+ if tag then
+ hash[tag]=true
+ end
+ if lookups then
+ for i=1,#lookups do
+ local lookup=lookups[i]
+ local o=featureorder[lookup]
+ if o then
+ local okay=true
+ for i=1,#o do
+ if o[i]==tag then
+ okay=false
+ break
+ end
+ end
+ if okay then
+ o[#o+1]=tag
+ end
+ else
+ featureorder[lookup]={ tag }
+ end
+ local f=featurehash[lookup]
+ if f then
+ local h=f[tag]
+ if h then
+ local s=h[script]
+ if s then
+ s[language]=true
+ else
+ h[script]={ [language]=true }
+ end
+ else
+ f[tag]={ [script]={ [language]=true } }
+ end
+ else
+ featurehash[lookup]={ [tag]={ [script]={ [language]=true } } }
+ end
+ local h=scriptlangs[tag]
+ if h then
+ local s=h[script]
+ if s then
+ s[language]=true
+ else
+ h[script]={ [language]=true }
+ end
+ else
+ scriptlangs[tag]={ [script]={ [language]=true } }
+ end
+ end
+ end
+ end
end
- else
- for i=1,nofduplicates do
- local u=d[i]
- if not descriptions[u] then
- local description=descriptions[unicode]
- local n=0
- for _,description in next,descriptions do
- local kerns=description.kerns
- if kerns then
- for _,k in next,kerns do
- local ku=k[unicode]
- if ku then
- k[u]=ku
- n=n+1
+ end
+ return scriptlangs,featurehash,featureorder
+ end
+ local function readscriplan(f,fontdata,scriptoffset)
+ setposition(f,scriptoffset)
+ local nofscripts=readushort(f)
+ local scripts={}
+ for i=1,nofscripts do
+ scripts[readtag(f)]=scriptoffset+readushort(f)
+ end
+ local languagesystems=setmetatableindex("table")
+ for script,offset in next,scripts do
+ setposition(f,offset)
+ local defaultoffset=readushort(f)
+ local noflanguages=readushort(f)
+ local languages={}
+ if defaultoffset>0 then
+ languages.dflt=languagesystems[offset+defaultoffset]
+ end
+ for i=1,noflanguages do
+ local language=readtag(f)
+ local offset=offset+readushort(f)
+ languages[language]=languagesystems[offset]
+ end
+ scripts[script]=languages
+ end
+ for offset,usedfeatures in next,languagesystems do
+ if offset>0 then
+ setposition(f,offset)
+ local featureindices={}
+ usedfeatures.featureindices=featureindices
+ usedfeatures.lookuporder=readushort(f)
+ usedfeatures.requiredindex=readushort(f)
+ local noffeatures=readushort(f)
+ for i=1,noffeatures do
+ featureindices[i]=readushort(f)+1
+ end
+ end
+ end
+ return scripts
+ end
+ local function readfeatures(f,fontdata,featureoffset)
+ setposition(f,featureoffset)
+ local features={}
+ local noffeatures=readushort(f)
+ for i=1,noffeatures do
+ features[i]={
+ tag=readtag(f),
+ offset=readushort(f)
+ }
+ end
+ for i=1,noffeatures do
+ local feature=features[i]
+ local offset=featureoffset+feature.offset
+ setposition(f,offset)
+ local parameters=readushort(f)
+ local noflookups=readushort(f)
+ if noflookups>0 then
+ local lookups={}
+ feature.lookups=lookups
+ for j=1,noflookups do
+ lookups[j]=readushort(f)+1
+ end
+ end
+ if parameters>0 then
+ feature.parameters=parameters
+ local plugin=plugins[feature.tag]
+ if plugin then
+ plugin(f,fontdata,offset,parameters)
+ end
+ end
+ end
+ return features
+ end
+ local function readlookups(f,lookupoffset,lookuptypes,featurehash,featureorder)
+ setposition(f,lookupoffset)
+ local lookups={}
+ local noflookups=readushort(f)
+ for i=1,noflookups do
+ lookups[i]=readushort(f)
+ end
+ for lookupid=1,noflookups do
+ local index=lookups[lookupid]
+ setposition(f,lookupoffset+index)
+ local subtables={}
+ local typebits=readushort(f)
+ local flagbits=readushort(f)
+ local lookuptype=lookuptypes[typebits]
+ local lookupflags=lookupflags[flagbits]
+ local nofsubtables=readushort(f)
+ for j=1,nofsubtables do
+ local offset=readushort(f)
+ subtables[j]=offset+index
+ end
+ local markclass=bittest(flagbits,0x0010)
+ if markclass then
+ markclass=readushort(f)
+ end
+ local markset=rshift(flagbits,8)
+ if markset>0 then
+ markclass=markset
+ end
+ lookups[lookupid]={
+ type=lookuptype,
+ flags=lookupflags,
+ name=lookupid,
+ subtables=subtables,
+ markclass=markclass,
+ features=featurehash[lookupid],
+ order=featureorder[lookupid],
+ }
+ end
+ return lookups
+ end
+ local function readscriptoffsets(f,fontdata,tableoffset,what)
+ if not tableoffset then
+ return
+ end
+ setposition(f,tableoffset)
+ local version=readulong(f)
+ if version~=0x00010000 then
+ report("table version %a of %a is not supported (yet), maybe font %s is bad",version,what,fontdata.filename)
+ return
+ end
+ return tableoffset+readushort(f),tableoffset+readushort(f),tableoffset+readushort(f)
+ end
+ local f_lookupname=formatters["%s_%s_%s"]
+ local function resolvelookups(f,lookupoffset,fontdata,lookups,lookuptypes,lookuphandlers,what)
+ local sequences=fontdata.sequences or {}
+ local sublookuplist=fontdata.sublookups or {}
+ fontdata.sequences=sequences
+ fontdata.sublookups=sublookuplist
+ local nofsublookups=#sublookuplist
+ local nofsequences=#sequences
+ local lastsublookup=nofsublookups
+ local lastsequence=nofsequences
+ local lookupnames=lookupnames[what]
+ local sublookuphash={}
+ local sublookupcheck={}
+ local glyphs=fontdata.glyphs
+ local nofglyphs=fontdata.nofglyphs or #glyphs
+ local noflookups=#lookups
+ local lookupprefix=sub(what,2,2)
+ for lookupid=1,noflookups do
+ local lookup=lookups[lookupid]
+ local lookuptype=lookup.type
+ local subtables=lookup.subtables
+ local features=lookup.features
+ local handler=lookuphandlers[lookuptype]
+ if handler then
+ local nofsubtables=#subtables
+ local order=lookup.order
+ local flags=lookup.flags
+ if flags[1] then flags[1]="mark" end
+ if flags[2] then flags[2]="ligature" end
+ if flags[3] then flags[3]="base" end
+ local markclass=lookup.markclass
+ if nofsubtables>0 then
+ local steps={}
+ local nofsteps=0
+ local oldtype=nil
+ for s=1,nofsubtables do
+ local step,lt=handler(f,fontdata,lookupid,lookupoffset,subtables[s],glyphs,nofglyphs)
+ if lt then
+ lookuptype=lt
+ if oldtype and lt~=oldtype then
+ report("messy %s lookup type %a and %a",what,lookuptype,oldtype)
+ end
+ oldtype=lookuptype
+ end
+ if not step then
+ report("unsupported %s lookup type %a",what,lookuptype)
+ else
+ nofsteps=nofsteps+1
+ steps[nofsteps]=step
+ local rules=step.rules
+ if rules then
+ for i=1,#rules do
+ local rule=rules[i]
+ local before=rule.before
+ local current=rule.current
+ local after=rule.after
+ if before then
+ for i=1,#before do
+ before[i]=tohash(before[i])
+ end
+ rule.before=reversed(before)
+ end
+ if current then
+ for i=1,#current do
+ current[i]=tohash(current[i])
+ end
+ end
+ if after then
+ for i=1,#after do
+ after[i]=tohash(after[i])
+ end
+ end
end
end
end
end
- if u>0 then
- local duplicate=copy(description)
- duplicate.comment=formatters["copy of %U"](unicode)
- descriptions[u]=duplicate
- if trace_loading then
- report_otf("duplicating %06U to %06U with index %H (%s kerns)",unicode,u,description.index,n)
+ if nofsteps~=nofsubtables then
+ report("bogus subtables removed in %s lookup type %a",what,lookuptype)
+ end
+ lookuptype=lookupnames[lookuptype] or lookuptype
+ if features then
+ nofsequences=nofsequences+1
+ local l={
+ index=nofsequences,
+ name=f_lookupname(lookupprefix,"s",lookupid+lookupidoffset),
+ steps=steps,
+ nofsteps=nofsteps,
+ type=lookuptype,
+ markclass=markclass or nil,
+ flags=flags,
+ order=order,
+ features=features,
+ }
+ sequences[nofsequences]=l
+ lookup.done=l
+ else
+ nofsublookups=nofsublookups+1
+ local l={
+ index=nofsublookups,
+ name=f_lookupname(lookupprefix,"l",lookupid+lookupidoffset),
+ steps=steps,
+ nofsteps=nofsteps,
+ type=lookuptype,
+ markclass=markclass or nil,
+ flags=flags,
+ }
+ sublookuplist[nofsublookups]=l
+ sublookuphash[lookupid]=nofsublookups
+ sublookupcheck[lookupid]=0
+ lookup.done=l
+ end
+ else
+ report("no subtables for lookup %a",lookupid)
+ end
+ else
+ report("no handler for lookup %a with type %a",lookupid,lookuptype)
+ end
+ end
+ local reported={}
+ for i=lastsequence+1,nofsequences do
+ local sequence=sequences[i]
+ local steps=sequence.steps
+ for i=1,#steps do
+ local step=steps[i]
+ local rules=step.rules
+ if rules then
+ for i=1,#rules do
+ local rule=rules[i]
+ local rlookups=rule.lookups
+ if not rlookups then
+ local name=sequence.name
+ if not reported[name] then
+ report("rule %i in %s lookup %a has %s lookups",i,what,name,"no")
+ reported[name]=true
+ end
+ elseif not next(rlookups) then
+ local name=sequence.name
+ if not reported[name] then
+ report("rule %i in %s lookup %a has %s lookups",i,what,name,"empty")
+ reported[name]=true
+ end
+ rule.lookups=nil
+ else
+ for index,lookupid in sortedhash(rlookups) do
+ local h=sublookuphash[lookupid]
+ if not h then
+ nofsublookups=nofsublookups+1
+ local d=lookups[lookupid].done
+ h={
+ index=nofsublookups,
+ name=f_lookupname(lookupprefix,"d",lookupid+lookupidoffset),
+ derived=true,
+ steps=d.steps,
+ nofsteps=d.nofsteps,
+ type=d.type,
+ markclass=d.markclass or nil,
+ flags=d.flags,
+ }
+ sublookuplist[nofsublookups]=h
+ sublookuphash[lookupid]=nofsublookups
+ sublookupcheck[lookupid]=1
+ else
+ sublookupcheck[lookupid]=sublookupcheck[lookupid]+1
+ end
+ rlookups[index]=h
+ end
end
end
end
end
end
+ for i,n in sortedhash(sublookupcheck) do
+ local l=lookups[i]
+ local t=l.type
+ if n==0 and t~="extension" then
+ local d=l.done
+ report("%s lookup %s of type %a is not used",what,d and d.name or l.name,t)
+ end
+ end
end
-end
-actions["analyze glyphs"]=function(data,filename,raw)
- local descriptions=data.descriptions
- local resources=data.resources
- local metadata=data.metadata
- local properties=data.properties
- local hasitalics=false
- local widths={}
- local marks={}
- for unicode,description in next,descriptions do
- local glyph=description.glyph
- local italic=glyph.italic_correction
- if not italic then
- elseif italic==0 then
+ local function readscripts(f,fontdata,what,lookuptypes,lookuphandlers,lookupstoo)
+ local datatable=fontdata.tables[what]
+ if not datatable then
+ return
+ end
+ local tableoffset=datatable.offset
+ if not tableoffset then
+ return
+ end
+ local scriptoffset,featureoffset,lookupoffset=readscriptoffsets(f,fontdata,tableoffset,what)
+ if not scriptoffset then
+ return
+ end
+ local scripts=readscriplan(f,fontdata,scriptoffset)
+ local features=readfeatures(f,fontdata,featureoffset)
+ local scriptlangs,featurehash,featureorder=reorderfeatures(fontdata,scripts,features)
+ if fontdata.features then
+ fontdata.features[what]=scriptlangs
else
- description.italic=italic
- hasitalics=true
+ fontdata.features={ [what]=scriptlangs }
end
- local width=glyph.width
- widths[width]=(widths[width] or 0)+1
- local class=glyph.class
- if class then
- if class=="mark" then
- marks[unicode]=true
- end
- description.class=class
+ if not lookupstoo then
+ return
end
- end
- properties.hasitalics=hasitalics
- resources.marks=marks
- local wd,most=0,1
- for k,v in next,widths do
- if v>most then
- wd,most=k,v
+ local lookups=readlookups(f,lookupoffset,lookuptypes,featurehash,featureorder)
+ if lookups then
+ resolvelookups(f,lookupoffset,fontdata,lookups,lookuptypes,lookuphandlers,what)
end
end
- if most>1000 then
- if trace_loading then
- report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most)
+ local function checkkerns(f,fontdata,specification)
+ local datatable=fontdata.tables.kern
+ if not datatable then
+ return
end
- for unicode,description in next,descriptions do
- if description.width==wd then
+ local features=fontdata.features
+ local gposfeatures=features and features.gpos
+ local name
+ if not gposfeatures or not gposfeatures.kern then
+ name="kern"
+ elseif specification.globalkerns then
+ name="globalkern"
+ else
+ report("ignoring global kern table using gpos kern feature")
+ return
+ end
+ report("adding global kern table as gpos feature %a",name)
+ setposition(f,datatable.offset)
+ local version=readushort(f)
+ local noftables=readushort(f)
+ local kerns=setmetatableindex("table")
+ for i=1,noftables do
+ local version=readushort(f)
+ local length=readushort(f)
+ local coverage=readushort(f)
+ local format=bit32.rshift(coverage,8)
+ if format==0 then
+ local nofpairs=readushort(f)
+ local searchrange=readushort(f)
+ local entryselector=readushort(f)
+ local rangeshift=readushort(f)
+ for i=1,nofpairs do
+ kerns[readushort(f)][readushort(f)]=readfword(f)
+ end
+ elseif format==2 then
else
- description.width=description.glyph.width
end
end
- resources.defaultwidth=wd
- else
- for unicode,description in next,descriptions do
- description.width=description.glyph.width
+ local feature={ dflt={ dflt=true } }
+ if not features then
+ fontdata.features={ gpos={ [name]=feature } }
+ elseif not gposfeatures then
+ fontdata.features.gpos={ [name]=feature }
+ else
+ gposfeatures[name]=feature
+ end
+ local sequences=fontdata.sequences
+ if not sequences then
+ sequences={}
+ fontdata.sequences=sequences
end
+ local nofsequences=#sequences+1
+ sequences[nofsequences]={
+ index=nofsequences,
+ name=name,
+ steps={
+ {
+ coverage=kerns,
+ format="kern",
+ },
+ },
+ nofsteps=1,
+ type="gpos_pair",
+ flags={ false,false,false,false },
+ order={ name },
+ features={ [name]=feature },
+ }
end
-end
-actions["reorganize mark classes"]=function(data,filename,raw)
- local mark_classes=raw.mark_classes
- if mark_classes then
- local resources=data.resources
- local unicodes=resources.unicodes
- local markclasses={}
- resources.markclasses=markclasses
- for name,class in next,mark_classes do
- local t={}
- for s in gmatch(class,"[^ ]+") do
- t[unicodes[s]]=true
- end
- markclasses[name]=t
- end
- end
-end
-actions["reorganize features"]=function(data,filename,raw)
- local features={}
- data.resources.features=features
- for k=1,#otf.glists do
- local what=otf.glists[k]
- local dw=raw[what]
- if dw then
- local f={}
- features[what]=f
- for i=1,#dw do
- local d=dw[i]
- local dfeatures=d.features
- if dfeatures then
- for i=1,#dfeatures do
- local df=dfeatures[i]
- local tag=strip(lower(df.tag))
- local ft=f[tag]
- if not ft then
- ft={}
- f[tag]=ft
- end
- local dscripts=df.scripts
- for i=1,#dscripts do
- local d=dscripts[i]
- local languages=d.langs
- local script=strip(lower(d.script))
- local fts=ft[script] if not fts then fts={} ft[script]=fts end
- for i=1,#languages do
- fts[strip(lower(languages[i]))]=true
+ function readers.gsub(f,fontdata,specification)
+ if specification.details then
+ readscripts(f,fontdata,"gsub",gsubtypes,gsubhandlers,specification.lookups)
+ end
+ end
+ function readers.gpos(f,fontdata,specification)
+ if specification.details then
+ readscripts(f,fontdata,"gpos",gpostypes,gposhandlers,specification.lookups)
+ if specification.lookups then
+ checkkerns(f,fontdata,specification)
+ end
+ end
+ end
+end
+function readers.gdef(f,fontdata,specification)
+ if specification.glyphs then
+ local datatable=fontdata.tables.gdef
+ if datatable then
+ local tableoffset=datatable.offset
+ setposition(f,tableoffset)
+ local version=readulong(f)
+ local classoffset=tableoffset+readushort(f)
+ local attachmentoffset=tableoffset+readushort(f)
+ local ligaturecarets=tableoffset+readushort(f)
+ local markclassoffset=tableoffset+readushort(f)
+ local marksetsoffset=version==0x00010002 and (tableoffset+readushort(f))
+ local glyphs=fontdata.glyphs
+ local marks={}
+ local markclasses=setmetatableindex("table")
+ local marksets=setmetatableindex("table")
+ fontdata.marks=marks
+ fontdata.markclasses=markclasses
+ fontdata.marksets=marksets
+ setposition(f,classoffset)
+ local classformat=readushort(f)
+ if classformat==1 then
+ local firstindex=readushort(f)
+ local lastindex=firstindex+readushort(f)-1
+ for index=firstindex,lastindex do
+ local class=classes[readushort(f)]
+ if class=="mark" then
+ marks[index]=true
+ end
+ glyphs[index].class=class
+ end
+ elseif classformat==2 then
+ local nofranges=readushort(f)
+ for i=1,nofranges do
+ local firstindex=readushort(f)
+ local lastindex=readushort(f)
+ local class=classes[readushort(f)]
+ if class then
+ for index=firstindex,lastindex do
+ glyphs[index].class=class
+ if class=="mark" then
+ marks[index]=true
end
end
end
end
end
+ setposition(f,markclassoffset)
+ local classformat=readushort(f)
+ if classformat==1 then
+ local firstindex=readushort(f)
+ local lastindex=firstindex+readushort(f)-1
+ for index=firstindex,lastindex do
+ markclasses[readushort(f)][index]=true
+ end
+ elseif classformat==2 then
+ local nofranges=readushort(f)
+ for i=1,nofranges do
+ local firstindex=readushort(f)
+ local lastindex=readushort(f)
+ local class=markclasses[readushort(f)]
+ for index=firstindex,lastindex do
+ class[index]=true
+ end
+ end
+ end
+ if marksetsoffset then
+ setposition(f,marksetsoffset)
+ local format=readushort(f)
+ if format==1 then
+ local nofsets=readushort(f)
+ local sets={}
+ for i=1,nofsets do
+ sets[i]=readulong(f)
+ end
+ for i=1,nofsets do
+ local offset=sets[i]
+ if offset~=0 then
+ marksets[i]=readcoverage(f,marksetsoffset+offset)
+ end
+ end
+ end
+ end
end
end
end
-actions["reorganize anchor classes"]=function(data,filename,raw)
- local resources=data.resources
- local anchor_to_lookup={}
- local lookup_to_anchor={}
- resources.anchor_to_lookup=anchor_to_lookup
- resources.lookup_to_anchor=lookup_to_anchor
- local classes=raw.anchor_classes
- if classes then
- for c=1,#classes do
- local class=classes[c]
- local anchor=class.name
- local lookups=class.lookup
- if type(lookups)~="table" then
- lookups={ lookups }
- end
- local a=anchor_to_lookup[anchor]
- if not a then
- a={}
- anchor_to_lookup[anchor]=a
- end
- for l=1,#lookups do
- local lookup=lookups[l]
- local l=lookup_to_anchor[lookup]
- if l then
- l[anchor]=true
+local function readmathvalue(f)
+ local v=readshort(f)
+ skipshort(f,1)
+ return v
+end
+local function readmathconstants(f,fontdata,offset)
+ setposition(f,offset)
+ fontdata.mathconstants={
+ ScriptPercentScaleDown=readshort(f),
+ ScriptScriptPercentScaleDown=readshort(f),
+ DelimitedSubFormulaMinHeight=readushort(f),
+ DisplayOperatorMinHeight=readushort(f),
+ MathLeading=readmathvalue(f),
+ AxisHeight=readmathvalue(f),
+ AccentBaseHeight=readmathvalue(f),
+ FlattenedAccentBaseHeight=readmathvalue(f),
+ SubscriptShiftDown=readmathvalue(f),
+ SubscriptTopMax=readmathvalue(f),
+ SubscriptBaselineDropMin=readmathvalue(f),
+ SuperscriptShiftUp=readmathvalue(f),
+ SuperscriptShiftUpCramped=readmathvalue(f),
+ SuperscriptBottomMin=readmathvalue(f),
+ SuperscriptBaselineDropMax=readmathvalue(f),
+ SubSuperscriptGapMin=readmathvalue(f),
+ SuperscriptBottomMaxWithSubscript=readmathvalue(f),
+ SpaceAfterScript=readmathvalue(f),
+ UpperLimitGapMin=readmathvalue(f),
+ UpperLimitBaselineRiseMin=readmathvalue(f),
+ LowerLimitGapMin=readmathvalue(f),
+ LowerLimitBaselineDropMin=readmathvalue(f),
+ StackTopShiftUp=readmathvalue(f),
+ StackTopDisplayStyleShiftUp=readmathvalue(f),
+ StackBottomShiftDown=readmathvalue(f),
+ StackBottomDisplayStyleShiftDown=readmathvalue(f),
+ StackGapMin=readmathvalue(f),
+ StackDisplayStyleGapMin=readmathvalue(f),
+ StretchStackTopShiftUp=readmathvalue(f),
+ StretchStackBottomShiftDown=readmathvalue(f),
+ StretchStackGapAboveMin=readmathvalue(f),
+ StretchStackGapBelowMin=readmathvalue(f),
+ FractionNumeratorShiftUp=readmathvalue(f),
+ FractionNumeratorDisplayStyleShiftUp=readmathvalue(f),
+ FractionDenominatorShiftDown=readmathvalue(f),
+ FractionDenominatorDisplayStyleShiftDown=readmathvalue(f),
+ FractionNumeratorGapMin=readmathvalue(f),
+ FractionNumeratorDisplayStyleGapMin=readmathvalue(f),
+ FractionRuleThickness=readmathvalue(f),
+ FractionDenominatorGapMin=readmathvalue(f),
+ FractionDenominatorDisplayStyleGapMin=readmathvalue(f),
+ SkewedFractionHorizontalGap=readmathvalue(f),
+ SkewedFractionVerticalGap=readmathvalue(f),
+ OverbarVerticalGap=readmathvalue(f),
+ OverbarRuleThickness=readmathvalue(f),
+ OverbarExtraAscender=readmathvalue(f),
+ UnderbarVerticalGap=readmathvalue(f),
+ UnderbarRuleThickness=readmathvalue(f),
+ UnderbarExtraDescender=readmathvalue(f),
+ RadicalVerticalGap=readmathvalue(f),
+ RadicalDisplayStyleVerticalGap=readmathvalue(f),
+ RadicalRuleThickness=readmathvalue(f),
+ RadicalExtraAscender=readmathvalue(f),
+ RadicalKernBeforeDegree=readmathvalue(f),
+ RadicalKernAfterDegree=readmathvalue(f),
+ RadicalDegreeBottomRaisePercent=readshort(f),
+ }
+end
+local function readmathglyphinfo(f,fontdata,offset)
+ setposition(f,offset)
+ local italics=readushort(f)
+ local accents=readushort(f)
+ local extensions=readushort(f)
+ local kerns=readushort(f)
+ local glyphs=fontdata.glyphs
+ if italics~=0 then
+ setposition(f,offset+italics)
+ local coverage=readushort(f)
+ local nofglyphs=readushort(f)
+ coverage=readcoverage(f,offset+italics+coverage,true)
+ setposition(f,offset+italics+4)
+ for i=1,nofglyphs do
+ local italic=readmathvalue(f)
+ if italic~=0 then
+ local glyph=glyphs[coverage[i]]
+ local math=glyph.math
+ if not math then
+ glyph.math={ italic=italic }
else
- l={ [anchor]=true }
- lookup_to_anchor[lookup]=l
+ math.italic=italic
end
- a[lookup]=true
end
end
+ fontdata.hasitalics=true
end
-end
-actions["prepare tounicode"]=function(data,filename,raw)
- fonts.mappings.addtounicode(data,filename)
-end
-local g_directions={
- gsub_contextchain=1,
- gpos_contextchain=1,
- gsub_reversecontextchain=-1,
- gpos_reversecontextchain=-1,
-}
-actions["reorganize subtables"]=function(data,filename,raw)
- local resources=data.resources
- local sequences={}
- local lookups={}
- local chainedfeatures={}
- resources.sequences=sequences
- resources.lookups=lookups
- for k=1,#otf.glists do
- local what=otf.glists[k]
- local dw=raw[what]
- if dw then
- for k=1,#dw do
- local gk=dw[k]
- local features=gk.features
- local typ=gk.type
- local chain=g_directions[typ] or 0
- local subtables=gk.subtables
- if subtables then
- local t={}
- for s=1,#subtables do
- t[s]=subtables[s].name
- end
- subtables=t
+ if accents~=0 then
+ setposition(f,offset+accents)
+ local coverage=readushort(f)
+ local nofglyphs=readushort(f)
+ coverage=readcoverage(f,offset+accents+coverage,true)
+ setposition(f,offset+accents+4)
+ for i=1,nofglyphs do
+ local accent=readmathvalue(f)
+ if accent~=0 then
+ local glyph=glyphs[coverage[i]]
+ local math=glyph.math
+ if not math then
+ glyph.math={ accent=accent }
+ else
+ math.accent=accent
+ end
+ end
+ end
+ end
+ if extensions~=0 then
+ setposition(f,offset+extensions)
+ end
+ if kerns~=0 then
+ local kernoffset=offset+kerns
+ setposition(f,kernoffset)
+ local coverage=readushort(f)
+ local nofglyphs=readushort(f)
+ if nofglyphs>0 then
+ local function get(offset)
+ setposition(f,kernoffset+offset)
+ local n=readushort(f)
+ if n>0 then
+ local l={}
+ for i=1,n do
+ l[i]={ height=readmathvalue(f) }
end
- local flags,markclass=gk.flags,nil
- if flags then
- local t={
- (flags.ignorecombiningmarks and "mark") or false,
- (flags.ignoreligatures and "ligature") or false,
- (flags.ignorebaseglyphs and "base") or false,
- flags.r2l or false,
- }
- markclass=flags.mark_class
- if markclass then
- markclass=resources.markclasses[markclass]
+ for i=1,n do
+ l[i].kern=readmathvalue(f)
+ end
+ l[n+1]={ kern=readmathvalue(f) }
+ return l
+ end
+ end
+ local kernsets={}
+ for i=1,nofglyphs do
+ local topright=readushort(f)
+ local topleft=readushort(f)
+ local bottomright=readushort(f)
+ local bottomleft=readushort(f)
+ kernsets[i]={
+ topright=topright~=0 and topright or nil,
+ topleft=topleft~=0 and topleft or nil,
+ bottomright=bottomright~=0 and bottomright or nil,
+ bottomleft=bottomleft~=0 and bottomleft or nil,
+ }
+ end
+ coverage=readcoverage(f,kernoffset+coverage,true)
+ for i=1,nofglyphs do
+ local kernset=kernsets[i]
+ if next(kernset) then
+ local k=kernset.topright if k then kernset.topright=get(k) end
+ local k=kernset.topleft if k then kernset.topleft=get(k) end
+ local k=kernset.bottomright if k then kernset.bottomright=get(k) end
+ local k=kernset.bottomleft if k then kernset.bottomleft=get(k) end
+ if next(kernset) then
+ local glyph=glyphs[coverage[i]]
+ local math=glyph.math
+ if not math then
+ glyph.math={ kerns=kernset }
+ else
+ math.kerns=kernset
end
- flags=t
- end
- local name=gk.name
- if not name then
- report_otf("skipping weird lookup number %s",k)
- elseif features then
- local f={}
- local o={}
- for i=1,#features do
- local df=features[i]
- local tag=strip(lower(df.tag))
- local ft=f[tag]
- if not ft then
- ft={}
- f[tag]=ft
- o[#o+1]=tag
+ end
+ end
+ end
+ end
+ end
+end
+local function readmathvariants(f,fontdata,offset)
+ setposition(f,offset)
+ local glyphs=fontdata.glyphs
+ local minoverlap=readushort(f)
+ local vcoverage=readushort(f)
+ local hcoverage=readushort(f)
+ local vnofglyphs=readushort(f)
+ local hnofglyphs=readushort(f)
+ local vconstruction={}
+ local hconstruction={}
+ for i=1,vnofglyphs do
+ vconstruction[i]=readushort(f)
+ end
+ for i=1,hnofglyphs do
+ hconstruction[i]=readushort(f)
+ end
+ fontdata.mathconstants.MinConnectorOverlap=minoverlap
+ local function get(offset,coverage,nofglyphs,construction,kvariants,kparts,kitalic)
+ if coverage~=0 and nofglyphs>0 then
+ local coverage=readcoverage(f,offset+coverage,true)
+ for i=1,nofglyphs do
+ local c=construction[i]
+ if c~=0 then
+ local index=coverage[i]
+ local glyph=glyphs[index]
+ local math=glyph.math
+ setposition(f,offset+c)
+ local assembly=readushort(f)
+ local nofvariants=readushort(f)
+ if nofvariants>0 then
+ local variants,v=nil,0
+ for i=1,nofvariants do
+ local variant=readushort(f)
+ if variant==index then
+ elseif variants then
+ v=v+1
+ variants[v]=variant
+ else
+ v=1
+ variants={ variant }
end
- local dscripts=df.scripts
- for i=1,#dscripts do
- local d=dscripts[i]
- local languages=d.langs
- local script=strip(lower(d.script))
- local fts=ft[script] if not fts then fts={} ft[script]=fts end
- for i=1,#languages do
- fts[strip(lower(languages[i]))]=true
- end
+ skipshort(f)
+ end
+ if not variants then
+ elseif not math then
+ math={ [kvariants]=variants }
+ glyph.math=math
+ else
+ math[kvariants]=variants
+ end
+ end
+ if assembly~=0 then
+ setposition(f,offset+c+assembly)
+ local italic=readmathvalue(f)
+ local nofparts=readushort(f)
+ local parts={}
+ for i=1,nofparts do
+ local p={
+ glyph=readushort(f),
+ start=readushort(f),
+ ["end"]=readushort(f),
+ advance=readushort(f),
+ }
+ local flags=readushort(f)
+ if bittest(flags,0x0001) then
+ p.extender=1
end
+ parts[i]=p
+ end
+ if not math then
+ math={
+ [kparts]=parts
+ }
+ glyph.math=math
+ else
+ math[kparts]=parts
+ end
+ if italic and italic~=0 then
+ math[kitalic]=italic
end
- sequences[#sequences+1]={
- type=typ,
- chain=chain,
- flags=flags,
- name=name,
- subtables=subtables,
- markclass=markclass,
- features=f,
- order=o,
- }
- else
- lookups[name]={
- type=typ,
- chain=chain,
- flags=flags,
- subtables=subtables,
- markclass=markclass,
- }
end
+ end
end
end
end
+ get(offset,vcoverage,vnofglyphs,vconstruction,"vvariants","vparts","vitalic")
+ get(offset,hcoverage,hnofglyphs,hconstruction,"hvariants","hparts","hitalic")
end
-actions["prepare lookups"]=function(data,filename,raw)
- local lookups=raw.lookups
- if lookups then
- data.lookups=lookups
+function readers.math(f,fontdata,specification)
+ if specification.glyphs then
+ local datatable=fontdata.tables.math
+ if datatable then
+ local tableoffset=datatable.offset
+ setposition(f,tableoffset)
+ local version=readulong(f)
+ if version~=0x00010000 then
+ report("table version %a of %a is not supported (yet), maybe font %s is bad",version,"math",fontdata.filename)
+ return
+ end
+ local constants=readushort(f)
+ local glyphinfo=readushort(f)
+ local variants=readushort(f)
+ if constants==0 then
+ report("the math table of %a has no constants",fontdata.filename)
+ else
+ readmathconstants(f,fontdata,tableoffset+constants)
+ end
+ if glyphinfo~=0 then
+ readmathglyphinfo(f,fontdata,tableoffset+glyphinfo)
+ end
+ if variants~=0 then
+ readmathvariants(f,fontdata,tableoffset+variants)
+ end
+ end
end
end
-local function t_uncover(splitter,cache,covers)
- local result={}
- for n=1,#covers do
- local cover=covers[n]
- local uncovered=cache[cover]
- if not uncovered then
- uncovered=lpegmatch(splitter,cover)
- cache[cover]=uncovered
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-oup']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local next,type=next,type
+local P,R,S=lpeg.P,lpeg.R,lpeg.S
+local lpegmatch=lpeg.match
+local insert,remove,copy=table.insert,table.remove,table.copy
+local formatters=string.formatters
+local sortedkeys=table.sortedkeys
+local sortedhash=table.sortedhash
+local tohash=table.tohash
+local report=logs.reporter("otf reader")
+local trace_markwidth=false trackers.register("otf.markwidth",function(v) trace_markwidth=v end)
+local readers=fonts.handlers.otf.readers
+local privateoffset=fonts.constructors and fonts.constructors.privateoffset or 0xF0000
+local f_private=formatters["P%05X"]
+local f_unicode=formatters["U%05X"]
+local f_index=formatters["I%05X"]
+local f_character_y=formatters["%C"]
+local f_character_n=formatters["[ %C ]"]
+local doduplicates=true
+local function replaced(list,index,replacement)
+ if type(list)=="number" then
+ return replacement
+ elseif type(replacement)=="table" then
+ local t={}
+ local n=index-1
+ for i=1,n do
+ t[i]=list[i]
end
- result[n]=uncovered
+ for i=1,#replacement do
+ n=n+1
+ t[n]=replacement[i]
+ end
+ for i=index+1,#list do
+ n=n+1
+ t[n]=list[i]
+ end
+ else
+ list[index]=replacement
+ return list
end
- return result
end
-local function s_uncover(splitter,cache,cover)
- if cover=="" then
- return nil
- else
- local uncovered=cache[cover]
- if not uncovered then
- uncovered=lpegmatch(splitter,cover)
- cache[cover]=uncovered
+local function unifyresources(fontdata,indices)
+ local descriptions=fontdata.descriptions
+ local resources=fontdata.resources
+ if not descriptions or not resources then
+ return
+ end
+ local variants=fontdata.resources.variants
+ if variants then
+ for selector,unicodes in next,variants do
+ for unicode,index in next,unicodes do
+ unicodes[unicode]=indices[index]
+ end
end
- return { uncovered }
end
-end
-local function t_hashed(t,cache)
- if t then
- local ht={}
- for i=1,#t do
- local ti=t[i]
- local tih=cache[ti]
- if not tih then
- local tn=#ti
- if tn==1 then
- tih={ [ti[1]]=true }
+ local function remark(marks)
+ if marks then
+ local newmarks={}
+ for k,v in next,marks do
+ local u=indices[k]
+ if u then
+ newmarks[u]=v
else
- tih={}
- for i=1,tn do
- tih[ti[i]]=true
- end
+ report("discarding mark %i",k)
end
- cache[ti]=tih
end
- ht[i]=tih
+ return newmarks
end
- return ht
- else
- return nil
end
-end
-local function s_hashed(t,cache)
- if t then
- local tf=t[1]
- local nf=#tf
- if nf==1 then
- return { [tf[1]]=true }
- else
- local ht={}
- for i=1,nf do
- ht[i]={ [tf[i]]=true }
+ local marks=resources.marks
+ if marks then
+ resources.marks=remark(marks)
+ end
+ local markclasses=resources.markclasses
+ if markclasses then
+ for class,marks in next,markclasses do
+ markclasses[class]=remark(marks)
+ end
+ end
+ local marksets=resources.marksets
+ if marksets then
+ for class,marks in next,marksets do
+ marksets[class]=remark(marks)
+ end
+ end
+ local done={}
+ local duplicates=doduplicates and resources.duplicates
+ if duplicates and not next(duplicates) then
+ duplicates=false
+ end
+ local function recover(cover)
+ for i=1,#cover do
+ local c=cover[i]
+ if not done[c] then
+ local t={}
+ for k,v in next,c do
+ t[indices[k]]=v
+ end
+ cover[i]=t
+ done[c]=t
end
- return ht
end
- else
- return nil
end
-end
-local function r_uncover(splitter,cache,cover,replacements)
- if cover=="" then
- return nil
- else
- local uncovered=cover[1]
- local replaced=cache[replacements]
- if not replaced then
- replaced=lpegmatch(splitter,replacements)
- cache[replacements]=replaced
- end
- local nu,nr=#uncovered,#replaced
- local r={}
- if nu==nr then
- for i=1,nu do
- r[uncovered[i]]=replaced[i]
- end
- end
- return r
- end
-end
-actions["reorganize lookups"]=function(data,filename,raw)
- if data.lookups then
- local helpers=data.helpers
- local duplicates=data.resources.duplicates
- local splitter=helpers.tounicodetable
- local t_u_cache={}
- local s_u_cache=t_u_cache
- local t_h_cache={}
- local s_h_cache=t_h_cache
- local r_u_cache={}
- helpers.matchcache=t_h_cache
- for _,lookup in next,data.lookups do
- local rules=lookup.rules
- if rules then
- local format=lookup.format
- if format=="class" then
- local before_class=lookup.before_class
- if before_class then
- before_class=t_uncover(splitter,t_u_cache,reversed(before_class))
- end
- local current_class=lookup.current_class
- if current_class then
- current_class=t_uncover(splitter,t_u_cache,current_class)
- end
- local after_class=lookup.after_class
- if after_class then
- after_class=t_uncover(splitter,t_u_cache,after_class)
- end
- for i=1,#rules do
- local rule=rules[i]
- local class=rule.class
- local before=class.before
- if before then
- for i=1,#before do
- before[i]=before_class[before[i]] or {}
+ local function recursed(c)
+ local t={}
+ for g,d in next,c do
+ if type(d)=="table" then
+ t[indices[g]]=recursed(d)
+ else
+ t[g]=indices[d]
+ end
+ end
+ return t
+ end
+ local function unifythem(sequences)
+ if not sequences then
+ return
+ end
+ for i=1,#sequences do
+ local sequence=sequences[i]
+ local kind=sequence.type
+ local steps=sequence.steps
+ local features=sequence.features
+ if steps then
+ for i=1,#steps do
+ local step=steps[i]
+ if kind=="gsub_single" then
+ local c=step.coverage
+ if c then
+ local t1=done[c]
+ if not t1 then
+ t1={}
+ if duplicates then
+ for g1,d1 in next,c do
+ local ug1=indices[g1]
+ local ud1=indices[d1]
+ t1[ug1]=ud1
+ local dg1=duplicates[ug1]
+ if dg1 then
+ for u in next,dg1 do
+ t1[u]=ud1
+ end
+ end
+ end
+ else
+ for g1,d1 in next,c do
+ t1[indices[g1]]=indices[d1]
+ end
+ end
+ done[c]=t1
end
- rule.before=t_hashed(before,t_h_cache)
+ step.coverage=t1
end
- local current=class.current
- local lookups=rule.lookups
- if current then
- for i=1,#current do
- current[i]=current_class[current[i]] or {}
- if lookups and not lookups[i] then
- lookups[i]=""
+ elseif kind=="gpos_pair" then
+ local c=step.coverage
+ if c then
+ local t1=done[c]
+ if not t1 then
+ t1={}
+ for g1,d1 in next,c do
+ local t2=done[d1]
+ if not t2 then
+ t2={}
+ for g2,d2 in next,d1 do
+ t2[indices[g2]]=d2
+ end
+ done[d1]=t2
+ end
+ t1[indices[g1]]=t2
end
+ done[c]=t1
end
- rule.current=t_hashed(current,t_h_cache)
+ step.coverage=t1
end
- local after=class.after
- if after then
- for i=1,#after do
- after[i]=after_class[after[i]] or {}
- end
- rule.after=t_hashed(after,t_h_cache)
+ elseif kind=="gsub_ligature" then
+ local c=step.coverage
+ if c then
+ step.coverage=recursed(c)
end
- rule.class=nil
- end
- lookup.before_class=nil
- lookup.current_class=nil
- lookup.after_class=nil
- lookup.format="coverage"
- elseif format=="coverage" then
- for i=1,#rules do
- local rule=rules[i]
- local coverage=rule.coverage
- if coverage then
- local before=coverage.before
- if before then
- before=t_uncover(splitter,t_u_cache,reversed(before))
- rule.before=t_hashed(before,t_h_cache)
- end
- local current=coverage.current
- if current then
- current=t_uncover(splitter,t_u_cache,current)
- local lookups=rule.lookups
- if lookups then
- for i=1,#current do
- if not lookups[i] then
- lookups[i]=""
+ elseif kind=="gsub_alternate" or kind=="gsub_multiple" then
+ local c=step.coverage
+ if c then
+ local t1=done[c]
+ if not t1 then
+ t1={}
+ if duplicates then
+ for g1,d1 in next,c do
+ for i=1,#d1 do
+ d1[i]=indices[d1[i]]
+ end
+ local ug1=indices[g1]
+ t1[ug1]=d1
+ local dg1=duplicates[ug1]
+ if dg1 then
+ for u in next,dg1 do
+ t1[u]=copy(d1)
+ end
+ end
+ end
+ else
+ for g1,d1 in next,c do
+ for i=1,#d1 do
+ d1[i]=indices[d1[i]]
end
+ t1[indices[g1]]=d1
end
end
- rule.current=t_hashed(current,t_h_cache)
+ done[c]=t1
end
- local after=coverage.after
- if after then
- after=t_uncover(splitter,t_u_cache,after)
- rule.after=t_hashed(after,t_h_cache)
- end
- rule.coverage=nil
+ step.coverage=t1
end
- end
- elseif format=="reversecoverage" then
- for i=1,#rules do
- local rule=rules[i]
- local reversecoverage=rule.reversecoverage
- if reversecoverage then
- local before=reversecoverage.before
- if before then
- before=t_uncover(splitter,t_u_cache,reversed(before))
- rule.before=t_hashed(before,t_h_cache)
+ elseif kind=="gpos_mark2base" or kind=="gpos_mark2mark" or kind=="gpos_mark2ligature" then
+ local c=step.coverage
+ if c then
+ local t1=done[c]
+ if not t1 then
+ t1={}
+ for g1,d1 in next,c do
+ t1[indices[g1]]=d1
+ end
+ done[c]=t1
end
- local current=reversecoverage.current
- if current then
- current=t_uncover(splitter,t_u_cache,current)
- rule.current=t_hashed(current,t_h_cache)
+ step.coverage=t1
+ end
+ local c=step.baseclasses
+ if c then
+ local t1=done[c]
+ if not t1 then
+ for g1,d1 in next,c do
+ local t2=done[d1]
+ if not t2 then
+ t2={}
+ for g2,d2 in next,d1 do
+ t2[indices[g2]]=d2
+ end
+ done[d1]=t2
+ end
+ c[g1]=t2
+ end
+ done[c]=c
end
- local after=reversecoverage.after
- if after then
- after=t_uncover(splitter,t_u_cache,after)
- rule.after=t_hashed(after,t_h_cache)
+ end
+ elseif kind=="gpos_single" then
+ local c=step.coverage
+ if c then
+ local t1=done[c]
+ if not t1 then
+ t1={}
+ if duplicates then
+ for g1,d1 in next,c do
+ local ug1=indices[g1]
+ t1[ug1]=d1
+ local dg1=duplicates[ug1]
+ if dg1 then
+ for u in next,dg1 do
+ t1[u]=d1
+ end
+ end
+ end
+ else
+ for g1,d1 in next,c do
+ t1[indices[g1]]=d1
+ end
+ end
+ done[c]=t1
end
- local replacements=reversecoverage.replacements
- if replacements then
- rule.replacements=r_uncover(splitter,r_u_cache,current,replacements)
+ step.coverage=t1
+ end
+ elseif kind=="gpos_cursive" then
+ local c=step.coverage
+ if c then
+ local t1=done[c]
+ if not t1 then
+ t1={}
+ if duplicates then
+ for g1,d1 in next,c do
+ local ug1=indices[g1]
+ t1[ug1]=d1
+ local dg1=duplicates[ug1]
+ if dg1 then
+ for u in next,dg1 do
+ t1[u]=copy(d1)
+ end
+ end
+ end
+ else
+ for g1,d1 in next,c do
+ t1[indices[g1]]=d1
+ end
+ end
+ done[c]=t1
end
- rule.reversecoverage=nil
+ step.coverage=t1
end
end
- elseif format=="glyphs" then
- for i=1,#rules do
- local rule=rules[i]
- local glyphs=rule.glyphs
- if glyphs then
- local fore=glyphs.fore
- if fore and fore~="" then
- fore=s_uncover(splitter,s_u_cache,fore)
- rule.after=s_hashed(fore,s_h_cache)
- end
- local back=glyphs.back
- if back then
- back=s_uncover(splitter,s_u_cache,back)
- rule.before=s_hashed(back,s_h_cache)
- end
- local names=glyphs.names
- if names then
- names=s_uncover(splitter,s_u_cache,names)
- rule.current=s_hashed(names,s_h_cache)
- end
- rule.glyphs=nil
- local lookups=rule.lookups
- if lookups then
- for i=1,#names do
- if not lookups[i] then
- lookups[i]=""
+ local rules=step.rules
+ if rules then
+ for i=1,#rules do
+ local rule=rules[i]
+ local before=rule.before if before then recover(before) end
+ local after=rule.after if after then recover(after) end
+ local current=rule.current if current then recover(current) end
+ local replacements=rule.replacements
+ if replacements then
+ if not done[replacements] then
+ local r={}
+ for k,v in next,replacements do
+ r[indices[k]]=indices[v]
end
+ rule.replacements=r
+ done[replacements]=r
end
end
end
@@ -8575,648 +13704,1788 @@ actions["reorganize lookups"]=function(data,filename,raw)
end
end
end
+ unifythem(resources.sequences)
+ unifythem(resources.sublookups)
end
-actions["expand lookups"]=function(data,filename,raw)
- if data.lookups then
- local cache=data.helpers.matchcache
- if cache then
- local duplicates=data.resources.duplicates
- for key,hash in next,cache do
- local done=nil
- for key in next,hash do
- local unicode=duplicates[key]
- if not unicode then
- elseif type(unicode)=="table" then
- for i=1,#unicode do
- local u=unicode[i]
- if hash[u] then
- elseif done then
- done[u]=key
- else
- done={ [u]=key }
- end
- end
- else
- if hash[unicode] then
- elseif done then
- done[unicode]=key
+local function copyduplicates(fontdata)
+ if doduplicates then
+ local descriptions=fontdata.descriptions
+ local resources=fontdata.resources
+ local duplicates=resources.duplicates
+ if duplicates then
+ for u,d in next,duplicates do
+ local du=descriptions[u]
+ if du then
+ local t={ f_character_y(u),"@",f_index(du.index),"->" }
+ for u in next,d do
+ if descriptions[u] then
+ t[#t+1]=f_character_n(u)
else
- done={ [unicode]=key }
+ local c=copy(du)
+ descriptions[u]=c
+ t[#t+1]=f_character_y(u)
end
end
+ report("duplicates: % t",t)
+ else
end
- if done then
- for u in next,done do
- hash[u]=true
+ end
+ end
+ end
+end
+local ignore={
+ ["notdef"]=true,
+ [".notdef"]=true,
+ ["null"]=true,
+ [".null"]=true,
+ ["nonmarkingreturn"]=true,
+}
+local function checklookups(fontdata,missing,nofmissing)
+ local descriptions=fontdata.descriptions
+ local resources=fontdata.resources
+ if missing and nofmissing and nofmissing<=0 then
+ return
+ end
+ local singles={}
+ local alternates={}
+ local ligatures={}
+ if not missing then
+ missing={}
+ nofmissing=0
+ for u,d in next,descriptions do
+ if not d.unicode then
+ nofmissing=nofmissing+1
+ missing[u]=true
+ end
+ end
+ end
+ local function collectthem(sequences)
+ if not sequences then
+ return
+ end
+ for i=1,#sequences do
+ local sequence=sequences[i]
+ local kind=sequence.type
+ local steps=sequence.steps
+ if steps then
+ for i=1,#steps do
+ local step=steps[i]
+ if kind=="gsub_single" then
+ local c=step.coverage
+ if c then
+ singles[#singles+1]=c
+ end
+ elseif kind=="gsub_alternate" then
+ local c=step.coverage
+ if c then
+ alternates[#alternates+1]=c
+ end
+ elseif kind=="gsub_ligature" then
+ local c=step.coverage
+ if c then
+ ligatures[#ligatures+1]=c
+ end
end
end
end
end
end
-end
-local function check_variants(unicode,the_variants,splitter,unicodes)
- local variants=the_variants.variants
- if variants then
- local glyphs=lpegmatch(splitter,variants)
- local done={ [unicode]=true }
- local n=0
- for i=1,#glyphs do
- local g=glyphs[i]
- if done[g] then
- if i>1 then
- report_otf("skipping cyclic reference %U in math variant %U",g,unicode)
+ collectthem(resources.sequences)
+ collectthem(resources.sublookups)
+ local loops=0
+ while true do
+ loops=loops+1
+ local old=nofmissing
+ for i=1,#singles do
+ local c=singles[i]
+ for g1,g2 in next,c do
+ if missing[g1] then
+ local u2=descriptions[g2].unicode
+ if u2 then
+ missing[g1]=false
+ descriptions[g1].unicode=u2
+ nofmissing=nofmissing-1
+ end
end
- else
- if n==0 then
- n=1
- variants={ g }
- else
+ if missing[g2] then
+ local u1=descriptions[g1].unicode
+ if u1 then
+ missing[g2]=false
+ descriptions[g2].unicode=u1
+ nofmissing=nofmissing-1
+ end
+ end
+ end
+ end
+ for i=1,#alternates do
+ local c=alternates[i]
+ for g1,d1 in next,c do
+ if missing[g1] then
+ for i=1,#d1 do
+ local g2=d1[i]
+ local u2=descriptions[g2].unicode
+ if u2 then
+ missing[g1]=false
+ descriptions[g1].unicode=u2
+ nofmissing=nofmissing-1
+ end
+ end
+ end
+ if not missing[g1] then
+ for i=1,#d1 do
+ local g2=d1[i]
+ if missing[g2] then
+ local u1=descriptions[g1].unicode
+ if u1 then
+ missing[g2]=false
+ descriptions[g2].unicode=u1
+ nofmissing=nofmissing-1
+ end
+ end
+ end
+ end
+ end
+ end
+ if nofmissing<=0 then
+ report("all done in %s loops",loops)
+ return
+ elseif old==nofmissing then
+ break
+ end
+ end
+ local t,n
+ local function recursed(c)
+ for g,d in next,c do
+ if g~="ligature" then
+ local u=descriptions[g].unicode
+ if u then
n=n+1
- variants[n]=g
+ t[n]=u
+ recursed(d)
+ n=n-1
end
- done[g]=true
+ elseif missing[d] then
+ local l={}
+ local m=0
+ for i=1,n do
+ local u=t[i]
+ if type(u)=="table" then
+ for i=1,#u do
+ m=m+1
+ l[m]=u[i]
+ end
+ else
+ m=m+1
+ l[m]=u
+ end
+ end
+ missing[d]=false
+ descriptions[d].unicode=l
+ nofmissing=nofmissing-1
end
end
- if n==0 then
- variants=nil
+ end
+ if nofmissing>0 then
+ t={}
+ n=0
+ local loops=0
+ while true do
+ loops=loops+1
+ local old=nofmissing
+ for i=1,#ligatures do
+ recursed(ligatures[i])
+ end
+ if nofmissing<=0 then
+ report("all done in %s loops",loops)
+ return
+ elseif old==nofmissing then
+ break
+ end
+ end
+ t=nil
+ n=0
+ end
+ if nofmissing>0 then
+ local done={}
+ for i,r in next,missing do
+ if r then
+ local name=descriptions[i].name or f_index(i)
+ if not ignore[name] then
+ done[#done+1]=name
+ end
+ end
+ end
+ if #done>0 then
+ table.sort(done)
+ report("not unicoded: % t",done)
end
end
- local parts=the_variants.parts
- if parts then
- local p=#parts
- if p>0 then
- for i=1,p do
- local pi=parts[i]
- pi.glyph=unicodes[pi.component] or 0
- pi.component=nil
+end
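The loop above spreads known unicodes through the substitution coverages until a fixed point: whenever one side of a gsub_single mapping already carries a unicode, the other side inherits it, and the pass repeats until a whole round changes nothing. A reduced sketch of that propagation, assuming the same missing/descriptions shape as above (this is illustrative, not the loader code itself):

-- sketch: propagate known unicodes across single substitutions until a
-- full pass makes no progress (the fixed point checklookups aims for)
local function propagate(singles,descriptions,missing,nofmissing)
  while nofmissing>0 do
    local before=nofmissing
    for i=1,#singles do
      for g1,g2 in next,singles[i] do
        if missing[g1] and descriptions[g2] and descriptions[g2].unicode then
          descriptions[g1].unicode=descriptions[g2].unicode
          missing[g1]=false
          nofmissing=nofmissing-1
        end
      end
    end
    if nofmissing==before then
      break -- no progress in this round, give up
    end
  end
  return nofmissing
end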
+local function unifymissing(fontdata)
+ if not fonts.mappings then
+ require("font-map")
+ require("font-agl")
+ end
+ local unicodes={}
+ local private=fontdata.private
+ local resources=fontdata.resources
+ resources.unicodes=unicodes
+ for unicode,d in next,fontdata.descriptions do
+ if unicode<privateoffset then
+ local name=d.name
+ if name then
+ unicodes[name]=unicode
end
+ end
+ end
+ fonts.mappings.addtounicode(fontdata,fontdata.filename,checklookups)
+ resources.unicodes=nil
+end
+local function unifyglyphs(fontdata,usenames)
+ local private=fontdata.private or privateoffset
+ local glyphs=fontdata.glyphs
+ local indices={}
+ local descriptions={}
+ local names=usenames and {}
+ local resources=fontdata.resources
+ local zero=glyphs[0]
+ local zerocode=zero.unicode
+ if not zerocode then
+ zerocode=private
+ zero.unicode=zerocode
+ private=private+1
+ end
+ descriptions[zerocode]=zero
+ if names then
+ local name=glyphs[0].name or f_private(zerocode)
+ indices[0]=name
+ names[name]=zerocode
+ else
+ indices[0]=zerocode
+ end
+ for index=1,#glyphs do
+ local glyph=glyphs[index]
+ local unicode=glyph.unicode
+ if not unicode then
+ unicode=private
+ if names then
+ local name=glyph.name or f_private(unicode)
+ indices[index]=name
+ names[name]=unicode
+ else
+ indices[index]=unicode
+ end
+ private=private+1
+ elseif descriptions[unicode] then
+ report("assigning private unicode %U to glyph indexed %05X (%C)",private,index,unicode)
+ unicode=private
+ if names then
+ local name=glyph.name or f_private(unicode)
+ indices[index]=name
+ names[name]=unicode
+ else
+ indices[index]=unicode
+ end
+ private=private+1
else
- parts=nil
+ if names then
+ local name=glyph.name or f_unicode(unicode)
+ indices[index]=name
+ names[name]=unicode
+ else
+ indices[index]=unicode
+ end
end
+ descriptions[unicode]=glyph
end
- local italic=the_variants.italic
- if italic and italic==0 then
- italic=nil
+ for index=1,#glyphs do
+ local math=glyphs[index].math
+ if math then
+ local list=math.vparts
+ if list then
+ for i=1,#list do local l=list[i] l.glyph=indices[l.glyph] end
+ end
+ local list=math.hparts
+ if list then
+ for i=1,#list do local l=list[i] l.glyph=indices[l.glyph] end
+ end
+ local list=math.vvariants
+ if list then
+ for i=1,#list do list[i]=indices[list[i]] end
+ end
+ local list=math.hvariants
+ if list then
+ for i=1,#list do list[i]=indices[list[i]] end
+ end
+ end
end
- return variants,parts,italic
+ fontdata.private=private
+ fontdata.glyphs=nil
+ fontdata.names=names
+ fontdata.descriptions=descriptions
+ fontdata.hashmethod=hashmethod
+ return indices,names
end
-actions["analyze math"]=function(data,filename,raw)
- if raw.math then
- data.metadata.math=raw.math
- local unicodes=data.resources.unicodes
- local splitter=data.helpers.tounicodetable
- for unicode,description in next,data.descriptions do
- local glyph=description.glyph
- local mathkerns=glyph.mathkern
- local hvariants=glyph.horiz_variants
- local vvariants=glyph.vert_variants
- local accent=glyph.top_accent
- local italic=glyph.italic_correction
- if mathkerns or hvariants or vvariants or accent or italic then
- local math={}
- if accent then
- math.accent=accent
+local p_bogusname=(
+ (P("uni")+P("UNI")+P("Uni")+P("U")+P("u"))*S("Xx")^0*R("09","AF")^1+(P("identity")+P("Identity")+P("IDENTITY"))*R("09","AF")^1+(P("index")+P("Index")+P("INDEX"))*R("09")^1
+)*P(-1)
+local function stripredundant(fontdata)
+ local descriptions=fontdata.descriptions
+ if descriptions then
+ local n=0
+ local c=0
+ for unicode,d in next,descriptions do
+ local name=d.name
+ if name and lpegmatch(p_bogusname,name) then
+ d.name=nil
+ n=n+1
+ end
+ if d.class=="base" then
+ d.class=nil
+ c=c+1
+ end
+ end
+ if n>0 then
+ report("%s bogus names removed (verbose unicode)",n)
+ end
+ if c>0 then
+ report("%s base class tags removed (default is base)",c)
+ end
+ end
+end
+function readers.rehash(fontdata,hashmethod)
+ if not (fontdata and fontdata.glyphs) then
+ return
+ end
+ if hashmethod=="indices" then
+ fontdata.hashmethod="indices"
+ elseif hashmethod=="names" then
+ fontdata.hashmethod="names"
+ local indices=unifyglyphs(fontdata,true)
+ unifyresources(fontdata,indices)
+ copyduplicates(fontdata)
+ unifymissing(fontdata)
+ else
+ fontdata.hashmethod="unicode"
+ local indices=unifyglyphs(fontdata)
+ unifyresources(fontdata,indices)
+ copyduplicates(fontdata)
+ unifymissing(fontdata)
+ stripredundant(fontdata)
+ end
+end
+function readers.checkhash(fontdata)
+ local hashmethod=fontdata.hashmethod
+ if hashmethod=="unicodes" then
+ fontdata.names=nil
+ elseif hashmethod=="names" and fontdata.names then
+ unifyresources(fontdata,fontdata.names)
+ copyduplicates(fontdata)
+ fontdata.hashmethod="unicode"
+ fontdata.names=nil
+ else
+ readers.rehash(fontdata,"unicode")
+ end
+end
+function readers.addunicodetable(fontdata)
+ local resources=fontdata.resources
+ local unicodes=resources.unicodes
+ if not unicodes then
+ local descriptions=fontdata.descriptions
+ if descriptions then
+ unicodes={}
+ resources.unicodes=unicodes
+ for u,d in next,descriptions do
+ local n=d.name
+ if n then
+ unicodes[n]=u
+ end
+ end
+ end
+ end
+end
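readers.rehash above re-keys everything by unicode: unifyglyphs turns the index-ordered glyphs array into a unicode-keyed descriptions table, handing out private slots to unencoded or clashing glyphs, and the resulting index-to-unicode map drives unifyresources, copyduplicates and unifymissing. A minimal, hypothetical re-keying sketch of that first step (assumes a zero-based glyph array; the privateoffset constant is an assumption, not the loader's value):

-- sketch: re-key glyphs by unicode, using private slots for glyphs that
-- have none or whose unicode is already taken (illustrative only)
local privateoffset=0xF0000
local function rekey(glyphs)
  local private=privateoffset
  local indices={}
  local descriptions={}
  for index=0,#glyphs do
    local glyph=glyphs[index]
    local unicode=glyph.unicode
    if not unicode or descriptions[unicode] then
      unicode=private
      private=private+1
    end
    indices[index]=unicode
    descriptions[unicode]=glyph
  end
  return indices,descriptions,private
end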
+local concat,sort=table.concat,table.sort
+local next,type,tostring=next,type,tostring
+local criterium=1
+local threshold=0
+local trace_packing=false trackers.register("otf.packing",function(v) trace_packing=v end)
+local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+local function tabstr_normal(t)
+ local s={}
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ if type(v)=="table" then
+ s[n]=k..">"..tabstr_normal(v)
+ elseif v==true then
+ s[n]=k.."+"
+ elseif v then
+ s[n]=k.."="..v
+ else
+ s[n]=k.."-"
+ end
+ end
+ if n==0 then
+ return ""
+ elseif n==1 then
+ return s[1]
+ else
+ sort(s)
+ return concat(s,",")
+ end
+end
+local function tabstr_flat(t)
+ local s={}
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ s[n]=k.."="..v
+ end
+ if n==0 then
+ return ""
+ elseif n==1 then
+ return s[1]
+ else
+ sort(s)
+ return concat(s,",")
+ end
+end
+local function tabstr_mixed(t)
+ local s={}
+ local n=#t
+ if n==0 then
+ return ""
+ elseif n==1 then
+ local k=t[1]
+ if k==true then
+ return "++"
+ elseif k==false then
+ return "--"
+ else
+ return tostring(k)
+ end
+ else
+ for i=1,n do
+ local k=t[i]
+ if k==true then
+ s[i]="++"
+ elseif k==false then
+ s[i]="--"
+ else
+ s[i]=k
+ end
+ end
+ return concat(s,",")
+ end
+end
+local function tabstr_boolean(t)
+ local s={}
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ if v then
+ s[n]=k.."+"
+ else
+ s[n]=k.."-"
+ end
+ end
+ if n==0 then
+ return ""
+ elseif n==1 then
+ return s[1]
+ else
+ sort(s)
+ return concat(s,",")
+ end
+end
+function readers.pack(data)
+ if data then
+ local h,t,c={},{},{}
+ local hh,tt,cc={},{},{}
+ local nt,ntt=0,0
+ local function pack_normal(v)
+ local tag=tabstr_normal(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_flat(v)
+ local tag=tabstr_flat(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_boolean(v)
+ local tag=tabstr_boolean(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_indexed(v)
+ local tag=concat(v," ")
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_mixed(v)
+ local tag=tabstr_mixed(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_final(v)
+ if c[v]<=criterium then
+ return t[v]
+ else
+ local hv=hh[v]
+ if hv then
+ return hv
+ else
+ ntt=ntt+1
+ tt[ntt]=t[v]
+ hh[v]=ntt
+ cc[ntt]=c[v]
+ return ntt
end
- if mathkerns then
- for k,v in next,mathkerns do
- if not next(v) then
- mathkerns[k]=nil
+ end
+ end
+ local function success(stage,pass)
+ if nt==0 then
+ if trace_loading or trace_packing then
+ report_otf("pack quality: nothing to pack")
+ end
+ return false
+ elseif nt>=threshold then
+ local one,two,rest=0,0,0
+ if pass==1 then
+ for k,v in next,c do
+ if v==1 then
+ one=one+1
+ elseif v==2 then
+ two=two+1
else
- for k,v in next,v do
- if v==0 then
- k[v]=nil
- end
- end
+ rest=rest+1
+ end
+ end
+ else
+ for k,v in next,cc do
+ if v>20 then
+ rest=rest+1
+ elseif v>10 then
+ two=two+1
+ else
+ one=one+1
end
end
- math.kerns=mathkerns
+ data.tables=tt
end
- if hvariants then
- math.hvariants,math.hparts,math.hitalic=check_variants(unicode,hvariants,splitter,unicodes)
+ if trace_loading or trace_packing then
+ report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)",
+ stage,pass,one+two+rest,one,two,rest,criterium)
end
- if vvariants then
- math.vvariants,math.vparts,math.vitalic=check_variants(unicode,vvariants,splitter,unicodes)
+ return true
+ else
+ if trace_loading or trace_packing then
+ report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)",
+ stage,pass,nt,threshold)
end
- if italic and italic~=0 then
- math.italic=italic
+ return false
+ end
+ end
+ local function packers(pass)
+ if pass==1 then
+ return pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed
+ else
+ return pack_final,pack_final,pack_final,pack_final,pack_final
+ end
+ end
+ local resources=data.resources
+ local sequences=resources.sequences
+ local sublookups=resources.sublookups
+ local features=resources.features
+ local chardata=characters and characters.data
+ local descriptions=data.descriptions or data.glyphs
+ if not descriptions then
+ return
+ end
+ for pass=1,2 do
+ if trace_packing then
+ report_otf("start packing: stage 1, pass %s",pass)
+ end
+ local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass)
+ for unicode,description in next,descriptions do
+ local boundingbox=description.boundingbox
+ if boundingbox then
+ description.boundingbox=pack_indexed(boundingbox)
+ end
+ local math=description.math
+ if math then
+ local kerns=math.kerns
+ if kerns then
+ for tag,kern in next,kerns do
+ kerns[tag]=pack_normal(kern)
+ end
+ end
end
- description.math=math
+ end
+ local function packthem(sequences)
+ for i=1,#sequences do
+ local sequence=sequences[i]
+ local kind=sequence.type
+ local steps=sequence.steps
+ local order=sequence.order
+ local features=sequence.features
+ local flags=sequence.flags
+ if steps then
+ for i=1,#steps do
+ local step=steps[i]
+ if kind=="gpos_pair" then
+ local c=step.coverage
+ if c then
+ if step.format=="kern" then
+ for g1,d1 in next,c do
+ c[g1]=pack_normal(d1)
+ end
+ else
+ for g1,d1 in next,c do
+ for g2,d2 in next,d1 do
+ local f=d2[1] if f then d2[1]=pack_indexed(f) end
+ local s=d2[2] if s then d2[2]=pack_indexed(s) end
+ end
+ end
+ end
+ end
+ elseif kind=="gpos_single" then
+ local c=step.coverage
+ if c then
+ if step.format=="kern" then
+ step.coverage=pack_normal(c)
+ else
+ for g1,d1 in next,c do
+ c[g1]=pack_indexed(d1)
+ end
+ end
+ end
+ elseif kind=="gpos_cursive" then
+ local c=step.coverage
+ if c then
+ for g1,d1 in next,c do
+ local f=d1[2] if f then d1[2]=pack_indexed(f) end
+ local s=d1[3] if s then d1[3]=pack_indexed(s) end
+ end
+ end
+ elseif kind=="gpos_mark2base" or kind=="gpos_mark2mark" then
+ local c=step.baseclasses
+ if c then
+ for g1,d1 in next,c do
+ for g2,d2 in next,d1 do
+ d1[g2]=pack_indexed(d2)
+ end
+ end
+ end
+ local c=step.coverage
+ if c then
+ for g1,d1 in next,c do
+ d1[2]=pack_indexed(d1[2])
+ end
+ end
+ elseif kind=="gpos_mark2ligature" then
+ local c=step.baseclasses
+ if c then
+ for g1,d1 in next,c do
+ for g2,d2 in next,d1 do
+ for g3,d3 in next,d2 do
+ d2[g3]=pack_indexed(d3)
+ end
+ end
+ end
+ end
+ local c=step.coverage
+ if c then
+ for g1,d1 in next,c do
+ d1[2]=pack_indexed(d1[2])
+ end
+ end
+ end
+ local rules=step.rules
+ if rules then
+ for i=1,#rules do
+ local rule=rules[i]
+ local r=rule.before if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end
+ local r=rule.after if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end
+ local r=rule.current if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end
+ local r=rule.replacements if r then rule.replacements=pack_flat (r) end
+ end
+ end
+ end
+ end
+ if order then
+ sequence.order=pack_indexed(order)
+ end
+ if features then
+ for script,feature in next,features do
+ features[script]=pack_normal(feature)
+ end
+ end
+ if flags then
+ sequence.flags=pack_normal(flags)
+ end
+ end
+ end
+ if sequences then
+ packthem(sequences)
+ end
+ if sublookups then
+ packthem(sublookups)
+ end
+ if features then
+ for k,list in next,features do
+ for feature,spec in next,list do
+ list[feature]=pack_normal(spec)
+ end
+ end
+ end
+ if not success(1,pass) then
+ return
end
end
- end
-end
-actions["reorganize glyph kerns"]=function(data,filename,raw)
- local descriptions=data.descriptions
- local resources=data.resources
- local unicodes=resources.unicodes
- for unicode,description in next,descriptions do
- local kerns=description.glyph.kerns
- if kerns then
- local newkerns={}
- for k,kern in next,kerns do
- local name=kern.char
- local offset=kern.off
- local lookup=kern.lookup
- if name and offset and lookup then
- local unicode=unicodes[name]
- if unicode then
- if type(lookup)=="table" then
- for l=1,#lookup do
- local lookup=lookup[l]
- local lookupkerns=newkerns[lookup]
- if lookupkerns then
- lookupkerns[unicode]=offset
- else
- newkerns[lookup]={ [unicode]=offset }
+ if nt>0 then
+ for pass=1,2 do
+ if trace_packing then
+ report_otf("start packing: stage 2, pass %s",pass)
+ end
+ local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass)
+ for unicode,description in next,descriptions do
+ local math=description.math
+ if math then
+ local kerns=math.kerns
+ if kerns then
+ math.kerns=pack_normal(kerns)
+ end
+ end
+ end
+ local function packthem(sequences)
+ for i=1,#sequences do
+ local sequence=sequences[i]
+ local kind=sequence.type
+ local steps=sequence.steps
+ local features=sequence.features
+ if steps then
+ for i=1,#steps do
+ local step=steps[i]
+ if kind=="gpos_pair" then
+ local c=step.coverage
+ if c then
+ if step.format=="kern" then
+ else
+ for g1,d1 in next,c do
+ for g2,d2 in next,d1 do
+ d1[g2]=pack_normal(d2)
+ end
+ end
+ end
+ end
+ end
+ local rules=step.rules
+ if rules then
+ for i=1,#rules do
+ local rule=rules[i]
+ local r=rule.before if r then rule.before=pack_normal(r) end
+ local r=rule.after if r then rule.after=pack_normal(r) end
+ local r=rule.current if r then rule.current=pack_normal(r) end
+ end
end
end
- else
- local lookupkerns=newkerns[lookup]
- if lookupkerns then
- lookupkerns[unicode]=offset
- else
- newkerns[lookup]={ [unicode]=offset }
+ end
+ if features then
+ sequence.features=pack_normal(features)
+ end
+ end
+ end
+ if sequences then
+ packthem(sequences)
+ end
+ if sublookups then
+ packthem(sublookups)
+ end
+ if not success(2,pass) then
+ end
+ end
+ for pass=1,2 do
+ if trace_packing then
+ report_otf("start packing: stage 3, pass %s",pass)
+ end
+ local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass)
+ local function packthem(sequences)
+ for i=1,#sequences do
+ local sequence=sequences[i]
+ local kind=sequence.type
+ local steps=sequence.steps
+ local features=sequence.features
+ if steps then
+ for i=1,#steps do
+ local step=steps[i]
+ if kind=="gpos_pair" then
+ local c=step.coverage
+ if c then
+ if step.format=="kern" then
+ else
+ for g1,d1 in next,c do
+ c[g1]=pack_normal(d1)
+ end
+ end
+ end
+ end
end
end
- elseif trace_loading then
- report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode)
end
end
+ if sequences then
+ packthem(sequences)
+ end
+ if sublookups then
+ packthem(sublookups)
+ end
end
- description.kerns=newkerns
end
end
end
-actions["merge kern classes"]=function(data,filename,raw)
- local gposlist=raw.gpos
- if gposlist then
- local descriptions=data.descriptions
- local resources=data.resources
- local unicodes=resources.unicodes
- local splitter=data.helpers.tounicodetable
- local ignored=0
- local blocked=0
- for gp=1,#gposlist do
- local gpos=gposlist[gp]
- local subtables=gpos.subtables
- if subtables then
- local first_done={}
- local split={}
- for s=1,#subtables do
- local subtable=subtables[s]
- local kernclass=subtable.kernclass
- local lookup=subtable.lookup or subtable.name
- if kernclass then
- if #kernclass>0 then
- kernclass=kernclass[1]
- lookup=type(kernclass.lookup)=="string" and kernclass.lookup or lookup
- report_otf("fixing kernclass table of lookup %a",lookup)
- end
- local firsts=kernclass.firsts
- local seconds=kernclass.seconds
- local offsets=kernclass.offsets
- for n,s in next,firsts do
- split[s]=split[s] or lpegmatch(splitter,s)
+local unpacked_mt={
+ __index=function(t,k)
+ t[k]=false
+ return k
+ end
+}
+function readers.unpack(data)
+ if data then
+ local tables=data.tables
+ if tables then
+ local resources=data.resources
+ local descriptions=data.descriptions or data.glyphs
+ local sequences=resources.sequences
+ local sublookups=resources.sublookups
+ local features=resources.features
+ local unpacked={}
+ setmetatable(unpacked,unpacked_mt)
+ for unicode,description in next,descriptions do
+ local tv=tables[description.boundingbox]
+ if tv then
+ description.boundingbox=tv
+ end
+ local math=description.math
+ if math then
+ local kerns=math.kerns
+ if kerns then
+ local tm=tables[kerns]
+ if tm then
+ math.kerns=tm
+ kerns=unpacked[tm]
end
- local maxseconds=0
- for n,s in next,seconds do
- if n>maxseconds then
- maxseconds=n
+ if kerns then
+ for k,kern in next,kerns do
+ local tv=tables[kern]
+ if tv then
+ kerns[k]=tv
+ end
end
- split[s]=split[s] or lpegmatch(splitter,s)
end
- for fk=1,#firsts do
- local fv=firsts[fk]
- local splt=split[fv]
- if splt then
- local extrakerns={}
- local baseoffset=(fk-1)*maxseconds
- for sk=2,maxseconds do
- local sv=seconds[sk]
- if sv then
- local splt=split[sv]
- if splt then
- local offset=offsets[baseoffset+sk]
- if offset then
- for i=1,#splt do
- extrakerns[splt[i]]=offset
+ end
+ end
+ end
+ local function unpackthem(sequences)
+ for i=1,#sequences do
+ local sequence=sequences[i]
+ local kind=sequence.type
+ local steps=sequence.steps
+ local order=sequence.order
+ local features=sequence.features
+ local flags=sequence.flags
+ local markclass=sequence.markclass
+ if steps then
+ for i=1,#steps do
+ local step=steps[i]
+ if kind=="gpos_pair" then
+ local c=step.coverage
+ if c then
+ if step.format=="kern" then
+ for g1,d1 in next,c do
+ local tv=tables[d1]
+ if tv then
+ c[g1]=tv
+ end
+ end
+ else
+ for g1,d1 in next,c do
+ local tv=tables[d1]
+ if tv then
+ c[g1]=tv
+ d1=tv
+ end
+ for g2,d2 in next,d1 do
+ local tv=tables[d2]
+ if tv then
+ d1[g2]=tv
+ d2=tv
end
+ local f=tables[d2[1]] if f then d2[1]=f end
+ local s=tables[d2[2]] if s then d2[2]=s end
end
end
end
end
- for i=1,#splt do
- local first_unicode=splt[i]
- if first_done[first_unicode] then
- report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode)
- blocked=blocked+1
+ elseif kind=="gpos_single" then
+ local c=step.coverage
+ if c then
+ if step.format=="kern" then
+ local tv=tables[c]
+ if tv then
+ step.coverage=tv
+ end
else
- first_done[first_unicode]=true
- local description=descriptions[first_unicode]
- if description then
- local kerns=description.kerns
- if not kerns then
- kerns={}
- description.kerns=kerns
+ for g1,d1 in next,c do
+ local tv=tables[d1]
+ if tv then
+ c[g1]=tv
end
- local lookupkerns=kerns[lookup]
- if not lookupkerns then
- lookupkerns={}
- kerns[lookup]=lookupkerns
+ end
+ end
+ end
+ elseif kind=="gpos_cursive" then
+ local c=step.coverage
+ if c then
+ for g1,d1 in next,c do
+ local f=tables[d1[2]] if f then d1[2]=f end
+ local s=tables[d1[3]] if s then d1[3]=s end
+ end
+ end
+ elseif kind=="gpos_mark2base" or kind=="gpos_mark2mark" then
+ local c=step.baseclasses
+ if c then
+ for g1,d1 in next,c do
+ for g2,d2 in next,d1 do
+ local tv=tables[d2]
+ if tv then
+ d1[g2]=tv
end
- if overloadkerns then
- for second_unicode,kern in next,extrakerns do
- lookupkerns[second_unicode]=kern
- end
- else
- for second_unicode,kern in next,extrakerns do
- local k=lookupkerns[second_unicode]
- if not k then
- lookupkerns[second_unicode]=kern
- elseif k~=kern then
- if trace_loading then
- report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern)
- end
- ignored=ignored+1
- end
+ end
+ end
+ end
+ local c=step.coverage
+ if c then
+ for g1,d1 in next,c do
+ local tv=tables[d1[2]]
+ if tv then
+ d1[2]=tv
+ end
+ end
+ end
+ elseif kind=="gpos_mark2ligature" then
+ local c=step.baseclasses
+ if c then
+ for g1,d1 in next,c do
+ for g2,d2 in next,d1 do
+ for g3,d3 in next,d2 do
+ local tv=tables[d2[g3]]
+ if tv then
+ d2[g3]=tv
end
end
- elseif trace_loading then
- report_otf("no glyph data for %U",first_unicode)
end
end
end
+ local c=step.coverage
+ if c then
+ for g1,d1 in next,c do
+ local tv=tables[d1[2]]
+ if tv then
+ d1[2]=tv
+ end
+ end
+ end
+ end
+ local rules=step.rules
+ if rules then
+ for i=1,#rules do
+ local rule=rules[i]
+ local before=rule.before
+ if before then
+ local tv=tables[before]
+ if tv then
+ rule.before=tv
+ before=tv
+ end
+ for i=1,#before do
+ local tv=tables[before[i]]
+ if tv then
+ before[i]=tv
+ end
+ end
+ end
+ local after=rule.after
+ if after then
+ local tv=tables[after]
+ if tv then
+ rule.after=tv
+ after=tv
+ end
+ for i=1,#after do
+ local tv=tables[after[i]]
+ if tv then
+ after[i]=tv
+ end
+ end
+ end
+ local current=rule.current
+ if current then
+ local tv=tables[current]
+ if tv then
+ rule.current=tv
+ current=tv
+ end
+ for i=1,#current do
+ local tv=tables[current[i]]
+ if tv then
+ current[i]=tv
+ end
+ end
+ end
+ local replacements=rule.replacements
+ if replacements then
+ local tv=tables[replacements]
+ if tv then
+ rule.replacements=tv
+ end
+ end
+ end
+ end
+ end
+ end
+ if features then
+ local tv=tables[features]
+ if tv then
+ sequence.features=tv
+ features=tv
+ end
+ for script,feature in next,features do
+ local tv=tables[feature]
+ if tv then
+ features[script]=tv
end
end
- subtable.kernclass={}
+ end
+ if order then
+ local tv=tables[order]
+ if tv then
+ sequence.order=tv
+ end
+ end
+ if flags then
+ local tv=tables[flags]
+ if tv then
+ sequence.flags=tv
+ end
end
end
end
- end
- if ignored>0 then
- report_otf("%s kern overloads ignored",ignored)
- end
- if blocked>0 then
- report_otf("%s successive kerns blocked",blocked)
+ if sequences then
+ unpackthem(sequences)
+ end
+ if sublookups then
+ unpackthem(sublookups)
+ end
+ if features then
+ for k,list in next,features do
+ for feature,spec in next,list do
+ local tv=tables[spec]
+ if tv then
+ list[feature]=tv
+ end
+ end
+ end
+ end
+ data.tables=nil
end
end
end
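readers.pack and readers.unpack above implement structure sharing: each repeated subtable is reduced to a canonical string tag, stored once in data.tables, and replaced by its integer index; unpacking restores the shared tables from those indices. A standalone sketch of the round trip for flat index lists such as bounding boxes (names are illustrative, not the loader's):

-- sketch: share identical flat tables via a tag, as pack_indexed does,
-- and restore them afterwards as readers.unpack does
local function pack_shared(values)
  local hash,tables,n={},{},0
  for i=1,#values do
    local v=values[i]
    local tag=table.concat(v," ")
    local idx=hash[tag]
    if not idx then
      n=n+1
      tables[n]=v
      hash[tag]=n
      idx=n
    end
    values[i]=idx
  end
  return tables
end
local function unpack_shared(values,tables)
  for i=1,#values do
    values[i]=tables[values[i]]
  end
end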
-actions["check glyphs"]=function(data,filename,raw)
- for unicode,description in next,data.descriptions do
- description.glyph=nil
+local mt={
+ __index=function(t,k)
+ if k=="height" then
+ local ht=t.boundingbox[4]
+ return ht<0 and 0 or ht
+ elseif k=="depth" then
+ local dp=-t.boundingbox[2]
+ return dp<0 and 0 or dp
+ elseif k=="width" then
+ return 0
+ elseif k=="name" then
+ return forcenotdef and ".notdef"
+ end
end
+}
+local function sameformat(sequence,steps,first,nofsteps,kind)
+ return true
end
-local valid=(R("\x00\x7E")-S("(){}[]<>%/ \n\r\f\v"))^0*P(-1)
-local function valid_ps_name(str)
- return str and str~="" and #str<64 and lpegmatch(valid,str) and true or false
-end
-actions["check metadata"]=function(data,filename,raw)
- local metadata=data.metadata
- for _,k in next,mainfields do
- if valid_fields[k] then
- local v=raw[k]
- if not metadata[k] then
- metadata[k]=v
+local function mergesteps_1(lookup,strict)
+ local steps=lookup.steps
+ local nofsteps=lookup.nofsteps
+ local first=steps[1]
+ if strict then
+ local f=first.format
+ for i=2,nofsteps do
+ if steps[i].format~=f then
+ report("not merging %a steps of %a lookup %a, different formats",nofsteps,lookup.type,lookup.name)
+ return 0
+ end
+ end
+ end
+ report("merging %a steps of %a lookup %a",nofsteps,lookup.type,lookup.name)
+ local target=first.coverage
+ for i=2,nofsteps do
+ for k,v in next,steps[i].coverage do
+ if not target[k] then
+ target[k]=v
+ end
+ end
+ end
+ lookup.nofsteps=1
+ lookup.merged=true
+ lookup.steps={ first }
+ return nofsteps-1
+end
+local function mergesteps_2(lookup,strict)
+ local steps=lookup.steps
+ local nofsteps=lookup.nofsteps
+ local first=steps[1]
+ if strict then
+ local f=first.format
+ for i=2,nofsteps do
+ if steps[i].format~=f then
+ report("not merging %a steps of %a lookup %a, different formats",nofsteps,lookup.type,lookup.name)
+ return 0
+ end
+ end
+ end
+ report("merging %a steps of %a lookup %a",nofsteps,lookup.type,lookup.name)
+ local target=first.coverage
+ for i=2,nofsteps do
+ for k,v in next,steps[i].coverage do
+ local tk=target[k]
+ if tk then
+ for k,v in next,v do
+ if not tk[k] then
+ tk[k]=v
+ end
+ end
+ else
+ target[k]=v
+ end
+ end
+ end
+ lookup.nofsteps=1
+ lookup.steps={ first }
+ return nofsteps-1
+end
+local function mergesteps_3(lookup,strict)
+ local steps=lookup.steps
+ local nofsteps=lookup.nofsteps
+ local first=steps[1]
+ report("merging %a steps of %a lookup %a",nofsteps,lookup.type,lookup.name)
+ local baseclasses={}
+ local coverage={}
+ local used={}
+ for i=1,nofsteps do
+ local offset=i*10
+ local step=steps[i]
+ for k,v in sortedhash(step.baseclasses) do
+ baseclasses[offset+k]=v
+ end
+ for k,v in next,step.coverage do
+ local tk=coverage[k]
+ if tk then
+ for k,v in next,v do
+ if not tk[k] then
+ tk[k]=v
+ local c=offset+v[1]
+ v[1]=c
+ if not used[c] then
+ used[c]=true
+ end
+ end
+ end
+ else
+ coverage[k]=v
+ local c=offset+v[1]
+ v[1]=c
+ if not used[c] then
+ used[c]=true
+ end
end
end
end
- local ttftables=metadata.ttf_tables
- if ttftables then
- for i=1,#ttftables do
- ttftables[i].data="deleted"
+ for k,v in next,baseclasses do
+ if not used[k] then
+ baseclasses[k]=nil
+ report("discarding not used baseclass %i",k)
end
end
- local state=metadata.validation_state
- local names=raw.names
- if state and table.contains(state,"bad_ps_fontname") then
- local function valid(what)
- if names then
- for i=1,#names do
- local list=names[i]
- local names=list.names
- if names then
- local name=names[what]
- if name and valid_ps_name(name) then
- return name
- end
- end
- end
+ first.baseclasses=baseclasses
+ first.coverage=coverage
+ lookup.nofsteps=1
+ lookup.steps={ first }
+ return nofsteps-1
+end
+local function nested(old,new)
+ for k,v in next,old do
+ if k=="ligature" then
+ if not new.ligature then
+ new.ligature=v
+ end
+ else
+ local n=new[k]
+ if n then
+ nested(v,n)
+ else
+ new[k]=v
end
end
- local function check(what)
- local oldname=metadata[what]
- if valid_ps_name(oldname) then
- report_otf("ignoring warning %a because %s %a is proper ASCII","bad_ps_fontname",what,oldname)
+ end
+end
+local function mergesteps_4(lookup)
+ local steps=lookup.steps
+ local nofsteps=lookup.nofsteps
+ local first=steps[1]
+ report("merging %a steps of %a lookup %a",nofsteps,lookup.type,lookup.name)
+ local target=first.coverage
+ for i=2,nofsteps do
+ for k,v in next,steps[i].coverage do
+ local tk=target[k]
+ if tk then
+ nested(v,tk)
else
- local newname=valid(what)
- if not newname then
- newname=formatters["bad-%s-%s"](what,file.nameonly(filename))
+ target[k]=v
+ end
+ end
+ end
+ lookup.nofsteps=1
+ lookup.steps={ first }
+ return nofsteps-1
+end
+local function checkkerns(lookup)
+ local steps=lookup.steps
+ local nofsteps=lookup.nofsteps
+ for i=1,nofsteps do
+ local step=steps[i]
+ if step.format=="pair" then
+ local coverage=step.coverage
+ local kerns=true
+ for g1,d1 in next,coverage do
+ if d1[1]~=0 or d1[2]~=0 or d1[4]~=0 then
+ kerns=false
+ break
end
- local warning=formatters["overloading %s from invalid ASCII name %a to %a"](what,oldname,newname)
- data.warnings[#data.warnings+1]=warning
- report_otf(warning)
- metadata[what]=newname
+ end
+ if kerns then
+ report("turning pairs of step %a of %a lookup %a into kerns",i,lookup.type,lookup.name)
+ for g1,d1 in next,coverage do
+ coverage[g1]=d1[3]
+ end
+ step.format="kern"
end
end
- check("fontname")
- check("fullname")
end
- if names then
- local psname=metadata.psname
- if not psname or psname=="" then
- for i=1,#names do
- local name=names[i]
- if lower(name.lang)=="english (us)" then
- local specification=name.names
- if specification then
- local postscriptname=specification.postscriptname
- if postscriptname then
- psname=postscriptname
+end
+local function checkpairs(lookup)
+ local steps=lookup.steps
+ local nofsteps=lookup.nofsteps
+ local kerned=0
+ for i=1,nofsteps do
+ local step=steps[i]
+ if step.format=="pair" then
+ local coverage=step.coverage
+ local kerns=true
+ for g1,d1 in next,coverage do
+ for g2,d2 in next,d1 do
+ if d2[2] then
+ kerns=false
+ break
+ else
+ local v=d2[1]
+ if v[1]~=0 or v[2]~=0 or v[4]~=0 then
+ kerns=false
+ break
end
end
end
- break
+ end
+ if kerns then
+ report("turning pairs of step %a of %a lookup %a into kerns",i,lookup.type,lookup.name)
+ for g1,d1 in next,coverage do
+ for g2,d2 in next,d1 do
+ d1[g2]=d2[1][3]
+ end
+ end
+ step.format="kern"
+ kerned=kerned+1
end
end
- if psname~=metadata.fontname then
- report_otf("fontname %a, fullname %a, psname %a",metadata.fontname,metadata.fullname,psname)
- end
- metadata.psname=psname
- end
- if state and table.contains(state,"bad_cmap_table") then
- report_otf("fontfile %a has bad cmap tables",filename)
end
+ return kerned
end
-actions["cleanup tables"]=function(data,filename,raw)
- local duplicates=data.resources.duplicates
- if duplicates then
- for k,v in next,duplicates do
- if #v==1 then
- duplicates[k]=v[1]
+function readers.compact(data)
+ if not data or data.compacted then
+ return
+ else
+ data.compacted=true
+ end
+ local resources=data.resources
+ local merged=0
+ local kerned=0
+ local allsteps=0
+ local function compact(what)
+ local lookups=resources[what]
+ if lookups then
+ for i=1,#lookups do
+ local lookup=lookups[i]
+ local nofsteps=lookup.nofsteps
+ allsteps=allsteps+nofsteps
+ if nofsteps>1 then
+ local kind=lookup.type
+ if kind=="gsub_single" or kind=="gsub_alternate" or kind=="gsub_multiple" then
+ merged=merged+mergesteps_1(lookup)
+ elseif kind=="gsub_ligature" then
+ merged=merged+mergesteps_4(lookup)
+ elseif kind=="gpos_single" then
+ merged=merged+mergesteps_1(lookup,true)
+ checkkerns(lookup)
+ elseif kind=="gpos_pair" then
+ merged=merged+mergesteps_2(lookup,true)
+ kerned=kerned+checkpairs(lookup)
+ elseif kind=="gpos_cursive" then
+ merged=merged+mergesteps_2(lookup)
+ elseif kind=="gpos_mark2mark" or kind=="gpos_mark2base" or kind=="gpos_mark2ligature" then
+ merged=merged+mergesteps_3(lookup)
+ end
+ end
end
+ else
+ report("no lookups in %a",what)
end
end
- data.resources.indices=nil
- data.resources.unicodes=nil
- data.helpers=nil
+ compact("sequences")
+ compact("sublookups")
+ if merged>0 then
+ report("%i steps of %i removed due to merging",merged,allsteps)
+ end
+ if kerned>0 then
+ report("%i steps of %i steps turned from pairs into kerns",kerned,allsteps)
+ end
end
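Besides merging the steps of multi-step lookups, the compaction above downgrades pair positioning to plain kerning whenever only the horizontal advance (slot 3 of the value record) is used. A reduced sketch of that test, assuming the same step/coverage layout that checkkerns handles above:

-- sketch: turn a "pair" step into a "kern" step when only the advance is set
local function sketch_checkkern(step)
  if step.format~="pair" then
    return false
  end
  for g1,d1 in next,step.coverage do
    if d1[1]~=0 or d1[2]~=0 or d1[4]~=0 then
      return false -- placement or height adjustment present, keep pair format
    end
  end
  for g1,d1 in next,step.coverage do
    step.coverage[g1]=d1[3]
  end
  step.format="kern"
  return true
end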
-actions["reorganize glyph lookups"]=function(data,filename,raw)
+function readers.expand(data)
+ if not data or data.expanded then
+ return
+ else
+ data.expanded=true
+ end
local resources=data.resources
- local unicodes=resources.unicodes
+ local sublookups=resources.sublookups
+ local sequences=resources.sequences
+ local markclasses=resources.markclasses
local descriptions=data.descriptions
- local splitter=data.helpers.tounicodelist
- local lookuptypes=resources.lookuptypes
- for unicode,description in next,descriptions do
- local lookups=description.glyph.lookups
- if lookups then
- for tag,lookuplist in next,lookups do
- for l=1,#lookuplist do
- local lookup=lookuplist[l]
- local specification=lookup.specification
- local lookuptype=lookup.type
- local lt=lookuptypes[tag]
- if not lt then
- lookuptypes[tag]=lookuptype
- elseif lt~=lookuptype then
- report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype)
- end
- if lookuptype=="ligature" then
- lookuplist[l]={ lpegmatch(splitter,specification.components) }
- elseif lookuptype=="alternate" then
- lookuplist[l]={ lpegmatch(splitter,specification.components) }
- elseif lookuptype=="substitution" then
- lookuplist[l]=unicodes[specification.variant]
- elseif lookuptype=="multiple" then
- lookuplist[l]={ lpegmatch(splitter,specification.components) }
- elseif lookuptype=="position" then
- lookuplist[l]={
- specification.x or 0,
- specification.y or 0,
- specification.h or 0,
- specification.v or 0
- }
- elseif lookuptype=="pair" then
- local one=specification.offsets[1]
- local two=specification.offsets[2]
- local paired=unicodes[specification.paired]
- if one then
- if two then
- lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 },{ two.x or 0,two.y or 0,two.h or 0,two.v or 0 } }
- else
- lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 } }
- end
- else
- if two then
- lookuplist[l]={ paired,{},{ two.x or 0,two.y or 0,two.h or 0,two.v or 0} }
- else
- lookuplist[l]={ paired }
- end
- end
- end
- end
+ if descriptions then
+ local defaultwidth=resources.defaultwidth or 0
+ local defaultheight=resources.defaultheight or 0
+ local defaultdepth=resources.defaultdepth or 0
+ local basename=trace_markwidth and file.basename(resources.filename)
+ for u,d in next,descriptions do
+ local bb=d.boundingbox
+ local wd=d.width
+ if not wd then
+ d.width=defaultwidth
+ elseif trace_markwidth and wd~=0 and d.class=="mark" then
+ report("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
end
- local slookups,mlookups
- for tag,lookuplist in next,lookups do
- if #lookuplist==1 then
- if slookups then
- slookups[tag]=lookuplist[1]
- else
- slookups={ [tag]=lookuplist[1] }
- end
+ if bb then
+ local ht=bb[4]
+ local dp=-bb[2]
+ if ht==0 or ht<0 then
else
- if mlookups then
- mlookups[tag]=lookuplist
- else
- mlookups={ [tag]=lookuplist }
- end
+ d.height=ht
+ end
+ if dp==0 or dp<0 then
+ else
+ d.depth=dp
end
- end
- if slookups then
- description.slookups=slookups
- end
- if mlookups then
- description.mlookups=mlookups
end
end
end
-end
-local zero={ 0,0 }
-actions["reorganize glyph anchors"]=function(data,filename,raw)
- local descriptions=data.descriptions
- for unicode,description in next,descriptions do
- local anchors=description.glyph.anchors
- if anchors then
- for class,data in next,anchors do
- if class=="baselig" then
- for tag,specification in next,data do
- local n=0
- for k,v in next,specification do
- if k>n then
- n=k
+ local function expandlookups(sequences)
+ if sequences then
+ for i=1,#sequences do
+ local sequence=sequences[i]
+ local steps=sequence.steps
+ if steps then
+ local kind=sequence.type
+ local markclass=sequence.markclass
+ if markclass then
+ if not markclasses then
+ report_warning("missing markclasses")
+ sequence.markclass=false
+ else
+ sequence.markclass=markclasses[markclass]
+ end
+ end
+ for i=1,sequence.nofsteps do
+ local step=steps[i]
+ local baseclasses=step.baseclasses
+ if baseclasses then
+ local coverage=step.coverage
+ for k,v in next,coverage do
+ v[1]=baseclasses[v[1]]
end
- local x,y=v.x,v.y
- if x or y then
- specification[k]={ x or 0,y or 0 }
- else
- specification[k]=zero
+ elseif kind=="gpos_cursive" then
+ local coverage=step.coverage
+ for k,v in next,coverage do
+ v[1]=coverage
end
end
- local t={}
- for i=1,n do
- t[i]=specification[i] or zero
- end
- data[tag]=t
- end
- else
- for tag,specification in next,data do
- local x,y=specification.x,specification.y
- if x or y then
- data[tag]={ x or 0,y or 0 }
- else
- data[tag]=zero
+ local rules=step.rules
+ if rules then
+ local rulehash={}
+ local rulesize=0
+ local coverage={}
+ local lookuptype=sequence.type
+ step.coverage=coverage
+ for nofrules=1,#rules do
+ local rule=rules[nofrules]
+ local current=rule.current
+ local before=rule.before
+ local after=rule.after
+ local replacements=rule.replacements or false
+ local sequence={}
+ local nofsequences=0
+ if before then
+ for n=1,#before do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=before[n]
+ end
+ end
+ local start=nofsequences+1
+ for n=1,#current do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=current[n]
+ end
+ local stop=nofsequences
+ if after then
+ for n=1,#after do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=after[n]
+ end
+ end
+ local lookups=rule.lookups or false
+ local subtype=nil
+ if lookups then
+ for k,v in next,lookups do
+ local lookup=sublookups[v]
+ if lookup then
+ lookups[k]=lookup
+ if not subtype then
+ subtype=lookup.type
+ end
+ else
+ end
+ end
+ end
+ if sequence[1] then
+ rulesize=rulesize+1
+ rulehash[rulesize]={
+ nofrules,
+ lookuptype,
+ sequence,
+ start,
+ stop,
+ lookups,
+ replacements,
+ subtype,
+ }
+ for unic in next,sequence[start] do
+ local cu=coverage[unic]
+ if not cu then
+ coverage[unic]=rulehash
+ end
+ end
+ end
+ end
end
end
end
end
- description.anchors=anchors
end
end
+ expandlookups(sequences)
+ expandlookups(sublookups)
end
-local bogusname=(P("uni")+P("u"))*R("AF","09")^4+(P("index")+P("glyph")+S("Ii")*P("dentity")*P(".")^0)*R("09")^1
-local uselessname=(1-bogusname)^0*bogusname
-actions["purge names"]=function(data,filename,raw)
- if purge_names then
- local n=0
- for u,d in next,data.descriptions do
- if lpegmatch(uselessname,d.name) then
- n=n+1
- d.name=nil
- end
- end
- if n>0 then
- report_otf("%s bogus names removed",n)
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otl']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local gmatch,find,match,lower,strip=string.gmatch,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring,unpack=type,next,tonumber,tostring,unpack
+local abs=math.abs
+local ioflush=io.flush
+local derivetable=table.derive
+local formatters=string.formatters
+local setmetatableindex=table.setmetatableindex
+local allocate=utilities.storage.allocate
+local registertracker=trackers.register
+local registerdirective=directives.register
+local starttiming=statistics.starttiming
+local stoptiming=statistics.stoptiming
+local elapsedtime=statistics.elapsedtime
+local findbinfile=resolvers.findbinfile
+local trace_loading=false registertracker("otf.loading",function(v) trace_loading=v end)
+local trace_features=false registertracker("otf.features",function(v) trace_features=v end)
+local trace_defining=false registertracker("fonts.defining",function(v) trace_defining=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+local fonts=fonts
+local otf=fonts.handlers.otf
+otf.version=3.016
+otf.cache=containers.define("fonts","otl",otf.version,true)
+local otfreaders=otf.readers
+local hashes=fonts.hashes
+local definers=fonts.definers
+local readers=fonts.readers
+local constructors=fonts.constructors
+local otffeatures=constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local enhancers=allocate()
+otf.enhancers=enhancers
+local patches={}
+enhancers.patches=patches
+local forceload=false
+local cleanup=0
+local syncspace=true
+local forcenotdef=false
+local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes
+local wildcard="*"
+local default="dflt"
+local formats=fonts.formats
+formats.otf="opentype"
+formats.ttf="truetype"
+formats.ttc="truetype"
+registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end)
+registerdirective("fonts.otf.loader.force",function(v) forceload=v end)
+registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end)
+registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end)
+local ordered_enhancers={
+ "check extra features",
+}
+local actions=allocate()
+local before=allocate()
+local after=allocate()
+patches.before=before
+patches.after=after
+local function enhance(name,data,filename,raw)
+ local enhancer=actions[name]
+ if enhancer then
+ if trace_loading then
+ report_otf("apply enhancement %a to file %a",name,filename)
+ ioflush()
end
+ enhancer(data,filename,raw)
+ else
end
end
-actions["compact lookups"]=function(data,filename,raw)
- if not compact_lookups then
- report_otf("not compacting")
- return
+function enhancers.apply(data,filename,raw)
+ local basename=file.basename(lower(filename))
+ if trace_loading then
+ report_otf("%s enhancing file %a","start",filename)
end
- local last=0
- local tags=table.setmetatableindex({},
- function(t,k)
- last=last+1
- t[k]=last
- return last
- end
- )
- local descriptions=data.descriptions
- local resources=data.resources
- for u,d in next,descriptions do
- local slookups=d.slookups
- if type(slookups)=="table" then
- local s={}
- for k,v in next,slookups do
- s[tags[k]]=v
- end
- d.slookups=s
- end
- local mlookups=d.mlookups
- if type(mlookups)=="table" then
- local m={}
- for k,v in next,mlookups do
- m[tags[k]]=v
- end
- d.mlookups=m
- end
- local kerns=d.kerns
- if type(kerns)=="table" then
- local t={}
- for k,v in next,kerns do
- t[tags[k]]=v
- end
- d.kerns=t
- end
- end
- local lookups=data.lookups
- if lookups then
- local l={}
- for k,v in next,lookups do
- local rules=v.rules
- if rules then
- for i=1,#rules do
- local l=rules[i].lookups
- if type(l)=="table" then
- for i=1,#l do
- l[i]=tags[l[i]]
- end
- end
+ ioflush()
+ for e=1,#ordered_enhancers do
+ local enhancer=ordered_enhancers[e]
+ local b=before[enhancer]
+ if b then
+ for pattern,action in next,b do
+ if find(basename,pattern) then
+ action(data,filename,raw)
end
end
- l[tags[k]]=v
end
- data.lookups=l
- end
- local lookups=resources.lookups
- if lookups then
- local l={}
- for k,v in next,lookups do
- local s=v.subtables
- if type(s)=="table" then
- for i=1,#s do
- s[i]=tags[s[i]]
+ enhance(enhancer,data,filename,raw)
+ local a=after[enhancer]
+ if a then
+ for pattern,action in next,a do
+ if find(basename,pattern) then
+ action(data,filename,raw)
end
end
- l[tags[k]]=v
end
- resources.lookups=l
+ ioflush()
end
- local sequences=resources.sequences
- if sequences then
- for i=1,#sequences do
- local s=sequences[i]
- local n=s.name
- if n then
- s.name=tags[n]
- end
- local t=s.subtables
- if type(t)=="table" then
- for i=1,#t do
- t[i]=tags[t[i]]
- end
- end
+ if trace_loading then
+ report_otf("%s enhancing file %a","stop",filename)
+ end
+ ioflush()
+end
+function patches.register(what,where,pattern,action)
+ local pw=patches[what]
+ if pw then
+ local ww=pw[where]
+ if ww then
+ ww[pattern]=action
+ else
+ pw[where]={ [pattern]=action}
end
end
- local lookuptypes=resources.lookuptypes
- if lookuptypes then
- local l={}
- for k,v in next,lookuptypes do
- l[tags[k]]=v
+end
+function patches.report(fmt,...)
+ if trace_loading then
+ report_otf("patching: %s",formatters[fmt](...))
+ end
+end
+function enhancers.register(what,action)
+ actions[what]=action
+end
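The patches table is the hook that macro packages use to adjust font data around an enhancer: an action registered under before or after a named enhancer runs for every font whose basename matches the given pattern. A hedged usage sketch (the pattern "somefont" and the tweak are made up; "check extra features" is the only entry in the ordered list above):

-- sketch: run an extra fix after the "check extra features" pass for a
-- hypothetical font; pattern and adjustment are illustrative only
otf.enhancers.patches.register("after","check extra features","somefont",
  function(data,filename,raw)
    otf.enhancers.patches.report("tweaking %s",filename)
    -- adjust the loaded data (e.g. data.metadata) here
  end)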
+function otf.load(filename,sub,featurefile)
+ local featurefile=nil
+ local base=file.basename(file.removesuffix(filename))
+ local name=file.removesuffix(base)
+ local attr=lfs.attributes(filename)
+ local size=attr and attr.size or 0
+ local time=attr and attr.modification or 0
+ if featurefile then
+ name=name.."@"..file.removesuffix(file.basename(featurefile))
+ end
+ if sub=="" then
+ sub=false
+ end
+ local hash=name
+ if sub then
+ hash=hash.."-"..sub
+ end
+ hash=containers.cleanname(hash)
+ local featurefiles
+ if featurefile then
+ featurefiles={}
+ for s in gmatch(featurefile,"[^,]+") do
+ local name=resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
+ if name=="" then
+ report_otf("loading error, no featurefile %a",s)
+ else
+ local attr=lfs.attributes(name)
+ featurefiles[#featurefiles+1]={
+ name=name,
+ size=attr and attr.size or 0,
+ time=attr and attr.modification or 0,
+ }
+ end
+ end
+ if #featurefiles==0 then
+ featurefiles=nil
end
- resources.lookuptypes=l
end
- local anchor_to_lookup=resources.anchor_to_lookup
- if anchor_to_lookup then
- for anchor,lookups in next,anchor_to_lookup do
- local l={}
- for lookup,value in next,lookups do
- l[tags[lookup]]=value
+ local data=containers.read(otf.cache,hash)
+ local reload=not data or data.size~=size or data.time~=time or data.tableversion~=otfreaders.tableversion
+ if forceload then
+ report_otf("forced reload of %a due to hard coded flag",filename)
+ reload=true
+ end
+ if reload then
+ report_otf("loading %a, hash %a",filename,hash)
+ starttiming(otfreaders)
+ data=otfreaders.loadfont(filename,sub or 1)
+ if data then
+ otfreaders.compact(data)
+ otfreaders.rehash(data,"unicodes")
+ otfreaders.addunicodetable(data)
+ otfreaders.extend(data)
+ otfreaders.pack(data)
+ report_otf("loading done")
+ report_otf("saving %a in cache",filename)
+ data=containers.write(otf.cache,hash,data)
+ if cleanup>1 then
+ collectgarbage("collect")
end
- anchor_to_lookup[anchor]=l
+ stoptiming(otfreaders)
+ if elapsedtime then
+ report_otf("loading, optimizing, packing and caching time %s",elapsedtime(otfreaders))
+ end
+ if cleanup>3 then
+ collectgarbage("collect")
+ end
+ data=containers.read(otf.cache,hash)
+ if cleanup>2 then
+ collectgarbage("collect")
+ end
+ else
+ data=nil
+ report_otf("loading failed due to read error")
end
end
- local lookup_to_anchor=resources.lookup_to_anchor
- if lookup_to_anchor then
- local l={}
- for lookup,value in next,lookup_to_anchor do
- l[tags[lookup]]=value
+ if data then
+ if trace_defining then
+ report_otf("loading from cache using hash %a",hash)
end
- resources.lookup_to_anchor=l
+ otfreaders.unpack(data)
+ otfreaders.expand(data)
+ otfreaders.addunicodetable(data)
+ enhancers.apply(data,filename,data)
+ constructors.addcoreunicodes(unicodes)
+ if applyruntimefixes then
+ applyruntimefixes(filename,data)
+ end
+ data.metadata.math=data.resources.mathconstants
end
- tags=table.swapped(tags)
- report_otf("%s lookup tags compacted",#tags)
- resources.lookuptags=tags
+ return data
end
function otf.setfeatures(tfmdata,features)
local okay=constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf)
@@ -9229,7 +15498,6 @@ end
local function copytotfm(data,cache_id)
if data then
local metadata=data.metadata
- local warnings=data.warnings
local resources=data.resources
local properties=derivetable(data.properties)
local descriptions=derivetable(data.descriptions)
@@ -9237,14 +15505,13 @@ local function copytotfm(data,cache_id)
local characters={}
local parameters={}
local mathparameters={}
- local pfminfo=metadata.pfminfo or {}
local resources=data.resources
local unicodes=resources.unicodes
local spaceunits=500
local spacer="space"
- local designsize=metadata.designsize or metadata.design_size or 100
- local minsize=metadata.minsize or metadata.design_range_bottom or designsize
- local maxsize=metadata.maxsize or metadata.design_range_top or designsize
+ local designsize=metadata.designsize or 100
+ local minsize=metadata.minsize or designsize
+ local maxsize=metadata.maxsize or designsize
local mathspecs=metadata.math
if designsize==0 then
designsize=100
@@ -9256,7 +15523,7 @@ local function copytotfm(data,cache_id)
mathparameters[name]=value
end
end
- for unicode,_ in next,data.descriptions do
+ for unicode in next,data.descriptions do
characters[unicode]={}
end
if mathspecs then
@@ -9272,8 +15539,8 @@ local function copytotfm(data,cache_id)
local c=character
for i=1,#variants do
local un=variants[i]
- c.next=un
- c=characters[un]
+ c.next=un
+ c=characters[un]
end
c.horiz_variants=parts
elseif parts then
@@ -9286,20 +15553,20 @@ local function copytotfm(data,cache_id)
local c=character
for i=1,#variants do
local un=variants[i]
- c.next=un
- c=characters[un]
+ c.next=un
+ c=characters[un]
end
c.vert_variants=parts
elseif parts then
character.vert_variants=parts
end
if italic and italic~=0 then
- character.italic=italic
+ character.italic=italic
end
if vitalic and vitalic~=0 then
character.vert_italic=vitalic
end
- local accent=m.accent
+ local accent=m.accent
if accent then
character.accent=accent
end
@@ -9313,18 +15580,20 @@ local function copytotfm(data,cache_id)
local filename=constructors.checkedfilename(resources)
local fontname=metadata.fontname
local fullname=metadata.fullname or fontname
- local psname=metadata.psname or fontname or fullname
- local units=metadata.units or metadata.units_per_em or 1000
+ local psname=fontname or fullname
+ local units=metadata.units or 1000
if units==0 then
units=1000
metadata.units=1000
report_otf("changing %a units to %a",0,units)
end
- local monospaced=metadata.monospaced or metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced")
- local charwidth=pfminfo.avgwidth
- local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight
+ local monospaced=metadata.monospaced
+ local charwidth=metadata.averagewidth
+ local charxheight=metadata.xheight
local italicangle=metadata.italicangle
+ local hasitalics=metadata.hasitalics
properties.monospaced=monospaced
+ properties.hasitalics=hasitalics
parameters.italicangle=italicangle
parameters.charwidth=charwidth
parameters.charxheight=charxheight
@@ -9354,7 +15623,7 @@ local function copytotfm(data,cache_id)
spaceunits=tonumber(spaceunits) or 500
parameters.slant=0
parameters.space=spaceunits
- parameters.space_stretch=units/2
+ parameters.space_stretch=1*units/2
parameters.space_shrink=1*units/3
parameters.x_height=2*units/5
parameters.quad=units
@@ -9387,26 +15656,18 @@ local function copytotfm(data,cache_id)
parameters.designsize=(designsize/10)*65536
parameters.minsize=(minsize/10)*65536
parameters.maxsize=(maxsize/10)*65536
- parameters.ascender=abs(metadata.ascender or metadata.ascent or 0)
- parameters.descender=abs(metadata.descender or metadata.descent or 0)
+ parameters.ascender=abs(metadata.ascender or 0)
+ parameters.descender=abs(metadata.descender or 0)
parameters.units=units
properties.space=spacer
properties.encodingbytes=2
- properties.format=data.format or otf_format(filename) or formats.otf
+ properties.format=data.format or formats.otf
properties.noglyphnames=true
properties.filename=filename
properties.fontname=fontname
properties.fullname=fullname
properties.psname=psname
properties.name=filename or fullname
- if warnings and #warnings>0 then
- report_otf("warnings for font: %s",filename)
- report_otf()
- for i=1,#warnings do
- report_otf(" %s",warnings[i])
- end
- report_otf()
- end
return {
characters=characters,
descriptions=descriptions,
@@ -9415,7 +15676,6 @@ local function copytotfm(data,cache_id)
resources=resources,
properties=properties,
goodies=goodies,
- warnings=warnings,
}
end
end
@@ -9425,38 +15685,13 @@ local function otftotfm(specification)
if not tfmdata then
local name=specification.name
local sub=specification.sub
+ local subindex=specification.subindex
local filename=specification.filename
local features=specification.features.normal
local rawdata=otf.load(filename,sub,features and features.featurefile)
if rawdata and next(rawdata) then
local descriptions=rawdata.descriptions
- local duplicates=rawdata.resources.duplicates
- if duplicates then
- local nofduplicates,nofduplicated=0,0
- for parent,list in next,duplicates do
- if type(list)=="table" then
- local n=#list
- for i=1,n do
- local unicode=list[i]
- if not descriptions[unicode] then
- descriptions[unicode]=descriptions[parent]
- nofduplicated=nofduplicated+1
- end
- end
- nofduplicates=nofduplicates+n
- else
- if not descriptions[list] then
- descriptions[list]=descriptions[parent]
- nofduplicated=nofduplicated+1
- end
- nofduplicates=nofduplicates+1
- end
- end
- if trace_otf and nofduplicated~=nofduplicates then
- report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates)
- end
- end
- rawdata.lookuphash={}
+ rawdata.lookuphash={}
tfmdata=copytotfm(rawdata,cache_id)
if tfmdata and next(tfmdata) then
local features=constructors.checkedfeatures("otf",features)
@@ -9508,33 +15743,129 @@ registerotffeature {
}
}
function otf.collectlookups(rawdata,kind,script,language)
- local sequences=rawdata.resources.sequences
- if sequences then
- local featuremap,featurelist={},{}
- for s=1,#sequences do
- local sequence=sequences[s]
- local features=sequence.features
- features=features and features[kind]
- features=features and (features[script] or features[default] or features[wildcard])
- features=features and (features[language] or features[default] or features[wildcard])
- if features then
- local subtables=sequence.subtables
- if subtables then
- for s=1,#subtables do
- local ss=subtables[s]
- if not featuremap[s] then
- featuremap[ss]=true
- featurelist[#featurelist+1]=ss
+ if not kind then
+ return
+ end
+ if not script then
+ script=default
+ end
+ if not language then
+ language=default
+ end
+ local lookupcache=rawdata.lookupcache
+ if not lookupcache then
+ lookupcache={}
+ rawdata.lookupcache=lookupcache
+ end
+ local kindlookup=lookupcache[kind]
+ if not kindlookup then
+ kindlookup={}
+ lookupcache[kind]=kindlookup
+ end
+ local scriptlookup=kindlookup[script]
+ if not scriptlookup then
+ scriptlookup={}
+ kindlookup[script]=scriptlookup
+ end
+ local languagelookup=scriptlookup[language]
+ if not languagelookup then
+ local sequences=rawdata.resources.sequences
+ local featuremap={}
+ local featurelist={}
+ if sequences then
+ for s=1,#sequences do
+ local sequence=sequences[s]
+ local features=sequence.features
+ if features then
+ features=features[kind]
+ if features then
+ features=features[script] or features[wildcard]
+ if features then
+ features=features[language] or features[wildcard]
+ if features then
+ if not featuremap[sequence] then
+ featuremap[sequence]=true
+ featurelist[#featurelist+1]=sequence
+ end
+ end
end
end
end
end
+ if #featurelist==0 then
+ featuremap,featurelist=false,false
+ end
+ else
+ featuremap,featurelist=false,false
end
- if #featurelist>0 then
- return featuremap,featurelist
+ languagelookup={ featuremap,featurelist }
+ scriptlookup[language]=languagelookup
+ end
+ return unpack(languagelookup)
+end
+local function getgsub(tfmdata,k,kind,value)
+ local shared=tfmdata.shared
+ local rawdata=shared and shared.rawdata
+ if rawdata then
+ local sequences=rawdata.resources.sequences
+ if sequences then
+ local properties=tfmdata.properties
+ local validlookups,lookuplist=otf.collectlookups(rawdata,kind,properties.script,properties.language)
+ if validlookups then
+ local choice=tonumber(value) or 1
+ for i=1,#lookuplist do
+ local lookup=lookuplist[i]
+ local steps=lookup.steps
+ local nofsteps=lookup.nofsteps
+ for i=1,nofsteps do
+ local coverage=steps[i].coverage
+ if coverage then
+ local found=coverage[k]
+ if found then
+ return found,lookup.type
+ end
+ end
+ end
+ end
+ end
end
end
- return nil,nil
+end
+otf.getgsub=getgsub
+function otf.getsubstitution(tfmdata,k,kind,value)
+ local found,kind=getgsub(tfmdata,k,kind)
+ if not found then
+ elseif kind=="gsub_single" then
+ return found
+ elseif kind=="gsub_alternate" then
+ local choice=tonumber(value) or 1
+ return found[choice] or found[1] or k
+ end
+ return k
+end
+otf.getalternate=otf.getsubstitution
+function otf.getmultiple(tfmdata,k,kind)
+ local found,kind=getgsub(tfmdata,k,kind)
+ if found and kind=="gsub_multiple" then
+ return found
+ end
+ return { k }
+end
+function otf.getkern(tfmdata,left,right,kind)
+ local kerns=getgsub(tfmdata,left,kind or "kern",true)
+ if kerns then
+ local found=kerns[right]
+ local kind=type(found)
+ if kind=="table" then
+ found=found[1][3]
+ elseif kind~="number" then
+ found=false
+ end
+ if found then
+ return found*tfmdata.parameters.factor
+ end
+ end
+ return 0
end
local function check_otf(forced,specification,suffix)
local name=specification.name
@@ -9562,7 +15893,6 @@ readers.opentype=opentypereader
function readers.otf (specification) return opentypereader(specification,"otf") end
function readers.ttf (specification) return opentypereader(specification,"ttf") end
function readers.ttc (specification) return opentypereader(specification,"ttf") end
-function readers.dfont(specification) return opentypereader(specification,"ttf") end
function otf.scriptandlanguage(tfmdata,attr)
local properties=tfmdata.properties
return properties.script or "dflt",properties.language or "dflt"
@@ -9571,110 +15901,55 @@ local function justset(coverage,unicode,replacement)
coverage[unicode]=replacement
end
otf.coverup={
- stepkey="subtables",
+ stepkey="steps",
actions={
+ chainsubstitution=justset,
+ chainposition=justset,
substitution=justset,
alternate=justset,
multiple=justset,
- ligature=justset,
kern=justset,
- chainsubstitution=justset,
- chainposition=justset,
+ pair=justset,
+ ligature=function(coverage,unicode,ligature)
+ local first=ligature[1]
+ local tree=coverage[first]
+ if not tree then
+ tree={}
+ coverage[first]=tree
+ end
+ for i=2,#ligature do
+ local l=ligature[i]
+ local t=tree[l]
+ if not t then
+ t={}
+ tree[l]=t
+ end
+ tree=t
+ end
+ tree.ligature=unicode
+ end,
},
- register=function(coverage,lookuptype,format,feature,n,descriptions,resources)
- local name=formatters["ctx_%s_%s_%s"](feature,lookuptype,n)
- if lookuptype=="kern" then
- resources.lookuptypes[name]="position"
- else
- resources.lookuptypes[name]=lookuptype
- end
- for u,c in next,coverage do
- local description=descriptions[u]
- local slookups=description.slookups
- if slookups then
- slookups[name]=c
- else
- description.slookups={ [name]=c }
- end
- end
- return name
+ register=function(coverage,featuretype,format)
+ return {
+ format=format,
+ coverage=coverage,
+ }
end
}
-local function getgsub(tfmdata,k,kind)
- local description=tfmdata.descriptions[k]
- if description then
- local slookups=description.slookups
- if slookups then
- local shared=tfmdata.shared
- local rawdata=shared and shared.rawdata
- if rawdata then
- local lookuptypes=rawdata.resources.lookuptypes
- if lookuptypes then
- local properties=tfmdata.properties
- local validlookups,lookuplist=otf.collectlookups(rawdata,kind,properties.script,properties.language)
- if validlookups then
- for l=1,#lookuplist do
- local lookup=lookuplist[l]
- local found=slookups[lookup]
- if found then
- return found,lookuptypes[lookup]
- end
- end
- end
- end
- end
- end
- end
-end
-otf.getgsub=getgsub
-function otf.getsubstitution(tfmdata,k,kind,value)
- local found,kind=getgsub(tfmdata,k,kind)
- if not found then
- elseif kind=="substitution" then
- return found
- elseif kind=="alternate" then
- local choice=tonumber(value) or 1
- return found[choice] or found[1] or k
- end
- return k
-end
-otf.getalternate=otf.getsubstitution
-function otf.getmultiple(tfmdata,k,kind)
- local found,kind=getgsub(tfmdata,k,kind)
- if found and kind=="multiple" then
- return found
- end
- return { k }
-end
-function otf.getkern(tfmdata,left,right,kind)
- local kerns=getgsub(tfmdata,left,kind or "kern",true)
- if kerns then
- local found=kerns[right]
- local kind=type(found)
- if kind=="table" then
- found=found[1][3]
- elseif kind~="number" then
- found=false
- end
- if found then
- return found*tfmdata.parameters.factor
- end
- end
- return 0
-end
end -- closure
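The rewritten collectlookups and getgsub above no longer consult per-glyph slookups keyed by lookup names; they walk the loader's sequences, where each sequence carries typed steps and each step a coverage table that maps an input unicode to its result. The fragment below is a minimal, self-contained restatement of that walk with made-up data; it is not the ConTeXt API, just an illustration of the layout.

-- minimal sketch of the sequences/steps/coverage layout (invented data)
local sequences={
 {
  type="gsub_single",
  nofsteps=1,
  steps={
   { coverage={ [0x0061]=0xE061 } }, -- 'a' mapped to a fictitious variant slot
  },
 },
}

-- first coverage hit wins, as in getgsub above
local function resolve(unicode)
 for s=1,#sequences do
  local sequence=sequences[s]
  local steps=sequence.steps
  for i=1,sequence.nofsteps do
   local found=steps[i].coverage[unicode]
   if found then
    return found,sequence.type
   end
  end
 end
end

print(resolve(0x0061)) -- 58465 gsub_single
print(resolve(0x0062)) -- nil (not covered)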
do -- begin closure to overcome local limits and interference
-if not modules then modules={} end modules ['font-otb']={
+if not modules then modules={} end modules ['font-oto']={
version=1.001,
comment="companion to font-ini.mkiv",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local concat=table.concat
+local concat,unpack=table.concat,table.unpack
+local insert,remove=table.insert,table.remove
local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
local type,next,tonumber,tostring,rawget=type,next,tonumber,tostring,rawget
local lpegmatch=lpeg.match
@@ -9684,7 +15959,6 @@ local trace_singles=false trackers.register("otf.singles",function(v) trace_sing
local trace_multiples=false trackers.register("otf.multiples",function(v) trace_multiples=v end)
local trace_alternatives=false trackers.register("otf.alternatives",function(v) trace_alternatives=v end)
local trace_ligatures=false trackers.register("otf.ligatures",function(v) trace_ligatures=v end)
-local trace_ligatures_detail=false trackers.register("otf.ligatures.detail",function(v) trace_ligatures_detail=v end)
local trace_kerns=false trackers.register("otf.kerns",function(v) trace_kerns=v end)
local trace_preparing=false trackers.register("otf.preparing",function(v) trace_preparing=v end)
local report_prepare=logs.reporter("fonts","otf prepare")
@@ -9723,48 +15997,36 @@ local function gref(descriptions,n)
return "<error in base mode tracing>"
end
end
-local function cref(feature,lookuptags,lookupname)
- if lookupname then
- return formatters["feature %a, lookup %a"](feature,lookuptags[lookupname])
- else
- return formatters["feature %a"](feature)
- end
+local function cref(feature,sequence)
+ return formatters["feature %a, type %a, chain lookup %a"](feature,sequence.type,sequence.name)
end
-local function report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,comment)
+local function report_alternate(feature,sequence,descriptions,unicode,replacement,value,comment)
report_prepare("%s: base alternate %s => %s (%S => %S)",
- cref(feature,lookuptags,lookupname),
+ cref(feature,sequence),
gref(descriptions,unicode),
replacement and gref(descriptions,replacement),
value,
comment)
end
-local function report_substitution(feature,lookuptags,lookupname,descriptions,unicode,substitution)
+local function report_substitution(feature,sequence,descriptions,unicode,substitution)
report_prepare("%s: base substitution %s => %S",
- cref(feature,lookuptags,lookupname),
+ cref(feature,sequence),
gref(descriptions,unicode),
gref(descriptions,substitution))
end
-local function report_ligature(feature,lookuptags,lookupname,descriptions,unicode,ligature)
+local function report_ligature(feature,sequence,descriptions,unicode,ligature)
report_prepare("%s: base ligature %s => %S",
- cref(feature,lookuptags,lookupname),
+ cref(feature,sequence),
gref(descriptions,ligature),
gref(descriptions,unicode))
end
-local function report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,value)
+local function report_kern(feature,sequence,descriptions,unicode,otherunicode,value)
report_prepare("%s: base kern %s + %s => %S",
- cref(feature,lookuptags,lookupname),
+ cref(feature,sequence),
gref(descriptions,unicode),
gref(descriptions,otherunicode),
value)
end
-local basemethods={}
-local basemethod="<unset>"
-local function applybasemethod(what,...)
- local m=basemethods[basemethod][what]
- if m then
- return m(...)
- end
-end
local basehash,basehashes,applied={},1,{}
local function registerbasehash(tfmdata)
local properties=tfmdata.properties
@@ -9782,239 +16044,6 @@ end
local function registerbasefeature(feature,value)
applied[#applied+1]=feature.."="..tostring(value)
end
-local trace=false
-local function finalize_ligatures(tfmdata,ligatures)
- local nofligatures=#ligatures
- if nofligatures>0 then
- local characters=tfmdata.characters
- local descriptions=tfmdata.descriptions
- local resources=tfmdata.resources
- local unicodes=resources.unicodes
- local private=resources.private
- local alldone=false
- while not alldone do
- local done=0
- for i=1,nofligatures do
- local ligature=ligatures[i]
- if ligature then
- local unicode,lookupdata=ligature[1],ligature[2]
- if trace_ligatures_detail then
- report_prepare("building % a into %a",lookupdata,unicode)
- end
- local size=#lookupdata
- local firstcode=lookupdata[1]
- local firstdata=characters[firstcode]
- local okay=false
- if firstdata then
- local firstname="ctx_"..firstcode
- for i=1,size-1 do
- local firstdata=characters[firstcode]
- if not firstdata then
- firstcode=private
- if trace_ligatures_detail then
- report_prepare("defining %a as %a",firstname,firstcode)
- end
- unicodes[firstname]=firstcode
- firstdata={ intermediate=true,ligatures={} }
- characters[firstcode]=firstdata
- descriptions[firstcode]={ name=firstname }
- private=private+1
- end
- local target
- local secondcode=lookupdata[i+1]
- local secondname=firstname.."_"..secondcode
- if i==size-1 then
- target=unicode
- if not rawget(unicodes,secondname) then
- unicodes[secondname]=unicode
- end
- okay=true
- else
- target=rawget(unicodes,secondname)
- if not target then
- break
- end
- end
- if trace_ligatures_detail then
- report_prepare("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target)
- end
- local firstligs=firstdata.ligatures
- if firstligs then
- firstligs[secondcode]={ char=target }
- else
- firstdata.ligatures={ [secondcode]={ char=target } }
- end
- firstcode=target
- firstname=secondname
- end
- elseif trace_ligatures_detail then
- report_prepare("no glyph (%a,%a) for building %a",firstname,firstcode,target)
- end
- if okay then
- ligatures[i]=false
- done=done+1
- end
- end
- end
- alldone=done==0
- end
- if trace_ligatures_detail then
- for k,v in table.sortedhash(characters) do
- if v.ligatures then
- table.print(v,k)
- end
- end
- end
- resources.private=private
- return true
- end
-end
-local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
- local characters=tfmdata.characters
- local descriptions=tfmdata.descriptions
- local resources=tfmdata.resources
- local properties=tfmdata.properties
- local changed=tfmdata.changed
- local lookuphash=resources.lookuphash
- local lookuptypes=resources.lookuptypes
- local lookuptags=resources.lookuptags
- local ligatures={}
- local alternate=tonumber(value) or true and 1
- local defaultalt=otf.defaultbasealternate
- local trace_singles=trace_baseinit and trace_singles
- local trace_alternatives=trace_baseinit and trace_alternatives
- local trace_ligatures=trace_baseinit and trace_ligatures
- local actions={
- substitution=function(lookupdata,lookuptags,lookupname,description,unicode)
- if trace_singles then
- report_substitution(feature,lookuptags,lookupname,descriptions,unicode,lookupdata)
- end
- changed[unicode]=lookupdata
- end,
- alternate=function(lookupdata,lookuptags,lookupname,description,unicode)
- local replacement=lookupdata[alternate]
- if replacement then
- changed[unicode]=replacement
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal")
- end
- elseif defaultalt=="first" then
- replacement=lookupdata[1]
- changed[unicode]=replacement
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- elseif defaultalt=="last" then
- replacement=lookupdata[#data]
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- else
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown")
- end
- end
- end,
- ligature=function(lookupdata,lookuptags,lookupname,description,unicode)
- if trace_ligatures then
- report_ligature(feature,lookuptags,lookupname,descriptions,unicode,lookupdata)
- end
- ligatures[#ligatures+1]={ unicode,lookupdata }
- end,
- }
- for unicode,character in next,characters do
- local description=descriptions[unicode]
- local lookups=description.slookups
- if lookups then
- for l=1,#lookuplist do
- local lookupname=lookuplist[l]
- local lookupdata=lookups[lookupname]
- if lookupdata then
- local lookuptype=lookuptypes[lookupname]
- local action=actions[lookuptype]
- if action then
- action(lookupdata,lookuptags,lookupname,description,unicode)
- end
- end
- end
- end
- local lookups=description.mlookups
- if lookups then
- for l=1,#lookuplist do
- local lookupname=lookuplist[l]
- local lookuplist=lookups[lookupname]
- if lookuplist then
- local lookuptype=lookuptypes[lookupname]
- local action=actions[lookuptype]
- if action then
- for i=1,#lookuplist do
- action(lookuplist[i],lookuptags,lookupname,description,unicode)
- end
- end
- end
- end
- end
- end
- properties.hasligatures=finalize_ligatures(tfmdata,ligatures)
-end
-local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
- local characters=tfmdata.characters
- local descriptions=tfmdata.descriptions
- local resources=tfmdata.resources
- local properties=tfmdata.properties
- local lookuptags=resources.lookuptags
- local sharedkerns={}
- local traceindeed=trace_baseinit and trace_kerns
- local haskerns=false
- for unicode,character in next,characters do
- local description=descriptions[unicode]
- local rawkerns=description.kerns
- if rawkerns then
- local s=sharedkerns[rawkerns]
- if s==false then
- elseif s then
- character.kerns=s
- else
- local newkerns=character.kerns
- local done=false
- for l=1,#lookuplist do
- local lookup=lookuplist[l]
- local kerns=rawkerns[lookup]
- if kerns then
- for otherunicode,value in next,kerns do
- if value==0 then
- elseif not newkerns then
- newkerns={ [otherunicode]=value }
- done=true
- if traceindeed then
- report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value)
- end
- elseif not newkerns[otherunicode] then
- newkerns[otherunicode]=value
- done=true
- if traceindeed then
- report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value)
- end
- end
- end
- end
- end
- if done then
- sharedkerns[rawkerns]=newkerns
- character.kerns=newkerns
- haskerns=true
- else
- sharedkerns[rawkerns]=false
- end
- end
- end
- end
- properties.haskerns=haskerns
-end
-basemethods.independent={
- preparesubstitutions=preparesubstitutions,
- preparepositionings=preparepositionings,
-}
local function makefake(tfmdata,name,present)
local resources=tfmdata.resources
local private=resources.private
@@ -10035,13 +16064,13 @@ local function make_1(present,tree,name)
end
end
end
-local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookuptags,lookupname)
+local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done)
for k,v in next,tree do
if k=="ligature" then
local character=characters[preceding]
if not character then
if trace_baseinit then
- report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookuptags[lookupname],v,preceding)
+ report_prepare("weird ligature in lookup %a, current %C, preceding %C",name,v,preceding)
end
character=makefake(tfmdata,name,present)
end
@@ -10052,9 +16081,9 @@ local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,don
character.ligatures={ [unicode]={ char=v } }
end
if done then
- local d=done[lookupname]
+ local d=done[name]
if not d then
- done[lookupname]={ "dummy",v }
+ done[name]={ "dummy",v }
else
d[#d+1]=v
end
@@ -10062,7 +16091,7 @@ local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,don
else
local code=present[name] or unicode
local name=name.."_"..k
- make_2(present,tfmdata,characters,v,name,code,k,done,lookuptags,lookupname)
+ make_2(present,tfmdata,characters,v,name,code,k,done)
end
end
end
@@ -10071,52 +16100,63 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
local descriptions=tfmdata.descriptions
local resources=tfmdata.resources
local changed=tfmdata.changed
- local lookuphash=resources.lookuphash
- local lookuptypes=resources.lookuptypes
- local lookuptags=resources.lookuptags
local ligatures={}
local alternate=tonumber(value) or true and 1
local defaultalt=otf.defaultbasealternate
local trace_singles=trace_baseinit and trace_singles
local trace_alternatives=trace_baseinit and trace_alternatives
local trace_ligatures=trace_baseinit and trace_ligatures
- for l=1,#lookuplist do
- local lookupname=lookuplist[l]
- local lookupdata=lookuphash[lookupname]
- local lookuptype=lookuptypes[lookupname]
- for unicode,data in next,lookupdata do
- if lookuptype=="substitution" then
- if trace_singles then
- report_substitution(feature,lookuptags,lookupname,descriptions,unicode,data)
- end
- changed[unicode]=data
- elseif lookuptype=="alternate" then
- local replacement=data[alternate]
- if replacement then
- changed[unicode]=replacement
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal")
- end
- elseif defaultalt=="first" then
- replacement=data[1]
- changed[unicode]=replacement
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- elseif defaultalt=="last" then
- replacement=data[#data]
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ for i=1,#lookuplist do
+ local sequence=lookuplist[i]
+ local steps=sequence.steps
+ local kind=sequence.type
+ if kind=="gsub_single" then
+ for i=1,#steps do
+ for unicode,data in next,steps[i].coverage do
+ if not changed[unicode] then
+ if trace_singles then
+ report_substitution(feature,sequence,descriptions,unicode,data)
+ end
+ changed[unicode]=data
end
- else
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ elseif kind=="gsub_alternate" then
+ for i=1,#steps do
+ for unicode,data in next,steps[i].coverage do
+ if not changed[unicode] then
+ local replacement=data[alternate]
+ if replacement then
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,sequence,descriptions,unicode,replacement,value,"normal")
+ end
+ elseif defaultalt=="first" then
+ replacement=data[1]
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,sequence,descriptions,unicode,replacement,value,defaultalt)
+ end
+ elseif defaultalt=="last" then
+ replacement=data[#data]
+ if trace_alternatives then
+ report_alternate(feature,sequence,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,sequence,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
end
end
- elseif lookuptype=="ligature" then
- ligatures[#ligatures+1]={ unicode,data,lookupname }
- if trace_ligatures then
- report_ligature(feature,lookuptags,lookupname,descriptions,unicode,data)
+ end
+ elseif kind=="gsub_ligature" then
+ for i=1,#steps do
+ for unicode,data in next,steps[i].coverage do
+ ligatures[#ligatures+1]={ unicode,data,"" }
+ if trace_ligatures then
+ report_ligature(feature,sequence,descriptions,unicode,data)
+ end
end
end
end
@@ -10134,7 +16174,7 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
for i=1,nofligatures do
local ligature=ligatures[i]
local unicode,tree,lookupname=ligature[1],ligature[2],ligature[3]
- make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookuptags,lookupname)
+ make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,sequence)
end
end
end
@@ -10143,30 +16183,62 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist
local descriptions=tfmdata.descriptions
local resources=tfmdata.resources
local properties=tfmdata.properties
- local lookuphash=resources.lookuphash
- local lookuptags=resources.lookuptags
local traceindeed=trace_baseinit and trace_kerns
- for l=1,#lookuplist do
- local lookupname=lookuplist[l]
- local lookupdata=lookuphash[lookupname]
- for unicode,data in next,lookupdata do
- local character=characters[unicode]
- local kerns=character.kerns
- if not kerns then
- kerns={}
- character.kerns=kerns
- end
- if traceindeed then
- for otherunicode,kern in next,data do
- if not kerns[otherunicode] and kern~=0 then
- kerns[otherunicode]=kern
- report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,kern)
+ for i=1,#lookuplist do
+ local sequence=lookuplist[i]
+ local steps=sequence.steps
+ local kind=sequence.type
+ local format=sequence.format
+ if kind=="gpos_pair" then
+ for i=1,#steps do
+ local step=steps[i]
+ if step.format=="kern" then
+ for unicode,data in next,steps[i].coverage do
+ local character=characters[unicode]
+ local kerns=character.kerns
+ if not kerns then
+ kerns={}
+ character.kerns=kerns
+ end
+ if traceindeed then
+ for otherunicode,kern in next,data do
+ if not kerns[otherunicode] and kern~=0 then
+ kerns[otherunicode]=kern
+ report_kern(feature,sequence,descriptions,unicode,otherunicode,kern)
+ end
+ end
+ else
+ for otherunicode,kern in next,data do
+ if not kerns[otherunicode] and kern~=0 then
+ kerns[otherunicode]=kern
+ end
+ end
+ end
end
- end
- else
- for otherunicode,kern in next,data do
- if not kerns[otherunicode] and kern~=0 then
- kerns[otherunicode]=kern
+ else
+ for unicode,data in next,steps[i].coverage do
+ local character=characters[unicode]
+ local kerns=character.kerns
+ for otherunicode,kern in next,data do
+ if not kern[2] and not (kerns and kerns[otherunicode]) then
+ local kern=kern[1]
+ if kern[1]~=0 or kern[2]~=0 or kern[4]~=0 then
+ else
+ kern=kern[3]
+ if kern~=0 then
+ if kerns then
+ kerns[otherunicode]=kern
+ else
+ kerns={ [otherunicode]=kern }
+ character.kerns=kerns
+ end
+ if traceindeed then
+ report_kern(feature,sequence,descriptions,unicode,otherunicode,kern)
+ end
+ end
+ end
+ end
+ end
end
end
end
@@ -10174,28 +16246,22 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist
end
end
local function initializehashes(tfmdata)
- nodeinitializers.features(tfmdata)
end
-basemethods.shared={
- initializehashes=initializehashes,
- preparesubstitutions=preparesubstitutions,
- preparepositionings=preparepositionings,
-}
-basemethod="independent"
local function featuresinitializer(tfmdata,value)
if true then
local starttime=trace_preparing and os.clock()
local features=tfmdata.shared.features
local fullname=tfmdata.properties.fullname or "?"
if features then
- applybasemethod("initializehashes",tfmdata)
+ initializehashes(tfmdata)
local collectlookups=otf.collectlookups
local rawdata=tfmdata.shared.rawdata
local properties=tfmdata.properties
- local script=properties.script
- local language=properties.language
- local basesubstitutions=rawdata.resources.features.gsub
- local basepositionings=rawdata.resources.features.gpos
+ local script=properties.script
+ local language=properties.language
+ local rawfeatures=rawdata.resources.features
+ local basesubstitutions=rawfeatures and rawfeatures.gsub
+ local basepositionings=rawfeatures and rawfeatures.gpos
if basesubstitutions or basepositionings then
local sequences=tfmdata.resources.sequences
for s=1,#sequences do
@@ -10214,13 +16280,13 @@ local function featuresinitializer(tfmdata,value)
if trace_preparing then
report_prepare("filtering base %s feature %a for %a with value %a","sub",feature,fullname,value)
end
- applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
+ preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
registerbasefeature(feature,value)
elseif basepositionings and basepositionings[feature] then
if trace_preparing then
report_prepare("filtering base %a feature %a for %a with value %a","pos",feature,fullname,value)
end
- applybasemethod("preparepositionings",tfmdata,feature,value,validlookups,lookuplist)
+ preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
registerbasefeature(feature,value)
end
end
@@ -10244,17 +16310,12 @@ registerotffeature {
base=featuresinitializer,
}
}
-directives.register("fonts.otf.loader.basemethod",function(v)
- if basemethods[v] then
- basemethod=v
- end
-end)
end -- closure
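In base mode the gpos_pair handling above flattens a step in "kern" format into plain character.kerns tables: every non-zero pair value is copied to the first glyph unless that pair was already set. The snippet below replays that copy on invented characters and coverage; names and values are examples only.

-- sketch of flattening a "kern" format step into character.kerns (invented data)
local characters={
 [0x0041]={}, -- 'A'
 [0x0056]={}, -- 'V'
}
local step={
 format="kern",
 coverage={
  [0x0041]={ [0x0056]=-80,[0x0041]=0 }, -- AV kerns by -80, AA is zero and skipped
 },
}
for unicode,data in next,step.coverage do
 local character=characters[unicode]
 local kerns=character.kerns
 if not kerns then
  kerns={}
  character.kerns=kerns
 end
 for otherunicode,kern in next,data do
  if kern~=0 and not kerns[otherunicode] then
   kerns[otherunicode]=kern
  end
 end
end
print(characters[0x0041].kerns[0x0056]) -- -80
print(characters[0x0041].kerns[0x0041]) -- nil: zero kerns are not stored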
do -- begin closure to overcome local limits and interference
-if not modules then modules={} end modules ['font-inj']={
+if not modules then modules={} end modules ['font-otj']={
version=1.001,
comment="companion to font-lib.mkiv",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -10265,17 +16326,30 @@ if not nodes.properties then return end
local next,rawget=next,rawget
local utfchar=utf.char
local fastcopy=table.fastcopy
-local trace_injections=false trackers.register("fonts.injections",function(v) trace_injections=v end)
+local registertracker=trackers.register
+local trace_injections=false registertracker("fonts.injections",function(v) trace_injections=v end)
+local trace_marks=false registertracker("fonts.injections.marks",function(v) trace_marks=v end)
+local trace_cursive=false registertracker("fonts.injections.cursive",function(v) trace_cursive=v end)
+local trace_spaces=false registertracker("otf.spaces",function(v) trace_spaces=v end)
+local use_advance=false directives.register("fonts.injections.advance",function(v) use_advance=v end)
local report_injections=logs.reporter("fonts","injections")
+local report_spaces=logs.reporter("fonts","spaces")
local attributes,nodes,node=attributes,nodes,node
fonts=fonts
-local fontdata=fonts.hashes.identifiers
+local hashes=fonts.hashes
+local fontdata=hashes.identifiers
+local parameters=fonts.hashes.parameters
+local resources=fonts.hashes.resources
nodes.injections=nodes.injections or {}
local injections=nodes.injections
+local tracers=nodes.tracers
+local setcolor=tracers and tracers.colors.set
+local resetcolor=tracers and tracers.colors.reset
local nodecodes=nodes.nodecodes
local glyph_code=nodecodes.glyph
local disc_code=nodecodes.disc
local kern_code=nodecodes.kern
+local glue_code=nodecodes.glue
local nuts=nodes.nuts
local nodepool=nuts.pool
local newkern=nodepool.kern
@@ -10289,7 +16363,12 @@ local getid=nuts.getid
local getfont=nuts.getfont
local getsubtype=nuts.getsubtype
local getchar=nuts.getchar
+local getboth=nuts.getboth
+local ischar=nuts.is_char
+local getdisc=nuts.getdisc
+local setdisc=nuts.setdisc
local traverse_id=nuts.traverse_id
+local traverse_char=nuts.traverse_char
local insert_node_before=nuts.insert_before
local insert_node_after=nuts.insert_after
local find_tail=nuts.tail
@@ -10324,7 +16403,7 @@ function injections.copy(target,source)
local sp=rawget(properties,source)
if sp then
local tp=rawget(properties,target)
- local si=rawget(sp,"injections")
+ local si=sp.injections
if si then
si=fastcopy(si)
if tp then
@@ -10351,7 +16430,7 @@ end
function injections.setligaindex(n,index)
local p=rawget(properties,n)
if p then
- local i=rawget(p,"injections")
+ local i=p.injections
if i then
i.ligaindex=index
else
@@ -10370,7 +16449,7 @@ end
function injections.getligaindex(n,default)
local p=rawget(properties,n)
if p then
- local i=rawget(p,"injections")
+ local i=p.injections
if i then
return i.ligaindex or default
end
@@ -10388,9 +16467,12 @@ function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmne
else
dx=dx-ws
end
+ if dx==0 then
+ dx=0
+ end
local p=rawget(properties,start)
if p then
- local i=rawget(p,"injections")
+ local i=p.injections
if i then
i.cursiveanchor=true
else
@@ -10407,7 +16489,7 @@ function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmne
end
local p=rawget(properties,nxt)
if p then
- local i=rawget(p,"injections")
+ local i=p.injections
if i then
i.cursivex=dx
i.cursivey=dy
@@ -10525,7 +16607,7 @@ function injections.setmark(start,base,factor,rlmode,ba,ma,tfmbase,mkmk)
end
local p=rawget(properties,start)
if p then
- local i=rawget(p,"injections")
+ local i=p.injections
if i then
if i.markmark then
else
@@ -10629,10 +16711,9 @@ local function trace(head,where)
show(n,"preinjections",false,"<")
show(n,"postinjections",false,">")
show(n,"replaceinjections",false,"=")
+ show(n,"emptyinjections",false,"*")
elseif id==disc_code then
- local pre=getfield(n,"pre")
- local post=getfield(n,"post")
- local replace=getfield(n,"replace")
+ local pre,post,replace=getdisc(n)
if pre then
showsub(pre,"preinjections","pre")
end
@@ -10642,6 +16723,7 @@ local function trace(head,where)
if replace then
showsub(replace,"replaceinjections","replace")
end
+ show(n,"emptyinjections",false,"*")
end
n=getnext(n)
end
@@ -10666,421 +16748,372 @@ local function show_result(head)
current=getnext(current)
end
end
-local function collect_glyphs(head,offsets)
- local glyphs,glyphi,nofglyphs={},{},0
- local marks,marki,nofmarks={},{},0
- local nf,tm=nil,nil
- local n=head
- local function identify(n,what)
- local f=getfont(n)
- if f~=nf then
- nf=f
- tm=fontdata[nf].resources
- if tm then
- tm=tm.marks
- end
- end
- if tm and tm[getchar(n)] then
- nofmarks=nofmarks+1
- marks[nofmarks]=n
- marki[nofmarks]="injections"
- else
- nofglyphs=nofglyphs+1
- glyphs[nofglyphs]=n
- glyphi[nofglyphs]=what
- end
- if offsets then
- local p=rawget(properties,n)
- if p then
- local i=rawget(p,what)
- if i then
- local yoffset=i.yoffset
- if yoffset and yoffset~=0 then
- setfield(n,"yoffset",yoffset)
- end
- end
- end
- end
+local function inject_kerns_only(head,where)
+ head=tonut(head)
+ if trace_injections then
+ trace(head,"kerns")
end
- while n do
- local id=getid(n)
+ local current=head
+ local prev=nil
+ local next=nil
+ local prevdisc=nil
+ local prevglyph=nil
+ local pre=nil
+ local post=nil
+ local replace=nil
+ local pretail=nil
+ local posttail=nil
+ local replacetail=nil
+ while current do
+ local id=getid(current)
+ local next=getnext(current)
if id==glyph_code then
- identify(n,"injections")
- elseif id==disc_code then
- local d=getfield(n,"pre")
- if d then
- for n in traverse_id(glyph_code,d) do
- if getsubtype(n)<256 then
- identify(n,"preinjections")
- end
- end
- end
- local d=getfield(n,"post")
- if d then
- for n in traverse_id(glyph_code,d) do
- if getsubtype(n)<256 then
- identify(n,"postinjections")
- end
- end
- end
- local d=getfield(n,"replace")
- if d then
- for n in traverse_id(glyph_code,d) do
- if getsubtype(n)<256 then
- identify(n,"replaceinjections")
- end
- end
- end
- end
- n=getnext(n)
- end
- return glyphs,glyphi,nofglyphs,marks,marki,nofmarks
-end
-local function inject_marks(marks,marki,nofmarks)
- for i=1,nofmarks do
- local n=marks[i]
- local pn=rawget(properties,n)
- if pn then
- local ni=marki[i]
- local pn=rawget(pn,ni)
- if pn then
- local p=pn.markbasenode
+ if getsubtype(current)<256 then
+ local p=rawget(properties,current)
if p then
- local px=getfield(p,"xoffset")
- local ox=0
- local rightkern=nil
- local pp=rawget(properties,p)
- if pp then
- pp=rawget(pp,ni)
- if pp then
- rightkern=pp.rightkern
+ local i=p.injections
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ if use_advance then
+ setfield(current,"xoffset",leftkern)
+ setfield(current,"xadvance",leftkern)
+ else
+ insert_node_before(head,current,newkern(leftkern))
+ end
end
end
- if rightkern then
- if pn.markdir<0 then
- ox=px-pn.markx-rightkern
+ if prevdisc then
+ local done=false
+ if post then
+ local i=p.postinjections
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ if use_advance then
+ setfield(post,"xadvance",leftkern)
+ else
+ insert_node_after(post,posttail,newkern(leftkern))
+ done=true
+ end
+ end
+ end
+ end
+ if replace then
+ local i=p.replaceinjections
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ if use_advance then
+ setfield(replace,"xadvance",leftkern)
+ else
+ insert_node_after(replace,replacetail,newkern(leftkern))
+ done=true
+ end
+ end
+ end
else
-
-
- if false then
- local leftkern=pp.leftkern
- if leftkern then
- ox=px-pn.markx-leftkern
- else
- ox=px-pn.markx
+ local i=p.emptyinjections
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ setfield(prev,"replace",newkern(leftkern))
end
- else
- ox=px-pn.markx
end
end
- else
- ox=px-pn.markx
- local wn=getfield(n,"width")
- if wn~=0 then
- pn.leftkern=-wn/2
- pn.rightkern=-wn/2
+ if done then
+ setdisc(prevdisc,pre,post,replace)
end
end
- setfield(n,"xoffset",ox)
- local py=getfield(p,"yoffset")
- local oy=getfield(n,"yoffset")+py+pn.marky
- setfield(n,"yoffset",oy)
- else
end
end
- end
- end
-end
-local function inject_cursives(glyphs,glyphi,nofglyphs)
- local cursiveanchor,lastanchor=nil,nil
- local minc,maxc,last=0,0,nil
- for i=1,nofglyphs do
- local n=glyphs[i]
- local pn=rawget(properties,n)
- if pn then
- pn=rawget(pn,glyphi[i])
- end
- if pn then
- local cursivex=pn.cursivex
- if cursivex then
- if cursiveanchor then
- if cursivex~=0 then
- pn.leftkern=(pn.leftkern or 0)+cursivex
- end
- if lastanchor then
- if maxc==0 then
- minc=lastanchor
+ prevdisc=nil
+ prevglyph=current
+ elseif id==disc_code then
+ pre,post,replace,pretail,posttail,replacetail=getdisc(current,true)
+ local done=false
+ if pre then
+ for n in traverse_char(pre) do
+ local p=rawget(properties,n)
+ if p then
+ local i=p.injections or p.preinjections
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ if use_advance then
+ setfield(pre,"xoffset",leftkern)
+ setfield(pre,"xadvance",leftkern)
+ else
+ pre=insert_node_before(pre,n,newkern(leftkern))
+ done=true
+ end
+ end
end
- maxc=lastanchor
- properties[cursiveanchor].cursivedy=pn.cursivey
end
- last=n
- else
- maxc=0
end
- elseif maxc>0 then
- local ny=getfield(n,"yoffset")
- for i=maxc,minc,-1 do
- local ti=glyphs[i]
- ny=ny+properties[ti].cursivedy
- setfield(ti,"yoffset",ny)
+ end
+ if post then
+ for n in traverse_char(post) do
+ local p=rawget(properties,n)
+ if p then
+ local i=p.injections or p.postinjections
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ if use_advance then
+ setfield(post,"xoffset",leftkern)
+ setfield(post,"xadvance",leftkern)
+ else
+ post=insert_node_before(post,n,newkern(leftkern))
+ done=true
+ end
+ end
+ end
+ end
end
- maxc=0
end
- if pn.cursiveanchor then
- cursiveanchor=n
- lastanchor=i
- else
- cursiveanchor=nil
- lastanchor=nil
- if maxc>0 then
- local ny=getfield(n,"yoffset")
- for i=maxc,minc,-1 do
- local ti=glyphs[i]
- ny=ny+properties[ti].cursivedy
- setfield(ti,"yoffset",ny)
+ if replace then
+ for n in traverse_char(replace) do
+ local p=rawget(properties,n)
+ if p then
+ local i=p.injections or p.replaceinjections
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ if use_advance then
+ setfield(replace,"xoffset",leftkern)
+ setfield(replace,"xadvance",leftkern)
+ else
+ replace=insert_node_before(replace,n,newkern(leftkern))
+ done=true
+ end
+ end
+ end
end
- maxc=0
end
end
- elseif maxc>0 then
- local ny=getfield(n,"yoffset")
- for i=maxc,minc,-1 do
- local ti=glyphs[i]
- ny=ny+properties[ti].cursivedy
- setfield(ti,"yoffset",getfield(ti,"yoffset")+ny)
+ if done then
+ setdisc(current,pre,post,replace)
end
- maxc=0
- cursiveanchor=nil
- lastanchor=nil
- end
- end
- if last and maxc>0 then
- local ny=getfield(last,"yoffset")
- for i=maxc,minc,-1 do
- local ti=glyphs[i]
- ny=ny+properties[ti].cursivedy
- setfield(ti,"yoffset",ny)
- end
- end
-end
-local function inject_kerns(head,glist,ilist,length)
- for i=1,length do
- local n=glist[i]
- local pn=rawget(properties,n)
- if pn then
- local dp=nil
- local dr=nil
- local ni=ilist[i]
- local p=nil
- if ni=="injections" then
- p=getprev(n)
- if p then
- local id=getid(p)
- if id==disc_code then
- dp=getfield(p,"post")
- dr=getfield(p,"replace")
- end
- end
- end
- if dp then
- local i=rawget(pn,"postinjections")
- if i then
- local leftkern=i.leftkern
- if leftkern and leftkern~=0 then
- local t=find_tail(dp)
- insert_node_after(dp,t,newkern(leftkern))
- setfield(p,"post",dp)
- end
- end
- end
- if dr then
- local i=rawget(pn,"replaceinjections")
- if i then
- local leftkern=i.leftkern
- if leftkern and leftkern~=0 then
- local t=find_tail(dr)
- insert_node_after(dr,t,newkern(leftkern))
- setfield(p,"replace",dr)
- end
- end
- else
- local i=rawget(pn,ni)
- if i then
- local leftkern=i.leftkern
- if leftkern and leftkern~=0 then
- insert_node_before(head,n,newkern(leftkern))
- end
- local rightkern=i.rightkern
- if rightkern and rightkern~=0 then
- insert_node_after(head,n,newkern(rightkern))
- end
- end
- end
- end
- end
-end
-local function inject_everything(head,where)
- head=tonut(head)
- if trace_injections then
- trace(head,"everything")
- end
- local glyphs,glyphi,nofglyphs,marks,marki,nofmarks=collect_glyphs(head,nofregisteredpairs>0)
- if nofglyphs>0 then
- if nofregisteredcursives>0 then
- inject_cursives(glyphs,glyphi,nofglyphs)
- end
- if nofregisteredmarks>0 then
- inject_marks(marks,marki,nofmarks)
+ prevglyph=nil
+ prevdisc=current
+ else
+ prevglyph=nil
+ prevdisc=nil
end
- inject_kerns(head,glyphs,glyphi,nofglyphs)
+ prev=current
+ current=next
end
- if nofmarks>0 then
- inject_kerns(head,marks,marki,nofmarks)
- end
if keepregisteredcounts then
keepregisteredcounts=false
else
nofregisteredkerns=0
- nofregisteredpairs=0
- nofregisteredmarks=0
- nofregisteredcursives=0
end
return tonode(head),true
end
-local function inject_kerns_only(head,where)
+local function inject_pairs_only(head,where)
head=tonut(head)
if trace_injections then
- trace(head,"kerns")
+ trace(head,"pairs")
end
- local n=head
- local p=nil
- while n do
- local id=getid(n)
+ local current=head
+ local prev=nil
+ local next=nil
+ local prevdisc=nil
+ local prevglyph=nil
+ local pre=nil
+ local post=nil
+ local replace=nil
+ local pretail=nil
+ local posttail=nil
+ local replacetail=nil
+ while current do
+ local id=getid(current)
+ local next=getnext(current)
if id==glyph_code then
- if getsubtype(n)<256 then
- local pn=rawget(properties,n)
- if pn then
- if p then
- local d=getfield(p,"post")
- if d then
- local i=rawget(pn,"postinjections")
+ if getsubtype(current)<256 then
+ local p=rawget(properties,current)
+ if p then
+ local i=p.injections
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(current,"yoffset",yoffset)
+ end
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ insert_node_before(head,current,newkern(leftkern))
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,current,newkern(rightkern))
+ end
+ else
+ local i=p.emptyinjections
+ if i then
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ if next and getid(next)==disc_code then
+ if replace then
+ else
+ setfield(next,"replace",newkern(rightkern))
+ end
+ end
+ end
+ end
+ end
+ if prevdisc then
+ local done=false
+ if post then
+ local i=p.postinjections
if i then
local leftkern=i.leftkern
if leftkern and leftkern~=0 then
- local t=find_tail(d)
- insert_node_after(d,t,newkern(leftkern))
- setfield(p,"post",d)
+ insert_node_after(post,posttail,newkern(leftkern))
+ done=true
end
end
end
- local d=getfield(p,"replace")
- if d then
- local i=rawget(pn,"replaceinjections")
+ if replace then
+ local i=p.replaceinjections
if i then
local leftkern=i.leftkern
if leftkern and leftkern~=0 then
- local t=find_tail(d)
- insert_node_after(d,t,newkern(leftkern))
- setfield(p,"replace",d)
+ insert_node_after(replace,replacetail,newkern(leftkern))
+ done=true
end
end
else
- local i=rawget(pn,"injections")
+ local i=p.emptyinjections
if i then
local leftkern=i.leftkern
if leftkern and leftkern~=0 then
- setfield(p,"replace",newkern(leftkern))
+ setfield(prev,"replace",newkern(leftkern))
end
end
end
- else
- local i=rawget(pn,"injections")
+ if done then
+ setdisc(prevdisc,pre,post,replace)
+ end
+ end
+ end
+ end
+ prevdisc=nil
+ prevglyph=current
+ elseif id==disc_code then
+ pre,post,replace,pretail,posttail,replacetail=getdisc(current,true)
+ local done=false
+ if pre then
+ for n in traverse_char(pre) do
+ local p=rawget(properties,n)
+ if p then
+ local i=p.injections or p.preinjections
if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
local leftkern=i.leftkern
if leftkern and leftkern~=0 then
- head=insert_node_before(head,n,newkern(leftkern))
+ pre=insert_node_before(pre,n,newkern(leftkern))
+ done=true
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(pre,n,newkern(rightkern))
+ done=true
end
end
end
end
end
- p=nil
- elseif id==disc_code then
- local d=getfield(n,"pre")
- if d then
- local h=d
- for n in traverse_id(glyph_code,d) do
- if getsubtype(n)<256 then
- local pn=rawget(properties,n)
- if pn then
- local i=rawget(pn,"preinjections")
- if i then
- local leftkern=i.leftkern
- if leftkern and leftkern~=0 then
- h=insert_node_before(h,n,newkern(leftkern))
- end
+ if post then
+ for n in traverse_char(post) do
+ local p=rawget(properties,n)
+ if p then
+ local i=p.injections or p.postinjections
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ post=insert_node_before(post,n,newkern(leftkern))
+ done=true
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(post,n,newkern(rightkern))
+ done=true
end
end
- else
- break
end
end
- if h~=d then
- setfield(n,"pre",h)
- end
end
- local d=getfield(n,"post")
- if d then
- local h=d
- for n in traverse_id(glyph_code,d) do
- if getsubtype(n)<256 then
- local pn=rawget(properties,n)
- if pn then
- local i=rawget(pn,"postinjections")
- if i then
- local leftkern=i.leftkern
- if leftkern and leftkern~=0 then
- h=insert_node_before(h,n,newkern(leftkern))
- end
+ if replace then
+ for n in traverse_char(replace) do
+ local p=rawget(properties,n)
+ if p then
+ local i=p.injections or p.replaceinjections
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ replace=insert_node_before(replace,n,newkern(leftkern))
+ done=true
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(replace,n,newkern(rightkern))
+ done=true
end
end
- else
- break
end
end
- if h~=d then
- setfield(n,"post",h)
- end
end
- local d=getfield(n,"replace")
- if d then
- local h=d
- for n in traverse_id(glyph_code,d) do
- if getsubtype(n)<256 then
- local pn=rawget(properties,n)
- if pn then
- local i=rawget(pn,"replaceinjections")
- if i then
- local leftkern=i.leftkern
- if leftkern and leftkern~=0 then
- h=insert_node_before(h,n,newkern(leftkern))
- end
+ if prevglyph then
+ if pre then
+ local p=rawget(properties,prevglyph)
+ if p then
+ local i=p.preinjections
+ if i then
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ pre=insert_node_before(pre,pre,newkern(rightkern))
+ done=true
end
end
- else
- break
end
end
- if h~=d then
- setfield(n,"replace",h)
+ if replace then
+ local p=rawget(properties,prevglyph)
+ if p then
+ local i=p.replaceinjections
+ if i then
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ replace=insert_node_before(replace,replace,newkern(rightkern))
+ done=true
+ end
+ end
+ end
end
end
- p=n
+ if done then
+ setdisc(current,pre,post,replace)
+ end
+ prevglyph=nil
+ prevdisc=current
else
- p=nil
+ prevglyph=nil
+ prevdisc=nil
end
- n=getnext(n)
+ prev=current
+ current=next
end
if keepregisteredcounts then
keepregisteredcounts=false
@@ -11089,185 +17122,484 @@ local function inject_kerns_only(head,where)
end
return tonode(head),true
end
-local function inject_pairs_only(head,where)
+local function showoffset(n,flag)
+ local o=getfield(n,"xoffset")
+ if o==0 then
+ o=getfield(n,"yoffset")
+ end
+ if o~=0 then
+ setcolor(n,flag and "darkred" or "darkgreen")
+ else
+ resetcolor(n)
+ end
+end
+local function inject_everything(head,where)
head=tonut(head)
if trace_injections then
- trace(head,"pairs")
+ trace(head,"everything")
end
- local n=head
- local p=nil
- while n do
- local id=getid(n)
+ local hascursives=nofregisteredcursives>0
+ local hasmarks=nofregisteredmarks>0
+ local current=head
+ local last=nil
+ local font=font
+ local markdata=nil
+ local prev=nil
+ local next=nil
+ local prevdisc=nil
+ local prevglyph=nil
+ local pre=nil
+ local post=nil
+ local replace=nil
+ local pretail=nil
+ local posttail=nil
+ local replacetail=nil
+ local cursiveanchor=nil
+ local minc=0
+ local maxc=0
+ local glyphs={}
+ local marks={}
+ local nofmarks=0
+ local function processmark(p,n,pn)
+ local px=getfield(p,"xoffset")
+ local ox=0
+ local rightkern=nil
+ local pp=rawget(properties,p)
+ if pp then
+ pp=pp.injections
+ if pp then
+ rightkern=pp.rightkern
+ end
+ end
+ if rightkern then
+ if pn.markdir<0 then
+ ox=px-pn.markx-rightkern
+ else
+ if false then
+ local leftkern=pp.leftkern
+ if leftkern then
+ ox=px-pn.markx-leftkern
+ else
+ ox=px-pn.markx
+ end
+ else
+ ox=px-pn.markx
+ end
+ end
+ else
+ ox=px-pn.markx
+ local wn=getfield(n,"width")
+ if wn~=0 then
+ pn.leftkern=-wn/2
+ pn.rightkern=-wn/2
+ end
+ end
+ local oy=getfield(n,"yoffset")+getfield(p,"yoffset")+pn.marky
+ setfield(n,"xoffset",ox)
+ setfield(n,"yoffset",oy)
+ if trace_marks then
+ showoffset(n,true)
+ end
+ end
+ while current do
+ local id=getid(current)
+ local next=getnext(current)
if id==glyph_code then
- if getsubtype(n)<256 then
- local pn=rawget(properties,n)
- if pn then
- if p then
- local d=getfield(p,"post")
- if d then
- local i=rawget(pn,"postinjections")
- if i then
- local leftkern=i.leftkern
- if leftkern and leftkern~=0 then
- local t=find_tail(d)
- insert_node_after(d,t,newkern(leftkern))
- setfield(p,"post",d)
+ if getsubtype(current)<256 then
+ local p=rawget(properties,current)
+ if p then
+ local i=p.injections
+ if i then
+ local pm=i.markbasenode
+ if pm then
+ nofmarks=nofmarks+1
+ marks[nofmarks]=current
+ else
+ if hascursives then
+ local cursivex=i.cursivex
+ if cursivex then
+ if cursiveanchor then
+ if cursivex~=0 then
+ i.leftkern=(i.leftkern or 0)+cursivex
+ end
+ if maxc==0 then
+ minc=1
+ maxc=1
+ glyphs[1]=cursiveanchor
+ else
+ maxc=maxc+1
+ glyphs[maxc]=cursiveanchor
+ end
+ properties[cursiveanchor].cursivedy=i.cursivey
+ last=current
+ else
+ maxc=0
+ end
+ elseif maxc>0 then
+ local ny=getfield(current,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",ny)
+ if trace_cursive then
+ showoffset(ti)
+ end
+ end
+ maxc=0
+ cursiveanchor=nil
+ end
+ if i.cursiveanchor then
+ cursiveanchor=current
+ else
+ if maxc>0 then
+ local ny=getfield(current,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",ny)
+ if trace_cursive then
+ showoffset(ti)
+ end
+ end
+ maxc=0
+ end
+ cursiveanchor=nil
+ end
+ end
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(current,"yoffset",yoffset)
+ end
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ insert_node_before(head,current,newkern(leftkern))
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,current,newkern(rightkern))
+ end
+ end
+ else
+ local i=p.emptyinjections
+ if i then
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ if next and getid(next)==disc_code then
+ if replace then
+ else
+ setfield(next,"replace",newkern(rightkern))
+ end
end
end
end
- local d=getfield(p,"replace")
- if d then
- local i=rawget(pn,"replaceinjections")
- if i then
- local leftkern=i.leftkern
- if leftkern and leftkern~=0 then
- local t=find_tail(d)
- insert_node_after(d,t,newkern(leftkern))
- setfield(p,"replace",d)
+ end
+ if prevdisc then
+ if p then
+ local done=false
+ if post then
+ local i=p.postinjections
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ insert_node_after(post,posttail,newkern(leftkern))
+ done=true
+ end
end
end
- else
- local i=rawget(pn,"injections")
- if i then
+ if replace then
+ local i=p.replaceinjections
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ insert_node_after(replace,replacetail,newkern(leftkern))
+ done=true
+ end
+ end
+ else
+ local i=p.emptyinjections
local leftkern=i.leftkern
if leftkern and leftkern~=0 then
- setfield(p,"replace",newkern(leftkern))
+ setfield(prev,"replace",newkern(leftkern))
end
end
+ if done then
+ setdisc(prevdisc,pre,post,replace)
+ end
end
- else
- local i=rawget(pn,"injections")
+ end
+ else
+ if hascursives and maxc>0 then
+ local ny=getfield(current,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",getfield(ti,"yoffset")+ny)
+ end
+ maxc=0
+ cursiveanchor=nil
+ end
+ end
+ end
+ prevdisc=nil
+ prevglyph=current
+ elseif id==disc_code then
+ pre,post,replace,pretail,posttail,replacetail=getdisc(current,true)
+ local done=false
+ if pre then
+ for n in traverse_char(pre) do
+ local p=rawget(properties,n)
+ if p then
+ local i=p.injections or p.preinjections
if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
local leftkern=i.leftkern
if leftkern and leftkern~=0 then
- head=insert_node_before(head,n,newkern(leftkern))
+ pre=insert_node_before(pre,n,newkern(leftkern))
+ done=true
end
local rightkern=i.rightkern
if rightkern and rightkern~=0 then
- insert_node_after(head,n,newkern(rightkern))
- n=getnext(n)
+ insert_node_after(pre,n,newkern(rightkern))
+ done=true
+ end
+ end
+ if hasmarks then
+ local pm=i.markbasenode
+ if pm then
+ processmark(pm,current,i)
end
+ end
+ end
+ end
+ end
+ if post then
+ for n in traverse_char(post) do
+ local p=rawget(properties,n)
+ if p then
+ local i=p.injections or p.postinjections
+ if i then
local yoffset=i.yoffset
if yoffset and yoffset~=0 then
setfield(n,"yoffset",yoffset)
end
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ post=insert_node_before(post,n,newkern(leftkern))
+ done=true
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(post,n,newkern(rightkern))
+ done=true
+ end
+ end
+ if hasmarks then
+ local pm=i.markbasenode
+ if pm then
+ processmark(pm,current,i)
+ end
end
end
end
end
- p=nil
- elseif id==disc_code then
- local d=getfield(n,"pre")
- if d then
- local h=d
- for n in traverse_id(glyph_code,d) do
- if getsubtype(n)<256 then
- local pn=rawget(properties,n)
- if pn then
- local i=rawget(pn,"preinjections")
- if i then
- local leftkern=i.leftkern
- if leftkern and leftkern~=0 then
- h=insert_node_before(h,n,newkern(leftkern))
- end
- local rightkern=i.rightkern
- if rightkern and rightkern~=0 then
- insert_node_after(head,n,newkern(rightkern))
- n=getnext(n)
- end
- local yoffset=i.yoffset
- if yoffset and yoffset~=0 then
- setfield(n,"yoffset",yoffset)
- end
+ if replace then
+ for n in traverse_char(replace) do
+ local p=rawget(properties,n)
+ if p then
+ local i=p.injections or p.replaceinjections
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ replace=insert_node_before(replace,n,newkern(leftkern))
+ done=true
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(replace,n,newkern(rightkern))
+ done=true
+ end
+ end
+ if hasmarks then
+ local pm=i.markbasenode
+ if pm then
+ processmark(pm,current,i)
end
end
- else
- break
end
end
- if h~=d then
- setfield(n,"pre",h)
- end
end
- local d=getfield(n,"post")
- if d then
- local h=d
- for n in traverse_id(glyph_code,d) do
- if getsubtype(n)<256 then
- local pn=rawget(properties,n)
- if pn then
- local i=rawget(pn,"postinjections")
- if i then
- local leftkern=i.leftkern
- if leftkern and leftkern~=0 then
- h=insert_node_before(h,n,newkern(leftkern))
- end
- local rightkern=i.rightkern
- if rightkern and rightkern~=0 then
- insert_node_after(head,n,newkern(rightkern))
- n=getnext(n)
- end
- local yoffset=i.yoffset
- if yoffset and yoffset~=0 then
- setfield(n,"yoffset",yoffset)
- end
+ if prevglyph then
+ if pre then
+ local p=rawget(properties,prevglyph)
+ if p then
+ local i=p.preinjections
+ if i then
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ pre=insert_node_before(pre,pre,newkern(rightkern))
+ done=true
end
end
- else
- break
end
end
- if h~=d then
- setfield(n,"post",h)
- end
- end
- local d=getfield(n,"replace")
- if d then
- local h=d
- for n in traverse_id(glyph_code,d) do
- if getsubtype(n)<256 then
- local pn=rawget(properties,n)
- if pn then
- local i=rawget(pn,"replaceinjections")
- if i then
- local leftkern=i.leftkern
- if leftkern and leftkern~=0 then
- h=insert_node_before(h,n,newkern(leftkern))
- end
- local rightkern=i.rightkern
- if rightkern and rightkern~=0 then
- insert_node_after(head,n,newkern(rightkern))
- n=getnext(n)
- end
- local yoffset=i.yoffset
- if yoffset and yoffset~=0 then
- setfield(n,"yoffset",yoffset)
- end
+ if replace then
+ local p=rawget(properties,prevglyph)
+ if p then
+ local i=p.replaceinjections
+ if i then
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ replace=insert_node_before(replace,replace,newkern(rightkern))
+ done=true
end
end
- else
- break
end
end
- if h~=d then
- setfield(n,"replace",h)
- end
end
- p=n
+ if done then
+ setdisc(current,pre,post,replace)
+ end
+ prevglyph=nil
+ prevdisc=current
else
- p=nil
+ prevglyph=nil
+ prevdisc=nil
end
- n=getnext(n)
+ prev=current
+ current=next
+ end
+ if hascursives and maxc>0 then
+ local ny=getfield(last,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",ny)
+ if trace_cursive then
+ showoffset(ti)
+ end
+ end
+ end
+ if nofmarks>0 then
+ for i=1,nofmarks do
+ local m=marks[i]
+ local p=rawget(properties,m)
+ local i=p.injections
+ local b=i.markbasenode
+ processmark(b,m,i)
+ end
+ elseif hasmarks then
end
if keepregisteredcounts then
keepregisteredcounts=false
else
- nofregisteredpairs=0
nofregisteredkerns=0
+ nofregisteredpairs=0
+ nofregisteredmarks=0
+ nofregisteredcursives=0
end
return tonode(head),true
end
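-- Descriptive note (hedged summary of the injection pass ending here): the pass
-- walks glyphs and discretionaries, turning registered left/right kerns into
-- kern nodes (inside pre/post/replace lists where needed, committed back with
-- setdisc), accumulating cursive chains so their vertical offsets can be
-- propagated over the run, and deferring mark placement to the final
-- processmark loop over the collected marks.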
+local triggers=false
+function nodes.injections.setspacekerns(font,sequence)
+ if triggers then
+ triggers[font]=sequence
+ else
+ triggers={ [font]=sequence }
+ end
+end
+local function injectspaces(head)
+ if not triggers then
+ return head,false
+ end
+ local lastfont=nil
+ local spacekerns=nil
+ local leftkerns=nil
+ local rightkerns=nil
+ local factor=0
+ local threshold=0
+ local leftkern=false
+ local rightkern=false
+ local function updatefont(font,trig)
+ leftkerns=trig.left
+ rightkerns=trig.right
+ local par=parameters[font]
+ factor=par.factor
+ threshold=par.spacing.width-1
+ lastfont=font
+ end
+ for n in traverse_id(glue_code,tonut(head)) do
+ local prev,next=getboth(n)
+ local prevchar=ischar(prev)
+ local nextchar=ischar(next)
+ if nextchar then
+ local font=getfont(next)
+ local trig=triggers[font]
+ if trig then
+ if lastfont~=font then
+ updatefont(font,trig)
+ end
+ if rightkerns then
+ rightkern=rightkerns[nextchar]
+ end
+ end
+ end
+ if prevchar then
+ local font=getfont(next)
+ local trig=triggers[font]
+ if trig then
+ if lastfont~=font then
+ updatefont(font,trig)
+ end
+ if leftkerns then
+ leftkern=leftkerns[prevchar]
+ end
+ end
+ end
+ if leftkern then
+ local old=getfield(n,"width")
+ if old>=threshold then
+ if rightkern then
+ local new=old+(leftkern+rightkern)*factor
+ if trace_spaces then
+ report_spaces("%C [%p -> %p] %C",prevchar,old,new,nextchar)
+ end
+ setfield(n,"width",new)
+ leftkern=false
+ else
+ local new=old+leftkern*factor
+ if trace_spaces then
+ report_spaces("%C [%p -> %p]",prevchar,old,new)
+ end
+ setfield(n,"width",new)
+ end
+ end
+ leftkern=false
+ elseif rightkern then
+ local old=getfield(n,"width")
+ if old>=threshold then
+ local new=old+rightkern*factor
+ if trace_spaces then
+ report_spaces("[%p -> %p] %C",nextchar,old,new)
+ end
+ setfield(n,"width",new)
+ end
+ rightkern=false
+ end
+ end
+ triggers=false
+ return head,true
+end
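-- Hedged sketch: how a font feature handler might feed the space-kern injector
-- defined above. The "left"/"right" maps keyed by character codes follow the
-- lookups done in injectspaces; the font id and kern values are assumptions
-- for illustration only.
--
--   local somefont = font.current()
--   nodes.injections.setspacekerns(somefont, {
--       left  = { [0x0041] = 100 },   -- widen a space after "A"
--       right = { [0x0056] = 100 },   -- widen a space before "V"
--   })
--
-- During injections.handler a glue sitting between two such characters, and at
-- least as wide as the font's space threshold, is widened by the registered
-- left plus right amount times parameters[somefont].factor.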
function injections.handler(head,where)
+ if triggers then
+ head=injectspaces(head)
+ end
if nofregisteredmarks>0 or nofregisteredcursives>0 then
return inject_everything(head,where)
elseif nofregisteredpairs>0 then
@@ -11283,7 +17615,7 @@ end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules={} end modules ['luatex-fonts-ota']={
+if not modules then modules={} end modules ['font-ota']={
version=1.001,
comment="companion to font-otf.lua (analysing)",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -11306,21 +17638,22 @@ local tonut=nuts.tonut
local getfield=nuts.getfield
local getnext=nuts.getnext
local getprev=nuts.getprev
-local getid=nuts.getid
+local getprev=nuts.getprev
local getprop=nuts.getprop
local setprop=nuts.setprop
local getfont=nuts.getfont
local getsubtype=nuts.getsubtype
local getchar=nuts.getchar
+local ischar=nuts.is_char
local traverse_id=nuts.traverse_id
local traverse_node_list=nuts.traverse
local end_of_math=nuts.end_of_math
local nodecodes=nodes.nodecodes
-local glyph_code=nodecodes.glyph
local disc_code=nodecodes.disc
local math_code=nodecodes.math
local fontdata=fonts.hashes.identifiers
local categories=characters and characters.categories or {}
+local chardata=characters and characters.data
local otffeatures=fonts.constructors.newfeatures("otf")
local registerotffeature=otffeatures.register
local s_init=1 local s_rphf=7
@@ -11332,7 +17665,10 @@ local s_rest=6
local states={
init=s_init,
medi=s_medi,
+ med2=s_medi,
fina=s_fina,
+ fin2=s_fina,
+ fin3=s_fina,
isol=s_isol,
mark=s_mark,
rest=s_rest,
@@ -11345,7 +17681,10 @@ local states={
local features={
init=s_init,
medi=s_medi,
+ med2=s_medi,
fina=s_fina,
+ fin2=s_fina,
+ fin3=s_fina,
isol=s_isol,
rphf=s_rphf,
half=s_half,
@@ -11363,10 +17702,9 @@ function analyzers.setstate(head,font)
local first,last,current,n,done=nil,nil,head,0,false
current=tonut(current)
while current do
- local id=getid(current)
- if id==glyph_code and getfont(current)==font then
+ local char,id=ischar(current,font)
+ if char and not getprop(current,a_state) then
done=true
- local char=getchar(current)
local d=descriptions[char]
if d then
if d.class=="mark" then
@@ -11390,6 +17728,16 @@ function analyzers.setstate(head,font)
end
first,last,n=nil,nil,0
end
+ elseif char==false then
+ if first and first==last then
+ setprop(last,a_state,s_isol)
+ elseif last then
+ setprop(last,a_state,s_fina)
+ end
+ first,last,n=nil,nil,0
+ if id==math_code then
+ current=end_of_math(current)
+ end
elseif id==disc_code then
setprop(current,a_state,s_medi)
last=current
@@ -11454,107 +17802,6 @@ registerotffeature {
}
}
methods.latn=analyzers.setstate
-local tatweel=0x0640
-local zwnj=0x200C
-local zwj=0x200D
-local isolated={
- [0x0600]=true,[0x0601]=true,[0x0602]=true,[0x0603]=true,
- [0x0604]=true,
- [0x0608]=true,[0x060B]=true,[0x0621]=true,[0x0674]=true,
- [0x06DD]=true,
- [0x0856]=true,[0x0858]=true,[0x0857]=true,
- [0x07FA]=true,
- [zwnj]=true,
- [0x08AD]=true,
-}
-local final={
- [0x0622]=true,[0x0623]=true,[0x0624]=true,[0x0625]=true,
- [0x0627]=true,[0x0629]=true,[0x062F]=true,[0x0630]=true,
- [0x0631]=true,[0x0632]=true,[0x0648]=true,[0x0671]=true,
- [0x0672]=true,[0x0673]=true,[0x0675]=true,[0x0676]=true,
- [0x0677]=true,[0x0688]=true,[0x0689]=true,[0x068A]=true,
- [0x068B]=true,[0x068C]=true,[0x068D]=true,[0x068E]=true,
- [0x068F]=true,[0x0690]=true,[0x0691]=true,[0x0692]=true,
- [0x0693]=true,[0x0694]=true,[0x0695]=true,[0x0696]=true,
- [0x0697]=true,[0x0698]=true,[0x0699]=true,[0x06C0]=true,
- [0x06C3]=true,[0x06C4]=true,[0x06C5]=true,[0x06C6]=true,
- [0x06C7]=true,[0x06C8]=true,[0x06C9]=true,[0x06CA]=true,
- [0x06CB]=true,[0x06CD]=true,[0x06CF]=true,[0x06D2]=true,
- [0x06D3]=true,[0x06D5]=true,[0x06EE]=true,[0x06EF]=true,
- [0x0759]=true,[0x075A]=true,[0x075B]=true,[0x076B]=true,
- [0x076C]=true,[0x0771]=true,[0x0773]=true,[0x0774]=true,
- [0x0778]=true,[0x0779]=true,
- [0x08AA]=true,[0x08AB]=true,[0x08AC]=true,
- [0xFEF5]=true,[0xFEF7]=true,[0xFEF9]=true,[0xFEFB]=true,
- [0x0710]=true,[0x0715]=true,[0x0716]=true,[0x0717]=true,
- [0x0718]=true,[0x0719]=true,[0x0728]=true,[0x072A]=true,
- [0x072C]=true,[0x071E]=true,
- [0x072F]=true,[0x074D]=true,
- [0x0840]=true,[0x0849]=true,[0x0854]=true,[0x0846]=true,
- [0x084F]=true,
- [0x08AE]=true,[0x08B1]=true,[0x08B2]=true,
-}
-local medial={
- [0x0626]=true,[0x0628]=true,[0x062A]=true,[0x062B]=true,
- [0x062C]=true,[0x062D]=true,[0x062E]=true,[0x0633]=true,
- [0x0634]=true,[0x0635]=true,[0x0636]=true,[0x0637]=true,
- [0x0638]=true,[0x0639]=true,[0x063A]=true,[0x063B]=true,
- [0x063C]=true,[0x063D]=true,[0x063E]=true,[0x063F]=true,
- [0x0641]=true,[0x0642]=true,[0x0643]=true,
- [0x0644]=true,[0x0645]=true,[0x0646]=true,[0x0647]=true,
- [0x0649]=true,[0x064A]=true,[0x066E]=true,[0x066F]=true,
- [0x0678]=true,[0x0679]=true,[0x067A]=true,[0x067B]=true,
- [0x067C]=true,[0x067D]=true,[0x067E]=true,[0x067F]=true,
- [0x0680]=true,[0x0681]=true,[0x0682]=true,[0x0683]=true,
- [0x0684]=true,[0x0685]=true,[0x0686]=true,[0x0687]=true,
- [0x069A]=true,[0x069B]=true,[0x069C]=true,[0x069D]=true,
- [0x069E]=true,[0x069F]=true,[0x06A0]=true,[0x06A1]=true,
- [0x06A2]=true,[0x06A3]=true,[0x06A4]=true,[0x06A5]=true,
- [0x06A6]=true,[0x06A7]=true,[0x06A8]=true,[0x06A9]=true,
- [0x06AA]=true,[0x06AB]=true,[0x06AC]=true,[0x06AD]=true,
- [0x06AE]=true,[0x06AF]=true,[0x06B0]=true,[0x06B1]=true,
- [0x06B2]=true,[0x06B3]=true,[0x06B4]=true,[0x06B5]=true,
- [0x06B6]=true,[0x06B7]=true,[0x06B8]=true,[0x06B9]=true,
- [0x06BA]=true,[0x06BB]=true,[0x06BC]=true,[0x06BD]=true,
- [0x06BE]=true,[0x06BF]=true,[0x06C1]=true,[0x06C2]=true,
- [0x06CC]=true,[0x06CE]=true,[0x06D0]=true,[0x06D1]=true,
- [0x06FA]=true,[0x06FB]=true,[0x06FC]=true,[0x06FF]=true,
- [0x0750]=true,[0x0751]=true,[0x0752]=true,[0x0753]=true,
- [0x0754]=true,[0x0755]=true,[0x0756]=true,[0x0757]=true,
- [0x0758]=true,[0x075C]=true,[0x075D]=true,[0x075E]=true,
- [0x075F]=true,[0x0760]=true,[0x0761]=true,[0x0762]=true,
- [0x0763]=true,[0x0764]=true,[0x0765]=true,[0x0766]=true,
- [0x0767]=true,[0x0768]=true,[0x0769]=true,[0x076A]=true,
- [0x076D]=true,[0x076E]=true,[0x076F]=true,[0x0770]=true,
- [0x0772]=true,[0x0775]=true,[0x0776]=true,[0x0777]=true,
- [0x077A]=true,[0x077B]=true,[0x077C]=true,[0x077D]=true,
- [0x077E]=true,[0x077F]=true,
- [0x08A0]=true,[0x08A2]=true,[0x08A4]=true,[0x08A5]=true,
- [0x08A6]=true,[0x0620]=true,[0x08A8]=true,[0x08A9]=true,
- [0x08A7]=true,[0x08A3]=true,
- [0x0712]=true,[0x0713]=true,[0x0714]=true,[0x071A]=true,
- [0x071B]=true,[0x071C]=true,[0x071D]=true,[0x071F]=true,
- [0x0720]=true,[0x0721]=true,[0x0722]=true,[0x0723]=true,
- [0x0724]=true,[0x0725]=true,[0x0726]=true,[0x0727]=true,
- [0x0729]=true,[0x072B]=true,[0x072D]=true,[0x072E]=true,
- [0x074E]=true,[0x074F]=true,
- [0x0841]=true,[0x0842]=true,[0x0843]=true,[0x0844]=true,
- [0x0845]=true,[0x0847]=true,[0x0848]=true,[0x0855]=true,
- [0x0851]=true,[0x084E]=true,[0x084D]=true,[0x084A]=true,
- [0x084B]=true,[0x084C]=true,[0x0850]=true,[0x0852]=true,
- [0x0853]=true,
- [0x07D7]=true,[0x07E8]=true,[0x07D9]=true,[0x07EA]=true,
- [0x07CA]=true,[0x07DB]=true,[0x07CC]=true,[0x07DD]=true,
- [0x07CE]=true,[0x07DF]=true,[0x07D4]=true,[0x07E5]=true,
- [0x07E9]=true,[0x07E7]=true,[0x07E3]=true,[0x07E2]=true,
- [0x07E0]=true,[0x07E1]=true,[0x07DE]=true,[0x07DC]=true,
- [0x07D1]=true,[0x07DA]=true,[0x07D8]=true,[0x07D6]=true,
- [0x07D2]=true,[0x07D0]=true,[0x07CF]=true,[0x07CD]=true,
- [0x07CB]=true,[0x07D3]=true,[0x07E4]=true,[0x07D5]=true,
- [0x07E6]=true,
- [tatweel]=true,[zwj]=true,
- [0x08A1]=true,[0x08AF]=true,[0x08B0]=true,
-}
local arab_warned={}
local function warning(current,what)
local char=getchar(current)
@@ -11563,92 +17810,173 @@ local function warning(current,what)
arab_warned[char]=true
end
end
-local function finish(first,last)
- if last then
- if first==last then
- local fc=getchar(first)
- if medial[fc] or final[fc] then
- setprop(first,a_state,s_isol)
- else
- warning(first,"isol")
- setprop(first,a_state,s_error)
- end
- else
- local lc=getchar(last)
- if medial[lc] or final[lc] then
- setprop(last,a_state,s_fina)
- else
- warning(last,"fina")
- setprop(last,a_state,s_error)
+local mappers={
+ l=s_init,
+ d=s_medi,
+ c=s_medi,
+ r=s_fina,
+ u=s_isol,
+}
+local classifiers=characters.classifiers
+if not classifiers then
+ local first_arabic,last_arabic=characters.blockrange("arabic")
+ local first_syriac,last_syriac=characters.blockrange("syriac")
+ local first_mandiac,last_mandiac=characters.blockrange("mandiac")
+ local first_nko,last_nko=characters.blockrange("nko")
+ classifiers=table.setmetatableindex(function(t,k)
+ local c=chardata[k]
+ local v=false
+ if c then
+ local arabic=c.arabic
+ if arabic then
+ v=mappers[arabic]
+ if not v then
+ log.report("analyze","error in mapping arabic %C",k)
+ v=false
+ end
+ elseif k>=first_arabic and k<=last_arabic or k>=first_syriac and k<=last_syriac or
+ k>=first_mandiac and k<=last_mandiac or k>=first_nko and k<=last_nko then
+ if categories[k]=="mn" then
+ v=s_mark
+ else
+ v=s_rest
+ end
end
end
- first,last=nil,nil
- elseif first then
- local fc=getchar(first)
- if medial[fc] or final[fc] then
- setprop(first,a_state,s_isol)
- else
- warning(first,"isol")
- setprop(first,a_state,s_error)
- end
- first=nil
- end
- return first,last
+ t[k]=v
+ return v
+ end)
end
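-- Hedged illustration of the classifier fallback above, assuming the character
-- data carries the Unicode joining classes in its "arabic" field as the
-- mappers table suggests (l/d/c/r/u):
--
--   classifiers[0x0628]   -- BEH, dual joining  -> s_medi
--   classifiers[0x062F]   -- DAL, right joining -> s_fina
--   classifiers[0x0621]   -- HAMZA, non joining -> s_isol
--
-- Unclassified characters inside the arabic/syriac/mandiac/nko block ranges
-- fall back to s_mark (combining marks) or s_rest; anything else yields false.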
function methods.arab(head,font,attr)
- local useunicodemarks=analyzers.useunicodemarks
- local tfmdata=fontdata[font]
- local marks=tfmdata.resources.marks
- local first,last,current,done=nil,nil,head,false
+ local first,last=nil,nil
+ local c_first,c_last=nil,nil
+ local current,done=head,false
current=tonut(current)
while current do
- local id=getid(current)
- if id==glyph_code and getfont(current)==font and getsubtype(current)<256 and not getprop(current,a_state) then
+ local char,id=ischar(current,font)
+ if char and not getprop(current,a_state) then
done=true
- local char=getchar(current)
- if marks[char] or (useunicodemarks and categories[char]=="mn") then
+ local classifier=classifiers[char]
+ if not classifier then
+ if last then
+ if c_last==s_medi or c_last==s_fina then
+ setprop(last,a_state,s_fina)
+ else
+ warning(last,"fina")
+ setprop(last,a_state,s_error)
+ end
+ first,last=nil,nil
+ elseif first then
+ if c_first==s_medi or c_first==s_fina then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
+ first=nil
+ end
+ elseif classifier==s_mark then
setprop(current,a_state,s_mark)
- elseif isolated[char] then
- first,last=finish(first,last)
+ elseif classifier==s_isol then
+ if last then
+ if c_last==s_medi or c_last==s_fina then
+ setprop(last,a_state,s_fina)
+ else
+ warning(last,"fina")
+ setprop(last,a_state,s_error)
+ end
+ first,last=nil,nil
+ elseif first then
+ if c_first==s_medi or c_first==s_fina then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
+ first=nil
+ end
setprop(current,a_state,s_isol)
- first,last=nil,nil
- elseif not first then
- if medial[char] then
+ elseif classifier==s_medi then
+ if first then
+ last=current
+ c_last=classifier
+ setprop(current,a_state,s_medi)
+ else
setprop(current,a_state,s_init)
- first,last=first or current,current
- elseif final[char] then
- setprop(current,a_state,s_isol)
- first,last=nil,nil
- else
- first,last=finish(first,last)
+ first=current
+ c_first=classifier
end
- elseif medial[char] then
- first,last=first or current,current
- setprop(current,a_state,s_medi)
- elseif final[char] then
- if getprop(last,a_state)~=s_init then
- setprop(last,a_state,s_medi)
+ elseif classifier==s_fina then
+ if last then
+ if getprop(last,a_state)~=s_init then
+ setprop(last,a_state,s_medi)
+ end
+ setprop(current,a_state,s_fina)
+ first,last=nil,nil
+ elseif first then
+ setprop(current,a_state,s_fina)
+ first=nil
+ else
+ setprop(current,a_state,s_isol)
end
- setprop(current,a_state,s_fina)
- first,last=nil,nil
- elseif char>=0x0600 and char<=0x06FF then
- setprop(current,a_state,s_rest)
- first,last=finish(first,last)
else
- first,last=finish(first,last)
+ setprop(current,a_state,s_rest)
+ if last then
+ if c_last==s_medi or c_last==s_fina then
+ setprop(last,a_state,s_fina)
+ else
+ warning(last,"fina")
+ setprop(last,a_state,s_error)
+ end
+ first,last=nil,nil
+ elseif first then
+ if c_first==s_medi or c_first==s_fina then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
+ first=nil
+ end
end
else
- if first or last then
- first,last=finish(first,last)
+ if last then
+ if c_last==s_medi or c_last==s_fina then
+ setprop(last,a_state,s_fina)
+ else
+ warning(last,"fina")
+ setprop(last,a_state,s_error)
+ end
+ first,last=nil,nil
+ elseif first then
+ if c_first==s_medi or c_first==s_fina then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
+ first=nil
end
- if id==math_code then
+ if id==math_code then
current=end_of_math(current)
end
end
current=getnext(current)
end
- if first or last then
- finish(first,last)
+ if last then
+ if c_last==s_medi or c_last==s_fina then
+ setprop(last,a_state,s_fina)
+ else
+ warning(last,"fina")
+ setprop(last,a_state,s_error)
+ end
+ elseif first then
+ if c_first==s_medi or c_first==s_fina then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
end
return head,done
end
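-- Hedged examples of the states methods.arab assigns, given the classifier
-- sketch above (dual-joining BEH/SEEN/MEEM, right-joining DAL):
--
--   BEH SEEN MEEM   -> s_init, s_medi, s_fina   (last one demoted at run end)
--   BEH DAL         -> s_init, s_fina
--   BEH alone       -> s_isol
--
-- A character of another font, or any node that is not a classified glyph,
-- closes the current run in the same way before scanning continues.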
@@ -11663,7 +17991,7 @@ end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules={} end modules ['font-otn']={
+if not modules then modules={} end modules ['font-ots']={
version=1.001,
comment="companion to font-ini.mkiv",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -11673,6 +18001,7 @@ if not modules then modules={} end modules ['font-otn']={
local type,next,tonumber=type,next,tonumber
local random=math.random
local formatters=string.formatters
+local insert=table.insert
local logs,trackers,nodes,attributes=logs,trackers,nodes,attributes
local registertracker=trackers.register
local registerdirective=directives.register
@@ -11697,19 +18026,19 @@ local trace_directions=false registertracker("otf.directions",function(v) trace_
local trace_kernruns=false registertracker("otf.kernruns",function(v) trace_kernruns=v end)
local trace_discruns=false registertracker("otf.discruns",function(v) trace_discruns=v end)
local trace_compruns=false registertracker("otf.compruns",function(v) trace_compruns=v end)
+local trace_testruns=false registertracker("otf.testruns",function(v) trace_testruns=v end)
local quit_on_no_replacement=true
local zwnjruns=true
+local optimizekerns=true
registerdirective("otf.zwnjruns",function(v) zwnjruns=v end)
registerdirective("otf.chain.quitonnoreplacement",function(value) quit_on_no_replacement=value end)
local report_direct=logs.reporter("fonts","otf direct")
local report_subchain=logs.reporter("fonts","otf subchain")
local report_chain=logs.reporter("fonts","otf chain")
local report_process=logs.reporter("fonts","otf process")
-local report_prepare=logs.reporter("fonts","otf prepare")
local report_warning=logs.reporter("fonts","otf warning")
local report_run=logs.reporter("fonts","otf run")
-registertracker("otf.verbose_chain",function(v) otf.setcontextchain(v and "verbose") end)
-registertracker("otf.normal_chain",function(v) otf.setcontextchain(v and "normal") end)
+local report_check=logs.reporter("fonts","otf check")
registertracker("otf.replacements","otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
registertracker("otf.actions","otf.replacements,otf.positions")
@@ -11724,6 +18053,8 @@ local getnext=nuts.getnext
local setnext=nuts.setnext
local getprev=nuts.getprev
local setprev=nuts.setprev
+local getboth=nuts.getboth
+local setboth=nuts.setboth
local getid=nuts.getid
local getattr=nuts.getattr
local setattr=nuts.setattr
@@ -11734,6 +18065,10 @@ local getsubtype=nuts.getsubtype
local setsubtype=nuts.setsubtype
local getchar=nuts.getchar
local setchar=nuts.setchar
+local getdisc=nuts.getdisc
+local setdisc=nuts.setdisc
+local setlink=nuts.setlink
+local ischar=nuts.is_char
local insert_node_before=nuts.insert_before
local insert_node_after=nuts.insert_after
local delete_node=nuts.delete
@@ -11764,7 +18099,6 @@ local discretionary_code=disccodes.discretionary
local ligature_code=glyphcodes.ligature
local privateattribute=attributes.private
local a_state=privateattribute('state')
-local a_cursbase=privateattribute('cursbase')
local injections=nodes.injections
local setmark=injections.setmark
local setcursive=injections.setcursive
@@ -11784,23 +18118,26 @@ otf.defaultnodealternate="none"
local tfmdata=false
local characters=false
local descriptions=false
-local resources=false
local marks=false
local currentfont=false
-local lookuptable=false
-local anchorlookups=false
-local lookuptypes=false
-local lookuptags=false
-local handlers={}
-local rlmode=0
-local featurevalue=false
-local sweephead={}
+local factor=0
+local threshold=0
local sweepnode=nil
local sweepprev=nil
local sweepnext=nil
+local sweephead={}
local notmatchpre={}
local notmatchpost={}
local notmatchreplace={}
+local handlers={}
+local function isspace(n)
+ if getid(n)==glue_code then
+ local w=getfield(n,"width")
+ if w>=threshold then
+ return 32
+ end
+ end
+end
local checkstep=(nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
local registerstep=(nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
local registermessage=(nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
@@ -11840,21 +18177,26 @@ local function gref(n)
return "<error in node mode tracing>"
end
end
-local function cref(kind,chainname,chainlookupname,lookupname,index)
- if index then
- return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index)
- elseif lookupname then
- return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname])
- elseif chainlookupname then
- return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname])
- elseif chainname then
- return formatters["feature %a, chain %a"](kind,lookuptags[chainname])
+local function cref(dataset,sequence,index)
+ if not dataset then
+ return "no valid dataset"
+ elseif index then
+ return formatters["feature %a, type %a, chain lookup %a, index %a"](dataset[4],sequence.type,sequence.name,index)
else
- return formatters["feature %a"](kind)
+ return formatters["feature %a, type %a, chain lookup %a"](dataset[4],sequence.type,sequence.name)
end
end
-local function pref(kind,lookupname)
- return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname])
+local function pref(dataset,sequence)
+ return formatters["feature %a, type %a, lookup %a"](dataset[4],sequence.type,sequence.name)
+end
+local function mref(rlmode)
+ if not rlmode or rlmode==0 then
+ return "---"
+ elseif rlmode==-1 or rlmode=="+TRT" then
+ return "r2l"
+ else
+ return "l2r"
+ end
end
local function copy_glyph(g)
local components=getfield(g,"components")
@@ -11871,16 +18213,14 @@ local function copy_glyph(g)
end
end
local function flattendisk(head,disc)
- local replace=getfield(disc,"replace")
+ local _,_,replace,_,_,replacetail=getdisc(disc,true)
setfield(disc,"replace",nil)
free_node(disc)
if head==disc then
local next=getnext(disc)
if replace then
if next then
- local tail=find_node_tail(replace)
- setnext(tail,next)
- setprev(next,tail)
+ setlink(replacetail,next)
end
return replace,replace
elseif next then
@@ -11889,47 +18229,36 @@ local function flattendisk(head,disc)
return
end
else
- local next=getnext(disc)
- local prev=getprev(disc)
+ local prev,next=getboth(disc)
if replace then
- local tail=find_node_tail(replace)
if next then
- setnext(tail,next)
- setprev(next,tail)
+ setlink(replacetail,next)
end
- setnext(prev,replace)
- setprev(replace,prev)
+ setlink(prev,replace)
return head,replace
else
- if next then
- setprev(next,prev)
- end
- setnext(prev,next)
+ setlink(prev,next)
return head,next
end
end
end
local function appenddisc(disc,list)
- local post=getfield(disc,"post")
- local replace=getfield(disc,"replace")
- local phead=list
- local rhead=copy_node_list(list)
- local ptail=find_node_tail(post)
- local rtail=find_node_tail(replace)
+ local pre,post,replace,pretail,posttail,replacetail=getdisc(disc,true)
+ local posthead=list
+ local replacehead=copy_node_list(list)
if post then
- setnext(ptail,phead)
- setprev(phead,ptail)
+ setlink(posttail,posthead)
else
- setfield(disc,"post",phead)
+ post=phead
end
if replace then
- setnext(rtail,rhead)
- setprev(rhead,rtail)
+ setlink(replacetail,replacehead)
else
- setfield(disc,"replace",rhead)
+ replace=rhead
end
+ setdisc(disc,pre,post,replace)
end
-local function markstoligature(kind,lookupname,head,start,stop,char)
+local function markstoligature(head,start,stop,char)
if start==stop and getchar(start)==char then
return head,start
else
@@ -11945,14 +18274,8 @@ local function markstoligature(kind,lookupname,head,start,stop,char)
setchar(base,char)
setsubtype(base,ligature_code)
setfield(base,"components",start)
- if prev then
- setnext(prev,base)
- end
- if next then
- setprev(next,base)
- end
- setnext(base,next)
- setprev(base,prev)
+ setlink(prev,base)
+ setlink(base,next)
return head,base
end
end
@@ -11974,7 +18297,7 @@ local function getcomponentindex(start)
end
end
local a_noligature=attributes.private("noligature")
-local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound)
+local function toligature(head,start,stop,char,dataset,sequence,markflag,discfound)
if getattr(start,a_noligature)==1 then
return head,start
end
@@ -12005,8 +18328,7 @@ local function toligature(kind,lookupname,head,start,stop,char,markflag,discfoun
if next then
setprev(next,base)
end
- setprev(base,prev)
- setnext(base,next)
+ setboth(base,prev,next)
if not discfound then
local deletemarks=markflag~="mark"
local components=start
@@ -12022,65 +18344,57 @@ local function toligature(kind,lookupname,head,start,stop,char,markflag,discfoun
elseif not deletemarks then
setligaindex(start,baseindex+getligaindex(start,componentindex))
if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
+ logwarning("%s: keep mark %s, gets index %s",pref(dataset,sequence),gref(char),getligaindex(start))
end
local n=copy_node(start)
copyinjection(n,start)
head,current=insert_node_after(head,current,n)
elseif trace_marks then
- logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char))
+ logwarning("%s: delete mark %s",pref(dataset,sequence),gref(char))
end
start=getnext(start)
end
local start=getnext(current)
- while start and getid(start)==glyph_code do
- local char=getchar(start)
- if marks[char] then
- setligaindex(start,baseindex+getligaindex(start,componentindex))
- if trace_marks then
- logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
+ while start do
+ local char=ischar(start)
+ if char then
+ if marks[char] then
+ setligaindex(start,baseindex+getligaindex(start,componentindex))
+ if trace_marks then
+ logwarning("%s: set mark %s, gets index %s",pref(dataset,sequence),gref(char),getligaindex(start))
+ end
+ start=getnext(start)
+ else
+ break
end
else
break
end
- start=getnext(start)
end
else
- local discprev=getprev(discfound)
- local discnext=getnext(discfound)
+ local discprev,discnext=getboth(discfound)
if discprev and discnext then
- local pre=getfield(discfound,"pre")
- local post=getfield(discfound,"post")
- local replace=getfield(discfound,"replace")
+ local pre,post,replace,pretail,posttail,replacetail=getdisc(discfound,true)
if not replace then
local prev=getprev(base)
local copied=copy_node_list(comp)
setprev(discnext,nil)
setnext(discprev,nil)
if pre then
- setnext(discprev,pre)
- setprev(pre,discprev)
+ setlink(discprev,pre)
end
pre=comp
if post then
- local tail=find_node_tail(post)
- setnext(tail,discnext)
- setprev(discnext,tail)
+ setlink(posttail,discnext)
setprev(post,nil)
else
post=discnext
end
- setnext(prev,discfound)
- setprev(discfound,prev)
- setnext(discfound,next)
- setprev(next,discfound)
- setnext(base,nil)
- setprev(base,nil)
+ setlink(prev,discfound)
+ setlink(discfound,next)
+ setboth(base,nil,nil)
setfield(base,"components",copied)
- setfield(discfound,"pre",pre)
- setfield(discfound,"post",post)
- setfield(discfound,"replace",base)
- setsubtype(discfound,discretionary_code)
+ setdisc(discfound,pre,post,base,discretionary_code)
base=prev
end
end
@@ -12098,12 +18412,7 @@ local function multiple_glyphs(head,start,multiple,ignoremarks)
local n=copy_node(start)
resetinjection(n)
setchar(n,multiple[k])
- setprev(n,start)
- setnext(n,sn)
- if sn then
- setprev(sn,n)
- end
- setnext(start,n)
+ insert_node_after(head,start,n)
start=n
end
end
@@ -12115,7 +18424,7 @@ local function multiple_glyphs(head,start,multiple,ignoremarks)
return head,start,false
end
end
-local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
+local function get_alternative_glyph(start,alternatives,value)
local n=#alternatives
if value=="random" then
local r=random(1,n)
@@ -12124,70 +18433,73 @@ local function get_alternative_glyph(start,alternatives,value,trace_alternatives
return alternatives[1],trace_alternatives and formatters["value %a, taking %a"](value,1)
elseif value=="last" then
return alternatives[n],trace_alternatives and formatters["value %a, taking %a"](value,n)
- else
- value=tonumber(value)
- if type(value)~="number" then
- return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
- elseif value>n then
- local defaultalt=otf.defaultnodealternate
- if defaultalt=="first" then
- return alternatives[n],trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
- elseif defaultalt=="last" then
- return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,n)
- else
- return false,trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
- end
- elseif value==0 then
- return getchar(start),trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
- elseif value<1 then
- return alternatives[1],trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
+ end
+ value=value==true and 1 or tonumber(value)
+ if type(value)~="number" then
+ return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ end
+ if value>n then
+ local defaultalt=otf.defaultnodealternate
+ if defaultalt=="first" then
+ return alternatives[n],trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif defaultalt=="last" then
+ return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,n)
else
- return alternatives[value],trace_alternatives and formatters["value %a, taking %a"](value,value)
+ return false,trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
end
+ elseif value==0 then
+ return getchar(start),trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
+ elseif value<1 then
+ return alternatives[1],trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
+ else
+ return alternatives[value],trace_alternatives and formatters["value %a, taking %a"](value,value)
end
end
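-- Hedged sketch of how the alternate value is resolved above, assuming a glyph
-- with three alternates registered:
--
--   "first" or true  -> alternatives[1]
--   "last"           -> alternatives[3]
--   2                -> alternatives[2]
--   "random"         -> one of the three, picked with math.random
--   0 / non-number   -> the current glyph / the first alternate
--   out of range     -> governed by otf.defaultnodealternate ("first"/"last")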
-function handlers.gsub_single(head,start,kind,lookupname,replacement)
+function handlers.gsub_single(head,start,dataset,sequence,replacement)
if trace_singles then
- logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement))
+ logprocess("%s: replacing %s by single %s",pref(dataset,sequence),gref(getchar(start)),gref(replacement))
end
resetinjection(start)
setchar(start,replacement)
return head,start,true
end
-function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
- local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue
- local choice,comment=get_alternative_glyph(start,alternative,value,trace_alternatives)
+function handlers.gsub_alternate(head,start,dataset,sequence,alternative)
+ local kind=dataset[4]
+ local what=dataset[1]
+ local value=what==true and tfmdata.shared.features[kind] or what
+ local choice,comment=get_alternative_glyph(start,alternative,value)
if choice then
if trace_alternatives then
- logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment)
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(dataset,sequence),gref(getchar(start)),gref(choice),comment)
end
resetinjection(start)
setchar(start,choice)
else
if trace_alternatives then
- logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment)
+ logwarning("%s: no variant %a for %s, %s",pref(dataset,sequence),value,gref(getchar(start)),comment)
end
end
return head,start,true
end
-function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
+function handlers.gsub_multiple(head,start,dataset,sequence,multiple)
if trace_multiples then
- logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple))
+ logprocess("%s: replacing %s by multiple %s",pref(dataset,sequence),gref(getchar(start)),gref(multiple))
end
return multiple_glyphs(head,start,multiple,sequence.flags[1])
end
-function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
- local s,stop=getnext(start),nil
+function handlers.gsub_ligature(head,start,dataset,sequence,ligature)
+ local current=getnext(start)
+ local stop=nil
local startchar=getchar(start)
if marks[startchar] then
- while s do
- local id=getid(s)
- if id==glyph_code and getfont(s)==currentfont and getsubtype(s)<256 then
- local lg=ligature[getchar(s)]
+ while current do
+ local char=ischar(current,currentfont)
+ if char then
+ local lg=ligature[char]
if lg then
- stop=s
+ stop=current
ligature=lg
- s=getnext(s)
+ current=getnext(current)
else
break
end
@@ -12200,10 +18512,10 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
if lig then
if trace_ligatures then
local stopchar=getchar(stop)
- head,start=markstoligature(kind,lookupname,head,start,stop,lig)
- logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
+ head,start=markstoligature(head,start,stop,lig)
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(dataset,sequence),gref(startchar),gref(stopchar),gref(getchar(start)))
else
- head,start=markstoligature(kind,lookupname,head,start,stop,lig)
+ head,start=markstoligature(head,start,stop,lig)
end
return head,start,true,false
else
@@ -12213,52 +18525,49 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
local skipmark=sequence.flags[1]
local discfound=false
local lastdisc=nil
- while s do
- local id=getid(s)
- if id==glyph_code and getsubtype(s)<256 then
- if getfont(s)==currentfont then
- local char=getchar(s)
- if skipmark and marks[char] then
- s=getnext(s)
- else
- local lg=ligature[char]
- if lg then
- if not discfound and lastdisc then
- discfound=lastdisc
- lastdisc=nil
- end
- stop=s
- ligature=lg
- s=getnext(s)
- else
- break
+ while current do
+ local char,id=ischar(current,currentfont)
+ if char then
+ if skipmark and marks[char] then
+ current=getnext(current)
+ else
+ local lg=ligature[char]
+ if lg then
+ if not discfound and lastdisc then
+ discfound=lastdisc
+ lastdisc=nil
end
+ stop=current
+ ligature=lg
+ current=getnext(current)
+ else
+ break
end
- else
- break
end
+ elseif char==false then
+ break
elseif id==disc_code then
- lastdisc=s
- s=getnext(s)
+ lastdisc=current
+ current=getnext(current)
else
break
end
end
- local lig=ligature.ligature
+ local lig=ligature.ligature
if lig then
if stop then
if trace_ligatures then
local stopchar=getchar(stop)
- head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
- logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
+ head,start=toligature(head,start,stop,lig,dataset,sequence,skipmark,discfound)
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(dataset,sequence),gref(startchar),gref(stopchar),gref(lig))
else
- head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ head,start=toligature(head,start,stop,lig,dataset,sequence,skipmark,discfound)
end
else
resetinjection(start)
setchar(start,lig)
if trace_ligatures then
- logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
+ logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(dataset,sequence),gref(startchar),gref(lig))
end
end
return head,start,true,discfound
@@ -12267,190 +18576,197 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
end
return head,start,false,discfound
end
-function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence,lookuphash,i,injection)
+function handlers.gpos_single(head,start,dataset,sequence,kerns,rlmode,step,i,injection)
local startchar=getchar(start)
- local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,injection)
- if trace_kerns then
- logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
+ if step.format=="pair" then
+ local dx,dy,w,h=setpair(start,factor,rlmode,sequence.flags[4],kerns,injection)
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(dataset,sequence),gref(startchar),dx,dy,w,h)
+ end
+ else
+ local k=setkern(start,factor,rlmode,kerns,injection)
+ if trace_kerns then
+ logprocess("%s: shifting single %s by %p",pref(dataset,sequence),gref(startchar),k)
+ end
end
return head,start,false
end
-function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence,lookuphash,i,injection)
+function handlers.gpos_pair(head,start,dataset,sequence,kerns,rlmode,step,i,injection)
local snext=getnext(start)
if not snext then
return head,start,false
else
local prev=start
local done=false
- local factor=tfmdata.parameters.factor
- local lookuptype=lookuptypes[lookupname]
- while snext and getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do
- local nextchar=getchar(snext)
- local krn=kerns[nextchar]
- if not krn and marks[nextchar] then
- prev=snext
- snext=getnext(snext)
- else
- if not krn then
- elseif type(krn)=="table" then
- if lookuptype=="pair" then
- local a,b=krn[2],krn[3]
- if a and #a>0 then
- local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,injection)
+ while snext do
+ local nextchar=ischar(snext,currentfont)
+ if nextchar then
+ local krn=kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev=snext
+ snext=getnext(snext)
+ elseif not krn then
+ break
+ elseif step.format=="pair" then
+ local a,b=krn[1],krn[2]
+ if optimizekerns then
+ if not b and a[1]==0 and a[2]==0 and a[4]==0 then
+ local k=setkern(snext,factor,rlmode,a[3],injection)
if trace_kerns then
- local startchar=getchar(start)
- logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ logprocess("%s: shifting single %s by %p",pref(dataset,sequence),gref(nextchar),k)
end
+ done=true
+ break
end
- if b and #b>0 then
- local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,injection)
- if trace_kerns then
- local startchar=getchar(start)
- logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
+ end
+ if a and #a>0 then
+ local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,injection)
+ if trace_kerns then
+ local startchar=getchar(start)
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p) as %s",pref(dataset,sequence),gref(startchar),gref(nextchar),x,y,w,h,injection or "injections")
+ end
+ end
+ if b and #b>0 then
+ local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,injection)
+ if trace_kerns then
+ local startchar=getchar(snext)
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p) as %s",pref(dataset,sequence),gref(startchar),gref(nextchar),x,y,w,h,injection or "injections")
end
- else
- report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
end
done=true
+ break
elseif krn~=0 then
local k=setkern(snext,factor,rlmode,krn,injection)
if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
+ logprocess("%s: inserting kern %p between %s and %s as %s",pref(dataset,sequence),k,gref(getchar(prev)),gref(nextchar),injection or "injections")
end
done=true
+ break
+ else
+ break
end
+ else
break
end
end
return head,start,done
end
end
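-- Hedged note on the kern data handled by gpos_single/gpos_pair above: for
-- step.format=="pair" an entry is { first, second } where each part is an
-- { x, y, w, h } placement/advance tuple passed to setpair, otherwise it is a
-- single advance passed to setkern; with optimizekerns enabled, a pair whose
-- first part is a pure horizontal advance ({ 0, 0, w, 0 }) and that has no
-- second part is collapsed into a plain kern on the following glyph.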
-function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
+function handlers.gpos_mark2base(head,start,dataset,sequence,markanchors,rlmode)
local markchar=getchar(start)
if marks[markchar] then
local base=getprev(start)
- if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
- local basechar=getchar(base)
- if marks[basechar] then
- while true do
- base=getprev(base)
- if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
- basechar=getchar(base)
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return head,start,false
- end
- end
- end
- local baseanchors=descriptions[basechar]
- if baseanchors then
- baseanchors=baseanchors.anchors
- end
- if baseanchors then
- local baseanchors=baseanchors['basechar']
- if baseanchors then
- local al=anchorlookups[lookupname]
- for anchor,ba in next,baseanchors do
- if al[anchor] then
- local ma=markanchors[anchor]
- if ma then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
- pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ if base then
+ local basechar=ischar(base,currentfont)
+ if basechar then
+ if marks[basechar] then
+ while base do
+ base=getprev(base)
+ if base then
+ basechar=ischar(base,currentfont)
+ if basechar then
+ if not marks[basechar] then
+ break
end
- return head,start,true
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s, case %i",pref(dataset,sequence),gref(markchar),1)
+ end
+ return head,start,false
end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s, case %i",pref(dataset,sequence),gref(markchar),2)
+ end
+ return head,start,false
end
end
- if trace_bugs then
- logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ local ba=markanchors[1][basechar]
+ if ba then
+ local ma=markanchors[2]
+ local dx,dy,bound=setmark(start,base,factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ pref(dataset,sequence),anchor,bound,gref(markchar),gref(basechar),dx,dy)
end
+ return head,start,true
end
elseif trace_bugs then
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ logwarning("%s: nothing preceding, case %i",pref(dataset,sequence),1)
end
elseif trace_bugs then
- logwarning("%s: prev node is no char",pref(kind,lookupname))
+ logwarning("%s: nothing preceding, case %i",pref(dataset,sequence),2)
end
elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ logwarning("%s: mark %s is no mark",pref(dataset,sequence),gref(markchar))
end
return head,start,false
end
-function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
+function handlers.gpos_mark2ligature(head,start,dataset,sequence,markanchors,rlmode)
local markchar=getchar(start)
if marks[markchar] then
local base=getprev(start)
- if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
- local basechar=getchar(base)
- if marks[basechar] then
- while true do
- base=getprev(base)
- if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
- basechar=getchar(base)
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return head,start,false
- end
- end
- end
- local index=getligaindex(start)
- local baseanchors=descriptions[basechar]
- if baseanchors then
- baseanchors=baseanchors.anchors
- if baseanchors then
- local baseanchors=baseanchors['baselig']
- if baseanchors then
- local al=anchorlookups[lookupname]
- for anchor,ba in next,baseanchors do
- if al[anchor] then
- local ma=markanchors[anchor]
- if ma then
- ba=ba[index]
- if ba then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
- if trace_marks then
- logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
- pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
- end
- return head,start,true
- else
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index)
- end
- end
+ if base then
+ local basechar=ischar(base,currentfont)
+ if basechar then
+ if marks[basechar] then
+ while base do
+ base=getprev(base)
+ if base then
+ basechar=ischar(base,currentfont)
+ if basechar then
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s, case %i",pref(dataset,sequence),gref(markchar),1)
end
+ return head,start,false
end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s, case %i",pref(dataset,sequence),gref(markchar),2)
+ end
+ return head,start,false
end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ local ba=markanchors[1][basechar]
+ if ba then
+ local ma=markanchors[2]
+ if ma then
+ local index=getligaindex(start)
+ ba=ba[index]
+ if ba then
+ local dx,dy,bound=setmark(start,base,factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ pref(dataset,sequence),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head,start,true
+ else
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(dataset,sequence),gref(markchar),gref(basechar),index)
+ end
end
end
+ elseif trace_bugs then
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
end
elseif trace_bugs then
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ logwarning("%s: prev node is no char, case %i",pref(dataset,sequence),1)
end
elseif trace_bugs then
- logwarning("%s: prev node is no char",pref(kind,lookupname))
+ logwarning("%s: prev node is no char, case %i",pref(dataset,sequence),2)
end
elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ logwarning("%s: mark %s is no mark",pref(dataset,sequence),gref(markchar))
end
return head,start,false
end
-function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
+function handlers.gpos_mark2mark(head,start,dataset,sequence,markanchors,rlmode)
local markchar=getchar(start)
if marks[markchar] then
local base=getprev(start)
@@ -12465,96 +18781,61 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence
end
end
end
- if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
- local basechar=getchar(base)
- local baseanchors=descriptions[basechar]
- if baseanchors then
- baseanchors=baseanchors.anchors
- if baseanchors then
- baseanchors=baseanchors['basemark']
- if baseanchors then
- local al=anchorlookups[lookupname]
- for anchor,ba in next,baseanchors do
- if al[anchor] then
- local ma=markanchors[anchor]
- if ma then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar],true)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
- pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head,start,true
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
+ if base then
+ local basechar=ischar(base,currentfont)
+ if basechar then
+ local ba=markanchors[1][basechar]
+ if ba then
+ local ma=markanchors[2]
+ local dx,dy,bound=setmark(start,base,factor,rlmode,ba,ma,characters[basechar],true)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ pref(dataset,sequence),anchor,bound,gref(markchar),gref(basechar),dx,dy)
end
+ return head,start,true
end
- elseif trace_bugs then
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",pref(kind,lookupname))
end
elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ logwarning("%s: mark %s is no mark",pref(dataset,sequence),gref(markchar))
end
return head,start,false
end
-function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence)
- local alreadydone=cursonce and getprop(start,a_cursbase)
- if not alreadydone then
- local done=false
- local startchar=getchar(start)
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
- end
- else
- local nxt=getnext(start)
- while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do
- local nextchar=getchar(nxt)
- if marks[nextchar] then
- nxt=getnext(nxt)
- else
- local entryanchors=descriptions[nextchar]
- if entryanchors then
- entryanchors=entryanchors.anchors
- if entryanchors then
- entryanchors=entryanchors['centry']
- if entryanchors then
- local al=anchorlookups[lookupname]
- for anchor,entry in next,entryanchors do
- if al[anchor] then
- local exit=exitanchors[anchor]
- if exit then
- local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
- end
- done=true
- break
- end
- end
- end
+function handlers.gpos_cursive(head,start,dataset,sequence,exitanchors,rlmode,step,i)
+ local done=false
+ local startchar=getchar(start)
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(dataset,sequence),gref(startchar))
+ end
+ else
+ local nxt=getnext(start)
+ while not done and nxt do
+ local nextchar=ischar(nxt,currentfont)
+ if not nextchar then
+ break
+ elseif marks[nextchar] then
+ nxt=getnext(nxt)
+ else
+ local exit=exitanchors[3]
+ if exit then
+ local entry=exitanchors[1][nextchar]
+ if entry then
+ entry=entry[2]
+ if entry then
+ local dx,dy,bound=setcursive(start,nxt,factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in %s mode",pref(dataset,sequence),gref(startchar),gref(nextchar),dx,dy,anchor,bound,mref(rlmode))
end
+ done=true
end
- elseif trace_bugs then
- onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
end
- break
end
+ break
end
end
- return head,start,done
- else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
- end
- return head,start,false
end
+ return head,start,done
end
local chainprocs={}
local function logprocess(...)
@@ -12571,16 +18852,12 @@ local function logprocess(...)
report_chain(...)
end
local logwarning=report_chain
-function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
- logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
- return head,start,false
-end
-function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
+local function reversesub(head,start,stop,dataset,sequence,replacements,rlmode)
local char=getchar(start)
local replacement=replacements[char]
if replacement then
if trace_singles then
- logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
+ logprocess("%s: single reverse replacement of %s by %s",cref(dataset,sequence),gref(char),gref(replacement))
end
resetinjection(start)
setchar(start,replacement)
@@ -12589,36 +18866,35 @@ function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,loo
return head,start,false
end
end
-function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- local current=start
- local subtables=currentlookup.subtables
- if #subtables>1 then
- logwarning("todo: check if we need to loop over the replacements: % t",subtables)
+chainprocs.reversesub=reversesub
+local function reportmoresteps(dataset,sequence)
+ logwarning("%s: more than 1 step",cref(dataset,sequence))
+end
+function chainprocs.gsub_single(head,start,stop,dataset,sequence,currentlookup,chainindex)
+ local steps=currentlookup.steps
+ local nofsteps=currentlookup.nofsteps
+ if nofsteps>1 then
+ reportmoresteps(dataset,sequence)
end
+ local current=start
while current do
- if getid(current)==glyph_code then
- local currentchar=getchar(current)
- local lookupname=subtables[1]
- local replacement=lookuphash[lookupname]
- if not replacement then
+ local currentchar=ischar(current)
+ if currentchar then
+ local replacement=steps[1].coverage[currentchar]
+ if not replacement or replacement=="" then
if trace_bugs then
- logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ logwarning("%s: no single for %s",cref(dataset,sequence,chainindex),gref(currentchar))
end
else
- replacement=replacement[currentchar]
- if not replacement or replacement=="" then
- if trace_bugs then
- logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
- end
- else
- if trace_singles then
- logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
- end
- resetinjection(current)
- setchar(current,replacement)
+ if trace_singles then
+ logprocess("%s: replacing single %s by %s",cref(dataset,sequence,chainindex),gref(currentchar),gref(replacement))
end
+ resetinjection(current)
+ setchar(current,replacement)
end
return head,start,true
+ elseif currentchar==false then
+ break
elseif current==stop then
break
else
@@ -12627,63 +18903,57 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo
end
return head,start,false
end
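-- chainprocs.gsub_single now warns via reportmoresteps when a chain lookup carries
-- more than one step and reads the replacement directly from steps[1].coverage[char]
-- instead of lookuphash[lookupname][char]; a minimal sketch of the assumed shape
-- (hypothetical values, not taken from this commit):
--
--   currentlookup = {
--     nofsteps = 1,
--     steps    = { { coverage = { [0x0041] = 0x00C5 } } }, -- e.g. A -> Å
--   }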
-function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+function chainprocs.gsub_multiple(head,start,stop,dataset,sequence,currentlookup)
+ local steps=currentlookup.steps
+ local nofsteps=currentlookup.nofsteps
+ if nofsteps>1 then
+ reportmoresteps(dataset,sequence)
+ end
local startchar=getchar(start)
- local subtables=currentlookup.subtables
- local lookupname=subtables[1]
- local replacements=lookuphash[lookupname]
- if not replacements then
+ local replacement=steps[1].coverage[startchar]
+ if not replacement or replacement=="" then
if trace_bugs then
- logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
+ logwarning("%s: no multiple for %s",cref(dataset,sequence),gref(startchar))
end
else
- replacements=replacements[startchar]
- if not replacements or replacement=="" then
- if trace_bugs then
- logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
- end
- else
- if trace_multiples then
- logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
- end
- return multiple_glyphs(head,start,replacements,currentlookup.flags[1])
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple characters %s",cref(dataset,sequence),gref(startchar),gref(replacement))
end
+ return multiple_glyphs(head,start,replacement,currentlookup.flags[1])
end
return head,start,false
end
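-- gsub_multiple follows the same pattern: steps[1].coverage[startchar] yields the
-- replacement sequence that is handed to multiple_glyphs together with the lookup's
-- mark flag (currentlookup.flags[1]); this also drops the old copy/paste slip where
-- the removed code assigned to `replacements` but then tested `replacement`.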
-function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+function chainprocs.gsub_alternate(head,start,stop,dataset,sequence,currentlookup)
+ local steps=currentlookup.steps
+ local nofsteps=currentlookup.nofsteps
+ if nofsteps>1 then
+ reportmoresteps(dataset,sequence)
+ end
+ local kind=dataset[4]
+ local what=dataset[1]
+ local value=what==true and tfmdata.shared.features[kind] or what
local current=start
- local subtables=currentlookup.subtables
- local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue
while current do
- if getid(current)==glyph_code then
- local currentchar=getchar(current)
- local lookupname=subtables[1]
- local alternatives=lookuphash[lookupname]
- if not alternatives then
- if trace_bugs then
- logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
- end
- else
- alternatives=alternatives[currentchar]
- if alternatives then
- local choice,comment=get_alternative_glyph(current,alternatives,value,trace_alternatives)
- if choice then
- if trace_alternatives then
- logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment)
- end
- resetinjection(start)
- setchar(start,choice)
- else
- if trace_alternatives then
- logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment)
- end
+ local currentchar=ischar(current)
+ if currentchar then
+ local alternatives=steps[1].coverage[currentchar]
+ if alternatives then
+ local choice,comment=get_alternative_glyph(current,alternatives,value)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",cref(dataset,sequence),gref(char),choice,gref(choice),comment)
+ end
+ resetinjection(start)
+ setchar(start,choice)
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",cref(dataset,sequence),value,gref(char),comment)
end
- elseif trace_bugs then
- logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment)
end
end
return head,start,true
+ elseif currentchar==false then
+ break
elseif current==stop then
break
else
@@ -12692,295 +18962,311 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
end
return head,start,false
end
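-- gsub_alternate derives the user value from dataset[1]/dataset[4] (falling back to
-- tfmdata.shared.features[kind] when the feature is simply enabled) and picks the
-- alternative from steps[1].coverage[currentchar]; the trace messages still format a
-- `char` name that is not set in this scope, so they only matter when tracing is on.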
-function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+function chainprocs.gsub_ligature(head,start,stop,dataset,sequence,currentlookup,chainindex)
+ local steps=currentlookup.steps
+ local nofsteps=currentlookup.nofsteps
+ if nofsteps>1 then
+ reportmoresteps(dataset,sequence)
+ end
local startchar=getchar(start)
- local subtables=currentlookup.subtables
- local lookupname=subtables[1]
- local ligatures=lookuphash[lookupname]
+ local ligatures=steps[1].coverage[startchar]
if not ligatures then
if trace_bugs then
- logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ logwarning("%s: no ligatures starting with %s",cref(dataset,sequence,chainindex),gref(startchar))
end
else
- ligatures=ligatures[startchar]
- if not ligatures then
- if trace_bugs then
- logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
- end
- else
- local s=getnext(start)
- local discfound=false
- local last=stop
- local nofreplacements=1
- local skipmark=currentlookup.flags[1]
- while s do
- local id=getid(s)
- if id==disc_code then
- if not discfound then
- discfound=s
- end
- if s==stop then
- break
- else
- s=getnext(s)
- end
+ local current=getnext(start)
+ local discfound=false
+ local last=stop
+ local nofreplacements=1
+ local skipmark=currentlookup.flags[1]
+ while current do
+ local id=getid(current)
+ if id==disc_code then
+ if not discfound then
+ discfound=current
+ end
+ if current==stop then
+ break
else
- local schar=getchar(s)
- if skipmark and marks[schar] then
- s=getnext(s)
- else
- local lg=ligatures[schar]
- if lg then
- ligatures,last,nofreplacements=lg,s,nofreplacements+1
- if s==stop then
- break
- else
- s=getnext(s)
- end
- else
+ current=getnext(current)
+ end
+ else
+ local schar=getchar(current)
+ if skipmark and marks[schar] then
+ current=getnext(current)
+ else
+ local lg=ligatures[schar]
+ if lg then
+ ligatures=lg
+ last=current
+ nofreplacements=nofreplacements+1
+ if current==stop then
break
+ else
+ current=getnext(current)
end
- end
- end
- end
- local l2=ligatures.ligature
- if l2 then
- if chainindex then
- stop=last
- end
- if trace_ligatures then
- if start==stop then
- logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
else
- logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2))
+ break
end
end
- head,start=toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
- return head,start,true,nofreplacements,discfound
- elseif trace_bugs then
+ end
+ end
+ local ligature=ligatures.ligature
+ if ligature then
+ if chainindex then
+ stop=last
+ end
+ if trace_ligatures then
if start==stop then
- logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ logprocess("%s: replacing character %s by ligature %s case 3",cref(dataset,sequence,chainindex),gref(startchar),gref(ligature))
else
- logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)))
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(dataset,sequence,chainindex),gref(startchar),gref(getchar(stop)),gref(ligature))
end
end
+ head,start=toligature(head,start,stop,ligature,dataset,sequence,skipmark,discfound)
+ return head,start,true,nofreplacements,discfound
+ elseif trace_bugs then
+ if start==stop then
+ logwarning("%s: replacing character %s by ligature fails",cref(dataset,sequence,chainindex),gref(startchar))
+ else
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(dataset,sequence,chainindex),gref(startchar),gref(getchar(stop)))
+ end
end
end
return head,start,false,0,false
end
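-- gsub_ligature walks from start towards stop through the nested coverage tables
-- (ligatures[schar] descends one level per component, marks are skipped when the
-- lookup's skipmark flag is set, discretionaries are remembered in discfound) and,
-- once a node with a .ligature field is reached, hands the range to toligature.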
-function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+function chainprocs.gpos_single(head,start,stop,dataset,sequence,currentlookup,rlmode,chainindex)
+ local steps=currentlookup.steps
+ local nofsteps=currentlookup.nofsteps
+ if nofsteps>1 then
+ reportmoresteps(dataset,sequence)
+ end
local startchar=getchar(start)
- local subtables=currentlookup.subtables
- local lookupname=subtables[1]
- local kerns=lookuphash[lookupname]
- if kerns then
- kerns=kerns[startchar]
- if kerns then
- local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns)
- if trace_kerns then
- logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
- end
+ local step=steps[1]
+ local kerns=step.coverage[startchar]
+ if not kerns then
+ elseif step.format=="pair" then
+ local dx,dy,w,h=setpair(start,factor,rlmode,sequence.flags[4],kerns)
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(dataset,sequence),gref(startchar),dx,dy,w,h)
+ end
+ else
+ local k=setkern(start,factor,rlmode,kerns,injection)
+ if trace_kerns then
+ logprocess("%s: shifting single %s by %p",cref(dataset,sequence),gref(startchar),k)
end
end
return head,start,false
end
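-- gpos_single distinguishes the two coverage formats: a "pair" step goes through
-- setpair (a value record with placement and advance corrections), anything else is
-- treated as a plain kern handled by setkern; both take the shared `factor` upvalue
-- instead of the old per-call tfmdata.parameters.factor lookup.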
-function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+function chainprocs.gpos_pair(head,start,stop,dataset,sequence,currentlookup,rlmode,chainindex)
+ local steps=currentlookup.steps
+ local nofsteps=currentlookup.nofsteps
+ if nofsteps>1 then
+ reportmoresteps(dataset,sequence)
+ end
local snext=getnext(start)
if snext then
local startchar=getchar(start)
- local subtables=currentlookup.subtables
- local lookupname=subtables[1]
- local kerns=lookuphash[lookupname]
+ local step=steps[1]
+ local kerns=step.coverage[startchar]
if kerns then
- kerns=kerns[startchar]
- if kerns then
- local lookuptype=lookuptypes[lookupname]
- local prev,done=start,false
- local factor=tfmdata.parameters.factor
- while snext and getid(snext)==glyph_code and getfont(snext)==currentfont and getsubtype(snext)<256 do
- local nextchar=getchar(snext)
- local krn=kerns[nextchar]
- if not krn and marks[nextchar] then
- prev=snext
- snext=getnext(snext)
- else
- if not krn then
- elseif type(krn)=="table" then
- if lookuptype=="pair" then
- local a,b=krn[2],krn[3]
- if a and #a>0 then
- local startchar=getchar(start)
- local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a)
- if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- if b and #b>0 then
- local startchar=getchar(start)
- local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b)
- if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- else
- report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
- end
- done=true
- elseif krn~=0 then
- local k=setkern(snext,factor,rlmode,krn)
+ local prev=start
+ local done=false
+ while snext do
+ local nextchar=ischar(snext,currentfont)
+ if not nextchar then
+ break
+ end
+ local krn=kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev=snext
+ snext=getnext(snext)
+ elseif not krn then
+ break
+ elseif step.format=="pair" then
+ local a,b=krn[1],krn[2]
+ if optimizekerns then
+ if not b and a[1]==0 and a[2]==0 and a[4]==0 then
+ local k=setkern(snext,factor,rlmode,a[3],"injections")
if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
+ logprocess("%s: shifting single %s by %p",cref(dataset,sequence),gref(startchar),k)
end
done=true
+ break
end
- break
end
+ if a and #a>0 then
+ local startchar=getchar(start)
+ local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,"injections")
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(dataset,sequence),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b>0 then
+ local startchar=getchar(start)
+ local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,"injections")
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(dataset,sequence),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ done=true
+ break
+ elseif krn~=0 then
+ local k=setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",cref(dataset,sequence),k,gref(getchar(prev)),gref(nextchar))
+ end
+ done=true
+ break
+ else
+ break
end
- return head,start,done
end
+ return head,start,done
end
end
return head,start,false
end
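-- gpos_pair scans forward over marks to find the second glyph and then either applies
-- a pair value record via setpair or a single kern via setkern; the optimizekerns
-- branch collapses a pair whose first record only carries an advance (a[1], a[2] and
-- a[4] all zero, no second record) into one setkern call on the following glyph.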
-function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+function chainprocs.gpos_mark2base(head,start,stop,dataset,sequence,currentlookup,rlmode)
+ local steps=currentlookup.steps
+ local nofsteps=currentlookup.nofsteps
+ if nofsteps>1 then
+ reportmoresteps(dataset,sequence)
+ end
local markchar=getchar(start)
if marks[markchar] then
- local subtables=currentlookup.subtables
- local lookupname=subtables[1]
- local markanchors=lookuphash[lookupname]
- if markanchors then
- markanchors=markanchors[markchar]
- end
+ local markanchors=steps[1].coverage[markchar]
if markanchors then
local base=getprev(start)
- if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
- local basechar=getchar(base)
- if marks[basechar] then
- while true do
- base=getprev(base)
- if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
- basechar=getchar(base)
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return head,start,false
- end
- end
- end
- local baseanchors=descriptions[basechar].anchors
- if baseanchors then
- local baseanchors=baseanchors['basechar']
- if baseanchors then
- local al=anchorlookups[lookupname]
- for anchor,ba in next,baseanchors do
- if al[anchor] then
- local ma=markanchors[anchor]
- if ma then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ if base then
+ local basechar=ischar(base,currentfont)
+ if basechar then
+ if marks[basechar] then
+ while base do
+ base=getprev(base)
+ if base then
+ local basechar=ischar(base,currentfont)
+ if basechar then
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s, case %i",pref(dataset,sequence),gref(markchar),1)
end
- return head,start,true
+ return head,start,false
end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s, case %i",pref(dataset,sequence),gref(markchar),2)
+ end
+ return head,start,false
end
end
- if trace_bugs then
- logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ local ba=markanchors[1][basechar]
+ if ba then
+ local ma=markanchors[2]
+ if ma then
+ local dx,dy,bound=setmark(start,base,factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ cref(dataset,sequence),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
end
end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char, case %i",cref(dataset,sequence),1)
end
elseif trace_bugs then
- logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
+ logwarning("%s: prev node is no char, case %i",cref(dataset,sequence),2)
end
elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ logwarning("%s: mark %s has no anchors",cref(dataset,sequence),gref(markchar))
end
elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ logwarning("%s: mark %s is no mark",cref(dataset,sequence),gref(markchar))
end
return head,start,false
end
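-- gpos_mark2base: markanchors is steps[1].coverage[markchar]; markanchors[1] maps a
-- base character to its base anchor and markanchors[2] is the mark anchor itself, so
-- the old descriptions[basechar].anchors['basechar'] traversal and the anchorlookups
-- indirection are gone. The numbered "case %i" messages only distinguish the two
-- bail-out points when bug tracing is enabled.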
-function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+function chainprocs.gpos_mark2ligature(head,start,stop,dataset,sequence,currentlookup,rlmode)
+ local steps=currentlookup.steps
+ local nofsteps=currentlookup.nofsteps
+ if nofsteps>1 then
+ reportmoresteps(dataset,sequence)
+ end
local markchar=getchar(start)
if marks[markchar] then
- local subtables=currentlookup.subtables
- local lookupname=subtables[1]
- local markanchors=lookuphash[lookupname]
- if markanchors then
- markanchors=markanchors[markchar]
- end
+ local markanchors=steps[1].coverage[markchar]
if markanchors then
local base=getprev(start)
- if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
- local basechar=getchar(base)
- if marks[basechar] then
- while true do
- base=getprev(base)
- if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
- basechar=getchar(base)
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
+ if base then
+ local basechar=ischar(base,currentfont)
+ if basechar then
+ if marks[basechar] then
+ while base do
+ base=getprev(base)
+ if base then
+ local basechar=ischar(base,currentfont)
+ if basechar then
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s, case %i",cref(dataset,sequence),markchar,1)
+ end
+ return head,start,false
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s, case %i",cref(dataset,sequence),markchar,2)
+ end
+ return head,start,false
end
- return head,start,false
end
end
- end
- local index=getligaindex(start)
- local baseanchors=descriptions[basechar].anchors
- if baseanchors then
- local baseanchors=baseanchors['baselig']
- if baseanchors then
- local al=anchorlookups[lookupname]
- for anchor,ba in next,baseanchors do
- if al[anchor] then
- local ma=markanchors[anchor]
- if ma then
- ba=ba[index]
- if ba then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy)
- end
- return head,start,true
- end
+ local ba=markanchors[1][basechar]
+ if ba then
+ local ma=markanchors[2]
+ if ma then
+ local index=getligaindex(start)
+ ba=ba[index]
+ if ba then
+ local dx,dy,bound=setmark(start,base,factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ cref(dataset,sequence),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy)
end
+ return head,start,true
end
end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
end
+ elseif trace_bugs then
+ logwarning("%s, prev node is no char, case %i",cref(dataset,sequence),1)
end
elseif trace_bugs then
- logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
+ logwarning("%s, prev node is no char, case %i",cref(dataset,sequence),2)
end
elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ logwarning("%s, mark %s has no anchors",cref(dataset,sequence),gref(markchar))
end
elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ logwarning("%s, mark %s is no mark",cref(dataset,sequence),gref(markchar))
end
return head,start,false
end
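-- gpos_mark2ligature is the same walk as mark2base, except that the base anchor table
-- is indexed once more with getligaindex(start) so that each ligature component keeps
-- its own attachment point.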
-function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+function chainprocs.gpos_mark2mark(head,start,stop,dataset,sequence,currentlookup,rlmode)
+ local steps=currentlookup.steps
+ local nofsteps=currentlookup.nofsteps
+ if nofsteps>1 then
+ reportmoresteps(dataset,sequence)
+ end
local markchar=getchar(start)
if marks[markchar] then
- local subtables=currentlookup.subtables
- local lookupname=subtables[1]
- local markanchors=lookuphash[lookupname]
- if markanchors then
- markanchors=markanchors[markchar]
- end
+ local markanchors=steps[1].coverage[markchar]
if markanchors then
local base=getprev(start)
local slc=getligaindex(start)
@@ -12994,112 +19280,91 @@ function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext
end
end
end
- if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
- local basechar=getchar(base)
- local baseanchors=descriptions[basechar].anchors
- if baseanchors then
- baseanchors=baseanchors['basemark']
- if baseanchors then
- local al=anchorlookups[lookupname]
- for anchor,ba in next,baseanchors do
- if al[anchor] then
- local ma=markanchors[anchor]
- if ma then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar],true)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head,start,true
- end
+ if base then
+ local basechar=ischar(base,currentfont)
+ if basechar then
+ local ba=markanchors[1][basechar]
+ if ba then
+ local ma=markanchors[2]
+ if ma then
+ local dx,dy,bound=setmark(start,base,factor,rlmode,ba,ma,characters[basechar],true)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ cref(dataset,sequence),anchor,bound,gref(markchar),gref(basechar),dx,dy)
end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ return head,start,true
end
end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark, case %i",cref(dataset,sequence),1)
end
elseif trace_bugs then
- logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
+ logwarning("%s: prev node is no mark, case %i",cref(dataset,sequence),2)
end
elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ logwarning("%s: mark %s has no anchors",cref(dataset,sequence),gref(markchar))
end
elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ logwarning("%s: mark %s is no mark",cref(dataset,sequence),gref(markchar))
end
return head,start,false
end
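-- gpos_mark2mark only attaches when the preceding mark carries a matching ligature
-- component index (the getligaindex check kept from the old code) and then calls
-- setmark with the trailing `true` flag that signals a mark-to-mark attachment.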
-function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local alreadydone=cursonce and getprop(start,a_cursbase)
- if not alreadydone then
- local startchar=getchar(start)
- local subtables=currentlookup.subtables
- local lookupname=subtables[1]
- local exitanchors=lookuphash[lookupname]
- if exitanchors then
- exitanchors=exitanchors[startchar]
- end
- if exitanchors then
- local done=false
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
- end
- else
- local nxt=getnext(start)
- while not done and nxt and getid(nxt)==glyph_code and getfont(nxt)==currentfont and getsubtype(nxt)<256 do
- local nextchar=getchar(nxt)
- if marks[nextchar] then
- nxt=getnext(nxt)
- else
- local entryanchors=descriptions[nextchar]
- if entryanchors then
- entryanchors=entryanchors.anchors
- if entryanchors then
- entryanchors=entryanchors['centry']
- if entryanchors then
- local al=anchorlookups[lookupname]
- for anchor,entry in next,entryanchors do
- if al[anchor] then
- local exit=exitanchors[anchor]
- if exit then
- local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
- end
- done=true
- break
- end
- end
- end
+function chainprocs.gpos_cursive(head,start,stop,dataset,sequence,currentlookup,rlmode)
+ local steps=currentlookup.steps
+ local nofsteps=currentlookup.nofsteps
+ if nofsteps>1 then
+ reportmoresteps(dataset,sequence)
+ end
+ local startchar=getchar(start)
+ local exitanchors=steps[1].coverage[startchar]
+ if exitanchors then
+ local done=false
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(dataset,sequence),gref(startchar))
+ end
+ else
+ local nxt=getnext(start)
+ while not done and nxt do
+ local nextchar=ischar(nxt,currentfont)
+ if not nextchar then
+ break
+ elseif marks[nextchar] then
+ nxt=getnext(nxt)
+ else
+ local exit=exitanchors[3]
+ if exit then
+ local entry=exitanchors[1][nextchar]
+ if entry then
+ entry=entry[2]
+ if entry then
+ local dx,dy,bound=setcursive(start,nxt,factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in %s mode",pref(dataset,sequence),gref(startchar),gref(nextchar),dx,dy,anchor,bound,mref(rlmode))
end
+ done=true
+ break
end
- elseif trace_bugs then
- onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
end
- break
+ elseif trace_bugs then
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
end
+ break
end
end
- return head,start,done
- else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
- end
- return head,start,false
end
- end
- return head,start,false
-end
-local function show_skip(kind,chainname,char,ck,class)
- if ck[9] then
- logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
+ return head,start,done
else
- logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(dataset,sequence),gref(getchar(start)),alreadydone)
+ end
+ return head,start,false
end
end
-local function chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,chainindex,sequence,chainproc)
+local function show_skip(dataset,sequence,char,ck,class)
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(dataset,sequence),gref(char),class,ck[1],ck[8] or ck[2])
+end
+local function chaindisk(head,start,last,dataset,sequence,chainlookup,rlmode,k,ck,chainproc)
if not start then
return head,start,false
end
@@ -13258,11 +19523,16 @@ local function chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlooku
local cl=getprev(lookaheaddisc)
local cprev=getprev(start)
local insertedmarks=0
- while cprev and getid(cf)==glyph_code and getfont(cf)==currentfont and getsubtype(cf)<256 and marks[getchar(cf)] do
- insertedmarks=insertedmarks+1
- cf=cprev
- startishead=cf==head
- cprev=getprev(cprev)
+ while cprev do
+ local char=ischar(cf,currentfont)
+ if char and marks[char] then
+ insertedmarks=insertedmarks+1
+ cf=cprev
+ startishead=cf==head
+ cprev=getprev(cprev)
+ else
+ break
+ end
end
setprev(lookaheaddisc,cprev)
if cprev then
@@ -13273,8 +19543,7 @@ local function chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlooku
if startishead then
head=lookaheaddisc
end
- local replace=getfield(lookaheaddisc,"replace")
- local pre=getfield(lookaheaddisc,"pre")
+ local pre,post,replace=getdisc(lookaheaddisc)
local new=copy_node_list(cf)
local cnew=new
for i=1,insertedmarks do
@@ -13285,22 +19554,19 @@ local function chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlooku
clast=getnext(clast)
end
if not notmatchpre[lookaheaddisc] then
- cf,start,ok=chainproc(cf,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ cf,start,ok=chainproc(cf,start,last,dataset,sequence,chainlookup,rlmode,k)
end
if not notmatchreplace[lookaheaddisc] then
- new,cnew,ok=chainproc(new,cnew,clast,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ new,cnew,ok=chainproc(new,cnew,clast,dataset,sequence,chainlookup,rlmode,k)
end
if pre then
- setnext(cl,pre)
- setprev(pre,cl)
+ setlink(cl,pre)
end
if replace then
local tail=find_node_tail(new)
- setnext(tail,replace)
- setprev(replace,tail)
+ setlink(tail,replace)
end
- setfield(lookaheaddisc,"pre",cf)
- setfield(lookaheaddisc,"replace",new)
+ setdisc(lookaheaddisc,cf,post,new)
start=getprev(lookaheaddisc)
sweephead[cf]=getnext(clast)
sweephead[new]=getnext(last)
@@ -13309,10 +19575,15 @@ local function chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlooku
local cl=start
local cnext=getnext(start)
local insertedmarks=0
- while cnext and getid(cnext)==glyph_code and getfont(cnext)==currentfont and getsubtype(cnext)<256 and marks[getchar(cnext)] do
- insertedmarks=insertedmarks+1
- cl=cnext
- cnext=getnext(cnext)
+ while cnext do
+ local char=ischar(cnext,currentfont)
+ if char and marks[char] then
+ insertedmarks=insertedmarks+1
+ cl=cnext
+ cnext=getnext(cnext)
+ else
+ break
+ end
end
if cnext then
setprev(cnext,backtrackdisc)
@@ -13320,8 +19591,7 @@ local function chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlooku
setnext(backtrackdisc,cnext)
setprev(cf,nil)
setnext(cl,nil)
- local replace=getfield(backtrackdisc,"replace")
- local post=getfield(backtrackdisc,"post")
+ local pre,post,replace,pretail,posttail,replacetail=getdisc(backtrackdisc,true)
local new=copy_node_list(cf)
local cnew=find_node_tail(new)
for i=1,insertedmarks do
@@ -13332,41 +19602,38 @@ local function chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlooku
clast=getnext(clast)
end
if not notmatchpost[backtrackdisc] then
- cf,start,ok=chainproc(cf,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ cf,start,ok=chainproc(cf,start,last,dataset,sequence,chainlookup,rlmode,k)
end
if not notmatchreplace[backtrackdisc] then
- new,cnew,ok=chainproc(new,cnew,clast,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ new,cnew,ok=chainproc(new,cnew,clast,dataset,sequence,chainlookup,rlmode,k)
end
if post then
- local tail=find_node_tail(post)
- setnext(tail,cf)
- setprev(cf,tail)
+ setlink(posttail,cf)
else
post=cf
end
if replace then
- local tail=find_node_tail(replace)
- setnext(tail,new)
- setprev(new,tail)
+ setlink(replacetail,new)
else
replace=new
end
- setfield(backtrackdisc,"post",post)
- setfield(backtrackdisc,"replace",replace)
+ setdisc(backtrackdisc,pre,post,replace)
start=getprev(backtrackdisc)
sweephead[post]=getnext(clast)
sweephead[replace]=getnext(last)
else
- head,start,ok=chainproc(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ head,start,ok=chainproc(head,start,last,dataset,sequence,chainlookup,rlmode,k)
end
return head,start,ok
end
-local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
+local noflags={ false,false,false,false }
+local function handle_contextchain(head,start,dataset,sequence,contexts,rlmode)
local sweepnode=sweepnode
local sweeptype=sweeptype
+ local currentfont=currentfont
local diskseen=false
local checkdisc=getprev(head)
- local flags=sequence.flags
+ local flags=sequence.flags or noflags
local done=false
local skipmark=flags[1]
local skipligature=flags[2]
@@ -13381,7 +19648,10 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
local seq=ck[3]
local s=#seq
if s==1 then
- match=getid(current)==glyph_code and getfont(current)==currentfont and getsubtype(current)<256 and seq[1][getchar(current)]
+ local char=ischar(current,currentfont)
+ if char then
+ match=seq[1][char]
+ end
else
local f=ck[4]
local l=ck[5]
@@ -13398,33 +19668,22 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
sweeptype=nil
end
if last then
- local id=getid(last)
- if id==glyph_code then
- if getfont(last)==currentfont and getsubtype(last)<256 then
- local char=getchar(last)
- local ccd=descriptions[char]
- if ccd then
- local class=ccd.class or "base"
- if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
- skipped=true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
+ local char,id=ischar(last,currentfont)
+ if char then
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class or "base"
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(dataset,sequence,char,ck,class)
+ end
+ last=getnext(last)
+ elseif seq[n][char] then
+ if n<l then
last=getnext(last)
- elseif seq[n][char] then
- if n<l then
- last=getnext(last)
- end
- n=n+1
- else
- if discfound then
- notmatchreplace[discfound]=true
- match=not notmatchpre[discfound]
- else
- match=false
- end
- break
end
+ n=n+1
else
if discfound then
notmatchreplace[discfound]=true
@@ -13443,14 +19702,22 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
break
end
+ last=getnext(last)
+ elseif char==false then
+ if discfound then
+ notmatchreplace[discfound]=true
+ match=not notmatchpre[discfound]
+ else
+ match=false
+ end
+ break
elseif id==disc_code then
diskseen=true
discfound=last
notmatchpre[last]=nil
notmatchpost[last]=true
notmatchreplace[last]=nil
- local pre=getfield(last,"pre")
- local replace=getfield(last,"replace")
+ local pre,post,replace=getdisc(last)
if pre then
local n=n
while pre do
@@ -13510,29 +19777,18 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
local n=f-1
while n>=1 do
if prev then
- local id=getid(prev)
- if id==glyph_code then
- if getfont(prev)==currentfont and getsubtype(prev)<256 then
- local char=getchar(prev)
- local ccd=descriptions[char]
- if ccd then
- local class=ccd.class
- if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
- skipped=true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- elseif seq[n][char] then
- n=n -1
- else
- if discfound then
- notmatchreplace[discfound]=true
- match=not notmatchpost[discfound]
- else
- match=false
- end
- break
+ local char,id=ischar(prev,currentfont)
+ if char then
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(dataset,sequence,char,ck,class)
end
+ elseif seq[n][char] then
+ n=n -1
else
if discfound then
notmatchreplace[discfound]=true
@@ -13551,19 +19807,25 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
break
end
+ prev=getprev(prev)
+ elseif char==false then
+ if discfound then
+ notmatchreplace[discfound]=true
+ match=not notmatchpost[discfound]
+ else
+ match=false
+ end
+ break
elseif id==disc_code then
diskseen=true
discfound=prev
notmatchpre[prev]=true
notmatchpost[prev]=nil
notmatchreplace[prev]=nil
- local pre=getfield(prev,"pre")
- local post=getfield(prev,"post")
- local replace=getfield(prev,"replace")
+ local pre,post,replace,pretail,posttail,replacetail=getdisc(prev,true)
if pre~=start and post~=start and replace~=start then
if post then
local n=n
- local posttail=find_node_tail(post)
while posttail do
if seq[n][getchar(posttail)] then
n=n-1
@@ -13587,7 +19849,6 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
notmatchpost[prev]=true
end
if replace then
- local replacetail=find_node_tail(replace)
while replacetail do
if seq[n][getchar(replacetail)] then
n=n-1
@@ -13613,7 +19874,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
else
end
elseif seq[n][32] then
- n=n -1
+ n=n-1
else
match=false
break
@@ -13645,29 +19906,18 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
local n=l+1
while n<=s do
if current then
- local id=getid(current)
- if id==glyph_code then
- if getfont(current)==currentfont and getsubtype(current)<256 then
- local char=getchar(current)
- local ccd=descriptions[char]
- if ccd then
- local class=ccd.class
- if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
- skipped=true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- elseif seq[n][char] then
- n=n+1
- else
- if discfound then
- notmatchreplace[discfound]=true
- match=not notmatchpre[discfound]
- else
- match=false
- end
- break
+ local char,id=ischar(current,currentfont)
+ if char then
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(dataset,sequence,char,ck,class)
end
+ elseif seq[n][char] then
+ n=n+1
else
if discfound then
notmatchreplace[discfound]=true
@@ -13686,14 +19936,22 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
break
end
+ current=getnext(current)
+ elseif char==false then
+ if discfound then
+ notmatchreplace[discfound]=true
+ match=not notmatchpre[discfound]
+ else
+ match=false
+ end
+ break
elseif id==disc_code then
diskseen=true
discfound=current
notmatchpre[current]=nil
notmatchpost[current]=true
notmatchreplace[current]=nil
- local pre=getfield(current,"pre")
- local replace=getfield(current,"replace")
+ local pre,post,replace=getdisc(current)
if pre then
local n=n
while pre do
@@ -13742,6 +20000,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
current=getnext(current)
elseif seq[n][32] then
n=n+1
+ current=getnext(current)
else
match=false
break
@@ -13755,45 +20014,39 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
if match then
local diskchain=diskseen or sweepnode
if trace_contexts then
- local rule,lookuptype,f,l=ck[1],ck[2],ck[4],ck[5]
+ local rule=ck[1]
+ local lookuptype=ck[8] or ck[2]
+ local first=ck[4]
+ local last=ck[5]
local char=getchar(start)
- if ck[9] then
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
- cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
- else
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
- cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
- end
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
+ cref(dataset,sequence),rule,gref(char),first-1,last-first+1,s-last,lookuptype)
end
local chainlookups=ck[6]
if chainlookups then
local nofchainlookups=#chainlookups
if nofchainlookups==1 then
- local chainlookupname=chainlookups[1]
- local chainlookup=lookuptable[chainlookupname]
- if chainlookup then
- local chainproc=chainprocs[chainlookup.type]
- if chainproc then
- local ok
- if diskchain then
- head,start,ok=chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence,chainproc)
- else
- head,start,ok=chainproc(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
- end
- if ok then
- done=true
- end
+ local chainlookup=chainlookups[1]
+ local chainkind=chainlookup.type
+ local chainproc=chainprocs[chainkind]
+ if chainproc then
+ local ok
+ if diskchain then
+ head,start,ok=chaindisk(head,start,last,dataset,sequence,chainlookup,rlmode,1,ck,chainproc)
else
- logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ head,start,ok=chainproc(head,start,last,dataset,sequence,chainlookup,rlmode,1)
+ end
+ if ok then
+ done=true
end
- else
- logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
+ else
+ logprocess("%s: %s is not yet supported (1)",cref(dataset,sequence),chainkind)
end
else
local i=1
while start and true do
if skipped then
- while true do
+ while start do
local char=getchar(start)
local ccd=descriptions[char]
if ccd then
@@ -13808,21 +20061,18 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
end
end
- local chainlookupname=chainlookups[i]
- local chainlookup=lookuptable[chainlookupname]
+ local chainlookup=chainlookups[1]
if not chainlookup then
- i=i+1
+ i=i+1
else
- local chainproc=chainprocs[chainlookup.type]
- if not chainproc then
- logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
- i=i+1
- else
+ local chainkind=chainlookup.type
+ local chainproc=chainprocs[chainkind]
+ if chainproc then
local ok,n
if diskchain then
- head,start,ok=chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence,chainproc)
+ head,start,ok=chaindisk(head,start,last,dataset,sequence,chainlookup,rlmode,i,ck,chainproc)
else
- head,start,ok,n=chainproc(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
+ head,start,ok,n=chainproc(head,start,last,dataset,sequence,chainlookup,rlmode,i)
end
if ok then
done=true
@@ -13833,8 +20083,10 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
end
end
- i=i+1
+ else
+ logprocess("%s: %s is not yet supported (2)",cref(dataset,sequence),chainkind)
end
+ i=i+1
end
if i>nofchainlookups or not start then
break
@@ -13846,11 +20098,11 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
else
local replacements=ck[7]
if replacements then
- head,start,done=chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements)
+ head,start,done=reversesub(head,start,last,dataset,sequence,replacements,rlmode)
else
done=quit_on_no_replacement
if trace_contexts then
- logprocess("%s: skipping match",cref(kind,chainname))
+ logprocess("%s: skipping match",cref(dataset,sequence))
end
end
end
@@ -13859,58 +20111,32 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
end
end
- if diskseen then
+ if diskseen then
notmatchpre={}
notmatchpost={}
notmatchreplace={}
end
return head,start,done
end
-local verbose_handle_contextchain=function(font,...)
- logwarning("no verbose handler installed, reverting to 'normal'")
- otf.setcontextchain()
- return normal_handle_contextchain(...)
-end
-otf.chainhandlers={
- normal=normal_handle_contextchain,
- verbose=verbose_handle_contextchain,
-}
-local handle_contextchain=nil
-local function chained_contextchain(head,start,stop,...)
+handlers.gsub_context=handle_contextchain
+handlers.gsub_contextchain=handle_contextchain
+handlers.gsub_reversecontextchain=handle_contextchain
+handlers.gpos_contextchain=handle_contextchain
+handlers.gpos_context=handle_contextchain
+local function chained_contextchain(head,start,stop,dataset,sequence,currentlookup,rlmode)
local steps=currentlookup.steps
local nofsteps=currentlookup.nofsteps
if nofsteps>1 then
reportmoresteps(dataset,sequence)
end
- return handle_contextchain(head,start,...)
-end
-function otf.setcontextchain(method)
- if not method or method=="normal" or not otf.chainhandlers[method] then
- if handle_contextchain then
- logwarning("installing normal contextchain handler")
- end
- handle_contextchain=normal_handle_contextchain
- else
- logwarning("installing contextchain handler %a",method)
- local handler=otf.chainhandlers[method]
- handle_contextchain=function(...)
- return handler(currentfont,...)
- end
- end
- handlers.gsub_context=handle_contextchain
- handlers.gsub_contextchain=handle_contextchain
- handlers.gsub_reversecontextchain=handle_contextchain
- handlers.gpos_contextchain=handle_contextchain
- handlers.gpos_context=handle_contextchain
- handlers.contextchain=handle_contextchain
+ return handle_contextchain(head,start,dataset,sequence,currentlookup,rlmode)
end
chainprocs.gsub_context=chained_contextchain
chainprocs.gsub_contextchain=chained_contextchain
chainprocs.gsub_reversecontextchain=chained_contextchain
chainprocs.gpos_contextchain=chained_contextchain
chainprocs.gpos_context=chained_contextchain
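-- With the pluggable otf.setcontextchain/otf.chainhandlers machinery removed, the
-- contextual handlers are bound statically: the handlers.* entries point at
-- handle_contextchain for top-level sequences, while the chainprocs.* entries point at
-- chained_contextchain for lookups that are themselves invoked from inside a chain.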
-otf.setcontextchain()
-local missing={}
+local missing=setmetatableindex("table")
local function logprocess(...)
if trace_steps then
registermessage(...)
@@ -13918,23 +20144,22 @@ local function logprocess(...)
report_process(...)
end
local logwarning=report_process
-local function report_missing_cache(typ,lookup)
- local f=missing[currentfont] if not f then f={} missing[currentfont]=f end
- local t=f[typ] if not t then t={} f[typ]=t end
- if not t[lookup] then
- t[lookup]=true
- logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname)
+local function report_missing_coverage(dataset,sequence)
+ local t=missing[currentfont]
+ if not t[sequence] then
+ t[sequence]=true
+ logwarning("missing coverage for feature %a, lookup %a, type %a, font %a, name %a",
+ dataset[4],sequence.name,sequence.type,currentfont,tfmdata.properties.fullname)
end
end
local resolved={}
-local lookuphashes={}
-setmetatableindex(lookuphashes,function(t,font)
- local lookuphash=fontdata[font].resources.lookuphash
- if not lookuphash or not next(lookuphash) then
- lookuphash=false
- end
- t[font]=lookuphash
- return lookuphash
+local sequencelists=setmetatableindex(function(t,font)
+ local sequences=fontdata[font].resources.sequences
+ if not sequences or not next(sequences) then
+ sequences=false
+ end
+ t[font]=sequences
+ return sequences
end)
local autofeatures=fonts.analyzers.features
local featuretypes=otf.tables.featuretypes
@@ -14004,244 +20229,501 @@ function otf.dataset(tfmdata,font)
end
return rl
end
-local function kernrun(disc,run)
+local function report_disc(n)
+ report_run("kern: %s > %s",disc,languages.serializediscretionary(disc))
+end
+local function kernrun(disc,k_run,font,attr,...)
if trace_kernruns then
- report_run("kern")
+ report_disc("kern")
end
- local prev=getprev(disc)
- local next=getnext(disc)
- local pre=getfield(disc,"pre")
- local post=getfield(disc,"post")
- local replace=getfield(disc,"replace")
+ local prev,next=getboth(disc)
+ local nextstart=next
+ local done=false
+ local pre,post,replace,pretail,posttail,replacetail=getdisc(disc,true)
local prevmarks=prev
- while prevmarks and getid(prevmarks)==glyph_code and marks[getchar(prevmarks)] and getfont(prevmarks)==currentfont and getsubtype(prevmarks)<256 do
- prevmarks=getprev(prevmarks)
+ while prevmarks do
+ local char=ischar(prevmarks,font)
+ if char and marks[char] then
+ prevmarks=getprev(prevmarks)
+ else
+ break
+ end
end
- if prev and (pre or replace) and not (getid(prev)==glyph_code and getfont(prev)==currentfont and getsubtype(prev)<256) then
+ if prev and (pre or replace) and not ischar(prev,font) then
prev=false
end
- if next and (post or replace) and not (getid(next)==glyph_code and getfont(next)==currentfont and getsubtype(next)<256) then
+ if next and (post or replace) and not ischar(next,font) then
next=false
end
- if not pre then
- elseif prev then
- local nest=getprev(pre)
- setprev(pre,prev)
- setnext(prev,pre)
- run(prevmarks,"preinjections")
- setprev(pre,nest)
- setnext(prev,disc)
- else
- run(pre,"preinjections")
- end
- if not post then
- elseif next then
- local tail=find_node_tail(post)
- setnext(tail,next)
- setprev(next,tail)
- run(post,"postinjections",next)
- setnext(tail,nil)
- setprev(next,disc)
- else
- run(post,"postinjections")
- end
- if not replace and prev and next then
- setnext(prev,next)
- setprev(next,prev)
- run(prevmarks,"injections",next)
- setnext(prev,disc)
- setprev(next,disc)
+ if pre then
+ if k_run(pre,"injections",nil,font,attr,...) then
+ done=true
+ end
+ if prev then
+ local nest=getprev(pre)
+ setlink(prev,pre)
+ if k_run(prevmarks,"preinjections",pre,font,attr,...) then
+ done=true
+ end
+ setprev(pre,nest)
+ setnext(prev,disc)
+ end
+ end
+ if post then
+ if k_run(post,"injections",nil,font,attr,...) then
+ done=true
+ end
+ if next then
+ setlink(posttail,next)
+ if k_run(posttail,"postinjections",next,font,attr,...) then
+ done=true
+ end
+ setnext(posttail,nil)
+ setprev(next,disc)
+ end
+ end
+ if replace then
+ if k_run(replace,"injections",nil,font,attr,...) then
+ done=true
+ end
+ if prev then
+ local nest=getprev(replace)
+ setlink(prev,replace)
+ if k_run(prevmarks,"replaceinjections",replace,font,attr,...) then
+ done=true
+ end
+ setprev(replace,nest)
+ setnext(prev,disc)
+ end
+ if next then
+ setlink(replacetail,next)
+ if k_run(replacetail,"replaceinjections",next,font,attr,...) then
+ done=true
+ end
+ setnext(replacetail,nil)
+ setprev(next,disc)
+ end
elseif prev and next then
- local tail=find_node_tail(replace)
- local nest=getprev(replace)
- setprev(replace,prev)
- setnext(prev,replace)
- setnext(tail,next)
- setprev(next,tail)
- run(prevmarks,"replaceinjections",next)
- setprev(replace,nest)
- setnext(prev,disc)
- setnext(tail,nil)
- setprev(next,disc)
- elseif prev then
- local nest=getprev(replace)
- setprev(replace,prev)
- setnext(prev,replace)
- run(prevmarks,"replaceinjections")
- setprev(replace,nest)
- setnext(prev,disc)
- elseif next then
- local tail=find_node_tail(replace)
- setnext(tail,next)
- setprev(next,tail)
- run(replace,"replaceinjections",next)
- setnext(tail,nil)
- setprev(next,disc)
- else
- run(replace,"replaceinjections")
+ setlink(prev,next)
+ if k_run(prevmarks,"emptyinjections",next,font,attr,...) then
+ done=true
+ end
+ setlink(prev,disc)
+ setlink(disc,next)
end
+ return nextstart,done
end
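-- kernrun now receives the per-pass kerning runner (k_run) plus font/attr and applies
-- it separately to the pre, post and replace branches of the discretionary, linking
-- each branch temporarily to the surrounding prev/next glyphs so that cross-boundary
-- kerns are seen; it returns the saved next node and whether any runner reported work.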
-local function comprun(disc,run)
+local function comprun(disc,c_run,...)
if trace_compruns then
- report_run("comp: %s",languages.serializediscretionary(disc))
+ report_disc("comp")
end
- local pre=getfield(disc,"pre")
+ local pre,post,replace=getdisc(disc)
+ local renewed=false
if pre then
sweepnode=disc
sweeptype="pre"
- local new,done=run(pre)
+ local new,done=c_run(pre,...)
if done then
- setfield(disc,"pre",new)
+ pre=new
+ renewed=true
end
end
- local post=getfield(disc,"post")
if post then
sweepnode=disc
sweeptype="post"
- local new,done=run(post)
+ local new,done=c_run(post,...)
if done then
- setfield(disc,"post",new)
+ post=new
+ renewed=true
end
end
- local replace=getfield(disc,"replace")
if replace then
sweepnode=disc
sweeptype="replace"
- local new,done=run(replace)
+ local new,done=c_run(replace,...)
if done then
- setfield(disc,"replace",new)
+ replace=new
+ renewed=true
end
end
sweepnode=nil
sweeptype=nil
+ if renewed then
+ setdisc(disc,pre,post,replace)
+ end
+ return getnext(disc),done
end
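-- comprun runs the composing pass (c_run) over each branch of the discretionary while
-- sweepnode/sweeptype tell the handlers which branch they are in, and only writes the
-- branches back with setdisc when one of the runs returned a replacement list.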
-local function testrun(disc,trun,crun)
- local next=getnext(disc)
- if next then
- local replace=getfield(disc,"replace")
- if replace then
- local prev=getprev(disc)
- if prev then
- local tail=find_node_tail(replace)
- setnext(tail,next)
- setprev(next,tail)
- if trun(replace,next) then
- setfield(disc,"replace",nil)
- setnext(prev,replace)
- setprev(replace,prev)
- setprev(next,tail)
- setnext(tail,next)
- setprev(disc,nil)
- setnext(disc,nil)
- flush_node_list(disc)
- return replace
- else
- setnext(tail,nil)
- setprev(next,disc)
+local function testrun(disc,t_run,c_run,...)
+ if trace_testruns then
+ report_disc("test")
+ end
+ local prev,next=getboth(disc)
+ if not next then
+ return
+ end
+ local pre,post,replace,pretail,posttail,replacetail=getdisc(disc)
+ local done=false
+ if replace and prev then
+ setlink(replacetail,next)
+ if t_run(replace,next,...) then
+ setfield(disc,"replace",nil)
+ setlink(prev,replace)
+ setlink(replacetail,next)
+ setboth(disc)
+ flush_node_list(disc)
+ return replace,true
+ else
+ setnext(replacetail)
+ setprev(next,disc)
+ end
+ end
+ local renewed=false
+ if pre then
+ sweepnode=disc
+ sweeptype="pre"
+ local new,ok=c_run(pre,...)
+ if ok then
+ pre=new
+ renewed=true
+ end
+ end
+ if post then
+ sweepnode=disc
+ sweeptype="post"
+ local new,ok=c_run(post,...)
+ if ok then
+ post=new
+ renewed=true
+ end
+ end
+ if replace then
+ sweepnode=disc
+ sweeptype="replace"
+ local new,ok=c_run(replace,...)
+ if ok then
+ replace=new
+ renewed=true
+ end
+ end
+ sweepnode=nil
+ sweeptype=nil
+ if renewed then
+ setdisc(disc,pre,post,replace)
+ return next,true
+ else
+ return next,done
+ end
+end
+local nesting=0
+local function c_run_single(head,font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
+ local done=false
+ local start=sweephead[head]
+ if start then
+ sweephead[head]=nil
+ else
+ start=head
+ end
+ while start do
+ local char=ischar(start,font)
+ if char then
+ local a=getattr(start,0)
+ if not a or (a==attr) then
+ local lookupmatch=lookupcache[char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset,sequence,lookupmatch,rlmode,step,1)
+ if ok then
+ done=true
+ end
+ end
+ if start then
+ start=getnext(start)
end
else
+ start=getnext(start)
+ end
+ elseif char==false then
+ return head,done
+ else
+ start=getnext(start)
+ end
+ end
+ return head,done
+end
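-- c_run_single drives a one-step sequence inside a disc sweep: it starts at
-- sweephead[head] when a previous pass left a continuation point, matches characters
-- of the given font against the step's coverage table, honours the attribute-0 check
-- against `attr`, and returns as soon as ischar flags a glyph of another font.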
+local function t_run_single(start,stop,font,attr,lookupcache)
+ while start~=stop do
+ local char=ischar(start,font)
+ if char then
+ local a=getattr(start,0)
+ if not a or (a==attr) then
+ local lookupmatch=lookupcache[char]
+ if lookupmatch then
+ local s=getnext(start)
+ local l=nil
+ while s do
+ local lg=lookupmatch[getchar(s)]
+ if lg then
+ l=lg
+ s=getnext(s)
+ else
+ break
+ end
+ end
+ if l and l.ligature then
+ return true
+ end
+ end
end
+ start=getnext(start)
else
+ break
+ end
+ end
+end
+local function k_run_single(sub,injection,last,font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
+ local a=getattr(sub,0)
+ if not a or (a==attr) then
+ for n in traverse_nodes(sub) do
+ if n==last then
+ break
+ end
+ local char=ischar(n)
+ if char then
+ local lookupmatch=lookupcache[char]
+ if lookupmatch then
+ local h,d,ok=handler(sub,n,dataset,sequence,lookupmatch,rlmode,step,1,injection)
+ if ok then
+ return true
+ end
+ end
+ end
end
+ end
+end
+local function c_run_multiple(head,font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
+ local done=false
+ local start=sweephead[head]
+ if start then
+ sweephead[head]=nil
else
+ start=head
+ end
+ while start do
+ local char=ischar(start,font)
+ if char then
+ local a=getattr(start,0)
+ if not a or (a==attr) then
+ for i=1,nofsteps do
+ local step=steps[i]
+ local lookupcache=step.coverage
+ if lookupcache then
+ local lookupmatch=lookupcache[char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i)
+ if ok then
+ done=true
+ break
+ elseif not start then
+ break
+ end
+ end
+ else
+ report_missing_coverage(dataset,sequence)
+ end
+ end
+ if start then
+ start=getnext(start)
+ end
+ else
+ start=getnext(start)
+ end
+ elseif char==false then
+ return head,done
+ else
+ start=getnext(start)
+ end
end
- comprun(disc,crun)
- return next
+ return head,done
end
-local function discrun(disc,drun,krun)
- local next=getnext(disc)
- local prev=getprev(disc)
- if trace_discruns then
- report_run("disc")
+local function t_run_multiple(start,stop,font,attr,steps,nofsteps)
+ while start~=stop do
+ local char=ischar(start,font)
+ if char then
+ local a=getattr(start,0)
+ if not a or (a==attr) then
+ for i=1,nofsteps do
+ local step=steps[i]
+ local lookupcache=step.coverage
+ if lookupcache then
+ local lookupmatch=lookupcache[char]
+ if lookupmatch then
+ local s=getnext(start)
+ local l=nil
+ while s do
+ local lg=lookupmatch[getchar(s)]
+ if lg then
+ l=lg
+ s=getnext(s)
+ else
+ break
+ end
+ end
+ if l and l.ligature then
+ return true
+ end
+ end
+ else
+ report_missing_coverage(dataset,sequence)
+ end
+ end
+ end
+ start=getnext(start)
+ else
+ break
+ end
end
- if next and prev then
- setnext(prev,next)
- drun(prev)
- setnext(prev,disc)
+end
+local function k_run_multiple(sub,injection,last,font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
+ local a=getattr(sub,0)
+ if not a or (a==attr) then
+ for n in traverse_nodes(sub) do
+ if n==last then
+ break
+ end
+ local char=ischar(n)
+ if char then
+ for i=1,nofsteps do
+ local step=steps[i]
+ local lookupcache=step.coverage
+ if lookupcache then
+ local lookupmatch=lookupcache[char]
+ if lookupmatch then
+ local h,d,ok=handler(head,n,dataset,sequence,lookupmatch,step,rlmode,i,injection)
+ if ok then
+ return true
+ end
+ end
+ else
+ report_missing_coverage(dataset,sequence)
+ end
+ end
+ end
+ end
end
- local pre=getfield(disc,"pre")
- if not pre then
- elseif prev then
- local nest=getprev(pre)
- setprev(pre,prev)
- setnext(prev,pre)
- krun(prev,"preinjections")
- setprev(pre,nest)
- setnext(prev,disc)
+end
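+-- the two helpers below take over the formerly inlined dir and localpar handling: they
+-- update the direction stack and return the new right to left mode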
+local function txtdirstate(start,stack,top,rlparmode)
+ local dir=getfield(start,"dir")
+ local new=1
+ if dir=="+TRT" then
+ top=top+1
+ stack[top]=dir
+ new=-1
+ elseif dir=="+TLT" then
+ top=top+1
+ stack[top]=dir
+ elseif dir=="-TRT" or dir=="-TLT" then
+ top=top-1
+ if stack[top]=="+TRT" then
+ new=-1
+ end
else
- krun(pre,"preinjections")
+ new=rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, level %a",dir,mref(rlparmode),mref(new),topstack)
end
- return next
+ return getnext(start),top,new
+end
+local function pardirstate(start)
+ local dir=getfield(start,"dir")
+ local new=0
+ if dir=="TLT" then
+ new=1
+ elseif dir=="TRT" then
+ new=-1
+ end
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a",dir,mref(new))
+ end
+ return getnext(start),new,new
end
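+-- featuresprocessor is the main otf node pass: per dataset (enabled feature plus
+-- sequence) the list is walked once, glyphs are matched against the step coverage,
+-- discretionaries are delegated to kernrun, testrun or comprun with the runners defined
+-- above, and dir and localpar nodes update the direction state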
local function featuresprocessor(head,font,attr)
- local lookuphash=lookuphashes[font]
- if not lookuphash then
+ local sequences=sequencelists[font]
+ if not sequencelists then
+ return head,false
+ end
+ nesting=nesting+1
+ if nesting==1 then
+ currentfont=font
+ tfmdata=fontdata[font]
+ descriptions=tfmdata.descriptions
+ characters=tfmdata.characters
+ marks=tfmdata.resources.marks
+ factor=tfmdata.parameters.factor
+ threshold=tfmdata.parameters.spacing.width or 65536*10
+ elseif currentfont~=font then
+ report_warning("nested call with a different font, level %s, quitting",nesting)
+ nesting=nesting-1
return head,false
end
head=tonut(head)
if trace_steps then
checkstep(head)
end
- tfmdata=fontdata[font]
- descriptions=tfmdata.descriptions
- characters=tfmdata.characters
- resources=tfmdata.resources
- marks=resources.marks
- anchorlookups=resources.lookup_to_anchor
- lookuptable=resources.lookups
- lookuptypes=resources.lookuptypes
- lookuptags=resources.lookuptags
- currentfont=font
- rlmode=0
- sweephead={}
- local sequences=resources.sequences
+ local rlmode=0
local done=false
local datasets=otf.dataset(tfmdata,font,attr)
- local dirstack={}
+ local dirstack={}
+ sweephead={}
for s=1,#datasets do
local dataset=datasets[s]
- featurevalue=dataset[1]
local attribute=dataset[2]
local sequence=dataset[3]
- local kind=dataset[4]
local rlparmode=0
local topstack=0
local success=false
local typ=sequence.type
local gpossing=typ=="gpos_single" or typ=="gpos_pair"
- local subtables=sequence.subtables
local handler=handlers[typ]
- if typ=="gsub_reversecontextchain" then
- local start=find_node_tail(head)
+ local steps=sequence.steps
+ local nofsteps=sequence.nofsteps
+ if not steps then
+ local h,d,ok=handler(head,start,dataset,sequence,nil,nil,nil,0,font,attr)
+ if ok then
+ success=true
+ if h then
+ head=h
+ end
+ if d then
+ start=d
+ end
+ end
+ elseif typ=="gsub_reversecontextchain" then
+ local start=find_node_tail(head)
while start do
- local id=getid(start)
- if id==glyph_code then
- if getfont(start)==font and getsubtype(start)<256 then
- local a=getattr(start,0)
- if a then
- a=a==attr
- else
- a=true
- end
- if a then
- local char=getchar(start)
- for i=1,#subtables do
- local lookupname=subtables[i]
- local lookupcache=lookuphash[lookupname]
- if lookupcache then
- local lookupmatch=lookupcache[char]
- if lookupmatch then
- head,start,success=handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
- if success then
- break
- end
+ local char=ischar(start,font)
+ if char then
+ local a=getattr(start,0)
+ if not a or (a==attr) then
+ for i=1,nofsteps do
+ local step=steps[i]
+ local lookupcache=step.coverage
+ if lookupcache then
+ local lookupmatch=lookupcache[char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i)
+ if ok then
+ success=true
+ break
end
- else
- report_missing_cache(typ,lookupname)
end
+ else
+ report_missing_coverage(dataset,sequence)
end
- if start then start=getprev(start) end
- else
+ end
+ if start then
start=getprev(start)
end
else
@@ -14252,467 +20734,119 @@ local function featuresprocessor(head,font,attr)
end
end
else
- local ns=#subtables
local start=head
rlmode=0
- if ns==1 then
- local lookupname=subtables[1]
- local lookupcache=lookuphash[lookupname]
- if not lookupcache then
- report_missing_cache(typ,lookupname)
+ if nofsteps==1 then
+ local step=steps[1]
+ local lookupcache=step.coverage
+ if not lookupcache then
+ report_missing_coverage(dataset,sequence)
else
- local function c_run(head)
- local done=false
- local start=sweephead[head]
- if start then
- sweephead[head]=nil
- else
- start=head
- end
- while start do
- local id=getid(start)
- if id~=glyph_code then
- start=getnext(start)
- elseif getfont(start)==font and getsubtype(start)<256 then
- local a=getattr(start,0)
- if a then
- a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
- else
- a=not attribute or getprop(start,a_state)==attribute
- end
- if a then
- local lookupmatch=lookupcache[getchar(start)]
- if lookupmatch then
- local ok
- head,start,ok=handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- done=true
- end
- end
- if start then start=getnext(start) end
- else
- start=getnext(start)
- end
- else
- return head,false
- end
- end
- if done then
- success=true
- end
- return head,done
- end
- local function t_run(start,stop)
- while start~=stop do
- local id=getid(start)
- if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then
- local a=getattr(start,0)
- if a then
- a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
- else
- a=not attribute or getprop(start,a_state)==attribute
- end
- if a then
- local lookupmatch=lookupcache[getchar(start)]
- if lookupmatch then
- local s=getnext(start)
- local l=nil
- while s do
- local lg=lookupmatch[getchar(s)]
- if lg then
- l=lg
- s=getnext(s)
- else
- break
- end
- end
- if l and l.ligature then
- return true
- end
- end
- end
- start=getnext(start)
+ while start do
+ local char,id=ischar(start,font)
+ if char then
+ local a=getattr(start,0)
+ if a then
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
else
- break
- end
- end
- end
- local function d_run(prev)
- local a=getattr(prev,0)
- if a then
- a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute)
- else
- a=not attribute or getprop(prev,a_state)==attribute
- end
- if a then
- local lookupmatch=lookupcache[getchar(prev)]
- if lookupmatch then
- local h,d,ok=handler(head,prev,kind,lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- done=true
- success=true
- end
+ a=not attribute or getprop(start,a_state)==attribute
end
- end
- end
- local function k_run(sub,injection,last)
- local a=getattr(sub,0)
- if a then
- a=(a==attr) and (not attribute or getprop(sub,a_state)==attribute)
- else
- a=not attribute or getprop(sub,a_state)==attribute
- end
- if a then
- for n in traverse_nodes(sub) do
- if n==last then
- break
- end
- local id=getid(n)
- if id==glyph_code then
- local lookupmatch=lookupcache[getchar(n)]
- if lookupmatch then
- local h,d,ok=handler(sub,n,kind,lookupname,lookupmatch,sequence,lookuphash,1,injection)
- if ok then
- done=true
- success=true
- end
+ if a then
+ local lookupmatch=lookupcache[char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset,sequence,lookupmatch,rlmode,step,1)
+ if ok then
+ success=true
end
- else
end
- end
- end
- end
- while start do
- local id=getid(start)
- if id==glyph_code then
- if getfont(start)==font and getsubtype(start)<256 then
- local a=getattr(start,0)
- if a then
- a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
- else
- a=not attribute or getprop(start,a_state)==attribute
- end
- if a then
- local char=getchar(start)
- local lookupmatch=lookupcache[char]
- if lookupmatch then
- local ok
- head,start,ok=handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- success=true
- elseif gpossing and zwnjruns and char==zwnj then
- discrun(start,d_run)
- end
- elseif gpossing and zwnjruns and char==zwnj then
- discrun(start,d_run)
- end
- if start then start=getnext(start) end
- else
+ if start then
start=getnext(start)
end
else
start=getnext(start)
end
+ elseif char==false then
+ start=getnext(start)
elseif id==disc_code then
+ local ok
if gpossing then
- kernrun(start,k_run)
- start=getnext(start)
+ start,ok=kernrun(start,k_run_single,font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
elseif typ=="gsub_ligature" then
- start=testrun(start,t_run,c_run)
+ start,ok=testrun(start,t_run_single,c_run_single,font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
else
- comprun(start,c_run)
- start=getnext(start)
+ start,ok=comprun(start,c_run_single,font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
+ end
+ if ok then
+ success=true
end
elseif id==math_code then
start=getnext(end_of_math(start))
elseif id==dir_code then
- local dir=getfield(start,"dir")
- if dir=="+TLT" then
- topstack=topstack+1
- dirstack[topstack]=dir
- rlmode=1
- elseif dir=="+TRT" then
- topstack=topstack+1
- dirstack[topstack]=dir
- rlmode=-1
- elseif dir=="-TLT" or dir=="-TRT" then
- topstack=topstack-1
- rlmode=dirstack[topstack]=="+TRT" and -1 or 1
- else
- rlmode=rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
- end
- start=getnext(start)
+ start,topstack,rlmode=txtdirstate(start,dirstack,topstack,rlparmode)
elseif id==localpar_code then
- local dir=getfield(start,"dir")
- if dir=="TRT" then
- rlparmode=-1
- elseif dir=="TLT" then
- rlparmode=1
- else
- rlparmode=0
- end
- rlmode=rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
- end
- start=getnext(start)
+ start,rlparmode,rlmode=pardirstate(start)
else
start=getnext(start)
end
end
end
else
- local function c_run(head)
- local done=false
- local start=sweephead[head]
- if start then
- sweephead[head]=nil
- else
- start=head
- end
- while start do
- local id=getid(start)
- if id~=glyph_code then
- start=getnext(start)
- elseif getfont(start)==font and getsubtype(start)<256 then
- local a=getattr(start,0)
- if a then
- a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
- else
- a=not attribute or getprop(start,a_state)==attribute
- end
- if a then
- local char=getchar(start)
- for i=1,ns do
- local lookupname=subtables[i]
- local lookupcache=lookuphash[lookupname]
- if lookupcache then
- local lookupmatch=lookupcache[char]
- if lookupmatch then
- local ok
- head,start,ok=handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- done=true
- break
- elseif not start then
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start=getnext(start) end
- else
- start=getnext(start)
- end
+ while start do
+ local char,id=ischar(start,font)
+ if char then
+ local a=getattr(start,0)
+ if a then
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
else
- return head,false
- end
- end
- if done then
- success=true
- end
- return head,done
- end
- local function d_run(prev)
- local a=getattr(prev,0)
- if a then
- a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute)
- else
- a=not attribute or getprop(prev,a_state)==attribute
- end
- if a then
- local char=getchar(prev)
- for i=1,ns do
- local lookupname=subtables[i]
- local lookupcache=lookuphash[lookupname]
- if lookupcache then
- local lookupmatch=lookupcache[char]
- if lookupmatch then
- local h,d,ok=handler(head,prev,kind,lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- done=true
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
+ a=not attribute or getprop(start,a_state)==attribute
end
- end
- end
- local function k_run(sub,injection,last)
- local a=getattr(sub,0)
- if a then
- a=(a==attr) and (not attribute or getprop(sub,a_state)==attribute)
- else
- a=not attribute or getprop(sub,a_state)==attribute
- end
- if a then
- for n in traverse_nodes(sub) do
- if n==last then
- break
- end
- local id=getid(n)
- if id==glyph_code then
- local char=getchar(n)
- for i=1,ns do
- local lookupname=subtables[i]
- local lookupcache=lookuphash[lookupname]
- if lookupcache then
- local lookupmatch=lookupcache[char]
- if lookupmatch then
- local h,d,ok=handler(head,n,kind,lookupname,lookupmatch,sequence,lookuphash,i,injection)
- if ok then
- done=true
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- else
- end
- end
- end
- end
- local function t_run(start,stop)
- while start~=stop do
- local id=getid(start)
- if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then
- local a=getattr(start,0)
- if a then
- a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
- else
- a=not attribute or getprop(start,a_state)==attribute
- end
- if a then
- local char=getchar(start)
- for i=1,ns do
- local lookupname=subtables[i]
- local lookupcache=lookuphash[lookupname]
- if lookupcache then
- local lookupmatch=lookupcache[char]
- if lookupmatch then
- local s=getnext(start)
- local l=nil
- while s do
- local lg=lookupmatch[getchar(s)]
- if lg then
- l=lg
- s=getnext(s)
- else
- break
- end
- end
- if l and l.ligature then
- return true
- end
+ if a then
+ for i=1,nofsteps do
+ local step=steps[i]
+ local lookupcache=step.coverage
+ if lookupcache then
+ local lookupmatch=lookupcache[char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i)
+ if ok then
+ success=true
+ break
+ elseif not start then
+ break
end
- else
- report_missing_cache(typ,lookupname)
end
+ else
+ report_missing_coverage(dataset,sequence)
end
end
- start=getnext(start)
- else
- break
- end
- end
- end
- while start do
- local id=getid(start)
- if id==glyph_code then
- if getfont(start)==font and getsubtype(start)<256 then
- local a=getattr(start,0)
- if a then
- a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
- else
- a=not attribute or getprop(start,a_state)==attribute
- end
- if a then
- for i=1,ns do
- local lookupname=subtables[i]
- local lookupcache=lookuphash[lookupname]
- if lookupcache then
- local char=getchar(start)
- local lookupmatch=lookupcache[char]
- if lookupmatch then
- local ok
- head,start,ok=handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- success=true
- break
- elseif not start then
- break
- elseif gpossing and zwnjruns and char==zwnj then
- discrun(start,d_run)
- end
- elseif gpossing and zwnjruns and char==zwnj then
- discrun(start,d_run)
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start=getnext(start) end
- else
+ if start then
start=getnext(start)
end
else
start=getnext(start)
end
+ elseif char==false then
+ start=getnext(start)
elseif id==disc_code then
+ local ok
if gpossing then
- kernrun(start,k_run)
- start=getnext(start)
+ start,ok=kernrun(start,k_run_multiple,font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
elseif typ=="gsub_ligature" then
- start=testrun(start,t_run,c_run)
+ start,ok=testrun(start,t_run_multiple,c_run_multiple,font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
else
- comprun(start,c_run)
- start=getnext(start)
+ start,ok=comprun(start,c_run_multiple,font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
+ end
+ if ok then
+ success=true
end
elseif id==math_code then
start=getnext(end_of_math(start))
elseif id==dir_code then
- local dir=getfield(start,"dir")
- if dir=="+TLT" then
- topstack=topstack+1
- dirstack[topstack]=dir
- rlmode=1
- elseif dir=="+TRT" then
- topstack=topstack+1
- dirstack[topstack]=dir
- rlmode=-1
- elseif dir=="-TLT" or dir=="-TRT" then
- topstack=topstack-1
- rlmode=dirstack[topstack]=="+TRT" and -1 or 1
- else
- rlmode=rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
- end
- start=getnext(start)
+ start,topstack,rlmode=txtdirstate(start,dirstack,topstack,rlparmode)
elseif id==localpar_code then
- local dir=getfield(start,"dir")
- if dir=="TRT" then
- rlparmode=-1
- elseif dir=="TLT" then
- rlparmode=1
- else
- rlparmode=0
- end
- rlmode=rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
- end
- start=getnext(start)
+ start,rlparmode,rlmode=pardirstate(start)
else
start=getnext(start)
end
@@ -14726,1090 +20860,2139 @@ local function featuresprocessor(head,font,attr)
registerstep(head)
end
end
+ nesting=nesting-1
head=tonode(head)
return head,done
end
-local function generic(lookupdata,lookupname,unicode,lookuphash)
- local target=lookuphash[lookupname]
- if target then
- target[unicode]=lookupdata
- else
- lookuphash[lookupname]={ [unicode]=lookupdata }
- end
-end
-local function ligature(lookupdata,lookupname,unicode,lookuphash)
- local target=lookuphash[lookupname]
- if not target then
- target={}
- lookuphash[lookupname]=target
- end
- for i=1,#lookupdata do
- local li=lookupdata[i]
- local tu=target[li]
- if not tu then
- tu={}
- target[li]=tu
- end
- target=tu
- end
- target.ligature=unicode
+local function featuresinitializer(tfmdata,value)
end
-local function pair(lookupdata,lookupname,unicode,lookuphash)
- local target=lookuphash[lookupname]
- if not target then
- target={}
- lookuphash[lookupname]=target
- end
- local others=target[unicode]
- local paired=lookupdata[1]
- if others then
- others[paired]=lookupdata
- else
- others={ [paired]=lookupdata }
- target[unicode]=others
- end
-end
-local action={
- substitution=generic,
- multiple=generic,
- alternate=generic,
- position=generic,
- ligature=ligature,
- pair=pair,
- kern=pair,
+registerotffeature {
+ name="features",
+ description="features",
+ default=true,
+ initializers={
+ position=1,
+ node=featuresinitializer,
+ },
+ processors={
+ node=featuresprocessor,
+ }
}
-local function prepare_lookups(tfmdata)
- local rawdata=tfmdata.shared.rawdata
- local resources=rawdata.resources
- local lookuphash=resources.lookuphash
- local anchor_to_lookup=resources.anchor_to_lookup
- local lookup_to_anchor=resources.lookup_to_anchor
- local lookuptypes=resources.lookuptypes
- local characters=tfmdata.characters
- local descriptions=tfmdata.descriptions
- local duplicates=resources.duplicates
- for unicode,character in next,characters do
- local description=descriptions[unicode]
- if description then
- local lookups=description.slookups
- if lookups then
- for lookupname,lookupdata in next,lookups do
- action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash,duplicates)
- end
- end
- local lookups=description.mlookups
- if lookups then
- for lookupname,lookuplist in next,lookups do
- local lookuptype=lookuptypes[lookupname]
- for l=1,#lookuplist do
- local lookupdata=lookuplist[l]
- action[lookuptype](lookupdata,lookupname,unicode,lookuphash,duplicates)
- end
- end
- end
- local list=description.kerns
- if list then
- for lookup,krn in next,list do
- local target=lookuphash[lookup]
- if target then
- target[unicode]=krn
- else
- lookuphash[lookup]={ [unicode]=krn }
- end
- end
- end
- local list=description.anchors
- if list then
- for typ,anchors in next,list do
- if typ=="mark" or typ=="cexit" then
- for name,anchor in next,anchors do
- local lookups=anchor_to_lookup[name]
- if lookups then
- for lookup in next,lookups do
- local target=lookuphash[lookup]
- if target then
- target[unicode]=anchors
- else
- lookuphash[lookup]={ [unicode]=anchors }
- end
- end
- end
+otf.handlers=handlers
+local setspacekerns=nodes.injections.setspacekerns if not setspacekerns then os.exit() end
+function otf.handlers.trigger_space_kerns(head,start,dataset,sequence,_,_,_,_,font,attr)
+ setspacekerns(font,sequence)
+ return head,start,true
+end
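+-- hasspacekerns checks if some kern step has coverage involving the space character
+-- (slot 32) at either side; the registered extender below stores the result as a font
+-- property so that the space kern initializer can act on it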
+local function hasspacekerns(data)
+ local sequences=data.resources.sequences
+ for i=1,#sequences do
+ local sequence=sequences[i]
+ local steps=sequence.steps
+ if steps and sequence.features.kern then
+ for i=1,#steps do
+ local coverage=steps[i].coverage
+ if not coverage then
+ elseif coverage[32] then
+ return true
+ else
+ for k,v in next,coverage do
+ if v[32] then
+ return true
end
end
end
end
end
end
+ return false
end
-local function split(replacement,original)
- local result={}
- for i=1,#replacement do
- result[original[i]]=replacement[i]
+otf.readers.registerextender {
+ name="spacekerns",
+ action=function(data)
+ data.properties.hasspacekerns=hasspacekerns(data)
end
- return result
-end
-local valid={
- coverage={ chainsub=true,chainpos=true,contextsub=true,contextpos=true },
- reversecoverage={ reversesub=true },
- glyphs={ chainsub=true,chainpos=true,contextsub=true,contextpos=true },
}
-local function prepare_contextchains(tfmdata)
- local rawdata=tfmdata.shared.rawdata
- local resources=rawdata.resources
- local lookuphash=resources.lookuphash
- local lookuptags=resources.lookuptags
- local lookups=rawdata.lookups
- if lookups then
- for lookupname,lookupdata in next,rawdata.lookups do
- local lookuptype=lookupdata.type
- if lookuptype then
- local rules=lookupdata.rules
- if rules then
- local format=lookupdata.format
- local validformat=valid[format]
- if not validformat then
- report_prepare("unsupported format %a",format)
- elseif not validformat[lookuptype] then
- report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname])
- else
- local contexts=lookuphash[lookupname]
- if not contexts then
- contexts={}
- lookuphash[lookupname]=contexts
- end
- local t,nt={},0
- for nofrules=1,#rules do
- local rule=rules[nofrules]
- local current=rule.current
- local before=rule.before
- local after=rule.after
- local replacements=rule.replacements
- local sequence={}
- local nofsequences=0
- if before then
- for n=1,#before do
- nofsequences=nofsequences+1
- sequence[nofsequences]=before[n]
- end
- end
- local start=nofsequences+1
- for n=1,#current do
- nofsequences=nofsequences+1
- sequence[nofsequences]=current[n]
- end
- local stop=nofsequences
- if after then
- for n=1,#after do
- nofsequences=nofsequences+1
- sequence[nofsequences]=after[n]
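+-- spaceinitializer collects, once per font, the kern pairs in which a space is the left
+-- or right glyph and caches them in resources.spacekerns; when any are found a
+-- trigger_space_kerns sequence is inserted whose handler (above) calls setspacekerns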
+local function spaceinitializer(tfmdata,value)
+ local resources=tfmdata.resources
+ local spacekerns=resources and resources.spacekerns
+ if spacekerns==nil then
+ local properties=tfmdata.properties
+ if properties and properties.hasspacekerns then
+ local sequences=resources.sequences
+ local left={}
+ local right={}
+ local last=0
+ local feat=nil
+ for i=1,#sequences do
+ local sequence=sequences[i]
+ local steps=sequence.steps
+ if steps then
+ local kern=sequence.features.kern
+ if kern then
+ feat=feat or kern
+ for i=1,#steps do
+ local step=steps[i]
+ local coverage=step.coverage
+ if coverage then
+ local kerns=coverage[32]
+ if kerns then
+ for k,v in next,kerns do
+ right[k]=v
+ end
end
- end
- if sequence[1] then
- nt=nt+1
- t[nt]={ nofrules,lookuptype,sequence,start,stop,rule.lookups,replacements }
- for unic in next,sequence[start] do
- local cu=contexts[unic]
- if not cu then
- contexts[unic]=t
+ for k,v in next,coverage do
+ local kern=v[32]
+ if kern then
+ left[k]=kern
end
end
end
end
+ last=i
end
else
end
- else
- report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname])
end
- end
- end
-end
-local function featuresinitializer(tfmdata,value)
- if true then
- local rawdata=tfmdata.shared.rawdata
- local properties=rawdata.properties
- if not properties.initialized then
- local starttime=trace_preparing and os.clock()
- local resources=rawdata.resources
- resources.lookuphash=resources.lookuphash or {}
- prepare_contextchains(tfmdata)
- prepare_lookups(tfmdata)
- properties.initialized=true
- if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname)
+ left=next(left) and left or false
+ right=next(right) and right or false
+ if left or right then
+ spacekerns={
+ left=left,
+ right=right,
+ }
+ if last>0 then
+ local triggersequence={
+ features={ kern=feat or { dflt={ dflt=true,} } },
+ flags=noflags,
+ name="trigger_space_kerns",
+ order={ "kern" },
+ type="trigger_space_kerns",
+ left=left,
+ right=right,
+ }
+ insert(sequences,last,triggersequence)
+ end
+ else
+ spacekerns=false
end
+ else
+ spacekerns=false
end
+ resources.spacekerns=spacekerns
end
+ return spacekerns
end
registerotffeature {
- name="features",
- description="features",
+ name="spacekern",
+ description="space kern injection",
default=true,
initializers={
- position=1,
- node=featuresinitializer,
+ node=spaceinitializer,
},
- processors={
- node=featuresprocessor,
- }
}
-otf.handlers=handlers
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules={} end modules ['font-otp']={
+if not modules then modules={} end modules ['font-osd']={
version=1.001,
- comment="companion to font-otf.lua (packing)",
- author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright="PRAGMA ADE / ConTeXt Development Team",
+ comment="companion to font-ini.mkiv",
+ author="Kai Eigner, TAT Zetwerk / Hans Hagen, PRAGMA ADE",
+ copyright="TAT Zetwerk / PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local next,type,tostring=next,type,tostring
-local sort,concat=table.sort,table.concat
-local trace_packing=false trackers.register("otf.packing",function(v) trace_packing=v end)
-local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end)
-local report_otf=logs.reporter("fonts","otf loading")
-fonts=fonts or {}
-local handlers=fonts.handlers or {}
-fonts.handlers=handlers
-local otf=handlers.otf or {}
-handlers.otf=otf
-local enhancers=otf.enhancers or {}
-otf.enhancers=enhancers
-local glists=otf.glists or { "gsub","gpos" }
-otf.glists=glists
-local criterium=1
-local threshold=0
-local function tabstr_normal(t)
- local s={}
- local n=0
- for k,v in next,t do
- n=n+1
- if type(v)=="table" then
- s[n]=k..">"..tabstr_normal(v)
- elseif v==true then
- s[n]=k.."+"
- elseif v then
- s[n]=k.."="..v
- else
- s[n]=k.."-"
- end
- end
- if n==0 then
- return ""
- elseif n==1 then
- return s[1]
- else
- sort(s)
- return concat(s,",")
- end
-end
-local function tabstr_flat(t)
- local s={}
- local n=0
- for k,v in next,t do
- n=n+1
- s[n]=k.."="..v
+local insert,imerge,copy=table.insert,table.imerge,table.copy
+local next,type=next,type
+local report_devanagari=logs.reporter("otf","devanagari")
+fonts=fonts or {}
+fonts.analyzers=fonts.analyzers or {}
+fonts.analyzers.methods=fonts.analyzers.methods or { node={ otf={} } }
+local otf=fonts.handlers.otf
+local nodecodes=nodes.nodecodes
+local glyph_code=nodecodes.glyph
+local handlers=otf.handlers
+local methods=fonts.analyzers.methods
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local nuts=nodes.nuts
+local tonode=nuts.tonode
+local tonut=nuts.tonut
+local getnext=nuts.getnext
+local getprev=nuts.getprev
+local getboth=nuts.getboth
+local getid=nuts.getid
+local getchar=nuts.getchar
+local getfont=nuts.getfont
+local getsubtype=nuts.getsubtype
+local setlink=nuts.setlink
+local setnext=nuts.setnext
+local setprev=nuts.setprev
+local setchar=nuts.setchar
+local getprop=nuts.getprop
+local setprop=nuts.setprop
+local ischar=nuts.is_char
+local insert_node_after=nuts.insert_after
+local copy_node=nuts.copy
+local free_node=nuts.free
+local remove_node=nuts.remove
+local flush_list=nuts.flush_list
+local copyinjection=nodes.injections.copy
+local unsetvalue=attributes.unsetvalue
+local fontdata=fonts.hashes.identifiers
+local a_state=attributes.private('state')
+local a_syllabe=attributes.private('syllabe')
+local dotted_circle=0x25CC
+local states=fonts.analyzers.states
+local s_rphf=states.rphf
+local s_half=states.half
+local s_pref=states.pref
+local s_blwf=states.blwf
+local s_pstf=states.pstf
+local replace_all_nbsp=nil
+replace_all_nbsp=function(head)
+ replace_all_nbsp=typesetters and typesetters.characters and typesetters.characters.replacenbspaces or function(head)
+ return head
+ end
+ return replace_all_nbsp(head)
+end
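+-- the character processor is resolved lazily: in context the regular characters handler
+-- is used, in the generic loader the nodepass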
+local xprocesscharacters=nil
+if context then
+ xprocesscharacters=function(head,font)
+ xprocesscharacters=nodes.handlers.characters
+ return xprocesscharacters(head,font)
end
- if n==0 then
- return ""
- elseif n==1 then
- return s[1]
- else
- sort(s)
- return concat(s,",")
+else
+ xprocesscharacters=function(head,font)
+ xprocesscharacters=nodes.handlers.nodepass
+ return xprocesscharacters(head,font)
+ end
+end
+local function processcharacters(head,font)
+ return tonut(xprocesscharacters(tonode(head)))
+end
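+-- the tables below classify the devanagari, kannada and malayalam code points that the
+-- analyzer and reorder code need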
+local consonant={
+ [0x0915]=true,[0x0916]=true,[0x0917]=true,[0x0918]=true,
+ [0x0919]=true,[0x091A]=true,[0x091B]=true,[0x091C]=true,
+ [0x091D]=true,[0x091E]=true,[0x091F]=true,[0x0920]=true,
+ [0x0921]=true,[0x0922]=true,[0x0923]=true,[0x0924]=true,
+ [0x0925]=true,[0x0926]=true,[0x0927]=true,[0x0928]=true,
+ [0x0929]=true,[0x092A]=true,[0x092B]=true,[0x092C]=true,
+ [0x092D]=true,[0x092E]=true,[0x092F]=true,[0x0930]=true,
+ [0x0931]=true,[0x0932]=true,[0x0933]=true,[0x0934]=true,
+ [0x0935]=true,[0x0936]=true,[0x0937]=true,[0x0938]=true,
+ [0x0939]=true,[0x0958]=true,[0x0959]=true,[0x095A]=true,
+ [0x095B]=true,[0x095C]=true,[0x095D]=true,[0x095E]=true,
+ [0x095F]=true,[0x0979]=true,[0x097A]=true,
+ [0x0C95]=true,[0x0C96]=true,[0x0C97]=true,[0x0C98]=true,
+ [0x0C99]=true,[0x0C9A]=true,[0x0C9B]=true,[0x0C9C]=true,
+ [0x0C9D]=true,[0x0C9E]=true,[0x0C9F]=true,[0x0CA0]=true,
+ [0x0CA1]=true,[0x0CA2]=true,[0x0CA3]=true,[0x0CA4]=true,
+ [0x0CA5]=true,[0x0CA6]=true,[0x0CA7]=true,[0x0CA8]=true,
+ [0x0CA9]=true,[0x0CAA]=true,[0x0CAB]=true,[0x0CAC]=true,
+ [0x0CAD]=true,[0x0CAE]=true,[0x0CAF]=true,[0x0CB0]=true,
+ [0x0CB1]=true,[0x0CB2]=true,[0x0CB3]=true,[0x0CB4]=true,
+ [0x0CB5]=true,[0x0CB6]=true,[0x0CB7]=true,[0x0CB8]=true,
+ [0x0CB9]=true,
+ [0x0CDE]=true,
+ [0x0D15]=true,[0x0D16]=true,[0x0D17]=true,[0x0D18]=true,
+ [0x0D19]=true,[0x0D1A]=true,[0x0D1B]=true,[0x0D1C]=true,
+ [0x0D1D]=true,[0x0D1E]=true,[0x0D1F]=true,[0x0D20]=true,
+ [0x0D21]=true,[0x0D22]=true,[0x0D23]=true,[0x0D24]=true,
+ [0x0D25]=true,[0x0D26]=true,[0x0D27]=true,[0x0D28]=true,
+ [0x0D29]=true,[0x0D2A]=true,[0x0D2B]=true,[0x0D2C]=true,
+ [0x0D2D]=true,[0x0D2E]=true,[0x0D2F]=true,[0x0D30]=true,
+ [0x0D31]=true,[0x0D32]=true,[0x0D33]=true,[0x0D34]=true,
+ [0x0D35]=true,[0x0D36]=true,[0x0D37]=true,[0x0D38]=true,
+ [0x0D39]=true,[0x0D3A]=true,
+}
+local independent_vowel={
+ [0x0904]=true,[0x0905]=true,[0x0906]=true,[0x0907]=true,
+ [0x0908]=true,[0x0909]=true,[0x090A]=true,[0x090B]=true,
+ [0x090C]=true,[0x090D]=true,[0x090E]=true,[0x090F]=true,
+ [0x0910]=true,[0x0911]=true,[0x0912]=true,[0x0913]=true,
+ [0x0914]=true,[0x0960]=true,[0x0961]=true,[0x0972]=true,
+ [0x0973]=true,[0x0974]=true,[0x0975]=true,[0x0976]=true,
+ [0x0977]=true,
+ [0x0C85]=true,[0x0C86]=true,[0x0C87]=true,[0x0C88]=true,
+ [0x0C89]=true,[0x0C8A]=true,[0x0C8B]=true,[0x0C8C]=true,
+ [0x0C8D]=true,[0x0C8E]=true,[0x0C8F]=true,[0x0C90]=true,
+ [0x0C91]=true,[0x0C92]=true,[0x0C93]=true,[0x0C94]=true,
+ [0x0D05]=true,[0x0D06]=true,[0x0D07]=true,[0x0D08]=true,
+ [0x0D09]=true,[0x0D0A]=true,[0x0D0B]=true,[0x0D0C]=true,
+ [0x0D0E]=true,[0x0D0F]=true,[0x0D10]=true,[0x0D12]=true,
+ [0x0D13]=true,[0x0D14]=true,
+}
+local dependent_vowel={
+ [0x093A]=true,[0x093B]=true,[0x093E]=true,[0x093F]=true,
+ [0x0940]=true,[0x0941]=true,[0x0942]=true,[0x0943]=true,
+ [0x0944]=true,[0x0945]=true,[0x0946]=true,[0x0947]=true,
+ [0x0948]=true,[0x0949]=true,[0x094A]=true,[0x094B]=true,
+ [0x094C]=true,[0x094E]=true,[0x094F]=true,[0x0955]=true,
+ [0x0956]=true,[0x0957]=true,[0x0962]=true,[0x0963]=true,
+ [0x0CBE]=true,[0x0CBF]=true,[0x0CC0]=true,[0x0CC1]=true,
+ [0x0CC2]=true,[0x0CC3]=true,[0x0CC4]=true,[0x0CC5]=true,
+ [0x0CC6]=true,[0x0CC7]=true,[0x0CC8]=true,[0x0CC9]=true,
+ [0x0CCA]=true,[0x0CCB]=true,[0x0CCC]=true,
+ [0x0D3E]=true,[0x0D3F]=true,[0x0D40]=true,[0x0D41]=true,
+ [0x0D42]=true,[0x0D43]=true,[0x0D44]=true,[0x0D46]=true,
+ [0x0D47]=true,[0x0D48]=true,[0x0D4A]=true,[0x0D4B]=true,
+ [0x0D4C]=true,[0x0D57]=true,
+}
+local vowel_modifier={
+ [0x0900]=true,[0x0901]=true,[0x0902]=true,[0x0903]=true,
+ [0xA8E0]=true,[0xA8E1]=true,[0xA8E2]=true,[0xA8E3]=true,
+ [0xA8E4]=true,[0xA8E5]=true,[0xA8E6]=true,[0xA8E7]=true,
+ [0xA8E8]=true,[0xA8E9]=true,[0xA8EA]=true,[0xA8EB]=true,
+ [0xA8EC]=true,[0xA8ED]=true,[0xA8EE]=true,[0xA8EF]=true,
+ [0xA8F0]=true,[0xA8F1]=true,
+ [0x0D02]=true,[0x0D03]=true,
+}
+local stress_tone_mark={
+ [0x0951]=true,[0x0952]=true,[0x0953]=true,[0x0954]=true,
+ [0x0CCD]=true,
+ [0x0D4D]=true,
+}
+local nukta={
+ [0x093C]=true,
+ [0x0CBC]=true,
+}
+local halant={
+ [0x094D]=true,
+ [0x0CCD]=true,
+ [0x0D4D]=true,
+}
+local ra={
+ [0x0930]=true,
+ [0x0CB0]=true,
+ [0x0D30]=true,
+}
+local c_anudatta=0x0952
+local c_nbsp=0x00A0
+local c_zwnj=0x200C
+local c_zwj=0x200D
+local zw_char={
+ [0x200C]=true,
+ [0x200D]=true,
+}
+local pre_mark={
+ [0x093F]=true,[0x094E]=true,
+ [0x0D46]=true,[0x0D47]=true,[0x0D48]=true,
+}
+local above_mark={
+ [0x0900]=true,[0x0901]=true,[0x0902]=true,[0x093A]=true,
+ [0x0945]=true,[0x0946]=true,[0x0947]=true,[0x0948]=true,
+ [0x0951]=true,[0x0953]=true,[0x0954]=true,[0x0955]=true,
+ [0xA8E0]=true,[0xA8E1]=true,[0xA8E2]=true,[0xA8E3]=true,
+ [0xA8E4]=true,[0xA8E5]=true,[0xA8E6]=true,[0xA8E7]=true,
+ [0xA8E8]=true,[0xA8E9]=true,[0xA8EA]=true,[0xA8EB]=true,
+ [0xA8EC]=true,[0xA8ED]=true,[0xA8EE]=true,[0xA8EF]=true,
+ [0xA8F0]=true,[0xA8F1]=true,
+ [0x0D4E]=true,
+}
+local below_mark={
+ [0x093C]=true,[0x0941]=true,[0x0942]=true,[0x0943]=true,
+ [0x0944]=true,[0x094D]=true,[0x0952]=true,[0x0956]=true,
+ [0x0957]=true,[0x0962]=true,[0x0963]=true,
+}
+local post_mark={
+ [0x0903]=true,[0x093B]=true,[0x093E]=true,[0x0940]=true,
+ [0x0949]=true,[0x094A]=true,[0x094B]=true,[0x094C]=true,
+ [0x094F]=true,
+}
+local twopart_mark={
+ [0x0D4A]={ 0x0D46,0x0D3E,},
+ [0x0D4B]={ 0x0D47,0x0D3E,},
+ [0x0D4C]={ 0x0D46,0x0D57,},
+}
+local mark_four={}
+for k,v in next,pre_mark do mark_four[k]=pre_mark end
+for k,v in next,above_mark do mark_four[k]=above_mark end
+for k,v in next,below_mark do mark_four[k]=below_mark end
+for k,v in next,post_mark do mark_four[k]=post_mark end
+local mark_above_below_post={}
+for k,v in next,above_mark do mark_above_below_post[k]=above_mark end
+for k,v in next,below_mark do mark_above_below_post[k]=below_mark end
+for k,v in next,post_mark do mark_above_below_post[k]=post_mark end
+local reorder_class={
+ [0x0930]="before postscript",
+ [0x093F]="before half",
+ [0x0940]="after subscript",
+ [0x0941]="after subscript",
+ [0x0942]="after subscript",
+ [0x0943]="after subscript",
+ [0x0944]="after subscript",
+ [0x0945]="after subscript",
+ [0x0946]="after subscript",
+ [0x0947]="after subscript",
+ [0x0948]="after subscript",
+ [0x0949]="after subscript",
+ [0x094A]="after subscript",
+ [0x094B]="after subscript",
+ [0x094C]="after subscript",
+ [0x0962]="after subscript",
+ [0x0963]="after subscript",
+ [0x093E]="after subscript",
+ [0x0CB0]="after postscript",
+ [0x0CBF]="before subscript",
+ [0x0CC6]="before subscript",
+ [0x0CCC]="before subscript",
+ [0x0CBE]="before subscript",
+ [0x0CE2]="before subscript",
+ [0x0CE3]="before subscript",
+ [0x0CC1]="before subscript",
+ [0x0CC2]="before subscript",
+ [0x0CC3]="after subscript",
+ [0x0CC4]="after subscript",
+ [0x0CD5]="after subscript",
+ [0x0CD6]="after subscript",
+}
+local dflt_true={
+ dflt=true
+}
+local dev2_defaults={
+ dev2=dflt_true,
+}
+local deva_defaults={
+ dev2=dflt_true,
+ deva=dflt_true,
+}
+local false_flags={ false,false,false,false }
+local both_joiners_true={
+ [0x200C]=true,
+ [0x200D]=true,
+}
+local sequence_reorder_matras={
+ features={ dv01=dev2_defaults },
+ flags=false_flags,
+ name="dv01_reorder_matras",
+ order={ "dv01" },
+ type="devanagari_reorder_matras",
+ nofsteps=1,
+ steps={
+ {
+ osdstep=true,
+ coverage=pre_mark,
+ }
+ }
+}
+local sequence_reorder_reph={
+ features={ dv02=dev2_defaults },
+ flags=false_flags,
+ name="dv02_reorder_reph",
+ order={ "dv02" },
+ type="devanagari_reorder_reph",
+ nofsteps=1,
+ steps={
+ {
+ osdstep=true,
+ coverage={},
+ }
+ }
+}
+local sequence_reorder_pre_base_reordering_consonants={
+ features={ dv03=dev2_defaults },
+ flags=false_flags,
+ name="dv03_reorder_pre_base_reordering_consonants",
+ order={ "dv03" },
+ type="devanagari_reorder_pre_base_reordering_consonants",
+ nofsteps=1,
+ steps={
+ {
+ osdstep=true,
+ coverage={},
+ }
+ }
+}
+local sequence_remove_joiners={
+ features={ dv04=deva_defaults },
+ flags=false_flags,
+ name="dv04_remove_joiners",
+ order={ "dv04" },
+ type="devanagari_remove_joiners",
+ nofsteps=1,
+ steps={
+ { osdstep=true,
+ coverage=both_joiners_true,
+ },
+ }
+}
+local basic_shaping_forms={
+ nukt=true,
+ akhn=true,
+ rphf=true,
+ pref=true,
+ rkrf=true,
+ blwf=true,
+ half=true,
+ pstf=true,
+ vatu=true,
+ cjct=true,
+}
+local valid={
+ akhn=true,
+ rphf=true,
+ pref=true,
+ half=true,
+ blwf=true,
+ pstf=true,
+ pres=true,
+ blws=true,
+ psts=true,
+}
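+-- initializedevanagi adds four artificial gsub features (dv01..dv04) with matching
+-- reorder and remove joiner sequences to fonts with a deva, dev2, mlym or mlm2 script,
+-- and scans the existing sequences for reph forms and pre base reordering consonants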
+local function initializedevanagi(tfmdata)
+ local script,language=otf.scriptandlanguage(tfmdata,attr)
+ if script=="deva" or script=="dev2" or script=="mlym" or script=="mlm2" then
+ local resources=tfmdata.resources
+ local devanagari=resources.devanagari
+ if not devanagari then
+ report_devanagari("adding devanagari features to font")
+ local gsubfeatures=resources.features.gsub
+ local sequences=resources.sequences
+ local sharedfeatures=tfmdata.shared.features
+ local lastmatch=0
+ for s=1,#sequences do
+ local features=sequences[s].features
+ if features then
+ for k,v in next,features do
+ if basic_shaping_forms[k] then
+ lastmatch=s
+ end
+ end
+ end
+ end
+ local insertindex=lastmatch+1
+ gsubfeatures["dv01"]=dev2_defaults
+ gsubfeatures["dv02"]=dev2_defaults
+ gsubfeatures["dv03"]=dev2_defaults
+ gsubfeatures["dv04"]=deva_defaults
+ local reorder_pre_base_reordering_consonants=copy(sequence_reorder_pre_base_reordering_consonants)
+ local reorder_reph=copy(sequence_reorder_reph)
+ local reorder_matras=copy(sequence_reorder_matras)
+ local remove_joiners=copy(sequence_remove_joiners)
+ insert(sequences,insertindex,reorder_pre_base_reordering_consonants)
+ insert(sequences,insertindex,reorder_reph)
+ insert(sequences,insertindex,reorder_matras)
+ insert(sequences,insertindex,remove_joiners)
+ local blwfcache={}
+ local seqsubset={}
+ local rephstep={
+ coverage={}
+ }
+ local devanagari={
+ reph=false,
+ vattu=false,
+ blwfcache=blwfcache,
+ seqsubset=seqsubset,
+ reorderreph=rephstep,
+ }
+ reorder_reph.steps={ rephstep }
+ local pre_base_reordering_consonants={}
+ reorder_pre_base_reordering_consonants.steps[1].coverage=pre_base_reordering_consonants
+ resources.devanagari=devanagari
+ for s=1,#sequences do
+ local sequence=sequences[s]
+ local steps=sequence.steps
+ local nofsteps=sequence.nofsteps
+ local features=sequence.features
+ if features["rphf"] then
+ devanagari.reph=true
+ elseif features["blwf"] then
+ devanagari.vattu=true
+ for i=1,nofsteps do
+ local step=steps[i]
+ local coverage=step.coverage
+ if coverage then
+ for k,v in next,coverage do
+ if not blwfcache[k] then
+ blwfcache[k]=v
+ end
+ end
+ end
+ end
+ end
+ if valid[kind] then
+ for i=1,nofsteps do
+ local step=steps[i]
+ local coverage=step.coverage
+ if coverage then
+ local reph=false
+ if step.osdstep then
+ for k,v in next,ra do
+ local r=coverage[k]
+ if r then
+ local h=false
+ for k,v in next,halant do
+ local h=r[k]
+ if h then
+ reph=h.ligature or false
+ break
+ end
+ end
+ if reph then
+ break
+ end
+ end
+ end
+ else
+ end
+ seqsubset[#seqsubset+1]={ kind,coverage,reph }
+ end
+ end
+ end
+ if kind=="pref" then
+ local sequence=dataset[3]
+ local steps=sequence.steps
+ local nofsteps=sequence.nofsteps
+ for i=1,nofsteps do
+ local step=steps[i]
+ local coverage=step.coverage
+ if coverage then
+ for k,v in next,halant do
+ local h=coverage[k]
+ if h then
+ local found=false
+ for k,v in next,h do
+ found=v and v.ligature
+ if found then
+ pre_base_reordering_consonants[k]=found
+ break
+ end
+ end
+ if found then
+ break
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ if script=="deva" then
+ sharedfeatures["dv04"]=true
+ elseif script=="dev2" then
+ sharedfeatures["dv01"]=true
+ sharedfeatures["dv02"]=true
+ sharedfeatures["dv03"]=true
+ sharedfeatures["dv04"]=true
+ elseif script=="mlym" then
+ sharedfeatures["pstf"]=true
+ elseif script=="mlm2" then
+ sharedfeatures["pstf"]=true
+ sharedfeatures["pref"]=true
+ sharedfeatures["dv03"]=true
+ gsubfeatures ["dv03"]=dev2_defaults
+ insert(sequences,insertindex,sequence_reorder_pre_base_reordering_consonants)
+ end
+ end
end
end
-local function tabstr_mixed(t)
- local s={}
- local n=#t
- if n==0 then
- return ""
- elseif n==1 then
- local k=t[1]
- if k==true then
- return "++"
- elseif k==false then
- return "--"
- else
- return tostring(k)
- end
- else
- for i=1,n do
- local k=t[i]
- if k==true then
- s[i]="++"
- elseif k==false then
- s[i]="--"
- else
- s[i]=k
+registerotffeature {
+ name="devanagari",
+ description="inject additional features",
+ default=true,
+ initializers={
+ node=initializedevanagi,
+ },
+}
+local function deva_initialize(font,attr)
+ local tfmdata=fontdata[font]
+ local datasets=otf.dataset(tfmdata,font,attr)
+ local devanagaridata=datasets.devanagari
+ if not devanagaridata then
+ devanagaridata={
+ reph=false,
+ vattu=false,
+ blwfcache={},
+ }
+ datasets.devanagari=devanagaridata
+ local resources=tfmdata.resources
+ local devanagari=resources.devanagari
+ for s=1,#datasets do
+ local dataset=datasets[s]
+ if dataset and dataset[1] then
+ local kind=dataset[4]
+ if kind=="rphf" then
+ devanagaridata.reph=true
+ elseif kind=="blwf" then
+ devanagaridata.vattu=true
+ devanagaridata.blwfcache=devanagari.blwfcache
+ end
end
end
- return concat(s,",")
end
+ return devanagaridata.reph,devanagaridata.vattu,devanagaridata.blwfcache
end
-local function tabstr_boolean(t)
- local s={}
- local n=0
- for k,v in next,t do
- n=n+1
- if v then
- s[n]=k.."+"
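+-- deva_reorder reorders one syllable: it handles an initial reph (ra plus halant),
+-- treats a leading nbsp as a placeholder base, locates the base consonant, moves pre
+-- base matras in front of the first consonant and puts reph and vattu forms in their
+-- place; the nbsp count is passed back to the caller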
+local function deva_reorder(head,start,stop,font,attr,nbspaces)
+ local reph,vattu,blwfcache=deva_initialize(font,attr)
+ local current=start
+ local n=getnext(start)
+ local base=nil
+ local firstcons=nil
+ local lastcons=nil
+ local basefound=false
+ if reph and ra[getchar(start)] and halant[getchar(n)] then
+ if n==stop then
+ return head,stop,nbspaces
+ end
+ if getchar(getnext(n))==c_zwj then
+ current=start
else
- s[n]=k.."-"
+ current=getnext(n)
+ setprop(start,a_state,s_rphf)
end
end
- if n==0 then
- return ""
- elseif n==1 then
- return s[1]
- else
- sort(s)
- return concat(s,",")
- end
-end
-local function packdata(data)
- if data then
- local h,t,c={},{},{}
- local hh,tt,cc={},{},{}
- local nt,ntt=0,0
- local function pack_normal(v)
- local tag=tabstr_normal(v)
- local ht=h[tag]
- if ht then
- c[ht]=c[ht]+1
- return ht
- else
- nt=nt+1
- t[nt]=v
- h[tag]=nt
- c[nt]=1
- return nt
+ if getchar(current)==c_nbsp then
+ if current==stop then
+ stop=getprev(stop)
+ head=remove_node(head,current)
+ free_node(current)
+ return head,stop,nbspaces
+ else
+ nbspaces=nbspaces+1
+ base=current
+ firstcons=current
+ lastcons=current
+ current=getnext(current)
+ if current~=stop then
+ if nukta[getchar(current)] then
+ current=getnext(current)
+ end
+ if getchar(current)==c_zwj then
+ if current~=stop then
+ local next=getnext(current)
+ if next~=stop and halant[getchar(next)] then
+ current=next
+ next=getnext(current)
+ local tmp=next and getnext(next) or nil
+ local changestop=next==stop
+ local tempcurrent=copy_node(next)
+ copyinjection(tempcurrent,next)
+ local nextcurrent=copy_node(current)
+ copyinjection(nextcurrent,current)
+ setlink(tempcurrent,nextcurrent)
+ setprop(tempcurrent,a_state,s_blwf)
+ tempcurrent=processcharacters(tempcurrent,font)
+ setprop(tempcurrent,a_state,unsetvalue)
+ if getchar(next)==getchar(tempcurrent) then
+ flush_list(tempcurrent)
+ local n=copy_node(current)
+ copyinjection(n,current)
+ setchar(current,dotted_circle)
+ head=insert_node_after(head,current,n)
+ else
+ setchar(current,getchar(tempcurrent))
+ local freenode=getnext(current)
+ setlink(current,tmp)
+ free_node(freenode)
+ flush_list(tempcurrent)
+ if changestop then
+ stop=current
+ end
+ end
+ end
+ end
+ end
end
end
- local function pack_flat(v)
- local tag=tabstr_flat(v)
- local ht=h[tag]
- if ht then
- c[ht]=c[ht]+1
- return ht
- else
- nt=nt+1
- t[nt]=v
- h[tag]=nt
- c[nt]=1
- return nt
+ end
+ while not basefound do
+ local char=getchar(current)
+ if consonant[char] then
+ setprop(current,a_state,s_half)
+ if not firstcons then
+ firstcons=current
end
- end
- local function pack_boolean(v)
- local tag=tabstr_boolean(v)
- local ht=h[tag]
- if ht then
- c[ht]=c[ht]+1
- return ht
+ lastcons=current
+ if not base then
+ base=current
+ elseif blwfcache[char] then
+ setprop(current,a_state,s_blwf)
else
- nt=nt+1
- t[nt]=v
- h[tag]=nt
- c[nt]=1
- return nt
+ base=current
end
end
- local function pack_indexed(v)
- local tag=concat(v," ")
- local ht=h[tag]
- if ht then
- c[ht]=c[ht]+1
- return ht
- else
- nt=nt+1
- t[nt]=v
- h[tag]=nt
- c[nt]=1
- return nt
- end
+ basefound=current==stop
+ current=getnext(current)
+ end
+ if base~=lastcons then
+ local np=base
+ local n=getnext(base)
+ local ch=getchar(n)
+ if nukta[ch] then
+ np=n
+ n=getnext(n)
+ ch=getchar(n)
+ end
+ if halant[ch] then
+ if lastcons~=stop then
+ local ln=getnext(lastcons)
+ if nukta[getchar(ln)] then
+ lastcons=ln
+ end
+ end
+ local nn=getnext(n)
+ local ln=getnext(lastcons)
+ setlink(np,nn)
+ setnext(lastcons,n)
+ if ln then
+ setprev(ln,n)
+ end
+ setnext(n,ln)
+ setprev(n,lastcons)
+ if lastcons==stop then
+ stop=n
+ end
+ end
+ end
+ n=getnext(start)
+ if n~=stop and ra[getchar(start)] and halant[getchar(n)] and not zw_char[getchar(getnext(n))] then
+ local matra=base
+ if base~=stop then
+ local next=getnext(base)
+ if dependent_vowel[getchar(next)] then
+ matra=next
+ end
+ end
+ local sp=getprev(start)
+ local nn=getnext(n)
+ local mn=getnext(matra)
+ setlink(sp,nn)
+ setlink(matra,start)
+ setlink(n,mn)
+ if head==start then
+ head=nn
end
- local function pack_mixed(v)
- local tag=tabstr_mixed(v)
- local ht=h[tag]
- if ht then
- c[ht]=c[ht]+1
- return ht
- else
- nt=nt+1
- t[nt]=v
- h[tag]=nt
- c[nt]=1
- return nt
- end
+ start=nn
+ if matra==stop then
+ stop=n
end
- local function pack_final(v)
- if c[v]<=criterium then
- return t[v]
- else
- local hv=hh[v]
- if hv then
- return hv
- else
- ntt=ntt+1
- tt[ntt]=t[v]
- hh[v]=ntt
- cc[ntt]=c[v]
- return ntt
+ end
+ local current=start
+ while current~=stop do
+ local next=getnext(current)
+ if next~=stop and halant[getchar(next)] and getchar(getnext(next))==c_zwnj then
+ setprop(current,a_state,unsetvalue)
+ end
+ current=next
+ end
+ if base~=stop and getprop(base,a_state) then
+ local next=getnext(base)
+ if halant[getchar(next)] and not (next~=stop and getchar(getnext(next))==c_zwj) then
+ setprop(base,a_state,unsetvalue)
+ end
+ end
+ local current,allreordered,moved=start,false,{ [base]=true }
+ local a,b,p,bn=base,base,base,getnext(base)
+ if base~=stop and nukta[getchar(bn)] then
+ a,b,p=bn,bn,bn
+ end
+ while not allreordered do
+ local c=current
+ local n=getnext(current)
+ local l=nil
+ if c~=stop then
+ local ch=getchar(n)
+ if nukta[ch] then
+ c=n
+ n=getnext(n)
+ ch=getchar(n)
+ end
+ if c~=stop then
+ if halant[ch] then
+ c=n
+ n=getnext(n)
+ ch=getchar(n)
+ end
+ while c~=stop and dependent_vowel[ch] do
+ c=n
+ n=getnext(n)
+ ch=getchar(n)
+ end
+ if c~=stop then
+ if vowel_modifier[ch] then
+ c=n
+ n=getnext(n)
+ ch=getchar(n)
+ end
+ if c~=stop and stress_tone_mark[ch] then
+ c=n
+ n=getnext(n)
+ end
end
end
end
- local function success(stage,pass)
- if nt==0 then
- if trace_loading or trace_packing then
- report_otf("pack quality: nothing to pack")
+ local bp=getprev(firstcons)
+ local cn=getnext(current)
+ local last=getnext(c)
+ while cn~=last do
+ if pre_mark[getchar(cn)] then
+ if bp then
+ setnext(bp,cn)
end
- return false
- elseif nt>=threshold then
- local one,two,rest=0,0,0
- if pass==1 then
- for k,v in next,c do
- if v==1 then
- one=one+1
- elseif v==2 then
- two=two+1
- else
- rest=rest+1
- end
- end
- else
- for k,v in next,cc do
- if v>20 then
- rest=rest+1
- elseif v>10 then
- two=two+1
- else
- one=one+1
- end
- end
- data.tables=tt
+ local prev,next=getboth(cn)
+ if next then
+ setprev(next,prev)
end
- if trace_loading or trace_packing then
- report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)",stage,pass,one+two+rest,one,two,rest,criterium)
+ setnext(prev,next)
+ if cn==stop then
+ stop=prev
end
- return true
- else
- if trace_loading or trace_packing then
- report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)",stage,pass,nt,threshold)
+ setprev(cn,bp)
+ setlink(cn,firstcons)
+ if firstcons==start then
+ if head==start then
+ head=cn
+ end
+ start=cn
end
- return false
- end
- end
- local function packers(pass)
- if pass==1 then
- return pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed
- else
- return pack_final,pack_final,pack_final,pack_final,pack_final
- end
- end
- local resources=data.resources
- local lookuptypes=resources.lookuptypes
- for pass=1,2 do
- if trace_packing then
- report_otf("start packing: stage 1, pass %s",pass)
+ break
end
- local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass)
- for unicode,description in next,data.descriptions do
- local boundingbox=description.boundingbox
- if boundingbox then
- description.boundingbox=pack_indexed(boundingbox)
- end
- local slookups=description.slookups
- if slookups then
- for tag,slookup in next,slookups do
- local what=lookuptypes[tag]
- if what=="pair" then
- local t=slookup[2] if t then slookup[2]=pack_indexed(t) end
- local t=slookup[3] if t then slookup[3]=pack_indexed(t) end
- elseif what~="substitution" then
- slookups[tag]=pack_indexed(slookup)
- end
+ cn=getnext(cn)
+ end
+ allreordered=c==stop
+ current=getnext(c)
+ end
+ if reph or vattu then
+ local current,cns=start,nil
+ while current~=stop do
+ local c=current
+ local n=getnext(current)
+ if ra[getchar(current)] and halant[getchar(n)] then
+ c=n
+ n=getnext(n)
+ local b,bn=base,base
+ while bn~=stop do
+ local next=getnext(bn)
+ if dependent_vowel[getchar(next)] then
+ b=next
end
+ bn=next
end
- local mlookups=description.mlookups
- if mlookups then
- for tag,mlookup in next,mlookups do
- local what=lookuptypes[tag]
- if what=="pair" then
- for i=1,#mlookup do
- local lookup=mlookup[i]
- local t=lookup[2] if t then lookup[2]=pack_indexed(t) end
- local t=lookup[3] if t then lookup[3]=pack_indexed(t) end
- end
- elseif what~="substitution" then
- for i=1,#mlookup do
- mlookup[i]=pack_indexed(mlookup[i])
+ if getprop(current,a_state)==s_rphf then
+ if b~=current then
+ if current==start then
+ if head==start then
+ head=n
end
+ start=n
+ end
+ if b==stop then
+ stop=c
end
+ local prev=getprev(current)
+ setlink(prev,n)
+ local next=getnext(b)
+ setlink(c,next)
+ setlink(b,current)
end
- end
- local kerns=description.kerns
- if kerns then
- for tag,kern in next,kerns do
- kerns[tag]=pack_flat(kern)
+ elseif cns and getnext(cns)~=current then
+ local cp=getprev(current)
+ local cnsn=getnext(cns)
+ setlink(cp,n)
+ setlink(cns,current)
+ setlink(c,cnsn)
+ if c==stop then
+ stop=cp
+ break
end
+ current=getprev(n)
end
- local math=description.math
- if math then
- local kerns=math.kerns
- if kerns then
- for tag,kern in next,kerns do
- kerns[tag]=pack_normal(kern)
- end
+ else
+ local char=getchar(current)
+ if consonant[char] then
+ cns=current
+ local next=getnext(cns)
+ if halant[getchar(next)] then
+ cns=next
end
- end
- local anchors=description.anchors
- if anchors then
- for what,anchor in next,anchors do
- if what=="baselig" then
- for _,a in next,anchor do
- for k=1,#a do
- a[k]=pack_indexed(a[k])
- end
- end
- else
- for k,v in next,anchor do
- anchor[k]=pack_indexed(v)
- end
- end
+ elseif char==c_nbsp then
+ nbspaces=nbspaces+1
+ cns=current
+ local next=getnext(cns)
+ if halant[getchar(next)] then
+ cns=next
end
end
- local altuni=description.altuni
- if altuni then
- for i=1,#altuni do
- altuni[i]=pack_flat(altuni[i])
+ end
+ current=getnext(current)
+ end
+ end
+ if getchar(base)==c_nbsp then
+ nbspaces=nbspaces-1
+ head=remove_node(head,base)
+ free_node(base)
+ end
+ return head,stop,nbspaces
+end
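+-- the dv01 handler moves a pre base matra: it is reinserted after a halant (possibly
+-- followed by a joiner) that belongs to the same syllable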
+function handlers.devanagari_reorder_matras(head,start)
+ local current=start
+ local startfont=getfont(start)
+ local startattr=getprop(start,a_syllabe)
+ while current do
+ local char=ischar(current,startfont)
+ local next=getnext(current)
+ if char and getprop(current,a_syllabe)==startattr then
+ if halant[char] and not getprop(current,a_state) then
+ if next then
+ local char=ischar(next,startfont)
+ if char and zw_char[char] and getprop(next,a_syllabe)==startattr then
+ current=next
+ next=getnext(current)
end
end
+ local startnext=getnext(start)
+ head=remove_node(head,start)
+ setlink(start,next)
+ setlink(current,start)
+ start=startnext
+ break
end
- local lookups=data.lookups
- if lookups then
- for _,lookup in next,lookups do
- local rules=lookup.rules
- if rules then
- for i=1,#rules do
- local rule=rules[i]
- local r=rule.before if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end
- local r=rule.after if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end
- local r=rule.current if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end
- local r=rule.replacements if r then rule.replacements=pack_flat (r) end
- local r=rule.lookups if r then rule.lookups=pack_indexed(r) end
- end
+ end
+ current=next
+ end
+ return head,start,true
+end
+function handlers.devanagari_reorder_reph(head,start)
+ local current=getnext(start)
+ local startnext=nil
+ local startprev=nil
+ local startfont=getfont(start)
+ local startattr=getprop(start,a_syllabe)
+ while current do
+ local char=ischar(current,startfont)
+ if char and getprop(current,a_syllabe)==startattr then
+ if halant[char] and not getprop(current,a_state) then
+ local next=getnext(current)
+ if next then
+ local nextchar=ischar(next,startfont)
+ if nextchar and zw_char[nextchar] and getprop(next,a_syllabe)==startattr then
+ current=next
+ next=getnext(current)
end
end
+ startnext=getnext(start)
+ head=remove_node(head,start)
+ setlink(start,next)
+ setlink(current,start)
+ start=startnext
+ startattr=getprop(start,a_syllabe)
+ break
end
- local anchor_to_lookup=resources.anchor_to_lookup
- if anchor_to_lookup then
- for anchor,lookup in next,anchor_to_lookup do
- anchor_to_lookup[anchor]=pack_normal(lookup)
+ current=getnext(current)
+ else
+ break
+ end
+ end
+ if not startnext then
+ current=getnext(start)
+ while current do
+ local char=ischar(current,startfont)
+ if char and getprop(current,a_syllabe)==startattr then
+ if getprop(current,a_state)==s_pstf then
+ startnext=getnext(start)
+ head=remove_node(head,start)
+ local prev=getprev(current)
+ setlink(prev,start)
+ setlink(start,current)
+ start=startnext
+ startattr=getprop(start,a_syllabe)
+ break
end
+ current=getnext(current)
+ else
+ break
end
- local lookup_to_anchor=resources.lookup_to_anchor
- if lookup_to_anchor then
- for lookup,anchor in next,lookup_to_anchor do
- lookup_to_anchor[lookup]=pack_normal(anchor)
+ end
+ end
+ if not startnext then
+ current=getnext(start)
+ local c=nil
+ while current do
+ local char=ischar(current,startfont)
+ if char and getprop(current,a_syllabe)==startattr then
+ if not c and mark_above_below_post[char] and reorder_class[char]~="after subscript" then
+ c=current
end
+ current=getnext(current)
+ else
+ break
end
- local sequences=resources.sequences
- if sequences then
- for feature,sequence in next,sequences do
- local flags=sequence.flags
- if flags then
- sequence.flags=pack_normal(flags)
- end
- local subtables=sequence.subtables
- if subtables then
- sequence.subtables=pack_normal(subtables)
- end
- local features=sequence.features
- if features then
- for script,feature in next,features do
- features[script]=pack_normal(feature)
+ end
+ if c then
+ startnext=getnext(start)
+ head=remove_node(head,start)
+ local prev=getprev(c)
+ setlink(prev,start)
+ setlink(start,c)
+ start=startnext
+ startattr=getprop(start,a_syllabe)
+ end
+ end
+ if not startnext then
+ current=start
+ local next=getnext(current)
+ while next do
+ local nextchar=ischar(next,startfont)
+ if nextchar and getprop(next,a_syllabe)==startattr then
+ current=next
+ next=getnext(current)
+ else
+ break
+ end
+ end
+ if start~=current then
+ startnext=getnext(start)
+ head=remove_node(head,start)
+ local next=getnext(current)
+ setlink(start,next)
+ setlink(current,start)
+ start=startnext
+ end
+ end
+ return head,start,true
+end
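
-- devanagari_reorder_reph above tries progressively weaker targets for the
-- reph: after a halant (plus an optional zwj/zwnj), before the first
-- post-base (pstf) form, before the first above/below/post mark not classed
-- "after subscript", and finally the end of the syllable. A compact sketch of
-- that cascade on an array of pre-classified slots (the class and state names
-- here are assumptions for the sketch, not the real lookup tables):

local function reph_position(syllable)      -- syllable: array of { class=..., state=... }
    for i=2,#syllable do                    -- 1: after a halant
        if syllable[i].class == "halant" then return i+1 end
    end
    for i=2,#syllable do                    -- 2: before the first post-base form
        if syllable[i].state == "pstf" then return i end
    end
    for i=2,#syllable do                    -- 3: before the first above/below/post mark
        local c = syllable[i].class
        if c == "above" or c == "below" or c == "post" then return i end
    end
    return #syllable+1                      -- 4: at the end of the syllable
end
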
+function handlers.devanagari_reorder_pre_base_reordering_consonants(head,start)
+ local current=start
+ local startnext=nil
+ local startprev=nil
+ local startfont=getfont(start)
+ local startattr=getprop(start,a_syllabe)
+ while current do
+ local char=ischar(current,startfont)
+ if char and getprop(current,a_syllabe)==startattr then
+ local next=getnext(current)
+ if halant[char] and not getprop(current,a_state) then
+ if next then
+ local nextchar=ischar(next,startfont)
+ if nextchar and getprop(next,a_syllabe)==startattr then
+ if nextchar==c_zwnj or nextchar==c_zwj then
+ current=next
+ next=getnext(current)
end
end
- local order=sequence.order
- if order then
- sequence.order=pack_indexed(order)
- end
- local markclass=sequence.markclass
- if markclass then
- sequence.markclass=pack_boolean(markclass)
- end
end
+ startnext=getnext(start)
+ head=remove_node(head,start)
+ setlink(start,next)
+ setlink(current,start)
+ start=startnext
+ break
end
- local lookups=resources.lookups
- if lookups then
- for name,lookup in next,lookups do
- local flags=lookup.flags
- if flags then
- lookup.flags=pack_normal(flags)
+ current=next
+ else
+ break
+ end
+ end
+ if not startnext then
+ current=getnext(start)
+ startattr=getprop(start,a_syllabe)
+ while current do
+ local char=ischar(current,startfont)
+ if char and getprop(current,a_syllabe)==startattr then
+ if not consonant[char] and getprop(current,a_state) then
+ startnext=getnext(start)
+ head=remove_node(head,start)
+ local prev=getprev(current)
+ setlink(prev,start)
+ setlink(start,current)
+ start=startnext
+ break
+ end
+ current=getnext(current)
+ else
+ break
+ end
+ end
+ end
+ return head,start,true
+end
+function handlers.devanagari_remove_joiners(head,start,kind,lookupname,replacement)
+ local stop=getnext(start)
+ local font=getfont(start)
+ local last=start
+ while stop do
+ local char=ischar(stop,font)
+ if char and (char==c_zwnj or char==c_zwj) then
+ last=stop
+ stop=getnext(stop)
+ else
+ break
+ end
+ end
+ local prev=getprev(start)
+ if stop then
+ setnext(last)
+ setlink(prev,stop)
+ elseif prev then
+ setnext(prev)
+ end
+ if head==start then
+ head=stop
+ end
+ flush_list(start)
+ return head,stop,true
+end
+local function dev2_initialize(font,attr)
+ local devanagari=fontdata[font].resources.devanagari
+ if devanagari then
+ return devanagari.seqsubset or {},devanagari.reorderreph or {}
+ else
+ return {},{}
+ end
+end
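
-- dev2_initialize only unpacks what an earlier initializer (not part of this
-- hunk) is assumed to have stored in fontdata[font].resources.devanagari. The
-- shape implied by the code below, written out as a Lua literal (field names
-- come from the code, the contents are placeholders):

local devanagari_resources = {
    seqsubset   = {                       -- one { kind, coverage } pair per feature
        { "rphf", { } },                  -- coverage: first char -> { second char -> substitution }
        { "pref", { } },
        { "half", { } },
        { "blwf", { } },
        { "pstf", { } },
    },
    reorderreph = { },                    -- dev2_reorder stores a .coverage table in here
}
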
+local function dev2_reorder(head,start,stop,font,attr,nbspaces)
+ local seqsubset,reorderreph=dev2_initialize(font,attr)
+ local reph=false
+ local halfpos=nil
+ local basepos=nil
+ local subpos=nil
+ local postpos=nil
+ local locl={}
+ for i=1,#seqsubset do
+ local subset=seqsubset[i]
+ local kind=subset[1]
+ local lookupcache=subset[2]
+ if kind=="rphf" then
+ for k,v in next,ra do
+ local r=lookupcache[k]
+ if r then
+ for k,v in next,halant do
+ local h=r[k]
+ if h then
+ reph=h.ligature or false
+ break
+ end
end
- local subtables=lookup.subtables
- if subtables then
- lookup.subtables=pack_normal(subtables)
+ if reph then
+ break
end
end
end
- local features=resources.features
- if features then
- for _,what in next,glists do
- local list=features[what]
- if list then
- for feature,spec in next,list do
- list[feature]=pack_normal(spec)
+ local current=start
+ local last=getnext(stop)
+ while current~=last do
+ if current~=stop then
+ local c=locl[current] or getchar(current)
+ local found=lookupcache[c]
+ if found then
+ local next=getnext(current)
+ local n=locl[next] or getchar(next)
+ if found[n] then
+ local afternext=next~=stop and getnext(next)
+ if afternext and zw_char[getchar(afternext)] then
+ current=next
+ current=getnext(current)
+ elseif current==start then
+ setprop(current,a_state,s_rphf)
+ current=next
+ else
+ current=next
+ end
end
end
end
+ current=getnext(current)
end
- if not success(1,pass) then
- return
- end
- end
- if nt>0 then
- for pass=1,2 do
- if trace_packing then
- report_otf("start packing: stage 2, pass %s",pass)
- end
- local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass)
- for unicode,description in next,data.descriptions do
- local kerns=description.kerns
- if kerns then
- description.kerns=pack_normal(kerns)
- end
- local math=description.math
- if math then
- local kerns=math.kerns
- if kerns then
- math.kerns=pack_normal(kerns)
+ elseif kind=="pref" then
+ local current=start
+ local last=getnext(stop)
+ while current~=last do
+ if current~=stop then
+ local c=locl[current] or getchar(current)
+ local found=lookupcache[c]
+ if found then
+ local next=getnext(current)
+ local n=locl[next] or getchar(next)
+ if found[n] then
+ setprop(current,a_state,s_pref)
+ setprop(next,a_state,s_pref)
+ current=next
end
end
- local anchors=description.anchors
- if anchors then
- description.anchors=pack_normal(anchors)
+ end
+ current=getnext(current)
+ end
+ elseif kind=="half" then
+ local current=start
+ local last=getnext(stop)
+ while current~=last do
+ if current~=stop then
+ local c=locl[current] or getchar(current)
+ local found=lookupcache[c]
+ if found then
+ local next=getnext(current)
+ local n=locl[next] or getchar(next)
+ if found[n] then
+ if next~=stop and getchar(getnext(next))==c_zwnj then
+ current=next
+ else
+ setprop(current,a_state,s_half)
+ if not halfpos then
+ halfpos=current
+ end
+ end
+ current=getnext(current)
+ end
end
- local mlookups=description.mlookups
- if mlookups then
- for tag,mlookup in next,mlookups do
- mlookups[tag]=pack_normal(mlookup)
+ end
+ current=getnext(current)
+ end
+ elseif kind=="blwf" then
+ local current=start
+ local last=getnext(stop)
+ while current~=last do
+ if current~=stop then
+ local c=locl[current] or getchar(current)
+ local found=lookupcache[c]
+ if found then
+ local next=getnext(current)
+ local n=locl[next] or getchar(next)
+ if found[n] then
+ setprop(current,a_state,s_blwf)
+ setprop(next,a_state,s_blwf)
+ current=next
+ subpos=current
end
end
- local altuni=description.altuni
- if altuni then
- description.altuni=pack_normal(altuni)
+ end
+ current=getnext(current)
+ end
+ elseif kind=="pstf" then
+ local current=start
+ local last=getnext(stop)
+ while current~=last do
+ if current~=stop then
+ local c=locl[current] or getchar(current)
+ local found=lookupcache[c]
+ if found then
+ local next=getnext(current)
+ local n=locl[next] or getchar(next)
+ if found[n] then
+ setprop(current,a_state,s_pstf)
+ setprop(next,a_state,s_pstf)
+ current=next
+ postpos=current
+ end
end
end
- local lookups=data.lookups
- if lookups then
- for _,lookup in next,lookups do
- local rules=lookup.rules
- if rules then
- for i=1,#rules do
- local rule=rules[i]
- local r=rule.before if r then rule.before=pack_normal(r) end
- local r=rule.after if r then rule.after=pack_normal(r) end
- local r=rule.current if r then rule.current=pack_normal(r) end
+ current=getnext(current)
+ end
+ end
+ end
+ reorderreph.coverage={ [reph]=true }
+ local current,base,firstcons=start,nil,nil
+ if getprop(start,a_state)==s_rphf then
+ current=getnext(getnext(start))
+ end
+ if current~=getnext(stop) and getchar(current)==c_nbsp then
+ if current==stop then
+ stop=getprev(stop)
+ head=remove_node(head,current)
+ free_node(current)
+ return head,stop,nbspaces
+ else
+ nbspaces=nbspaces+1
+ base=current
+ current=getnext(current)
+ if current~=stop then
+ local char=getchar(current)
+ if nukta[char] then
+ current=getnext(current)
+ char=getchar(current)
+ end
+ if char==c_zwj then
+ local next=getnext(current)
+ if current~=stop and next~=stop and halant[getchar(next)] then
+ current=next
+ next=getnext(current)
+ local tmp=getnext(next)
+ local changestop=next==stop
+ setnext(next,nil)
+ setprop(current,a_state,s_pref)
+ current=processcharacters(current,font)
+ setprop(current,a_state,s_blwf)
+ current=processcharacters(current,font)
+ setprop(current,a_state,s_pstf)
+ current=processcharacters(current,font)
+ setprop(current,a_state,unsetvalue)
+ if halant[getchar(current)] then
+ setnext(getnext(current),tmp)
+ local nc=copy_node(current)
+ copyinjection(nc,current)
+ setchar(current,dotted_circle)
+ head=insert_node_after(head,current,nc)
+ else
+ setnext(current,tmp)
+ if changestop then
+ stop=current
end
end
end
end
- local sequences=resources.sequences
- if sequences then
- for feature,sequence in next,sequences do
- sequence.features=pack_normal(sequence.features)
+ end
+ end
+ else
+ local last=getnext(stop)
+ while current~=last do
+ local next=getnext(current)
+ if consonant[getchar(current)] then
+ if not (current~=stop and next~=stop and halant[getchar(next)] and getchar(getnext(next))==c_zwj) then
+ if not firstcons then
+ firstcons=current
+ end
+ local a=getprop(current,a_state)
+ if not (a==s_pref or a==s_blwf or a==s_pstf) then
+ base=current
end
- end
- if not success(2,pass) then
end
end
- for pass=1,2 do
- local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass)
- for unicode,description in next,data.descriptions do
- local slookups=description.slookups
- if slookups then
- description.slookups=pack_normal(slookups)
+ current=next
+ end
+ if not base then
+ base=firstcons
+ end
+ end
+ if not base then
+ if getprop(start,a_state)==s_rphf then
+ setprop(start,a_state,unsetvalue)
+ end
+ return head,stop,nbspaces
+ else
+ if getprop(base,a_state) then
+ setprop(base,a_state,unsetvalue)
+ end
+ basepos=base
+ end
+ if not halfpos then
+ halfpos=base
+ end
+ if not subpos then
+ subpos=base
+ end
+ if not postpos then
+ postpos=subpos or base
+ end
+ local moved={}
+ local current=start
+ local last=getnext(stop)
+ while current~=last do
+ local char,target,cn=locl[current] or getchar(current),nil,getnext(current)
+ local tpm=twopart_mark[char]
+ if tpm then
+ local extra=copy_node(current)
+ copyinjection(extra,current)
+ char=tpm[1]
+ setchar(current,char)
+ setchar(extra,tpm[2])
+ head=insert_node_after(head,current,extra)
+ end
+ if not moved[current] and dependent_vowel[char] then
+ if pre_mark[char] then
+ moved[current]=true
+ local prev,next=getboth(current)
+ setlink(prev,next)
+ if current==stop then
+ stop=getprev(current)
+ end
+ if halfpos==start then
+ if head==start then
+ head=current
end
- local mlookups=description.mlookups
- if mlookups then
- description.mlookups=pack_normal(mlookups)
+ start=current
+ end
+ local prev=getprev(halfpos)
+ setlink(prev,current)
+ setlink(current,halfpos)
+ halfpos=current
+ elseif above_mark[char] then
+ target=basepos
+ if subpos==basepos then
+ subpos=current
+ end
+ if postpos==basepos then
+ postpos=current
+ end
+ basepos=current
+ elseif below_mark[char] then
+ target=subpos
+ if postpos==subpos then
+ postpos=current
+ end
+ subpos=current
+ elseif post_mark[char] then
+ target=postpos
+ postpos=current
+ end
+ if mark_above_below_post[char] then
+ local prev=getprev(current)
+ if prev~=target then
+ local next=getnext(current)
+ setlink(prev,next)
+ if current==stop then
+ stop=prev
end
+ local next=getnext(target)
+ setlink(current,next)
+ setlink(target,current)
end
end
end
+ current=cn
end
-end
-local unpacked_mt={
- __index=function(t,k)
- t[k]=false
- return k
+ local current,c=start,nil
+ while current~=stop do
+ local char=getchar(current)
+ if halant[char] or stress_tone_mark[char] then
+ if not c then
+ c=current
+ end
+ else
+ c=nil
end
-}
-local function unpackdata(data)
- if data then
- local tables=data.tables
- if tables then
- local resources=data.resources
- local lookuptypes=resources.lookuptypes
- local unpacked={}
- setmetatable(unpacked,unpacked_mt)
- for unicode,description in next,data.descriptions do
- local tv=tables[description.boundingbox]
- if tv then
- description.boundingbox=tv
- end
- local slookups=description.slookups
- if slookups then
- local tv=tables[slookups]
- if tv then
- description.slookups=tv
- slookups=unpacked[tv]
- end
- if slookups then
- for tag,lookup in next,slookups do
- local what=lookuptypes[tag]
- if what=="pair" then
- local tv=tables[lookup[2]]
- if tv then
- lookup[2]=tv
- end
- local tv=tables[lookup[3]]
- if tv then
- lookup[3]=tv
- end
- elseif what~="substitution" then
- local tv=tables[lookup]
- if tv then
- slookups[tag]=tv
+ local next=getnext(current)
+ if c and nukta[getchar(next)] then
+ if head==c then
+ head=next
+ end
+ if stop==next then
+ stop=current
+ end
+ local prev=getprev(c)
+ setlink(prev,next)
+ local nextnext=getnext(next)
+ setnext(current,nextnext)
+ local nextnextnext=getnext(nextnext)
+ if nextnextnext then
+ setprev(nextnextnext,current)
+ end
+ setlink(nextnext,c)
+ end
+ if stop==current then break end
+ current=getnext(current)
+ end
+ if getchar(base)==c_nbsp then
+ nbspaces=nbspaces-1
+ head=remove_node(head,base)
+ free_node(base)
+ end
+ return head,stop,nbspaces
+end
+local separator={}
+imerge(separator,consonant)
+imerge(separator,independent_vowel)
+imerge(separator,dependent_vowel)
+imerge(separator,vowel_modifier)
+imerge(separator,stress_tone_mark)
+for k,v in next,nukta do separator[k]=true end
+for k,v in next,halant do separator[k]=true end
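
-- imerge is defined earlier in this file and is not visible in this hunk; the
-- calls above only need it to copy the keys of the second table into the
-- first. A stand-in with that behaviour, in case the snippet is read in
-- isolation:

local function imerge(target,source)
    for k, v in next, source do
        target[k] = v
    end
    return target
end
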
+local function analyze_next_chars_one(c,font,variant)
+ local n=getnext(c)
+ if not n then
+ return c
+ end
+ if variant==1 then
+ local v=ischar(n,font)
+ if v and nukta[v] then
+ n=getnext(n)
+ if n then
+ v=ischar(n,font)
+ end
+ end
+ if n and v then
+ local nn=getnext(n)
+ if nn then
+ local vv=ischar(nn,font)
+ if vv then
+ local nnn=getnext(nn)
+ if nnn then
+ local vvv=ischar(nnn,font)
+ if vvv then
+ if vv==c_zwj and consonant[vvv] then
+ c=nnn
+ elseif (vv==c_zwnj or vv==c_zwj) and halant[vvv] then
+ local nnnn=getnext(nnn)
+ if nnnn then
+ local vvvv=ischar(nnnn,font)
+ if vvvv and consonant[vvvv] then
+ c=nnnn
+ end
end
end
end
end
end
- local mlookups=description.mlookups
- if mlookups then
- local tv=tables[mlookups]
- if tv then
- description.mlookups=tv
- mlookups=unpacked[tv]
+ end
+ end
+ elseif variant==2 then
+ local v=ischar(n,font)
+ if v and nukta[v] then
+ c=n
+ end
+ n=getnext(c)
+ if n then
+ v=ischar(n,font)
+ if v then
+ local nn=getnext(n)
+ if nn then
+ local vv=ischar(nn,font)
+ if vv and zw_char[vv] then
+ n=nn
+ v=vv
+ nn=getnext(nn)
+ vv=nn and ischar(nn,font)
end
- if mlookups then
- for tag,list in next,mlookups do
- local tv=tables[list]
- if tv then
- mlookups[tag]=tv
- list=unpacked[tv]
- end
- if list then
- local what=lookuptypes[tag]
- if what=="pair" then
- for i=1,#list do
- local lookup=list[i]
- local tv=tables[lookup[2]]
- if tv then
- lookup[2]=tv
- end
- local tv=tables[lookup[3]]
- if tv then
- lookup[3]=tv
- end
- end
- elseif what~="substitution" then
- for i=1,#list do
- local tv=tables[list[i]]
- if tv then
- list[i]=tv
- end
- end
- end
- end
- end
+ if vv and halant[v] and consonant[vv] then
+ c=nn
end
end
- local kerns=description.kerns
- if kerns then
- local tm=tables[kerns]
- if tm then
- description.kerns=tm
- kerns=unpacked[tm]
+ end
+ end
+ end
+ local n=getnext(c)
+ if not n then
+ return c
+ end
+ local v=ischar(n,font)
+ if not v then
+ return c
+ end
+ if dependent_vowel[v] then
+ c=getnext(c)
+ n=getnext(c)
+ if not n then
+ return c
+ end
+ v=ischar(n,font)
+ if not v then
+ return c
+ end
+ end
+ if nukta[v] then
+ c=getnext(c)
+ n=getnext(c)
+ if not n then
+ return c
+ end
+ v=ischar(n,font)
+ if not v then
+ return c
+ end
+ end
+ if halant[v] then
+ c=getnext(c)
+ n=getnext(c)
+ if not n then
+ return c
+ end
+ v=ischar(n,font)
+ if not v then
+ return c
+ end
+ end
+ if vowel_modifier[v] then
+ c=getnext(c)
+ n=getnext(c)
+ if not n then
+ return c
+ end
+ v=ischar(n,font)
+ if not v then
+ return c
+ end
+ end
+ if stress_tone_mark[v] then
+ c=getnext(c)
+ n=getnext(c)
+ if not n then
+ return c
+ end
+ v=ischar(n,font)
+ if not v then
+ return c
+ end
+ end
+ if stress_tone_mark[v] then
+ return n
+ else
+ return c
+ end
+end
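
-- The tail of analyze_next_chars_one consumes, in order, at most one char of
-- each optional trailing class (the real code additionally allows a second
-- stress/tone mark at the very end). The repeated blocks above boil down to
-- this loop, shown here on an array of class names instead of nodes:

local trailing = { "dependent_vowel", "nukta", "halant", "vowel_modifier", "stress_tone_mark" }

local function consume_trailing(syllable,i)  -- i: index of the last consumed char
    for k=1,#trailing do
        if syllable[i+1] == trailing[k] then
            i = i + 1                        -- this optional class matched once
        end
    end
    return i
end

-- consume_trailing({ "consonant", "nukta", "halant" }, 1) --> 3
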
+local function analyze_next_chars_two(c,font)
+ local n=getnext(c)
+ if not n then
+ return c
+ end
+ local v=ischar(n,font)
+ if v and nukta[v] then
+ c=n
+ end
+ n=c
+ while true do
+ local nn=getnext(n)
+ if nn then
+ local vv=ischar(nn,font)
+ if vv then
+ if halant[vv] then
+ n=nn
+ local nnn=getnext(nn)
+ if nnn then
+ local vvv=ischar(nnn,font)
+ if vvv and zw_char[vvv] then
+ n=nnn
+ end
end
- if kerns then
- for k,kern in next,kerns do
- local tv=tables[kern]
- if tv then
- kerns[k]=tv
- end
+ elseif vv==c_zwnj or vv==c_zwj then
+ local nnn=getnext(nn)
+ if nnn then
+ local vvv=ischar(nnn,font)
+ if vvv and halant[vvv] then
+ n=nnn
end
end
+ else
+ break
end
- local math=description.math
- if math then
- local kerns=math.kerns
- if kerns then
- local tm=tables[kerns]
- if tm then
- math.kerns=tm
- kerns=unpacked[tm]
- end
- if kerns then
- for k,kern in next,kerns do
- local tv=tables[kern]
- if tv then
- kerns[k]=tv
- end
+ local nn=getnext(n)
+ if nn then
+ local vv=ischar(nn,font)
+ if vv and consonant[vv] then
+ n=nn
+ local nnn=getnext(nn)
+ if nnn then
+ local vvv=ischar(nnn,font)
+ if vvv and nukta[vvv] then
+ n=nnn
end
end
+ c=n
+ else
+ break
end
+ else
+ break
end
- local anchors=description.anchors
- if anchors then
- local ta=tables[anchors]
- if ta then
- description.anchors=ta
- anchors=unpacked[ta]
- end
- if anchors then
- for tag,anchor in next,anchors do
- if tag=="baselig" then
- for _,list in next,anchor do
- for i=1,#list do
- local tv=tables[list[i]]
- if tv then
- list[i]=tv
- end
- end
- end
- else
- for a,data in next,anchor do
- local tv=tables[data]
- if tv then
- anchor[a]=tv
- end
- end
- end
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ if not c then
+ return
+ end
+ local n=getnext(c)
+ if not n then
+ return c
+ end
+ local v=ischar(n,font)
+ if not v then
+ return c
+ end
+ if v==c_anudatta then
+ c=n
+ n=getnext(c)
+ if not n then
+ return c
+ end
+ v=ischar(n,font)
+ if not v then
+ return c
+ end
+ end
+ if halant[v] then
+ c=n
+ n=getnext(c)
+ if not n then
+ return c
+ end
+ v=ischar(n,font)
+ if not v then
+ return c
+ end
+ if v==c_zwnj or v==c_zwj then
+ c=n
+ n=getnext(c)
+ if not n then
+ return c
+ end
+ v=ischar(n,font)
+ if not v then
+ return c
+ end
+ end
+ else
+ if dependent_vowel[v] then
+ c=n
+ n=getnext(c)
+ if not n then
+ return c
+ end
+ v=ischar(n,font)
+ if not v then
+ return c
+ end
+ end
+ if nukta[v] then
+ c=n
+ n=getnext(c)
+ if not n then
+ return c
+ end
+ v=ischar(n,font)
+ if not v then
+ return c
+ end
+ end
+ if halant[v] then
+ c=n
+ n=getnext(c)
+ if not n then
+ return c
+ end
+ v=ischar(n,font)
+ if not v then
+ return c
+ end
+ end
+ end
+ if vowel_modifier[v] then
+ c=n
+ n=getnext(c)
+ if not n then
+ return c
+ end
+ v=ischar(n,font)
+ if not v then
+ return c
+ end
+ end
+ if stress_tone_mark[v] then
+ c=n
+ n=getnext(c)
+ if not n then
+ return c
+ end
+ v=ischar(n,font)
+ if not v then
+ return c
+ end
+ end
+ if stress_tone_mark[v] then
+ return n
+ else
+ return c
+ end
+end
+local function inject_syntax_error(head,current,mark)
+ local signal=copy_node(current)
+ copyinjection(signal,current)
+ if mark==pre_mark then
+ setchar(signal,dotted_circle)
+ else
+ setchar(current,dotted_circle)
+ end
+ return insert_node_after(head,current,signal)
+end
+function methods.deva(head,font,attr)
+ head=tonut(head)
+ local current=head
+ local start=true
+ local done=false
+ local nbspaces=0
+ while current do
+ local char=ischar(current,font)
+ if char then
+ done=true
+ local syllablestart=current
+ local syllableend=nil
+ local c=current
+ local n=getnext(c)
+ local first=char
+ if n and ra[first] then
+ local second=ischar(n,font)
+ if second and halant[second] then
+ local n=getnext(n)
+ if n then
+ local third=ischar(n,font)
+ if third then
+ c=n
+ first=third
end
end
end
- local altuni=description.altuni
- if altuni then
- local altuni=tables[altuni]
- if altuni then
- description.altuni=altuni
- for i=1,#altuni do
- local tv=tables[altuni[i]]
- if tv then
- altuni[i]=tv
- end
- end
+ end
+ local standalone=first==c_nbsp
+ if standalone then
+ local prev=getprev(current)
+ if prev then
+ local prevchar=ischar(prev,font)
+ if not prevchar then
+ elseif not separator[prevchar] then
+ else
+ standalone=false
end
+ else
end
end
- local lookups=data.lookups
- if lookups then
- for _,lookup in next,lookups do
- local rules=lookup.rules
- if rules then
- for i=1,#rules do
- local rule=rules[i]
- local before=rule.before
- if before then
- local tv=tables[before]
- if tv then
- rule.before=tv
- before=unpacked[tv]
- end
- if before then
- for i=1,#before do
- local tv=tables[before[i]]
- if tv then
- before[i]=tv
- end
- end
- end
+ if standalone then
+ local syllableend=analyze_next_chars_one(c,font,2)
+ current=getnext(syllableend)
+ if syllablestart~=syllableend then
+ head,current,nbspaces=deva_reorder(head,syllablestart,syllableend,font,attr,nbspaces)
+ current=getnext(current)
+ end
+ else
+ if consonant[char] then
+ local prevc=true
+ while prevc do
+ prevc=false
+ local n=getnext(current)
+ if not n then
+ break
+ end
+ local v=ischar(n,font)
+ if not v then
+ break
+ end
+ if nukta[v] then
+ n=getnext(n)
+ if not n then
+ break
end
- local after=rule.after
- if after then
- local tv=tables[after]
- if tv then
- rule.after=tv
- after=unpacked[tv]
- end
- if after then
- for i=1,#after do
- local tv=tables[after[i]]
- if tv then
- after[i]=tv
- end
- end
- end
+ v=ischar(n,font)
+ if not v then
+ break
end
- local current=rule.current
- if current then
- local tv=tables[current]
- if tv then
- rule.current=tv
- current=unpacked[tv]
- end
- if current then
- for i=1,#current do
- local tv=tables[current[i]]
- if tv then
- current[i]=tv
- end
- end
- end
+ end
+ if halant[v] then
+ n=getnext(n)
+ if not n then
+ break
end
- local replacements=rule.replacements
- if replacements then
- local tv=tables[replacements]
- if tv then
- rule.replacements=tv
- end
+ v=ischar(n,font)
+ if not v then
+ break
end
- local lookups=rule.lookups
- if lookups then
- local tv=tables[lookups]
- if tv then
- rule.lookups=tv
+ if v==c_zwnj or v==c_zwj then
+ n=getnext(n)
+ if not n then
+ break
+ end
+ v=ischar(n,font)
+ if not v then
+ break
end
end
+ if consonant[v] then
+ prevc=true
+ current=n
+ end
end
end
- end
- end
- local anchor_to_lookup=resources.anchor_to_lookup
- if anchor_to_lookup then
- for anchor,lookup in next,anchor_to_lookup do
- local tv=tables[lookup]
- if tv then
- anchor_to_lookup[anchor]=tv
- end
- end
- end
- local lookup_to_anchor=resources.lookup_to_anchor
- if lookup_to_anchor then
- for lookup,anchor in next,lookup_to_anchor do
- local tv=tables[anchor]
- if tv then
- lookup_to_anchor[lookup]=tv
- end
- end
- end
- local ls=resources.sequences
- if ls then
- for _,feature in next,ls do
- local flags=feature.flags
- if flags then
- local tv=tables[flags]
- if tv then
- feature.flags=tv
- end
- end
- local subtables=feature.subtables
- if subtables then
- local tv=tables[subtables]
- if tv then
- feature.subtables=tv
+ local n=getnext(current)
+ if n then
+ local v=ischar(n,font)
+ if v and nukta[v] then
+ current=n
+ n=getnext(current)
end
end
- local features=feature.features
- if features then
- local tv=tables[features]
- if tv then
- feature.features=tv
- features=unpacked[tv]
- end
- if features then
- for script,data in next,features do
- local tv=tables[data]
- if tv then
- features[script]=tv
+ syllableend=current
+ current=n
+ if current then
+ local v=ischar(current,font)
+ if not v then
+ elseif halant[v] then
+ local n=getnext(current)
+ if n then
+ local v=ischar(n,font)
+ if v and zw_char[v] then
+ syllableend=n
+ current=getnext(n)
+ else
+ syllableend=current
+ current=n
end
+ else
+ syllableend=current
+ current=n
+ end
+ else
+ if dependent_vowel[v] then
+ syllableend=current
+ current=getnext(current)
+ v=ischar(current,font)
+ end
+ if v and vowel_modifier[v] then
+ syllableend=current
+ current=getnext(current)
+ v=ischar(current,font)
+ end
+ if v and stress_tone_mark[v] then
+ syllableend=current
+ current=getnext(current)
end
end
end
- local order=feature.order
- if order then
- local tv=tables[order]
- if tv then
- feature.order=tv
- end
+ if syllablestart~=syllableend then
+ head,current,nbspaces=deva_reorder(head,syllablestart,syllableend,font,attr,nbspaces)
+ current=getnext(current)
end
- local markclass=feature.markclass
- if markclass then
- local tv=tables[markclass]
- if tv then
- feature.markclass=tv
+ elseif independent_vowel[char] then
+ syllableend=current
+ current=getnext(current)
+ if current then
+ local v=ischar(current,font)
+ if v then
+ if vowel_modifier[v] then
+ syllableend=current
+ current=getnext(current)
+ v=ischar(current,font)
+ end
+ if v and stress_tone_mark[v] then
+ syllableend=current
+ current=getnext(current)
+ end
end
end
+ else
+ local mark=mark_four[char]
+ if mark then
+ head,current=inject_syntax_error(head,current,mark)
+ end
+ current=getnext(current)
end
end
- local lookups=resources.lookups
- if lookups then
- for _,lookup in next,lookups do
- local flags=lookup.flags
- if flags then
- local tv=tables[flags]
- if tv then
- lookup.flags=tv
- end
- end
- local subtables=lookup.subtables
- if subtables then
- local tv=tables[subtables]
- if tv then
- lookup.subtables=tv
+ else
+ current=getnext(current)
+ end
+ start=false
+ end
+ if nbspaces>0 then
+ head=replace_all_nbsp(head)
+ end
+ head=tonode(head)
+ return head,done
+end
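
-- methods.deva (and dev2 below) plug into the analyzer by script tag: the
-- dispatcher hands them (head,font,attr) and expects head plus a done flag
-- back (compare analyzeprocessor in the removed luatex-fonts-ota.lua further
-- down in this diff). A toy version of that dispatch with a fake handler:

local scriptmethods = {
    deva = function(head,font,attr) return head, true end,
}
scriptmethods.mlym = scriptmethods.deva     -- Malayalam reuses the Devanagari handler

local function analyze(script,head,font,attr)
    local action = scriptmethods[script]
    if action then
        return action(head,font,attr)
    end
    return head, false
end
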
+function methods.dev2(head,font,attr)
+ head=tonut(head)
+ local current=head
+ local start=true
+ local done=false
+ local syllabe=0
+ local nbspaces=0
+ while current do
+ local syllablestart=nil
+ local syllableend=nil
+ local char=ischar(current,font)
+ if char then
+ done=true
+ syllablestart=current
+ local c=current
+ local n=getnext(current)
+ if n and ra[char] then
+ local nextchar=ischar(n,font)
+ if nextchar and halant[nextchar] then
+ local n=getnext(n)
+ if n then
+ local nextnextchar=ischar(n,font)
+ if nextnextchar then
+ c=n
+ char=nextnextchar
end
end
end
end
- local features=resources.features
- if features then
- for _,what in next,glists do
- local feature=features[what]
- if feature then
- for tag,spec in next,feature do
- local tv=tables[spec]
- if tv then
- feature[tag]=tv
- end
- end
+ if independent_vowel[char] then
+ current=analyze_next_chars_one(c,font,1)
+ syllableend=current
+ else
+ local standalone=char==c_nbsp
+ if standalone then
+ nbspaces=nbspaces+1
+ local p=getprev(current)
+ if not p then
+ elseif ischar(p,font) then
+ elseif not separator[getchar(p)] then
+ else
+ standalone=false
end
end
+ if standalone then
+ current=analyze_next_chars_one(c,font,2)
+ syllableend=current
+ elseif consonant[getchar(current)] then
+ current=analyze_next_chars_two(current,font)
+ syllableend=current
+ end
+ end
+ end
+ if syllableend then
+ syllabe=syllabe+1
+ local c=syllablestart
+ local n=getnext(syllableend)
+ while c~=n do
+ setprop(c,a_syllabe,syllabe)
+ c=getnext(c)
end
- data.tables=nil
end
+ if syllableend and syllablestart~=syllableend then
+ head,current,nbspaces=dev2_reorder(head,syllablestart,syllableend,font,attr,nbspaces)
+ end
+ if not syllableend then
+ local char=ischar(current,font)
+ if char and not getprop(current,a_state) then
+ local mark=mark_four[char]
+ if mark then
+ head,current=inject_syntax_error(head,current,mark)
+ end
+ end
+ end
+ start=false
+ current=getnext(current)
end
+ if nbspaces>0 then
+ head=replace_all_nbsp(head)
+ end
+ head=tonode(head)
+ return head,done
end
-if otf.enhancers.register then
- otf.enhancers.register("pack",packdata)
- otf.enhancers.register("unpack",unpackdata)
-end
-otf.enhancers.unpack=unpackdata
-otf.enhancers.pack=packdata
+methods.mlym=methods.deva
+methods.mlm2=methods.dev2
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules={} end modules ['luatex-fonts-lua']={
+if not modules then modules={} end modules ['font-lua']={
version=1.001,
- comment="companion to luatex-*.tex",
+ comment="companion to font-ini.mkiv",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
-end
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local report_lua=logs.reporter("fonts","lua loading")
local fonts=fonts
+local readers=fonts.readers
fonts.formats.lua="lua"
-function fonts.readers.lua(specification)
+local function check_lua(specification,fullname)
+ local fullname=resolvers.findfile(fullname) or ""
+ if fullname~="" then
+ local loader=loadfile(fullname)
+ loader=loader and loader()
+ return loader and loader(specification)
+ end
+end
+readers.check_lua=check_lua
+function readers.lua(specification)
+ local original=specification.specification
+ if trace_defining then
+ report_lua("using lua reader for %a",original)
+ end
local fullname=specification.filename or ""
if fullname=="" then
local forced=specification.forced or ""
@@ -15819,12 +23002,7 @@ function fonts.readers.lua(specification)
fullname=specification.name
end
end
- local fullname=resolvers.findfile(fullname) or ""
- if fullname~="" then
- local loader=loadfile(fullname)
- loader=loader and loader()
- return loader and loader(specification)
- end
+ return check_lua(specification,fullname)
end
end -- closure
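
-- readers.lua above resolves the file, loads it with loadfile and calls the
-- chunk; that chunk must return a function mapping the specification to a
-- font table (as accepted by the define_font callback). A deliberately tiny
-- example of such a font file; the field names and values are illustrative
-- only, not a documented contract:

-- content of a hypothetical demo-font.lua:
return function(specification)
    local size = specification.size or 655360      -- 10pt in scaled points
    return {
        name       = specification.name or "demo-font",
        parameters = { size = size },
        characters = { },                           -- glyph metrics go here
    }
end
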
@@ -16465,7 +23643,7 @@ end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules={} end modules ['luatex-fonts-cbk']={
+if not modules then modules={} end modules ['font-gbn']={
version=1.001,
comment="companion to luatex-*.tex",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -16478,45 +23656,59 @@ if context then
end
local fonts=fonts
local nodes=nodes
-local traverse_id=node.traverse_id
-local free_node=node.free
-local remove_node=node.remove
+local nuts=nodes.nuts
+local traverse_id=nuts.traverse_id
+local free_node=nuts.free
+local remove_node=nuts.remove
local glyph_code=nodes.nodecodes.glyph
local disc_code=nodes.nodecodes.disc
-local ligaturing=node.ligaturing
-local kerning=node.kerning
-local basepass=true
+local tonode=nuts.tonode
+local tonut=nuts.tonut
+local getfont=nuts.getfont
+local getchar=nuts.getchar
+local getid=nuts.getid
+local getprev=nuts.getprev
+local getnext=nuts.getnext
+local getdisc=nuts.getdisc
+local setchar=nuts.setchar
+local setlink=nuts.setlink
+local n_ligaturing=node.ligaturing
+local n_kerning=node.kerning
+local ligaturing=nuts.ligaturing
+local kerning=nuts.kerning
+local basemodepass=true
local function l_warning() texio.write_nl("warning: node.ligaturing called directly") l_warning=nil end
local function k_warning() texio.write_nl("warning: node.kerning called directly") k_warning=nil end
function node.ligaturing(...)
- if basepass and l_warning then
+ if basemodepass and l_warning then
l_warning()
end
- return ligaturing(...)
+ return n_ligaturing(...)
end
function node.kerning(...)
- if basepass and k_warning then
+ if basemodepass and k_warning then
k_warning()
end
- return kerning(...)
+ return n_kerning(...)
end
-function nodes.handlers.setbasepass(v)
- basepass=v
+function nodes.handlers.setbasemodepass(v)
+ basemodepass=v
end
function nodes.handlers.nodepass(head)
local fontdata=fonts.hashes.identifiers
if fontdata then
+ local nuthead=tonut(head)
local usedfonts={}
local basefonts={}
local prevfont=nil
local basefont=nil
local variants=nil
local redundant=nil
- for n in traverse_id(glyph_code,head) do
- local font=n.font
+ for n in traverse_id(glyph_code,nuthead) do
+ local font=getfont(n)
if font~=prevfont then
if basefont then
- basefont[2]=n.prev
+ basefont[2]=getprev(n)
end
prevfont=font
local used=usedfonts[font]
@@ -16528,7 +23720,7 @@ function nodes.handlers.nodepass(head)
local processors=shared.processes
if processors and #processors>0 then
usedfonts[font]=processors
- elseif basepass then
+ elseif basemodepass then
basefont={ n,nil }
basefonts[#basefonts+1]=basefont
end
@@ -16547,15 +23739,15 @@ function nodes.handlers.nodepass(head)
end
end
if variants then
- local char=n.char
+ local char=getchar(n)
if char>=0xFE00 and (char<=0xFE0F or (char>=0xE0100 and char<=0xE01EF)) then
local hash=variants[char]
if hash then
- local p=n.prev
- if p and p.id==glyph_code then
- local variant=hash[p.char]
+ local p=getprev(n)
+ if p and getid(p)==glyph_code then
+ local variant=hash[getchar(p)]
if variant then
- p.char=variant
+ setchar(p,variant)
if not redundant then
redundant={ n }
else
@@ -16570,15 +23762,15 @@ function nodes.handlers.nodepass(head)
if redundant then
for i=1,#redundant do
local n=redundant[i]
- remove_node(head,n)
+ remove_node(nuthead,n)
free_node(n)
end
end
- for d in traverse_id(disc_code,head) do
- local r=d.replace
+ for d in traverse_id(disc_code,nuthead) do
+ local _,_,r=getdisc(d)
if r then
for n in traverse_id(glyph_code,r) do
- local font=n.font
+ local font=getfont(n)
if font~=prevfont then
prevfont=font
local used=usedfonts[font]
@@ -16605,34 +23797,31 @@ function nodes.handlers.nodepass(head)
end
end
end
- if basepass and #basefonts>0 then
+ if basemodepass and #basefonts>0 then
for i=1,#basefonts do
local range=basefonts[i]
local start=range[1]
local stop=range[2]
- if start or stop then
- local prev=nil
- local next=nil
- local front=start==head
+ if start then
+ local front=nuthead==start
+ local prev,next
if stop then
- next=stop.next
+ next=getnext(stop)
start,stop=ligaturing(start,stop)
start,stop=kerning(start,stop)
- elseif start then
- prev=start.prev
+ else
+ prev=getprev(start)
start=ligaturing(start)
start=kerning(start)
end
if prev then
- start.prev=prev
- prev.next=start
+ setlink(prev,start)
end
if next then
- stop.next=next
- next.prev=stop
+ setlink(stop,next)
end
- if front then
- head=start
+ if front and nuthead~=start then
+ head=tonode(start)
end
end
end
@@ -16643,9 +23832,9 @@ function nodes.handlers.nodepass(head)
end
end
function nodes.handlers.basepass(head)
- if basepass then
- head=ligaturing(head)
- head=kerning(head)
+ if not basemodepass then
+ head=n_ligaturing(head)
+ head=n_kerning(head)
end
return head,true
end
@@ -16657,7 +23846,9 @@ function nodes.simple_font_handler(head)
if head then
head=nodepass(head)
head=injectpass(head)
- head=basepass(head)
+ if not basemodepass then
+ head=basepass(head)
+ end
protectpass(head)
return head,true
else
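
-- The node pass above sorts glyph runs into two buckets: fonts with node mode
-- processors are handled in Lua, the remaining runs are collected as base
-- mode ranges and later fed to the engine's ligaturing and kerning. A
-- stripped down illustration of that bucketing over a plain array (the real
-- code walks node lists and font ids):

local function partition(glyphs,hasprocessors)
    local used, ranges, first = { }, { }, nil
    for i=1,#glyphs do
        if hasprocessors[glyphs[i].font] then
            used[glyphs[i].font] = true
            if first then ranges[#ranges+1] = { first, i-1 } first = nil end
        elseif not first then
            first = i
        end
    end
    if first then ranges[#ranges+1] = { first, #glyphs } end
    return used, ranges
end

-- partition({ { font=1 }, { font=2 }, { font=2 } }, { [1]=true })
-- --> used = { [1]=true }, ranges = { { 2, 3 } }
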
diff --git a/tex/generic/context/luatex/luatex-fonts-ota.lua b/tex/generic/context/luatex/luatex-fonts-ota.lua
deleted file mode 100644
index 256ead5a5..000000000
--- a/tex/generic/context/luatex/luatex-fonts-ota.lua
+++ /dev/null
@@ -1,451 +0,0 @@
-if not modules then modules = { } end modules ['luatex-fonts-ota'] = {
- version = 1.001,
- comment = "companion to font-otf.lua (analysing)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local type = type
-
-if not trackers then trackers = { register = function() end } end
-
------ trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end)
-
-local fonts, nodes, node = fonts, nodes, node
-
-local allocate = utilities.storage.allocate
-
-local otf = fonts.handlers.otf
-
-local analyzers = fonts.analyzers
-local initializers = allocate()
-local methods = allocate()
-
-analyzers.initializers = initializers
-analyzers.methods = methods
-
-local a_state = attributes.private('state')
-
-local nuts = nodes.nuts
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local getnext = nuts.getnext
-local getprev = nuts.getprev
-local getid = nuts.getid
-local getprop = nuts.getprop
-local setprop = nuts.setprop
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
-
-local traverse_id = nuts.traverse_id
-local traverse_node_list = nuts.traverse
-local end_of_math = nuts.end_of_math
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local math_code = nodecodes.math
-
-local fontdata = fonts.hashes.identifiers
-local categories = characters and characters.categories or { } -- sorry, only in context
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
---[[ldx--
-<p>Analyzers run per script and/or language and are needed in order to
-process features right.</p>
---ldx]]--
-
--- never use these numbers directly
-
-local s_init = 1 local s_rphf = 7
-local s_medi = 2 local s_half = 8
-local s_fina = 3 local s_pref = 9
-local s_isol = 4 local s_blwf = 10
-local s_mark = 5 local s_pstf = 11
-local s_rest = 6
-
-local states = {
- init = s_init,
- medi = s_medi,
- fina = s_fina,
- isol = s_isol,
- mark = s_mark,
- rest = s_rest,
- rphf = s_rphf,
- half = s_half,
- pref = s_pref,
- blwf = s_blwf,
- pstf = s_pstf,
-}
-
-local features = {
- init = s_init,
- medi = s_medi,
- fina = s_fina,
- isol = s_isol,
- -- mark = s_mark,
- -- rest = s_rest,
- rphf = s_rphf,
- half = s_half,
- pref = s_pref,
- blwf = s_blwf,
- pstf = s_pstf,
-}
-
-analyzers.states = states
-analyzers.features = features
-analyzers.useunicodemarks = false
-
--- todo: analyzers per script/lang, cross font, so we need an font id hash -> script
--- e.g. latin -> hyphenate, arab -> 1/2/3 analyze -- its own namespace
-
-function analyzers.setstate(head,font)
- local useunicodemarks = analyzers.useunicodemarks
- local tfmdata = fontdata[font]
- local descriptions = tfmdata.descriptions
- local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
- current = tonut(current)
- while current do
- local id = getid(current)
- if id == glyph_code and getfont(current) == font then
- done = true
- local char = getchar(current)
- local d = descriptions[char]
- if d then
- if d.class == "mark" then
- done = true
- setprop(current,a_state,s_mark)
- elseif useunicodemarks and categories[char] == "mn" then
- done = true
- setprop(current,a_state,s_mark)
- elseif n == 0 then
- first, last, n = current, current, 1
- setprop(current,a_state,s_init)
- else
- last, n = current, n+1
- setprop(current,a_state,s_medi)
- end
- else -- finish
- if first and first == last then
- setprop(last,a_state,s_isol)
- elseif last then
- setprop(last,a_state,s_fina)
- end
- first, last, n = nil, nil, 0
- end
- elseif id == disc_code then
- -- always in the middle .. it doesn't make much sense to assign a property
- -- here ... we might at some point decide to flag the components when present
- -- but even then it's kind of bogus
- setprop(current,a_state,s_medi)
- last = current
- else -- finish
- if first and first == last then
- setprop(last,a_state,s_isol)
- elseif last then
- setprop(last,a_state,s_fina)
- end
- first, last, n = nil, nil, 0
- if id == math_code then
- current = end_of_math(current)
- end
- end
- current = getnext(current)
- end
- if first and first == last then
- setprop(last,a_state,s_isol)
- elseif last then
- setprop(last,a_state,s_fina)
- end
- return head, done
-end
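
-- analyzers.setstate tags a same-font run as init/medi/... and retags the run
-- end as fina (or isol for a run of one) when the run closes. The same state
-- machine on an array of booleans ("is this slot a glyph of our font?"),
-- leaving out the mark and disc cases to keep the sketch short:

local function states(slots)
    local result, first, last = { }, nil, nil
    local function finish()
        if first and first == last then
            result[first] = "isol"
        elseif last then
            result[last] = "fina"
        end
        first, last = nil, nil
    end
    for i=1,#slots do
        if slots[i] then
            result[i] = first and "medi" or "init"
            first, last = first or i, i
        else
            finish()
        end
    end
    finish()
    return result
end

-- states({ true, true, false, true }) --> { "init", "fina", nil, "isol" }
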
-
--- in the future we will use language/script attributes instead of the
--- font related value, but then we also need dynamic features which is
--- somewhat slower; and .. we need a chain of them
-
-local function analyzeinitializer(tfmdata,value) -- attr
- local script, language = otf.scriptandlanguage(tfmdata) -- attr
- local action = initializers[script]
- if not action then
- -- skip
- elseif type(action) == "function" then
- return action(tfmdata,value)
- else
- local action = action[language]
- if action then
- return action(tfmdata,value)
- end
- end
-end
-
-local function analyzeprocessor(head,font,attr)
- local tfmdata = fontdata[font]
- local script, language = otf.scriptandlanguage(tfmdata,attr)
- local action = methods[script]
- if not action then
- -- skip
- elseif type(action) == "function" then
- return action(head,font,attr)
- else
- action = action[language]
- if action then
- return action(head,font,attr)
- end
- end
- return head, false
-end
-
-registerotffeature {
- name = "analyze",
- description = "analysis of character classes",
- default = true,
- initializers = {
- node = analyzeinitializer,
- },
- processors = {
- position = 1,
- node = analyzeprocessor,
- }
-}
-
--- latin
-
-methods.latn = analyzers.setstate
-
-
-local tatweel = 0x0640
-local zwnj = 0x200C
-local zwj = 0x200D
-
-local isolated = { -- isol
- [0x0600] = true, [0x0601] = true, [0x0602] = true, [0x0603] = true,
- [0x0604] = true,
- [0x0608] = true, [0x060B] = true, [0x0621] = true, [0x0674] = true,
- [0x06DD] = true,
- -- mandaic
- [0x0856] = true, [0x0858] = true, [0x0857] = true,
- -- n'ko
- [0x07FA] = true,
- -- also here:
- [zwnj] = true,
- -- 7
- [0x08AD] = true,
-}
-
-local final = { -- isol_fina
- [0x0622] = true, [0x0623] = true, [0x0624] = true, [0x0625] = true,
- [0x0627] = true, [0x0629] = true, [0x062F] = true, [0x0630] = true,
- [0x0631] = true, [0x0632] = true, [0x0648] = true, [0x0671] = true,
- [0x0672] = true, [0x0673] = true, [0x0675] = true, [0x0676] = true,
- [0x0677] = true, [0x0688] = true, [0x0689] = true, [0x068A] = true,
- [0x068B] = true, [0x068C] = true, [0x068D] = true, [0x068E] = true,
- [0x068F] = true, [0x0690] = true, [0x0691] = true, [0x0692] = true,
- [0x0693] = true, [0x0694] = true, [0x0695] = true, [0x0696] = true,
- [0x0697] = true, [0x0698] = true, [0x0699] = true, [0x06C0] = true,
- [0x06C3] = true, [0x06C4] = true, [0x06C5] = true, [0x06C6] = true,
- [0x06C7] = true, [0x06C8] = true, [0x06C9] = true, [0x06CA] = true,
- [0x06CB] = true, [0x06CD] = true, [0x06CF] = true, [0x06D2] = true,
- [0x06D3] = true, [0x06D5] = true, [0x06EE] = true, [0x06EF] = true,
- [0x0759] = true, [0x075A] = true, [0x075B] = true, [0x076B] = true,
- [0x076C] = true, [0x0771] = true, [0x0773] = true, [0x0774] = true,
- [0x0778] = true, [0x0779] = true,
- [0x08AA] = true, [0x08AB] = true, [0x08AC] = true,
- [0xFEF5] = true, [0xFEF7] = true, [0xFEF9] = true, [0xFEFB] = true,
- -- syriac
- [0x0710] = true, [0x0715] = true, [0x0716] = true, [0x0717] = true,
- [0x0718] = true, [0x0719] = true, [0x0728] = true, [0x072A] = true,
- [0x072C] = true, [0x071E] = true,
- [0x072F] = true, [0x074D] = true,
- -- mandaic
- [0x0840] = true, [0x0849] = true, [0x0854] = true, [0x0846] = true,
- [0x084F] = true,
- -- 7
- [0x08AE] = true, [0x08B1] = true, [0x08B2] = true,
-}
-
-local medial = { -- isol_fina_medi_init
- [0x0626] = true, [0x0628] = true, [0x062A] = true, [0x062B] = true,
- [0x062C] = true, [0x062D] = true, [0x062E] = true, [0x0633] = true,
- [0x0634] = true, [0x0635] = true, [0x0636] = true, [0x0637] = true,
- [0x0638] = true, [0x0639] = true, [0x063A] = true, [0x063B] = true,
- [0x063C] = true, [0x063D] = true, [0x063E] = true, [0x063F] = true,
- [0x0641] = true, [0x0642] = true, [0x0643] = true,
- [0x0644] = true, [0x0645] = true, [0x0646] = true, [0x0647] = true,
- [0x0649] = true, [0x064A] = true, [0x066E] = true, [0x066F] = true,
- [0x0678] = true, [0x0679] = true, [0x067A] = true, [0x067B] = true,
- [0x067C] = true, [0x067D] = true, [0x067E] = true, [0x067F] = true,
- [0x0680] = true, [0x0681] = true, [0x0682] = true, [0x0683] = true,
- [0x0684] = true, [0x0685] = true, [0x0686] = true, [0x0687] = true,
- [0x069A] = true, [0x069B] = true, [0x069C] = true, [0x069D] = true,
- [0x069E] = true, [0x069F] = true, [0x06A0] = true, [0x06A1] = true,
- [0x06A2] = true, [0x06A3] = true, [0x06A4] = true, [0x06A5] = true,
- [0x06A6] = true, [0x06A7] = true, [0x06A8] = true, [0x06A9] = true,
- [0x06AA] = true, [0x06AB] = true, [0x06AC] = true, [0x06AD] = true,
- [0x06AE] = true, [0x06AF] = true, [0x06B0] = true, [0x06B1] = true,
- [0x06B2] = true, [0x06B3] = true, [0x06B4] = true, [0x06B5] = true,
- [0x06B6] = true, [0x06B7] = true, [0x06B8] = true, [0x06B9] = true,
- [0x06BA] = true, [0x06BB] = true, [0x06BC] = true, [0x06BD] = true,
- [0x06BE] = true, [0x06BF] = true, [0x06C1] = true, [0x06C2] = true,
- [0x06CC] = true, [0x06CE] = true, [0x06D0] = true, [0x06D1] = true,
- [0x06FA] = true, [0x06FB] = true, [0x06FC] = true, [0x06FF] = true,
- [0x0750] = true, [0x0751] = true, [0x0752] = true, [0x0753] = true,
- [0x0754] = true, [0x0755] = true, [0x0756] = true, [0x0757] = true,
- [0x0758] = true, [0x075C] = true, [0x075D] = true, [0x075E] = true,
- [0x075F] = true, [0x0760] = true, [0x0761] = true, [0x0762] = true,
- [0x0763] = true, [0x0764] = true, [0x0765] = true, [0x0766] = true,
- [0x0767] = true, [0x0768] = true, [0x0769] = true, [0x076A] = true,
- [0x076D] = true, [0x076E] = true, [0x076F] = true, [0x0770] = true,
- [0x0772] = true, [0x0775] = true, [0x0776] = true, [0x0777] = true,
- [0x077A] = true, [0x077B] = true, [0x077C] = true, [0x077D] = true,
- [0x077E] = true, [0x077F] = true,
- [0x08A0] = true, [0x08A2] = true, [0x08A4] = true, [0x08A5] = true,
- [0x08A6] = true, [0x0620] = true, [0x08A8] = true, [0x08A9] = true,
- [0x08A7] = true, [0x08A3] = true,
- -- syriac
- [0x0712] = true, [0x0713] = true, [0x0714] = true, [0x071A] = true,
- [0x071B] = true, [0x071C] = true, [0x071D] = true, [0x071F] = true,
- [0x0720] = true, [0x0721] = true, [0x0722] = true, [0x0723] = true,
- [0x0724] = true, [0x0725] = true, [0x0726] = true, [0x0727] = true,
- [0x0729] = true, [0x072B] = true, [0x072D] = true, [0x072E] = true,
- [0x074E] = true, [0x074F] = true,
- -- mandaic
- [0x0841] = true, [0x0842] = true, [0x0843] = true, [0x0844] = true,
- [0x0845] = true, [0x0847] = true, [0x0848] = true, [0x0855] = true,
- [0x0851] = true, [0x084E] = true, [0x084D] = true, [0x084A] = true,
- [0x084B] = true, [0x084C] = true, [0x0850] = true, [0x0852] = true,
- [0x0853] = true,
- -- n'ko
- [0x07D7] = true, [0x07E8] = true, [0x07D9] = true, [0x07EA] = true,
- [0x07CA] = true, [0x07DB] = true, [0x07CC] = true, [0x07DD] = true,
- [0x07CE] = true, [0x07DF] = true, [0x07D4] = true, [0x07E5] = true,
- [0x07E9] = true, [0x07E7] = true, [0x07E3] = true, [0x07E2] = true,
- [0x07E0] = true, [0x07E1] = true, [0x07DE] = true, [0x07DC] = true,
- [0x07D1] = true, [0x07DA] = true, [0x07D8] = true, [0x07D6] = true,
- [0x07D2] = true, [0x07D0] = true, [0x07CF] = true, [0x07CD] = true,
- [0x07CB] = true, [0x07D3] = true, [0x07E4] = true, [0x07D5] = true,
- [0x07E6] = true,
- -- also here:
- [tatweel]= true, [zwj] = true,
- -- 7
- [0x08A1] = true, [0x08AF] = true, [0x08B0] = true,
-}
-
-local arab_warned = { }
-
-local function warning(current,what)
- local char = getchar(current)
- if not arab_warned[char] then
- log.report("analyze","arab: character %C has no %a class",char,what)
- arab_warned[char] = true
- end
-end
-
--- potential optimization: local medial_final = table.merged(medial,final)
-
-local function finish(first,last)
- if last then
- if first == last then
- local fc = getchar(first)
- if medial[fc] or final[fc] then
- setprop(first,a_state,s_isol)
- else
- warning(first,"isol")
- setprop(first,a_state,s_error)
- end
- else
- local lc = getchar(last)
- if medial[lc] or final[lc] then
- -- if laststate == 1 or laststate == 2 or laststate == 4 then
- setprop(last,a_state,s_fina)
- else
- warning(last,"fina")
- setprop(last,a_state,s_error)
- end
- end
- first, last = nil, nil
- elseif first then
- -- first and last are either both set so we never com here
- local fc = getchar(first)
- if medial[fc] or final[fc] then
- setprop(first,a_state,s_isol)
- else
- warning(first,"isol")
- setprop(first,a_state,s_error)
- end
- first = nil
- end
- return first, last
-end
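
-- finish() above closes a joining run: a run of one becomes isol, the last
-- glyph of a longer run becomes fina, and in either case only characters that
-- can actually join (members of medial or final) qualify; anything else gets
-- the error state. The same decision on plain values:

local function close_run(firstchar,lastchar,medial,final)
    local function joins(c) return medial[c] or final[c] end
    if firstchar == lastchar then
        return joins(firstchar) and "isol" or "error"
    else
        return joins(lastchar) and "fina" or "error"
    end
end
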
-
-function methods.arab(head,font,attr)
- local useunicodemarks = analyzers.useunicodemarks
- local tfmdata = fontdata[font]
- local marks = tfmdata.resources.marks
- local first, last, current, done = nil, nil, head, false
- current = tonut(current)
- while current do
- local id = getid(current)
- if id == glyph_code and getfont(current) == font and getsubtype(current)<256 and not getprop(current,a_state) then
- done = true
- local char = getchar(current)
- if marks[char] or (useunicodemarks and categories[char] == "mn") then
- setprop(current,a_state,s_mark)
- elseif isolated[char] then -- can be zwj or zwnj too
- first, last = finish(first,last)
- setprop(current,a_state,s_isol)
- first, last = nil, nil
- elseif not first then
- if medial[char] then
- setprop(current,a_state,s_init)
- first, last = first or current, current
- elseif final[char] then
- setprop(current,a_state,s_isol)
- first, last = nil, nil
- else -- no arab
- first, last = finish(first,last)
- end
- elseif medial[char] then
- first, last = first or current, current
- setprop(current,a_state,s_medi)
- elseif final[char] then
- if getprop(last,a_state) ~= s_init then
- -- tricky, we need to check what last may be !
- setprop(last,a_state,s_medi)
- end
- setprop(current,a_state,s_fina)
- first, last = nil, nil
- elseif char >= 0x0600 and char <= 0x06FF then -- needs checking
- setprop(current,a_state,s_rest)
- first, last = finish(first,last)
- else -- no
- first, last = finish(first,last)
- end
- else
- if first or last then
- first, last = finish(first,last)
- end
- if id == math_code then
- current = end_of_math(current)
- end
- end
- current = getnext(current)
- end
- if first or last then
- finish(first,last)
- end
- return head, done
-end
-
-methods.syrc = methods.arab
-methods.mand = methods.arab
-methods.nko = methods.arab
-
-directives.register("otf.analyze.useunicodemarks",function(v)
- analyzers.useunicodemarks = v
-end)
diff --git a/tex/generic/context/luatex/luatex-fonts-syn.lua b/tex/generic/context/luatex/luatex-fonts-syn.lua
index f03d558bf..376fd05fb 100644
--- a/tex/generic/context/luatex/luatex-fonts-syn.lua
+++ b/tex/generic/context/luatex/luatex-fonts-syn.lua
@@ -36,15 +36,14 @@ end
local fonts = fonts
fonts.names = fonts.names or { }
-fonts.names.version = 1.001 -- not the same as in context but matches mtx-fonts --simple
-fonts.names.basename = "luatex-fonts-names"
-fonts.names.new_to_old = { }
-fonts.names.old_to_new = { }
-fonts.names.cache = containers.define("fonts","data",fonts.names.version,true)
+fonts.names.version = 1.001 -- not the same as in context but matches mtx-fonts --simple
+fonts.names.basename = "luatex-fonts-names"
+fonts.names.cache = containers.define("fonts","data",fonts.names.version,true)
-local data, loaded = nil, false
+local data = nil
+local loaded = false
-local fileformats = { "lua", "tex", "other text files" }
+local fileformats = { "lua", "tex", "other text files" }
function fonts.names.reportmissingbase()
texio.write("<missing font database, run: mtxrun --script fonts --reload --simple>")
diff --git a/tex/generic/context/luatex/luatex-fonts-tfm.lua b/tex/generic/context/luatex/luatex-fonts-tfm.lua
deleted file mode 100644
index b9bb1bd0f..000000000
--- a/tex/generic/context/luatex/luatex-fonts-tfm.lua
+++ /dev/null
@@ -1,38 +0,0 @@
-if not modules then modules = { } end modules ['luatex-fonts-tfm'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
-end
-
-local fonts = fonts
-local tfm = { }
-fonts.handlers.tfm = tfm
-fonts.formats.tfm = "type1" -- we need to have at least a value here
-
-function fonts.readers.tfm(specification)
- local fullname = specification.filename or ""
- if fullname == "" then
- local forced = specification.forced or ""
- if forced ~= "" then
- fullname = specification.name .. "." .. forced
- else
- fullname = specification.name
- end
- end
- local foundname = resolvers.findbinfile(fullname, 'tfm') or ""
- if foundname == "" then
- foundname = resolvers.findbinfile(fullname, 'ofm') or ""
- end
- if foundname ~= "" then
- specification.filename = foundname
- specification.format = "ofm"
- return font.read_tfm(specification.filename,specification.size)
- end
-end
diff --git a/tex/generic/context/luatex/luatex-fonts.lua b/tex/generic/context/luatex/luatex-fonts.lua
index 2e34fb801..e1ec3764e 100644
--- a/tex/generic/context/luatex/luatex-fonts.lua
+++ b/tex/generic/context/luatex/luatex-fonts.lua
@@ -6,6 +6,18 @@ if not modules then modules = { } end modules ['luatex-fonts'] = {
license = "see context related readme files"
}
+-- A merged file is generated with:
+--
+-- mtxrun --script package --merge ./luatex-fonts.lua
+--
+-- A needed resource file is made by:
+--
+-- mtxrun --script context luatex-basics-prepare.tex
+--
+-- A font (generic) database is created with:
+--
+-- mtxrun --script font --reload --simple
+
-- The following code isolates the generic context code from already defined or to be defined
-- namespaces. This is the reference loader for plain tex. This generic code is also used in
-- luaotfload which is a low level lualatex opentype font loader but somehow has gotten a bit
@@ -174,62 +186,91 @@ if non_generic_context.luatex_fonts.skip_loading ~= true then
loadmodule("l-file.lua")
loadmodule("l-boolean.lua")
loadmodule("l-math.lua")
+
+ -- A few slightly higher level support modules:
+
loadmodule("util-str.lua")
+ loadmodule("util-fil.lua")
- -- The following modules contain code that is either not used at all outside context or will fail
- -- when enabled due to lack of other modules.
+ -- The following modules contain code that is either not used at all outside context or will
+ -- fail when enabled due to lack of other modules.
- -- First we load a few helper modules. This is about the miminum needed to let the font modules do
- -- their work. Don't depend on their functions as we might strip them in future versions of his
- -- generic variant.
+ -- First we load a few helper modules. This is about the minimum needed to let the font modules
+ -- do their work. Don't depend on their functions as we might strip them in future versions of
+ -- this generic variant.
loadmodule('luatex-basics-gen.lua')
loadmodule('data-con.lua')
- -- We do need some basic node support. The code in there is not for general use as it might change.
+ -- We do need some basic node support. The code in there is not for general use as it might
+ -- change.
loadmodule('luatex-basics-nod.lua')
- -- Now come the font modules that deal with traditional tex fonts as well as open type fonts. We only
- -- support OpenType fonts here.
- --
- -- The font database file (if used at all) must be put someplace visible for kpse and is not shared
- -- with context. The mtx-fonts script can be used to genate this file (using the --names option).
+ -- We ship a resource needed for font handling (more might end up here).
+
+ loadmodule('luatex-basics-chr.lua')
- -- In 2013/14 I will merge/move some generic files into luatex-fonts-* files (copies) so that
- -- intermediate updates of context don't interfere. We can then also use the general merger and
- -- consider stripping debug code.
+ -- Now come the font modules that deal with traditional tex fonts as well as open type fonts.
+ --
+ -- The font database file (if used at all) must be put someplace visible for kpse and is not
+ -- shared with context. The mtx-fonts script can be used to generate this file (using the
+ -- --reload --force --simple options).
loadmodule('font-ini.lua')
loadmodule('font-con.lua')
loadmodule('luatex-fonts-enc.lua') -- will load font-age on demand
loadmodule('font-cid.lua')
loadmodule('font-map.lua') -- for loading lum file (will be stripped)
- loadmodule('luatex-fonts-syn.lua') -- deals with font names (synonyms)
- -- begin of test
- loadmodule('font-tfm.lua') -- optional
- loadmodule('font-afm.lua') -- optional
- loadmodule('font-afk.lua') -- optional
- -- end of test
- loadmodule('luatex-fonts-tfm.lua')
+
+ -- We use a somewhat simpler database because using the context one demands loading more helper
+ -- code, and although it is more flexible (more ways to resolve names and so on) it will never
+ -- be used in plain/latex anyway, so let's stick to a simple approach.
+
+ loadmodule('luatex-fonts-syn.lua')
+
+ loadmodule('font-tfm.lua')
+ loadmodule('font-afm.lua')
+ loadmodule('font-afk.lua')
loadmodule('font-oti.lua')
- loadmodule('font-otf.lua')
- loadmodule('font-otb.lua')
- ----------('luatex-fonts-inj.lua') -- normally the same as font-inj.lua / beware loadmodule is parsed
- loadmodule('font-inj.lua')
- loadmodule('luatex-fonts-ota.lua')
- ----------('luatex-fonts-otn.lua') -- normally the same as font-otn.lua / beware loadmodule is parsed
- loadmodule('font-otn.lua')
- loadmodule('font-otp.lua')
- loadmodule('luatex-fonts-lua.lua')
- loadmodule('font-def.lua') -- this code (stripped) might end up in luatex-fonts-def.lua
- loadmodule('luatex-fonts-def.lua')
+
+ -- These are the old loader and processing modules. They use the built-in font loader and will
+ -- stay around, but will only receive fixes, not extensions.
+
+ -- font-otf.lua
+ -- font-otb.lua
+ -- font-inj.lua
+ -- font-ota.lua
+ -- font-otn.lua
+ -- font-otp.lua
+
+ -- Here come the new loader and processing modules. The loader is written in Lua and although
+ -- initial loading is somewhat slower, identifying fonts is faster, cached files can be slightly
+ -- more efficient, and processing is somewhat faster (only measurable on complex fonts).
+
+ loadmodule('font-otr.lua')
+ loadmodule('font-cff.lua')
+ loadmodule('font-ttf.lua')
+ loadmodule('font-dsp.lua')
+ loadmodule('font-oup.lua')
+ loadmodule('font-otl.lua')
+ loadmodule('font-oto.lua')
+ loadmodule('font-otj.lua')
+ loadmodule('font-ota.lua')
+ loadmodule('font-ots.lua')
+ loadmodule('font-osd.lua')
+
+ -- common code
+
+ loadmodule('font-lua.lua')
+ loadmodule('font-def.lua')
+ loadmodule('font-xtx.lua') -- xetex compatible specifiers (plain/latex only)
loadmodule('luatex-fonts-ext.lua') -- some extensions
- -- We need to plug into a callback and the following module implements the handlers. Actual plugging
- -- in happens later.
+ -- We need to plug into a callback and the following module implements the handlers. Actual
+ -- plugging in happens later.
- loadmodule('luatex-fonts-cbk.lua')
+ loadmodule('font-gbn.lua')
end
@@ -241,8 +282,8 @@ end
resolvers.loadmodule = loadmodule
--- In order to deal with the fonts we need to initialize some callbacks. One can overload them later on if
--- needed. First a bit of abstraction.
+-- In order to deal with the fonts we need to initialize some callbacks. One can overload them later
+-- on if needed. First a bit of abstraction.
generic_context.callback_ligaturing = false
generic_context.callback_kerning = false
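
With the merged loader and the simple font database described in the comments above in place
(typically set up by the luatex-plain format, as the texformat line in luatex-test.tex further
down suggests), fonts can be requested directly from plain TeX. A minimal, hypothetical sketch;
the font names, size and feature list are placeholders modelled on the file: specification used
in luatex-test.tex:

    % file based lookup: node mode plus a few OpenType features
    \font\sampleserif=file:texgyrepagella-regular.otf:mode=node;liga=yes;kern=yes at 11pt
    % name based lookup: this one needs the database generated with
    %   mtxrun --script fonts --reload --simple
    \font\samplename=name:dejavuserif at 11pt
    \sampleserif a line set via the file based lookup \par
    \samplename  a line set via the name based lookup \par
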
diff --git a/tex/generic/context/luatex/luatex-pdf.tex b/tex/generic/context/luatex/luatex-pdf.tex
index 1263f21e2..e38f77c4c 100644
--- a/tex/generic/context/luatex/luatex-pdf.tex
+++ b/tex/generic/context/luatex/luatex-pdf.tex
@@ -67,30 +67,45 @@
% commands
- \protected\gdef\pdfliteral {\pdfextension literal}
- \protected\gdef\pdfcolorstack {\pdfextension colorstack}
- \protected\gdef\pdfsetmatrix {\pdfextension setmatrix}
- \protected\gdef\pdfsave {\pdfextension save\relax}
- \protected\gdef\pdfrestore {\pdfextension restore\relax}
- \protected\gdef\pdfobj {\pdfextension obj }
- \protected\gdef\pdfrefobj {\pdfextension refobj }
- \protected\gdef\pdfannot {\pdfextension annot }
- \protected\gdef\pdfstartlink {\pdfextension startlink }
- \protected\gdef\pdfendlink {\pdfextension endlink\relax}
- \protected\gdef\pdfoutline {\pdfextension outline }
- \protected\gdef\pdfdest {\pdfextension dest }
- \protected\gdef\pdfthread {\pdfextension thread }
- \protected\gdef\pdfstartthread {\pdfextension startthread }
- \protected\gdef\pdfendthread {\pdfextension endthread\relax}
- \protected\gdef\pdfinfo {\pdfextension info }
- \protected\gdef\pdfcatalog {\pdfextension catalog }
- \protected\gdef\pdfnames {\pdfextension names }
- \protected\gdef\pdfincludechars {\pdfextension includechars }
- \protected\gdef\pdffontattr {\pdfextension fontattr }
- \protected\gdef\pdfmapfile {\pdfextension mapfile }
- \protected\gdef\pdfmapline {\pdfextension mapline }
- \protected\gdef\pdftrailer {\pdfextension trailer }
- \protected\gdef\pdfglyphtounicode {\pdfextension glyphtounicode }
+ \protected\gdef\pdfliteral {\pdfextension literal}
+ \protected\gdef\pdfcolorstack {\pdfextension colorstack}
+ \protected\gdef\pdfsetmatrix {\pdfextension setmatrix}
+ \protected\gdef\pdfsave {\pdfextension save\relax}
+ \protected\gdef\pdfrestore {\pdfextension restore\relax}
+ \protected\gdef\pdfobj {\pdfextension obj }
+ \protected\gdef\pdfrefobj {\pdfextension refobj }
+ \protected\gdef\pdfannot {\pdfextension annot }
+ \protected\gdef\pdfstartlink {\pdfextension startlink }
+ \protected\gdef\pdfendlink {\pdfextension endlink\relax}
+ \protected\gdef\pdfoutline {\pdfextension outline }
+ \protected\gdef\pdfdest {\pdfextension dest }
+ \protected\gdef\pdfthread {\pdfextension thread }
+ \protected\gdef\pdfstartthread {\pdfextension startthread }
+ \protected\gdef\pdfendthread {\pdfextension endthread\relax}
+ \protected\gdef\pdfinfo {\pdfextension info }
+ \protected\gdef\pdfcatalog {\pdfextension catalog }
+ \protected\gdef\pdfnames {\pdfextension names }
+ \protected\gdef\pdfincludechars {\pdfextension includechars }
+ \protected\gdef\pdffontattr {\pdfextension fontattr }
+ \protected\gdef\pdfmapfile {\pdfextension mapfile }
+ \protected\gdef\pdfmapline {\pdfextension mapline }
+ \protected\gdef\pdftrailer {\pdfextension trailer }
+ \protected\gdef\pdfglyphtounicode {\pdfextension glyphtounicode }
+ \protected\gdef\pdfsuppressoptionalinfo {\pdfextension suppressoptionalinfo }
+ \global \let \pdfsuppressptexinfo \pdfsuppressoptionalinfo
+ \protected\gdef\pdfinfoid {\pdfextension infoid }
+ \protected\gdef\pdfinfoomitdate {\pdfextension suppressoptionalinfo \numexpr32+64\relax}
+
+ \chardef\pdfnofullbanner = 1
+ \chardef\pdfnofilename = 2
+ \chardef\pdfnopagenumber = 4
+ \chardef\pdfnoinfodict = 8
+ \chardef\pdfnocreator = 16
+ \chardef\pdfnocreationdate = 32
+ \chardef\pdfnomoddate = 64
+ \chardef\pdfnoproducer = 128
+ \chardef\pdfnotrapped = 256
+ \chardef\pdfnoid = 512
% converted
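
Since the \chardef values above are single bits, several pieces of optional info can be
suppressed at once by adding them, exactly as \pdfinfoomitdate does with \numexpr32+64.
A minimal, hypothetical sketch (the macro name is made up for illustration):

    % suppress creation date, modification date and the /ID entry in one go
    \protected\gdef\pdfomitvolatileinfo
      {\pdfextension suppressoptionalinfo
       \numexpr\pdfnocreationdate+\pdfnomoddate+\pdfnoid\relax}
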
diff --git a/tex/generic/context/luatex/luatex-test.tex b/tex/generic/context/luatex/luatex-test.tex
index 9844428ac..0bb752b3f 100644
--- a/tex/generic/context/luatex/luatex-test.tex
+++ b/tex/generic/context/luatex/luatex-test.tex
@@ -1,4 +1,4 @@
-texformat=luatex-plain
+% texformat=luatex-plain
%D \module
%D [ file=luatex-test,
@@ -147,4 +147,15 @@ $\sin{x}$
\egroup
+% \font\amiri=file:amiri-regular.ttf:%
+% mode=node;analyze=yes;language=dflt;script=arab;ccmp=yes;%
+% init=yes;medi=yes;fina=yes;isol=yes;%
+% mark=yes;mkmk=yes;kern=yes;curs=yes;%
+% liga=yes;dlig=yes;rlig=yes;clig=yes;calt=yes %
+% at 32pt
+
+% \bgroup
+% \textdir TRT\amiri بِسْمِ اللَّـهِ الرَّ‌حْمَـٰنِ الرَّ‌حِيمِ
+% \egroup
+
\end