author     Context Git Mirror Bot <phg42.2a@gmail.com>  2016-01-29 16:15:09 +0100
committer  Context Git Mirror Bot <phg42.2a@gmail.com>  2016-01-29 16:15:09 +0100
commit     452587cdeefbf6e3bf1eee91e4e976f1135b785f (patch)
tree       e52f05dfd327c3b31a1b0fb82545dbdec639d2e2
parent     975f4f9f2d71d8021900955404f8b144ca6895f5 (diff)
download   context-452587cdeefbf6e3bf1eee91e4e976f1135b785f.tar.gz
2016-01-28 22:37:00
-rw-r--r--  context/data/scite/context/lexers/data/scite-context-data-context.lua  4
-rw-r--r--  context/data/scite/context/lexers/data/scite-context-data-tex.lua  4
-rw-r--r--  context/data/scite/context/scite-context-data-context.properties  8
-rw-r--r--  context/data/scite/context/scite-context-data-tex.properties  122
-rw-r--r--  doc/context/documents/general/manuals/luatex.pdf  bin 993444 -> 1019915 bytes
-rw-r--r--  doc/context/sources/general/manuals/luatex/luatex-libraries.tex  56
-rw-r--r--  doc/context/sources/general/manuals/luatex/luatex-modifications.tex  10
-rw-r--r--  metapost/context/base/mpii/mp-tool.mpii  2
-rw-r--r--  metapost/context/base/mpiv/mp-tool.mpiv  2
-rw-r--r--  scripts/context/lua/mtx-fonts.lua  2
-rw-r--r--  scripts/context/lua/mtxrun.lua  807
-rw-r--r--  scripts/context/stubs/mswin/mtxrun.lua  807
-rw-r--r--  scripts/context/stubs/unix/mtxrun  807
-rw-r--r--  scripts/context/stubs/win64/mtxrun.lua  807
-rw-r--r--  tex/context/base/context-version.pdf  bin 4160 -> 4176 bytes
-rw-r--r--  tex/context/base/mkiv/back-pdf.mkiv  20
-rw-r--r--  tex/context/base/mkiv/catc-ini.mkiv  48
-rw-r--r--  tex/context/base/mkiv/char-ini.mkiv  2
-rw-r--r--  tex/context/base/mkiv/cldf-ini.lua  34
-rw-r--r--  tex/context/base/mkiv/colo-ini.lua  9
-rw-r--r--  tex/context/base/mkiv/colo-ini.mkiv  11
-rw-r--r--  tex/context/base/mkiv/cont-new.mkiv  2
-rw-r--r--  tex/context/base/mkiv/context.mkiv  2
-rw-r--r--  tex/context/base/mkiv/data-env.lua  5
-rw-r--r--  tex/context/base/mkiv/data-res.lua  14
-rw-r--r--  tex/context/base/mkiv/font-cff.lua  1
-rw-r--r--  tex/context/base/mkiv/font-otl.lua  2
-rw-r--r--  tex/context/base/mkiv/font-vf.lua  6
-rw-r--r--  tex/context/base/mkiv/l-unicode.lua  9
-rw-r--r--  tex/context/base/mkiv/lang-rep.lua  35
-rw-r--r--  tex/context/base/mkiv/lpdf-ini.lua  27
-rw-r--r--  tex/context/base/mkiv/luat-fio.lua  59
-rw-r--r--  tex/context/base/mkiv/lxml-ent.lua  13
-rw-r--r--  tex/context/base/mkiv/lxml-ini.lua  227
-rw-r--r--  tex/context/base/mkiv/lxml-ini.mkiv  261
-rw-r--r--  tex/context/base/mkiv/lxml-lpt.lua  64
-rw-r--r--  tex/context/base/mkiv/lxml-tab.lua  899
-rw-r--r--  tex/context/base/mkiv/lxml-tex.lua  206
-rw-r--r--  tex/context/base/mkiv/mlib-lua.lua  12
-rw-r--r--  tex/context/base/mkiv/mult-dim.mkvi  4
-rw-r--r--  tex/context/base/mkiv/mult-ini.lua  3
-rw-r--r--  tex/context/base/mkiv/mult-ini.mkiv  5
-rw-r--r--  tex/context/base/mkiv/mult-low.lua  4
-rw-r--r--  tex/context/base/mkiv/mult-prm.lua  5
-rw-r--r--  tex/context/base/mkiv/node-acc.lua  1
-rw-r--r--  tex/context/base/mkiv/pack-rul.mkiv  4
-rw-r--r--  tex/context/base/mkiv/sort-ini.lua  3
-rw-r--r--  tex/context/base/mkiv/status-files.pdf  bin 9067 -> 9104 bytes
-rw-r--r--  tex/context/base/mkiv/status-lua.pdf  bin 258745 -> 270197 bytes
-rw-r--r--  tex/context/base/mkiv/strc-con.mkvi  15
-rw-r--r--  tex/context/base/mkiv/strc-ini.lua  36
-rw-r--r--  tex/context/base/mkiv/syst-aux.mkiv  218
-rw-r--r--  tex/context/base/mkiv/syst-ini.mkiv  200
-rw-r--r--  tex/context/base/mkiv/util-tab.lua  2
-rw-r--r--  tex/context/modules/mkiv/m-scite.mkiv  3
-rw-r--r--  tex/context/modules/mkiv/x-mathml.lua  2
-rw-r--r--  tex/context/modules/mkiv/x-mathml.mkiv  4
-rw-r--r--  tex/context/modules/mkiv/x-set-11.mkiv  488
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua  2
-rw-r--r--  tex/generic/context/luatex/luatex-gadgets.lua  29
-rw-r--r--  web2c/contextcnf.lua  1
61 files changed, 4117 insertions(+), 2318 deletions(-)
diff --git a/context/data/scite/context/lexers/data/scite-context-data-context.lua b/context/data/scite/context/lexers/data/scite-context-data-context.lua
index 9df5e74f7..98b0e8a1f 100644
--- a/context/data/scite/context/lexers/data/scite-context-data-context.lua
+++ b/context/data/scite/context/lexers/data/scite-context-data-context.lua
@@ -1,4 +1,4 @@
return {
- ["constants"]={ "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive", "plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred", "plustwohundred", "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", "maxcardminusone", "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint", "points", "halfpoint", "zeroskip", "zeromuskip", "onemuskip", "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi", "normalpagebox", "endoflinetoken", "outputnewlinechar", "emptytoks", "empty", "undefined", "voidbox", "emptybox", "emptyvbox", "emptyhbox", "bigskipamount", "medskipamount", "smallskipamount", "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion", "luatexengine", "pdftexengine", "xetexengine", "unknownengine", "activecatcode", "bgroup", "egroup", "endline", "conditionaltrue", "conditionalfalse", "attributeunsetvalue", "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle", "inicatcodes", "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes", "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes", "xmlcatcodes", "ctdcatcodes", "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode", "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode", "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode", "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode", "spaceasciicode", "hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode", "colonasciicode", "backslashasciicode", "circumflexasciicode", "underscoreasciicode", "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode", "lessthanasciicode", "morethanasciicode", "doublecommentsignal", "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode", "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode", "primeasciicode", "hyphenasciicode", "activemathcharcode", "activetabtoken", "activeformfeedtoken", "activeendoflinetoken", "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode", "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode", "vtopgroupcode", "aligngroupcode", "noaligngroupcode", "outputgroupcode", "mathgroupcode", "discretionarygroupcode", "insertgroupcode", "vcentergroupcode", "mathchoicegroupcode", "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "vadjustgroupcode", "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode", "adjustnodecode", "ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode", "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode", "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode", "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", "eofifcode", "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode", "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink", "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint", "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace", "mathsupdisplay", 
"mathsupnormal", "mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight", "muquad", "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset", "doifmode", "doifelsemode", "doifmodeelse", "doifnotmode", "startmodeset", "stopmodeset", "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifelseallmodes", "doifallmodeselse", "doifnotallmodes", "startenvironment", "stopenvironment", "environment", "startcomponent", "stopcomponent", "component", "startproduct", "stopproduct", "product", "startproject", "stopproject", "project", "starttext", "stoptext", "startnotext", "stopnotext", "startdocument", "stopdocument", "documentvariable", "setupdocument", "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule", "setupmodule", "currentmoduleparameter", "moduleparameter", "everystarttext", "everystoptext", "startTEXpage", "stopTEXpage", "enablemode", "disablemode", "preventmode", "definemode", "globalenablemode", "globaldisablemode", "globalpreventmode", "pushmode", "popmode", "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix", "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode", "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode", "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode", "constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument", "continueifinputfile", "luastringsep", "!!bs", "!!es", "lefttorightmark", "righttoleftmark", "breakablethinspace", "nobreakspace", "nonbreakablespace", "narrownobreakspace", "zerowidthnobreakspace", "ideographicspace", "ideographichalffillspace", "twoperemspace", "threeperemspace", "fourperemspace", "fiveperemspace", "sixperemspace", "figurespace", "punctuationspace", "hairspace", "zerowidthspace", "zerowidthnonjoiner", "zerowidthjoiner", "zwnj", "zwj", "optionalspace", "asciispacechar" },
- ["helpers"]={ "startsetups", "stopsetups", "startxmlsetups", "stopxmlsetups", "startluasetups", "stopluasetups", "starttexsetups", "stoptexsetups", "startrawsetups", "stoprawsetups", "startlocalsetups", "stoplocalsetups", "starttexdefinition", "stoptexdefinition", "starttexcode", "stoptexcode", "startcontextcode", "stopcontextcode", "startcontextdefinitioncode", "stopcontextdefinitioncode", "texdefinition", "doifelsesetups", "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "fastsetup", "doifelsecommandhandler", "doifcommandhandlerelse", "doifnotcommandhandler", "doifcommandhandler", "newmode", "setmode", "resetmode", "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode", "booleanmodevalue", "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", "newlanguage", "newfamily", "newfam", "newhelp", "then", "begcsname", "strippedcsname", "checkedstrippedcsname", "firstargumentfalse", "firstargumenttrue", "secondargumentfalse", "secondargumenttrue", "thirdargumentfalse", "thirdargumenttrue", "fourthargumentfalse", "fourthargumenttrue", "fifthargumentfalse", "fifthsargumenttrue", "sixthargumentfalse", "sixtsargumenttrue", "doglobal", "dodoglobal", "redoglobal", "resetglobal", "donothing", "dontcomplain", "forgetall", "donetrue", "donefalse", "inlineordisplaymath", "indisplaymath", "forcedisplaymath", "startforceddisplaymath", "stopforceddisplaymath", "reqno", "mathortext", "htdp", "unvoidbox", "hfilll", "vfilll", "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha", "currentcatcodetable", "defaultcatcodetable", "catcodetablename", "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", "pushcatcodetable", "popcatcodetable", "restorecatcodes", "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledmbox", "ruledhpack", "ruledvpack", "ruledtpack", "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", "ruledpenalty", "filledhboxb", "filledhboxr", "filledhboxg", "filledhboxc", "filledhboxm", "filledhboxy", "filledhboxk", "scratchcounter", "globalscratchcounter", "scratchdimen", "globalscratchdimen", "scratchskip", "globalscratchskip", "scratchmuskip", "globalscratchmuskip", "scratchtoks", "globalscratchtoks", "scratchbox", "globalscratchbox", "normalbaselineskip", "normallineskip", "normallineskiplimit", "availablehsize", "localhsize", "setlocalhsize", "distributedhsize", "hsizefraction", "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", "flushnextbox", "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", "scratchhsize", "scratchvsize", "scratchxoffset", "scratchyoffset", "scratchhoffset", "scratchvoffset", "scratchxposition", "scratchyposition", "scratchtopoffset", "scratchbottomoffset", "scratchleftoffset", "scratchrightoffset", "scratchcounterone", "scratchcountertwo", "scratchcounterthree", "scratchdimenone", "scratchdimentwo", "scratchdimenthree", "scratchskipone", 
"scratchskiptwo", "scratchskipthree", "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree", "scratchtoksone", "scratchtokstwo", "scratchtoksthree", "scratchboxone", "scratchboxtwo", "scratchboxthree", "scratchnx", "scratchny", "scratchmx", "scratchmy", "scratchunicode", "scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip", "doif", "doifnot", "doifelse", "doifinset", "doifnotinset", "doifelseinset", "doifinsetelse", "doifelsenextchar", "doifnextcharelse", "doifelsenextoptional", "doifnextoptionalelse", "doifelsenextoptionalcs", "doifnextoptionalcselse", "doifelsefastoptionalcheck", "doiffastoptionalcheckelse", "doifelsenextbgroup", "doifnextbgroupelse", "doifelsenextbgroupcs", "doifnextbgroupcselse", "doifelsenextparenthesis", "doifnextparenthesiselse", "doifelseundefined", "doifundefinedelse", "doifelsedefined", "doifdefinedelse", "doifundefined", "doifdefined", "doifelsevalue", "doifvalue", "doifnotvalue", "doifnothing", "doifsomething", "doifelsenothing", "doifnothingelse", "doifelsesomething", "doifsomethingelse", "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", "doifvaluenothingelse", "doifelsedimension", "doifdimensionelse", "doifelsenumber", "doifnumberelse", "doifnumber", "doifnotnumber", "doifelsecommon", "doifcommonelse", "doifcommon", "doifnotcommon", "doifinstring", "doifnotinstring", "doifelseinstring", "doifinstringelse", "doifelseassignment", "doifassignmentelse", "docheckassignment", "tracingall", "tracingnone", "loggingall", "removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace", "nbsp", "obeyspaces", "obeylines", "obeyedspace", "obeyedline", "obeyedtab", "obeyedpage", "normalspace", "executeifdefined", "singleexpandafter", "doubleexpandafter", "tripleexpandafter", "dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces", "removepunctuation", "wait", "writestatus", "define", "defineexpandable", "redefine", "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "freezemeasure", "measure", "measured", "installcorenamespace", "getvalue", "getuvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", "resetvalue", "undefinevalue", "ignorevalue", "setuvalue", "setuevalue", "setugvalue", "setuxvalue", "globallet", "glet", "udef", "ugdef", "uedef", "uxdef", "checked", "unique", "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters", "getdummyparameters", "dummyparameter", "directdummyparameter", "setdummyparameter", "letdummyparameter", "usedummystyleandcolor", "usedummystyleparameter", "usedummycolorparameter", "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect", "firstofoneargument", "firstoftwoarguments", "secondoftwoarguments", "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", 
"firstofoneunexpanded", "firstoftwounexpanded", "secondoftwounexpanded", "firstofthreeunexpanded", "secondofthreeunexpanded", "thirdofthreeunexpanded", "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments", "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith", "newconstant", "setnewconstant", "setconstant", "setconstantvalue", "newconditional", "settrue", "setfalse", "settruevalue", "setfalsevalue", "newmacro", "setnewmacro", "newfraction", "newsignal", "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "doquintupleargument", "dosixtupleargument", "doseventupleargument", "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", "permitspacesbetweengroups", "dontpermitspacesbetweengroups", "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", "modulonumber", "dividenumber", "getfirstcharacter", "doifelsefirstchar", "doiffirstcharelse", "startnointerference", "stopnointerference", "twodigits", "threedigits", "leftorright", "offinterlineskip", "oninterlineskip", "nointerlineskip", "strut", "halfstrut", "quarterstrut", "depthstrut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight", "ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing", "ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing", "opordspacing", "opopspacing", "opbinspacing", "oprelspacing", "opopenspacing", "opclosespacing", "oppunctspacing", "opinnerspacing", "binordspacing", "binopspacing", "binbinspacing", "binrelspacing", "binopenspacing", "binclosespacing", "binpunctspacing", "bininnerspacing", "relordspacing", "relopspacing", "relbinspacing", "relrelspacing", "relopenspacing", "relclosespacing", "relpunctspacing", "relinnerspacing", "openordspacing", "openopspacing", "openbinspacing", "openrelspacing", "openopenspacing", "openclosespacing", "openpunctspacing", "openinnerspacing", "closeordspacing", "closeopspacing", "closebinspacing", "closerelspacing", "closeopenspacing", "closeclosespacing", "closepunctspacing", "closeinnerspacing", "punctordspacing", "punctopspacing", "punctbinspacing", "punctrelspacing", "punctopenspacing", "punctclosespacing", "punctpunctspacing", "punctinnerspacing", "innerordspacing", "inneropspacing", "innerbinspacing", "innerrelspacing", "inneropenspacing", "innerclosespacing", "innerpunctspacing", "innerinnerspacing", "normalreqno", "startimath", "stopimath", "normalstartimath", "normalstopimath", "startdmath", "stopdmath", "normalstartdmath", "normalstopdmath", "uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette", "mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox", "mathtext", "setmathsmalltextbox", "setmathtextbox", "pushmathstyle", "popmathstyle", "triggerdisplaystyle", "triggertextstyle", "triggerscriptstyle", "triggerscriptscriptstyle", "triggeruncrampedstyle", 
"triggercrampedstyle", "triggersmallstyle", "triggeruncrampedsmallstyle", "triggercrampedsmallstyle", "triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle", "luaexpr", "expelsedoif", "expdoif", "expdoifnot", "expdoifelsecommon", "expdoifcommonelse", "expdoifelseinset", "expdoifinsetelse", "ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport", "ctxlua", "luacode", "lateluacode", "directluacode", "registerctxluafile", "ctxloadluafile", "luaversion", "luamajorversion", "luaminorversion", "ctxluacode", "luaconditional", "luaexpanded", "startluaparameterset", "stopluaparameterset", "luaparameterset", "definenamedlua", "obeylualines", "obeyluatokens", "startluacode", "stopluacode", "startlua", "stoplua", "startctxfunction", "stopctxfunction", "ctxfunction", "startctxfunctiondefinition", "stopctxfunctiondefinition", "installctxfunction", "cldprocessfile", "cldloadfile", "cldcontext", "cldcommand", "carryoverpar", "assumelongusagecs", "Umathbotaccent", "righttolefthbox", "lefttorighthbox", "righttoleftvbox", "lefttorightvbox", "righttoleftvtop", "lefttorightvtop", "rtlhbox", "ltrhbox", "rtlvbox", "ltrvbox", "rtlvtop", "ltrvtop", "autodirhbox", "autodirvbox", "autodirvtop", "leftorrighthbox", "leftorrightvbox", "leftorrightvtop", "lefttoright", "righttoleft", "synchronizelayoutdirection", "synchronizedisplaydirection", "synchronizeinlinedirection", "lesshyphens", "morehyphens", "nohyphens", "dohyphens", "Ucheckedstartdisplaymath", "Ucheckedstopdisplaymath", "nobreak", "allowbreak", "goodbreak", "nospace", "nospacing", "dospacing", "naturalhbox", "naturalvbox", "naturalhpack", "naturalvpack" },
+ ["constants"]={ "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive", "plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred", "plustwohundred", "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", "maxcardminusone", "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint", "points", "halfpoint", "zeroskip", "zeromuskip", "onemuskip", "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi", "normalpagebox", "endoflinetoken", "outputnewlinechar", "emptytoks", "empty", "undefined", "voidbox", "emptybox", "emptyvbox", "emptyhbox", "bigskipamount", "medskipamount", "smallskipamount", "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion", "luatexengine", "pdftexengine", "xetexengine", "unknownengine", "activecatcode", "bgroup", "egroup", "endline", "conditionaltrue", "conditionalfalse", "attributeunsetvalue", "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle", "inicatcodes", "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes", "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes", "xmlcatcodes", "ctdcatcodes", "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode", "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode", "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode", "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode", "spaceasciicode", "hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode", "colonasciicode", "backslashasciicode", "circumflexasciicode", "underscoreasciicode", "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode", "lessthanasciicode", "morethanasciicode", "doublecommentsignal", "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode", "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode", "primeasciicode", "hyphenasciicode", "activemathcharcode", "activetabtoken", "activeformfeedtoken", "activeendoflinetoken", "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode", "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode", "vtopgroupcode", "aligngroupcode", "noaligngroupcode", "outputgroupcode", "mathgroupcode", "discretionarygroupcode", "insertgroupcode", "vcentergroupcode", "mathchoicegroupcode", "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "vadjustgroupcode", "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode", "adjustnodecode", "ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode", "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode", "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode", "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", "eofifcode", "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode", "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink", "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint", "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace", "mathsupdisplay", 
"mathsupnormal", "mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight", "muquad", "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset", "doifmode", "doifelsemode", "doifmodeelse", "doifnotmode", "startmodeset", "stopmodeset", "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifelseallmodes", "doifallmodeselse", "doifnotallmodes", "startenvironment", "stopenvironment", "environment", "startcomponent", "stopcomponent", "component", "startproduct", "stopproduct", "product", "startproject", "stopproject", "project", "starttext", "stoptext", "startnotext", "stopnotext", "startdocument", "stopdocument", "documentvariable", "setupdocument", "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule", "setupmodule", "currentmoduleparameter", "moduleparameter", "everystarttext", "everystoptext", "startTEXpage", "stopTEXpage", "enablemode", "disablemode", "preventmode", "definemode", "globalenablemode", "globaldisablemode", "globalpreventmode", "pushmode", "popmode", "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix", "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode", "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode", "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode", "constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument", "continueifinputfile", "luastringsep", "!!bs", "!!es", "lefttorightmark", "righttoleftmark", "breakablethinspace", "nobreakspace", "nonbreakablespace", "narrownobreakspace", "zerowidthnobreakspace", "ideographicspace", "ideographichalffillspace", "twoperemspace", "threeperemspace", "fourperemspace", "fiveperemspace", "sixperemspace", "figurespace", "punctuationspace", "hairspace", "zerowidthspace", "zerowidthnonjoiner", "zerowidthjoiner", "zwnj", "zwj", "optionalspace", "asciispacechar", "Ux" },
+ ["helpers"]={ "startsetups", "stopsetups", "startxmlsetups", "stopxmlsetups", "startluasetups", "stopluasetups", "starttexsetups", "stoptexsetups", "startrawsetups", "stoprawsetups", "startlocalsetups", "stoplocalsetups", "starttexdefinition", "stoptexdefinition", "starttexcode", "stoptexcode", "startcontextcode", "stopcontextcode", "startcontextdefinitioncode", "stopcontextdefinitioncode", "texdefinition", "doifelsesetups", "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "fastsetup", "doifelsecommandhandler", "doifcommandhandlerelse", "doifnotcommandhandler", "doifcommandhandler", "newmode", "setmode", "resetmode", "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode", "booleanmodevalue", "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", "newlanguage", "newfamily", "newfam", "newhelp", "then", "begcsname", "strippedcsname", "checkedstrippedcsname", "firstargumentfalse", "firstargumenttrue", "secondargumentfalse", "secondargumenttrue", "thirdargumentfalse", "thirdargumenttrue", "fourthargumentfalse", "fourthargumenttrue", "fifthargumentfalse", "fifthsargumenttrue", "sixthargumentfalse", "sixtsargumenttrue", "doglobal", "dodoglobal", "redoglobal", "resetglobal", "donothing", "dontcomplain", "forgetall", "donetrue", "donefalse", "inlineordisplaymath", "indisplaymath", "forcedisplaymath", "startforceddisplaymath", "stopforceddisplaymath", "reqno", "mathortext", "htdp", "unvoidbox", "hfilll", "vfilll", "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha", "currentcatcodetable", "defaultcatcodetable", "catcodetablename", "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", "pushcatcodetable", "popcatcodetable", "restorecatcodes", "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledmbox", "ruledhpack", "ruledvpack", "ruledtpack", "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", "ruledpenalty", "filledhboxb", "filledhboxr", "filledhboxg", "filledhboxc", "filledhboxm", "filledhboxy", "filledhboxk", "scratchcounter", "globalscratchcounter", "scratchdimen", "globalscratchdimen", "scratchskip", "globalscratchskip", "scratchmuskip", "globalscratchmuskip", "scratchtoks", "globalscratchtoks", "scratchbox", "globalscratchbox", "normalbaselineskip", "normallineskip", "normallineskiplimit", "availablehsize", "localhsize", "setlocalhsize", "distributedhsize", "hsizefraction", "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", "flushnextbox", "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", "scratchhsize", "scratchvsize", "scratchxoffset", "scratchyoffset", "scratchhoffset", "scratchvoffset", "scratchxposition", "scratchyposition", "scratchtopoffset", "scratchbottomoffset", "scratchleftoffset", "scratchrightoffset", "scratchcounterone", "scratchcountertwo", "scratchcounterthree", "scratchdimenone", "scratchdimentwo", "scratchdimenthree", "scratchskipone", 
"scratchskiptwo", "scratchskipthree", "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree", "scratchtoksone", "scratchtokstwo", "scratchtoksthree", "scratchboxone", "scratchboxtwo", "scratchboxthree", "scratchnx", "scratchny", "scratchmx", "scratchmy", "scratchunicode", "scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip", "doif", "doifnot", "doifelse", "doifinset", "doifnotinset", "doifelseinset", "doifinsetelse", "doifelsenextchar", "doifnextcharelse", "doifelsenextoptional", "doifnextoptionalelse", "doifelsenextoptionalcs", "doifnextoptionalcselse", "doifelsefastoptionalcheck", "doiffastoptionalcheckelse", "doifelsenextbgroup", "doifnextbgroupelse", "doifelsenextbgroupcs", "doifnextbgroupcselse", "doifelsenextparenthesis", "doifnextparenthesiselse", "doifelseundefined", "doifundefinedelse", "doifelsedefined", "doifdefinedelse", "doifundefined", "doifdefined", "doifelsevalue", "doifvalue", "doifnotvalue", "doifnothing", "doifsomething", "doifelsenothing", "doifnothingelse", "doifelsesomething", "doifsomethingelse", "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", "doifvaluenothingelse", "doifelsedimension", "doifdimensionelse", "doifelsenumber", "doifnumberelse", "doifnumber", "doifnotnumber", "doifelsecommon", "doifcommonelse", "doifcommon", "doifnotcommon", "doifinstring", "doifnotinstring", "doifelseinstring", "doifinstringelse", "doifelseassignment", "doifassignmentelse", "docheckassignment", "tracingall", "tracingnone", "loggingall", "removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace", "nbsp", "obeyspaces", "obeylines", "obeyedspace", "obeyedline", "obeyedtab", "obeyedpage", "normalspace", "executeifdefined", "singleexpandafter", "doubleexpandafter", "tripleexpandafter", "dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces", "removepunctuation", "wait", "writestatus", "define", "defineexpandable", "redefine", "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "freezemeasure", "measure", "measured", "installcorenamespace", "getvalue", "getuvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", "resetvalue", "undefinevalue", "ignorevalue", "setuvalue", "setuevalue", "setugvalue", "setuxvalue", "globallet", "glet", "udef", "ugdef", "uedef", "uxdef", "checked", "unique", "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters", "getdummyparameters", "dummyparameter", "directdummyparameter", "setdummyparameter", "letdummyparameter", "usedummystyleandcolor", "usedummystyleparameter", "usedummycolorparameter", "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect", "firstofoneargument", "firstoftwoarguments", "secondoftwoarguments", "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", 
"firstofoneunexpanded", "firstoftwounexpanded", "secondoftwounexpanded", "firstofthreeunexpanded", "secondofthreeunexpanded", "thirdofthreeunexpanded", "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments", "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith", "newconstant", "setnewconstant", "setconstant", "setconstantvalue", "newconditional", "settrue", "setfalse", "settruevalue", "setfalsevalue", "newmacro", "setnewmacro", "newfraction", "newsignal", "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "doquintupleargument", "dosixtupleargument", "doseventupleargument", "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", "permitspacesbetweengroups", "dontpermitspacesbetweengroups", "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", "modulonumber", "dividenumber", "getfirstcharacter", "doifelsefirstchar", "doiffirstcharelse", "startnointerference", "stopnointerference", "twodigits", "threedigits", "leftorright", "offinterlineskip", "oninterlineskip", "nointerlineskip", "strut", "halfstrut", "quarterstrut", "depthstrut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight", "ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing", "ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing", "opordspacing", "opopspacing", "opbinspacing", "oprelspacing", "opopenspacing", "opclosespacing", "oppunctspacing", "opinnerspacing", "binordspacing", "binopspacing", "binbinspacing", "binrelspacing", "binopenspacing", "binclosespacing", "binpunctspacing", "bininnerspacing", "relordspacing", "relopspacing", "relbinspacing", "relrelspacing", "relopenspacing", "relclosespacing", "relpunctspacing", "relinnerspacing", "openordspacing", "openopspacing", "openbinspacing", "openrelspacing", "openopenspacing", "openclosespacing", "openpunctspacing", "openinnerspacing", "closeordspacing", "closeopspacing", "closebinspacing", "closerelspacing", "closeopenspacing", "closeclosespacing", "closepunctspacing", "closeinnerspacing", "punctordspacing", "punctopspacing", "punctbinspacing", "punctrelspacing", "punctopenspacing", "punctclosespacing", "punctpunctspacing", "punctinnerspacing", "innerordspacing", "inneropspacing", "innerbinspacing", "innerrelspacing", "inneropenspacing", "innerclosespacing", "innerpunctspacing", "innerinnerspacing", "normalreqno", "startimath", "stopimath", "normalstartimath", "normalstopimath", "startdmath", "stopdmath", "normalstartdmath", "normalstopdmath", "uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette", "mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox", "mathtext", "setmathsmalltextbox", "setmathtextbox", "pushmathstyle", "popmathstyle", "triggerdisplaystyle", "triggertextstyle", "triggerscriptstyle", "triggerscriptscriptstyle", "triggeruncrampedstyle", 
"triggercrampedstyle", "triggersmallstyle", "triggeruncrampedsmallstyle", "triggercrampedsmallstyle", "triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle", "luaexpr", "expelsedoif", "expdoif", "expdoifnot", "expdoifelsecommon", "expdoifcommonelse", "expdoifelseinset", "expdoifinsetelse", "ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport", "ctxlua", "luacode", "lateluacode", "directluacode", "registerctxluafile", "ctxloadluafile", "luaversion", "luamajorversion", "luaminorversion", "ctxluacode", "luaconditional", "luaexpanded", "startluaparameterset", "stopluaparameterset", "luaparameterset", "definenamedlua", "obeylualines", "obeyluatokens", "startluacode", "stopluacode", "startlua", "stoplua", "startctxfunction", "stopctxfunction", "ctxfunction", "startctxfunctiondefinition", "stopctxfunctiondefinition", "installctxfunction", "cldprocessfile", "cldloadfile", "cldcontext", "cldcommand", "carryoverpar", "assumelongusagecs", "Umathbotaccent", "righttolefthbox", "lefttorighthbox", "righttoleftvbox", "lefttorightvbox", "righttoleftvtop", "lefttorightvtop", "rtlhbox", "ltrhbox", "rtlvbox", "ltrvbox", "rtlvtop", "ltrvtop", "autodirhbox", "autodirvbox", "autodirvtop", "leftorrighthbox", "leftorrightvbox", "leftorrightvtop", "lefttoright", "righttoleft", "synchronizelayoutdirection", "synchronizedisplaydirection", "synchronizeinlinedirection", "lesshyphens", "morehyphens", "nohyphens", "dohyphens", "Ucheckedstartdisplaymath", "Ucheckedstopdisplaymath", "break", "nobreak", "allowbreak", "goodbreak", "nospace", "nospacing", "dospacing", "naturalhbox", "naturalvbox", "naturalhpack", "naturalvpack" },
}
\ No newline at end of file
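The file above is a generated keyword-data module: it simply returns a Lua table with a "constants" and a "helpers" list, and this commit appends "Ux" to the constants and "break" to the helpers. As a minimal illustration only (not the actual SciTE/ConTeXt lexer code, and with a hypothetical file path), such a module could be consumed like this:

-- illustrative sketch: load the generated data module and build a lookup set
local data = dofile("scite-context-data-context.lua") -- returns { constants = {...}, helpers = {...} }

local is_constant = {}
for _, name in ipairs(data.constants) do
    is_constant[name] = true -- hash lookup instead of a linear scan per token
end

print(is_constant["Ux"])    -- true: "Ux" is in the constants list after this commit
print(is_constant["break"]) -- nil: "break" was added to the helpers list, not the constants
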
diff --git a/context/data/scite/context/lexers/data/scite-context-data-tex.lua b/context/data/scite/context/lexers/data/scite-context-data-tex.lua
index 001dc1acb..0eeac562e 100644
--- a/context/data/scite/context/lexers/data/scite-context-data-tex.lua
+++ b/context/data/scite/context/lexers/data/scite-context-data-tex.lua
@@ -3,7 +3,7 @@ return {
["etex"]={ "botmarks", "clubpenalties", "currentgrouplevel", "currentgrouptype", "currentifbranch", "currentiflevel", "currentiftype", "detokenize", "dimexpr", "displaywidowpenalties", "eTeXVersion", "eTeXminorversion", "eTeXrevision", "eTeXversion", "everyeof", "firstmarks", "fontchardp", "fontcharht", "fontcharic", "fontcharwd", "glueexpr", "glueshrink", "glueshrinkorder", "gluestretch", "gluestretchorder", "gluetomu", "ifcsname", "ifdefined", "iffontchar", "interactionmode", "interlinepenalties", "lastlinefit", "lastnodetype", "marks", "muexpr", "mutoglue", "numexpr", "pagediscards", "parshapedimen", "parshapeindent", "parshapelength", "predisplaydirection", "protected", "readline", "savinghyphcodes", "savingvdiscards", "scantokens", "showgroups", "showifs", "showtokens", "splitbotmarks", "splitdiscards", "splitfirstmarks", "topmarks", "tracingassigns", "tracinggroups", "tracingifs", "tracingnesting", "tracingscantokens", "unexpanded", "unless", "widowpenalties" },
["luatex"]={ "Uchar", "Udelcode", "Udelcodenum", "Udelimiter", "Udelimiterover", "Udelimiterunder", "Uhextensible", "Uleft", "Umathaccent", "Umathaxis", "Umathbinbinspacing", "Umathbinclosespacing", "Umathbininnerspacing", "Umathbinopenspacing", "Umathbinopspacing", "Umathbinordspacing", "Umathbinpunctspacing", "Umathbinrelspacing", "Umathchar", "Umathcharclass", "Umathchardef", "Umathcharfam", "Umathcharnum", "Umathcharnumdef", "Umathcharslot", "Umathclosebinspacing", "Umathcloseclosespacing", "Umathcloseinnerspacing", "Umathcloseopenspacing", "Umathcloseopspacing", "Umathcloseordspacing", "Umathclosepunctspacing", "Umathcloserelspacing", "Umathcode", "Umathcodenum", "Umathcodenumdef", "Umathconnectoroverlapmin", "Umathfractiondelsize", "Umathfractiondenomdown", "Umathfractiondenomvgap", "Umathfractionnumup", "Umathfractionnumvgap", "Umathfractionrule", "Umathinnerbinspacing", "Umathinnerclosespacing", "Umathinnerinnerspacing", "Umathinneropenspacing", "Umathinneropspacing", "Umathinnerordspacing", "Umathinnerpunctspacing", "Umathinnerrelspacing", "Umathlimitabovebgap", "Umathlimitabovekern", "Umathlimitabovevgap", "Umathlimitbelowbgap", "Umathlimitbelowkern", "Umathlimitbelowvgap", "Umathopbinspacing", "Umathopclosespacing", "Umathopenbinspacing", "Umathopenclosespacing", "Umathopeninnerspacing", "Umathopenopenspacing", "Umathopenopspacing", "Umathopenordspacing", "Umathopenpunctspacing", "Umathopenrelspacing", "Umathoperatorsize", "Umathopinnerspacing", "Umathopopenspacing", "Umathopopspacing", "Umathopordspacing", "Umathoppunctspacing", "Umathoprelspacing", "Umathordbinspacing", "Umathordclosespacing", "Umathordinnerspacing", "Umathordopenspacing", "Umathordopspacing", "Umathordordspacing", "Umathordpunctspacing", "Umathordrelspacing", "Umathoverbarkern", "Umathoverbarrule", "Umathoverbarvgap", "Umathoverdelimiterbgap", "Umathoverdelimitervgap", "Umathpunctbinspacing", "Umathpunctclosespacing", "Umathpunctinnerspacing", "Umathpunctopenspacing", "Umathpunctopspacing", "Umathpunctordspacing", "Umathpunctpunctspacing", "Umathpunctrelspacing", "Umathquad", "Umathradicaldegreeafter", "Umathradicaldegreebefore", "Umathradicaldegreeraise", "Umathradicalkern", "Umathradicalrule", "Umathradicalvgap", "Umathrelbinspacing", "Umathrelclosespacing", "Umathrelinnerspacing", "Umathrelopenspacing", "Umathrelopspacing", "Umathrelordspacing", "Umathrelpunctspacing", "Umathrelrelspacing", "Umathskewedfractionhgap", "Umathskewedfractionvgap", "Umathspaceafterscript", "Umathstackdenomdown", "Umathstacknumup", "Umathstackvgap", "Umathsubshiftdown", "Umathsubshiftdrop", "Umathsubsupshiftdown", "Umathsubsupvgap", "Umathsubtopmax", "Umathsupbottommin", "Umathsupshiftdrop", "Umathsupshiftup", "Umathsupsubbottommax", "Umathunderbarkern", "Umathunderbarrule", "Umathunderbarvgap", "Umathunderdelimiterbgap", "Umathunderdelimitervgap", "Umiddle", "Uoverdelimiter", "Uradical", "Uright", "Uroot", "Uskewed", "Uskewedwithdelims", "Ustack", "Ustartdisplaymath", "Ustartmath", "Ustopdisplaymath", "Ustopmath", "Usubscript", "Usuperscript", "Uunderdelimiter", "Uvextensible", "adjustspacing", "alignmark", "aligntab", "attribute", "attributedef", "bodydir", "boxdir", "catcodetable", "clearmarks", "copyfont", "crampeddisplaystyle", "crampedscriptscriptstyle", "crampedscriptstyle", "crampedtextstyle", "draftmode", "dviextension", "dvifeedback", "dvivariable", "efcode", "hjcode", "fontid", "formatname", "gleaders", "hyphenationmin", "ifabsdim", "ifabsnum", "ifprimitive", "ignoreligaturesinfont", "initcatcodetable", "insertht", 
"lastsavedboxresourceindex", "lastsavedimageresourceindex", "lastsavedimageresourcepages", "lastxpos", "lastypos", "latelua", "leftghost", "leftmarginkern", "letcharcode", "letterspacefont", "localbrokenpenalty", "localinterlinepenalty", "localleftbox", "localrightbox", "lpcode", "luaescapestring", "luatexbanner", "luatexrevision", "luatexversion", "luafunction", "mathdir", "mathdisplayskipmode", "matheqnogapstep", "mathoption", "mathscriptsmode", "mathstyle", "mathsurroundskip", "nokerns", "nohrule", "noligs", "nospaces", "novrule", "normaldeviate", "outputbox", "outputmode", "pagedir", "pageheight", "pagebottomoffset", "pageleftoffset", "pagerightoffset", "pagetopoffset", "pagewidth", "pardir", "pdfextension", "pdffeedback", "pdfvariable", "postexhyphenchar", "posthyphenchar", "preexhyphenchar", "prehyphenchar", "primitive", "protrudechars", "pxdimen", "randomseed", "rightghost", "rightmarginkern", "rpcode", "saveboxresource", "savecatcodetable", "saveimageresource", "savepos", "scantextokens", "setfontid", "setrandomseed", "suppressfontnotfounderror", "suppressifcsnameerror", "suppresslongerror", "suppressoutererror", "suppressmathparerror", "synctex", "tagcode", "textdir", "tracingfonts", "uniformdeviate", "useboxresource", "useimageresource", "vpack", "hpack", "tpack", "csstring", "begincsname", "lastnamedcs", "toksapp", "tokspre", "etoksapp", "etokspre" },
["omega"]={ "OmegaVersion", "bodydir", "chardp", "charht", "charit", "charwd", "leftghost", "localbrokenpenalty", "localinterlinepenalty", "localleftbox", "localrightbox", "mathdir", "odelcode", "odelimiter", "omathaccent", "omathchar", "omathchardef", "omathcode", "oradical", "pagedir", "pageheight", "pagewidth", "pardir", "rightghost", "textdir" },
- ["pdftex"]={ "efcode", "expanded", "ifincsname", "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "leftmarginkern", "letterspacefont", "lpcode", "pdfadjustspacing", "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit", "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate", "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode", "pdfeachlineheight", "pdfeachlinedepth", "pdfendlink", "pdfendthread", "pdffirstlineheight", "pdffontattr", "pdffontexpand", "pdffontname", "pdffontobjnum", "pdffontsize", "pdfxformmargin", "pdfgamma", "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin", "pdfignoreddimen", "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor", "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts", "pdfinclusionerrorlevel", "pdfinfo", "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink", "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagepages", "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral", "pdfmapfile", "pdfmapline", "pdfminorversion", "pdfnames", "pdfnoligatures", "pdfnormaldeviate", "pdfobj", "pdfobjcompresslevel", "pdfoutline", "pdfoutput", "pdfpageattr", "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources", "pdfpagesattr", "pdfpagewidth", "pdfpkmode", "pdfpkresolution", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen", "pdfrandomseed", "pdfrefobj", "pdfrefxform", "pdfrefximage", "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos", "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread", "pdftexbanner", "pdftexrevision", "pdftexversion", "pdfthread", "pdfthreadmargin", "pdftracingfonts", "pdftrailer", "pdfuniformdeviate", "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr", "pdfxformname", "pdfxformresources", "pdfximage", "quitvmode", "rightmarginkern", "rpcode", "tagcode" },
- ["tex"]={ "-", "/", "AlephVersion", "Alephminorversion", "Alephrevision", "Alephversion", "OmegaVersion", "Omegaminorversion", "Omegarevision", "Omegaversion", "Udelcode", "Udelcodenum", "Udelimiter", "Udelimiterover", "Udelimiterunder", "Uhextensible", "Uleft", "Umathaccent", "Umathaxis", "Umathbinbinspacing", "Umathbinclosespacing", "Umathbininnerspacing", "Umathbinopenspacing", "Umathbinopspacing", "Umathbinordspacing", "Umathbinpunctspacing", "Umathbinrelspacing", "Umathchar", "Umathcharclass", "Umathchardef", "Umathcharfam", "Umathcharnum", "Umathcharnumdef", "Umathcharslot", "Umathclosebinspacing", "Umathcloseclosespacing", "Umathcloseinnerspacing", "Umathcloseopenspacing", "Umathcloseopspacing", "Umathcloseordspacing", "Umathclosepunctspacing", "Umathcloserelspacing", "Umathcode", "Umathcodenum", "Umathcodenumdef", "Umathconnectoroverlapmin", "Umathfractiondelsize", "Umathfractiondenomdown", "Umathfractiondenomvgap", "Umathfractionnumup", "Umathfractionnumvgap", "Umathfractionrule", "Umathinnerbinspacing", "Umathinnerclosespacing", "Umathinnerinnerspacing", "Umathinneropenspacing", "Umathinneropspacing", "Umathinnerordspacing", "Umathinnerpunctspacing", "Umathinnerrelspacing", "Umathlimitabovebgap", "Umathlimitabovekern", "Umathlimitabovevgap", "Umathlimitbelowbgap", "Umathlimitbelowkern", "Umathlimitbelowvgap", "Umathopbinspacing", "Umathopclosespacing", "Umathopenbinspacing", "Umathopenclosespacing", "Umathopeninnerspacing", "Umathopenopenspacing", "Umathopenopspacing", "Umathopenordspacing", "Umathopenpunctspacing", "Umathopenrelspacing", "Umathoperatorsize", "Umathopinnerspacing", "Umathopopenspacing", "Umathopopspacing", "Umathopordspacing", "Umathoppunctspacing", "Umathoprelspacing", "Umathordbinspacing", "Umathordclosespacing", "Umathordinnerspacing", "Umathordopenspacing", "Umathordopspacing", "Umathordordspacing", "Umathordpunctspacing", "Umathordrelspacing", "Umathoverbarkern", "Umathoverbarrule", "Umathoverbarvgap", "Umathoverdelimiterbgap", "Umathoverdelimitervgap", "Umathpunctbinspacing", "Umathpunctclosespacing", "Umathpunctinnerspacing", "Umathpunctopenspacing", "Umathpunctopspacing", "Umathpunctordspacing", "Umathpunctpunctspacing", "Umathpunctrelspacing", "Umathquad", "Umathradicaldegreeafter", "Umathradicaldegreebefore", "Umathradicaldegreeraise", "Umathradicalkern", "Umathradicalrule", "Umathradicalvgap", "Umathrelbinspacing", "Umathrelclosespacing", "Umathrelinnerspacing", "Umathrelopenspacing", "Umathrelopspacing", "Umathrelordspacing", "Umathrelpunctspacing", "Umathrelrelspacing", "Umathskewedfractionhgap", "Umathskewedfractionvgap", "Umathspaceafterscript", "Umathstackdenomdown", "Umathstacknumup", "Umathstackvgap", "Umathsubshiftdown", "Umathsubshiftdrop", "Umathsubsupshiftdown", "Umathsubsupvgap", "Umathsubtopmax", "Umathsupbottommin", "Umathsupshiftdrop", "Umathsupshiftup", "Umathsupsubbottommax", "Umathunderbarkern", "Umathunderbarrule", "Umathunderbarvgap", "Umathunderdelimiterbgap", "Umathunderdelimitervgap", "Umiddle", "Uoverdelimiter", "Uradical", "Uright", "Uroot", "Uskewed", "Uskewedwithdelims", "Ustack", "Ustartdisplaymath", "Ustartmath", "Ustopdisplaymath", "Ustopmath", "Usubscript", "Usuperscript", "Uunderdelimiter", "Uvextensible", "above", "abovedisplayshortskip", "abovedisplayskip", "abovewithdelims", "accent", "adjdemerits", "advance", "afterassignment", "aftergroup", "alignmark", "aligntab", "atop", "atopwithdelims", "attribute", "attributedef", "badness", "baselineskip", "batchmode", "begingroup", "belowdisplayshortskip", 
"belowdisplayskip", "binoppenalty", "bodydir", "botmark", "botmarks", "boundary", "box", "boxdir", "boxmaxdepth", "brokenpenalty", "catcode", "catcodetable", "char", "chardef", "cleaders", "clearmarks", "closein", "closeout", "clubpenalties", "clubpenalty", "copy", "copyfont", "count", "countdef", "cr", "crampeddisplaystyle", "crampedscriptscriptstyle", "crampedscriptstyle", "crampedtextstyle", "crcr", "csname", "currentgrouplevel", "currentgrouptype", "currentifbranch", "currentiflevel", "currentiftype", "day", "deadcycles", "def", "defaulthyphenchar", "defaultskewchar", "delcode", "delimiter", "delimiterfactor", "delimitershortfall", "detokenize", "dimen", "dimendef", "dimexpr", "directlua", "discretionary", "displayindent", "displaylimits", "displaystyle", "displaywidowpenalties", "displaywidowpenalty", "displaywidth", "divide", "doublehyphendemerits", "dp", "dump", "dviextension", "dvifeedback", "dvivariable", "eTeXVersion", "eTeXminorversion", "eTeXrevision", "eTeXversion", "edef", "efcode", "hjcode", "else", "emergencystretch", "end", "endcsname", "endgroup", "endinput", "endlinechar", "eqno", "errhelp", "errmessage", "errorcontextlines", "errorstopmode", "escapechar", "everycr", "everydisplay", "everyeof", "everyhbox", "everyjob", "everymath", "everypar", "everyvbox", "exhyphenchar", "exhyphenpenalty", "expandafter", "expanded", "expandglyphsinfont", "fam", "fi", "finalhyphendemerits", "firstmark", "firstmarks", "floatingpenalty", "font", "fontchardp", "fontcharht", "fontcharic", "fontcharwd", "fontdimen", "fontid", "fontname", "formatname", "futurelet", "gdef", "gleaders", "global", "globaldefs", "glueexpr", "glueshrink", "glueshrinkorder", "gluestretch", "gluestretchorder", "gluetomu", "halign", "hangafter", "hangindent", "hbadness", "hbox", "hfil", "hfill", "hfilneg", "hfuzz", "hoffset", "holdinginserts", "hrule", "hsize", "hskip", "hss", "ht", "hyphenation", "hyphenchar", "hyphenpenalty", "if", "ifabsdim", "ifabsnum", "ifcase", "ifcat", "ifcsname", "ifdefined", "ifdim", "ifeof", "iffalse", "iffontchar", "ifhbox", "ifhmode", "ifincsname", "ifinner", "ifmmode", "ifnum", "ifodd", "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "ifprimitive", "iftrue", "ifvbox", "ifvmode", "ifvoid", "ifx", "ignoreligaturesinfont", "ignorespaces", "immediate", "indent", "initcatcodetable", "input", "inputlineno", "insert", "insertpenalties", "interactionmode", "interlinepenalties", "interlinepenalty", "jobname", "kern", "language", "lastbox", "lastkern", "lastlinefit", "lastnodetype", "lastpenalty", "lastsavedboxresourceindex", "lastsavedimageresourceindex", "lastsavedimageresourcepages", "lastskip", "lastxpos", "lastypos", "latelua", "lccode", "leaders", "left", "leftghost", "lefthyphenmin", "leftmarginkern", "leftskip", "leqno", "let", "letcharcode", "letterspacefont", "limits", "linepenalty", "lineskip", "lineskiplimit", "localbrokenpenalty", "localinterlinepenalty", "localleftbox", "localrightbox", "long", "looseness", "lower", "lowercase", "lpcode", "luaescapestring", "luatexbanner", "luatexrevision", "luatexversion", "mag", "mark", "marks", "mathaccent", "mathbin", "mathchar", "mathchardef", "mathchoice", "mathclose", "mathcode", "mathdir", "mathdisplayskipmode", "matheqnogapstep", "mathinner", "mathop", "mathopen", "mathoption", "mathord", "mathpunct", "mathrel", "mathscriptsmode", "mathstyle", "mathsurroundskip", "mathsurround", "maxdeadcycles", "maxdepth", "meaning", "medmuskip", "message", "middle", "mkern", "month", "moveleft", "moveright", "mskip", "muexpr", "multiply", "muskip", 
"muskipdef", "mutoglue", "newlinechar", "noalign", "noboundary", "noexpand", "noindent", "nokerns", "nohrule", "noligs", "nospaces", "novrule", "nolimits", "nolocaldirs", "nolocalwhatsits", "nonscript", "nonstopmode", "normaldeviate", "nulldelimiterspace", "nullfont", "number", "numexpr", "odelcode", "odelimiter", "omathaccent", "omathchar", "omathchardef", "omathcode", "omit", "openin", "openout", "or", "oradical", "outer", "output", "outputbox", "outputpenalty", "over", "overfullrule", "overline", "overwithdelims", "pagebottomoffset", "pagedepth", "pagedir", "pagediscards", "pagefilllstretch", "pagefillstretch", "pagefilstretch", "pagegoal", "pageheight", "pageleftoffset", "pagerightoffset", "pageshrink", "pagestretch", "pagetopoffset", "pagetotal", "pagewidth", "par", "pardir", "parfillskip", "parindent", "parshape", "parshapedimen", "parshapeindent", "parshapelength", "parskip", "patterns", "pausing", "pdfadjustspacing", "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit", "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate", "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode", "pdfeachlineheight", "pdfeachlinedepth", "pdfendlink", "pdfendthread", "pdfextension", "pdfvariable", "pdffirstlineheight", "pdffontattr", "pdffontexpand", "pdffontname", "pdffontobjnum", "pdffontsize", "pdfxformmargin", "pdfgamma", "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin", "pdfignoreddimen", "pdfimageaddfilename", "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor", "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts", "pdfinclusionerrorlevel", "pdfinfo", "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink", "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagepages", "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral", "pdfmapfile", "pdfmapline", "pdfminorversion", "pdfnames", "pdfnoligatures", "pdfnormaldeviate", "pdfobj", "pdfobjcompresslevel", "pdfoutline", "pdfoutput", "pdfpageattr", "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources", "pdfpagesattr", "pdfpagewidth", "pdfpkmode", "pdfpkresolution", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen", "pdfrandomseed", "pdfrefobj", "pdfrefxform", "pdfrefximage", "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos", "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread", "pdftexbanner", "pdftexrevision", "pdftexversion", "pdfthread", "pdfthreadmargin", "pdftracingfonts", "pdftrailer", "pdfuniformdeviate", "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr", "pdfxformname", "pdfxformresources", "pdfximage", "penalty", "postdisplaypenalty", "postexhyphenchar", "posthyphenchar", "predisplaydirection", "predisplaypenalty", "predisplaysize", "preexhyphenchar", "prehyphenchar", "pretolerance", "prevdepth", "prevgraf", "primitive", "protected", "pxdimen", "quitvmode", "radical", "raise", "randomseed", "read", "readline", "relax", "relpenalty", "right", "rightghost", "righthyphenmin", "rightmarginkern", "rightskip", "romannumeral", "rpcode", "saveboxresource", "saveimageresource", "savepos", "savecatcodetable", "savinghyphcodes", "savingvdiscards", "scantextokens", "scantokens", "scriptfont", "scriptscriptfont", "scriptscriptstyle", "scriptspace", "scriptstyle", "scrollmode", "setbox", "setfontid", "setlanguage", "setrandomseed", "sfcode", "shipout", "show", "showbox", "showboxbreadth", "showboxdepth", "showgroups", "showifs", "showlists", "showthe", "showtokens", "skewchar", "skip", "skipdef", "spacefactor", "spaceskip", "span", "special", 
"splitbotmark", "splitbotmarks", "splitdiscards", "splitfirstmark", "splitfirstmarks", "splitmaxdepth", "splittopskip", "string", "suppressfontnotfounderror", "suppressifcsnameerror", "suppresslongerror", "suppressoutererror", "synctex", "tabskip", "tagcode", "textdir", "textfont", "textstyle", "the", "thickmuskip", "thinmuskip", "time", "toks", "toksdef", "tolerance", "topmark", "topmarks", "topskip", "tracingassigns", "tracingcommands", "tracingfonts", "tracinggroups", "tracingifs", "tracinglostchars", "tracingmacros", "tracingnesting", "tracingonline", "tracingoutput", "tracingpages", "tracingparagraphs", "tracingrestores", "tracingscantokens", "tracingstats", "uccode", "uchyph", "underline", "unexpanded", "unhbox", "unhcopy", "uniformdeviate", "unkern", "unless", "unpenalty", "unskip", "unvbox", "unvcopy", "uppercase", "useboxresource", "useimageresource", "vadjust", "valign", "vbadness", "vbox", "vcenter", "vfil", "vfill", "vfilneg", "vfuzz", "voffset", "vrule", "vsize", "vskip", "vsplit", "vss", "vtop", "wd", "widowpenalties", "widowpenalty", "write", "xdef", "xleaders", "xspaceskip", "year", "vpack", "hpack", "tpack", "csstring", "begincsname", "lastnamedcs", "toksapp", "tokspre", "etoksapp", "etokspre" },
+ ["pdftex"]={ "efcode", "expanded", "ifincsname", "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "leftmarginkern", "letterspacefont", "lpcode", "pdfadjustspacing", "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit", "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate", "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode", "pdfeachlineheight", "pdfeachlinedepth", "pdfendlink", "pdfendthread", "pdffirstlineheight", "pdffontattr", "pdffontexpand", "pdffontname", "pdffontobjnum", "pdffontsize", "pdfxformmargin", "pdfgamma", "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin", "pdfignoreddimen", "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor", "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts", "pdfignoreunknownimages", "pdfinclusionerrorlevel", "pdfignoreunknownimages", "pdfinfo", "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink", "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagepages", "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral", "pdfmapfile", "pdfmapline", "pdfminorversion", "pdfnames", "pdfnoligatures", "pdfnormaldeviate", "pdfobj", "pdfobjcompresslevel", "pdfoutline", "pdfoutput", "pdfpageattr", "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources", "pdfpagesattr", "pdfpagewidth", "pdfpkmode", "pdfpkresolution", "pdfpkfixeddpi", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen", "pdfrandomseed", "pdfrefobj", "pdfrefxform", "pdfrefximage", "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos", "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread", "pdftexbanner", "pdftexrevision", "pdftexversion", "pdfthread", "pdfthreadmargin", "pdftracingfonts", "pdftrailer", "pdfuniformdeviate", "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr", "pdfxformname", "pdfxformresources", "pdfximage", "quitvmode", "rightmarginkern", "rpcode", "tagcode" },
+ ["tex"]={ "-", "/", "AlephVersion", "Alephminorversion", "Alephrevision", "Alephversion", "OmegaVersion", "Omegaminorversion", "Omegarevision", "Omegaversion", "Udelcode", "Udelcodenum", "Udelimiter", "Udelimiterover", "Udelimiterunder", "Uhextensible", "Uleft", "Umathaccent", "Umathaxis", "Umathbinbinspacing", "Umathbinclosespacing", "Umathbininnerspacing", "Umathbinopenspacing", "Umathbinopspacing", "Umathbinordspacing", "Umathbinpunctspacing", "Umathbinrelspacing", "Umathchar", "Umathcharclass", "Umathchardef", "Umathcharfam", "Umathcharnum", "Umathcharnumdef", "Umathcharslot", "Umathclosebinspacing", "Umathcloseclosespacing", "Umathcloseinnerspacing", "Umathcloseopenspacing", "Umathcloseopspacing", "Umathcloseordspacing", "Umathclosepunctspacing", "Umathcloserelspacing", "Umathcode", "Umathcodenum", "Umathcodenumdef", "Umathconnectoroverlapmin", "Umathfractiondelsize", "Umathfractiondenomdown", "Umathfractiondenomvgap", "Umathfractionnumup", "Umathfractionnumvgap", "Umathfractionrule", "Umathinnerbinspacing", "Umathinnerclosespacing", "Umathinnerinnerspacing", "Umathinneropenspacing", "Umathinneropspacing", "Umathinnerordspacing", "Umathinnerpunctspacing", "Umathinnerrelspacing", "Umathlimitabovebgap", "Umathlimitabovekern", "Umathlimitabovevgap", "Umathlimitbelowbgap", "Umathlimitbelowkern", "Umathlimitbelowvgap", "Umathopbinspacing", "Umathopclosespacing", "Umathopenbinspacing", "Umathopenclosespacing", "Umathopeninnerspacing", "Umathopenopenspacing", "Umathopenopspacing", "Umathopenordspacing", "Umathopenpunctspacing", "Umathopenrelspacing", "Umathoperatorsize", "Umathopinnerspacing", "Umathopopenspacing", "Umathopopspacing", "Umathopordspacing", "Umathoppunctspacing", "Umathoprelspacing", "Umathordbinspacing", "Umathordclosespacing", "Umathordinnerspacing", "Umathordopenspacing", "Umathordopspacing", "Umathordordspacing", "Umathordpunctspacing", "Umathordrelspacing", "Umathoverbarkern", "Umathoverbarrule", "Umathoverbarvgap", "Umathoverdelimiterbgap", "Umathoverdelimitervgap", "Umathpunctbinspacing", "Umathpunctclosespacing", "Umathpunctinnerspacing", "Umathpunctopenspacing", "Umathpunctopspacing", "Umathpunctordspacing", "Umathpunctpunctspacing", "Umathpunctrelspacing", "Umathquad", "Umathradicaldegreeafter", "Umathradicaldegreebefore", "Umathradicaldegreeraise", "Umathradicalkern", "Umathradicalrule", "Umathradicalvgap", "Umathrelbinspacing", "Umathrelclosespacing", "Umathrelinnerspacing", "Umathrelopenspacing", "Umathrelopspacing", "Umathrelordspacing", "Umathrelpunctspacing", "Umathrelrelspacing", "Umathskewedfractionhgap", "Umathskewedfractionvgap", "Umathspaceafterscript", "Umathstackdenomdown", "Umathstacknumup", "Umathstackvgap", "Umathsubshiftdown", "Umathsubshiftdrop", "Umathsubsupshiftdown", "Umathsubsupvgap", "Umathsubtopmax", "Umathsupbottommin", "Umathsupshiftdrop", "Umathsupshiftup", "Umathsupsubbottommax", "Umathunderbarkern", "Umathunderbarrule", "Umathunderbarvgap", "Umathunderdelimiterbgap", "Umathunderdelimitervgap", "Umiddle", "Uoverdelimiter", "Uradical", "Uright", "Uroot", "Uskewed", "Uskewedwithdelims", "Ustack", "Ustartdisplaymath", "Ustartmath", "Ustopdisplaymath", "Ustopmath", "Usubscript", "Usuperscript", "Uunderdelimiter", "Uvextensible", "above", "abovedisplayshortskip", "abovedisplayskip", "abovewithdelims", "accent", "adjdemerits", "advance", "afterassignment", "aftergroup", "alignmark", "aligntab", "atop", "atopwithdelims", "attribute", "attributedef", "badness", "baselineskip", "batchmode", "begingroup", "belowdisplayshortskip", 
"belowdisplayskip", "binoppenalty", "bodydir", "botmark", "botmarks", "boundary", "box", "boxdir", "boxmaxdepth", "brokenpenalty", "catcode", "catcodetable", "char", "chardef", "cleaders", "clearmarks", "closein", "closeout", "clubpenalties", "clubpenalty", "copy", "copyfont", "count", "countdef", "cr", "crampeddisplaystyle", "crampedscriptscriptstyle", "crampedscriptstyle", "crampedtextstyle", "crcr", "csname", "currentgrouplevel", "currentgrouptype", "currentifbranch", "currentiflevel", "currentiftype", "day", "deadcycles", "def", "defaulthyphenchar", "defaultskewchar", "delcode", "delimiter", "delimiterfactor", "delimitershortfall", "detokenize", "dimen", "dimendef", "dimexpr", "directlua", "discretionary", "displayindent", "displaylimits", "displaystyle", "displaywidowpenalties", "displaywidowpenalty", "displaywidth", "divide", "doublehyphendemerits", "dp", "dump", "dviextension", "dvifeedback", "dvivariable", "eTeXVersion", "eTeXminorversion", "eTeXrevision", "eTeXversion", "edef", "efcode", "hjcode", "else", "emergencystretch", "end", "endcsname", "endgroup", "endinput", "endlinechar", "eqno", "errhelp", "errmessage", "errorcontextlines", "errorstopmode", "escapechar", "everycr", "everydisplay", "everyeof", "everyhbox", "everyjob", "everymath", "everypar", "everyvbox", "exhyphenchar", "exhyphenpenalty", "expandafter", "expanded", "expandglyphsinfont", "fam", "fi", "finalhyphendemerits", "firstmark", "firstmarks", "floatingpenalty", "font", "fontchardp", "fontcharht", "fontcharic", "fontcharwd", "fontdimen", "fontid", "fontname", "formatname", "futurelet", "gdef", "gleaders", "global", "globaldefs", "glueexpr", "glueshrink", "glueshrinkorder", "gluestretch", "gluestretchorder", "gluetomu", "halign", "hangafter", "hangindent", "hbadness", "hbox", "hfil", "hfill", "hfilneg", "hfuzz", "hoffset", "holdinginserts", "hrule", "hsize", "hskip", "hss", "ht", "hyphenation", "hyphenchar", "hyphenpenalty", "if", "ifabsdim", "ifabsnum", "ifcase", "ifcat", "ifcsname", "ifdefined", "ifdim", "ifeof", "iffalse", "iffontchar", "ifhbox", "ifhmode", "ifincsname", "ifinner", "ifmmode", "ifnum", "ifodd", "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "ifprimitive", "iftrue", "ifvbox", "ifvmode", "ifvoid", "ifx", "ignoreligaturesinfont", "ignorespaces", "immediate", "indent", "initcatcodetable", "input", "inputlineno", "insert", "insertpenalties", "interactionmode", "interlinepenalties", "interlinepenalty", "jobname", "kern", "language", "lastbox", "lastkern", "lastlinefit", "lastnodetype", "lastpenalty", "lastsavedboxresourceindex", "lastsavedimageresourceindex", "lastsavedimageresourcepages", "lastskip", "lastxpos", "lastypos", "latelua", "lccode", "leaders", "left", "leftghost", "lefthyphenmin", "leftmarginkern", "leftskip", "leqno", "let", "letcharcode", "letterspacefont", "limits", "linepenalty", "lineskip", "lineskiplimit", "localbrokenpenalty", "localinterlinepenalty", "localleftbox", "localrightbox", "long", "looseness", "lower", "lowercase", "lpcode", "luaescapestring", "luatexbanner", "luatexrevision", "luatexversion", "mag", "mark", "marks", "mathaccent", "mathbin", "mathchar", "mathchardef", "mathchoice", "mathclose", "mathcode", "mathdir", "mathdisplayskipmode", "matheqnogapstep", "mathinner", "mathop", "mathopen", "mathoption", "mathord", "mathpunct", "mathrel", "mathscriptsmode", "mathstyle", "mathsurroundskip", "mathsurround", "maxdeadcycles", "maxdepth", "meaning", "medmuskip", "message", "middle", "mkern", "month", "moveleft", "moveright", "mskip", "muexpr", "multiply", "muskip", 
"muskipdef", "mutoglue", "newlinechar", "noalign", "noboundary", "noexpand", "noindent", "nokerns", "nohrule", "noligs", "nospaces", "novrule", "nolimits", "nolocaldirs", "nolocalwhatsits", "nonscript", "nonstopmode", "normaldeviate", "nulldelimiterspace", "nullfont", "number", "numexpr", "odelcode", "odelimiter", "omathaccent", "omathchar", "omathchardef", "omathcode", "omit", "openin", "openout", "or", "oradical", "outer", "output", "outputbox", "outputpenalty", "over", "overfullrule", "overline", "overwithdelims", "pagebottomoffset", "pagedepth", "pagedir", "pagediscards", "pagefilllstretch", "pagefillstretch", "pagefilstretch", "pagegoal", "pageheight", "pageleftoffset", "pagerightoffset", "pageshrink", "pagestretch", "pagetopoffset", "pagetotal", "pagewidth", "par", "pardir", "parfillskip", "parindent", "parshape", "parshapedimen", "parshapeindent", "parshapelength", "parskip", "patterns", "pausing", "pdfadjustspacing", "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit", "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate", "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode", "pdfeachlineheight", "pdfeachlinedepth", "pdfendlink", "pdfendthread", "pdfextension", "pdfvariable", "pdffirstlineheight", "pdffontattr", "pdffontexpand", "pdffontname", "pdffontobjnum", "pdffontsize", "pdfxformmargin", "pdfgamma", "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin", "pdfignoreddimen", "pdfimageaddfilename", "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor", "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts", "pdfinclusionerrorlevel", "pdfignoreunknownimages", "pdfinfo", "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink", "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagepages", "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral", "pdfmapfile", "pdfmapline", "pdfminorversion", "pdfnames", "pdfnoligatures", "pdfnormaldeviate", "pdfobj", "pdfobjcompresslevel", "pdfoutline", "pdfoutput", "pdfpageattr", "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources", "pdfpagesattr", "pdfpagewidth", "pdfpkmode", "pdfpkresolution", "pdfpkfixeddpi", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen", "pdfrandomseed", "pdfrefobj", "pdfrefxform", "pdfrefximage", "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos", "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread", "pdftexbanner", "pdftexrevision", "pdftexversion", "pdfthread", "pdfthreadmargin", "pdftracingfonts", "pdftrailer", "pdfuniformdeviate", "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr", "pdfxformname", "pdfxformresources", "pdfximage", "penalty", "postdisplaypenalty", "postexhyphenchar", "posthyphenchar", "predisplaydirection", "predisplaypenalty", "predisplaysize", "preexhyphenchar", "prehyphenchar", "pretolerance", "prevdepth", "prevgraf", "primitive", "protected", "pxdimen", "quitvmode", "radical", "raise", "randomseed", "read", "readline", "relax", "relpenalty", "right", "rightghost", "righthyphenmin", "rightmarginkern", "rightskip", "romannumeral", "rpcode", "saveboxresource", "saveimageresource", "savepos", "savecatcodetable", "savinghyphcodes", "savingvdiscards", "scantextokens", "scantokens", "scriptfont", "scriptscriptfont", "scriptscriptstyle", "scriptspace", "scriptstyle", "scrollmode", "setbox", "setfontid", "setlanguage", "setrandomseed", "sfcode", "shipout", "show", "showbox", "showboxbreadth", "showboxdepth", "showgroups", "showifs", "showlists", "showthe", "showtokens", "skewchar", "skip", "skipdef", 
"spacefactor", "spaceskip", "span", "special", "splitbotmark", "splitbotmarks", "splitdiscards", "splitfirstmark", "splitfirstmarks", "splitmaxdepth", "splittopskip", "string", "suppressfontnotfounderror", "suppressifcsnameerror", "suppresslongerror", "suppressoutererror", "synctex", "tabskip", "tagcode", "textdir", "textfont", "textstyle", "the", "thickmuskip", "thinmuskip", "time", "toks", "toksdef", "tolerance", "topmark", "topmarks", "topskip", "tracingassigns", "tracingcommands", "tracingfonts", "tracinggroups", "tracingifs", "tracinglostchars", "tracingmacros", "tracingnesting", "tracingonline", "tracingoutput", "tracingpages", "tracingparagraphs", "tracingrestores", "tracingscantokens", "tracingstats", "uccode", "uchyph", "underline", "unexpanded", "unhbox", "unhcopy", "uniformdeviate", "unkern", "unless", "unpenalty", "unskip", "unvbox", "unvcopy", "uppercase", "useboxresource", "useimageresource", "vadjust", "valign", "vbadness", "vbox", "vcenter", "vfil", "vfill", "vfilneg", "vfuzz", "voffset", "vrule", "vsize", "vskip", "vsplit", "vss", "vtop", "wd", "widowpenalties", "widowpenalty", "write", "xdef", "xleaders", "xspaceskip", "year", "vpack", "hpack", "tpack", "csstring", "begincsname", "lastnamedcs", "toksapp", "tokspre", "etoksapp", "etokspre" },
["xetex"]={ "XeTeXversion" },
} \ No newline at end of file
diff --git a/context/data/scite/context/scite-context-data-context.properties b/context/data/scite/context/scite-context-data-context.properties
index 163ee8cf6..3b9c858b7 100644
--- a/context/data/scite/context/scite-context-data-context.properties
+++ b/context/data/scite/context/scite-context-data-context.properties
@@ -64,7 +64,7 @@ righttoleftmark breakablethinspace nobreakspace nonbreakablespace narrownobreaks
zerowidthnobreakspace ideographicspace ideographichalffillspace twoperemspace threeperemspace \
fourperemspace fiveperemspace sixperemspace figurespace punctuationspace \
hairspace zerowidthspace zerowidthnonjoiner zerowidthjoiner zwnj \
-zwj optionalspace asciispacechar
+zwj optionalspace asciispacechar Ux
keywordclass.context.helpers=\
startsetups stopsetups startxmlsetups stopxmlsetups \
@@ -204,7 +204,7 @@ ltrhbox rtlvbox ltrvbox rtlvtop ltrvtop \
autodirhbox autodirvbox autodirvtop leftorrighthbox leftorrightvbox \
leftorrightvtop lefttoright righttoleft synchronizelayoutdirection synchronizedisplaydirection \
synchronizeinlinedirection lesshyphens morehyphens nohyphens dohyphens \
-Ucheckedstartdisplaymath Ucheckedstopdisplaymath nobreak allowbreak goodbreak \
-nospace nospacing dospacing naturalhbox naturalvbox \
-naturalhpack naturalvpack
+Ucheckedstartdisplaymath Ucheckedstopdisplaymath break nobreak allowbreak \
+goodbreak nospace nospacing dospacing naturalhbox \
+naturalvbox naturalhpack naturalvpack
diff --git a/context/data/scite/context/scite-context-data-tex.properties b/context/data/scite/context/scite-context-data-tex.properties
index ea6257e8c..ab2e85d8f 100644
--- a/context/data/scite/context/scite-context-data-tex.properties
+++ b/context/data/scite/context/scite-context-data-tex.properties
@@ -90,21 +90,21 @@ pdfdestmargin pdfdraftmode pdfeachlineheight pdfeachlinedepth pdfendlink \
pdfendthread pdffirstlineheight pdffontattr pdffontexpand pdffontname \
pdffontobjnum pdffontsize pdfxformmargin pdfgamma pdfgentounicode \
pdfglyphtounicode pdfhorigin pdfignoreddimen pdfimageapplygamma pdfimagegamma \
-pdfimagehicolor pdfimageresolution pdfincludechars pdfinclusioncopyfonts pdfinclusionerrorlevel \
-pdfinfo pdfinsertht pdflastannot pdflastlinedepth pdflastlink \
-pdflastobj pdflastxform pdflastximage pdflastximagepages pdflastxpos \
-pdflastypos pdflinkmargin pdfliteral pdfmapfile pdfmapline \
-pdfminorversion pdfnames pdfnoligatures pdfnormaldeviate pdfobj \
-pdfobjcompresslevel pdfoutline pdfoutput pdfpageattr pdfpagebox \
-pdfpageheight pdfpageref pdfpageresources pdfpagesattr pdfpagewidth \
-pdfpkmode pdfpkresolution pdfprimitive pdfprotrudechars pdfpxdimen \
-pdfrandomseed pdfrefobj pdfrefxform pdfrefximage pdfreplacefont \
-pdfrestore pdfretval pdfsave pdfsavepos pdfsetmatrix \
-pdfsetrandomseed pdfstartlink pdfstartthread pdftexbanner pdftexrevision \
-pdftexversion pdfthread pdfthreadmargin pdftracingfonts pdftrailer \
-pdfuniformdeviate pdfuniqueresname pdfvorigin pdfxform pdfxformattr \
-pdfxformname pdfxformresources pdfximage quitvmode rightmarginkern \
-rpcode tagcode
+pdfimagehicolor pdfimageresolution pdfincludechars pdfinclusioncopyfonts pdfignoreunknownimages \
+pdfinclusionerrorlevel pdfignoreunknownimages pdfinfo pdfinsertht pdflastannot \
+pdflastlinedepth pdflastlink pdflastobj pdflastxform pdflastximage \
+pdflastximagepages pdflastxpos pdflastypos pdflinkmargin pdfliteral \
+pdfmapfile pdfmapline pdfminorversion pdfnames pdfnoligatures \
+pdfnormaldeviate pdfobj pdfobjcompresslevel pdfoutline pdfoutput \
+pdfpageattr pdfpagebox pdfpageheight pdfpageref pdfpageresources \
+pdfpagesattr pdfpagewidth pdfpkmode pdfpkresolution pdfpkfixeddpi \
+pdfprimitive pdfprotrudechars pdfpxdimen pdfrandomseed pdfrefobj \
+pdfrefxform pdfrefximage pdfreplacefont pdfrestore pdfretval \
+pdfsave pdfsavepos pdfsetmatrix pdfsetrandomseed pdfstartlink \
+pdfstartthread pdftexbanner pdftexrevision pdftexversion pdfthread \
+pdfthreadmargin pdftracingfonts pdftrailer pdfuniformdeviate pdfuniqueresname \
+pdfvorigin pdfxform pdfxformattr pdfxformname pdfxformresources \
+pdfximage quitvmode rightmarginkern rpcode tagcode
keywordclass.tex.tex=\
- / AlephVersion Alephminorversion \
@@ -215,52 +215,52 @@ pdfendlink pdfendthread pdfextension pdfvariable pdffirstlineheight \
pdffontattr pdffontexpand pdffontname pdffontobjnum pdffontsize \
pdfxformmargin pdfgamma pdfgentounicode pdfglyphtounicode pdfhorigin \
pdfignoreddimen pdfimageaddfilename pdfimageapplygamma pdfimagegamma pdfimagehicolor \
-pdfimageresolution pdfincludechars pdfinclusioncopyfonts pdfinclusionerrorlevel pdfinfo \
-pdfinsertht pdflastannot pdflastlinedepth pdflastlink pdflastobj \
-pdflastxform pdflastximage pdflastximagepages pdflastxpos pdflastypos \
-pdflinkmargin pdfliteral pdfmapfile pdfmapline pdfminorversion \
-pdfnames pdfnoligatures pdfnormaldeviate pdfobj pdfobjcompresslevel \
-pdfoutline pdfoutput pdfpageattr pdfpagebox pdfpageheight \
-pdfpageref pdfpageresources pdfpagesattr pdfpagewidth pdfpkmode \
-pdfpkresolution pdfprimitive pdfprotrudechars pdfpxdimen pdfrandomseed \
-pdfrefobj pdfrefxform pdfrefximage pdfreplacefont pdfrestore \
-pdfretval pdfsave pdfsavepos pdfsetmatrix pdfsetrandomseed \
-pdfstartlink pdfstartthread pdftexbanner pdftexrevision pdftexversion \
-pdfthread pdfthreadmargin pdftracingfonts pdftrailer pdfuniformdeviate \
-pdfuniqueresname pdfvorigin pdfxform pdfxformattr pdfxformname \
-pdfxformresources pdfximage penalty postdisplaypenalty postexhyphenchar \
-posthyphenchar predisplaydirection predisplaypenalty predisplaysize preexhyphenchar \
-prehyphenchar pretolerance prevdepth prevgraf primitive \
-protected pxdimen quitvmode radical raise \
-randomseed read readline relax relpenalty \
-right rightghost righthyphenmin rightmarginkern rightskip \
-romannumeral rpcode saveboxresource saveimageresource savepos \
-savecatcodetable savinghyphcodes savingvdiscards scantextokens scantokens \
-scriptfont scriptscriptfont scriptscriptstyle scriptspace scriptstyle \
-scrollmode setbox setfontid setlanguage setrandomseed \
-sfcode shipout show showbox showboxbreadth \
-showboxdepth showgroups showifs showlists showthe \
-showtokens skewchar skip skipdef spacefactor \
-spaceskip span special splitbotmark splitbotmarks \
-splitdiscards splitfirstmark splitfirstmarks splitmaxdepth splittopskip \
-string suppressfontnotfounderror suppressifcsnameerror suppresslongerror suppressoutererror \
-synctex tabskip tagcode textdir textfont \
-textstyle the thickmuskip thinmuskip time \
-toks toksdef tolerance topmark topmarks \
-topskip tracingassigns tracingcommands tracingfonts tracinggroups \
-tracingifs tracinglostchars tracingmacros tracingnesting tracingonline \
-tracingoutput tracingpages tracingparagraphs tracingrestores tracingscantokens \
-tracingstats uccode uchyph underline unexpanded \
-unhbox unhcopy uniformdeviate unkern unless \
-unpenalty unskip unvbox unvcopy uppercase \
-useboxresource useimageresource vadjust valign vbadness \
-vbox vcenter vfil vfill vfilneg \
-vfuzz voffset vrule vsize vskip \
-vsplit vss vtop wd widowpenalties \
-widowpenalty write xdef xleaders xspaceskip \
-year vpack hpack tpack csstring \
-begincsname lastnamedcs toksapp tokspre etoksapp \
-etokspre
+pdfimageresolution pdfincludechars pdfinclusioncopyfonts pdfinclusionerrorlevel pdfignoreunknownimages \
+pdfinfo pdfinsertht pdflastannot pdflastlinedepth pdflastlink \
+pdflastobj pdflastxform pdflastximage pdflastximagepages pdflastxpos \
+pdflastypos pdflinkmargin pdfliteral pdfmapfile pdfmapline \
+pdfminorversion pdfnames pdfnoligatures pdfnormaldeviate pdfobj \
+pdfobjcompresslevel pdfoutline pdfoutput pdfpageattr pdfpagebox \
+pdfpageheight pdfpageref pdfpageresources pdfpagesattr pdfpagewidth \
+pdfpkmode pdfpkresolution pdfpkfixeddpi pdfprimitive pdfprotrudechars \
+pdfpxdimen pdfrandomseed pdfrefobj pdfrefxform pdfrefximage \
+pdfreplacefont pdfrestore pdfretval pdfsave pdfsavepos \
+pdfsetmatrix pdfsetrandomseed pdfstartlink pdfstartthread pdftexbanner \
+pdftexrevision pdftexversion pdfthread pdfthreadmargin pdftracingfonts \
+pdftrailer pdfuniformdeviate pdfuniqueresname pdfvorigin pdfxform \
+pdfxformattr pdfxformname pdfxformresources pdfximage penalty \
+postdisplaypenalty postexhyphenchar posthyphenchar predisplaydirection predisplaypenalty \
+predisplaysize preexhyphenchar prehyphenchar pretolerance prevdepth \
+prevgraf primitive protected pxdimen quitvmode \
+radical raise randomseed read readline \
+relax relpenalty right rightghost righthyphenmin \
+rightmarginkern rightskip romannumeral rpcode saveboxresource \
+saveimageresource savepos savecatcodetable savinghyphcodes savingvdiscards \
+scantextokens scantokens scriptfont scriptscriptfont scriptscriptstyle \
+scriptspace scriptstyle scrollmode setbox setfontid \
+setlanguage setrandomseed sfcode shipout show \
+showbox showboxbreadth showboxdepth showgroups showifs \
+showlists showthe showtokens skewchar skip \
+skipdef spacefactor spaceskip span special \
+splitbotmark splitbotmarks splitdiscards splitfirstmark splitfirstmarks \
+splitmaxdepth splittopskip string suppressfontnotfounderror suppressifcsnameerror \
+suppresslongerror suppressoutererror synctex tabskip tagcode \
+textdir textfont textstyle the thickmuskip \
+thinmuskip time toks toksdef tolerance \
+topmark topmarks topskip tracingassigns tracingcommands \
+tracingfonts tracinggroups tracingifs tracinglostchars tracingmacros \
+tracingnesting tracingonline tracingoutput tracingpages tracingparagraphs \
+tracingrestores tracingscantokens tracingstats uccode uchyph \
+underline unexpanded unhbox unhcopy uniformdeviate \
+unkern unless unpenalty unskip unvbox \
+unvcopy uppercase useboxresource useimageresource vadjust \
+valign vbadness vbox vcenter vfil \
+vfill vfilneg vfuzz voffset vrule \
+vsize vskip vsplit vss vtop \
+wd widowpenalties widowpenalty write xdef \
+xleaders xspaceskip year vpack hpack \
+tpack csstring begincsname lastnamedcs toksapp \
+tokspre etoksapp etokspre
keywordclass.tex.xetex=\
XeTeXversion
diff --git a/doc/context/documents/general/manuals/luatex.pdf b/doc/context/documents/general/manuals/luatex.pdf
index 30eaefa9c..12e0e4e24 100644
--- a/doc/context/documents/general/manuals/luatex.pdf
+++ b/doc/context/documents/general/manuals/luatex.pdf
Binary files differ
diff --git a/doc/context/sources/general/manuals/luatex/luatex-libraries.tex b/doc/context/sources/general/manuals/luatex/luatex-libraries.tex
index ec401c282..be4052637 100644
--- a/doc/context/sources/general/manuals/luatex/luatex-libraries.tex
+++ b/doc/context/sources/general/manuals/luatex/luatex-libraries.tex
@@ -176,15 +176,16 @@ Like \type {find_font_file}, but for subfont definition files.
\subsubsection{\type {find_pk_file}}
-Like \type {find_font_file}, but for pk bitmap files. The argument \type
-{asked_name} is a bit special in this case. Its form is
+Like \type {find_font_file}, but for pk bitmap files. This callback takes two
+arguments: \type {name} and \type {dpi}. In your callback you can decide to
+look for:
\starttyping
<base res>dpi/<fontname>.<actual res>pk
\stoptyping
-So you may be asked for \type {600dpi/manfnt.720pk}. It is up to you to find a
-\quote {reasonable} bitmap file to go with that specification.
+but other strategies are possible. It is up to you to find a \quote {reasonable}
+bitmap file to go with that specification.
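As a rough sketch (not part of the manual source), a callback following the documented
two-argument form could look as follows; the lookup shown is only one possible strategy
and assumes that base and actual resolution coincide:

\starttyping
callback.register("find_pk_file", function(name, dpi)
    -- e.g. name = "manfnt", dpi = 600  ->  "600dpi/manfnt.600pk"
    local asked = string.format("%ddpi/%s.%dpk", dpi, name, dpi)
    local f = io.open(asked, "rb")
    if f then
        f:close()
        return asked
    end
    -- a real callback would search the usual pk directories instead
end)
\stoptyping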
\subsubsection{\type {find_data_file}}
@@ -2668,6 +2669,13 @@ Once the image is scanned, all the values in the \type {<image>} except \type
{width}, \type {height} and \type {depth}, become frozen, and you cannot change
them any more.
+You can use \type {pdf.setignoreunknownimages(1)} (or at the \TEX\ end the \type
+{\pdfvariable} \type {ignoreunknownimages}) to get around an abort when no known
+image type is found (based on name or preamble). Beware: this will not catch
+invalid images and we cannot rule out side effects. A zero dimension image is
+still included when requested. No special flags are set. A proper workflow will
+not rely on such a catch but make sure that images are valid.
+
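For instance (an illustrative sketch, not taken from the manual source), the tolerant
mode can be switched on from the Lua end before images are scanned:

\starttyping
-- unknown image types no longer abort the run; invalid images are still not caught
pdf.setignoreunknownimages(1)
\stoptyping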
\subsection{\type {img.keys}}
\startfunctioncall
@@ -4379,7 +4387,13 @@ the maximum is~9.
\subsection{\type {pdf.setdecimaldigits} and \type {pdf.getdecimaldigits}}
-These two functions set the accuracy of floats written to the \PDF file.
+These two functions set the accuracy of floats written to the \PDF file. You can
+set any value but the backend will not go below 3 or above 6.
+
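A small usage sketch (illustration only; whether the getter reports the raw or the
effectively used value is not specified here):

\starttyping
pdf.setdecimaldigits(5)
print(pdf.getdecimaldigits()) -- the backend itself stays within the range 3..6
\stoptyping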
+\subsection{\type {pdf.setpkresolution} and \type {pdf.getpkresolution}}
+
+The setter takes two arguments: the resolution and an optional zero or one that
+indicates whether the resolution is fixed. The getter returns these two values.
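For example (a sketch, not part of the commit):

\starttyping
pdf.setpkresolution(600, 1)                     -- 600 dpi, flagged as fixed
local resolution, fixed = pdf.getpkresolution() -- returns both values
\stoptyping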
\subsection{\type {pdf.lastobj}, \type {pdf.lastlink}, \type {pdf.lastannot},
and \type {pdf.retval}}
@@ -4401,7 +4415,7 @@ pdf.setorigin(tex.sp("1in"),tex.sp("1in"))
The counterpart of this function returns two values.
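A sketch of reading the origin back, assuming the counterpart follows the naming of the
other pairs and is \type {pdf.getorigin}:

\starttyping
local h, v = pdf.getorigin() -- two values, in scaled points
\stoptyping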
\subsection{\type {pdf.setlinkmargin}, \type {pdf.getlinkmargin} \type
-{pdf.setdestmargin}, \type {pdf.getdestmargin}} \type {pdf.setthreadmargin},
+{pdf.setdestmargin}, \type {pdf.getdestmargin} \type {pdf.setthreadmargin},
\type {pdf.getthreadmargin} \type {pdf.setxformmargin}, \type
{pdf.getxformmargin}}
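A usage sketch for these pairs (illustration only; the margins are dimensions, here
given in scaled points):

\starttyping
pdf.setlinkmargin(tex.sp("2pt"))
pdf.setdestmargin(tex.sp("2pt"))
pdf.setthreadmargin(tex.sp("2pt"))
pdf.setxformmargin(tex.sp("1pt"))
print(pdf.getlinkmargin())
\stoptyping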
@@ -4978,34 +4992,6 @@ tex.year
\stoptyping
\stoptwocolumns
-% tex.pdfadjustspacing
-% tex.pdfcompresslevel
-% tex.pdfdecimaldigits
-% tex.pdfgamma
-% tex.pdfgentounicode
-% tex.pdfimageapplygamma
-% tex.pdfimagegamma
-% tex.pdfimagehicolor
-% tex.pdfimageresolution
-% tex.pdfinclusionerrorlevel
-% tex.pdfminorversion
-% tex.pdfobjcompresslevel
-% tex.pdfoutput
-% tex.pdfpagebox
-% tex.pdfpkresolution
-% tex.pdfprotrudechars
-% tex.pdftracingfonts
-% tex.pdfuniqueresname
-
-% tex.pdfdestmargin
-% tex.pdflinkmargin
-% tex.pdfthreadmargin
-% tex.pdfxformmargin
-% tex.pdfhorigin
-% tex.pdfvorigin
-
-% tex.pdfpxdimen
-
Read|-|only:
\startthreecolumns
@@ -6385,7 +6371,7 @@ in the input.
The creator function can be used as follows:
\starttyping
-local t = token("relax")
+local t = token.create("relax")
\stoptyping
This gives back a token object that has the properties of the \type {\relax}
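As a quick illustration of the creator function (a sketch, not from the manual; the
field name is an assumption based on the token interface):

\starttyping
local t = token.create("relax")
print(t.cmdname) -- assumed field reporting the primitive's command name
\stoptyping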
diff --git a/doc/context/sources/general/manuals/luatex/luatex-modifications.tex b/doc/context/sources/general/manuals/luatex/luatex-modifications.tex
index 180766816..f0bbe2a26 100644
--- a/doc/context/sources/general/manuals/luatex/luatex-modifications.tex
+++ b/doc/context/sources/general/manuals/luatex/luatex-modifications.tex
@@ -541,8 +541,10 @@ The configuration related registers have become:
\edef\pdfimagehicolor {\pdfvariable imagehicolor}
\edef\pdfimageaddfilename {\pdfvariable imageaddfilename}
\edef\pdfpkresolution {\pdfvariable pkresolution}
+\edef\pdfpkfixeddpi {\pdfvariable pkfixeddpi}
\edef\pdfinclusioncopyfonts {\pdfvariable inclusioncopyfonts}
\edef\pdfinclusionerrorlevel {\pdfvariable inclusionerrorlevel}
+\edef\pdfignoreunknownimages {\pdfvariable ignoreunknownimages}
\edef\pdfgentounicode {\pdfvariable gentounicode}
\edef\pdfpagebox {\pdfvariable pagebox}
\edef\pdfminorversion {\pdfvariable minorversion}
@@ -553,7 +555,7 @@ The configuration related registers have become:
\edef\pdflinkmargin {\pdfvariable linkmargin}
\edef\pdfdestmargin {\pdfvariable destmargin}
\edef\pdfthreadmargin {\pdfvariable threadmargin}
-\edef\pdfformmargin {\pdfvariable formmargin}
+\edef\pdfxformmargin {\pdfvariable xformmargin}
\edef\pdfpagesattr {\pdfvariable pagesattr}
\edef\pdfpageattr {\pdfvariable pageattr}
@@ -586,8 +588,8 @@ the defaults; these are:
\starttyping
\pdfcompresslevel 9
-\pdfobjcompresslevel 1
-\pdfdecimaldigits 4
+\pdfobjcompresslevel 1 % used: (0,9)
+\pdfdecimaldigits 4 % used: (3,6)
\pdfgamma 1000
\pdfimageresolution 71
\pdfimageapplygamma 0
@@ -595,8 +597,10 @@ the defaults; these are:
\pdfimagehicolor 1
\pdfimageaddfilename 1
\pdfpkresolution 72
+\pdfpkfixeddpi 0
\pdfinclusioncopyfonts 0
\pdfinclusionerrorlevel 0
+\pdfignoreunknownimages 0
\pdfgentounicode 0
\pdfpagebox 0
\pdfminorversion 4
diff --git a/metapost/context/base/mpii/mp-tool.mpii b/metapost/context/base/mpii/mp-tool.mpii
index a5bb345a1..f476361c7 100644
--- a/metapost/context/base/mpii/mp-tool.mpii
+++ b/metapost/context/base/mpii/mp-tool.mpii
@@ -2373,7 +2373,7 @@ if unknown darkcyan : color darkcyan ; darkcyan := .625(0,1,1) fi ;
if unknown darkmagenta : color darkmagenta ; darkmagenta := .625(1,0,1) fi ;
if unknown darkyellow : color darkyellow ; darkyellow := .625(1,1,0) fi ;
if unknown darkgray : color darkgray ; darkgray := .625(1,1,1) fi ;
-if unknown lightgray : color lightgray ; lightgray := .875(1,1,1) fi ;
+if unknown lightgray : color lightgray ; lightgray := .850(1,1,1) fi ;
% an improved plain mp macro
diff --git a/metapost/context/base/mpiv/mp-tool.mpiv b/metapost/context/base/mpiv/mp-tool.mpiv
index 13104f17e..eea2e12a3 100644
--- a/metapost/context/base/mpiv/mp-tool.mpiv
+++ b/metapost/context/base/mpiv/mp-tool.mpiv
@@ -2150,7 +2150,7 @@ if unknown darkcyan : color darkcyan ; darkcyan := .625(0,1,1) fi ;
if unknown darkmagenta : color darkmagenta ; darkmagenta := .625(1,0,1) fi ;
if unknown darkyellow : color darkyellow ; darkyellow := .625(1,1,0) fi ;
if unknown darkgray : color darkgray ; darkgray := .625(1,1,1) fi ;
-if unknown lightgray : color lightgray ; lightgray := .875(1,1,1) fi ;
+if unknown lightgray : color lightgray ; lightgray := .850(1,1,1) fi ;
% an improved plain mp macro
diff --git a/scripts/context/lua/mtx-fonts.lua b/scripts/context/lua/mtx-fonts.lua
index ada88e46b..f25e198bc 100644
--- a/scripts/context/lua/mtx-fonts.lua
+++ b/scripts/context/lua/mtx-fonts.lua
@@ -11,7 +11,7 @@ local setargument = environment.setargument
local givenfiles = environment.files
local otfversion = 2.819
-local otlversion = 3.012
+local otlversion = 3.013
local helpinfo = [[
<?xml version="1.0"?>
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index 8f10f004a..0167ac5e8 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -4552,7 +4552,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-unicode"] = package.loaded["l-unicode"] or true
--- original size: 38659, stripped down to: 16287
+-- original size: 38699, stripped down to: 16321
if not modules then modules={} end modules ['l-unicode']={
version=1.001,
@@ -4781,9 +4781,10 @@ if not utf.sub then
end
end
end
-function utf.remapper(mapping,option)
+function utf.remapper(mapping,option,action)
local variant=type(mapping)
if variant=="table" then
+ action=action or mapping
if option=="dynamic" then
local pattern=false
table.setmetatablenewindex(mapping,function(t,k,v) rawset(t,k,v) pattern=false end)
@@ -4792,15 +4793,15 @@ function utf.remapper(mapping,option)
return ""
else
if not pattern then
- pattern=Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ pattern=Cs((tabletopattern(mapping)/action+p_utf8char)^0)
end
return lpegmatch(pattern,str)
end
end
elseif option=="pattern" then
- return Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ return Cs((tabletopattern(mapping)/action+p_utf8char)^0)
else
- local pattern=Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ local pattern=Cs((tabletopattern(mapping)/action+p_utf8char)^0)
return function(str)
if not str or str=="" then
return ""
@@ -5904,7 +5905,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-tab"] = package.loaded["util-tab"] or true
--- original size: 28677, stripped down to: 18633
+-- original size: 28680, stripped down to: 18636
if not modules then modules={} end modules ['util-tab']={
version=1.001,
@@ -6565,7 +6566,7 @@ local function serialize(root,name,specification)
end
table.serialize=serialize
if setinspector then
- setinspector("table",function(v) if type(v)=="table" then print(serialize(v,"table")) return true end end)
+ setinspector("table",function(v) if type(v)=="table" then print(serialize(v,"table",{})) return true end end)
end
@@ -9984,7 +9985,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
--- original size: 47426, stripped down to: 28810
+-- original size: 55622, stripped down to: 34927
if not modules then modules={} end modules ['lxml-tab']={
version=1.001,
@@ -9993,7 +9994,7 @@ if not modules then modules={} end modules ['lxml-tab']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local trace_entities=false trackers.register("xml.entities",function(v) trace_entities=v end)
+local trace_entities=false trackers .register("xml.entities",function(v) trace_entities=v end)
local report_xml=logs and logs.reporter("xml","core") or function(...) print(string.format(...)) end
if lpeg.setmaxstack then lpeg.setmaxstack(1000) end
xml=xml or {}
@@ -10005,6 +10006,7 @@ local utfchar=utf.char
local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
local P,S,R,C,V,C,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.C,lpeg.Cs
local formatters=string.formatters
+do
xml.xmlns=xml.xmlns or {}
local check=P(false)
local parse=check
@@ -10021,24 +10023,68 @@ end
function xml.resolvens(url)
return lpegmatch(parse,lower(url)) or ""
end
+end
local nsremap,resolvens=xml.xmlns,xml.resolvens
-local stack={}
-local top={}
-local dt={}
-local at={}
-local xmlns={}
-local errorstr=nil
-local entities={}
-local strip=false
-local cleanup=false
-local utfize=false
-local resolve=false
-local resolve_predefined=false
-local unify_predefined=false
-local dcache={}
-local hcache={}
-local acache={}
-local mt={}
+local stack,level,top,at,xmlnms,errorstr
+local entities,parameters
+local strip,utfize,resolve,cleanup,resolve_predefined,unify_predefined
+local dcache,hcache,acache
+local mt,dt,nt
+local function preparexmlstate(settings)
+ if settings then
+ stack={}
+ level=0
+ top={}
+ at={}
+ mt={}
+ dt={}
+ nt=0
+ xmlns={}
+ errorstr=nil
+ strip=settings.strip_cm_and_dt
+ utfize=settings.utfize_entities
+ resolve=settings.resolve_entities
+ resolve_predefined=settings.resolve_predefined_entities
+ unify_predefined=settings.unify_predefined_entities
+ cleanup=settings.text_cleanup
+ entities=settings.entities or {}
+ parameters={}
+ reported_at_errors={}
+ dcache={}
+ hcache={}
+ acache={}
+ if utfize==nil then
+ settings.utfize_entities=true
+ utfize=true
+ end
+ if resolve_predefined==nil then
+ settings.resolve_predefined_entities=true
+ resolve_predefined=true
+ end
+ else
+ stack=nil
+ level=nil
+ top=nil
+ at=nil
+ mt=nil
+ dt=nil
+ nt=nil
+ xmlns=nil
+ errorstr=nil
+ strip=nil
+ utfize=nil
+ resolve=nil
+ resolve_predefined=nil
+ unify_predefined=nil
+ cleanup=nil
+ entities=nil
+ parameters=nil
+ reported_at_errors=nil
+ dcache=nil
+ hcache=nil
+ acache=nil
+ end
+end
local function initialize_mt(root)
mt={ __index=root }
end
@@ -10048,8 +10094,9 @@ end
function xml.checkerror(top,toclose)
return ""
end
+local checkns=xml.checkns
local function add_attribute(namespace,tag,value)
- if cleanup and #value>0 then
+ if cleanup and value~="" then
value=cleanup(value)
end
if tag=="xmlns" then
@@ -10058,21 +10105,30 @@ local function add_attribute(namespace,tag,value)
elseif namespace=="" then
at[tag]=value
elseif namespace=="xmlns" then
- xml.checkns(tag,value)
+ checkns(tag,value)
at["xmlns:"..tag]=value
else
at[namespace..":"..tag]=value
end
end
local function add_empty(spacing,namespace,tag)
- if #spacing>0 then
- dt[#dt+1]=spacing
+ if spacing~="" then
+ nt=nt+1
+ dt[nt]=spacing
end
local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
- top=stack[#stack]
+ top=stack[level]
dt=top.dt
- local t={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=top }
- dt[#dt+1]=t
+ nt=#dt+1
+ local t={
+ ns=namespace or "",
+ rn=resolved,
+ tg=tag,
+ at=at,
+ dt={},
+ __p__=top
+ }
+ dt[nt]=t
setmetatable(t,mt)
if at.xmlns then
remove(xmlns)
@@ -10080,23 +10136,35 @@ local function add_empty(spacing,namespace,tag)
at={}
end
local function add_begin(spacing,namespace,tag)
- if #spacing>0 then
- dt[#dt+1]=spacing
+ if spacing~="" then
+ nt=nt+1
+ dt[nt]=spacing
end
local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
- top={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=stack[#stack] }
+ top={
+ ns=namespace or "",
+ rn=resolved,
+ tg=tag,
+ at=at,
+ dt={},
+ __p__=stack[level]
+ }
setmetatable(top,mt)
dt=top.dt
- stack[#stack+1]=top
+ nt=#dt
+ level=level+1
+ stack[level]=top
at={}
end
local function add_end(spacing,namespace,tag)
- if #spacing>0 then
- dt[#dt+1]=spacing
+ if spacing~="" then
+ nt=nt+1
+ dt[nt]=spacing
end
- local toclose=remove(stack)
- top=stack[#stack]
- if #stack<1 then
+ local toclose=stack[level]
+ level=level-1
+ top=stack[level]
+ if level<1 then
errorstr=formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "")
report_xml(errorstr)
elseif toclose.tg~=tag then
@@ -10104,195 +10172,229 @@ local function add_end(spacing,namespace,tag)
report_xml(errorstr)
end
dt=top.dt
- dt[#dt+1]=toclose
+ nt=#dt+1
+ dt[nt]=toclose
if toclose.at.xmlns then
remove(xmlns)
end
end
-local spaceonly=lpegpatterns.whitespace^0*P(-1)
local function add_text(text)
- local n=#dt
- if cleanup and #text>0 then
- if n>0 then
- local s=dt[n]
+ if text=="" then
+ return
+ end
+ if cleanup then
+ if nt>0 then
+ local s=dt[nt]
if type(s)=="string" then
- dt[n]=s..cleanup(text)
+ dt[nt]=s..cleanup(text)
else
- dt[n+1]=cleanup(text)
+ nt=nt+1
+ dt[nt]=cleanup(text)
end
else
+ nt=1
dt[1]=cleanup(text)
end
else
- if n>0 then
- local s=dt[n]
+ if nt>0 then
+ local s=dt[nt]
if type(s)=="string" then
- dt[n]=s..text
+ dt[nt]=s..text
else
- dt[n+1]=text
+ nt=nt+1
+ dt[nt]=text
end
else
+ nt=1
dt[1]=text
end
end
end
local function add_special(what,spacing,text)
- if #spacing>0 then
- dt[#dt+1]=spacing
+ if spacing~="" then
+ nt=nt+1
+ dt[nt]=spacing
end
if strip and (what=="@cm@" or what=="@dt@") then
else
- dt[#dt+1]={ special=true,ns="",tg=what,dt={ text } }
+ nt=nt+1
+ dt[nt]={ special=true,ns="",tg=what,dt={ text } }
end
end
local function set_message(txt)
errorstr="garbage at the end of the file: "..gsub(txt,"([ \n\r\t]*)","")
end
-local reported_attribute_errors={}
local function attribute_value_error(str)
- if not reported_attribute_errors[str] then
+ if not reported_at_errors[str] then
report_xml("invalid attribute value %a",str)
- reported_attribute_errors[str]=true
+ reported_at_errors[str]=true
at._error_=str
end
return str
end
local function attribute_specification_error(str)
- if not reported_attribute_errors[str] then
+ if not reported_at_errors[str] then
report_xml("invalid attribute specification %a",str)
- reported_attribute_errors[str]=true
+ reported_at_errors[str]=true
at._error_=str
end
return str
end
-local badentity="&error;"
-local badentity="&"
-xml.placeholders={
- unknown_dec_entity=function(str) return str=="" and badentity or formatters["&%s;"](str) end,
- unknown_hex_entity=function(str) return formatters["&#x%s;"](str) end,
- unknown_any_entity=function(str) return formatters["&#x%s;"](str) end,
-}
-local placeholders=xml.placeholders
-local function fromhex(s)
- local n=tonumber(s,16)
- if n then
- return utfchar(n)
- else
- return formatters["h:%s"](s),true
- end
-end
-local function fromdec(s)
- local n=tonumber(s)
- if n then
- return utfchar(n)
- else
- return formatters["d:%s"](s),true
- end
-end
-local p_rest=(1-P(";"))^0
-local p_many=P(1)^0
-local p_char=lpegpatterns.utf8character
-local parsedentity=P("&")*(P("#x")*(p_rest/fromhex)+P("#")*(p_rest/fromdec))*P(";")*P(-1)+(P("#x")*(p_many/fromhex)+P("#")*(p_many/fromdec))
-local predefined_unified={
- [38]="&amp;",
- [42]="&quot;",
- [47]="&apos;",
- [74]="&lt;",
- [76]="&gt;",
-}
-local predefined_simplified={
- [38]="&",amp="&",
- [42]='"',quot='"',
- [47]="'",apos="'",
- [74]="<",lt="<",
- [76]=">",gt=">",
-}
-local nofprivates=0xF0000
-local privates_u={
- [ [[&]] ]="&amp;",
- [ [["]] ]="&quot;",
- [ [[']] ]="&apos;",
- [ [[<]] ]="&lt;",
- [ [[>]] ]="&gt;",
-}
-local privates_p={}
-local privates_n={
-}
-local escaped=utf.remapper(privates_u,"dynamic")
-local unprivatized=utf.remapper(privates_p,"dynamic")
-xml.unprivatized=unprivatized
-local function unescaped(s)
- local p=privates_n[s]
- if not p then
- nofprivates=nofprivates+1
- p=utfchar(nofprivates)
- privates_n[s]=p
- s="&"..s..";"
- privates_u[p]=s
- privates_p[p]=s
+local grammar_parsed_text_one
+local grammar_parsed_text_two
+local handle_hex_entity
+local handle_dec_entity
+local handle_any_entity_dtd
+local handle_any_entity_text
+do
+ local badentity="&"
+ xml.placeholders={
+ unknown_dec_entity=function(str) return str=="" and badentity or formatters["&%s;"](str) end,
+ unknown_hex_entity=function(str) return formatters["&#x%s;"](str) end,
+ unknown_any_entity=function(str) return formatters["&#x%s;"](str) end,
+ }
+ local function fromhex(s)
+ local n=tonumber(s,16)
+ if n then
+ return utfchar(n)
+ else
+ return formatters["h:%s"](s),true
+ end
end
- return p
-end
-xml.privatetoken=unescaped
-xml.privatecodes=privates_n
-local function handle_hex_entity(str)
- local h=hcache[str]
- if not h then
- local n=tonumber(str,16)
- h=unify_predefined and predefined_unified[n]
- if h then
- if trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
- end
- elseif utfize then
- h=(n and utfchar(n)) or xml.unknown_hex_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring hex entity &#x%s;",str)
- elseif trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
- end
+ local function fromdec(s)
+ local n=tonumber(s)
+ if n then
+ return utfchar(n)
else
- if trace_entities then
- report_xml("found entity &#x%s;",str)
- end
- h="&#x"..str..";"
+ return formatters["d:%s"](s),true
+ end
+ end
+ local p_rest=(1-P(";"))^0
+ local p_many=P(1)^0
+ local p_char=lpegpatterns.utf8character
+ local parsedentity=P("&#")*(P("x")*(p_rest/fromhex)+(p_rest/fromdec))*P(";")*P(-1)+P ("#")*(P("x")*(p_many/fromhex)+(p_many/fromdec))
+ xml.parsedentitylpeg=parsedentity
+ local predefined_unified={
+ [38]="&amp;",
+ [42]="&quot;",
+ [47]="&apos;",
+ [74]="&lt;",
+ [76]="&gt;",
+ }
+ local predefined_simplified={
+ [38]="&",amp="&",
+ [42]='"',quot='"',
+ [47]="'",apos="'",
+ [74]="<",lt="<",
+ [76]=">",gt=">",
+ }
+ local nofprivates=0xF0000
+ local privates_u={
+ [ [[&]] ]="&amp;",
+ [ [["]] ]="&quot;",
+ [ [[']] ]="&apos;",
+ [ [[<]] ]="&lt;",
+ [ [[>]] ]="&gt;",
+ }
+ local privates_p={
+ }
+ local privates_s={
+ [ [["]] ]="&U+22;",
+ [ [[#]] ]="&U+23;",
+ [ [[$]] ]="&U+24;",
+ [ [[%]] ]="&U+25;",
+ [ [[&]] ]="&U+26;",
+ [ [[']] ]="&U+27;",
+ [ [[<]] ]="&U+3C;",
+ [ [[>]] ]="&U+3E;",
+ [ [[\]] ]="&U+5C;",
+ [ [[{]] ]="&U+7B;",
+ [ [[|]] ]="&U+7C;",
+ [ [[}]] ]="&U+7D;",
+ [ [[~]] ]="&U+7E;",
+ }
+ local privates_n={
+ }
+ local escaped=utf.remapper(privates_u,"dynamic")
+ local unprivatized=utf.remapper(privates_p,"dynamic")
+ local unspecialized=utf.remapper(privates_s,"dynamic")
+ xml.unprivatized=unprivatized
+ xml.unspecialized=unspecialized
+ xml.escaped=escaped
+ local function unescaped(s)
+ local p=privates_n[s]
+ if not p then
+ nofprivates=nofprivates+1
+ p=utfchar(nofprivates)
+ privates_n[s]=p
+ s="&"..s..";"
+ privates_u[p]=s
+ privates_p[p]=s
+ privates_s[p]=s
end
- hcache[str]=h
+ return p
end
- return h
-end
-local function handle_dec_entity(str)
- local d=dcache[str]
- if not d then
- local n=tonumber(str)
- d=unify_predefined and predefined_unified[n]
- if d then
- if trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %a",str,d)
- end
- elseif utfize then
- d=(n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring dec entity &#%s;",str)
- elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ xml.privatetoken=unescaped
+ xml.privatecodes=privates_n
+ xml.specialcodes=privates_s
+ function xml.addspecialcode(key,value)
+  privates_s[key]=value or "&"..key..";"
+ end
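-- Example (illustration only, not part of this patch): register an extra character that
-- xml.unspecialized should translate to its &U+..; form as well.
xml.addspecialcode("^", "&U+5E;")
-- xml.unspecialized("x^2") --> "x&U+5E;2"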
+ handle_hex_entity=function(str)
+ local h=hcache[str]
+ if not h then
+ local n=tonumber(str,16)
+ h=unify_predefined and predefined_unified[n]
+ if h then
+ if trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ elseif utfize then
+ h=(n and utfchar(n)) or xml.unknown_hex_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring hex entity &#x%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ else
+ if trace_entities then
+ report_xml("found entity &#x%s;",str)
+ end
+ h="&#x"..str..";"
end
- else
- if trace_entities then
- report_xml("found entity &#%s;",str)
+ hcache[str]=h
+ end
+ return h
+ end
+ handle_dec_entity=function(str)
+ local d=dcache[str]
+ if not d then
+ local n=tonumber(str)
+ d=unify_predefined and predefined_unified[n]
+ if d then
+ if trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
+ elseif utfize then
+ d=(n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring dec entity &#%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
+ else
+ if trace_entities then
+ report_xml("found entity &#%s;",str)
+ end
+ d="&#"..str..";"
end
- d="&#"..str..";"
+ dcache[str]=d
end
- dcache[str]=d
+ return d
end
- return d
-end
-xml.parsedentitylpeg=parsedentity
-local function handle_any_entity(str)
- if resolve then
- local a=acache[str]
- if not a then
- a=resolve_predefined and predefined_simplified[str]
+ handle_any_entity_dtd=function(str)
+ if resolve then
+ local a=resolve_predefined and predefined_simplified[str]
if a then
if trace_entities then
report_xml("resolving entity &%s; to predefined %a",str,a)
@@ -10335,40 +10437,161 @@ local function handle_any_entity(str)
end
end
end
- acache[str]=a
- elseif trace_entities then
- if not acache[str] then
- report_xml("converting entity &%s; to %a",str,a)
- acache[str]=a
+ return a
+ else
+ local a=acache[str]
+ if not a then
+ a=resolve_predefined and predefined_simplified[str]
+ if a then
+ acache[str]=a
+ if trace_entities then
+ report_xml("entity &%s; becomes %a",str,a)
+ end
+ elseif str=="" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
+ a=badentity
+ acache[str]=a
+ else
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ a=unescaped(str)
+ acache[str]=a
+ end
end
+ return a
end
- return a
- else
- local a=acache[str]
- if not a then
- a=resolve_predefined and predefined_simplified[str]
+ end
+ handle_any_entity_text=function(str)
+ if resolve then
+ local a=resolve_predefined and predefined_simplified[str]
if a then
- acache[str]=a
if trace_entities then
- report_xml("entity &%s; becomes %a",str,a)
- end
- elseif str=="" then
- if trace_entities then
- report_xml("invalid entity &%s;",str)
+ report_xml("resolving entity &%s; to predefined %a",str,a)
end
- a=badentity
- acache[str]=a
else
- if trace_entities then
- report_xml("entity &%s; is made private",str)
+ if type(resolve)=="function" then
+ a=resolve(str,entities) or entities[str]
+ else
+ a=entities[str]
+ end
+ if a then
+ if type(a)=="function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; to function call",str)
+ end
+ a=a(str) or ""
+ end
+ a=lpegmatch(grammar_parsed_text_two,a) or a
+ if type(a)=="number" then
+ return ""
+ else
+ a=lpegmatch(parsedentity,a) or a
+ if trace_entities then
+ report_xml("resolving entity &%s; to internal %a",str,a)
+ end
+ end
+ if trace_entities then
+ report_xml("resolving entity &%s; to internal %a",str,a)
+ end
+ else
+ local unknown_any_entity=placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a=unknown_any_entity(str) or ""
+ end
+ if a then
+ if trace_entities then
+ report_xml("resolving entity &%s; to external %s",str,a)
+ end
+ else
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str=="" then
+ a=badentity
+ else
+ a="&"..str..";"
+ end
+ end
end
- a=unescaped(str)
- acache[str]=a
end
+ return a
+ else
+ local a=acache[str]
+ if not a then
+ a=resolve_predefined and predefined_simplified[str]
+ if a then
+ acache[str]=a
+ if trace_entities then
+ report_xml("entity &%s; becomes %a",str,a)
+ end
+ elseif str=="" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
+ a=badentity
+ acache[str]=a
+ else
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ a=unescaped(str)
+ acache[str]=a
+ end
+ end
+ return a
end
- return a
end
+ local p_rest=(1-P(";"))^1
+ local spec={
+ [0x23]="\\Ux{23}",
+ [0x24]="\\Ux{24}",
+ [0x25]="\\Ux{25}",
+ [0x5C]="\\Ux{5C}",
+ [0x7B]="\\Ux{7B}",
+ [0x7C]="\\Ux{7C}",
+ [0x7D]="\\Ux{7D}",
+ [0x7E]="\\Ux{7E}",
+ }
+ local hash=table.setmetatableindex(spec,function(t,k)
+ local v=utfchar(k)
+ t[k]=v
+ return v
+ end)
+ local function fromuni(s)
+ local n=tonumber(s,16)
+ if n then
+ return hash[n]
+ else
+ return formatters["u:%s"](s),true
+ end
+ end
+ local function fromhex(s)
+ local n=tonumber(s,16)
+ if n then
+ return hash[n]
+ else
+ return formatters["h:%s"](s),true
+ end
+ end
+ local function fromdec(s)
+ local n=tonumber(s)
+ if n then
+ return hash[n]
+ else
+ return formatters["d:%s"](s),true
+ end
+ end
+ local reparsedentity=P("U+")*(p_rest/fromuni)+P("#")*(
+ P("x")*(p_rest/fromhex)+p_rest/fromdec
+ )
+ xml.reparsedentitylpeg=reparsedentity
end
+local escaped=xml.escaped
+local unescaped=xml.unescaped
+local placeholders=xml.placeholders
local function handle_end_entity(str)
report_xml("error in entity, %a found without ending %a",str,";")
return str
@@ -10399,10 +10622,14 @@ local hexentitycontent=R("AF","af","09")^1
local decentitycontent=R("09")^1
local parsedentity=P("#")/""*(
P("x")/""*(hexentitycontent/handle_hex_entity)+(decentitycontent/handle_dec_entity)
- )+(anyentitycontent/handle_any_entity)
+ )+(anyentitycontent/handle_any_entity_dtd)
+local parsedentity_text=P("#")/""*(
+ P("x")/""*(hexentitycontent/handle_hex_entity)+(decentitycontent/handle_dec_entity)
+ )+(anyentitycontent/handle_any_entity_text)
local entity=(ampersand/"")*parsedentity*(semicolon/"")+ampersand*(anyentitycontent/handle_end_entity)
+local entity_text=(ampersand/"")*parsedentity_text*(semicolon/"")+ampersand*(anyentitycontent/handle_end_entity)
local text_unparsed=C((1-open)^1)
-local text_parsed=Cs(((1-open-ampersand)^1+entity)^1)
+local text_parsed=(Cs((1-open-ampersand)^1)/add_text+Cs(entity_text)/add_text)^1
local somespace=space^1
local optionalspace=space^0
local value=(squote*Cs((entity+(1-squote))^0)*squote)+(dquote*Cs((entity+(1-dquote))^0)*dquote)
@@ -10412,7 +10639,7 @@ local wrongvalue=Cs(P(entity+(1-space-endofattributes))^1)/attribute_value_error
local attributevalue=value+wrongvalue
local attribute=(somespace*name*optionalspace*equal*optionalspace*attributevalue)/add_attribute
local attributes=(attribute+somespace^-1*(((1-endofattributes)^1)/attribute_specification_error))^0
-local parsedtext=text_parsed/add_text
+local parsedtext=text_parsed
local unparsedtext=text_unparsed/add_text
local balanced=P { "["*((1-S"[]")+V(1))^0*"]" }
local emptyelement=(spacing*open*name*attributes*optionalspace*slash*close)/add_empty
@@ -10427,21 +10654,52 @@ local endcdata=P("]]")*close
local someinstruction=C((1-endinstruction)^0)
local somecomment=C((1-endcomment )^0)
local somecdata=C((1-endcdata )^0)
-local function normalentity(k,v ) entities[k]=v end
-local function systementity(k,v,n) entities[k]=v end
-local function publicentity(k,v,n) entities[k]=v end
+local function weirdentity(k,v)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","weird",k,v)
+ end
+ parameters[k]=v
+end
+local function normalentity(k,v)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","normal",k,v)
+ end
+ entities[k]=v
+end
+local function systementity(k,v,n)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","system",k,v)
+ end
+ entities[k]=v
+end
+local function publicentity(k,v,n)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","public",k,v)
+ end
+ entities[k]=v
+end
local begindoctype=open*P("!DOCTYPE")
local enddoctype=close
local beginset=P("[")
local endset=P("]")
+local wrdtypename=C((1-somespace-P(";"))^1)
local doctypename=C((1-somespace-close)^0)
local elementdoctype=optionalspace*P("<!ELEMENT")*(1-close)^0*close
local basiccomment=begincomment*((1-endcomment)^0)*endcomment
+local weirdentitytype=P("%")*(somespace*doctypename*somespace*value)/weirdentity
local normalentitytype=(doctypename*somespace*value)/normalentity
local publicentitytype=(doctypename*somespace*P("PUBLIC")*somespace*value)/publicentity
local systementitytype=(doctypename*somespace*P("SYSTEM")*somespace*value*somespace*P("NDATA")*somespace*doctypename)/systementity
-local entitydoctype=optionalspace*P("<!ENTITY")*somespace*(systementitytype+publicentitytype+normalentitytype)*optionalspace*close
-local doctypeset=beginset*optionalspace*P(elementdoctype+entitydoctype+basiccomment+space)^0*optionalspace*endset
+local entitydoctype=optionalspace*P("<!ENTITY")*somespace*(systementitytype+publicentitytype+normalentitytype+weirdentitytype)*optionalspace*close
+local function weirdresolve(s)
+ lpegmatch(entitydoctype,parameters[s])
+end
+local function normalresolve(s)
+ lpegmatch(entitydoctype,entities[s])
+end
+local entityresolve=P("%")*(wrdtypename/weirdresolve )*P(";")+P("&")*(wrdtypename/normalresolve)*P(";")
+entitydoctype=entitydoctype+entityresolve
+local doctypeset=beginset*optionalspace*P(elementdoctype+entitydoctype+entityresolve+basiccomment+space)^0*optionalspace*endset
local definitiondoctype=doctypename*somespace*doctypeset
local publicdoctype=doctypename*somespace*P("PUBLIC")*somespace*value*somespace*value*somespace*doctypeset
local systemdoctype=doctypename*somespace*P("SYSTEM")*somespace*value*somespace*doctypeset
@@ -10453,11 +10711,15 @@ local cdata=(spacing*begincdata*somecdata*endcdata )/function(...) add_special
local doctype=(spacing*begindoctype*somedoctype*enddoctype )/function(...) add_special("@dt@",...) end
local crap_parsed=1-beginelement-endelement-emptyelement-begininstruction-begincomment-begincdata-ampersand
local crap_unparsed=1-beginelement-endelement-emptyelement-begininstruction-begincomment-begincdata
-local parsedcrap=Cs((crap_parsed^1+entity)^1)/handle_crap_error
-local unparsedcrap=Cs((crap_unparsed )^1)/handle_crap_error
+local parsedcrap=Cs((crap_parsed^1+entity_text)^1)/handle_crap_error
+local parsedcrap=Cs((crap_parsed^1+entity_text)^1)/handle_crap_error
+local unparsedcrap=Cs((crap_unparsed )^1)/handle_crap_error
local trailer=space^0*(text_unparsed/set_message)^0
-local grammar_parsed_text=P { "preamble",
- preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
+grammar_parsed_text_one=P { "preamble",
+ preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0,
+}
+grammar_parsed_text_two=P { "followup",
+ followup=V("parent")*trailer,
parent=beginelement*V("children")^0*endelement,
children=parsedtext+V("parent")+emptyelement+comment+cdata+instruction+parsedcrap,
}
@@ -10467,37 +10729,26 @@ local grammar_unparsed_text=P { "preamble",
children=unparsedtext+V("parent")+emptyelement+comment+cdata+instruction+unparsedcrap,
}
local function _xmlconvert_(data,settings)
- settings=settings or {}
- strip=settings.strip_cm_and_dt
- utfize=settings.utfize_entities
- resolve=settings.resolve_entities
- resolve_predefined=settings.resolve_predefined_entities
- unify_predefined=settings.unify_predefined_entities
- cleanup=settings.text_cleanup
- entities=settings.entities or {}
- if utfize==nil then
- settings.utfize_entities=true
- utfize=true
- end
- if resolve_predefined==nil then
- settings.resolve_predefined_entities=true
- resolve_predefined=true
- end
- stack,top,at,xmlns,errorstr={},{},{},{},nil
- acache,hcache,dcache={},{},{}
- reported_attribute_errors={}
+ settings=settings or {}
+ preparexmlstate(settings)
if settings.parent_root then
mt=getmetatable(settings.parent_root)
else
initialize_mt(top)
end
- stack[#stack+1]=top
+ level=level+1
+ stack[level]=top
top.dt={}
dt=top.dt
+ nt=0
if not data or data=="" then
errorstr="empty xml file"
elseif utfize or resolve then
- if lpegmatch(grammar_parsed_text,data) then
+ local m=lpegmatch(grammar_parsed_text_one,data)
+ if m then
+ m=lpegmatch(grammar_parsed_text_two,data,m)
+ end
+ if m then
else
errorstr="invalid xml file - parsed text"
end
@@ -10513,8 +10764,8 @@ local function _xmlconvert_(data,settings)
local result
if errorstr and errorstr~="" then
result={ dt={ { ns="",tg="error",dt={ errorstr },at={},er=true } } }
-setmetatable(result,mt)
-setmetatable(result.dt[1],mt)
+ setmetatable(result,mt)
+ setmetatable(result.dt[1],mt)
setmetatable(stack,mt)
local errorhandler=settings.error_handler
if errorhandler==false then
@@ -10556,13 +10807,10 @@ setmetatable(result.dt[1],mt)
decimals=dcache,
hexadecimals=hcache,
names=acache,
+ intermediates=parameters,
}
}
- strip,utfize,resolve,resolve_predefined=nil,nil,nil,nil
- unify_predefined,cleanup,entities=nil,nil,nil
- stack,top,at,xmlns,errorstr=nil,nil,nil,nil,nil
- acache,hcache,dcache=nil,nil,nil
- reported_attribute_errors,mt,errorhandler=nil,nil,nil
+ preparexmlstate()
return result
end
local function xmlconvert(data,settings)
@@ -10624,15 +10872,15 @@ function xml.toxml(data)
return data
end
end
-local function copy(old,tables)
+local function copy(old)
if old then
- tables=tables or {}
local new={}
- if not tables[old] then
- tables[old]=new
- end
for k,v in next,old do
- new[k]=(type(v)=="table" and (tables[v] or copy(v,tables))) or v
+ if type(v)=="table" then
+ new[k]=table.copy(v)
+ else
+ new[k]=v
+ end
end
local mt=getmetatable(old)
if mt then
@@ -10731,7 +10979,7 @@ local function verbose_cdata(e,handlers)
handlers.handle("<![CDATA[",e.dt[1],"]]>")
end
local function verbose_doctype(e,handlers)
- handlers.handle("<!DOCTYPE ",e.dt[1],">")
+ handlers.handle("<!DOCTYPE",e.dt[1],">")
end
local function verbose_root(e,handlers)
handlers.serialize(e.dt,handlers)
@@ -11013,7 +11261,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-lpt"] = package.loaded["lxml-lpt"] or true
--- original size: 50461, stripped down to: 31497
+-- original size: 51229, stripped down to: 31529
if not modules then modules={} end modules ['lxml-lpt']={
version=1.001,
@@ -11390,13 +11638,27 @@ local lp_noequal=P("!=")/"~="+P("<=")+P(">=")+P("==")
local lp_doequal=P("=")/"=="
local lp_or=P("|")/" or "
local lp_and=P("&")/" and "
-local lp_builtin=P (
- P("text")/"(ll.dt[1] or '')"+
- P("content")/"ll.dt"+
- P("name")/"((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)"+P("tag")/"ll.tg"+P("position")/"l"+
- P("firstindex")/"1"+P("lastindex")/"(#ll.__p__.dt or 1)"+P("firstelement")/"1"+P("lastelement")/"(ll.__p__.en or 1)"+P("first")/"1"+P("last")/"#list"+P("rootposition")/"order"+P("order")/"order"+P("element")/"(ll.ei or 1)"+P("index")/"(ll.ni or 1)"+P("match")/"(ll.mi or 1)"+
- P("ns")/"ll.ns"
- )*((spaces*P("(")*spaces*P(")"))/"")
+local builtin={
+ text="(ll.dt[1] or '')",
+ content="ll.dt",
+ name="((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)",
+ tag="ll.tg",
+ position="l",
+ firstindex="1",
+ firstelement="1",
+ first="1",
+ lastindex="(#ll.__p__.dt or 1)",
+ lastelement="(ll.__p__.en or 1)",
+ last="#list",
+ rootposition="order",
+ order="order",
+ element="(ll.ei or 1)",
+ index="(ll.ni or 1)",
+ match="(ll.mi or 1)",
+ namespace="ll.ns",
+ ns="ll.ns",
+}
+local lp_builtin=lpeg.utfchartabletopattern(builtin)/builtin*((spaces*P("(")*spaces*P(")"))/"")
local lp_attribute=(P("@")+P("attribute::"))/""*Cc("(ll.at and ll.at['")*((R("az","AZ")+S("-_:"))^1)*Cc("'])")
local lp_fastpos_p=P("+")^0*R("09")^1*P(-1)/"l==%0"
local lp_fastpos_n=P("-")*R("09")^1*P(-1)/"(%0<0 and (#list+%0==l))"
@@ -14196,7 +14458,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-env"] = package.loaded["data-env"] or true
--- original size: 9518, stripped down to: 7037
+-- original size: 9649, stripped down to: 7131
if not modules then modules={} end modules ['data-env']={
version=1.001,
@@ -14332,6 +14594,11 @@ local relations=allocate {
names={ 'fontconfig','fontconfig file','fontconfig files' },
variable='FONTCONFIG_PATH',
},
+ pk={
+ names={ "pk" },
+ variable='PKFONTS',
+ suffixes={ 'pk' },
+ },
},
obsolete={
enc={
@@ -14970,7 +15237,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-res"] = package.loaded["data-res"] or true
--- original size: 67003, stripped down to: 46291
+-- original size: 67241, stripped down to: 46427
if not modules then modules={} end modules ['data-res']={
version=1.001,
@@ -16240,10 +16507,18 @@ local function findfiles(filename,filetype,allresults)
return result or {},status
end
function resolvers.findfiles(filename,filetype)
- return findfiles(filename,filetype,true)
+ if not filename or filename=="" then
+ return ""
+ else
+ return findfiles(filename,filetype,true)
+ end
end
function resolvers.findfile(filename,filetype)
- return findfiles(filename,filetype,false)[1] or ""
+ if not filename or filename=="" then
+ return ""
+ else
+ return findfiles(filename,filetype,false)[1] or ""
+ end
end
function resolvers.findpath(filename,filetype)
return filedirname(findfiles(filename,filetype,false)[1] or "")
@@ -18363,8 +18638,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sac.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 782445
--- stripped bytes : 283493
+-- original bytes : 791821
+-- stripped bytes : 286453
-- end library merge
diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua
index 8f10f004a..0167ac5e8 100644
--- a/scripts/context/stubs/mswin/mtxrun.lua
+++ b/scripts/context/stubs/mswin/mtxrun.lua
@@ -4552,7 +4552,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-unicode"] = package.loaded["l-unicode"] or true
--- original size: 38659, stripped down to: 16287
+-- original size: 38699, stripped down to: 16321
if not modules then modules={} end modules ['l-unicode']={
version=1.001,
@@ -4781,9 +4781,10 @@ if not utf.sub then
end
end
end
-function utf.remapper(mapping,option)
+function utf.remapper(mapping,option,action)
local variant=type(mapping)
if variant=="table" then
+ action=action or mapping
if option=="dynamic" then
local pattern=false
table.setmetatablenewindex(mapping,function(t,k,v) rawset(t,k,v) pattern=false end)
@@ -4792,15 +4793,15 @@ function utf.remapper(mapping,option)
return ""
else
if not pattern then
- pattern=Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ pattern=Cs((tabletopattern(mapping)/action+p_utf8char)^0)
end
return lpegmatch(pattern,str)
end
end
elseif option=="pattern" then
- return Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ return Cs((tabletopattern(mapping)/action+p_utf8char)^0)
else
- local pattern=Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ local pattern=Cs((tabletopattern(mapping)/action+p_utf8char)^0)
return function(str)
if not str or str=="" then
return ""
@@ -5904,7 +5905,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-tab"] = package.loaded["util-tab"] or true
--- original size: 28677, stripped down to: 18633
+-- original size: 28680, stripped down to: 18636
if not modules then modules={} end modules ['util-tab']={
version=1.001,
@@ -6565,7 +6566,7 @@ local function serialize(root,name,specification)
end
table.serialize=serialize
if setinspector then
- setinspector("table",function(v) if type(v)=="table" then print(serialize(v,"table")) return true end end)
+ setinspector("table",function(v) if type(v)=="table" then print(serialize(v,"table",{})) return true end end)
end
@@ -9984,7 +9985,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
--- original size: 47426, stripped down to: 28810
+-- original size: 55622, stripped down to: 34927
if not modules then modules={} end modules ['lxml-tab']={
version=1.001,
@@ -9993,7 +9994,7 @@ if not modules then modules={} end modules ['lxml-tab']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local trace_entities=false trackers.register("xml.entities",function(v) trace_entities=v end)
+local trace_entities=false trackers .register("xml.entities",function(v) trace_entities=v end)
local report_xml=logs and logs.reporter("xml","core") or function(...) print(string.format(...)) end
if lpeg.setmaxstack then lpeg.setmaxstack(1000) end
xml=xml or {}
@@ -10005,6 +10006,7 @@ local utfchar=utf.char
local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
local P,S,R,C,V,C,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.C,lpeg.Cs
local formatters=string.formatters
+do
xml.xmlns=xml.xmlns or {}
local check=P(false)
local parse=check
@@ -10021,24 +10023,68 @@ end
function xml.resolvens(url)
return lpegmatch(parse,lower(url)) or ""
end
+end
local nsremap,resolvens=xml.xmlns,xml.resolvens
-local stack={}
-local top={}
-local dt={}
-local at={}
-local xmlns={}
-local errorstr=nil
-local entities={}
-local strip=false
-local cleanup=false
-local utfize=false
-local resolve=false
-local resolve_predefined=false
-local unify_predefined=false
-local dcache={}
-local hcache={}
-local acache={}
-local mt={}
+local stack,level,top,at,xmlnms,errorstr
+local entities,parameters
+local strip,utfize,resolve,cleanup,resolve_predefined,unify_predefined
+local dcache,hcache,acache
+local mt,dt,nt
+local function preparexmlstate(settings)
+ if settings then
+ stack={}
+ level=0
+ top={}
+ at={}
+ mt={}
+ dt={}
+ nt=0
+ xmlns={}
+ errorstr=nil
+ strip=settings.strip_cm_and_dt
+ utfize=settings.utfize_entities
+ resolve=settings.resolve_entities
+ resolve_predefined=settings.resolve_predefined_entities
+ unify_predefined=settings.unify_predefined_entities
+ cleanup=settings.text_cleanup
+ entities=settings.entities or {}
+ parameters={}
+ reported_at_errors={}
+ dcache={}
+ hcache={}
+ acache={}
+ if utfize==nil then
+ settings.utfize_entities=true
+ utfize=true
+ end
+ if resolve_predefined==nil then
+ settings.resolve_predefined_entities=true
+ resolve_predefined=true
+ end
+ else
+ stack=nil
+ level=nil
+ top=nil
+ at=nil
+ mt=nil
+ dt=nil
+ nt=nil
+ xmlns=nil
+ errorstr=nil
+ strip=nil
+ utfize=nil
+ resolve=nil
+ resolve_predefined=nil
+ unify_predefined=nil
+ cleanup=nil
+ entities=nil
+ parameters=nil
+ reported_at_errors=nil
+ dcache=nil
+ hcache=nil
+ acache=nil
+ end
+end
local function initialize_mt(root)
mt={ __index=root }
end
@@ -10048,8 +10094,9 @@ end
function xml.checkerror(top,toclose)
return ""
end
+local checkns=xml.checkns
local function add_attribute(namespace,tag,value)
- if cleanup and #value>0 then
+ if cleanup and value~="" then
value=cleanup(value)
end
if tag=="xmlns" then
@@ -10058,21 +10105,30 @@ local function add_attribute(namespace,tag,value)
elseif namespace=="" then
at[tag]=value
elseif namespace=="xmlns" then
- xml.checkns(tag,value)
+ checkns(tag,value)
at["xmlns:"..tag]=value
else
at[namespace..":"..tag]=value
end
end
local function add_empty(spacing,namespace,tag)
- if #spacing>0 then
- dt[#dt+1]=spacing
+ if spacing~="" then
+ nt=nt+1
+ dt[nt]=spacing
end
local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
- top=stack[#stack]
+ top=stack[level]
dt=top.dt
- local t={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=top }
- dt[#dt+1]=t
+ nt=#dt+1
+ local t={
+ ns=namespace or "",
+ rn=resolved,
+ tg=tag,
+ at=at,
+ dt={},
+ __p__=top
+ }
+ dt[nt]=t
setmetatable(t,mt)
if at.xmlns then
remove(xmlns)
@@ -10080,23 +10136,35 @@ local function add_empty(spacing,namespace,tag)
at={}
end
local function add_begin(spacing,namespace,tag)
- if #spacing>0 then
- dt[#dt+1]=spacing
+ if spacing~="" then
+ nt=nt+1
+ dt[nt]=spacing
end
local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
- top={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=stack[#stack] }
+ top={
+ ns=namespace or "",
+ rn=resolved,
+ tg=tag,
+ at=at,
+ dt={},
+ __p__=stack[level]
+ }
setmetatable(top,mt)
dt=top.dt
- stack[#stack+1]=top
+ nt=#dt
+ level=level+1
+ stack[level]=top
at={}
end
local function add_end(spacing,namespace,tag)
- if #spacing>0 then
- dt[#dt+1]=spacing
+ if spacing~="" then
+ nt=nt+1
+ dt[nt]=spacing
end
- local toclose=remove(stack)
- top=stack[#stack]
- if #stack<1 then
+ local toclose=stack[level]
+ level=level-1
+ top=stack[level]
+ if level<1 then
errorstr=formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "")
report_xml(errorstr)
elseif toclose.tg~=tag then
@@ -10104,195 +10172,229 @@ local function add_end(spacing,namespace,tag)
report_xml(errorstr)
end
dt=top.dt
- dt[#dt+1]=toclose
+ nt=#dt+1
+ dt[nt]=toclose
if toclose.at.xmlns then
remove(xmlns)
end
end
-local spaceonly=lpegpatterns.whitespace^0*P(-1)
local function add_text(text)
- local n=#dt
- if cleanup and #text>0 then
- if n>0 then
- local s=dt[n]
+ if text=="" then
+ return
+ end
+ if cleanup then
+ if nt>0 then
+ local s=dt[nt]
if type(s)=="string" then
- dt[n]=s..cleanup(text)
+ dt[nt]=s..cleanup(text)
else
- dt[n+1]=cleanup(text)
+ nt=nt+1
+ dt[nt]=cleanup(text)
end
else
+ nt=1
dt[1]=cleanup(text)
end
else
- if n>0 then
- local s=dt[n]
+ if nt>0 then
+ local s=dt[nt]
if type(s)=="string" then
- dt[n]=s..text
+ dt[nt]=s..text
else
- dt[n+1]=text
+ nt=nt+1
+ dt[nt]=text
end
else
+ nt=1
dt[1]=text
end
end
end
local function add_special(what,spacing,text)
- if #spacing>0 then
- dt[#dt+1]=spacing
+ if spacing~="" then
+ nt=nt+1
+ dt[nt]=spacing
end
if strip and (what=="@cm@" or what=="@dt@") then
else
- dt[#dt+1]={ special=true,ns="",tg=what,dt={ text } }
+ nt=nt+1
+ dt[nt]={ special=true,ns="",tg=what,dt={ text } }
end
end
local function set_message(txt)
errorstr="garbage at the end of the file: "..gsub(txt,"([ \n\r\t]*)","")
end
-local reported_attribute_errors={}
local function attribute_value_error(str)
- if not reported_attribute_errors[str] then
+ if not reported_at_errors[str] then
report_xml("invalid attribute value %a",str)
- reported_attribute_errors[str]=true
+ reported_at_errors[str]=true
at._error_=str
end
return str
end
local function attribute_specification_error(str)
- if not reported_attribute_errors[str] then
+ if not reported_at_errors[str] then
report_xml("invalid attribute specification %a",str)
- reported_attribute_errors[str]=true
+ reported_at_errors[str]=true
at._error_=str
end
return str
end
-local badentity="&error;"
-local badentity="&"
-xml.placeholders={
- unknown_dec_entity=function(str) return str=="" and badentity or formatters["&%s;"](str) end,
- unknown_hex_entity=function(str) return formatters["&#x%s;"](str) end,
- unknown_any_entity=function(str) return formatters["&#x%s;"](str) end,
-}
-local placeholders=xml.placeholders
-local function fromhex(s)
- local n=tonumber(s,16)
- if n then
- return utfchar(n)
- else
- return formatters["h:%s"](s),true
- end
-end
-local function fromdec(s)
- local n=tonumber(s)
- if n then
- return utfchar(n)
- else
- return formatters["d:%s"](s),true
- end
-end
-local p_rest=(1-P(";"))^0
-local p_many=P(1)^0
-local p_char=lpegpatterns.utf8character
-local parsedentity=P("&")*(P("#x")*(p_rest/fromhex)+P("#")*(p_rest/fromdec))*P(";")*P(-1)+(P("#x")*(p_many/fromhex)+P("#")*(p_many/fromdec))
-local predefined_unified={
- [38]="&amp;",
- [42]="&quot;",
- [47]="&apos;",
- [74]="&lt;",
- [76]="&gt;",
-}
-local predefined_simplified={
- [38]="&",amp="&",
- [42]='"',quot='"',
- [47]="'",apos="'",
- [74]="<",lt="<",
- [76]=">",gt=">",
-}
-local nofprivates=0xF0000
-local privates_u={
- [ [[&]] ]="&amp;",
- [ [["]] ]="&quot;",
- [ [[']] ]="&apos;",
- [ [[<]] ]="&lt;",
- [ [[>]] ]="&gt;",
-}
-local privates_p={}
-local privates_n={
-}
-local escaped=utf.remapper(privates_u,"dynamic")
-local unprivatized=utf.remapper(privates_p,"dynamic")
-xml.unprivatized=unprivatized
-local function unescaped(s)
- local p=privates_n[s]
- if not p then
- nofprivates=nofprivates+1
- p=utfchar(nofprivates)
- privates_n[s]=p
- s="&"..s..";"
- privates_u[p]=s
- privates_p[p]=s
+local grammar_parsed_text_one
+local grammar_parsed_text_two
+local handle_hex_entity
+local handle_dec_entity
+local handle_any_entity_dtd
+local handle_any_entity_text
+do
+ local badentity="&"
+ xml.placeholders={
+ unknown_dec_entity=function(str) return str=="" and badentity or formatters["&%s;"](str) end,
+ unknown_hex_entity=function(str) return formatters["&#x%s;"](str) end,
+ unknown_any_entity=function(str) return formatters["&#x%s;"](str) end,
+ }
+ local function fromhex(s)
+ local n=tonumber(s,16)
+ if n then
+ return utfchar(n)
+ else
+ return formatters["h:%s"](s),true
+ end
end
- return p
-end
-xml.privatetoken=unescaped
-xml.privatecodes=privates_n
-local function handle_hex_entity(str)
- local h=hcache[str]
- if not h then
- local n=tonumber(str,16)
- h=unify_predefined and predefined_unified[n]
- if h then
- if trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
- end
- elseif utfize then
- h=(n and utfchar(n)) or xml.unknown_hex_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring hex entity &#x%s;",str)
- elseif trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
- end
+ local function fromdec(s)
+ local n=tonumber(s)
+ if n then
+ return utfchar(n)
else
- if trace_entities then
- report_xml("found entity &#x%s;",str)
- end
- h="&#x"..str..";"
+ return formatters["d:%s"](s),true
+ end
+ end
+ local p_rest=(1-P(";"))^0
+ local p_many=P(1)^0
+ local p_char=lpegpatterns.utf8character
+ local parsedentity=P("&#")*(P("x")*(p_rest/fromhex)+(p_rest/fromdec))*P(";")*P(-1)+P ("#")*(P("x")*(p_many/fromhex)+(p_many/fromdec))
+ xml.parsedentitylpeg=parsedentity
+ local predefined_unified={
+ [38]="&amp;",
+ [42]="&quot;",
+ [47]="&apos;",
+ [74]="&lt;",
+ [76]="&gt;",
+ }
+ local predefined_simplified={
+ [38]="&",amp="&",
+ [42]='"',quot='"',
+ [47]="'",apos="'",
+ [74]="<",lt="<",
+ [76]=">",gt=">",
+ }
+ local nofprivates=0xF0000
+ local privates_u={
+ [ [[&]] ]="&amp;",
+ [ [["]] ]="&quot;",
+ [ [[']] ]="&apos;",
+ [ [[<]] ]="&lt;",
+ [ [[>]] ]="&gt;",
+ }
+ local privates_p={
+ }
+ local privates_s={
+ [ [["]] ]="&U+22;",
+ [ [[#]] ]="&U+23;",
+ [ [[$]] ]="&U+24;",
+ [ [[%]] ]="&U+25;",
+ [ [[&]] ]="&U+26;",
+ [ [[']] ]="&U+27;",
+ [ [[<]] ]="&U+3C;",
+ [ [[>]] ]="&U+3E;",
+ [ [[\]] ]="&U+5C;",
+ [ [[{]] ]="&U+7B;",
+ [ [[|]] ]="&U+7C;",
+ [ [[}]] ]="&U+7D;",
+ [ [[~]] ]="&U+7E;",
+ }
+ local privates_n={
+ }
+ local escaped=utf.remapper(privates_u,"dynamic")
+ local unprivatized=utf.remapper(privates_p,"dynamic")
+ local unspecialized=utf.remapper(privates_s,"dynamic")
+ xml.unprivatized=unprivatized
+ xml.unspecialized=unspecialized
+ xml.escaped=escaped
+ local function unescaped(s)
+ local p=privates_n[s]
+ if not p then
+ nofprivates=nofprivates+1
+ p=utfchar(nofprivates)
+ privates_n[s]=p
+ s="&"..s..";"
+ privates_u[p]=s
+ privates_p[p]=s
+ privates_s[p]=s
end
- hcache[str]=h
+ return p
end
- return h
-end
-local function handle_dec_entity(str)
- local d=dcache[str]
- if not d then
- local n=tonumber(str)
- d=unify_predefined and predefined_unified[n]
- if d then
- if trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %a",str,d)
- end
- elseif utfize then
- d=(n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring dec entity &#%s;",str)
- elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ xml.privatetoken=unescaped
+ xml.privatecodes=privates_n
+ xml.specialcodes=privates_s
+ function xml.addspecialcode(key,value)
+ privates_s[key]=value or "&"..key..";"
+ end
+ handle_hex_entity=function(str)
+ local h=hcache[str]
+ if not h then
+ local n=tonumber(str,16)
+ h=unify_predefined and predefined_unified[n]
+ if h then
+ if trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ elseif utfize then
+ h=(n and utfchar(n)) or xml.unknown_hex_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring hex entity &#x%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ else
+ if trace_entities then
+ report_xml("found entity &#x%s;",str)
+ end
+ h="&#x"..str..";"
end
- else
- if trace_entities then
- report_xml("found entity &#%s;",str)
+ hcache[str]=h
+ end
+ return h
+ end
+ handle_dec_entity=function(str)
+ local d=dcache[str]
+ if not d then
+ local n=tonumber(str)
+ d=unify_predefined and predefined_unified[n]
+ if d then
+ if trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
+ elseif utfize then
+ d=(n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring dec entity &#%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
+ else
+ if trace_entities then
+ report_xml("found entity &#%s;",str)
+ end
+ d="&#"..str..";"
end
- d="&#"..str..";"
+ dcache[str]=d
end
- dcache[str]=d
+ return d
end
- return d
-end
-xml.parsedentitylpeg=parsedentity
-local function handle_any_entity(str)
- if resolve then
- local a=acache[str]
- if not a then
- a=resolve_predefined and predefined_simplified[str]
+ handle_any_entity_dtd=function(str)
+ if resolve then
+ local a=resolve_predefined and predefined_simplified[str]
if a then
if trace_entities then
report_xml("resolving entity &%s; to predefined %a",str,a)
@@ -10335,40 +10437,161 @@ local function handle_any_entity(str)
end
end
end
- acache[str]=a
- elseif trace_entities then
- if not acache[str] then
- report_xml("converting entity &%s; to %a",str,a)
- acache[str]=a
+ return a
+ else
+ local a=acache[str]
+ if not a then
+ a=resolve_predefined and predefined_simplified[str]
+ if a then
+ acache[str]=a
+ if trace_entities then
+ report_xml("entity &%s; becomes %a",str,a)
+ end
+ elseif str=="" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
+ a=badentity
+ acache[str]=a
+ else
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ a=unescaped(str)
+ acache[str]=a
+ end
end
+ return a
end
- return a
- else
- local a=acache[str]
- if not a then
- a=resolve_predefined and predefined_simplified[str]
+ end
+ handle_any_entity_text=function(str)
+ if resolve then
+ local a=resolve_predefined and predefined_simplified[str]
if a then
- acache[str]=a
if trace_entities then
- report_xml("entity &%s; becomes %a",str,a)
- end
- elseif str=="" then
- if trace_entities then
- report_xml("invalid entity &%s;",str)
+ report_xml("resolving entity &%s; to predefined %a",str,a)
end
- a=badentity
- acache[str]=a
else
- if trace_entities then
- report_xml("entity &%s; is made private",str)
+ if type(resolve)=="function" then
+ a=resolve(str,entities) or entities[str]
+ else
+ a=entities[str]
+ end
+ if a then
+ if type(a)=="function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; to function call",str)
+ end
+ a=a(str) or ""
+ end
+ a=lpegmatch(grammar_parsed_text_two,a) or a
+ if type(a)=="number" then
+ return ""
+ else
+ a=lpegmatch(parsedentity,a) or a
+ if trace_entities then
+ report_xml("resolving entity &%s; to internal %a",str,a)
+ end
+ end
+ if trace_entities then
+ report_xml("resolving entity &%s; to internal %a",str,a)
+ end
+ else
+ local unknown_any_entity=placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a=unknown_any_entity(str) or ""
+ end
+ if a then
+ if trace_entities then
+ report_xml("resolving entity &%s; to external %s",str,a)
+ end
+ else
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str=="" then
+ a=badentity
+ else
+ a="&"..str..";"
+ end
+ end
end
- a=unescaped(str)
- acache[str]=a
end
+ return a
+ else
+ local a=acache[str]
+ if not a then
+ a=resolve_predefined and predefined_simplified[str]
+ if a then
+ acache[str]=a
+ if trace_entities then
+ report_xml("entity &%s; becomes %a",str,a)
+ end
+ elseif str=="" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
+ a=badentity
+ acache[str]=a
+ else
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ a=unescaped(str)
+ acache[str]=a
+ end
+ end
+ return a
end
- return a
end
+ local p_rest=(1-P(";"))^1
+ local spec={
+ [0x23]="\\Ux{23}",
+ [0x24]="\\Ux{24}",
+ [0x25]="\\Ux{25}",
+ [0x5C]="\\Ux{5C}",
+ [0x7B]="\\Ux{7B}",
+ [0x7C]="\\Ux{7C}",
+ [0x7D]="\\Ux{7D}",
+ [0x7E]="\\Ux{7E}",
+ }
+ local hash=table.setmetatableindex(spec,function(t,k)
+ local v=utfchar(k)
+ t[k]=v
+ return v
+ end)
+ local function fromuni(s)
+ local n=tonumber(s,16)
+ if n then
+ return hash[n]
+ else
+ return formatters["u:%s"](s),true
+ end
+ end
+ local function fromhex(s)
+ local n=tonumber(s,16)
+ if n then
+ return hash[n]
+ else
+ return formatters["h:%s"](s),true
+ end
+ end
+ local function fromdec(s)
+ local n=tonumber(s)
+ if n then
+ return hash[n]
+ else
+ return formatters["d:%s"](s),true
+ end
+ end
+ local reparsedentity=P("U+")*(p_rest/fromuni)+P("#")*(
+ P("x")*(p_rest/fromhex)+p_rest/fromdec
+ )
+ xml.reparsedentitylpeg=reparsedentity
end
+local escaped=xml.escaped
+local unescaped=xml.unescaped
+local placeholders=xml.placeholders
local function handle_end_entity(str)
report_xml("error in entity, %a found without ending %a",str,";")
return str
@@ -10399,10 +10622,14 @@ local hexentitycontent=R("AF","af","09")^1
local decentitycontent=R("09")^1
local parsedentity=P("#")/""*(
P("x")/""*(hexentitycontent/handle_hex_entity)+(decentitycontent/handle_dec_entity)
- )+(anyentitycontent/handle_any_entity)
+ )+(anyentitycontent/handle_any_entity_dtd)
+local parsedentity_text=P("#")/""*(
+ P("x")/""*(hexentitycontent/handle_hex_entity)+(decentitycontent/handle_dec_entity)
+ )+(anyentitycontent/handle_any_entity_text)
local entity=(ampersand/"")*parsedentity*(semicolon/"")+ampersand*(anyentitycontent/handle_end_entity)
+local entity_text=(ampersand/"")*parsedentity_text*(semicolon/"")+ampersand*(anyentitycontent/handle_end_entity)
local text_unparsed=C((1-open)^1)
-local text_parsed=Cs(((1-open-ampersand)^1+entity)^1)
+local text_parsed=(Cs((1-open-ampersand)^1)/add_text+Cs(entity_text)/add_text)^1
local somespace=space^1
local optionalspace=space^0
local value=(squote*Cs((entity+(1-squote))^0)*squote)+(dquote*Cs((entity+(1-dquote))^0)*dquote)
@@ -10412,7 +10639,7 @@ local wrongvalue=Cs(P(entity+(1-space-endofattributes))^1)/attribute_value_error
local attributevalue=value+wrongvalue
local attribute=(somespace*name*optionalspace*equal*optionalspace*attributevalue)/add_attribute
local attributes=(attribute+somespace^-1*(((1-endofattributes)^1)/attribute_specification_error))^0
-local parsedtext=text_parsed/add_text
+local parsedtext=text_parsed
local unparsedtext=text_unparsed/add_text
local balanced=P { "["*((1-S"[]")+V(1))^0*"]" }
local emptyelement=(spacing*open*name*attributes*optionalspace*slash*close)/add_empty
@@ -10427,21 +10654,52 @@ local endcdata=P("]]")*close
local someinstruction=C((1-endinstruction)^0)
local somecomment=C((1-endcomment )^0)
local somecdata=C((1-endcdata )^0)
-local function normalentity(k,v ) entities[k]=v end
-local function systementity(k,v,n) entities[k]=v end
-local function publicentity(k,v,n) entities[k]=v end
+local function weirdentity(k,v)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","weird",k,v)
+ end
+ parameters[k]=v
+end
+local function normalentity(k,v)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","normal",k,v)
+ end
+ entities[k]=v
+end
+local function systementity(k,v,n)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","system",k,v)
+ end
+ entities[k]=v
+end
+local function publicentity(k,v,n)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","public",k,v)
+ end
+ entities[k]=v
+end
local begindoctype=open*P("!DOCTYPE")
local enddoctype=close
local beginset=P("[")
local endset=P("]")
+local wrdtypename=C((1-somespace-P(";"))^1)
local doctypename=C((1-somespace-close)^0)
local elementdoctype=optionalspace*P("<!ELEMENT")*(1-close)^0*close
local basiccomment=begincomment*((1-endcomment)^0)*endcomment
+local weirdentitytype=P("%")*(somespace*doctypename*somespace*value)/weirdentity
local normalentitytype=(doctypename*somespace*value)/normalentity
local publicentitytype=(doctypename*somespace*P("PUBLIC")*somespace*value)/publicentity
local systementitytype=(doctypename*somespace*P("SYSTEM")*somespace*value*somespace*P("NDATA")*somespace*doctypename)/systementity
-local entitydoctype=optionalspace*P("<!ENTITY")*somespace*(systementitytype+publicentitytype+normalentitytype)*optionalspace*close
-local doctypeset=beginset*optionalspace*P(elementdoctype+entitydoctype+basiccomment+space)^0*optionalspace*endset
+local entitydoctype=optionalspace*P("<!ENTITY")*somespace*(systementitytype+publicentitytype+normalentitytype+weirdentitytype)*optionalspace*close
+local function weirdresolve(s)
+ lpegmatch(entitydoctype,parameters[s])
+end
+local function normalresolve(s)
+ lpegmatch(entitydoctype,entities[s])
+end
+local entityresolve=P("%")*(wrdtypename/weirdresolve )*P(";")+P("&")*(wrdtypename/normalresolve)*P(";")
+entitydoctype=entitydoctype+entityresolve
+local doctypeset=beginset*optionalspace*P(elementdoctype+entitydoctype+entityresolve+basiccomment+space)^0*optionalspace*endset
local definitiondoctype=doctypename*somespace*doctypeset
local publicdoctype=doctypename*somespace*P("PUBLIC")*somespace*value*somespace*value*somespace*doctypeset
local systemdoctype=doctypename*somespace*P("SYSTEM")*somespace*value*somespace*doctypeset
@@ -10453,11 +10711,15 @@ local cdata=(spacing*begincdata*somecdata*endcdata )/function(...) add_special
local doctype=(spacing*begindoctype*somedoctype*enddoctype )/function(...) add_special("@dt@",...) end
local crap_parsed=1-beginelement-endelement-emptyelement-begininstruction-begincomment-begincdata-ampersand
local crap_unparsed=1-beginelement-endelement-emptyelement-begininstruction-begincomment-begincdata
-local parsedcrap=Cs((crap_parsed^1+entity)^1)/handle_crap_error
-local unparsedcrap=Cs((crap_unparsed )^1)/handle_crap_error
+local parsedcrap=Cs((crap_parsed^1+entity_text)^1)/handle_crap_error
+local parsedcrap=Cs((crap_parsed^1+entity_text)^1)/handle_crap_error
+local unparsedcrap=Cs((crap_unparsed )^1)/handle_crap_error
local trailer=space^0*(text_unparsed/set_message)^0
-local grammar_parsed_text=P { "preamble",
- preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
+grammar_parsed_text_one=P { "preamble",
+ preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0,
+}
+grammar_parsed_text_two=P { "followup",
+ followup=V("parent")*trailer,
parent=beginelement*V("children")^0*endelement,
children=parsedtext+V("parent")+emptyelement+comment+cdata+instruction+parsedcrap,
}
@@ -10467,37 +10729,26 @@ local grammar_unparsed_text=P { "preamble",
children=unparsedtext+V("parent")+emptyelement+comment+cdata+instruction+unparsedcrap,
}
local function _xmlconvert_(data,settings)
- settings=settings or {}
- strip=settings.strip_cm_and_dt
- utfize=settings.utfize_entities
- resolve=settings.resolve_entities
- resolve_predefined=settings.resolve_predefined_entities
- unify_predefined=settings.unify_predefined_entities
- cleanup=settings.text_cleanup
- entities=settings.entities or {}
- if utfize==nil then
- settings.utfize_entities=true
- utfize=true
- end
- if resolve_predefined==nil then
- settings.resolve_predefined_entities=true
- resolve_predefined=true
- end
- stack,top,at,xmlns,errorstr={},{},{},{},nil
- acache,hcache,dcache={},{},{}
- reported_attribute_errors={}
+ settings=settings or {}
+ preparexmlstate(settings)
if settings.parent_root then
mt=getmetatable(settings.parent_root)
else
initialize_mt(top)
end
- stack[#stack+1]=top
+ level=level+1
+ stack[level]=top
top.dt={}
dt=top.dt
+ nt=0
if not data or data=="" then
errorstr="empty xml file"
elseif utfize or resolve then
- if lpegmatch(grammar_parsed_text,data) then
+ local m=lpegmatch(grammar_parsed_text_one,data)
+ if m then
+ m=lpegmatch(grammar_parsed_text_two,data,m)
+ end
+ if m then
else
errorstr="invalid xml file - parsed text"
end
@@ -10513,8 +10764,8 @@ local function _xmlconvert_(data,settings)
local result
if errorstr and errorstr~="" then
result={ dt={ { ns="",tg="error",dt={ errorstr },at={},er=true } } }
-setmetatable(result,mt)
-setmetatable(result.dt[1],mt)
+ setmetatable(result,mt)
+ setmetatable(result.dt[1],mt)
setmetatable(stack,mt)
local errorhandler=settings.error_handler
if errorhandler==false then
@@ -10556,13 +10807,10 @@ setmetatable(result.dt[1],mt)
decimals=dcache,
hexadecimals=hcache,
names=acache,
+ intermediates=parameters,
}
}
- strip,utfize,resolve,resolve_predefined=nil,nil,nil,nil
- unify_predefined,cleanup,entities=nil,nil,nil
- stack,top,at,xmlns,errorstr=nil,nil,nil,nil,nil
- acache,hcache,dcache=nil,nil,nil
- reported_attribute_errors,mt,errorhandler=nil,nil,nil
+ preparexmlstate()
return result
end
local function xmlconvert(data,settings)
@@ -10624,15 +10872,15 @@ function xml.toxml(data)
return data
end
end
-local function copy(old,tables)
+local function copy(old)
if old then
- tables=tables or {}
local new={}
- if not tables[old] then
- tables[old]=new
- end
for k,v in next,old do
- new[k]=(type(v)=="table" and (tables[v] or copy(v,tables))) or v
+ if type(v)=="table" then
+ new[k]=table.copy(v)
+ else
+ new[k]=v
+ end
end
local mt=getmetatable(old)
if mt then
@@ -10731,7 +10979,7 @@ local function verbose_cdata(e,handlers)
handlers.handle("<![CDATA[",e.dt[1],"]]>")
end
local function verbose_doctype(e,handlers)
- handlers.handle("<!DOCTYPE ",e.dt[1],">")
+ handlers.handle("<!DOCTYPE",e.dt[1],">")
end
local function verbose_root(e,handlers)
handlers.serialize(e.dt,handlers)
@@ -11013,7 +11261,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-lpt"] = package.loaded["lxml-lpt"] or true
--- original size: 50461, stripped down to: 31497
+-- original size: 51229, stripped down to: 31529
if not modules then modules={} end modules ['lxml-lpt']={
version=1.001,
@@ -11390,13 +11638,27 @@ local lp_noequal=P("!=")/"~="+P("<=")+P(">=")+P("==")
local lp_doequal=P("=")/"=="
local lp_or=P("|")/" or "
local lp_and=P("&")/" and "
-local lp_builtin=P (
- P("text")/"(ll.dt[1] or '')"+
- P("content")/"ll.dt"+
- P("name")/"((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)"+P("tag")/"ll.tg"+P("position")/"l"+
- P("firstindex")/"1"+P("lastindex")/"(#ll.__p__.dt or 1)"+P("firstelement")/"1"+P("lastelement")/"(ll.__p__.en or 1)"+P("first")/"1"+P("last")/"#list"+P("rootposition")/"order"+P("order")/"order"+P("element")/"(ll.ei or 1)"+P("index")/"(ll.ni or 1)"+P("match")/"(ll.mi or 1)"+
- P("ns")/"ll.ns"
- )*((spaces*P("(")*spaces*P(")"))/"")
+local builtin={
+ text="(ll.dt[1] or '')",
+ content="ll.dt",
+ name="((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)",
+ tag="ll.tg",
+ position="l",
+ firstindex="1",
+ firstelement="1",
+ first="1",
+ lastindex="(#ll.__p__.dt or 1)",
+ lastelement="(ll.__p__.en or 1)",
+ last="#list",
+ rootposition="order",
+ order="order",
+ element="(ll.ei or 1)",
+ index="(ll.ni or 1)",
+ match="(ll.mi or 1)",
+ namespace="ll.ns",
+ ns="ll.ns",
+}
+local lp_builtin=lpeg.utfchartabletopattern(builtin)/builtin*((spaces*P("(")*spaces*P(")"))/"")
local lp_attribute=(P("@")+P("attribute::"))/""*Cc("(ll.at and ll.at['")*((R("az","AZ")+S("-_:"))^1)*Cc("'])")
local lp_fastpos_p=P("+")^0*R("09")^1*P(-1)/"l==%0"
local lp_fastpos_n=P("-")*R("09")^1*P(-1)/"(%0<0 and (#list+%0==l))"
@@ -14196,7 +14458,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-env"] = package.loaded["data-env"] or true
--- original size: 9518, stripped down to: 7037
+-- original size: 9649, stripped down to: 7131
if not modules then modules={} end modules ['data-env']={
version=1.001,
@@ -14332,6 +14594,11 @@ local relations=allocate {
names={ 'fontconfig','fontconfig file','fontconfig files' },
variable='FONTCONFIG_PATH',
},
+ pk={
+ names={ "pk" },
+ variable='PKFONTS',
+ suffixes={ 'pk' },
+ },
},
obsolete={
enc={
@@ -14970,7 +15237,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-res"] = package.loaded["data-res"] or true
--- original size: 67003, stripped down to: 46291
+-- original size: 67241, stripped down to: 46427
if not modules then modules={} end modules ['data-res']={
version=1.001,
@@ -16240,10 +16507,18 @@ local function findfiles(filename,filetype,allresults)
return result or {},status
end
function resolvers.findfiles(filename,filetype)
- return findfiles(filename,filetype,true)
+ if not filename or filename=="" then
+ return ""
+ else
+ return findfiles(filename,filetype,true)
+ end
end
function resolvers.findfile(filename,filetype)
- return findfiles(filename,filetype,false)[1] or ""
+ if not filename or filename=="" then
+ return ""
+ else
+ return findfiles(filename,filetype,false)[1] or ""
+ end
end
function resolvers.findpath(filename,filetype)
return filedirname(findfiles(filename,filetype,false)[1] or "")
@@ -18363,8 +18638,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sac.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 782445
--- stripped bytes : 283493
+-- original bytes : 791821
+-- stripped bytes : 286453
-- end library merge
diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun
index 8f10f004a..0167ac5e8 100644
--- a/scripts/context/stubs/unix/mtxrun
+++ b/scripts/context/stubs/unix/mtxrun
@@ -4552,7 +4552,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-unicode"] = package.loaded["l-unicode"] or true
--- original size: 38659, stripped down to: 16287
+-- original size: 38699, stripped down to: 16321
if not modules then modules={} end modules ['l-unicode']={
version=1.001,
@@ -4781,9 +4781,10 @@ if not utf.sub then
end
end
end
-function utf.remapper(mapping,option)
+function utf.remapper(mapping,option,action)
local variant=type(mapping)
if variant=="table" then
+ action=action or mapping
if option=="dynamic" then
local pattern=false
table.setmetatablenewindex(mapping,function(t,k,v) rawset(t,k,v) pattern=false end)
@@ -4792,15 +4793,15 @@ function utf.remapper(mapping,option)
return ""
else
if not pattern then
- pattern=Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ pattern=Cs((tabletopattern(mapping)/action+p_utf8char)^0)
end
return lpegmatch(pattern,str)
end
end
elseif option=="pattern" then
- return Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ return Cs((tabletopattern(mapping)/action+p_utf8char)^0)
else
- local pattern=Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ local pattern=Cs((tabletopattern(mapping)/action+p_utf8char)^0)
return function(str)
if not str or str=="" then
return ""
@@ -5904,7 +5905,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-tab"] = package.loaded["util-tab"] or true
--- original size: 28677, stripped down to: 18633
+-- original size: 28680, stripped down to: 18636
if not modules then modules={} end modules ['util-tab']={
version=1.001,
@@ -6565,7 +6566,7 @@ local function serialize(root,name,specification)
end
table.serialize=serialize
if setinspector then
- setinspector("table",function(v) if type(v)=="table" then print(serialize(v,"table")) return true end end)
+ setinspector("table",function(v) if type(v)=="table" then print(serialize(v,"table",{})) return true end end)
end
@@ -9984,7 +9985,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
--- original size: 47426, stripped down to: 28810
+-- original size: 55622, stripped down to: 34927
if not modules then modules={} end modules ['lxml-tab']={
version=1.001,
@@ -9993,7 +9994,7 @@ if not modules then modules={} end modules ['lxml-tab']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local trace_entities=false trackers.register("xml.entities",function(v) trace_entities=v end)
+local trace_entities=false trackers .register("xml.entities",function(v) trace_entities=v end)
local report_xml=logs and logs.reporter("xml","core") or function(...) print(string.format(...)) end
if lpeg.setmaxstack then lpeg.setmaxstack(1000) end
xml=xml or {}
@@ -10005,6 +10006,7 @@ local utfchar=utf.char
local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
local P,S,R,C,V,C,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.C,lpeg.Cs
local formatters=string.formatters
+do
xml.xmlns=xml.xmlns or {}
local check=P(false)
local parse=check
@@ -10021,24 +10023,68 @@ end
function xml.resolvens(url)
return lpegmatch(parse,lower(url)) or ""
end
+end
local nsremap,resolvens=xml.xmlns,xml.resolvens
-local stack={}
-local top={}
-local dt={}
-local at={}
-local xmlns={}
-local errorstr=nil
-local entities={}
-local strip=false
-local cleanup=false
-local utfize=false
-local resolve=false
-local resolve_predefined=false
-local unify_predefined=false
-local dcache={}
-local hcache={}
-local acache={}
-local mt={}
+local stack,level,top,at,xmlnms,errorstr
+local entities,parameters
+local strip,utfize,resolve,cleanup,resolve_predefined,unify_predefined
+local dcache,hcache,acache
+local mt,dt,nt
+local function preparexmlstate(settings)
+ if settings then
+ stack={}
+ level=0
+ top={}
+ at={}
+ mt={}
+ dt={}
+ nt=0
+ xmlns={}
+ errorstr=nil
+ strip=settings.strip_cm_and_dt
+ utfize=settings.utfize_entities
+ resolve=settings.resolve_entities
+ resolve_predefined=settings.resolve_predefined_entities
+ unify_predefined=settings.unify_predefined_entities
+ cleanup=settings.text_cleanup
+ entities=settings.entities or {}
+ parameters={}
+ reported_at_errors={}
+ dcache={}
+ hcache={}
+ acache={}
+ if utfize==nil then
+ settings.utfize_entities=true
+ utfize=true
+ end
+ if resolve_predefined==nil then
+ settings.resolve_predefined_entities=true
+ resolve_predefined=true
+ end
+ else
+ stack=nil
+ level=nil
+ top=nil
+ at=nil
+ mt=nil
+ dt=nil
+ nt=nil
+ xmlns=nil
+ errorstr=nil
+ strip=nil
+ utfize=nil
+ resolve=nil
+ resolve_predefined=nil
+ unify_predefined=nil
+ cleanup=nil
+ entities=nil
+ parameters=nil
+ reported_at_errors=nil
+ dcache=nil
+ hcache=nil
+ acache=nil
+ end
+end
local function initialize_mt(root)
mt={ __index=root }
end
@@ -10048,8 +10094,9 @@ end
function xml.checkerror(top,toclose)
return ""
end
+local checkns=xml.checkns
local function add_attribute(namespace,tag,value)
- if cleanup and #value>0 then
+ if cleanup and value~="" then
value=cleanup(value)
end
if tag=="xmlns" then
@@ -10058,21 +10105,30 @@ local function add_attribute(namespace,tag,value)
elseif namespace=="" then
at[tag]=value
elseif namespace=="xmlns" then
- xml.checkns(tag,value)
+ checkns(tag,value)
at["xmlns:"..tag]=value
else
at[namespace..":"..tag]=value
end
end
local function add_empty(spacing,namespace,tag)
- if #spacing>0 then
- dt[#dt+1]=spacing
+ if spacing~="" then
+ nt=nt+1
+ dt[nt]=spacing
end
local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
- top=stack[#stack]
+ top=stack[level]
dt=top.dt
- local t={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=top }
- dt[#dt+1]=t
+ nt=#dt+1
+ local t={
+ ns=namespace or "",
+ rn=resolved,
+ tg=tag,
+ at=at,
+ dt={},
+ __p__=top
+ }
+ dt[nt]=t
setmetatable(t,mt)
if at.xmlns then
remove(xmlns)
@@ -10080,23 +10136,35 @@ local function add_empty(spacing,namespace,tag)
at={}
end
local function add_begin(spacing,namespace,tag)
- if #spacing>0 then
- dt[#dt+1]=spacing
+ if spacing~="" then
+ nt=nt+1
+ dt[nt]=spacing
end
local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
- top={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=stack[#stack] }
+ top={
+ ns=namespace or "",
+ rn=resolved,
+ tg=tag,
+ at=at,
+ dt={},
+ __p__=stack[level]
+ }
setmetatable(top,mt)
dt=top.dt
- stack[#stack+1]=top
+ nt=#dt
+ level=level+1
+ stack[level]=top
at={}
end
local function add_end(spacing,namespace,tag)
- if #spacing>0 then
- dt[#dt+1]=spacing
+ if spacing~="" then
+ nt=nt+1
+ dt[nt]=spacing
end
- local toclose=remove(stack)
- top=stack[#stack]
- if #stack<1 then
+ local toclose=stack[level]
+ level=level-1
+ top=stack[level]
+ if level<1 then
errorstr=formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "")
report_xml(errorstr)
elseif toclose.tg~=tag then
@@ -10104,195 +10172,229 @@ local function add_end(spacing,namespace,tag)
report_xml(errorstr)
end
dt=top.dt
- dt[#dt+1]=toclose
+ nt=#dt+1
+ dt[nt]=toclose
if toclose.at.xmlns then
remove(xmlns)
end
end
-local spaceonly=lpegpatterns.whitespace^0*P(-1)
local function add_text(text)
- local n=#dt
- if cleanup and #text>0 then
- if n>0 then
- local s=dt[n]
+ if text=="" then
+ return
+ end
+ if cleanup then
+ if nt>0 then
+ local s=dt[nt]
if type(s)=="string" then
- dt[n]=s..cleanup(text)
+ dt[nt]=s..cleanup(text)
else
- dt[n+1]=cleanup(text)
+ nt=nt+1
+ dt[nt]=cleanup(text)
end
else
+ nt=1
dt[1]=cleanup(text)
end
else
- if n>0 then
- local s=dt[n]
+ if nt>0 then
+ local s=dt[nt]
if type(s)=="string" then
- dt[n]=s..text
+ dt[nt]=s..text
else
- dt[n+1]=text
+ nt=nt+1
+ dt[nt]=text
end
else
+ nt=1
dt[1]=text
end
end
end
local function add_special(what,spacing,text)
- if #spacing>0 then
- dt[#dt+1]=spacing
+ if spacing~="" then
+ nt=nt+1
+ dt[nt]=spacing
end
if strip and (what=="@cm@" or what=="@dt@") then
else
- dt[#dt+1]={ special=true,ns="",tg=what,dt={ text } }
+ nt=nt+1
+ dt[nt]={ special=true,ns="",tg=what,dt={ text } }
end
end
local function set_message(txt)
errorstr="garbage at the end of the file: "..gsub(txt,"([ \n\r\t]*)","")
end
-local reported_attribute_errors={}
local function attribute_value_error(str)
- if not reported_attribute_errors[str] then
+ if not reported_at_errors[str] then
report_xml("invalid attribute value %a",str)
- reported_attribute_errors[str]=true
+ reported_at_errors[str]=true
at._error_=str
end
return str
end
local function attribute_specification_error(str)
- if not reported_attribute_errors[str] then
+ if not reported_at_errors[str] then
report_xml("invalid attribute specification %a",str)
- reported_attribute_errors[str]=true
+ reported_at_errors[str]=true
at._error_=str
end
return str
end
-local badentity="&error;"
-local badentity="&"
-xml.placeholders={
- unknown_dec_entity=function(str) return str=="" and badentity or formatters["&%s;"](str) end,
- unknown_hex_entity=function(str) return formatters["&#x%s;"](str) end,
- unknown_any_entity=function(str) return formatters["&#x%s;"](str) end,
-}
-local placeholders=xml.placeholders
-local function fromhex(s)
- local n=tonumber(s,16)
- if n then
- return utfchar(n)
- else
- return formatters["h:%s"](s),true
- end
-end
-local function fromdec(s)
- local n=tonumber(s)
- if n then
- return utfchar(n)
- else
- return formatters["d:%s"](s),true
- end
-end
-local p_rest=(1-P(";"))^0
-local p_many=P(1)^0
-local p_char=lpegpatterns.utf8character
-local parsedentity=P("&")*(P("#x")*(p_rest/fromhex)+P("#")*(p_rest/fromdec))*P(";")*P(-1)+(P("#x")*(p_many/fromhex)+P("#")*(p_many/fromdec))
-local predefined_unified={
- [38]="&amp;",
- [42]="&quot;",
- [47]="&apos;",
- [74]="&lt;",
- [76]="&gt;",
-}
-local predefined_simplified={
- [38]="&",amp="&",
- [42]='"',quot='"',
- [47]="'",apos="'",
- [74]="<",lt="<",
- [76]=">",gt=">",
-}
-local nofprivates=0xF0000
-local privates_u={
- [ [[&]] ]="&amp;",
- [ [["]] ]="&quot;",
- [ [[']] ]="&apos;",
- [ [[<]] ]="&lt;",
- [ [[>]] ]="&gt;",
-}
-local privates_p={}
-local privates_n={
-}
-local escaped=utf.remapper(privates_u,"dynamic")
-local unprivatized=utf.remapper(privates_p,"dynamic")
-xml.unprivatized=unprivatized
-local function unescaped(s)
- local p=privates_n[s]
- if not p then
- nofprivates=nofprivates+1
- p=utfchar(nofprivates)
- privates_n[s]=p
- s="&"..s..";"
- privates_u[p]=s
- privates_p[p]=s
+local grammar_parsed_text_one
+local grammar_parsed_text_two
+local handle_hex_entity
+local handle_dec_entity
+local handle_any_entity_dtd
+local handle_any_entity_text
+do
+ local badentity="&"
+ xml.placeholders={
+ unknown_dec_entity=function(str) return str=="" and badentity or formatters["&%s;"](str) end,
+ unknown_hex_entity=function(str) return formatters["&#x%s;"](str) end,
+ unknown_any_entity=function(str) return formatters["&#x%s;"](str) end,
+ }
+ local function fromhex(s)
+ local n=tonumber(s,16)
+ if n then
+ return utfchar(n)
+ else
+ return formatters["h:%s"](s),true
+ end
end
- return p
-end
-xml.privatetoken=unescaped
-xml.privatecodes=privates_n
-local function handle_hex_entity(str)
- local h=hcache[str]
- if not h then
- local n=tonumber(str,16)
- h=unify_predefined and predefined_unified[n]
- if h then
- if trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
- end
- elseif utfize then
- h=(n and utfchar(n)) or xml.unknown_hex_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring hex entity &#x%s;",str)
- elseif trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
- end
+ local function fromdec(s)
+ local n=tonumber(s)
+ if n then
+ return utfchar(n)
else
- if trace_entities then
- report_xml("found entity &#x%s;",str)
- end
- h="&#x"..str..";"
+ return formatters["d:%s"](s),true
+ end
+ end
+ local p_rest=(1-P(";"))^0
+ local p_many=P(1)^0
+ local p_char=lpegpatterns.utf8character
+ local parsedentity=P("&#")*(P("x")*(p_rest/fromhex)+(p_rest/fromdec))*P(";")*P(-1)+P ("#")*(P("x")*(p_many/fromhex)+(p_many/fromdec))
+ xml.parsedentitylpeg=parsedentity
+ local predefined_unified={
+ [38]="&amp;",
+ [42]="&quot;",
+ [47]="&apos;",
+ [74]="&lt;",
+ [76]="&gt;",
+ }
+ local predefined_simplified={
+ [38]="&",amp="&",
+ [42]='"',quot='"',
+ [47]="'",apos="'",
+ [74]="<",lt="<",
+ [76]=">",gt=">",
+ }
+ local nofprivates=0xF0000
+ local privates_u={
+ [ [[&]] ]="&amp;",
+ [ [["]] ]="&quot;",
+ [ [[']] ]="&apos;",
+ [ [[<]] ]="&lt;",
+ [ [[>]] ]="&gt;",
+ }
+ local privates_p={
+ }
+ local privates_s={
+ [ [["]] ]="&U+22;",
+ [ [[#]] ]="&U+23;",
+ [ [[$]] ]="&U+24;",
+ [ [[%]] ]="&U+25;",
+ [ [[&]] ]="&U+26;",
+ [ [[']] ]="&U+27;",
+ [ [[<]] ]="&U+3C;",
+ [ [[>]] ]="&U+3E;",
+ [ [[\]] ]="&U+5C;",
+ [ [[{]] ]="&U+7B;",
+ [ [[|]] ]="&U+7C;",
+ [ [[}]] ]="&U+7D;",
+ [ [[~]] ]="&U+7E;",
+ }
+ local privates_n={
+ }
+ local escaped=utf.remapper(privates_u,"dynamic")
+ local unprivatized=utf.remapper(privates_p,"dynamic")
+ local unspecialized=utf.remapper(privates_s,"dynamic")
+ xml.unprivatized=unprivatized
+ xml.unspecialized=unspecialized
+ xml.escaped=escaped
+ local function unescaped(s)
+ local p=privates_n[s]
+ if not p then
+ nofprivates=nofprivates+1
+ p=utfchar(nofprivates)
+ privates_n[s]=p
+ s="&"..s..";"
+ privates_u[p]=s
+ privates_p[p]=s
+ privates_s[p]=s
end
- hcache[str]=h
+ return p
end
- return h
-end
-local function handle_dec_entity(str)
- local d=dcache[str]
- if not d then
- local n=tonumber(str)
- d=unify_predefined and predefined_unified[n]
- if d then
- if trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %a",str,d)
- end
- elseif utfize then
- d=(n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring dec entity &#%s;",str)
- elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ xml.privatetoken=unescaped
+ xml.privatecodes=privates_n
+ xml.specialcodes=privates_s
+ function xml.addspecialcode(key,value)
+ privates_s[key]=value or "&"..key..";"
+ end
+ handle_hex_entity=function(str)
+ local h=hcache[str]
+ if not h then
+ local n=tonumber(str,16)
+ h=unify_predefined and predefined_unified[n]
+ if h then
+ if trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ elseif utfize then
+ h=(n and utfchar(n)) or xml.unknown_hex_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring hex entity &#x%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ else
+ if trace_entities then
+ report_xml("found entity &#x%s;",str)
+ end
+ h="&#x"..str..";"
end
- else
- if trace_entities then
- report_xml("found entity &#%s;",str)
+ hcache[str]=h
+ end
+ return h
+ end
+ handle_dec_entity=function(str)
+ local d=dcache[str]
+ if not d then
+ local n=tonumber(str)
+ d=unify_predefined and predefined_unified[n]
+ if d then
+ if trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
+ elseif utfize then
+ d=(n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring dec entity &#%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
+ else
+ if trace_entities then
+ report_xml("found entity &#%s;",str)
+ end
+ d="&#"..str..";"
end
- d="&#"..str..";"
+ dcache[str]=d
end
- dcache[str]=d
+ return d
end
- return d
-end
-xml.parsedentitylpeg=parsedentity
-local function handle_any_entity(str)
- if resolve then
- local a=acache[str]
- if not a then
- a=resolve_predefined and predefined_simplified[str]
+ handle_any_entity_dtd=function(str)
+ if resolve then
+ local a=resolve_predefined and predefined_simplified[str]
if a then
if trace_entities then
report_xml("resolving entity &%s; to predefined %a",str,a)
@@ -10335,40 +10437,161 @@ local function handle_any_entity(str)
end
end
end
- acache[str]=a
- elseif trace_entities then
- if not acache[str] then
- report_xml("converting entity &%s; to %a",str,a)
- acache[str]=a
+ return a
+ else
+ local a=acache[str]
+ if not a then
+ a=resolve_predefined and predefined_simplified[str]
+ if a then
+ acache[str]=a
+ if trace_entities then
+ report_xml("entity &%s; becomes %a",str,a)
+ end
+ elseif str=="" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
+ a=badentity
+ acache[str]=a
+ else
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ a=unescaped(str)
+ acache[str]=a
+ end
end
+ return a
end
- return a
- else
- local a=acache[str]
- if not a then
- a=resolve_predefined and predefined_simplified[str]
+ end
+ handle_any_entity_text=function(str)
+ if resolve then
+ local a=resolve_predefined and predefined_simplified[str]
if a then
- acache[str]=a
if trace_entities then
- report_xml("entity &%s; becomes %a",str,a)
- end
- elseif str=="" then
- if trace_entities then
- report_xml("invalid entity &%s;",str)
+ report_xml("resolving entity &%s; to predefined %a",str,a)
end
- a=badentity
- acache[str]=a
else
- if trace_entities then
- report_xml("entity &%s; is made private",str)
+ if type(resolve)=="function" then
+ a=resolve(str,entities) or entities[str]
+ else
+ a=entities[str]
+ end
+ if a then
+ if type(a)=="function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; to function call",str)
+ end
+ a=a(str) or ""
+ end
+ a=lpegmatch(grammar_parsed_text_two,a) or a
+ if type(a)=="number" then
+ return ""
+ else
+ a=lpegmatch(parsedentity,a) or a
+ if trace_entities then
+ report_xml("resolving entity &%s; to internal %a",str,a)
+ end
+ end
+ if trace_entities then
+ report_xml("resolving entity &%s; to internal %a",str,a)
+ end
+ else
+ local unknown_any_entity=placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a=unknown_any_entity(str) or ""
+ end
+ if a then
+ if trace_entities then
+ report_xml("resolving entity &%s; to external %s",str,a)
+ end
+ else
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str=="" then
+ a=badentity
+ else
+ a="&"..str..";"
+ end
+ end
end
- a=unescaped(str)
- acache[str]=a
end
+ return a
+ else
+ local a=acache[str]
+ if not a then
+ a=resolve_predefined and predefined_simplified[str]
+ if a then
+ acache[str]=a
+ if trace_entities then
+ report_xml("entity &%s; becomes %a",str,a)
+ end
+ elseif str=="" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
+ a=badentity
+ acache[str]=a
+ else
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ a=unescaped(str)
+ acache[str]=a
+ end
+ end
+ return a
end
- return a
end
+ local p_rest=(1-P(";"))^1
+ local spec={
+ [0x23]="\\Ux{23}",
+ [0x24]="\\Ux{24}",
+ [0x25]="\\Ux{25}",
+ [0x5C]="\\Ux{5C}",
+ [0x7B]="\\Ux{7B}",
+ [0x7C]="\\Ux{7C}",
+ [0x7D]="\\Ux{7D}",
+ [0x7E]="\\Ux{7E}",
+ }
+ local hash=table.setmetatableindex(spec,function(t,k)
+ local v=utfchar(k)
+ t[k]=v
+ return v
+ end)
+ local function fromuni(s)
+ local n=tonumber(s,16)
+ if n then
+ return hash[n]
+ else
+ return formatters["u:%s"](s),true
+ end
+ end
+ local function fromhex(s)
+ local n=tonumber(s,16)
+ if n then
+ return hash[n]
+ else
+ return formatters["h:%s"](s),true
+ end
+ end
+ local function fromdec(s)
+ local n=tonumber(s)
+ if n then
+ return hash[n]
+ else
+ return formatters["d:%s"](s),true
+ end
+ end
+ local reparsedentity=P("U+")*(p_rest/fromuni)+P("#")*(
+ P("x")*(p_rest/fromhex)+p_rest/fromdec
+ )
+ xml.reparsedentitylpeg=reparsedentity
end
+local escaped=xml.escaped
+local unescaped=xml.unescaped
+local placeholders=xml.placeholders
local function handle_end_entity(str)
report_xml("error in entity, %a found without ending %a",str,";")
return str
@@ -10399,10 +10622,14 @@ local hexentitycontent=R("AF","af","09")^1
local decentitycontent=R("09")^1
local parsedentity=P("#")/""*(
P("x")/""*(hexentitycontent/handle_hex_entity)+(decentitycontent/handle_dec_entity)
- )+(anyentitycontent/handle_any_entity)
+ )+(anyentitycontent/handle_any_entity_dtd)
+local parsedentity_text=P("#")/""*(
+ P("x")/""*(hexentitycontent/handle_hex_entity)+(decentitycontent/handle_dec_entity)
+ )+(anyentitycontent/handle_any_entity_text)
local entity=(ampersand/"")*parsedentity*(semicolon/"")+ampersand*(anyentitycontent/handle_end_entity)
+local entity_text=(ampersand/"")*parsedentity_text*(semicolon/"")+ampersand*(anyentitycontent/handle_end_entity)
local text_unparsed=C((1-open)^1)
-local text_parsed=Cs(((1-open-ampersand)^1+entity)^1)
+local text_parsed=(Cs((1-open-ampersand)^1)/add_text+Cs(entity_text)/add_text)^1
local somespace=space^1
local optionalspace=space^0
local value=(squote*Cs((entity+(1-squote))^0)*squote)+(dquote*Cs((entity+(1-dquote))^0)*dquote)
@@ -10412,7 +10639,7 @@ local wrongvalue=Cs(P(entity+(1-space-endofattributes))^1)/attribute_value_error
local attributevalue=value+wrongvalue
local attribute=(somespace*name*optionalspace*equal*optionalspace*attributevalue)/add_attribute
local attributes=(attribute+somespace^-1*(((1-endofattributes)^1)/attribute_specification_error))^0
-local parsedtext=text_parsed/add_text
+local parsedtext=text_parsed
local unparsedtext=text_unparsed/add_text
local balanced=P { "["*((1-S"[]")+V(1))^0*"]" }
local emptyelement=(spacing*open*name*attributes*optionalspace*slash*close)/add_empty
@@ -10427,21 +10654,52 @@ local endcdata=P("]]")*close
local someinstruction=C((1-endinstruction)^0)
local somecomment=C((1-endcomment )^0)
local somecdata=C((1-endcdata )^0)
-local function normalentity(k,v ) entities[k]=v end
-local function systementity(k,v,n) entities[k]=v end
-local function publicentity(k,v,n) entities[k]=v end
+local function weirdentity(k,v)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","weird",k,v)
+ end
+ parameters[k]=v
+end
+local function normalentity(k,v)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","normal",k,v)
+ end
+ entities[k]=v
+end
+local function systementity(k,v,n)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","system",k,v)
+ end
+ entities[k]=v
+end
+local function publicentity(k,v,n)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","public",k,v)
+ end
+ entities[k]=v
+end
local begindoctype=open*P("!DOCTYPE")
local enddoctype=close
local beginset=P("[")
local endset=P("]")
+local wrdtypename=C((1-somespace-P(";"))^1)
local doctypename=C((1-somespace-close)^0)
local elementdoctype=optionalspace*P("<!ELEMENT")*(1-close)^0*close
local basiccomment=begincomment*((1-endcomment)^0)*endcomment
+local weirdentitytype=P("%")*(somespace*doctypename*somespace*value)/weirdentity
local normalentitytype=(doctypename*somespace*value)/normalentity
local publicentitytype=(doctypename*somespace*P("PUBLIC")*somespace*value)/publicentity
local systementitytype=(doctypename*somespace*P("SYSTEM")*somespace*value*somespace*P("NDATA")*somespace*doctypename)/systementity
-local entitydoctype=optionalspace*P("<!ENTITY")*somespace*(systementitytype+publicentitytype+normalentitytype)*optionalspace*close
-local doctypeset=beginset*optionalspace*P(elementdoctype+entitydoctype+basiccomment+space)^0*optionalspace*endset
+local entitydoctype=optionalspace*P("<!ENTITY")*somespace*(systementitytype+publicentitytype+normalentitytype+weirdentitytype)*optionalspace*close
+local function weirdresolve(s)
+ lpegmatch(entitydoctype,parameters[s])
+end
+local function normalresolve(s)
+ lpegmatch(entitydoctype,entities[s])
+end
+local entityresolve=P("%")*(wrdtypename/weirdresolve )*P(";")+P("&")*(wrdtypename/normalresolve)*P(";")
+entitydoctype=entitydoctype+entityresolve
+local doctypeset=beginset*optionalspace*P(elementdoctype+entitydoctype+entityresolve+basiccomment+space)^0*optionalspace*endset
local definitiondoctype=doctypename*somespace*doctypeset
local publicdoctype=doctypename*somespace*P("PUBLIC")*somespace*value*somespace*value*somespace*doctypeset
local systemdoctype=doctypename*somespace*P("SYSTEM")*somespace*value*somespace*doctypeset
@@ -10453,11 +10711,15 @@ local cdata=(spacing*begincdata*somecdata*endcdata )/function(...) add_special
local doctype=(spacing*begindoctype*somedoctype*enddoctype )/function(...) add_special("@dt@",...) end
local crap_parsed=1-beginelement-endelement-emptyelement-begininstruction-begincomment-begincdata-ampersand
local crap_unparsed=1-beginelement-endelement-emptyelement-begininstruction-begincomment-begincdata
-local parsedcrap=Cs((crap_parsed^1+entity)^1)/handle_crap_error
-local unparsedcrap=Cs((crap_unparsed )^1)/handle_crap_error
+local parsedcrap=Cs((crap_parsed^1+entity_text)^1)/handle_crap_error
+local parsedcrap=Cs((crap_parsed^1+entity_text)^1)/handle_crap_error
+local unparsedcrap=Cs((crap_unparsed )^1)/handle_crap_error
local trailer=space^0*(text_unparsed/set_message)^0
-local grammar_parsed_text=P { "preamble",
- preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
+grammar_parsed_text_one=P { "preamble",
+ preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0,
+}
+grammar_parsed_text_two=P { "followup",
+ followup=V("parent")*trailer,
parent=beginelement*V("children")^0*endelement,
children=parsedtext+V("parent")+emptyelement+comment+cdata+instruction+parsedcrap,
}
@@ -10467,37 +10729,26 @@ local grammar_unparsed_text=P { "preamble",
children=unparsedtext+V("parent")+emptyelement+comment+cdata+instruction+unparsedcrap,
}
local function _xmlconvert_(data,settings)
- settings=settings or {}
- strip=settings.strip_cm_and_dt
- utfize=settings.utfize_entities
- resolve=settings.resolve_entities
- resolve_predefined=settings.resolve_predefined_entities
- unify_predefined=settings.unify_predefined_entities
- cleanup=settings.text_cleanup
- entities=settings.entities or {}
- if utfize==nil then
- settings.utfize_entities=true
- utfize=true
- end
- if resolve_predefined==nil then
- settings.resolve_predefined_entities=true
- resolve_predefined=true
- end
- stack,top,at,xmlns,errorstr={},{},{},{},nil
- acache,hcache,dcache={},{},{}
- reported_attribute_errors={}
+ settings=settings or {}
+ preparexmlstate(settings)
if settings.parent_root then
mt=getmetatable(settings.parent_root)
else
initialize_mt(top)
end
- stack[#stack+1]=top
+ level=level+1
+ stack[level]=top
top.dt={}
dt=top.dt
+ nt=0
if not data or data=="" then
errorstr="empty xml file"
elseif utfize or resolve then
- if lpegmatch(grammar_parsed_text,data) then
+ local m=lpegmatch(grammar_parsed_text_one,data)
+ if m then
+ m=lpegmatch(grammar_parsed_text_two,data,m)
+ end
+ if m then
else
errorstr="invalid xml file - parsed text"
end
@@ -10513,8 +10764,8 @@ local function _xmlconvert_(data,settings)
local result
if errorstr and errorstr~="" then
result={ dt={ { ns="",tg="error",dt={ errorstr },at={},er=true } } }
-setmetatable(result,mt)
-setmetatable(result.dt[1],mt)
+ setmetatable(result,mt)
+ setmetatable(result.dt[1],mt)
setmetatable(stack,mt)
local errorhandler=settings.error_handler
if errorhandler==false then
@@ -10556,13 +10807,10 @@ setmetatable(result.dt[1],mt)
decimals=dcache,
hexadecimals=hcache,
names=acache,
+ intermediates=parameters,
}
}
- strip,utfize,resolve,resolve_predefined=nil,nil,nil,nil
- unify_predefined,cleanup,entities=nil,nil,nil
- stack,top,at,xmlns,errorstr=nil,nil,nil,nil,nil
- acache,hcache,dcache=nil,nil,nil
- reported_attribute_errors,mt,errorhandler=nil,nil,nil
+ preparexmlstate()
return result
end
local function xmlconvert(data,settings)
@@ -10624,15 +10872,15 @@ function xml.toxml(data)
return data
end
end
-local function copy(old,tables)
+local function copy(old)
if old then
- tables=tables or {}
local new={}
- if not tables[old] then
- tables[old]=new
- end
for k,v in next,old do
- new[k]=(type(v)=="table" and (tables[v] or copy(v,tables))) or v
+ if type(v)=="table" then
+ new[k]=table.copy(v)
+ else
+ new[k]=v
+ end
end
local mt=getmetatable(old)
if mt then
@@ -10731,7 +10979,7 @@ local function verbose_cdata(e,handlers)
handlers.handle("<![CDATA[",e.dt[1],"]]>")
end
local function verbose_doctype(e,handlers)
- handlers.handle("<!DOCTYPE ",e.dt[1],">")
+ handlers.handle("<!DOCTYPE",e.dt[1],">")
end
local function verbose_root(e,handlers)
handlers.serialize(e.dt,handlers)
@@ -11013,7 +11261,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-lpt"] = package.loaded["lxml-lpt"] or true
--- original size: 50461, stripped down to: 31497
+-- original size: 51229, stripped down to: 31529
if not modules then modules={} end modules ['lxml-lpt']={
version=1.001,
@@ -11390,13 +11638,27 @@ local lp_noequal=P("!=")/"~="+P("<=")+P(">=")+P("==")
local lp_doequal=P("=")/"=="
local lp_or=P("|")/" or "
local lp_and=P("&")/" and "
-local lp_builtin=P (
- P("text")/"(ll.dt[1] or '')"+
- P("content")/"ll.dt"+
- P("name")/"((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)"+P("tag")/"ll.tg"+P("position")/"l"+
- P("firstindex")/"1"+P("lastindex")/"(#ll.__p__.dt or 1)"+P("firstelement")/"1"+P("lastelement")/"(ll.__p__.en or 1)"+P("first")/"1"+P("last")/"#list"+P("rootposition")/"order"+P("order")/"order"+P("element")/"(ll.ei or 1)"+P("index")/"(ll.ni or 1)"+P("match")/"(ll.mi or 1)"+
- P("ns")/"ll.ns"
- )*((spaces*P("(")*spaces*P(")"))/"")
+local builtin={
+ text="(ll.dt[1] or '')",
+ content="ll.dt",
+ name="((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)",
+ tag="ll.tg",
+ position="l",
+ firstindex="1",
+ firstelement="1",
+ first="1",
+ lastindex="(#ll.__p__.dt or 1)",
+ lastelement="(ll.__p__.en or 1)",
+ last="#list",
+ rootposition="order",
+ order="order",
+ element="(ll.ei or 1)",
+ index="(ll.ni or 1)",
+ match="(ll.mi or 1)",
+ namespace="ll.ns",
+ ns="ll.ns",
+}
+local lp_builtin=lpeg.utfchartabletopattern(builtin)/builtin*((spaces*P("(")*spaces*P(")"))/"")
local lp_attribute=(P("@")+P("attribute::"))/""*Cc("(ll.at and ll.at['")*((R("az","AZ")+S("-_:"))^1)*Cc("'])")
local lp_fastpos_p=P("+")^0*R("09")^1*P(-1)/"l==%0"
local lp_fastpos_n=P("-")*R("09")^1*P(-1)/"(%0<0 and (#list+%0==l))"
@@ -14196,7 +14458,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-env"] = package.loaded["data-env"] or true
--- original size: 9518, stripped down to: 7037
+-- original size: 9649, stripped down to: 7131
if not modules then modules={} end modules ['data-env']={
version=1.001,
@@ -14332,6 +14594,11 @@ local relations=allocate {
names={ 'fontconfig','fontconfig file','fontconfig files' },
variable='FONTCONFIG_PATH',
},
+ pk={
+ names={ "pk" },
+ variable='PKFONTS',
+ suffixes={ 'pk' },
+ },
},
obsolete={
enc={
@@ -14970,7 +15237,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-res"] = package.loaded["data-res"] or true
--- original size: 67003, stripped down to: 46291
+-- original size: 67241, stripped down to: 46427
if not modules then modules={} end modules ['data-res']={
version=1.001,
@@ -16240,10 +16507,18 @@ local function findfiles(filename,filetype,allresults)
return result or {},status
end
function resolvers.findfiles(filename,filetype)
- return findfiles(filename,filetype,true)
+ if not filename or filename=="" then
+ return ""
+ else
+ return findfiles(filename,filetype,true)
+ end
end
function resolvers.findfile(filename,filetype)
- return findfiles(filename,filetype,false)[1] or ""
+ if not filename or filename=="" then
+ return ""
+ else
+ return findfiles(filename,filetype,false)[1] or ""
+ end
end
function resolvers.findpath(filename,filetype)
return filedirname(findfiles(filename,filetype,false)[1] or "")
@@ -18363,8 +18638,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sac.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 782445
--- stripped bytes : 283493
+-- original bytes : 791821
+-- stripped bytes : 286453
-- end library merge
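The lxml-tab.lua hunks above map otherwise unknown entities onto characters taken from a Unicode private plane (counting upwards from 0xF0000), so later passes can treat them as opaque single tokens and turn them back into &name; notation with a utf.remapper over privates_p. A minimal standalone sketch of that round trip in plain Lua 5.3 (no ConTeXt; the helper names privatize/unprivatize are invented here):

local utfchar = utf8.char

local nofprivates = 0xF0000          -- first code point of the private area used
local privates_n  = { }              -- entity name  -> private character
local privates_p  = { }              -- private char -> "&name;" again

local function privatize(name)
    local p = privates_n[name]
    if not p then
        nofprivates      = nofprivates + 1
        p                = utfchar(nofprivates)
        privates_n[name] = p
        privates_p[p]    = "&" .. name .. ";"
    end
    return p
end

local function unprivatize(s)
    -- map every private character back to its original entity notation
    return (s:gsub(utf8.charpattern, function(c)
        return privates_p[c] or c
    end))
end

local hidden = "x" .. privatize("foo") .. "y"
print(unprivatize(hidden))           -- prints: x&foo;y
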
diff --git a/scripts/context/stubs/win64/mtxrun.lua b/scripts/context/stubs/win64/mtxrun.lua
index 8f10f004a..0167ac5e8 100644
--- a/scripts/context/stubs/win64/mtxrun.lua
+++ b/scripts/context/stubs/win64/mtxrun.lua
@@ -4552,7 +4552,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-unicode"] = package.loaded["l-unicode"] or true
--- original size: 38659, stripped down to: 16287
+-- original size: 38699, stripped down to: 16321
if not modules then modules={} end modules ['l-unicode']={
version=1.001,
@@ -4781,9 +4781,10 @@ if not utf.sub then
end
end
end
-function utf.remapper(mapping,option)
+function utf.remapper(mapping,option,action)
local variant=type(mapping)
if variant=="table" then
+ action=action or mapping
if option=="dynamic" then
local pattern=false
table.setmetatablenewindex(mapping,function(t,k,v) rawset(t,k,v) pattern=false end)
@@ -4792,15 +4793,15 @@ function utf.remapper(mapping,option)
return ""
else
if not pattern then
- pattern=Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ pattern=Cs((tabletopattern(mapping)/action+p_utf8char)^0)
end
return lpegmatch(pattern,str)
end
end
elseif option=="pattern" then
- return Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ return Cs((tabletopattern(mapping)/action+p_utf8char)^0)
else
- local pattern=Cs((tabletopattern(mapping)/mapping+p_utf8char)^0)
+ local pattern=Cs((tabletopattern(mapping)/action+p_utf8char)^0)
return function(str)
if not str or str=="" then
return ""
@@ -5904,7 +5905,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-tab"] = package.loaded["util-tab"] or true
--- original size: 28677, stripped down to: 18633
+-- original size: 28680, stripped down to: 18636
if not modules then modules={} end modules ['util-tab']={
version=1.001,
@@ -6565,7 +6566,7 @@ local function serialize(root,name,specification)
end
table.serialize=serialize
if setinspector then
- setinspector("table",function(v) if type(v)=="table" then print(serialize(v,"table")) return true end end)
+ setinspector("table",function(v) if type(v)=="table" then print(serialize(v,"table",{})) return true end end)
end
@@ -9984,7 +9985,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-tab"] = package.loaded["lxml-tab"] or true
--- original size: 47426, stripped down to: 28810
+-- original size: 55622, stripped down to: 34927
if not modules then modules={} end modules ['lxml-tab']={
version=1.001,
@@ -9993,7 +9994,7 @@ if not modules then modules={} end modules ['lxml-tab']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local trace_entities=false trackers.register("xml.entities",function(v) trace_entities=v end)
+local trace_entities=false trackers .register("xml.entities",function(v) trace_entities=v end)
local report_xml=logs and logs.reporter("xml","core") or function(...) print(string.format(...)) end
if lpeg.setmaxstack then lpeg.setmaxstack(1000) end
xml=xml or {}
@@ -10005,6 +10006,7 @@ local utfchar=utf.char
local lpegmatch,lpegpatterns=lpeg.match,lpeg.patterns
local P,S,R,C,V,C,Cs=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.C,lpeg.Cs
local formatters=string.formatters
+do
xml.xmlns=xml.xmlns or {}
local check=P(false)
local parse=check
@@ -10021,24 +10023,68 @@ end
function xml.resolvens(url)
return lpegmatch(parse,lower(url)) or ""
end
+end
local nsremap,resolvens=xml.xmlns,xml.resolvens
-local stack={}
-local top={}
-local dt={}
-local at={}
-local xmlns={}
-local errorstr=nil
-local entities={}
-local strip=false
-local cleanup=false
-local utfize=false
-local resolve=false
-local resolve_predefined=false
-local unify_predefined=false
-local dcache={}
-local hcache={}
-local acache={}
-local mt={}
+local stack,level,top,at,xmlns,errorstr
+local entities,parameters
+local strip,utfize,resolve,cleanup,resolve_predefined,unify_predefined
+local dcache,hcache,acache
+local mt,dt,nt
+local function preparexmlstate(settings)
+ if settings then
+ stack={}
+ level=0
+ top={}
+ at={}
+ mt={}
+ dt={}
+ nt=0
+ xmlns={}
+ errorstr=nil
+ strip=settings.strip_cm_and_dt
+ utfize=settings.utfize_entities
+ resolve=settings.resolve_entities
+ resolve_predefined=settings.resolve_predefined_entities
+ unify_predefined=settings.unify_predefined_entities
+ cleanup=settings.text_cleanup
+ entities=settings.entities or {}
+ parameters={}
+ reported_at_errors={}
+ dcache={}
+ hcache={}
+ acache={}
+ if utfize==nil then
+ settings.utfize_entities=true
+ utfize=true
+ end
+ if resolve_predefined==nil then
+ settings.resolve_predefined_entities=true
+ resolve_predefined=true
+ end
+ else
+ stack=nil
+ level=nil
+ top=nil
+ at=nil
+ mt=nil
+ dt=nil
+ nt=nil
+ xmlns=nil
+ errorstr=nil
+ strip=nil
+ utfize=nil
+ resolve=nil
+ resolve_predefined=nil
+ unify_predefined=nil
+ cleanup=nil
+ entities=nil
+ parameters=nil
+ reported_at_errors=nil
+ dcache=nil
+ hcache=nil
+ acache=nil
+ end
+end
local function initialize_mt(root)
mt={ __index=root }
end
@@ -10048,8 +10094,9 @@ end
function xml.checkerror(top,toclose)
return ""
end
+local checkns=xml.checkns
local function add_attribute(namespace,tag,value)
- if cleanup and #value>0 then
+ if cleanup and value~="" then
value=cleanup(value)
end
if tag=="xmlns" then
@@ -10058,21 +10105,30 @@ local function add_attribute(namespace,tag,value)
elseif namespace=="" then
at[tag]=value
elseif namespace=="xmlns" then
- xml.checkns(tag,value)
+ checkns(tag,value)
at["xmlns:"..tag]=value
else
at[namespace..":"..tag]=value
end
end
local function add_empty(spacing,namespace,tag)
- if #spacing>0 then
- dt[#dt+1]=spacing
+ if spacing~="" then
+ nt=nt+1
+ dt[nt]=spacing
end
local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
- top=stack[#stack]
+ top=stack[level]
dt=top.dt
- local t={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=top }
- dt[#dt+1]=t
+ nt=#dt+1
+ local t={
+ ns=namespace or "",
+ rn=resolved,
+ tg=tag,
+ at=at,
+ dt={},
+ __p__=top
+ }
+ dt[nt]=t
setmetatable(t,mt)
if at.xmlns then
remove(xmlns)
@@ -10080,23 +10136,35 @@ local function add_empty(spacing,namespace,tag)
at={}
end
local function add_begin(spacing,namespace,tag)
- if #spacing>0 then
- dt[#dt+1]=spacing
+ if spacing~="" then
+ nt=nt+1
+ dt[nt]=spacing
end
local resolved=namespace=="" and xmlns[#xmlns] or nsremap[namespace] or namespace
- top={ ns=namespace or "",rn=resolved,tg=tag,at=at,dt={},__p__=stack[#stack] }
+ top={
+ ns=namespace or "",
+ rn=resolved,
+ tg=tag,
+ at=at,
+ dt={},
+ __p__=stack[level]
+ }
setmetatable(top,mt)
dt=top.dt
- stack[#stack+1]=top
+ nt=#dt
+ level=level+1
+ stack[level]=top
at={}
end
local function add_end(spacing,namespace,tag)
- if #spacing>0 then
- dt[#dt+1]=spacing
+ if spacing~="" then
+ nt=nt+1
+ dt[nt]=spacing
end
- local toclose=remove(stack)
- top=stack[#stack]
- if #stack<1 then
+ local toclose=stack[level]
+ level=level-1
+ top=stack[level]
+ if level<1 then
errorstr=formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "")
report_xml(errorstr)
elseif toclose.tg~=tag then
@@ -10104,195 +10172,229 @@ local function add_end(spacing,namespace,tag)
report_xml(errorstr)
end
dt=top.dt
- dt[#dt+1]=toclose
+ nt=#dt+1
+ dt[nt]=toclose
if toclose.at.xmlns then
remove(xmlns)
end
end
-local spaceonly=lpegpatterns.whitespace^0*P(-1)
local function add_text(text)
- local n=#dt
- if cleanup and #text>0 then
- if n>0 then
- local s=dt[n]
+ if text=="" then
+ return
+ end
+ if cleanup then
+ if nt>0 then
+ local s=dt[nt]
if type(s)=="string" then
- dt[n]=s..cleanup(text)
+ dt[nt]=s..cleanup(text)
else
- dt[n+1]=cleanup(text)
+ nt=nt+1
+ dt[nt]=cleanup(text)
end
else
+ nt=1
dt[1]=cleanup(text)
end
else
- if n>0 then
- local s=dt[n]
+ if nt>0 then
+ local s=dt[nt]
if type(s)=="string" then
- dt[n]=s..text
+ dt[nt]=s..text
else
- dt[n+1]=text
+ nt=nt+1
+ dt[nt]=text
end
else
+ nt=1
dt[1]=text
end
end
end
local function add_special(what,spacing,text)
- if #spacing>0 then
- dt[#dt+1]=spacing
+ if spacing~="" then
+ nt=nt+1
+ dt[nt]=spacing
end
if strip and (what=="@cm@" or what=="@dt@") then
else
- dt[#dt+1]={ special=true,ns="",tg=what,dt={ text } }
+ nt=nt+1
+ dt[nt]={ special=true,ns="",tg=what,dt={ text } }
end
end
local function set_message(txt)
errorstr="garbage at the end of the file: "..gsub(txt,"([ \n\r\t]*)","")
end
-local reported_attribute_errors={}
local function attribute_value_error(str)
- if not reported_attribute_errors[str] then
+ if not reported_at_errors[str] then
report_xml("invalid attribute value %a",str)
- reported_attribute_errors[str]=true
+ reported_at_errors[str]=true
at._error_=str
end
return str
end
local function attribute_specification_error(str)
- if not reported_attribute_errors[str] then
+ if not reported_at_errors[str] then
report_xml("invalid attribute specification %a",str)
- reported_attribute_errors[str]=true
+ reported_at_errors[str]=true
at._error_=str
end
return str
end
-local badentity="&error;"
-local badentity="&"
-xml.placeholders={
- unknown_dec_entity=function(str) return str=="" and badentity or formatters["&%s;"](str) end,
- unknown_hex_entity=function(str) return formatters["&#x%s;"](str) end,
- unknown_any_entity=function(str) return formatters["&#x%s;"](str) end,
-}
-local placeholders=xml.placeholders
-local function fromhex(s)
- local n=tonumber(s,16)
- if n then
- return utfchar(n)
- else
- return formatters["h:%s"](s),true
- end
-end
-local function fromdec(s)
- local n=tonumber(s)
- if n then
- return utfchar(n)
- else
- return formatters["d:%s"](s),true
- end
-end
-local p_rest=(1-P(";"))^0
-local p_many=P(1)^0
-local p_char=lpegpatterns.utf8character
-local parsedentity=P("&")*(P("#x")*(p_rest/fromhex)+P("#")*(p_rest/fromdec))*P(";")*P(-1)+(P("#x")*(p_many/fromhex)+P("#")*(p_many/fromdec))
-local predefined_unified={
- [38]="&amp;",
- [42]="&quot;",
- [47]="&apos;",
- [74]="&lt;",
- [76]="&gt;",
-}
-local predefined_simplified={
- [38]="&",amp="&",
- [42]='"',quot='"',
- [47]="'",apos="'",
- [74]="<",lt="<",
- [76]=">",gt=">",
-}
-local nofprivates=0xF0000
-local privates_u={
- [ [[&]] ]="&amp;",
- [ [["]] ]="&quot;",
- [ [[']] ]="&apos;",
- [ [[<]] ]="&lt;",
- [ [[>]] ]="&gt;",
-}
-local privates_p={}
-local privates_n={
-}
-local escaped=utf.remapper(privates_u,"dynamic")
-local unprivatized=utf.remapper(privates_p,"dynamic")
-xml.unprivatized=unprivatized
-local function unescaped(s)
- local p=privates_n[s]
- if not p then
- nofprivates=nofprivates+1
- p=utfchar(nofprivates)
- privates_n[s]=p
- s="&"..s..";"
- privates_u[p]=s
- privates_p[p]=s
+local grammar_parsed_text_one
+local grammar_parsed_text_two
+local handle_hex_entity
+local handle_dec_entity
+local handle_any_entity_dtd
+local handle_any_entity_text
+do
+ local badentity="&"
+ xml.placeholders={
+ unknown_dec_entity=function(str) return str=="" and badentity or formatters["&%s;"](str) end,
+ unknown_hex_entity=function(str) return formatters["&#x%s;"](str) end,
+ unknown_any_entity=function(str) return formatters["&#x%s;"](str) end,
+ }
+ local function fromhex(s)
+ local n=tonumber(s,16)
+ if n then
+ return utfchar(n)
+ else
+ return formatters["h:%s"](s),true
+ end
end
- return p
-end
-xml.privatetoken=unescaped
-xml.privatecodes=privates_n
-local function handle_hex_entity(str)
- local h=hcache[str]
- if not h then
- local n=tonumber(str,16)
- h=unify_predefined and predefined_unified[n]
- if h then
- if trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
- end
- elseif utfize then
- h=(n and utfchar(n)) or xml.unknown_hex_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring hex entity &#x%s;",str)
- elseif trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
- end
+ local function fromdec(s)
+ local n=tonumber(s)
+ if n then
+ return utfchar(n)
else
- if trace_entities then
- report_xml("found entity &#x%s;",str)
- end
- h="&#x"..str..";"
+ return formatters["d:%s"](s),true
+ end
+ end
+ local p_rest=(1-P(";"))^0
+ local p_many=P(1)^0
+ local p_char=lpegpatterns.utf8character
+ local parsedentity=P("&#")*(P("x")*(p_rest/fromhex)+(p_rest/fromdec))*P(";")*P(-1)+P ("#")*(P("x")*(p_many/fromhex)+(p_many/fromdec))
+ xml.parsedentitylpeg=parsedentity
+ local predefined_unified={
+ [38]="&amp;",
+ [42]="&quot;",
+ [47]="&apos;",
+ [74]="&lt;",
+ [76]="&gt;",
+ }
+ local predefined_simplified={
+ [38]="&",amp="&",
+ [42]='"',quot='"',
+ [47]="'",apos="'",
+ [74]="<",lt="<",
+ [76]=">",gt=">",
+ }
+ local nofprivates=0xF0000
+ local privates_u={
+ [ [[&]] ]="&amp;",
+ [ [["]] ]="&quot;",
+ [ [[']] ]="&apos;",
+ [ [[<]] ]="&lt;",
+ [ [[>]] ]="&gt;",
+ }
+ local privates_p={
+ }
+ local privates_s={
+ [ [["]] ]="&U+22;",
+ [ [[#]] ]="&U+23;",
+ [ [[$]] ]="&U+24;",
+ [ [[%]] ]="&U+25;",
+ [ [[&]] ]="&U+26;",
+ [ [[']] ]="&U+27;",
+ [ [[<]] ]="&U+3C;",
+ [ [[>]] ]="&U+3E;",
+ [ [[\]] ]="&U+5C;",
+ [ [[{]] ]="&U+7B;",
+ [ [[|]] ]="&U+7C;",
+ [ [[}]] ]="&U+7D;",
+ [ [[~]] ]="&U+7E;",
+ }
+ local privates_n={
+ }
+ local escaped=utf.remapper(privates_u,"dynamic")
+ local unprivatized=utf.remapper(privates_p,"dynamic")
+ local unspecialized=utf.remapper(privates_s,"dynamic")
+ xml.unprivatized=unprivatized
+ xml.unspecialized=unspecialized
+ xml.escaped=escaped
+ local function unescaped(s)
+ local p=privates_n[s]
+ if not p then
+ nofprivates=nofprivates+1
+ p=utfchar(nofprivates)
+ privates_n[s]=p
+ s="&"..s..";"
+ privates_u[p]=s
+ privates_p[p]=s
+ privates_s[p]=s
end
- hcache[str]=h
+ return p
end
- return h
-end
-local function handle_dec_entity(str)
- local d=dcache[str]
- if not d then
- local n=tonumber(str)
- d=unify_predefined and predefined_unified[n]
- if d then
- if trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %a",str,d)
- end
- elseif utfize then
- d=(n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring dec entity &#%s;",str)
- elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ xml.privatetoken=unescaped
+ xml.privatecodes=privates_n
+ xml.specialcodes=privates_s
+ function xml.addspecialcode(key,value)
+ privates_s[key]=value or "&"..key..";"
+ end
+ handle_hex_entity=function(str)
+ local h=hcache[str]
+ if not h then
+ local n=tonumber(str,16)
+ h=unify_predefined and predefined_unified[n]
+ if h then
+ if trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ elseif utfize then
+ h=(n and utfchar(n)) or xml.unknown_hex_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring hex entity &#x%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ else
+ if trace_entities then
+ report_xml("found entity &#x%s;",str)
+ end
+ h="&#x"..str..";"
end
- else
- if trace_entities then
- report_xml("found entity &#%s;",str)
+ hcache[str]=h
+ end
+ return h
+ end
+ handle_dec_entity=function(str)
+ local d=dcache[str]
+ if not d then
+ local n=tonumber(str)
+ d=unify_predefined and predefined_unified[n]
+ if d then
+ if trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
+ elseif utfize then
+ d=(n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring dec entity &#%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
+ else
+ if trace_entities then
+ report_xml("found entity &#%s;",str)
+ end
+ d="&#"..str..";"
end
- d="&#"..str..";"
+ dcache[str]=d
end
- dcache[str]=d
+ return d
end
- return d
-end
-xml.parsedentitylpeg=parsedentity
-local function handle_any_entity(str)
- if resolve then
- local a=acache[str]
- if not a then
- a=resolve_predefined and predefined_simplified[str]
+ handle_any_entity_dtd=function(str)
+ if resolve then
+ local a=resolve_predefined and predefined_simplified[str]
if a then
if trace_entities then
report_xml("resolving entity &%s; to predefined %a",str,a)
@@ -10335,40 +10437,161 @@ local function handle_any_entity(str)
end
end
end
- acache[str]=a
- elseif trace_entities then
- if not acache[str] then
- report_xml("converting entity &%s; to %a",str,a)
- acache[str]=a
+ return a
+ else
+ local a=acache[str]
+ if not a then
+ a=resolve_predefined and predefined_simplified[str]
+ if a then
+ acache[str]=a
+ if trace_entities then
+ report_xml("entity &%s; becomes %a",str,a)
+ end
+ elseif str=="" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
+ a=badentity
+ acache[str]=a
+ else
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ a=unescaped(str)
+ acache[str]=a
+ end
end
+ return a
end
- return a
- else
- local a=acache[str]
- if not a then
- a=resolve_predefined and predefined_simplified[str]
+ end
+ handle_any_entity_text=function(str)
+ if resolve then
+ local a=resolve_predefined and predefined_simplified[str]
if a then
- acache[str]=a
if trace_entities then
- report_xml("entity &%s; becomes %a",str,a)
- end
- elseif str=="" then
- if trace_entities then
- report_xml("invalid entity &%s;",str)
+ report_xml("resolving entity &%s; to predefined %a",str,a)
end
- a=badentity
- acache[str]=a
else
- if trace_entities then
- report_xml("entity &%s; is made private",str)
+ if type(resolve)=="function" then
+ a=resolve(str,entities) or entities[str]
+ else
+ a=entities[str]
+ end
+ if a then
+ if type(a)=="function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; to function call",str)
+ end
+ a=a(str) or ""
+ end
+ a=lpegmatch(grammar_parsed_text_two,a) or a
+ if type(a)=="number" then
+ return ""
+ else
+ a=lpegmatch(parsedentity,a) or a
+ if trace_entities then
+ report_xml("resolving entity &%s; to internal %a",str,a)
+ end
+ end
+ if trace_entities then
+ report_xml("resolving entity &%s; to internal %a",str,a)
+ end
+ else
+ local unknown_any_entity=placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a=unknown_any_entity(str) or ""
+ end
+ if a then
+ if trace_entities then
+ report_xml("resolving entity &%s; to external %s",str,a)
+ end
+ else
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str=="" then
+ a=badentity
+ else
+ a="&"..str..";"
+ end
+ end
end
- a=unescaped(str)
- acache[str]=a
end
+ return a
+ else
+ local a=acache[str]
+ if not a then
+ a=resolve_predefined and predefined_simplified[str]
+ if a then
+ acache[str]=a
+ if trace_entities then
+ report_xml("entity &%s; becomes %a",str,a)
+ end
+ elseif str=="" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
+ a=badentity
+ acache[str]=a
+ else
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ a=unescaped(str)
+ acache[str]=a
+ end
+ end
+ return a
end
- return a
end
+ local p_rest=(1-P(";"))^1
+ local spec={
+ [0x23]="\\Ux{23}",
+ [0x24]="\\Ux{24}",
+ [0x25]="\\Ux{25}",
+ [0x5C]="\\Ux{5C}",
+ [0x7B]="\\Ux{7B}",
+ [0x7C]="\\Ux{7C}",
+ [0x7D]="\\Ux{7D}",
+ [0x7E]="\\Ux{7E}",
+ }
+ local hash=table.setmetatableindex(spec,function(t,k)
+ local v=utfchar(k)
+ t[k]=v
+ return v
+ end)
+ local function fromuni(s)
+ local n=tonumber(s,16)
+ if n then
+ return hash[n]
+ else
+ return formatters["u:%s"](s),true
+ end
+ end
+ local function fromhex(s)
+ local n=tonumber(s,16)
+ if n then
+ return hash[n]
+ else
+ return formatters["h:%s"](s),true
+ end
+ end
+ local function fromdec(s)
+ local n=tonumber(s)
+ if n then
+ return hash[n]
+ else
+ return formatters["d:%s"](s),true
+ end
+ end
+ local reparsedentity=P("U+")*(p_rest/fromuni)+P("#")*(
+ P("x")*(p_rest/fromhex)+p_rest/fromdec
+ )
+ xml.reparsedentitylpeg=reparsedentity
end
+local escaped=xml.escaped
+local unescaped=xml.unescaped
+local placeholders=xml.placeholders
local function handle_end_entity(str)
report_xml("error in entity, %a found without ending %a",str,";")
return str
@@ -10399,10 +10622,14 @@ local hexentitycontent=R("AF","af","09")^1
local decentitycontent=R("09")^1
local parsedentity=P("#")/""*(
P("x")/""*(hexentitycontent/handle_hex_entity)+(decentitycontent/handle_dec_entity)
- )+(anyentitycontent/handle_any_entity)
+ )+(anyentitycontent/handle_any_entity_dtd)
+local parsedentity_text=P("#")/""*(
+ P("x")/""*(hexentitycontent/handle_hex_entity)+(decentitycontent/handle_dec_entity)
+ )+(anyentitycontent/handle_any_entity_text)
local entity=(ampersand/"")*parsedentity*(semicolon/"")+ampersand*(anyentitycontent/handle_end_entity)
+local entity_text=(ampersand/"")*parsedentity_text*(semicolon/"")+ampersand*(anyentitycontent/handle_end_entity)
local text_unparsed=C((1-open)^1)
-local text_parsed=Cs(((1-open-ampersand)^1+entity)^1)
+local text_parsed=(Cs((1-open-ampersand)^1)/add_text+Cs(entity_text)/add_text)^1
local somespace=space^1
local optionalspace=space^0
local value=(squote*Cs((entity+(1-squote))^0)*squote)+(dquote*Cs((entity+(1-dquote))^0)*dquote)
@@ -10412,7 +10639,7 @@ local wrongvalue=Cs(P(entity+(1-space-endofattributes))^1)/attribute_value_error
local attributevalue=value+wrongvalue
local attribute=(somespace*name*optionalspace*equal*optionalspace*attributevalue)/add_attribute
local attributes=(attribute+somespace^-1*(((1-endofattributes)^1)/attribute_specification_error))^0
-local parsedtext=text_parsed/add_text
+local parsedtext=text_parsed
local unparsedtext=text_unparsed/add_text
local balanced=P { "["*((1-S"[]")+V(1))^0*"]" }
local emptyelement=(spacing*open*name*attributes*optionalspace*slash*close)/add_empty
@@ -10427,21 +10654,52 @@ local endcdata=P("]]")*close
local someinstruction=C((1-endinstruction)^0)
local somecomment=C((1-endcomment )^0)
local somecdata=C((1-endcdata )^0)
-local function normalentity(k,v ) entities[k]=v end
-local function systementity(k,v,n) entities[k]=v end
-local function publicentity(k,v,n) entities[k]=v end
+local function weirdentity(k,v)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","weird",k,v)
+ end
+ parameters[k]=v
+end
+local function normalentity(k,v)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","normal",k,v)
+ end
+ entities[k]=v
+end
+local function systementity(k,v,n)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","system",k,v)
+ end
+ entities[k]=v
+end
+local function publicentity(k,v,n)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","public",k,v)
+ end
+ entities[k]=v
+end
local begindoctype=open*P("!DOCTYPE")
local enddoctype=close
local beginset=P("[")
local endset=P("]")
+local wrdtypename=C((1-somespace-P(";"))^1)
local doctypename=C((1-somespace-close)^0)
local elementdoctype=optionalspace*P("<!ELEMENT")*(1-close)^0*close
local basiccomment=begincomment*((1-endcomment)^0)*endcomment
+local weirdentitytype=P("%")*(somespace*doctypename*somespace*value)/weirdentity
local normalentitytype=(doctypename*somespace*value)/normalentity
local publicentitytype=(doctypename*somespace*P("PUBLIC")*somespace*value)/publicentity
local systementitytype=(doctypename*somespace*P("SYSTEM")*somespace*value*somespace*P("NDATA")*somespace*doctypename)/systementity
-local entitydoctype=optionalspace*P("<!ENTITY")*somespace*(systementitytype+publicentitytype+normalentitytype)*optionalspace*close
-local doctypeset=beginset*optionalspace*P(elementdoctype+entitydoctype+basiccomment+space)^0*optionalspace*endset
+local entitydoctype=optionalspace*P("<!ENTITY")*somespace*(systementitytype+publicentitytype+normalentitytype+weirdentitytype)*optionalspace*close
+local function weirdresolve(s)
+ lpegmatch(entitydoctype,parameters[s])
+end
+local function normalresolve(s)
+ lpegmatch(entitydoctype,entities[s])
+end
+local entityresolve=P("%")*(wrdtypename/weirdresolve )*P(";")+P("&")*(wrdtypename/normalresolve)*P(";")
+entitydoctype=entitydoctype+entityresolve
+local doctypeset=beginset*optionalspace*P(elementdoctype+entitydoctype+entityresolve+basiccomment+space)^0*optionalspace*endset
local definitiondoctype=doctypename*somespace*doctypeset
local publicdoctype=doctypename*somespace*P("PUBLIC")*somespace*value*somespace*value*somespace*doctypeset
local systemdoctype=doctypename*somespace*P("SYSTEM")*somespace*value*somespace*doctypeset
@@ -10453,11 +10711,15 @@ local cdata=(spacing*begincdata*somecdata*endcdata )/function(...) add_special
local doctype=(spacing*begindoctype*somedoctype*enddoctype )/function(...) add_special("@dt@",...) end
local crap_parsed=1-beginelement-endelement-emptyelement-begininstruction-begincomment-begincdata-ampersand
local crap_unparsed=1-beginelement-endelement-emptyelement-begininstruction-begincomment-begincdata
-local parsedcrap=Cs((crap_parsed^1+entity)^1)/handle_crap_error
-local unparsedcrap=Cs((crap_unparsed )^1)/handle_crap_error
+local parsedcrap=Cs((crap_parsed^1+entity_text)^1)/handle_crap_error
+local parsedcrap=Cs((crap_parsed^1+entity_text)^1)/handle_crap_error
+local unparsedcrap=Cs((crap_unparsed )^1)/handle_crap_error
local trailer=space^0*(text_unparsed/set_message)^0
-local grammar_parsed_text=P { "preamble",
- preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0*V("parent")*trailer,
+grammar_parsed_text_one=P { "preamble",
+ preamble=utfbom^0*instruction^0*(doctype+comment+instruction)^0,
+}
+grammar_parsed_text_two=P { "followup",
+ followup=V("parent")*trailer,
parent=beginelement*V("children")^0*endelement,
children=parsedtext+V("parent")+emptyelement+comment+cdata+instruction+parsedcrap,
}
@@ -10467,37 +10729,26 @@ local grammar_unparsed_text=P { "preamble",
children=unparsedtext+V("parent")+emptyelement+comment+cdata+instruction+unparsedcrap,
}
local function _xmlconvert_(data,settings)
- settings=settings or {}
- strip=settings.strip_cm_and_dt
- utfize=settings.utfize_entities
- resolve=settings.resolve_entities
- resolve_predefined=settings.resolve_predefined_entities
- unify_predefined=settings.unify_predefined_entities
- cleanup=settings.text_cleanup
- entities=settings.entities or {}
- if utfize==nil then
- settings.utfize_entities=true
- utfize=true
- end
- if resolve_predefined==nil then
- settings.resolve_predefined_entities=true
- resolve_predefined=true
- end
- stack,top,at,xmlns,errorstr={},{},{},{},nil
- acache,hcache,dcache={},{},{}
- reported_attribute_errors={}
+ settings=settings or {}
+ preparexmlstate(settings)
if settings.parent_root then
mt=getmetatable(settings.parent_root)
else
initialize_mt(top)
end
- stack[#stack+1]=top
+ level=level+1
+ stack[level]=top
top.dt={}
dt=top.dt
+ nt=0
if not data or data=="" then
errorstr="empty xml file"
elseif utfize or resolve then
- if lpegmatch(grammar_parsed_text,data) then
+ local m=lpegmatch(grammar_parsed_text_one,data)
+ if m then
+ m=lpegmatch(grammar_parsed_text_two,data,m)
+ end
+ if m then
else
errorstr="invalid xml file - parsed text"
end
@@ -10513,8 +10764,8 @@ local function _xmlconvert_(data,settings)
local result
if errorstr and errorstr~="" then
result={ dt={ { ns="",tg="error",dt={ errorstr },at={},er=true } } }
-setmetatable(result,mt)
-setmetatable(result.dt[1],mt)
+ setmetatable(result,mt)
+ setmetatable(result.dt[1],mt)
setmetatable(stack,mt)
local errorhandler=settings.error_handler
if errorhandler==false then
@@ -10556,13 +10807,10 @@ setmetatable(result.dt[1],mt)
decimals=dcache,
hexadecimals=hcache,
names=acache,
+ intermediates=parameters,
}
}
- strip,utfize,resolve,resolve_predefined=nil,nil,nil,nil
- unify_predefined,cleanup,entities=nil,nil,nil
- stack,top,at,xmlns,errorstr=nil,nil,nil,nil,nil
- acache,hcache,dcache=nil,nil,nil
- reported_attribute_errors,mt,errorhandler=nil,nil,nil
+ preparexmlstate()
return result
end
local function xmlconvert(data,settings)
@@ -10624,15 +10872,15 @@ function xml.toxml(data)
return data
end
end
-local function copy(old,tables)
+local function copy(old)
if old then
- tables=tables or {}
local new={}
- if not tables[old] then
- tables[old]=new
- end
for k,v in next,old do
- new[k]=(type(v)=="table" and (tables[v] or copy(v,tables))) or v
+ if type(v)=="table" then
+ new[k]=table.copy(v)
+ else
+ new[k]=v
+ end
end
local mt=getmetatable(old)
if mt then
@@ -10731,7 +10979,7 @@ local function verbose_cdata(e,handlers)
handlers.handle("<![CDATA[",e.dt[1],"]]>")
end
local function verbose_doctype(e,handlers)
- handlers.handle("<!DOCTYPE ",e.dt[1],">")
+ handlers.handle("<!DOCTYPE",e.dt[1],">")
end
local function verbose_root(e,handlers)
handlers.serialize(e.dt,handlers)
@@ -11013,7 +11261,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["lxml-lpt"] = package.loaded["lxml-lpt"] or true
--- original size: 50461, stripped down to: 31497
+-- original size: 51229, stripped down to: 31529
if not modules then modules={} end modules ['lxml-lpt']={
version=1.001,
@@ -11390,13 +11638,27 @@ local lp_noequal=P("!=")/"~="+P("<=")+P(">=")+P("==")
local lp_doequal=P("=")/"=="
local lp_or=P("|")/" or "
local lp_and=P("&")/" and "
-local lp_builtin=P (
- P("text")/"(ll.dt[1] or '')"+
- P("content")/"ll.dt"+
- P("name")/"((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)"+P("tag")/"ll.tg"+P("position")/"l"+
- P("firstindex")/"1"+P("lastindex")/"(#ll.__p__.dt or 1)"+P("firstelement")/"1"+P("lastelement")/"(ll.__p__.en or 1)"+P("first")/"1"+P("last")/"#list"+P("rootposition")/"order"+P("order")/"order"+P("element")/"(ll.ei or 1)"+P("index")/"(ll.ni or 1)"+P("match")/"(ll.mi or 1)"+
- P("ns")/"ll.ns"
- )*((spaces*P("(")*spaces*P(")"))/"")
+local builtin={
+ text="(ll.dt[1] or '')",
+ content="ll.dt",
+ name="((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)",
+ tag="ll.tg",
+ position="l",
+ firstindex="1",
+ firstelement="1",
+ first="1",
+ lastindex="(#ll.__p__.dt or 1)",
+ lastelement="(ll.__p__.en or 1)",
+ last="#list",
+ rootposition="order",
+ order="order",
+ element="(ll.ei or 1)",
+ index="(ll.ni or 1)",
+ match="(ll.mi or 1)",
+ namespace="ll.ns",
+ ns="ll.ns",
+}
+local lp_builtin=lpeg.utfchartabletopattern(builtin)/builtin*((spaces*P("(")*spaces*P(")"))/"")
local lp_attribute=(P("@")+P("attribute::"))/""*Cc("(ll.at and ll.at['")*((R("az","AZ")+S("-_:"))^1)*Cc("'])")
local lp_fastpos_p=P("+")^0*R("09")^1*P(-1)/"l==%0"
local lp_fastpos_n=P("-")*R("09")^1*P(-1)/"(%0<0 and (#list+%0==l))"
@@ -14196,7 +14458,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-env"] = package.loaded["data-env"] or true
--- original size: 9518, stripped down to: 7037
+-- original size: 9649, stripped down to: 7131
if not modules then modules={} end modules ['data-env']={
version=1.001,
@@ -14332,6 +14594,11 @@ local relations=allocate {
names={ 'fontconfig','fontconfig file','fontconfig files' },
variable='FONTCONFIG_PATH',
},
+ pk={
+ names={ "pk" },
+ variable='PKFONTS',
+ suffixes={ 'pk' },
+ },
},
obsolete={
enc={
@@ -14970,7 +15237,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-res"] = package.loaded["data-res"] or true
--- original size: 67003, stripped down to: 46291
+-- original size: 67241, stripped down to: 46427
if not modules then modules={} end modules ['data-res']={
version=1.001,
@@ -16240,10 +16507,18 @@ local function findfiles(filename,filetype,allresults)
return result or {},status
end
function resolvers.findfiles(filename,filetype)
- return findfiles(filename,filetype,true)
+ if not filename or filename=="" then
+ return ""
+ else
+ return findfiles(filename,filetype,true)
+ end
end
function resolvers.findfile(filename,filetype)
- return findfiles(filename,filetype,false)[1] or ""
+ if not filename or filename=="" then
+ return ""
+ else
+ return findfiles(filename,filetype,false)[1] or ""
+ end
end
function resolvers.findpath(filename,filetype)
return filedirname(findfiles(filename,filetype,false)[1] or "")
@@ -18363,8 +18638,8 @@ end -- of closure
-- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sac.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 782445
--- stripped bytes : 283493
+-- original bytes : 791821
+-- stripped bytes : 286453
-- end library merge
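One structural change in the lxml-tab.lua hunks above is the split of grammar_parsed_text into two grammars: the first consumes the preamble and, having no captures, makes lpegmatch return the position right after it; the second grammar is then started at that position through lpegmatch's init argument, and can also be reused on its own for entity replacement text. A minimal standalone illustration of that two-pass idea with plain lpeg (the patterns below are simplified stand-ins, not the real grammars):

local lpeg = require("lpeg")
local P, C, lpegmatch = lpeg.P, lpeg.C, lpeg.match

-- phase one: consume an optional declaration; no captures, so lpegmatch
-- returns the position right after the match
local preamble = (P("<?xml") * (1 - P("?>"))^0 * P("?>"))^-1 * P(" ")^0
-- phase two: parse the remainder, started at that position
local body     = C(P("<") * (1 - P(">"))^0 * P(">"))

local data = '<?xml version="1.0"?> <root>'
local m = lpegmatch(preamble, data)      -- 23 for this input
if m then
    print(lpegmatch(body, data, m))      -- prints: <root>
end
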
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf
index 9bc6c85d8..053e8e353 100644
--- a/tex/context/base/context-version.pdf
+++ b/tex/context/base/context-version.pdf
Binary files differ
diff --git a/tex/context/base/mkiv/back-pdf.mkiv b/tex/context/base/mkiv/back-pdf.mkiv
index 79f9ed353..c4aaacc9b 100644
--- a/tex/context/base/mkiv/back-pdf.mkiv
+++ b/tex/context/base/mkiv/back-pdf.mkiv
@@ -46,18 +46,18 @@
%D These are already set:
-\pdfhorigin 1in
-\pdfvorigin \pdfhorigin
+% \pdfhorigin 1in
+% \pdfvorigin \pdfhorigin
%D These too and most of them will be protected as well:
-\pdfminorversion \plusseven
-\pdfgentounicode \plusone % \let\pdfgentounicode \undefined \newcount\pdfgentounicode
-\pdfinclusioncopyfonts \plusone % \let\pdfinclusioncopyfonts \undefined \newcount\pdfinclusioncopyfonts
-\pdfinclusionerrorlevel \zerocount % \let\pdfinclusionerrorlevel\undefined \newcount\pdfinclusionerrorlevel
-\pdfdecimaldigits \plussix % \let\pdfdecimaldigits \undefined \newcount\pdfdecimaldigits
-\pdfimageresolution 300
-\pdfpkresolution 600
+% \pdfminorversion \plusseven
+% \pdfgentounicode \plusone % \let\pdfgentounicode \undefined \newcount\pdfgentounicode
+% \pdfinclusioncopyfonts \plusone % \let\pdfinclusioncopyfonts \undefined \newcount\pdfinclusioncopyfonts
+% \pdfinclusionerrorlevel \zerocount % \let\pdfinclusionerrorlevel\undefined \newcount\pdfinclusionerrorlevel
+% \pdfdecimaldigits \plussix % \let\pdfdecimaldigits \undefined \newcount\pdfdecimaldigits
+% \pdfimageresolution 300
+% \pdfpkresolution 600
%D Let's block these (we could share a dummy:
@@ -289,7 +289,7 @@
\edef\dofill{\number#7}%
\edef\mode{\number#8}%
% no \ifcase, else \relax in pdfcode
- \setbox\scratchbox\hpack
+ \setbox\scratchbox\naturalhpack
{\ifnum\dostroke\dofill>\zerocount
\pdfliteral
{q
diff --git a/tex/context/base/mkiv/catc-ini.mkiv b/tex/context/base/mkiv/catc-ini.mkiv
index 26cf17edf..440d53efe 100644
--- a/tex/context/base/mkiv/catc-ini.mkiv
+++ b/tex/context/base/mkiv/catc-ini.mkiv
@@ -224,34 +224,36 @@
\def\reinstatecatcodecommand{\afterassignment\syst_catcodes_reinstate_normal\c_syst_catcodes_b}
-% \def\syst_catcodes_reinstate_normal % can be used when a direct definition has been done
-% {\begingroup % and the selector has been lost
-% \uccode\c_syst_catcodes_hack\c_syst_catcodes_b
-% \catcode\uccode\c_syst_catcodes_hack\activecatcode
-% \uppercase{\xdef~{\noexpand\catcodecommand{\number\c_syst_catcodes_b}}}%
-% \endgroup}
+\def\syst_catcodes_reinstate_normal % can be used when a direct definition has been done
+ {\begingroup % and the selector has been lost
+ \uccode\c_syst_catcodes_hack\c_syst_catcodes_b
+ \catcode\uccode\c_syst_catcodes_hack\activecatcode
+ \uppercase{\xdef~{\noexpand\catcodecommand{\number\c_syst_catcodes_b}}}%
+ \endgroup}
-% \def\syst_catcodes_reinstate_unexpanded % can be used when a direct definition has been done
-% {\begingroup % and the selector has been lost
-% \uccode\c_syst_catcodes_hack\c_syst_catcodes_b
-% \catcode\uccode\c_syst_catcodes_hack\activecatcode
-% \uppercase{\normalprotected\xdef~{\noexpand\catcodecommand{\number\c_syst_catcodes_b}}}%
-% \endgroup}
+\def\syst_catcodes_reinstate_unexpanded % can be used when a direct definition has been done
+ {\begingroup % and the selector has been lost
+ \uccode\c_syst_catcodes_hack\c_syst_catcodes_b
+ \catcode\uccode\c_syst_catcodes_hack\activecatcode
+ \uppercase{\normalprotected\xdef~{\noexpand\catcodecommand{\number\c_syst_catcodes_b}}}%
+ \endgroup}
%D This can be used when a direct definition has been done and the selector has been
%D lost.
-\def\syst_catcodes_reinstate_normal
- {\begingroup
- \edef\next{\noexpand\catcodecommand{\number\c_syst_catcodes_b}}%
- \global\letcharcode\c_syst_catcodes_b\next
- \endgroup}
-
-\def\syst_catcodes_reinstate_unexpanded
- {\begingroup
- \normalprotected\edef\next{\noexpand\catcodecommand{\number\c_syst_catcodes_b}}%
- \global\letcharcode\c_syst_catcodes_b\next
- \endgroup}
+% problem: \next needs to be unique (as it gets bound)
+%
+% \def\syst_catcodes_reinstate_normal
+% {\begingroup
+% \edef\next{\noexpand\catcodecommand{\number\c_syst_catcodes_b}}%
+% \global\letcharcode\c_syst_catcodes_b\next
+% \endgroup}
+%
+% \def\syst_catcodes_reinstate_unexpanded
+% {\begingroup
+% \normalprotected\edef\next{\noexpand\catcodecommand{\number\c_syst_catcodes_b}}%
+% \global\letcharcode\c_syst_catcodes_b\next
+% \endgroup}
\newconstant\defaultcatcodetable
diff --git a/tex/context/base/mkiv/char-ini.mkiv b/tex/context/base/mkiv/char-ini.mkiv
index 95ff7af5a..28bfbc1cf 100644
--- a/tex/context/base/mkiv/char-ini.mkiv
+++ b/tex/context/base/mkiv/char-ini.mkiv
@@ -45,6 +45,8 @@
\def\utfchar #1{\clf_utfchar \numexpr#1\relax}
\def\safechar#1{\clf_safechar\numexpr#1\relax}
+\unexpanded\def\Ux#1{\Uchar"#1\relax} % used in xml
+
\def\checkedchar {\relax\ifmmode\expandafter\checkedmathchar\else\expandafter\checkedtextchar\fi} % #1#2
\def\checkedmathchar#1#2{#2}
%def\checkedtextchar #1{\iffontchar\font#1 \expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments\fi{\char#1}}
diff --git a/tex/context/base/mkiv/cldf-ini.lua b/tex/context/base/mkiv/cldf-ini.lua
index 0253adde5..f4819b11a 100644
--- a/tex/context/base/mkiv/cldf-ini.lua
+++ b/tex/context/base/mkiv/cldf-ini.lua
@@ -485,17 +485,37 @@ local catcodes = {
xml = xmlcatcodes, xmlcatcodes = xmlcatcodes,
}
+-- maybe just increment / decrement
+
+-- local function pushcatcodes(c)
+-- insert(catcodestack,currentcatcodes)
+-- currentcatcodes = (c and catcodes[c] or tonumber(c)) or currentcatcodes
+-- contentcatcodes = currentcatcodes
+-- end
+--
+-- local function popcatcodes()
+-- currentcatcodes = remove(catcodestack) or currentcatcodes
+-- contentcatcodes = currentcatcodes
+-- end
+
+local catcodelevel = 0
+
local function pushcatcodes(c)
- insert(catcodestack,currentcatcodes)
+ catcodelevel = catcodelevel + 1
+ catcodestack[catcodelevel] = currentcatcodes
currentcatcodes = (c and catcodes[c] or tonumber(c)) or currentcatcodes
contentcatcodes = currentcatcodes
end
local function popcatcodes()
- currentcatcodes = remove(catcodestack) or currentcatcodes
+ if catcodelevel > 0 then
+ currentcatcodes = catcodestack[catcodelevel] or currentcatcodes
+ catcodelevel = catcodelevel - 1
+ end
contentcatcodes = currentcatcodes
end
+context.catcodes = catcodes
context.pushcatcodes = pushcatcodes
context.popcatcodes = popcatcodes
@@ -1351,7 +1371,7 @@ do
if data and data ~= "" then
local filename = resolve("virtual",validstring(tag,"viafile"),data)
-- context.startregime { "utf" }
- context.input(filename)
+ input(filename)
-- context.stopregime()
end
end
@@ -1362,7 +1382,7 @@ do
local collected = nil
local nofcollected = 0
- local sentinel = string.char(26) -- endoffileasciicode : ignorecatcode
+ local sentinel = string.char(26) -- ASCII SUB character : endoffileasciicode : ignorecatcode
local level = 0
local function collect(c,...) -- can be optimized
@@ -1385,13 +1405,15 @@ do
local collectdirect = collect
+ -- doesn't work well with tracing; do we need to avoid that then?
+
function context.startcollecting()
if level == 0 then
collected = { }
nofcollected = 0
--
- flush = collect
- flushdirect = collectdirect
+ flush = collect
+ flushdirect = collectdirect
--
context.__flush = flush
context.__flushdirect = flushdirect
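The cldf-ini.lua hunks above replace table.insert/table.remove on the catcode stack with an explicit level counter, which avoids the remove call and makes a surplus pop harmless. A tiny standalone sketch of that indexed-stack pattern (plain Lua; names invented here):

local stack, level, current = { }, 0, "default"

local function push(value)
    level        = level + 1
    stack[level] = current
    current      = value or current
end

local function pop()
    if level > 0 then                -- an extra pop leaves the state untouched
        current = stack[level] or current
        level   = level - 1
    end
end

push("tex")  print(current)          -- tex
push("xml")  print(current)          -- xml
pop()        print(current)          -- tex
pop()        print(current)          -- default
pop()        print(current)          -- still default, no error
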
diff --git a/tex/context/base/mkiv/colo-ini.lua b/tex/context/base/mkiv/colo-ini.lua
index 81adfa680..495a09809 100644
--- a/tex/context/base/mkiv/colo-ini.lua
+++ b/tex/context/base/mkiv/colo-ini.lua
@@ -576,10 +576,13 @@ local function mpcolor(model,ca,ta,default)
return formatters["(%s,%s,%s)"](cv[3],cv[4],cv[5])
end
end
- else
- default = default or 0 -- rgb !
- return formatters["(%s,%s,%s)"](default,default,default)
end
+ local tv = transparencyvalues[ta]
+ if tv then
+ return formatters["(%s,%s)"](tv[1],tv[2])
+ end
+ default = default or 0 -- rgb !
+ return formatters["(%s,%s,%s)"](default,default,default)
end
local function mpnamedcolor(name)
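
The net effect of the change above: when no color value is found, a transparency-only pair is tried before falling back to grey. A simplified sketch of that order (string.format instead of ConTeXt's formatters, one color model only):

    local function mpcolorlike(colorvalues, transparencyvalues, ca, ta, default)
        local cv = colorvalues[ca]
        if cv then
            return string.format("(%s,%s,%s)", cv[3], cv[4], cv[5])    -- rgb triplet
        end
        local tv = transparencyvalues[ta]
        if tv then
            return string.format("(%s,%s)", tv[1], tv[2])              -- transparency only
        end
        default = default or 0
        return string.format("(%s,%s,%s)", default, default, default)  -- grey fallback
    end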
diff --git a/tex/context/base/mkiv/colo-ini.mkiv b/tex/context/base/mkiv/colo-ini.mkiv
index 0f8d9b662..0e8973c72 100644
--- a/tex/context/base/mkiv/colo-ini.mkiv
+++ b/tex/context/base/mkiv/colo-ini.mkiv
@@ -1087,6 +1087,17 @@
\colo_helpers_inherited_current_ca{#1} %
\colo_helpers_inherited_current_ta{#1} }
+\def\MPcoloronly#1%
+ {\clf_mpcolor
+ \attribute\colormodelattribute
+ \colo_helpers_inherited_current_ca{#1} %
+ \zerocount}
+
+\def\MPtransparency#1%
+ {\clf_mpcolor
+ \zerocount
+ \zerocount
+ \colo_helpers_inherited_current_ta{#1} }
\def\MPoptions#1%
{\clf_mpoptions
diff --git a/tex/context/base/mkiv/cont-new.mkiv b/tex/context/base/mkiv/cont-new.mkiv
index b5c4ebf82..72b9eeeb2 100644
--- a/tex/context/base/mkiv/cont-new.mkiv
+++ b/tex/context/base/mkiv/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2016.01.18 22:21}
+\newcontextversion{2016.01.28 22:35}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
diff --git a/tex/context/base/mkiv/context.mkiv b/tex/context/base/mkiv/context.mkiv
index d8a8910ca..384a2de9b 100644
--- a/tex/context/base/mkiv/context.mkiv
+++ b/tex/context/base/mkiv/context.mkiv
@@ -39,7 +39,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2016.01.18 22:21}
+\edef\contextversion{2016.01.28 22:35}
\edef\contextkind {beta}
%D For those who want to use this:
diff --git a/tex/context/base/mkiv/data-env.lua b/tex/context/base/mkiv/data-env.lua
index 1d7728c22..8bb25036d 100644
--- a/tex/context/base/mkiv/data-env.lua
+++ b/tex/context/base/mkiv/data-env.lua
@@ -139,6 +139,11 @@ local relations = allocate { -- todo: handlers also here
names = { 'fontconfig', 'fontconfig file', 'fontconfig files' },
variable = 'FONTCONFIG_PATH',
},
+ pk = {
+ names = { "pk" },
+ variable = 'PKFONTS',
+ suffixes = { 'pk' },
+ },
},
obsolete = {
enc = {
diff --git a/tex/context/base/mkiv/data-res.lua b/tex/context/base/mkiv/data-res.lua
index cbda21ce2..831ad881c 100644
--- a/tex/context/base/mkiv/data-res.lua
+++ b/tex/context/base/mkiv/data-res.lua
@@ -1558,11 +1558,21 @@ local function findfiles(filename,filetype,allresults)
end
function resolvers.findfiles(filename,filetype)
- return findfiles(filename,filetype,true)
+ if not filename or filename == "" then
+ -- weird ... why called then
+ return ""
+ else
+ return findfiles(filename,filetype,true)
+ end
end
function resolvers.findfile(filename,filetype)
- return findfiles(filename,filetype,false)[1] or ""
+ if not filename or filename == "" then
+ -- weird ... why called then
+ return ""
+ else
+ return findfiles(filename,filetype,false)[1] or ""
+ end
end
function resolvers.findpath(filename,filetype)
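
Both resolver entry points now guard against empty names before doing any work. The same guard written once as a wrapper, purely illustrative (the patch inlines it):

    local function guarded(finder)
        return function(filename, filetype)
            if not filename or filename == "" then
                return ""                        -- nothing sensible to look up
            end
            return finder(filename, filetype)
        end
    end

    -- usage sketch: resolvers.findfile = guarded(function(name, kind) ... end)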
diff --git a/tex/context/base/mkiv/font-cff.lua b/tex/context/base/mkiv/font-cff.lua
index 0314e4ac4..8c57b473e 100644
--- a/tex/context/base/mkiv/font-cff.lua
+++ b/tex/context/base/mkiv/font-cff.lua
@@ -1384,6 +1384,7 @@ do
width = nominalwidth + width
end
--
+index = index - 1
local glyph = glyphs[index] -- can be autodefined in otr
if not glyph then
glyphs[index] = {
diff --git a/tex/context/base/mkiv/font-otl.lua b/tex/context/base/mkiv/font-otl.lua
index cc6befa9d..f4b889b75 100644
--- a/tex/context/base/mkiv/font-otl.lua
+++ b/tex/context/base/mkiv/font-otl.lua
@@ -53,7 +53,7 @@ local report_otf = logs.reporter("fonts","otf loading")
local fonts = fonts
local otf = fonts.handlers.otf
-otf.version = 3.012 -- beware: also sync font-mis.lua and in mtx-fonts
+otf.version = 3.013 -- beware: also sync font-mis.lua and in mtx-fonts
otf.cache = containers.define("fonts", "otl", otf.version, true)
local otfreaders = otf.readers
diff --git a/tex/context/base/mkiv/font-vf.lua b/tex/context/base/mkiv/font-vf.lua
index fd3be3935..7037c6c8b 100644
--- a/tex/context/base/mkiv/font-vf.lua
+++ b/tex/context/base/mkiv/font-vf.lua
@@ -39,18 +39,18 @@ vf.version = 1.000 -- same as tfm
-- if trace_defining then
-- report_defining("locating vf for %a",name)
-- end
--- return findbinfile(name,"ovf")
+-- return findbinfile(name,"ovf") or ""
-- else
-- if trace_defining then
-- report_defining("vf for %a is already taken care of",name)
-- end
--- return nil -- ""
+-- return ""
-- end
-- else
-- if trace_defining then
-- report_defining("locating vf for %a",name)
-- end
--- return findbinfile(name,"ovf")
+-- return findbinfile(name,"ovf") or ""
-- end
-- end
--
diff --git a/tex/context/base/mkiv/l-unicode.lua b/tex/context/base/mkiv/l-unicode.lua
index 70b60324a..3dec80013 100644
--- a/tex/context/base/mkiv/l-unicode.lua
+++ b/tex/context/base/mkiv/l-unicode.lua
@@ -528,9 +528,10 @@ end
-- end, pattern
-- end
-function utf.remapper(mapping,option) -- static also returns a pattern
+function utf.remapper(mapping,option,action) -- static also returns a pattern
local variant = type(mapping)
if variant == "table" then
+ action = action or mapping
if option == "dynamic" then
local pattern = false
table.setmetatablenewindex(mapping,function(t,k,v) rawset(t,k,v) pattern = false end)
@@ -539,16 +540,16 @@ function utf.remapper(mapping,option) -- static also returns a pattern
return ""
else
if not pattern then
- pattern = Cs((tabletopattern(mapping)/mapping + p_utf8char)^0)
+ pattern = Cs((tabletopattern(mapping)/action + p_utf8char)^0)
end
return lpegmatch(pattern,str)
end
end
elseif option == "pattern" then
- return Cs((tabletopattern(mapping)/mapping + p_utf8char)^0)
+ return Cs((tabletopattern(mapping)/action + p_utf8char)^0)
-- elseif option == "static" then
else
- local pattern = Cs((tabletopattern(mapping)/mapping + p_utf8char)^0)
+ local pattern = Cs((tabletopattern(mapping)/action + p_utf8char)^0)
return function(str)
if not str or str == "" then
return ""
diff --git a/tex/context/base/mkiv/lang-rep.lua b/tex/context/base/mkiv/lang-rep.lua
index 0090d7cc3..ec82b7b19 100644
--- a/tex/context/base/mkiv/lang-rep.lua
+++ b/tex/context/base/mkiv/lang-rep.lua
@@ -17,16 +17,22 @@ if not modules then modules = { } end modules ['lang-rep'] = {
local type, tonumber = type, tonumber
local utfbyte, utfsplit = utf.byte, utf.split
-local P, C, U, Cc, Ct, lpegmatch = lpeg.P, lpeg.C, lpeg.patterns.utf8character, lpeg.Cc, lpeg.Ct, lpeg.match
+local P, C, U, Cc, Ct, Cs, lpegmatch = lpeg.P, lpeg.C, lpeg.patterns.utf8character, lpeg.Cc, lpeg.Ct, lpeg.Cs, lpeg.match
local find = string.find
+local zwnj = 0x200C
local grouped = P("{") * ( Ct((U/utfbyte-P("}"))^1) + Cc(false) ) * P("}")-- grouped
local splitter = Ct((
- Ct(Cc("discretionary") * grouped * grouped * grouped)
- + Ct(Cc("noligature") * grouped)
+ #P("{") * (
+ P("{}") / function() return zwnj end
+ + Ct(Cc("discretionary") * grouped * grouped * grouped)
+ + Ct(Cc("noligature") * grouped)
+ )
+ U/utfbyte
)^1)
+local stripper = P("{") * Cs((1-P(-2))^0) * P("}") * P(-1)
+
local trace_replacements = false trackers.register("languages.replacements", function(v) trace_replacements = v end)
local trace_detail = false trackers.register("languages.replacements.detail", function(v) trace_detail = v end)
@@ -93,6 +99,7 @@ lists[v_reset].attribute = unsetvalue -- so we discard 0
-- todo: glue kern attr
local function add(root,word,replacement)
+ local replacement = lpegmatch(stripper,replacement) or replacement
local list = utfsplit(word,true)
local size = #list
for i=1,size do
@@ -229,10 +236,16 @@ function replacements.handler(head)
elseif method == "noligature" then
-- not that efficient to copy but ok for testing
local list = codes[2]
- for i=1,#list do
+ if list then
+ for i=1,#list do
+ new = copy_node(last)
+ setchar(new,list[i])
+ setattr(new,a_noligature,1)
+ head, current = insert_after(head,current,new)
+ end
+ else
new = copy_node(last)
- setchar(new,list[i])
- setattr(new,a_noligature,1)
+ setchar(new,zwnj)
head, current = insert_after(head,current,new)
end
else
@@ -247,9 +260,13 @@ function replacements.handler(head)
end
flush_list(list)
elseif oldlength == newlength then -- #old == #new
- for i=1,newlength do
- setchar(current,newcodes[i])
- current = getnext(current)
+ if final.word == final.replacement then
+ -- nothing to do but skip
+ else
+ for i=1,newlength do
+ setchar(current,newcodes[i])
+ current = getnext(current)
+ end
end
elseif oldlength < newlength then -- #old < #new
for i=1,newlength-oldlength do
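
In replacement definitions an empty group now stands for a zero width non joiner (a ligature break), and the splitter turns it into the code point 0x200C. A much simplified, ascii-only approximation of what the splitter produces (the real grammar uses the utf8 character pattern and also handles {...} groups for discretionaries and noligature lists):

    local lpeg = require("lpeg")
    local P, Ct = lpeg.P, lpeg.Ct
    local byte = string.byte

    local zwnj = 0x200C

    local splitter = Ct((
          P("{}") / function() return zwnj end   -- empty group -> ligature break
        + P(1)    / byte                         -- any other character -> code point
    )^1)

    local t = lpeg.match(splitter, "als{}of")
    -- t == { 97, 108, 115, 0x200C, 111, 102 }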
diff --git a/tex/context/base/mkiv/lpdf-ini.lua b/tex/context/base/mkiv/lpdf-ini.lua
index 6bce56b43..2f55fb25d 100644
--- a/tex/context/base/mkiv/lpdf-ini.lua
+++ b/tex/context/base/mkiv/lpdf-ini.lua
@@ -1199,12 +1199,17 @@ end
do
- local f_actual_text_one = formatters["BT /Span << /ActualText <feff%04x> >> BDC [<feff>] TJ %s EMC ET"]
- local f_actual_text_two = formatters["BT /Span << /ActualText <feff%04x%04x> >> BDC [<feff>] TJ %s EMC ET"]
- local f_actual_text = formatters["/Span <</ActualText %s >> BDC"]
+ local f_actual_text_one = formatters["BT /Span << /ActualText <feff%04x> >> BDC [<feff>] TJ %s EMC ET"]
+ local f_actual_text_one_b = formatters["BT /Span << /ActualText <feff%04x> >> BDC [<feff>] TJ "]
+ local f_actual_text_two = formatters["BT /Span << /ActualText <feff%04x%04x> >> BDC [<feff>] TJ %s EMC ET"]
+ local f_actual_text_two_b = formatters["BT /Span << /ActualText <feff%04x%04x> >> BDC [<feff>] TJ "]
+ local s_actual_text_e = " EMC ET"
+ local f_actual_text = formatters["/Span <</ActualText %s >> BDC"]
- local context = context
- local pdfdirect = nodes.pool.pdfdirect
+ local context = context
+ local pdfdirect = nodes.pool.pdfdirect
+
+ -- todo: use tounicode from the font mapper
function codeinjections.unicodetoactualtext(unicode,pdfcode)
if unicode < 0x10000 then
@@ -1214,6 +1219,18 @@ do
end
end
+ function codeinjections.startunicodetoactualtext(unicode)
+ if unicode < 0x10000 then
+ return f_actual_text_one_b(unicode)
+ else
+ return f_actual_text_two_b(unicode/1024+0xD800,unicode%1024+0xDC00)
+ end
+ end
+
+ function codeinjections.stopunicodetoactualtext()
+ return s_actual_text_e
+ end
+
implement {
name = "startactualtext",
arguments = "string",
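
The new start/stop pair exposes the opening and closing fragments of the /ActualText span separately, so literal code can be wrapped incrementally instead of being formatted in one go. A standalone approximation of the BMP branch (plain string.format instead of ConTeXt's formatters):

    local function start_actualtext(unicode)       -- unicode < 0x10000 only
        return string.format("BT /Span << /ActualText <feff%04x> >> BDC [<feff>] TJ ", unicode)
    end

    local function stop_actualtext()
        return " EMC ET"
    end

    print(start_actualtext(0x00E9) .. "... glyph stream ..." .. stop_actualtext())
    -- BT /Span << /ActualText <feff00e9> >> BDC [<feff>] TJ ... glyph stream ... EMC ET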
diff --git a/tex/context/base/mkiv/luat-fio.lua b/tex/context/base/mkiv/luat-fio.lua
index daa6cccb7..43f7d6486 100644
--- a/tex/context/base/mkiv/luat-fio.lua
+++ b/tex/context/base/mkiv/luat-fio.lua
@@ -33,12 +33,49 @@ if not resolvers.instance then
if callback then
- local register = callbacks.register
+ local register = callbacks.register
+
+ local addsuffix = file.addsuffix
+ local join = file.join
+
+ local function findpk(font,dpi)
+ local dpi = dpi or 600 -- could take from resolution
+ -- <font>.pk
+ local name = addsuffix(font,"pk")
+ -- <dpi>/name.pk
+ local temp = join(dpi,name)
+ local okay = findbinfile(temp,"pk")
+ -- print(temp,okay)
+ if okay and okay ~= "" then
+ return okay
+ end
+ -- <dpi>.dpi/name.pk
+ local temp = join(dpi..".dpi",name)
+ local okay = findbinfile(temp,"pk")
+ -- print(temp,okay)
+ if okay and okay ~= "" then
+ return okay
+ end
+ -- <font>.<dpi>pk
+ local name = addsuffix(font,dpi.."pk")
+ -- name.<dpi>pk
+ local temp = name
+ local okay = findbinfile(temp,"pk")
+ -- print(temp,okay)
+ if okay and okay ~= "" then
+ return okay
+ end
+ -- <dpi>.dpi/name.<dpi>pk
+ local temp = join(dpi..".dpi",name)
+ local okay = findbinfile(temp,"pk")
+ -- print(temp,okay)
+ return okay or ""
+ end
-- register('process_jobname' , function(name) return name end, true)
- register('find_read_file' , function(id,name) return findtexfile(name) end, true)
- register('open_read_file' , function( name) return opentexfile(name) end, true)
+ register('find_read_file' , function(id,name) return findtexfile(name) end, true)
+ register('open_read_file' , function( name) return opentexfile(name) end, true)
register('find_data_file' , function(name) return findbinfile(name,"tex") end, true)
register('find_enc_file' , function(name) return findbinfile(name,"enc") end, true)
@@ -48,23 +85,23 @@ if not resolvers.instance then
register('find_map_file' , function(name) return findbinfile(name,"map") end, true)
register('find_opentype_file' , function(name) return findbinfile(name,"otf") end, true)
register('find_output_file' , function(name) return name end, true)
- register('find_pk_file' , function(name) return findbinfile(name,"pk") end, true)
+ register('find_pk_file' , findpk, true)
register('find_sfd_file' , function(name) return findbinfile(name,"sfd") end, true)
register('find_truetype_file' , function(name) return findbinfile(name,"ttf") end, true)
register('find_type1_file' , function(name) return findbinfile(name,"pfb") end, true)
register('find_vf_file' , function(name) return findbinfile(name,"vf") end, true)
register('find_cidmap_file' , function(name) return findbinfile(name,"cidmap") end, true)
- register('read_data_file' , function(file) return loadbinfile(file,"tex") end, true)
- register('read_enc_file' , function(file) return loadbinfile(file,"enc") end, true)
- register('read_font_file' , function(file) return loadbinfile(file,"tfm") end, true)
+ register('read_data_file' , function(file) return loadbinfile(file,"tex") end, true)
+ register('read_enc_file' , function(file) return loadbinfile(file,"enc") end, true)
+ register('read_font_file' , function(file) return loadbinfile(file,"tfm") end, true)
-- format
-- image
- register('read_map_file' , function(file) return loadbinfile(file,"map") end, true)
+ register('read_map_file' , function(file) return loadbinfile(file,"map") end, true)
-- output
- register('read_pk_file' , function(file) return loadbinfile(file,"pk") end, true) -- 600dpi/manfnt.720pk
- register('read_sfd_file' , function(file) return loadbinfile(file,"sfd") end, true)
- register('read_vf_file' , function(file) return loadbinfile(file,"vf" ) end, true)
+ register('read_pk_file' , function(file) return loadbinfile(file,"pk") end, true) -- 600dpi/manfnt.720pk
+ register('read_sfd_file' , function(file) return loadbinfile(file,"sfd") end, true)
+ register('read_vf_file' , function(file) return loadbinfile(file,"vf" ) end, true)
-- register('find_font_file' , function(name) return findbinfile(name,"ofm") end, true)
-- register('find_vf_file' , function(name) return findbinfile(name,"ovf") end, true)
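
The new find_pk_file callback tries four name layouts in turn before giving up. A sketch that just enumerates those candidates (file.addsuffix and file.join reduced to plain concatenation; each name would be handed to findbinfile(name,"pk")):

    local function pkcandidates(font, dpi)
        dpi = dpi or 600
        return {
            dpi .. "/" .. font .. ".pk",                    -- <dpi>/<font>.pk
            dpi .. ".dpi/" .. font .. ".pk",                -- <dpi>.dpi/<font>.pk
            font .. "." .. dpi .. "pk",                     -- <font>.<dpi>pk
            dpi .. ".dpi/" .. font .. "." .. dpi .. "pk",   -- <dpi>.dpi/<font>.<dpi>pk
        }
    end

    for _, name in ipairs(pkcandidates("manfnt", 720)) do
        print(name)
    end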
diff --git a/tex/context/base/mkiv/lxml-ent.lua b/tex/context/base/mkiv/lxml-ent.lua
index a5c5bc389..c392713f0 100644
--- a/tex/context/base/mkiv/lxml-ent.lua
+++ b/tex/context/base/mkiv/lxml-ent.lua
@@ -10,6 +10,7 @@ local type, next, tonumber = type, next, tonumber
local byte, format = string.byte, string.format
local utfchar = utf.char
local lpegmatch = lpeg.match
+local setmetatableindex = table.setmetatableindex
--[[ldx--
<p>We provide (at least here) two entity handlers. The more extensive
@@ -41,17 +42,25 @@ end
if characters and characters.entities then
+    -- the big entity table also has amp, quot, apos, lt and gt in it
+
+ local loaded = false
+
function characters.registerentities(forcecopy)
+ if loaded then
+ return
+ end
if forcecopy then
- table.setmetatableindex(entities,nil)
+ setmetatableindex(entities,nil)
for name, value in next, characters.entities do
if not entities[name] then
entities[name] = value
end
end
else
- table.setmetatableindex(entities,characters.entities)
+ setmetatableindex(entities,characters.entities)
end
+ loaded = true
end
end
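
The registration now runs at most once and either copies the big table physically or hangs it behind the entity table as an index fallback. A standalone sketch of those two modes (plain setmetatable instead of table.setmetatableindex):

    local entities = { tex = "\\TEX{}" }                 -- user definitions win
    local standard = { amp = "&", lt = "<", gt = ">" }   -- stands in for characters.entities
    local loaded   = false

    local function registerentities(forcecopy)
        if loaded then
            return                                        -- only ever register once
        end
        if forcecopy then
            setmetatable(entities, nil)
            for name, value in next, standard do
                if not entities[name] then
                    entities[name] = value                -- physical copy, user values kept
                end
            end
        else
            setmetatable(entities, { __index = standard })  -- lazy fallback lookup
        end
        loaded = true
    end

    registerentities()
    print(entities.amp)   -- "&", found via the __index fallback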
diff --git a/tex/context/base/mkiv/lxml-ini.lua b/tex/context/base/mkiv/lxml-ini.lua
index 28f5040ff..f17f28a7c 100644
--- a/tex/context/base/mkiv/lxml-ini.lua
+++ b/tex/context/base/mkiv/lxml-ini.lua
@@ -21,122 +21,117 @@ local implement = interfaces.implement
-- lxml.id
-implement { name = "lxmlid", actions = lxml.getid, arguments = "string" }
-
-implement { name = "xmldoif", actions = lxml.doif, arguments = { "string", "string" } }
-implement { name = "xmldoifnot", actions = lxml.doifnot, arguments = { "string", "string" } }
-implement { name = "xmldoifelse", actions = lxml.doifelse, arguments = { "string", "string" } }
-implement { name = "xmldoiftext", actions = lxml.doiftext, arguments = { "string", "string" } }
-implement { name = "xmldoifnottext", actions = lxml.doifnottext, arguments = { "string", "string" } }
-implement { name = "xmldoifelsetext", actions = lxml.doifelsetext, arguments = { "string", "string" } }
-
-implement { name = "xmldoifempty", actions = lxml.doifempty, arguments = { "string", "string" } }
-implement { name = "xmldoifnotempty", actions = lxml.doifnotempty, arguments = { "string", "string" } }
-implement { name = "xmldoifelseempty", actions = lxml.doifelseempty, arguments = { "string", "string" } }
-implement { name = "xmldoifselfempty", actions = lxml.doifempty, arguments = "string" } -- second arg is not passed (used)
-implement { name = "xmldoifnotselfempty", actions = lxml.doifnotempty, arguments = "string" } -- second arg is not passed (used)
-implement { name = "xmldoifelseselfempty", actions = lxml.doifelseempty, arguments = "string" } -- second arg is not passed (used)
-
---------- { name = "xmlcontent", actions = lxml.content, arguments = "string" }
---------- { name = "xmlflushstripped", actions = lxml.strip, arguments = { "string", true } }
-implement { name = "xmlall", actions = lxml.all, arguments = { "string", "string" } }
-implement { name = "xmlatt", actions = lxml.att, arguments = { "string", "string" } }
-implement { name = "xmllastatt", actions = lxml.lastatt }
-implement { name = "xmlattdef", actions = lxml.att, arguments = { "string", "string", "string" } }
-implement { name = "xmlattribute", actions = lxml.attribute, arguments = { "string", "string", "string" } }
-implement { name = "xmlattributedef", actions = lxml.attribute, arguments = { "string", "string", "string", "string" } }
-implement { name = "xmlchainatt", actions = lxml.chainattribute, arguments = { "string", "'/'", "string" } }
-implement { name = "xmlchainattdef", actions = lxml.chainattribute, arguments = { "string", "'/'", "string", "string" } }
-implement { name = "xmlrefatt", actions = lxml.refatt, arguments = { "string", "string" } }
-implement { name = "xmlchecknamespace", actions = xml.checknamespace, arguments = { "lxmlid", "string", "string" } }
-implement { name = "xmlcommand", actions = lxml.command, arguments = { "string", "string", "string" } }
-implement { name = "xmlconcat", actions = lxml.concat, arguments = { "string", "string", "string" } } -- \detokenize{#3}
-implement { name = "xmlconcatrange", actions = lxml.concatrange, arguments = { "string", "string", "string", "string", "string" } } -- \detokenize{#5}
-implement { name = "xmlcontext", actions = lxml.context, arguments = { "string", "string" } }
-implement { name = "xmlcount", actions = lxml.count, arguments = { "string", "string" } }
-implement { name = "xmldelete", actions = lxml.delete, arguments = { "string", "string" } }
-implement { name = "xmldirect", actions = lxml.direct, arguments = "string" }
-implement { name = "xmldirectives", actions = lxml.directives.setup, arguments = "string" }
-implement { name = "xmldirectivesafter", actions = lxml.directives.after, arguments = "string" }
-implement { name = "xmldirectivesbefore", actions = lxml.directives.before, arguments = "string" }
-implement { name = "xmldisplayverbatim", actions = lxml.displayverbatim, arguments = "string" }
-implement { name = "xmlelement", actions = lxml.element, arguments = { "string", "string" } } -- could be integer but now we can alias
-implement { name = "xmlfilter", actions = lxml.filter, arguments = { "string", "string" } }
-implement { name = "xmlfilterlist", actions = lxml.filterlist, arguments = { "string", "string" } }
-implement { name = "xmlfirst", actions = lxml.first, arguments = { "string", "string" } }
-implement { name = "xmlflush", actions = lxml.flush, arguments = "string" }
-implement { name = "xmlflushcontext", actions = lxml.context, arguments = "string" }
-implement { name = "xmlflushlinewise", actions = lxml.flushlinewise, arguments = "string" }
-implement { name = "xmlflushspacewise", actions = lxml.flushspacewise, arguments = "string" }
-implement { name = "xmlfunction", actions = lxml.applyfunction, arguments = { "string", "string" } }
-implement { name = "xmlinclude", actions = lxml.include, arguments = { "string", "string", "string", true } }
-implement { name = "xmlincludeoptions", actions = lxml.include, arguments = { "string", "string", "string", "string" } }
-implement { name = "xmlinclusion", actions = lxml.inclusion, arguments = "string" }
-implement { name = "xmlinclusions", actions = lxml.inclusions, arguments = "string" }
-implement { name = "xmlbadinclusions", actions = lxml.badinclusions, arguments = "string" }
-implement { name = "xmlindex", actions = lxml.index, arguments = { "string", "string", "string" } } -- can be integer but now we can alias
-implement { name = "xmlinfo", actions = lxml.info, arguments = "string" }
-implement { name = "xmlinlineverbatim", actions = lxml.inlineverbatim, arguments = "string" }
-implement { name = "xmllast", actions = lxml.last, arguments = { "string", "string" } }
-implement { name = "xmlload", actions = lxml.load, arguments = { "string", "string", "string" } }
-implement { name = "xmlloadbuffer", actions = lxml.loadbuffer, arguments = { "string", "string", "string" } }
-implement { name = "xmlloaddata", actions = lxml.loaddata, arguments = { "string", "string", "string" } }
-implement { name = "xmlloaddirectives", actions = lxml.directives.load, arguments = "string" }
-implement { name = "xmlloadregistered", actions = lxml.loadregistered, arguments = "string" }
-implement { name = "xmlmain", actions = lxml.main, arguments = "string" }
-implement { name = "xmlmatch", actions = lxml.match, arguments = "string" }
-implement { name = "xmlname", actions = lxml.name, arguments = "string" }
-implement { name = "xmlnamespace", actions = lxml.namespace, arguments = "string" }
-implement { name = "xmlnonspace", actions = lxml.nonspace, arguments = { "string", "string" } }
-implement { name = "xmlpos", actions = lxml.pos, arguments = "string" }
-implement { name = "xmlraw", actions = lxml.raw, arguments = { "string", "string" } }
-implement { name = "xmlregisterns", actions = xml.registerns, arguments = { "string", "string" } }
-implement { name = "xmlremapname", actions = xml.remapname, arguments = { "lxmlid", "string","string","string" } }
-implement { name = "xmlremapnamespace", actions = xml.renamespace, arguments = { "lxmlid", "string", "string" } }
-implement { name = "xmlsave", actions = lxml.save, arguments = { "string", "string" } }
---------- { name = "xmlsetfunction", actions = lxml.setaction, arguments = { "string", "string", "string" } }
-implement { name = "xmlsetsetup", actions = lxml.setsetup, arguments = { "string", "string", "string" } }
-implement { name = "xmlsnippet", actions = lxml.snippet, arguments = { "string", "string" } }
-implement { name = "xmlstrip", actions = lxml.strip, arguments = { "string", "string" } }
-implement { name = "xmlstripanywhere", actions = lxml.strip, arguments = { "string", "string", true, true } }
-implement { name = "xmlstripnolines", actions = lxml.strip, arguments = { "string", "string", true } }
-implement { name = "xmlstripped", actions = lxml.stripped, arguments = { "string", "string" } }
-implement { name = "xmlstrippednolines", actions = lxml.stripped, arguments = { "string", "string", true } }
-implement { name = "xmltag", actions = lxml.tag, arguments = "string" }
-implement { name = "xmltext", actions = lxml.text, arguments = { "string", "string" } }
-implement { name = "xmltobuffer", actions = lxml.tobuffer, arguments = { "string", "string", "string" } }
-implement { name = "xmltobufferverbose", actions = lxml.tobuffer, arguments = { "string", "string", "string", true } }
-implement { name = "xmltofile", actions = lxml.tofile, arguments = { "string", "string", "string" } }
-implement { name = "xmltoparameters", actions = lxml.toparameters, arguments = "string" }
-implement { name = "xmlverbatim", actions = lxml.verbatim, arguments = "string" }
-
-implement { name = "xmlstartraw", actions = lxml.startraw }
-implement { name = "xmlstopraw", actions = lxml.stopraw }
-
-implement { name = "xmlprependsetup", actions = lxml.installsetup, arguments = { 1, "string", "string" } } -- 2:*
-implement { name = "xmlappendsetup", actions = lxml.installsetup, arguments = { 2, "string", "string" } } -- 2:*
-implement { name = "xmlbeforesetup", actions = lxml.installsetup, arguments = { 3, "string", "string", "string" } } -- 2:*
-implement { name = "xmlaftersetup", actions = lxml.installsetup, arguments = { 4, "string", "string", "string" } } -- 2:*
-implement { name = "xmlprependdocumentsetup", actions = lxml.installsetup, arguments = { 1, "string", "string" } }
-implement { name = "xmlappenddocumentsetup", actions = lxml.installsetup, arguments = { 2, "string", "string" } }
-implement { name = "xmlbeforedocumentsetup", actions = lxml.installsetup, arguments = { 3, "string", "string", "string" } }
-implement { name = "xmlafterdocumentsetup", actions = lxml.installsetup, arguments = { 4, "string", "string", "string" } }
-implement { name = "xmlremovesetup", actions = lxml.removesetup, arguments = { "string", "string" } } -- 1:*
-implement { name = "xmlremovedocumentsetup", actions = lxml.removesetup, arguments = { "string", "string" } }
-implement { name = "xmlflushdocumentsetups", actions = lxml.flushsetups, arguments = { "string", "string", "string" } } -- 2:*
-implement { name = "xmlresetdocumentsetups", actions = lxml.resetsetups, arguments = "string" }
-
-implement { name = "xmlgetindex", actions = lxml.getindex, arguments = { "string", "string" } }
-implement { name = "xmlwithindex", actions = lxml.withindex, arguments = { "string", "string", "string" } }
-
-implement { name = "xmlsetentity", actions = xml.registerentity, arguments = { "string", "string" } }
-implement { name = "xmltexentity", actions = lxml.registerentity, arguments = { "string", "string" } }
-
-implement { name = "xmlsetcommandtotext", actions = lxml.setcommandtotext, arguments = "string" }
-implement { name = "xmlsetcommandtonone", actions = lxml.setcommandtonone, arguments = "string" }
-
-implement { name = "xmlstarttiming", actions = function() statistics.starttiming(lxml) end }
-implement { name = "xmlstoptiming", actions = function() statistics.stoptiming (lxml) end }
+implement { name = "lxmlid", actions = lxml.getid, arguments = "string" }
+
+implement { name = "xmldoif", actions = lxml.doif, arguments = { "string", "string" } }
+implement { name = "xmldoifnot", actions = lxml.doifnot, arguments = { "string", "string" } }
+implement { name = "xmldoifelse", actions = lxml.doifelse, arguments = { "string", "string" } }
+implement { name = "xmldoiftext", actions = lxml.doiftext, arguments = { "string", "string" } }
+implement { name = "xmldoifnottext", actions = lxml.doifnottext, arguments = { "string", "string" } }
+implement { name = "xmldoifelsetext", actions = lxml.doifelsetext, arguments = { "string", "string" } }
+
+implement { name = "xmldoifempty", actions = lxml.doifempty, arguments = { "string", "string" } }
+implement { name = "xmldoifnotempty", actions = lxml.doifnotempty, arguments = { "string", "string" } }
+implement { name = "xmldoifelseempty", actions = lxml.doifelseempty, arguments = { "string", "string" } }
+implement { name = "xmldoifselfempty", actions = lxml.doifempty, arguments = "string" } -- second arg is not passed (used)
+implement { name = "xmldoifnotselfempty", actions = lxml.doifnotempty, arguments = "string" } -- second arg is not passed (used)
+implement { name = "xmldoifelseselfempty", actions = lxml.doifelseempty, arguments = "string" } -- second arg is not passed (used)
+
+--------- { name = "xmlcontent", actions = lxml.content, arguments = "string" }
+--------- { name = "xmlflushstripped", actions = lxml.strip, arguments = { "string", true } }
+implement { name = "xmlall", actions = lxml.all, arguments = { "string", "string" } }
+implement { name = "xmlatt", actions = lxml.att, arguments = { "string", "string" } }
+implement { name = "xmllastatt", actions = lxml.lastatt }
+implement { name = "xmlattdef", actions = lxml.att, arguments = { "string", "string", "string" } }
+implement { name = "xmlattribute", actions = lxml.attribute, arguments = { "string", "string", "string" } }
+implement { name = "xmlattributedef", actions = lxml.attribute, arguments = { "string", "string", "string", "string" } }
+implement { name = "xmlchainatt", actions = lxml.chainattribute, arguments = { "string", "'/'", "string" } }
+implement { name = "xmlchainattdef", actions = lxml.chainattribute, arguments = { "string", "'/'", "string", "string" } }
+implement { name = "xmlrefatt", actions = lxml.refatt, arguments = { "string", "string" } }
+implement { name = "xmlchecknamespace", actions = xml.checknamespace, arguments = { "lxmlid", "string", "string" } }
+implement { name = "xmlcommand", actions = lxml.command, arguments = { "string", "string", "string" } }
+implement { name = "xmlconcat", actions = lxml.concat, arguments = { "string", "string", "string" } } -- \detokenize{#3}
+implement { name = "xmlconcatrange", actions = lxml.concatrange, arguments = { "string", "string", "string", "string", "string" } } -- \detokenize{#5}
+implement { name = "xmlcontext", actions = lxml.context, arguments = { "string", "string" } }
+implement { name = "xmlcount", actions = lxml.count, arguments = { "string", "string" } }
+implement { name = "xmldelete", actions = lxml.delete, arguments = { "string", "string" } }
+implement { name = "xmldirect", actions = lxml.direct, arguments = "string" }
+implement { name = "xmldirectives", actions = lxml.directives.setup, arguments = "string" }
+implement { name = "xmldirectivesafter", actions = lxml.directives.after, arguments = "string" }
+implement { name = "xmldirectivesbefore", actions = lxml.directives.before, arguments = "string" }
+implement { name = "xmldisplayverbatim", actions = lxml.displayverbatim, arguments = "string" }
+implement { name = "xmlelement", actions = lxml.element, arguments = { "string", "string" } } -- could be integer but now we can alias
+implement { name = "xmlfilter", actions = lxml.filter, arguments = { "string", "string" } }
+implement { name = "xmlfilterlist", actions = lxml.filterlist, arguments = { "string", "string" } }
+implement { name = "xmlfirst", actions = lxml.first, arguments = { "string", "string" } }
+implement { name = "xmlflush", actions = lxml.flush, arguments = "string" }
+implement { name = "xmlflushcontext", actions = lxml.context, arguments = "string" }
+implement { name = "xmlflushlinewise", actions = lxml.flushlinewise, arguments = "string" }
+implement { name = "xmlflushspacewise", actions = lxml.flushspacewise, arguments = "string" }
+implement { name = "xmlfunction", actions = lxml.applyfunction, arguments = { "string", "string" } }
+implement { name = "xmlinclude", actions = lxml.include, arguments = { "string", "string", "string", true } }
+implement { name = "xmlincludeoptions", actions = lxml.include, arguments = { "string", "string", "string", "string" } }
+implement { name = "xmlinclusion", actions = lxml.inclusion, arguments = "string" }
+implement { name = "xmlinclusions", actions = lxml.inclusions, arguments = "string" }
+implement { name = "xmlbadinclusions", actions = lxml.badinclusions, arguments = "string" }
+implement { name = "xmlindex", actions = lxml.index, arguments = { "string", "string", "string" } } -- can be integer but now we can alias
+implement { name = "xmlinlineverbatim", actions = lxml.inlineverbatim, arguments = "string" }
+implement { name = "xmllast", actions = lxml.last, arguments = { "string", "string" } }
+implement { name = "xmlload", actions = lxml.load, arguments = { "string", "string", "string" } }
+implement { name = "xmlloadbuffer", actions = lxml.loadbuffer, arguments = { "string", "string", "string" } }
+implement { name = "xmlloaddata", actions = lxml.loaddata, arguments = { "string", "string", "string" } }
+implement { name = "xmlloaddirectives", actions = lxml.directives.load, arguments = "string" }
+implement { name = "xmlmain", actions = lxml.main, arguments = "string" }
+implement { name = "xmlmatch", actions = lxml.match, arguments = "string" }
+implement { name = "xmlname", actions = lxml.name, arguments = "string" }
+implement { name = "xmlnamespace", actions = lxml.namespace, arguments = "string" }
+implement { name = "xmlnonspace", actions = lxml.nonspace, arguments = { "string", "string" } }
+implement { name = "xmlpos", actions = lxml.pos, arguments = "string" }
+implement { name = "xmlraw", actions = lxml.raw, arguments = { "string", "string" } }
+implement { name = "xmlrawtex", actions = lxml.rawtex, arguments = { "string", "string" } }
+implement { name = "xmlregisterns", actions = xml.registerns, arguments = { "string", "string" } }
+implement { name = "xmlremapname", actions = xml.remapname, arguments = { "lxmlid", "string","string","string" } }
+implement { name = "xmlremapnamespace", actions = xml.renamespace, arguments = { "lxmlid", "string", "string" } }
+implement { name = "xmlsave", actions = lxml.save, arguments = { "string", "string" } }
+implement { name = "xmlsetsetup", actions = lxml.setsetup, arguments = { "string", "string", "string" } }
+implement { name = "xmlsnippet", actions = lxml.snippet, arguments = { "string", "string" } }
+implement { name = "xmlstrip", actions = lxml.strip, arguments = { "string", "string" } }
+implement { name = "xmlstripanywhere", actions = lxml.strip, arguments = { "string", "string", true, true } }
+implement { name = "xmlstripnolines", actions = lxml.strip, arguments = { "string", "string", true } }
+implement { name = "xmlstripped", actions = lxml.stripped, arguments = { "string", "string" } }
+implement { name = "xmlstrippednolines", actions = lxml.stripped, arguments = { "string", "string", true } }
+implement { name = "xmltag", actions = lxml.tag, arguments = "string" }
+implement { name = "xmltext", actions = lxml.text, arguments = { "string", "string" } }
+implement { name = "xmltobuffer", actions = lxml.tobuffer, arguments = { "string", "string", "string" } }
+implement { name = "xmltobufferverbose", actions = lxml.tobuffer, arguments = { "string", "string", "string", true } }
+implement { name = "xmltofile", actions = lxml.tofile, arguments = { "string", "string", "string" } }
+implement { name = "xmltoparameters", actions = lxml.toparameters, arguments = "string" }
+implement { name = "xmlverbatim", actions = lxml.verbatim, arguments = "string" }
+
+implement { name = "xmlstartraw", actions = lxml.startraw }
+implement { name = "xmlstopraw", actions = lxml.stopraw }
+
+implement { name = "xmlprependsetup", actions = lxml.installsetup, arguments = { 1, "string", "string" } } -- 2:*
+implement { name = "xmlappendsetup", actions = lxml.installsetup, arguments = { 2, "string", "string" } } -- 2:*
+implement { name = "xmlbeforesetup", actions = lxml.installsetup, arguments = { 3, "string", "string", "string" } } -- 2:*
+implement { name = "xmlaftersetup", actions = lxml.installsetup, arguments = { 4, "string", "string", "string" } } -- 2:*
+implement { name = "xmlremovesetup", actions = lxml.removesetup, arguments = { "string", "string" } } -- 1:*
+implement { name = "xmlflushsetups", actions = lxml.flushsetups, arguments = { "string", "string", "string" } } -- 2:*
+implement { name = "xmlresetsetups", actions = lxml.resetsetups, arguments = "string" }
+
+implement { name = "xmlgetindex", actions = lxml.getindex, arguments = { "string", "string" } }
+implement { name = "xmlwithindex", actions = lxml.withindex, arguments = { "string", "string", "string" } }
+
+implement { name = "xmlsetentity", actions = xml.registerentity, arguments = { "string", "string" } }
+implement { name = "xmltexentity", actions = lxml.registerentity, arguments = { "string", "string" } }
+
+implement { name = "xmlsetcommandtotext", actions = lxml.setcommandtotext, arguments = "string" }
+implement { name = "xmlsetcommandtonone", actions = lxml.setcommandtonone, arguments = "string" }
+
+implement { name = "xmlstarttiming", actions = function() statistics.starttiming(lxml) end }
+implement { name = "xmlstoptiming", actions = function() statistics.stoptiming (lxml) end }
+
+implement { name = "xmlloadentities", actions = characters.registerentities, onceonly = true }
-- kind of special (3rd argument is a function)
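
Each of these entries maps a Lua function onto a \clf_... command at the TeX end, which is what the aliases in lxml-ini.mkiv below hook into. A hedged sketch of an entry in the same style, only meaningful inside a ConTeXt run; the name xmldemocount is made up for illustration and only fields that already occur above are used:

    local implement = interfaces.implement

    implement {
        name      = "xmldemocount",                  -- would surface as \clf_xmldemocount
        actions   = function(id, pattern) context(lxml.count(id, pattern)) end,
        arguments = { "string", "string" },          -- two braced string arguments
    }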
diff --git a/tex/context/base/mkiv/lxml-ini.mkiv b/tex/context/base/mkiv/lxml-ini.mkiv
index 8860f62fe..3d3ef8733 100644
--- a/tex/context/base/mkiv/lxml-ini.mkiv
+++ b/tex/context/base/mkiv/lxml-ini.mkiv
@@ -26,84 +26,24 @@
\registerctxluafile{lxml-dir}{1.001} % ctx hacks
\registerctxluafile{lxml-ini}{1.001} % interface
-\unprotect % todo \!!bs \!!es where handy (slower)
+\unprotect
-% todo: { } mandate
-% avoid #
+% todo: { } mandate so that we can alias
+
+% undocumented:
\def\ctxlxml #1{\ctxlua{lxml.#1}}
-%def\xmlall #1#2{\clf_xmlall {#1}{#2}}
-%def\xmlatt #1#2{\clf_xmlatt {#1}{#2}}
-%def\xmlattdef #1#2#3{\clf_xmlattdef {#1}{#2}{#3}}
-%def\xmlattribute #1#2#3{\clf_xmlattribute {#1}{#2}{#3}}
-%def\xmlattributedef #1#2#3#4{\clf_xmlattributedef {#1}{#2}{#3}{#4}}
-%def\xmlchainatt #1#2{\clf_xmlchainatt {#1}{#2}}
-%def\xmlchainattdef #1#2#3{\clf_xmlchainattdef {#1}{#2}{#3}}
-%def\xmlrefatt #1#2{\clf_xmlrefatt {#1}{#2}}
-%def\xmlchecknamespace #1#2#3{\clf_xmlchecknamespace {#1}{#2}{#3}} % element
-%def\xmlcommand #1#2#3{\clf_xmlcommand {#1}{#2}{#3}}
+% for now indirect
+
\def\xmlconcat #1#2#3{\clf_xmlconcat {#1}{#2}{\detokenize{#3}}}
\def\xmlconcatrange #1#2#3#4#5{\clf_xmlconcatrange {#1}{#2}{#3}{#4}{\detokenize{#5}}}
-%def\xmlcontext #1#2{\clf_xmlcontext {#1}{#2}}
-%def\xmlcount #1#2{\clf_xmlcount {#1}{#2}}
-%def\xmldelete #1#2{\clf_xmldelete {#1}{#2}}
-%def\xmldirect #1{\clf_xmldirect {#1}} % in loops, not dt but root
-%def\xmldirectives #1{\clf_xmldirectives {#1}}
-%def\xmldirectivesafter #1{\clf_xmldirectivesafter {#1}}
-%def\xmldirectivesbefore #1{\clf_xmldirectivesbefore {#1}}
-%def\xmldisplayverbatim #1{\clf_xmldisplayverbatim {#1}}
-%def\xmlelement #1#2{\clf_xmlelement {#1}{#2}}
-%def\xmlfilter #1#2{\clf_xmlfilter {#1}{#2}}
-%def\xmlfilterlist #1#2{\clf_xmlfilterlist {#1}{#2}}
-%def\xmlfirst #1#2{\clf_xmlfirst {#1}{#2}}
-%def\xmlflush #1{\clf_xmlflush {#1}}
-%def\xmlflushcontext #1{\clf_xmlflushcontext {#1}}
-%def\xmlflushlinewise #1{\clf_xmlflushlinewise {#1}}
-%def\xmlflushspacewise #1{\clf_xmlflushspacewise {#1}}
-%def\xmlfunction #1#2{\clf_xmlfunction {#1}{#2}}
-%def\xmlinclude #1#2#3{\clf_xmlinclude {#1}{#2}{#3}}
-%def\xmlincludeoptions#1#2#3#4{\clf_xmlincludeoptions {#1}{#2}{#3}{#4}}
-%def\xmlinclusion #1{\clf_xmlinclusion {#1}}
-%def\xmlinclusions #1{\clf_xmlinclusions {#1}}
-%def\xmlbadinclusions #1{\clf_xmlbadinclusions {#1}}
-%def\xmlindex #1#2#3{\clf_xmlindex {#1}{#2}{#3}}
-%let\xmlposition \xmlindex
-%def\xmlinlineverbatim #1{\clf_xmlinlineverbatim {#1}}
-%def\xmllast #1#2{\clf_xmllast {#1}{#2}}
\def\xmlload #1#2{\clf_xmlload {#1}{#2}{\directxmlparameter\c!compress}}
\def\xmlloadbuffer #1#2{\clf_xmlloadbuffer {#1}{#2}{\directxmlparameter\c!compress}}
\def\xmlloaddata #1#2{\clf_xmlloaddata {#1}{#2}{\directxmlparameter\c!compress}}
-%def\xmlloaddirectives #1{\clf_xmlloaddirectives {#1}}
-%def\xmlloadregistered #1{\clf_xmlloadregistered {#1}}
-%def\xmlmain #1{\clf_xmlmain {#1}}
-%def\xmlmatch #1{\clf_xmlmatch {#1}}
-%def\xmlname #1{\clf_xmlname {#1}}
-%def\xmlnamespace #1{\clf_xmlnamespace {#1}}
-%def\xmlnonspace #1#2{\clf_xmlnonspace {#1}{#2}}
-%def\xmlpos #1{\clf_xmlpos {#1}}
-%def\xmlraw #1#2{\clf_xmlraw {#1}{#2}}
-%def\xmlregisterns #1#2{\clf_xmlregisterns {#1}{#2}} % document
-%def\xmlremapname #1#2#3#4{\clf_xmlremapname {#1}{#2}{#3}{#4}} % element
-%def\xmlremapnamespace #1#2#3{\clf_xmlremapnamespace {#1}{#2}{#3}} % document
-%def\xmlsave #1#2{\clf_xmlsave {#1}{#2}}
-%def\xmlsetfunction #1#2#3{\clf_xmlsetfunction {#1}{#2}{#3}}
-%def\xmlsetsetup #1#2#3{\clf_xmlsetsetup {#1}{#2}{#3}}
-%def\xmlsnippet #1#2{\clf_xmlsnippet {#1}{#2}}
-%def\xmlstrip #1#2{\clf_xmlstrip {#1}{#2}}
-%def\xmlstripanywhere #1#2{\clf_xmlstripanywhere {#1}{#2}}
-%def\xmlstripnolines #1#2{\clf_xmlstripnolines {#1}{#2}}
-%def\xmlstripped #1#2{\clf_xmlstripped {#1}{#2}}
-%def\xmlstrippednolines #1#2{\clf_xmlstrippednolines {#1}{#2}}
-%def\xmltag #1{\clf_xmltag {#1}}
-%def\xmltext #1#2{\clf_xmltext {#1}{#2}}
-%def\xmltobuffer #1#2#3{\clf_xmltobuffer {#1}{#2}{#3}} % id pattern name
-%def\xmltobufferverbose #1#2#3{\clf_xmltobufferverbose {#1}{#2}{#3}} % id pattern name
-%def\xmltofile #1#2#3{\clf_xmltofile {#1}{#2}{#3}} % id pattern filename
-%def\xmltoparameters #1{\clf_xmltoparameters {#1}}
-%def\xmlverbatim #1{\clf_xmlverbatim {#1}}
-
-% experiment:
+
+% aliased
+
\let\xmlall \clf_xmlall
\let\xmlatt \clf_xmlatt
\let\xmllastatt \clf_xmllastatt
@@ -115,8 +55,6 @@
\let\xmlrefatt \clf_xmlrefatt
\let\xmlchecknamespace \clf_xmlchecknamespace
\let\xmlcommand \clf_xmlcommand
-% \xmlconcat
-% \xmlconcatrange
\let\xmlcontext \clf_xmlcontext
\let\xmlcount \clf_xmlcount
\let\xmldelete \clf_xmldelete
@@ -143,11 +81,7 @@
\let\xmlposition \clf_xmlindex
\let\xmlinlineverbatim \clf_xmlinlineverbatim
\let\xmllast \clf_xmllast
-% \xmlload
-% \xmlloadbuffer
-% \xmlloaddata
\let\xmlloaddirectives \clf_xmlloaddirectives
-% \xmlloadregistered
\let\xmlmain \clf_xmlmain
\let\xmlmatch \clf_xmlmatch
\let\xmlname \clf_xmlname
@@ -159,7 +93,6 @@
\let\xmlremapname \clf_xmlremapname % element
\let\xmlremapnamespace \clf_xmlremapnamespace % document
\let\xmlsave \clf_xmlsave
-%let\xmlsetfunction \clf_xmlsetfunction
\let\xmlsetsetup \clf_xmlsetsetup
\let\xmlsnippet \clf_xmlsnippet
\let\xmlstrip \clf_xmlstrip
@@ -175,8 +108,8 @@
\let\xmltoparameters \clf_xmltoparameters
\let\xmlverbatim \clf_xmlverbatim
-\def\xmlinfo #1{\hbox{\ttxx[\clf_xmlinfo{#1}]}}
-\def\xmlshow #1{\startpacked\ttx\xmlverbatim{#1}\stoppacked}
+\unexpanded\def\xmlinfo #1{\hbox{\ttxx[\clf_xmlname{#1}]}}
+\unexpanded\def\xmlshow #1{\startpacked\ttx\xmlverbatim{#1}\stoppacked}
% the next one is handy for mode runs because it enforces a consistent
% #1 indexing (needed when using \xmltext{main:123}{...} like calls
@@ -185,7 +118,7 @@
% we need to pass the last argument as function, so
-\def\xmlsetfunction#1#2#3{\ctxcommand{xmlsetfunction("#1",\!!bs#2\!!es,#3)}}
+\unexpanded\def\xmlsetfunction#1#2#3{\ctxcommand{xmlsetfunction("#1",\!!bs#2\!!es,#3)}}
% goodie:
@@ -199,12 +132,12 @@
% kind of special:
-%def\xmlstartraw{\clf_xmlstartraw}
-%def\xmlstopraw {\clf_xmlstopraw}
-
\let\xmlstartraw\clf_xmlstartraw
\let\xmlstopraw \clf_xmlstopraw
+\let\startxmlraw\clf_xmlstartraw
+\let\stopxmlraw \clf_xmlstopraw
+
% these are expandable! todo: \xmldoifelseattribute
\let\xmldoif \clf_xmldoif
@@ -231,34 +164,30 @@
%
% \xmlprependsetup{xml:include}
-\let\xmlgrab\xmlsetsetup % obsolete
-\let\xmlself\s!unknown % obsolete
+% \let\xmlgrab\xmlsetsetup % obsolete
+% \let\xmlself\s!unknown % obsolete
-%\ef\xmlsetup#1#2{\setupwithargument{#2}{#1}}
\let\xmlsetup\setupwithargumentswapped
-
-\let\xmls\setupwithargumentswapped % hardly any faster
-\let\xmlw\setupwithargument % hardly any faster
-
-\newtoks \registeredxmlsetups
+\let\xmls \setupwithargumentswapped % hardly any faster
+\let\xmlw \setupwithargument % hardly any faster
% todo: 1:xml:whatever always before 3:xml:something
-\unexpanded\def\xmlprependsetup #1{\clf_xmlprependsetup {*}{#1}}
-\unexpanded\def\xmlappendsetup #1{\clf_xmlappendsetup {*}{#1}}
-\unexpanded\def\xmlbeforesetup #1#2{\clf_xmlbeforesetup {*}{#1}{#2}}
-\unexpanded\def\xmlaftersetup #1#2{\clf_xmlaftersetup {*}{#1}{#2}}
-
-\unexpanded\def\xmlprependdocumentsetup #1#2{\clf_xmlprependdocumentsetup{#1}{#2}}
-\unexpanded\def\xmlappenddocumentsetup #1#2{\clf_xmlappenddocumentsetup {#1}{#2}}
-\unexpanded\def\xmlbeforedocumentsetup #1#2#3{\clf_xmlbeforedocumentsetup {#1}{#2}{#3}}
-\unexpanded\def\xmlafterdocumentsetup #1#2#3{\clf_xmlafterdocumentsetup {#1}{#2}{#3}}
+\unexpanded\def\xmlprependsetup #1{\clf_xmlprependsetup{*}{#1}}
+\unexpanded\def\xmlappendsetup #1{\clf_xmlappendsetup {*}{#1}}
+\unexpanded\def\xmlbeforesetup #1#2{\clf_xmlbeforesetup {*}{#1}{#2}}
+\unexpanded\def\xmlaftersetup #1#2{\clf_xmlaftersetup {*}{#1}{#2}}
+\unexpanded\def\xmlremovesetup #1{\clf_xmlremovesetup {*}{#1}}
+\unexpanded\def\xmlresetsetups {\clf_xmlresetsetups {*}}
-\unexpanded\def\xmlremovesetup #1{\clf_xmlremovesetup {*}{#1}}
-\unexpanded\def\xmlremovedocumentsetup #1#2{\clf_xmlremovedocumentsetup {#1}{#2}}
+\unexpanded\def\xmlprependdocumentsetup #1#2{\clf_xmlprependsetup{#1}{#2}}
+\unexpanded\def\xmlappenddocumentsetup #1#2{\clf_xmlappendsetup {#1}{#2}}
+\unexpanded\def\xmlbeforedocumentsetup #1#2#3{\clf_xmlbeforesetup {#1}{#2}{#3}}
+\unexpanded\def\xmlafterdocumentsetup #1#2#3{\clf_xmlaftersetup {#1}{#2}{#3}}
+\unexpanded\def\xmlremovedocumentsetup #1#2{\clf_xmlremovesetup {#1}{#2}}
+\unexpanded\def\xmlresetdocumentsetups #1{\clf_xmlresetsetups {#1}}
-\unexpanded\def\xmlflushdocumentsetups #1#2{\clf_xmlflushdocumentsetups {#1}{*}{#2}} % #1 == id where to apply * and #2
-\unexpanded\def\xmlresetdocumentsetups #1{\clf_xmlresetdocumentsetups {#1}}
+\unexpanded\def\xmlflushdocumentsetups #1#2{\clf_xmlflushsetups {#1}{*}{#2}} % #1 == id where to apply * and #2
\let\xmlregistersetup \xmlappendsetup
\let\xmlregisterdocumentsetup\xmlappenddocumentsetup
@@ -278,8 +207,8 @@
\xmldefaulttotext{#1}% after include
\xmlstoptiming}
-\unexpanded\def\xmlstarttiming{\clf_xmlstarttiming}
-\unexpanded\def\xmlstoptiming {\clf_xmlstoptiming}
+\unexpanded\def\xmlstarttiming{\clf_xmlstarttiming} % undocumented
+\unexpanded\def\xmlstoptiming {\clf_xmlstoptiming} % undocumented
\def\lxml_process#1#2#3#4#5% flag \loader id name what initializersetup
{\begingroup
@@ -296,7 +225,6 @@
\unexpanded\def\xmlprocessfile {\lxml_process\plusone \xmlload}
\unexpanded\def\xmlprocessdata {\lxml_process\zerocount\xmlloaddata}
\unexpanded\def\xmlprocessbuffer {\lxml_process\zerocount\xmlloadbuffer}
-\unexpanded\def\xmlprocessregistered{\lxml_process\zerocount\xmlloadregistered}
\let\xmlprocess \xmlprocessfile
\startxmlsetups xml:flush
@@ -312,24 +240,22 @@
{\xmlload{#1}{#2}%
\xmlregistereddocumentsetups{#1}{#3}}
-% beware: \xmlmain takes the real root, so also processing
-% instructions preceding the root element; well, in some
-% sense that is the root
-
-\unexpanded\def\xmlconnect#1#2#3% inefficient
- {\scratchcounter\xmlcount{#1}{#2}\relax
- \ifcase\scratchcounter \or
- \xmlall{#1}{#2}%
- \else
- \dorecurse \scratchcounter
- {\ifnum\recurselevel>\plusone#3\fi
- \xmlidx{#1}{#2}\recurselevel}%
- \fi}
+% replaced by concat
+%
+% \unexpanded\def\xmlconnect#1#2#3% inefficient
+% {\scratchcounter\xmlcount{#1}{#2}\relax
+% \ifcase\scratchcounter \or
+% \xmlall{#1}{#2}%
+% \else
+% \dorecurse \scratchcounter
+% {\ifnum\recurselevel>\plusone#3\fi
+% \xmlidx{#1}{#2}\recurselevel}%
+% \fi}
\unexpanded\def\xmlcdataobeyedline {\obeyedline}
\unexpanded\def\xmlcdataobeyedspace{\strut\obeyedspace}
-\unexpanded\def\xmlcdatabefore {\bgroup\tt}
-\unexpanded\def\xmlcdataafter {\egroup}
+\unexpanded\def\xmlcdatabefore {\begingroup\tt}
+\unexpanded\def\xmlcdataafter {\endgroup}
% verbatim (todo: pre/post whitespace, maybe split verbatim and
% cdata commands), experimental:
@@ -343,37 +269,37 @@
%
% this is experimental!
-\unexpanded\def\startxmldisplayverbatim[#1]%
- {\startpacked % \begingroup
- \edef\currenttyping{xml:#1}%
+\unexpanded\def\startxmldisplayverbatim
+ {\dosingleempty\lxml_start_display_verbatim}
+
+\def\lxml_start_display_verbatim[#1]%
+ {\startpacked
+ \edef\currenttyping{#1}%
+ \ifx\currenttyping\empty
+ \let\currenttyping\v!typing
+ \else % maybe test for existence
+ \edef\currenttyping{xml:\currenttyping}%
+ \fi
\unexpanded\def\stopxmldisplayverbatim
{\endofverbatimlines
- \stoppacked} % \endgroup
+ \stoppacked}
\doinitializeverbatim
\beginofverbatimlines}
-\unexpanded\def\startxmlinlineverbatim[#1]%
+\unexpanded\def\startxmlinlineverbatim
+ {\dosingleempty\lxml_start_inline_verbatim}
+
+\unexpanded\def\lxml_start_inline_verbatim[#1]%
{\begingroup
- \edef\currenttype{xml:#1}%
+ \edef\currenttype{#1}%
+ \ifx\currenttype\empty
+ \let\currenttype\v!type
+ \else % maybe test for existence
+ \edef\currenttype{xml:\currenttype}%
+ \fi
\let\stopxmlinlineverbatim\endgroup
\doinitializeverbatim}
-% will move but is developed for xml
-
-\newtoks \collectingtoks
-
-\unexpanded\def\startcollect#1\stopcollect
- {\collectingtoks\@EA{\the\collectingtoks#1}}
-
-\unexpanded\def\startexpandedcollect#1\stopexpandedcollect
- {\normalexpanded{\collectingtoks{\the\collectingtoks#1}}}
-
-\unexpanded\def\startcollecting{\collectingtoks\emptytoks}
-\unexpanded\def\stopcollecting {\the\collectingtoks}
-
-\def\inlinemessage #1{\dontleavehmode{\tttf#1}}
-\def\displaymessage#1{\blank\inlinemessage{#1}\blank}
-
% processing instructions
\unexpanded\def\xmlinstalldirective#1#2%
@@ -405,13 +331,13 @@
\unexpanded\def\xmldefaulttotext
{\ifcase\xmlprocessingmode
- \expandafter\gobbleoneargument % unset
+ \expandafter\gobbleoneargument % 0 (none)
\or
- \expandafter\clf_xmlsetcommandtotext % 1
+ \expandafter\clf_xmlsetcommandtotext % 1 (normal)
\or
- \expandafter\clf_xmlsetcommandtonone % 2
+ \expandafter\clf_xmlsetcommandtonone % 2 (hidden)
\else
- \expandafter\gobbleoneargument % unset
+ \expandafter\gobbleoneargument % (none)
\fi}
\appendtoks
@@ -421,11 +347,13 @@
\setupxml
[\c!default=, % flush all
\c!compress=\v!no, % strip comment
- \c!entities=\v!yes] % replace entities
+ \c!entities=\v!no] % load big entity file
+
+\appendtoks
+ \doif{\directxmlparameter\c!entities}\clf_xmlloadentities
+\to \everysetupxml
\def\xmlmapvalue #1#2#3{\setvalue{\??xmlmapvalue#1:#2}{#3}} % keep #3 to grab spaces
-%def\xmlvalue #1#2#3{\executeifdefined{\??xmlmapvalue#1:#2}{#3}}
-%def\xmlvalue #1#2{\ifcsname\??xmlmapvalue#1:#2\endcsname\csname\??xmlmapvalue#1:#2\expandafter\expandafter\gobbleoneargument\expandafter\endcsname\else\expandafter\firstofoneargument\fi}
\def\xmldoifelsevalue #1#2{\ifcsname\??xmlmapvalue#1:#2\endcsname\expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments\fi}
\def\xmlvalue#1#2% #3
@@ -453,14 +381,18 @@
\def\xmlwithindex #1#2{\clf_xmlwithindex{\xmldocument}{#1}{#2}}
\def\xmlreference #1#2{\string\xmlwithindex{#1}{#2}}
-%D Entities (might change):
-
-\setnewconstant\xmlautoentities\plusone % 0=off, 1=upper, 2=upper,lower
+%D Entities:
+%D
+%D \starttyping
+%D \xmlsetentity{tex}{\TEX{}} % {} needed
+%D \stoptyping
\unexpanded\def\xmlsetentity#1#2{\clf_xmlsetentity{#1}{\detokenize{#2}}}
\unexpanded\def\xmltexentity#1#2{\clf_xmltexentity{#1}{\detokenize{#2}}}
-% \xmlsetentity{tex}{\TEX{}} % {} needed
+%D The following might change (or even disappear) so we keep it undocumented.
+
+\setnewconstant\xmlautoentities\plusone % 0=off, 1=upper, 2=upper,lower
\unexpanded\def\xmle
{\ifcase\xmlautoentities
@@ -492,7 +424,24 @@
#1%
\fi\fi}
-% handy helpers (analogue to MP and LUA and TEX and also MkII)
+% \def\lxml_e_upper#1#2% can be abbreviation
+% {\ifcsname\detokenize{#2}\endcsname
+% \lastnamedcs
+% \else
+% \detokenize{#1}%
+% \fi}
+
+% \def\lxml_e_upperlower#1#2% can be anything, so unsafe
+% {\ifcsname\detokenize{#2}\endcsname
+% \expandafter\lastnamedcs
+% \else\ifcsname\detokenize{#1}\endcsname
+% \doubleexpandafter\lastnamedcs
+% \else
+% \detokenize{#1}%
+% \fi\fi}
+
+%D We keep these around as there are also MP, LUA and TEX variants but
+%D they are not the same as in \MKII.
\unexpanded\def\processXMLbuffer
{\dosingleempty\lxml_process_buffer}
@@ -538,6 +487,8 @@
\let\xmlapplyselectors\clf_xmlapplyselectors
+% \let\xmlcatcodes\notcatcodes
+
\protect \endinput
% \newcount\charactersactiveoffset \charactersactiveoffset="10000
diff --git a/tex/context/base/mkiv/lxml-lpt.lua b/tex/context/base/mkiv/lxml-lpt.lua
index 6df89be20..62250be00 100644
--- a/tex/context/base/mkiv/lxml-lpt.lua
+++ b/tex/context/base/mkiv/lxml-lpt.lua
@@ -518,27 +518,49 @@ local lp_doequal = P("=") / "=="
local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
-local lp_builtin = P (
- P("text") / "(ll.dt[1] or '')" + -- fragile
- P("content") / "ll.dt" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
- P("position") / "l" + -- is element in finalizer
- P("firstindex") / "1" +
- P("lastindex") / "(#ll.__p__.dt or 1)" +
- P("firstelement") / "1" +
- P("lastelement") / "(ll.__p__.en or 1)" +
- P("first") / "1" +
- P("last") / "#list" +
- P("rootposition") / "order" +
- P("order") / "order" +
- P("element") / "(ll.ei or 1)" +
- P("index") / "(ll.ni or 1)" +
- P("match") / "(ll.mi or 1)" +
- -- P("namespace") / "ll.ns" +
- P("ns") / "ll.ns"
- ) * ((spaces * P("(") * spaces * P(")"))/"")
+-- local lp_builtin = (
+-- P("text") / "(ll.dt[1] or '')" + -- fragile
+-- P("content") / "ll.dt" +
+-- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+-- P("tag") / "ll.tg" +
+-- P("position") / "l" + -- is element in finalizer
+-- P("firstindex") / "1" +
+-- P("lastindex") / "(#ll.__p__.dt or 1)" +
+-- P("firstelement") / "1" +
+-- P("lastelement") / "(ll.__p__.en or 1)" +
+-- P("first") / "1" +
+-- P("last") / "#list" +
+-- P("rootposition") / "order" +
+-- P("order") / "order" +
+-- P("element") / "(ll.ei or 1)" +
+-- P("index") / "(ll.ni or 1)" +
+-- P("match") / "(ll.mi or 1)" +
+-- P("namespace") / "ll.ns" +
+-- P("ns") / "ll.ns"
+-- ) * ((spaces * P("(") * spaces * P(")"))/"")
+
+local builtin = {
+ text = "(ll.dt[1] or '')", -- fragile
+ content = "ll.dt",
+ name = "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)",
+ tag = "ll.tg",
+ position = "l", -- is element in finalizer
+ firstindex = "1",
+ firstelement = "1",
+ first = "1",
+ lastindex = "(#ll.__p__.dt or 1)",
+ lastelement = "(ll.__p__.en or 1)",
+ last = "#list",
+ rootposition = "order",
+ order = "order",
+ element = "(ll.ei or 1)",
+ index = "(ll.ni or 1)",
+ match = "(ll.mi or 1)",
+ namespace = "ll.ns",
+ ns = "ll.ns",
+}
+
+local lp_builtin = lpeg.utfchartabletopattern(builtin)/builtin * ((spaces * P("(") * spaces * P(")"))/"")
-- for the moment we keep namespaces with attributes
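
The long ordered alternation is replaced by a plain table: the keys drive the match and the table itself supplies the replacement. A standalone sketch of that table-driven substitution with stock lpeg (in the patch, lpeg.utfchartabletopattern builds the key pattern from the table automatically):

    local lpeg = require("lpeg")
    local P, Cs = lpeg.P, lpeg.Cs

    local builtin = {
        first      = "1",
        firstindex = "1",
        last       = "#list",
        tag        = "ll.tg",
    }

    -- build an alternation over the keys, longest first so 'firstindex' wins over 'first'
    local keys = { }
    for k in pairs(builtin) do keys[#keys+1] = k end
    table.sort(keys, function(a, b) return #a > #b end)

    local keypattern = P(false)
    for _, k in ipairs(keys) do keypattern = keypattern + P(k) end

    local translator = Cs((keypattern / builtin + P(1))^0)

    print(lpeg.match(translator, "firstindex < last"))   -- 1 < #list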
diff --git a/tex/context/base/mkiv/lxml-tab.lua b/tex/context/base/mkiv/lxml-tab.lua
index e29058eb6..23f424995 100644
--- a/tex/context/base/mkiv/lxml-tab.lua
+++ b/tex/context/base/mkiv/lxml-tab.lua
@@ -14,7 +14,7 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
-- of work so we delay this until we clean up
-local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
+local trace_entities = false trackers .register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(string.format(...)) end
@@ -24,14 +24,6 @@ handles comment and processing instructions, has a different structure, provides
parent access; a first version used different trickery but was less optimized so we
went this route. First we had a find based parser, now we have an <l n='lpeg'/> based one.
The find based parser can be found in l-xml-edu.lua along with other older code.</p>
-
-<p>Beware, the interface may change. For instance at, ns, tg, dt may get more
-verbose names. Once the code is stable we will also remove some tracing and
-optimize the code.</p>
-
-<p>I might even decide to reimplement the parser using the latest <l n='lpeg'/> trickery
-as the current variant was written when <l n='lpeg'/> showed up and it's easier now to
-build tables in one go.</p>
--ldx]]--
if lpeg.setmaxstack then lpeg.setmaxstack(1000) end -- deeply nested xml files
@@ -57,10 +49,9 @@ find based solution where we loop over an array of patterns. Less code and
much cleaner.</p>
--ldx]]--
-xml.xmlns = xml.xmlns or { }
+do -- begin of namespace closure (we ran out of locals)
-local check = P(false)
-local parse = check
+xml.xmlns = xml.xmlns or { }
--[[ldx--
<p>The next function associates a namespace prefix with an <l n='url'/>. This
@@ -71,6 +62,9 @@ xml.registerns("mml","mathml")
</typing>
--ldx]]--
+local check = P(false)
+local parse = check
+
function xml.registerns(namespace, pattern) -- pattern can be an lpeg
check = check + C(P(lower(pattern))) / namespace
parse = P { P(check) + 1 * V(1) }
@@ -113,6 +107,8 @@ end
one efficiently by using the <t>xml.xmlns</t> table.</p>
--ldx]]--
+end -- end of namespace closure
+
--[[ldx--
<p>This version uses <l n='lpeg'/>. We follow the same approach as before, stack and top and
such. This version is about twice as fast which is mostly due to the fact that
@@ -158,25 +154,67 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack = { }
-local top = { }
-local dt = { }
-local at = { }
-local xmlns = { }
-local errorstr = nil
-local entities = { }
-local strip = false
-local cleanup = false
-local utfize = false
-local resolve = false
-local resolve_predefined = false
-local unify_predefined = false
-
-local dcache = { }
-local hcache = { }
-local acache = { }
-
-local mt = { }
+local stack, level, top, at, xmlnms, errorstr
+local entities, parameters
+local strip, utfize, resolve, cleanup, resolve_predefined, unify_predefined
+local dcache, hcache, acache
+local mt, dt, nt
+
+local function preparexmlstate(settings)
+ if settings then
+ stack = { }
+ level = 0
+ top = { }
+ at = { }
+ mt = { }
+ dt = { }
+ nt = 0 -- some 5% faster than #dt on cont-en.xml
+ xmlns = { }
+ errorstr = nil
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities -- enable this in order to apply the dtd
+ resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ parameters = { }
+ reported_at_errors = { }
+ dcache = { }
+ hcache = { }
+ acache = { }
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ else
+ stack = nil
+ level = nil
+ top = nil
+ at = nil
+ mt = nil
+ dt = nil
+ nt = nil
+ xmlns = nil
+ errorstr = nil
+ strip = nil
+ utfize = nil
+ resolve = nil
+ resolve_predefined = nil
+ unify_predefined = nil
+ cleanup = nil
+ entities = nil
+ parameters = nil
+ reported_at_errors = nil
+ dcache = nil
+ hcache = nil
+ acache = nil
+ end
+end
local function initialize_mt(root)
mt = { __index = root } -- will be redefined later
@@ -190,8 +228,10 @@ function xml.checkerror(top,toclose)
return "" -- can be set
end
+local checkns = xml.checkns
+
local function add_attribute(namespace,tag,value)
- if cleanup and #value > 0 then
+ if cleanup and value ~= "" then
value = cleanup(value) -- new
end
if tag == "xmlns" then
@@ -200,7 +240,7 @@ local function add_attribute(namespace,tag,value)
elseif namespace == "" then
at[tag] = value
elseif namespace == "xmlns" then
- xml.checkns(tag,value)
+ checkns(tag,value)
at["xmlns:" .. tag] = value
else
-- for the moment this way:
@@ -209,14 +249,23 @@ local function add_attribute(namespace,tag,value)
end
local function add_empty(spacing, namespace, tag)
- if #spacing > 0 then
- dt[#dt+1] = spacing
+ if spacing ~= "" then
+ nt = nt + 1
+ dt[nt] = spacing
end
local resolved = namespace == "" and xmlns[#xmlns] or nsremap[namespace] or namespace
- top = stack[#stack]
+ top = stack[level]
dt = top.dt
- local t = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top }
- dt[#dt+1] = t
+ nt = #dt + 1
+ local t = {
+ ns = namespace or "",
+ rn = resolved,
+ tg = tag,
+ at = at,
+ dt = { },
+ __p__ = top
+ }
+ dt[nt] = t
setmetatable(t, mt)
if at.xmlns then
remove(xmlns)
@@ -225,24 +274,36 @@ local function add_empty(spacing, namespace, tag)
end
local function add_begin(spacing, namespace, tag)
- if #spacing > 0 then
- dt[#dt+1] = spacing
+ if spacing ~= "" then
+ nt = nt + 1
+ dt[nt] = spacing
end
local resolved = namespace == "" and xmlns[#xmlns] or nsremap[namespace] or namespace
- top = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = stack[#stack] }
+ top = {
+ ns = namespace or "",
+ rn = resolved,
+ tg = tag,
+ at = at,
+ dt = {},
+ __p__ = stack[level]
+ }
setmetatable(top, mt)
dt = top.dt
- stack[#stack+1] = top
+ nt = #dt
+ level = level + 1
+ stack[level] = top
at = { }
end
local function add_end(spacing, namespace, tag)
- if #spacing > 0 then
- dt[#dt+1] = spacing
+ if spacing ~= "" then
+ nt = nt + 1
+ dt[nt] = spacing
end
- local toclose = remove(stack)
- top = stack[#stack]
- if #stack < 1 then
+ local toclose = stack[level]
+ level = level - 1
+ top = stack[level]
+ if level < 1 then
errorstr = formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "")
report_xml(errorstr)
elseif toclose.tg ~= tag then -- no namespace check
@@ -250,65 +311,65 @@ local function add_end(spacing, namespace, tag)
report_xml(errorstr)
end
dt = top.dt
- dt[#dt+1] = toclose
+ nt = #dt + 1
+ dt[nt] = toclose
-- dt[0] = top -- nasty circular reference when serializing table
if toclose.at.xmlns then
remove(xmlns)
end
end
--- local function add_text(text)
--- if cleanup and #text > 0 then
--- dt[#dt+1] = cleanup(text)
--- else
--- dt[#dt+1] = text
--- end
--- end
-
-local spaceonly = lpegpatterns.whitespace^0 * P(-1)
-
-local function add_text(text)
- local n = #dt
+-- local spaceonly = lpegpatterns.whitespace^0 * P(-1)
--
-- will be an option: dataonly
--
-- if #text == 0 or lpegmatch(spaceonly,text) then
-- return
-- end
---
- if cleanup and #text > 0 then
- if n > 0 then
- local s = dt[n]
+
+local function add_text(text)
+ if text == "" then
+ return
+ end
+ if cleanup then
+ if nt > 0 then
+ local s = dt[nt]
if type(s) == "string" then
- dt[n] = s .. cleanup(text)
+ dt[nt] = s .. cleanup(text)
else
- dt[n+1] = cleanup(text)
+ nt = nt + 1
+ dt[nt] = cleanup(text)
end
else
+ nt = 1
dt[1] = cleanup(text)
end
else
- if n > 0 then
- local s = dt[n]
+ if nt > 0 then
+ local s = dt[nt]
if type(s) == "string" then
- dt[n] = s .. text
+ dt[nt] = s .. text
else
- dt[n+1] = text
+ nt = nt + 1
+ dt[nt] = text
end
else
+ nt = 1
dt[1] = text
end
end
end
local function add_special(what, spacing, text)
- if #spacing > 0 then
- dt[#dt+1] = spacing
+ if spacing ~= "" then
+ nt = nt + 1
+ dt[nt] = spacing
end
if strip and (what == "@cm@" or what == "@dt@") then
-- forget it
else
- dt[#dt+1] = { special=true, ns="", tg=what, dt={ text } }
+ nt = nt + 1
+ dt[nt] = { special=true, ns="", tg=what, dt={ text } }
end
end
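The add_* helpers above now keep the current length of dt in the upvalue nt instead of asking #dt on every append; the comment in preparexmlstate estimates the gain at about 5% on cont-en.xml, and nt is resynchronized with #dt whenever dt itself is swapped. The pattern in isolation, as a sketch:

-- keep the count next to the table so an append is plain arithmetic
local dt, nt = { }, 0

local function append(v)
    nt = nt + 1
    dt[nt] = v          -- instead of dt[#dt+1] = v, which queries the length every call
end

local function switchto(t)
    dt = t
    nt = #dt            -- resync once when the target table changes
end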
@@ -316,213 +377,212 @@ local function set_message(txt)
errorstr = "garbage at the end of the file: " .. gsub(txt,"([ \n\r\t]*)","")
end
-local reported_attribute_errors = { }
-
local function attribute_value_error(str)
- if not reported_attribute_errors[str] then
+ if not reported_at_errors[str] then
report_xml("invalid attribute value %a",str)
- reported_attribute_errors[str] = true
+ reported_at_errors[str] = true
at._error_ = str
end
return str
end
local function attribute_specification_error(str)
- if not reported_attribute_errors[str] then
+ if not reported_at_errors[str] then
report_xml("invalid attribute specification %a",str)
- reported_attribute_errors[str] = true
+ reported_at_errors[str] = true
at._error_ = str
end
return str
end
-local badentity = "&error;"
-local badentity = "&"
+-- these will be set later
-xml.placeholders = {
- unknown_dec_entity = function(str) return str == "" and badentity or formatters["&%s;"](str) end,
- unknown_hex_entity = function(str) return formatters["&#x%s;"](str) end,
- unknown_any_entity = function(str) return formatters["&#x%s;"](str) end,
-}
+local grammar_parsed_text_one
+local grammar_parsed_text_two
-local placeholders = xml.placeholders
+local handle_hex_entity
+local handle_dec_entity
+local handle_any_entity_dtd
+local handle_any_entity_text
-local function fromhex(s)
- local n = tonumber(s,16)
- if n then
- return utfchar(n)
- else
- return formatters["h:%s"](s), true
+-- in order to overcome lua limitations we wrap entity stuff in a
+-- closure
+
+do
+
+ local badentity = "&" -- was "&error;"
+
+ xml.placeholders = {
+ unknown_dec_entity = function(str) return str == "" and badentity or formatters["&%s;"](str) end,
+ unknown_hex_entity = function(str) return formatters["&#x%s;"](str) end,
+ unknown_any_entity = function(str) return formatters["&#x%s;"](str) end,
+ }
+
+ local function fromhex(s)
+ local n = tonumber(s,16)
+ if n then
+ return utfchar(n)
+ else
+ return formatters["h:%s"](s), true
+ end
end
-end
-local function fromdec(s)
- local n = tonumber(s)
- if n then
- return utfchar(n)
- else
- return formatters["d:%s"](s), true
+ local function fromdec(s)
+ local n = tonumber(s)
+ if n then
+ return utfchar(n)
+ else
+ return formatters["d:%s"](s), true
+ end
end
-end
--- one level expansion (simple case), no checking done
+ local p_rest = (1-P(";"))^0
+ local p_many = P(1)^0
+ local p_char = lpegpatterns.utf8character
-local p_rest = (1-P(";"))^0
-local p_many = P(1)^0
-local p_char = lpegpatterns.utf8character
+ local parsedentity =
+ P("&#") * (P("x")*(p_rest/fromhex) + (p_rest/fromdec)) * P(";") * P(-1) +
+ P ("#") * (P("x")*(p_many/fromhex) + (p_many/fromdec))
-local parsedentity =
- P("&") * (P("#x")*(p_rest/fromhex) + P("#")*(p_rest/fromdec)) * P(";") * P(-1) +
- (P("#x")*(p_many/fromhex) + P("#")*(p_many/fromdec))
+ xml.parsedentitylpeg = parsedentity
--- parsing in the xml file
+ -- parsing in the xml file
-local predefined_unified = {
- [38] = "&amp;",
- [42] = "&quot;",
- [47] = "&apos;",
- [74] = "&lt;",
- [76] = "&gt;",
-}
+ local predefined_unified = {
+ [38] = "&amp;",
+ [42] = "&quot;",
+ [47] = "&apos;",
+ [74] = "&lt;",
+ [76] = "&gt;",
+ }
-local predefined_simplified = {
- [38] = "&", amp = "&",
- [42] = '"', quot = '"',
- [47] = "'", apos = "'",
- [74] = "<", lt = "<",
- [76] = ">", gt = ">",
-}
+ local predefined_simplified = {
+ [38] = "&", amp = "&",
+ [42] = '"', quot = '"',
+ [47] = "'", apos = "'",
+ [74] = "<", lt = "<",
+ [76] = ">", gt = ">",
+ }
-local nofprivates = 0xF0000 -- shared but seldom used
+ local nofprivates = 0xF0000 -- shared but seldom used
-local privates_u = { -- unescaped
- [ [[&]] ] = "&amp;",
- [ [["]] ] = "&quot;",
- [ [[']] ] = "&apos;",
- [ [[<]] ] = "&lt;",
- [ [[>]] ] = "&gt;",
-}
+ local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+ }
-local privates_p = {
-}
+ local privates_p = { -- needed for roundtrip as well as serialize to tex
+ }
-local privates_n = {
- -- keeps track of defined ones
-}
+ local privates_s = { -- for tex
+ [ [["]] ] = "&U+22;",
+ [ [[#]] ] = "&U+23;",
+ [ [[$]] ] = "&U+24;",
+ [ [[%]] ] = "&U+25;",
+ [ [[&]] ] = "&U+26;",
+ [ [[']] ] = "&U+27;",
+ [ [[<]] ] = "&U+3C;",
+ [ [[>]] ] = "&U+3E;",
+ [ [[\]] ] = "&U+5C;",
+ [ [[{]] ] = "&U+7B;",
+ [ [[|]] ] = "&U+7C;",
+ [ [[}]] ] = "&U+7D;",
+ [ [[~]] ] = "&U+7E;",
+ }
--- -- local escaped = utf.remapper(privates_u) -- can't be used as it freezes
--- -- local unprivatized = utf.remapper(privates_p) -- can't be used as it freezes
---
--- local p_privates_u = false
--- local p_privates_p = false
---
--- table.setmetatablenewindex(privates_u,function(t,k,v) rawset(t,k,v) p_privates_u = false end)
--- table.setmetatablenewindex(privates_p,function(t,k,v) rawset(t,k,v) p_privates_p = false end)
---
--- local function escaped(str)
--- if not str or str == "" then
--- return ""
--- else
--- if not p_privates_u then
--- p_privates_u = Cs((lpeg.utfchartabletopattern(privates_u)/privates_u + p_char)^0)
--- end
--- return lpegmatch(p_privates_u,str)
--- end
--- end
---
--- local function unprivatized(str)
--- if not str or str == "" then
--- return ""
--- else
--- if not p_privates_p then
--- p_privates_p = Cs((lpeg.utfchartabletopattern(privates_p)/privates_p + p_char)^0)
--- end
--- return lpegmatch(p_privates_p,str)
--- end
--- end
+ local privates_n = { -- keeps track of defined ones
+ }
-local escaped = utf.remapper(privates_u,"dynamic")
-local unprivatized = utf.remapper(privates_p,"dynamic")
+ local escaped = utf.remapper(privates_u,"dynamic")
+ local unprivatized = utf.remapper(privates_p,"dynamic")
+ local unspecialized = utf.remapper(privates_s,"dynamic")
+
+ xml.unprivatized = unprivatized
+ xml.unspecialized = unspecialized
+ xml.escaped = escaped
+
+ local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ privates_s[p] = s
+ end
+ return p
+ end
-xml.unprivatized = unprivatized
+ xml.privatetoken = unescaped
+ xml.privatecodes = privates_n
+ xml.specialcodes = privates_s
-local function unescaped(s)
- local p = privates_n[s]
- if not p then
- nofprivates = nofprivates + 1
- p = utfchar(nofprivates)
- privates_n[s] = p
- s = "&" .. s .. ";" -- todo: use char-ent to map to hex
- privates_u[p] = s
- privates_p[p] = s
+ function xml.addspecialcode(key,value)
+ privates_s[key] = value or "&" .. s .. ";"
end
- return p
-end
-xml.privatetoken = unescaped
-xml.privatecodes = privates_n
-
-local function handle_hex_entity(str)
- local h = hcache[str]
- if not h then
- local n = tonumber(str,16)
- h = unify_predefined and predefined_unified[n]
- if h then
- if trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
- end
- elseif utfize then
- h = (n and utfchar(n)) or xml.unknown_hex_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring hex entity &#x%s;",str)
- elseif trace_entities then
- report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
- end
- else
- if trace_entities then
- report_xml("found entity &#x%s;",str)
+ handle_hex_entity = function(str)
+ local h = hcache[str]
+ if not h then
+ local n = tonumber(str,16)
+ h = unify_predefined and predefined_unified[n]
+ if h then
+ if trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ elseif utfize then
+ h = (n and utfchar(n)) or xml.unknown_hex_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring hex entity &#x%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting hex entity &#x%s; into %a",str,h)
+ end
+ else
+ if trace_entities then
+ report_xml("found entity &#x%s;",str)
+ end
+ h = "&#x" .. str .. ";"
end
- h = "&#x" .. str .. ";"
+ hcache[str] = h
end
- hcache[str] = h
+ return h
end
- return h
-end
-local function handle_dec_entity(str)
- local d = dcache[str]
- if not d then
- local n = tonumber(str)
- d = unify_predefined and predefined_unified[n]
- if d then
- if trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %a",str,d)
- end
- elseif utfize then
- d = (n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring dec entity &#%s;",str)
- elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %a",str,d)
- end
- else
- if trace_entities then
- report_xml("found entity &#%s;",str)
+ handle_dec_entity = function(str)
+ local d = dcache[str]
+ if not d then
+ local n = tonumber(str)
+ d = unify_predefined and predefined_unified[n]
+ if d then
+ if trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
+ elseif utfize then
+ d = (n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring dec entity &#%s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting dec entity &#%s; into %a",str,d)
+ end
+ else
+ if trace_entities then
+ report_xml("found entity &#%s;",str)
+ end
+ d = "&#" .. str .. ";"
end
- d = "&#" .. str .. ";"
+ dcache[str] = d
end
- dcache[str] = d
+ return d
end
- return d
-end
-xml.parsedentitylpeg = parsedentity
-
-local function handle_any_entity(str)
- if resolve then
- local a = acache[str] -- per instance ! todo
- if not a then
- a = resolve_predefined and predefined_simplified[str]
+ handle_any_entity_dtd = function(str)
+ if resolve then
+ local a = resolve_predefined and predefined_simplified[str] -- true by default
if a then
if trace_entities then
report_xml("resolving entity &%s; to predefined %a",str,a)
@@ -565,46 +625,185 @@ local function handle_any_entity(str)
end
end
end
- acache[str] = a
- elseif trace_entities then
- if not acache[str] then
- report_xml("converting entity &%s; to %a",str,a)
- acache[str] = a
+ return a
+ else
+ local a = acache[str]
+ if not a then
+ a = resolve_predefined and predefined_simplified[str]
+ if a then
+ -- one of the predefined
+ acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %a",str,a)
+ end
+ elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
+ a = badentity
+ acache[str] = a
+ else
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
+ acache[str] = a
+ end
end
+ return a
end
- return a
- else
- local a = acache[str]
- if not a then
- a = resolve_predefined and predefined_simplified[str]
+ end
+
+ handle_any_entity_text = function(str)
+ if resolve then
+ local a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- acache[str] = a
- if trace_entities then
- report_xml("entity &%s; becomes %a",str,a)
- end
- elseif str == "" then
if trace_entities then
- report_xml("invalid entity &%s;",str)
+ report_xml("resolving entity &%s; to predefined %a",str,a)
end
- a = badentity
- acache[str] = a
else
- if trace_entities then
- report_xml("entity &%s; is made private",str)
+ if type(resolve) == "function" then
+ a = resolve(str,entities) or entities[str]
+ else
+ a = entities[str]
+ end
+ if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; to function call",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(grammar_parsed_text_two,a) or a
+ if type(a) == "number" then
+ return ""
+ else
+ a = lpegmatch(parsedentity,a) or a -- for nested
+ if trace_entities then
+ report_xml("resolving entity &%s; to internal %a",str,a)
+ end
+ end
+ if trace_entities then
+ report_xml("resolving entity &%s; to internal %a",str,a)
+ end
+ else
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
+ end
+ if a then
+ if trace_entities then
+ report_xml("resolving entity &%s; to external %s",str,a)
+ end
+ else
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = badentity
+ else
+ a = "&" .. str .. ";"
+ end
+ end
+ end
+ end
+ return a
+ else
+ local a = acache[str]
+ if not a then
+ a = resolve_predefined and predefined_simplified[str]
+ if a then
+ -- one of the predefined
+ acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %a",str,a)
+ end
+ elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
+ a = badentity
+ acache[str] = a
+ else
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
+ acache[str] = a
end
- -- a = "&" .. str .. ";"
- a = unescaped(str)
- acache[str] = a
end
+ return a
+ end
+ end
+
+ -- for tex
+
+ local p_rest = (1-P(";"))^1
+
+ local spec = {
+ [0x23] = "\\Ux{23}", -- #
+ [0x24] = "\\Ux{24}", -- $
+ [0x25] = "\\Ux{25}", -- %
+ [0x5C] = "\\Ux{5C}", -- \
+ [0x7B] = "\\Ux{7B}", -- {
+ [0x7C] = "\\Ux{7C}", -- |
+ [0x7D] = "\\Ux{7D}", -- }
+ [0x7E] = "\\Ux{7E}", -- ~
+ }
+
+ local hash = table.setmetatableindex(spec,function(t,k)
+ local v = utfchar(k)
+ t[k] = v
+ return v
+ end)
+
+ local function fromuni(s)
+ local n = tonumber(s,16)
+ if n then
+ return hash[n]
+ else
+ return formatters["u:%s"](s), true
+ end
+ end
+
+ local function fromhex(s)
+ local n = tonumber(s,16)
+ if n then
+ return hash[n]
+ else
+ return formatters["h:%s"](s), true
+ end
+ end
+
+ local function fromdec(s)
+ local n = tonumber(s)
+ if n then
+ return hash[n]
+ else
+ return formatters["d:%s"](s), true
end
- return a
end
+
+ local reparsedentity =
+ P("U+") * (p_rest/fromuni)
+ + P("#") * (
+ P("x") * (p_rest/fromhex)
+ + p_rest/fromdec
+ )
+
+ xml.reparsedentitylpeg = reparsedentity
+
end
--- local function handle_end_entity(chr)
--- report_xml("error in entity, %a found instead of %a",chr,";")
--- end
+-- we use these later on
+
+local escaped = xml.escaped
+local unescaped = xml.unescaped
+local placeholders = xml.placeholders
+
+--
local function handle_end_entity(str)
report_xml("error in entity, %a found without ending %a",str,";")
@@ -641,13 +840,19 @@ local decentitycontent = R("09")^1
local parsedentity = P("#")/"" * (
P("x")/"" * (hexentitycontent/handle_hex_entity) +
(decentitycontent/handle_dec_entity)
- ) + (anyentitycontent/handle_any_entity)
+ ) + (anyentitycontent/handle_any_entity_dtd) -- can be Cc(true)
+local parsedentity_text= P("#")/"" * (
+ P("x")/"" * (hexentitycontent/handle_hex_entity) +
+ (decentitycontent/handle_dec_entity)
+ ) + (anyentitycontent/handle_any_entity_text) -- can be Cc(false)
----- entity = ampersand/"" * parsedentity * ( (semicolon/"") + #(P(1)/handle_end_entity))
-local entity = (ampersand/"") * parsedentity * (semicolon/"")
+local entity = (ampersand/"") * parsedentity * (semicolon/"")
+ + ampersand * (anyentitycontent / handle_end_entity)
+local entity_text = (ampersand/"") * parsedentity_text * (semicolon/"")
+ ampersand * (anyentitycontent / handle_end_entity)
local text_unparsed = C((1-open)^1)
-local text_parsed = Cs(((1-open-ampersand)^1 + entity)^1)
+local text_parsed = (Cs((1-open-ampersand)^1)/add_text + Cs(entity_text)/add_text)^1
local somespace = space^1
local optionalspace = space^0
@@ -669,7 +874,7 @@ local attribute = (somespace * name * optionalspace * equal * optionalspa
local attributes = (attribute + somespace^-1 * (((1-endofattributes)^1)/attribute_specification_error))^0
-local parsedtext = text_parsed / add_text
+local parsedtext = text_parsed -- / add_text
local unparsedtext = text_unparsed / add_text
local balanced = P { "[" * ((1 - S"[]") + V(1))^0 * "]" } -- taken from lpeg manual, () example
@@ -690,9 +895,30 @@ local someinstruction = C((1 - endinstruction)^0)
local somecomment = C((1 - endcomment )^0)
local somecdata = C((1 - endcdata )^0)
-local function normalentity(k,v ) entities[k] = v end
-local function systementity(k,v,n) entities[k] = v end
-local function publicentity(k,v,n) entities[k] = v end
+local function weirdentity(k,v)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","weird",k,v)
+ end
+ parameters[k] = v
+end
+local function normalentity(k,v)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","normal",k,v)
+ end
+ entities[k] = v
+end
+local function systementity(k,v,n)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","system",k,v)
+ end
+ entities[k] = v
+end
+local function publicentity(k,v,n)
+ if trace_entities then
+ report_xml("registering %s entity %a as %a","public",k,v)
+ end
+ entities[k] = v
+end
-- todo: separate dtd parser
@@ -700,19 +926,34 @@ local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
local endset = P("]")
+local wrdtypename = C((1-somespace-P(";"))^1)
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
-local normalentitytype = (doctypename * somespace * value)/normalentity
+local weirdentitytype = P("%") * (somespace * doctypename * somespace * value) / weirdentity
+local normalentitytype = (doctypename * somespace * value) / normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
-local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
+local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype + weirdentitytype) * optionalspace * close
+
+local function weirdresolve(s)
+ lpegmatch(entitydoctype,parameters[s])
+end
+
+local function normalresolve(s)
+ lpegmatch(entitydoctype,entities[s])
+end
+
+local entityresolve = P("%") * (wrdtypename/weirdresolve ) * P(";")
+ + P("&") * (wrdtypename/normalresolve) * P(";")
+
+entitydoctype = entitydoctype + entityresolve
-- we accept comments in doctypes
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + entityresolve + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
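The weirdentity branch and the entityresolve pattern added here deal with parameter entities in the internal DTD subset: an <!ENTITY % name "..."> declaration is stored in parameters, and a later %name; reference re-runs the entity grammar on that stored text, so declarations hidden inside parameter entities still end up in entities. A hedged sketch of the kind of input this targets (the sample is illustrative only and its exact runtime behaviour is not verified here):

local sample = [[
<!DOCTYPE doc [
  <!ENTITY % version "<!ENTITY release '1.0'>">
  %version;
]>
<doc>release &release;</doc>
]]

-- resolve_entities lets the text pass expand &release; from the declarations above
local tree = xml.convert(sample, { resolve_entities = true })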
@@ -724,13 +965,11 @@ local comment = (spacing * begincomment * somecomment * endcomm
local cdata = (spacing * begincdata * somecdata * endcdata ) / function(...) add_special("@cd@",...) end
local doctype = (spacing * begindoctype * somedoctype * enddoctype ) / function(...) add_special("@dt@",...) end
--- local text_unparsed = C((1-open)^1)
--- local text_parsed = Cs(((1-open-ampersand)^1 + entity)^1)
-
local crap_parsed = 1 - beginelement - endelement - emptyelement - begininstruction - begincomment - begincdata - ampersand
local crap_unparsed = 1 - beginelement - endelement - emptyelement - begininstruction - begincomment - begincdata
-local parsedcrap = Cs((crap_parsed^1 + entity)^1) / handle_crap_error
-local unparsedcrap = Cs((crap_unparsed )^1) / handle_crap_error
+local parsedcrap = Cs((crap_parsed^1 + entity_text)^1) / handle_crap_error
+local parsedcrap = Cs((crap_parsed^1 + entity_text)^1) / handle_crap_error
+local unparsedcrap = Cs((crap_unparsed )^1) / handle_crap_error
-- nicer but slower:
--
@@ -745,8 +984,18 @@ local trailer = space^0 * (text_unparsed/set_message)^0
-- text + comment + emptyelement + cdata + instruction + V("parent"), -- 5.8
-- text + V("parent") + emptyelement + comment + cdata + instruction, -- 5.5
-local grammar_parsed_text = P { "preamble",
- preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * V("parent") * trailer,
+-- local grammar_parsed_text = P { "preamble",
+-- preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * V("parent") * trailer,
+-- parent = beginelement * V("children")^0 * endelement,
+-- children = parsedtext + V("parent") + emptyelement + comment + cdata + instruction + parsedcrap,
+-- }
+
+grammar_parsed_text_one = P { "preamble",
+ preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0,
+}
+
+grammar_parsed_text_two = P { "followup",
+ followup = V("parent") * trailer,
parent = beginelement * V("children")^0 * endelement,
children = parsedtext + V("parent") + emptyelement + comment + cdata + instruction + parsedcrap,
}
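Splitting the old grammar in two means the converter can first consume the preamble with grammar_parsed_text_one and then restart grammar_parsed_text_two at the position the first match returned (an lpeg match without captures yields the next position, which doubles as the init argument of the second call); the same body grammar can then also be reapplied to entity replacement text in handle_any_entity_text. A minimal stand-alone illustration of chaining two patterns that way:

local P, C = lpeg.P, lpeg.C

local p_header = P("<!-- preamble -->")^-1 * P(" ")^0   -- pass one: returns the position after the prefix
local p_body   = C(P(1)^1)                              -- pass two: starts where pass one stopped

local data = "<!-- preamble --> <doc/>"
local pos  = lpeg.match(p_header,data)
if pos then
    print(lpeg.match(p_body,data,pos))                  -- <doc/>
end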
@@ -760,40 +1009,27 @@ local grammar_unparsed_text = P { "preamble",
-- maybe we will add settings to result as well
local function _xmlconvert_(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- --
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
- resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- entities = settings.entities or { }
- --
- if utfize == nil then
- settings.utfize_entities = true
- utfize = true
- end
- if resolve_predefined == nil then
- settings.resolve_predefined_entities = true
- resolve_predefined = true
- end
- --
- stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
- acache, hcache, dcache = { }, { }, { } -- not stored
- reported_attribute_errors = { }
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ preparexmlstate(settings)
if settings.parent_root then
mt = getmetatable(settings.parent_root)
else
initialize_mt(top)
end
- stack[#stack+1] = top
+ level = level + 1
+ stack[level] = top
top.dt = { }
dt = top.dt
+ nt = 0
if not data or data == "" then
errorstr = "empty xml file"
elseif utfize or resolve then
- if lpegmatch(grammar_parsed_text,data) then
+ local m = lpegmatch(grammar_parsed_text_one,data)
+ if m then
+ m = lpegmatch(grammar_parsed_text_two,data,m)
+ end
+ -- local m = lpegmatch(grammar_parsed_text,data)
+ if m then
-- errorstr = "" can be set!
else
errorstr = "invalid xml file - parsed text"
@@ -810,8 +1046,8 @@ local function _xmlconvert_(data, settings)
local result
if errorstr and errorstr ~= "" then
result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } }
-setmetatable(result, mt)
-setmetatable(result.dt[1], mt)
+ setmetatable(result, mt)
+ setmetatable(result.dt[1], mt)
setmetatable(stack, mt)
local errorhandler = settings.error_handler
if errorhandler == false then
@@ -851,16 +1087,13 @@ setmetatable(result.dt[1], mt)
result.statistics = {
errormessage = errorstr,
entities = {
- decimals = dcache,
- hexadecimals = hcache,
- names = acache,
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ intermediates = parameters,
}
}
- strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
- unify_predefined, cleanup, entities = nil, nil, nil
- stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
- acache, hcache, dcache = nil, nil, nil
- reported_attribute_errors, mt, errorhandler = nil, nil, nil
+ preparexmlstate() -- resets
return result
end
@@ -965,15 +1198,37 @@ generic table copier. Since we know what we're dealing with we
can speed up things a bit. The second argument is not to be used!</p>
--ldx]]--
-local function copy(old,tables)
+-- local function copy(old,tables)
+-- if old then
+-- if not tables then
+-- tables = { }
+-- end
+-- local new = { }
+-- if not tables[old] then
+-- tables[old] = new
+-- end
+-- for k,v in next, old do
+-- new[k] = (type(v) == "table" and (tables[v] or copy(v, tables))) or v
+-- end
+-- local mt = getmetatable(old)
+-- if mt then
+-- setmetatable(new,mt)
+-- end
+-- return new
+-- else
+-- return { }
+-- end
+-- end
+
+local function copy(old)
if old then
- tables = tables or { }
local new = { }
- if not tables[old] then
- tables[old] = new
- end
for k,v in next, old do
- new[k] = (type(v) == "table" and (tables[v] or copy(v, tables))) or v
+ if type(v) == "table" then
+ new[k] = table.copy(v)
+ else
+ new[k] = v
+ end
end
local mt = getmetatable(old)
if mt then
@@ -1097,7 +1352,7 @@ local function verbose_cdata(e,handlers)
end
local function verbose_doctype(e,handlers)
- handlers.handle("<!DOCTYPE ",e.dt[1],">")
+ handlers.handle("<!DOCTYPE",e.dt[1],">") -- has space at end of string
end
local function verbose_root(e,handlers)
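All per-conversion state now lives in module-level upvalues that a single preparexmlstate(settings) call initializes and, invoked without arguments at the end of _xmlconvert_, clears again; that replaces the long assignment and cleanup lists that used to sit inside the converter. The idiom in miniature (illustrative names, not the module's full state):

local stack, level, errorstr             -- shared parser state

local function preparestate(settings)
    if settings then
        stack, level, errorstr = { }, 0, nil
    else                                 -- reset: drop everything between runs
        stack, level, errorstr = nil, nil, nil
    end
end

local function convert(data,settings)
    preparestate(settings or { })
    -- ... parse data into stack ...
    local result = stack
    preparestate()                       -- release state so nothing lingers
    return result
end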
diff --git a/tex/context/base/mkiv/lxml-tex.lua b/tex/context/base/mkiv/lxml-tex.lua
index aad86ed6d..09f1e10f9 100644
--- a/tex/context/base/mkiv/lxml-tex.lua
+++ b/tex/context/base/mkiv/lxml-tex.lua
@@ -15,7 +15,9 @@ local concat, insert, remove, sortedkeys = table.concat, table.insert, table.rem
local format, sub, gsub, find, gmatch, match = string.format, string.sub, string.gsub, string.find, string.gmatch, string.match
local type, next, tonumber, tostring, select = type, next, tonumber, tostring, select
local lpegmatch = lpeg.match
-local P, S, C, Cc = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc
+local P, S, C, Cc, Cs = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cs
+local patterns = lpeg.patterns
+local setmetatableindex = table.setmetatableindex
local tex, xml = tex, xml
local lowerchars, upperchars, lettered = characters.lower, characters.upper, characters.lettered
@@ -43,9 +45,8 @@ local xmlcollect = xml.collect
local xmltext = xml.text
local xmltostring = xml.tostring
local xmlapplylpath = xml.applylpath
-local xmlunprivatized = xml.unprivatized
+local xmlunspecialized = xml.unspecialized
local xmlprivatetoken = xml.privatetoken
-local xmlprivatecodes = xml.privatecodes
local xmlstripelement = xml.stripelement
local xmlinclusion = xml.inclusion
local xmlinclusions = xml.inclusions
@@ -78,21 +79,24 @@ local report_lxml = logs.reporter("lxml","tex")
local report_xml = logs.reporter("xml","tex")
local forceraw = false
-local forceraw = nil
+
+local p_texescape = patterns.texescape
-- tex entities
---
--- todo: unprivatize attributes
lxml.entities = lxml.entities or { }
storage.register("lxml/entities",lxml.entities,"lxml.entities")
--- xml.placeholders.unknown_any_entity = nil -- has to be per xml
+local xmlentities = xml.entities -- these are more or less standard entities
+local texentities = lxml.entities -- these are specific for a tex run
+local parsedentity = xml.reparsedentitylpeg
+
+local useelement = false -- probably no longer needed / used
-local xmlentities = xml.entities
-local texentities = lxml.entities
-local parsedentity = xml.parsedentitylpeg
+directives.register("lxml.entities.useelement",function(v)
+ useelement = v
+end)
function lxml.registerentity(key,value)
texentities[key] = value
@@ -103,6 +107,7 @@ end
function lxml.resolvedentity(str)
if forceraw then
+ -- should not happen as we then can as well bypass this function
if trace_entities then
report_xml("passing entity %a as &%s;",str,str)
end
@@ -151,12 +156,19 @@ function lxml.resolvedentity(str)
report_xml("passing faulty entity %a as %a",str,err)
end
context(err)
- else
+ elseif useelement then
local tag = upperchars(str)
if trace_entities then
report_xml("passing entity %a to \\xmle using tag %a",str,tag)
end
- context.xmle(str,tag) -- we need to use our own upper
+ contextsprint(texcatcodes,"\\xmle{")
+ contextsprint(notcatcodes,e)
+ contextsprint(texcatcodes,"}")
+ else
+ if trace_entities then
+ report_xml("passing entity %a as %a using %a",str,str,"notcatcodes")
+ end
+ contextsprint(notcatcodes,str)
end
end
end
@@ -181,11 +193,10 @@ local texfinalizers = finalizers.tex
-- serialization with entity handling
-local exceptions = false
-
local ampersand = P("&")
local semicolon = P(";")
-local entity = ampersand * C((1-semicolon)^1) * semicolon / lxml.resolvedentity -- context.bold
+
+local entity = (ampersand * C((1-semicolon)^1) * semicolon) / lxml.resolvedentity -- context.bold
local _, xmltextcapture_yes = context.newtexthandler {
catcodes = notcatcodes,
@@ -237,7 +248,6 @@ local xmltextcapture = xmltextcapture_yes
local xmlspacecapture = xmlspacecapture_yes
local xmllinecapture = xmllinecapture_yes
local ctxtextcapture = ctxtextcapture_yes
-local prefertexentities = true
directives.register("lxml.entities.escaped",function(v)
if v then
@@ -253,10 +263,6 @@ directives.register("lxml.entities.escaped",function(v)
end
end)
-directives.register("lxml.entities.prefertex",function(v)
- prefertex = v
-end)
-
-- cdata
local toverbatim = context.newverbosehandler {
@@ -468,24 +474,35 @@ function xml.load(filename,settings)
return xmltable
end
--- local function entityconverter(id,str,ent) -- todo ent optional
--- return xmlentities[str] or ent[str] or xmlprivatetoken(str) or "" -- roundtrip handler
--- end
-
local function entityconverter(id,str,ent) -- todo: disable tex entities when raw
- if prefertexentities then
- return xmlentities[str] or (texentities[str] and xmlprivatetoken(str)) or ent[str] or xmlprivatetoken(str) or "" -- roundtrip handler
- else
- return xmlentities[str] or ent[str] or (texentities[str] and xmlprivatetoken(str)) or xmlprivatetoken(str) or "" -- roundtrip handler
+ -- tex driven entity
+ local t = texentities[str]
+ if t then
+ local p = xmlprivatetoken(str)
+-- only once
+-- context.xmlprivate(p,t)
+ return p
+ end
+ -- dtd determined entity
+ local e = ent and ent[str]
+ if e then
+ return e
+ end
+ -- predefined entity (mathml and so)
+ local x = xmlentities[str]
+ if x then
+ return x
end
+ -- keep original somehow
+ return xmlprivatetoken(str)
end
local function lxmlconvert(id,data,compress,currentresource)
local settings = { -- we're now roundtrip anyway
- unify_predefined_entities = true,
- utfize_entities = true,
- resolve_predefined_entities = true,
- resolve_entities = function(str,ent) return entityconverter(id,str,ent) end, -- needed for mathml
+ unify_predefined_entities = false, -- is also default
+ utfize_entities = true, -- is also default
+ resolve_predefined_entities = true, -- is also default
+ resolve_entities = function(str,ent) return entityconverter(id,str,ent) end,
currentresource = tostring(currentresource or id),
}
if compress and compress == variables.yes then
@@ -619,10 +636,6 @@ function lxml.loaddata(id,str,compress)
return xmltable, id
end
-function lxml.loadregistered(id)
- return loaded[id], id
-end
-
-- e.command:
--
-- string : setup
@@ -642,6 +655,21 @@ end
local default_element_handler = xml.gethandlers("verbose").functions["@el@"]
+-- local xmlw = setmetatableindex(function(t,k)
+-- local v = setmetatableindex(function(t,kk)
+-- local v
+-- if kk == false then
+-- v = "\\xmlw{" .. k .. "}{"
+-- else
+-- v = "\\xmlw{" .. k .. "}{" .. kk .. "::"
+-- end
+-- t[kk] = v
+-- return v
+-- end)
+-- t[k]= v
+-- return v
+-- end)
+
local function tex_element(e,handlers)
local command = e.command
if command == nil then
@@ -662,9 +690,11 @@ local function tex_element(e,handlers)
end
-- faster than context.xmlw
contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",rootname,"::",ix,"}")
+ -- contextsprint(ctxcatcodes,xmlw[command][rootname],ix,"}")
else
report_lxml("fatal error: no index for %a",command)
contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",ix or 0,"}")
+ -- contextsprint(ctxcatcodes,xmlw[command][false],ix or 0,"}")
end
elseif tc == "function" then
command(e)
@@ -734,11 +764,16 @@ local function tex_cdata(e,handlers)
end
end
+-- we could try to merge the conversion and flusher but we don't gain much and it makes tracing
+-- harder: xunspecialized = utf.remapper(xml.specialcodes,"dynamic",lxml.resolvedentity)
+
local function tex_text(e)
- e = xmlunprivatized(e)
+ e = xmlunspecialized(e)
lpegmatch(xmltextcapture,e)
end
+--
+
local function ctx_text(e) -- can be just context(e) as we split there
lpegmatch(ctxtextcapture,e)
end
@@ -767,7 +802,7 @@ lxml.xmltexhandler = xmltexhandler
-- begin of test
local function tex_space(e)
- e = xmlunprivatized(e)
+ e = xmlunspecialized(e)
lpegmatch(xmlspacecapture,e)
end
@@ -785,7 +820,7 @@ local xmltexspacehandler = xml.newhandlers {
}
local function tex_line(e)
- e = xmlunprivatized(e)
+ e = xmlunspecialized(e)
lpegmatch(xmllinecapture,e)
end
@@ -841,13 +876,13 @@ local function sprint(root) -- check rawroot usage
local tr = type(root)
if tr == "string" then -- can also be result of lpath
-- rawroot = false -- ?
- root = xmlunprivatized(root)
+ root = xmlunspecialized(root)
lpegmatch(xmltextcapture,root)
elseif tr == "table" then
if forceraw then
rawroot = root
-- contextsprint(ctxcatcodes,xmltostring(root)) -- goes wrong with % etc
- root = xmlunprivatized(xmltostring(root))
+ root = xmlunspecialized(xmltostring(root))
lpegmatch(xmltextcapture,root) -- goes to toc
else
xmlserialize(root,xmltexhandler)
@@ -868,7 +903,7 @@ local function tprint(root) -- we can move sprint inline
end
end
elseif tr == "string" then
- root = xmlunprivatized(root)
+ root = xmlunspecialized(root)
lpegmatch(xmltextcapture,root)
end
end
@@ -879,14 +914,14 @@ local function cprint(root) -- content
-- quit
elseif type(root) == 'string' then
-- rawroot = false
- root = xmlunprivatized(root)
+ root = xmlunspecialized(root)
lpegmatch(xmltextcapture,root)
else
local rootdt = root.dt
if forceraw then
rawroot = root
-- contextsprint(ctxcatcodes,xmltostring(rootdt or root))
- root = xmlunprivatized(xmltostring(root))
+ root = xmlunspecialized(xmltostring(root))
lpegmatch(xmltextcapture,root) -- goes to toc
else
xmlserialize(rootdt or root,xmltexhandler)
@@ -1261,6 +1296,9 @@ local function index(collected,n)
contextsprint(ctxcatcodes,0) -- why ctxcatcodes
end
+-- the number of commands is often relatively small but there can be many calls
+-- to this finalizer
+
local function command(collected,cmd,otherwise)
local n = collected and #collected
if n and n > 0 then
@@ -1286,6 +1324,45 @@ local function command(collected,cmd,otherwise)
end
end
+-- local wildcards = setmetatableindex(function(t,k)
+-- local v = false
+-- if find(k,"%*") then
+-- v = setmetatableindex(function(t,kk)
+-- local v = gsub(k,"%*",kk)
+-- t[k] = v
+-- -- report_lxml("wildcard %a key %a value %a",kk,k,v)
+-- return v
+-- end)
+-- end
+-- t[k] = v
+-- return v
+-- end)
+--
+-- local function command(collected,cmd,otherwise)
+-- local n = collected and #collected
+-- if n and n > 0 then
+-- local wildcard = wildcards[cmd]
+-- for c=1,n do -- maybe optimize for n=1
+-- local e = collected[c]
+-- local ix = e.ix
+-- local name = e.name
+-- if name and not ix then
+-- addindex(name,false,true)
+-- ix = e.ix
+-- end
+-- if not ix or not name then
+-- report_lxml("no valid node index for element %a using command %s",name or "?",cmd)
+-- elseif wildcard then
+-- contextsprint(ctxcatcodes,"\\xmlw{",wildcard[e.tg],"}{",name,"::",ix,"}")
+-- else
+-- contextsprint(ctxcatcodes,"\\xmlw{",cmd,"}{",name,"::",ix,"}")
+-- end
+-- end
+-- elseif otherwise then
+-- contextsprint(ctxcatcodes,"\\xmlw{",otherwise,"}{#1}")
+-- end
+-- end
+
local function attribute(collected,a,default)
if collected and #collected > 0 then
local at = collected[1].at
@@ -1557,7 +1634,22 @@ end
function lxml.raw(id,pattern) -- the content, untouched by commands
local collected = (pattern and xmlapplylpath(getid(id),pattern)) or getid(id)
if collected and #collected > 0 then
- contextsprint(notcatcodes,xmltostring(collected[1].dt))
+ local s = xmltostring(collected[1].dt)
+ if s ~= "" then
+ contextsprint(notcatcodes,s)
+ end
+ end
+end
+
+-- templates
+
+function lxml.rawtex(id,pattern) -- the content, untouched by commands
+ local collected = (pattern and xmlapplylpath(getid(id),pattern)) or getid(id)
+ if collected and #collected > 0 then
+ local s = xmltostring(collected[1].dt)
+ if s ~= "" then
+ contextsprint(notcatcodes,lpegmatch(p_texescape,s) or s)
+ end
end
end
@@ -1681,6 +1773,8 @@ do
elseif default and default ~= "" then
att = default
contextsprint(notcatcodes,default)
+ else
+ att = ""
end
end
@@ -1690,14 +1784,16 @@ do
local at = e.at
if at then
att = at[a]
- if str and str ~= "" then
- str = gsub(str,"^#+","")
- if str ~= "" then
- contextsprint(notcatcodes,str)
+ if att and att ~= "" then
+ att = gsub(att,"^#+","")
+ if att ~= "" then
+ contextsprint(notcatcodes,att)
+ return
end
end
end
end
+ att = ""
end
function lxml.lastatt()
@@ -1706,7 +1802,7 @@ do
end
-function lxml.name(id) -- or remapped name? -> lxml.info, combine
+function lxml.name(id)
local e = getid(id)
if e then
local ns = e.rn or e.ns
@@ -1718,7 +1814,7 @@ function lxml.name(id) -- or remapped name? -> lxml.info, combine
end
end
-function lxml.match(id) -- or remapped name? -> lxml.info, combine
+function lxml.match(id)
local e = getid(id)
contextsprint(ctxcatcodes,e and e.mi or 0)
end
@@ -1733,7 +1829,7 @@ function lxml.tag(id) -- tag vs name -> also in l-xml tag->name
end
end
-function lxml.namespace(id) -- or remapped name?
+function lxml.namespace(id)
local e = getid(id)
if e then
local ns = e.rn or e.ns
@@ -1803,10 +1899,6 @@ function lxml.elements(id,pattern,reverse)
return xmlelements(getid(id),pattern,reverse)
end
--- obscure ones
-
-lxml.info = lxml.name
-
-- testers
local found, empty = xml.found, xml.empty
@@ -2014,7 +2106,7 @@ xml.pihandlers["injector"] = function(category,rest,e)
end
end
-local pattern = P("context-") * C((1-lpeg.patterns.whitespace)^1) * C(P(1)^1)
+local pattern = P("context-") * C((1-patterns.whitespace)^1) * C(P(1)^1)
function lxml.applyselectors(id)
local root = getid(id)
diff --git a/tex/context/base/mkiv/mlib-lua.lua b/tex/context/base/mkiv/mlib-lua.lua
index e7f8f9cc5..baf9346c4 100644
--- a/tex/context/base/mkiv/mlib-lua.lua
+++ b/tex/context/base/mkiv/mlib-lua.lua
@@ -296,16 +296,14 @@ function metapost.runscript(code)
if result then
local t = type(result)
if t == "number" then
- t = f_numeric(result)
- elseif t == "string" then
- t = result
- else
- t = tostring(result)
+ result = f_numeric(result)
+ elseif t ~= "string" then
+ result = tostring(result)
end
if trace then
- report_luarun("result: %s",code)
+ report_luarun("result: %s",result)
end
- return t
+ return result
elseif trace then
report_luarun("no result")
end
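metapost.runscript now converts the returned value in place, so what goes back to MetaPost is the formatted number, the unchanged string, or tostring() of anything else, and the tracing line reports that converted value instead of the submitted code. The conversion step on its own, with a plain format string standing in for f_numeric (an assumption, not the module's formatter):

local function toresult(result)
    local t = type(result)
    if t == "number" then
        return string.format("%.17g",result)   -- stand-in for f_numeric
    elseif t == "string" then
        return result
    else
        return tostring(result)                -- booleans, tables, userdata
    end
end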
diff --git a/tex/context/base/mkiv/mult-dim.mkvi b/tex/context/base/mkiv/mult-dim.mkvi
index 6e2b22038..0224e3dbb 100644
--- a/tex/context/base/mkiv/mult-dim.mkvi
+++ b/tex/context/base/mkiv/mult-dim.mkvi
@@ -145,12 +145,16 @@
\def\assign_width_direct#value#dimension#content#extra{#dimension=#value\relax}
+% line is like fit but can be used later as signal for ...
+
\setvalue{\??dimensionwidth }#value#dimension#content#extra{\setbox\b_assign_width\hbox{#content}#dimension\wd\b_assign_width
\setbox\b_assign_width\emptybox}
\setvalue{\??dimensionwidth\v!fit }#value#dimension#content#extra{\setbox\b_assign_width\hbox{#content}#dimension\wd\b_assign_width
\setbox\b_assign_width\emptybox}
\setvalue{\??dimensionwidth\v!broad }#value#dimension#content#extra{\setbox\b_assign_width\hbox{#content}#dimension\dimexpr\wd\b_assign_width+#extra\relax
\setbox\b_assign_width\emptybox}
+\setvalue{\??dimensionwidth\v!line }#value#dimension#content#extra{\setbox\b_assign_width\hbox{#content}#dimension\wd\b_assign_width
+ \setbox\b_assign_width\emptybox}
\letvalue{\??dimensionwidth\s!unknown}\assign_width_direct
% \unexpanded\def\assignwidth#value%
diff --git a/tex/context/base/mkiv/mult-ini.lua b/tex/context/base/mkiv/mult-ini.lua
index c51c4c3d4..7763994d6 100644
--- a/tex/context/base/mkiv/mult-ini.lua
+++ b/tex/context/base/mkiv/mult-ini.lua
@@ -9,6 +9,7 @@ if not modules then modules = { } end modules ['mult-ini'] = {
local format, gmatch, match = string.format, string.gmatch, string.match
local lpegmatch = lpeg.match
local serialize, concat = table.serialize, table.concat
+local rawget, type = rawget, type
local context = context
local commands = commands
@@ -156,7 +157,7 @@ function interfaces.getmessage(category,tag,default)
end
function interfaces.doifelsemessage(category,tag)
- return formats[fulltag(category,tag)]
+ return rawget(formats,fulltag(category,tag))
end
local splitter = lpeg.splitat(",")
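doifelsemessage now probes the formats table with rawget, so merely testing for a message cannot trigger an __index handler on that table (which could otherwise create or report a phantom entry). A generic illustration of the difference, not the module's actual metatable:

local formats = setmetatable({ }, {
    __index = function(t,k)
        local v = "unset:" .. k      -- a default-producing handler
        t[k] = v
        return v
    end,
})

print(rawget(formats,"title"))       -- nil: no side effects
print(formats["title"])              -- "unset:title": __index ran and cached an entry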
diff --git a/tex/context/base/mkiv/mult-ini.mkiv b/tex/context/base/mkiv/mult-ini.mkiv
index 57403ea4f..9d7db394c 100644
--- a/tex/context/base/mkiv/mult-ini.mkiv
+++ b/tex/context/base/mkiv/mult-ini.mkiv
@@ -389,6 +389,9 @@
\let\doifmessageelse\doifelsemessage
+\unexpanded\def\inlinemessage #1{\dontleavehmode{\tttf#1}}
+\unexpanded\def\displaymessage#1{\blank\inlinemessage{#1}\blank}
+
%D \macros
%D {ifshowwarnings, ifshowmessages}
%D
@@ -840,7 +843,7 @@
\def\contextversionnumber{0}
\else
%\def\contextversionnumber#1.#2.#3 #4:#5\relax{#1\ifnum#2<10 0\fi#2\ifnum#3<10 0\fi#3 #4:#5}
- \def\contextversionnumber#1.#2.#3 #4:#5\relax{#1\ifnum#2<10 0\fi\purenumber{#2}\ifnum#3<10 0\fi\purenumber{#3} #4:#5}
+ \def\contextversionnumber#1.#2.#3 #4:#5\relax{#1\ifnum#2<10 0\fi\purenumber{#2}\ifnum#3<10 0\fi\purenumber{#3} #4:#5}
\edef\contextversionnumber{\expandafter\contextversionnumber\contextversion\relax\space\contextmark}
\fi
diff --git a/tex/context/base/mkiv/mult-low.lua b/tex/context/base/mkiv/mult-low.lua
index 9b363d2d6..3ad62e2f0 100644
--- a/tex/context/base/mkiv/mult-low.lua
+++ b/tex/context/base/mkiv/mult-low.lua
@@ -131,6 +131,8 @@ return {
"figurespace", "punctuationspace", "hairspace",
"zerowidthspace", "zerowidthnonjoiner", "zerowidthjoiner", "zwnj", "zwj",
"optionalspace", "asciispacechar",
+ --
+ "Ux",
},
["helpers"] = {
--
@@ -409,7 +411,7 @@ return {
--
"Ucheckedstartdisplaymath", "Ucheckedstopdisplaymath",
--
- "nobreak", "allowbreak", "goodbreak",
+ "break", "nobreak", "allowbreak", "goodbreak",
--
"nospace", "nospacing", "dospacing",
--
diff --git a/tex/context/base/mkiv/mult-prm.lua b/tex/context/base/mkiv/mult-prm.lua
index 4363e8d5e..4c222548a 100644
--- a/tex/context/base/mkiv/mult-prm.lua
+++ b/tex/context/base/mkiv/mult-prm.lua
@@ -415,7 +415,9 @@ return {
"pdfimageresolution",
"pdfincludechars",
"pdfinclusioncopyfonts",
+ "pdfignoreunknownimages",
"pdfinclusionerrorlevel",
+ "pdfignoreunknownimages",
"pdfinfo",
"pdfinsertht",
"pdflastannot",
@@ -448,6 +450,7 @@ return {
"pdfpagewidth",
"pdfpkmode",
"pdfpkresolution",
+ "pdfpkfixeddpi",
"pdfprimitive",
"pdfprotrudechars",
"pdfpxdimen",
@@ -1034,6 +1037,7 @@ return {
"pdfincludechars",
"pdfinclusioncopyfonts",
"pdfinclusionerrorlevel",
+ "pdfignoreunknownimages",
"pdfinfo",
"pdfinsertht",
"pdflastannot",
@@ -1066,6 +1070,7 @@ return {
"pdfpagewidth",
"pdfpkmode",
"pdfpkresolution",
+ "pdfpkfixeddpi",
"pdfprimitive",
"pdfprotrudechars",
"pdfpxdimen",
diff --git a/tex/context/base/mkiv/node-acc.lua b/tex/context/base/mkiv/node-acc.lua
index 407e808cd..31359d936 100644
--- a/tex/context/base/mkiv/node-acc.lua
+++ b/tex/context/base/mkiv/node-acc.lua
@@ -72,6 +72,7 @@ local function injectspaces(head)
setchar(g,32)
setlink(p,g)
setlink(g,n)
+-- we could cache as we now create many nodes
setfield(n,"width",getfield(n,"width") - getfield(g,"width"))
if a then
setattr(g,a_characters,a)
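The new comment in injectspaces hints at reusing nodes instead of building a fresh glyph per injected space. One way that could look, caching a template space glyph per font and copying it on demand (a sketch using the userdata node interface rather than the direct accessors the module uses; the caching scheme is an assumption, not the module's code):

local nodenew, nodecopy = node.new, node.copy

local spacetemplates = { }                -- font id -> template glyph

local function cachedspaceglyph(fontid)
    local template = spacetemplates[fontid]
    if not template then
        template = nodenew("glyph")
        template.font = fontid
        template.char = 32
        spacetemplates[fontid] = template
    end
    return nodecopy(template)             -- each injected space is a copy of the template
end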
diff --git a/tex/context/base/mkiv/pack-rul.mkiv b/tex/context/base/mkiv/pack-rul.mkiv
index 5fa938bda..ef59209e0 100644
--- a/tex/context/base/mkiv/pack-rul.mkiv
+++ b/tex/context/base/mkiv/pack-rul.mkiv
@@ -1114,6 +1114,10 @@
\def\c!fr!analyze{fr:analyze} % private option
+\let\delayedbegstrut\relax
+\let\delayedendstrut\relax
+\let\delayedstrut \relax
+
\unexpanded\def\pack_framed_process_indeed
{\d_framed_frameoffset\framedparameter\c!frameoffset
\edef\p_framed_backgroundoffset{\framedparameter\c!backgroundoffset}%
diff --git a/tex/context/base/mkiv/sort-ini.lua b/tex/context/base/mkiv/sort-ini.lua
index b21308657..3266425cb 100644
--- a/tex/context/base/mkiv/sort-ini.lua
+++ b/tex/context/base/mkiv/sort-ini.lua
@@ -509,10 +509,11 @@ end
local pattern = nil
-local function prepare()
+local function prepare() -- todo: test \Ux{hex}
pattern = Cs( (
characters.tex.toutfpattern()
+ lpeg.patterns.whitespace / "\000"
+ + (P("\\Ux{") / "" * ((1-P("}"))^1/function(s) return utfchar(tonumber(s,16)) end) * (P("}")/""))
+ (P("\\") / "") * R("AZ")^0 * (P(-1) + #(1-R("AZ")))
+ (P("\\") * P(1) * R("az","AZ")^0) / ""
+ S("[](){}$\"'") / ""
diff --git a/tex/context/base/mkiv/status-files.pdf b/tex/context/base/mkiv/status-files.pdf
index 59a62ce24..392bf9a1f 100644
--- a/tex/context/base/mkiv/status-files.pdf
+++ b/tex/context/base/mkiv/status-files.pdf
Binary files differ
diff --git a/tex/context/base/mkiv/status-lua.pdf b/tex/context/base/mkiv/status-lua.pdf
index 9064b3f05..f7b9aa479 100644
--- a/tex/context/base/mkiv/status-lua.pdf
+++ b/tex/context/base/mkiv/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/mkiv/strc-con.mkvi b/tex/context/base/mkiv/strc-con.mkvi
index 792f3b156..be6e9d359 100644
--- a/tex/context/base/mkiv/strc-con.mkvi
+++ b/tex/context/base/mkiv/strc-con.mkvi
@@ -301,7 +301,9 @@
\ifdim\constructionsheaddistance=\zeropoint
\ifx\p_strc_constructions_width\v!broad
\constructionsheaddistance\emwidth
- \fi
+ \else\ifx\p_strc_constructions_width\v!line
+ \constructionsheaddistance\emwidth
+ \fi\fi
\fi
\fi
% inefficient and not always needed, for instance not with margins so we will make checkers
@@ -752,6 +754,7 @@
\stopsetups
\startsetups[\??constructionrenderings:\v!serried]
+ % already set?
\edef\p_strc_constructions_width{\constructionparameter\c!width}% CHECK ! ! ! wrong parameter namespace
\doifelsesetups{\??constructionrenderings:\v!serried:\p_strc_constructions_width} {
\directsetup{\??constructionrenderings:\v!serried:\p_strc_constructions_width}
@@ -776,7 +779,7 @@
\flushconstructionheadbox
\ifconditional\c_strc_constructions_distance_none \else
\nobreak
- \hskip\constructionsheaddistance \!!plus .5\constructionsheaddistance \!!minus .25\constructionsheaddistance\relax
+ \hskip\constructionsheaddistance \s!plus .5\constructionsheaddistance \s!minus .25\constructionsheaddistance\relax
\fi
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
@@ -824,7 +827,13 @@
\s!plus .25\scratchdistance
\s!minus.25\scratchdistance
\fi
- \allowbreak % new
+ \ifhmode
+ \ifx\p_strc_constructions_width\v!line
+ \hfill\break
+ \else
+ \allowbreak
+ \fi
+ \fi
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
\stopsetups
diff --git a/tex/context/base/mkiv/strc-ini.lua b/tex/context/base/mkiv/strc-ini.lua
index 7a6153096..dffbe3fcd 100644
--- a/tex/context/base/mkiv/strc-ini.lua
+++ b/tex/context/base/mkiv/strc-ini.lua
@@ -233,6 +233,13 @@ local tags = {
--
-- local command = formatters["\\xmlprocessbuffer{%s}{%s}{}"](metadata.xmlroot or "main",tag)
+local overload_catcodes = true
+
+directives.register("typesetters.processors.overloadcatcodes",function(v)
+ -- number | true | false | string
+ overload_catcodes = v
+end)
+
local experiment = true
function helpers.title(title,metadata) -- coding is xml is rather old and not that much needed now
@@ -265,12 +272,7 @@ function helpers.title(title,metadata) -- coding is xml is rather old and not th
ctx_xmlsetup(title,metadata.xmlsetup)
else
local catcodes = metadata.catcodes
- if catcodes == notcatcodes or catcodes == xmlcatcodes then
- if trace_processors then
- report_processors("catcodetable %a, overloads %a, text %a",ctxcatcodes,catcodes,title)
- end
- context(title) -- nasty
- else
+ if overload_catcodes == false then
if trace_processors then
report_processors("catcodetable %a, text %a",catcodes,title)
end
@@ -283,10 +285,30 @@ function helpers.title(title,metadata) -- coding is xml is rather old and not th
ctx_pushcatcodes(catcodes)
context(title)
ctx_popcatcodes()
+ elseif overload_catcodes == true then
+ if catcodes == notcatcodes or catcodes == xmlcatcodes then
+ -- when was this needed
+ if trace_processors then
+ report_processors("catcodetable %a, overloads %a, text %a",ctxcatcodes,catcodes,title)
+ end
+ context(title)
+ else
+ ctx_pushcatcodes(catcodes)
+ context(title)
+ ctx_popcatcodes()
+ end
+ else
+ if trace_processors then
+ report_processors("catcodetable %a, overloads %a, text %a",catcodes,overload_catcodes,title)
+ end
+ ctx_pushcatcodes(overload_catcodes)
+ context(title)
+ ctx_popcatcodes()
end
end
else
- context(title) -- no catcode switch, was: texsprint(title)
+ -- no catcode switch, was: texsprint(title)
+ context(title)
end
end
end
diff --git a/tex/context/base/mkiv/syst-aux.mkiv b/tex/context/base/mkiv/syst-aux.mkiv
index 5b7059ea9..726b3d644 100644
--- a/tex/context/base/mkiv/syst-aux.mkiv
+++ b/tex/context/base/mkiv/syst-aux.mkiv
@@ -4904,121 +4904,79 @@
% no longer \def but \let to target toks .. the space gobbling \relax will go
-\unexpanded\def\appendtoks {\syst_helpers_append_toks \relax}
-\unexpanded\def\prependtoks {\syst_helpers_prepend_toks \relax}
-\unexpanded\def\appendtoksonce {\syst_helpers_append_toks_once \relax}
-\unexpanded\def\prependtoksonce{\syst_helpers_prepend_toks_once\relax}
+% \unexpanded\def\appendtoks {\syst_helpers_append_toks \relax}
+% \unexpanded\def\prependtoks {\syst_helpers_prepend_toks \relax}
+% \unexpanded\def\appendtoksonce {\syst_helpers_append_toks_once \relax}
+% \unexpanded\def\prependtoksonce{\syst_helpers_prepend_toks_once\relax}
+%
+% \def\syst_helpers_append_toks_indeed
+% {\dodoglobal\m_syst_helpers_scratch\doubleexpandafter{\expandafter\the\expandafter\m_syst_helpers_scratch\the\t_syst_helpers_scratch}}
+%
+% \def\syst_helpers_prepend_toks_indeed
+% {\dodoglobal\m_syst_helpers_scratch\doubleexpandafter{\expandafter\the\expandafter\t_syst_helpers_scratch\the\m_syst_helpers_scratch}}
+%
+% \def\syst_helpers_append_toks#1\to#2%
+% {\let\m_syst_helpers_scratch#2%
+% \t_syst_helpers_scratch\expandafter{\gobbleoneargument#1}%
+% \syst_helpers_append_toks_indeed}
+%
+% \def\syst_helpers_prepend_toks#1\to#2%
+% {\let\m_syst_helpers_scratch#2%
+% \t_syst_helpers_scratch\expandafter{\gobbleoneargument#1}%
+% \syst_helpers_prepend_toks_indeed}
+%
+% \def\syst_helpers_append_toks_once#1\to#2%
+% {\let\m_syst_helpers_scratch#2%
+% \t_syst_helpers_scratch\expandafter{\gobbleoneargument#1}%
+% \doifelseintoks\t_syst_helpers_scratch\m_syst_helpers_scratch
+% \donothing
+% \syst_helpers_append_toks_indeed}
+%
+% \def\syst_helpers_prepend_toks_once#1\to#2%
+% {\let\m_syst_helpers_scratch#2%
+% \t_syst_helpers_scratch\expandafter{\gobbleoneargument#1}%
+% \doifelseintoks\t_syst_helpers_scratch\m_syst_helpers_scratch
+% \donothing
+% \syst_helpers_prepend_toks_indeed}
+
+\unexpanded\def\appendtoks#1\to#2%
+ {\toksapp#2{#1}%
+ \ifx\dodoglobal\relax\else
+ \global#2#2%
+ \fi}
+
+\unexpanded\def\prependtoks#1\to#2%
+ {\tokspre#2{#1}%
+ \ifx\dodoglobal\relax\else
+ \global#2#2%
+ \fi}
\def\syst_helpers_append_toks_indeed
- {\dodoglobal\m_syst_helpers_scratch\doubleexpandafter{\expandafter\the\expandafter\m_syst_helpers_scratch\the\t_syst_helpers_scratch}}
+ {\toksapp\m_syst_helpers_scratch\t_syst_helpers_scratch
+ \ifx\dodoglobal\relax\else
+ \global\m_syst_helpers_scratch\m_syst_helpers_scratch
+ \fi}
\def\syst_helpers_prepend_toks_indeed
- {\dodoglobal\m_syst_helpers_scratch\doubleexpandafter{\expandafter\the\expandafter\t_syst_helpers_scratch\the\m_syst_helpers_scratch}}
-
-\def\syst_helpers_append_toks#1\to#2%
- {\let\m_syst_helpers_scratch#2%
- \t_syst_helpers_scratch\expandafter{\gobbleoneargument#1}%
- \syst_helpers_append_toks_indeed}
-
-\def\syst_helpers_prepend_toks#1\to#2%
- {\let\m_syst_helpers_scratch#2%
- \t_syst_helpers_scratch\expandafter{\gobbleoneargument#1}%
- \syst_helpers_prepend_toks_indeed}
+ {\tokspre\m_syst_helpers_scratch\t_syst_helpers_scratch
+ \ifx\dodoglobal\relax\else
+ \global\m_syst_helpers_scratch\m_syst_helpers_scratch
+ \fi}
-\def\syst_helpers_append_toks_once#1\to#2%
+\unexpanded\def\appendtoksonce#1\to#2%
{\let\m_syst_helpers_scratch#2%
- \t_syst_helpers_scratch\expandafter{\gobbleoneargument#1}%
+ \t_syst_helpers_scratch{#1}%
\doifelseintoks\t_syst_helpers_scratch\m_syst_helpers_scratch
\donothing
\syst_helpers_append_toks_indeed}
-\def\syst_helpers_prepend_toks_once#1\to#2%
+\unexpanded\def\prependtoksonce#1\to#2%
{\let\m_syst_helpers_scratch#2%
- \t_syst_helpers_scratch\expandafter{\gobbleoneargument#1}%
+ \t_syst_helpers_scratch{#1}%
\doifelseintoks\t_syst_helpers_scratch\m_syst_helpers_scratch
\donothing
\syst_helpers_prepend_toks_indeed}
-\ifdefined\toksapp
-
- % \def\syst_helpers_append_toks#1\to#2%
- % {\toksapp#2\expandafter{\gobbleoneargument#1}%
- % \ifx\dodoglobal\relax\else
- % \global#2#2%
- % \fi}
- %
- % \def\syst_helpers_prepend_toks#1\to#2%
- % {\tokspre#2\expandafter{\gobbleoneargument#1}%
- % \ifx\dodoglobal\relax\else
- % \global#2#2%
- % \fi}
- %
- % \def\syst_helpers_append_toks_indeed
- % {\toksapp\m_syst_helpers_scratch\t_syst_helpers_scratch
- % \ifx\dodoglobal\relax\else
- % \global\m_syst_helpers_scratch\m_syst_helpers_scratch
- % \fi}
- %
- % \def\syst_helpers_prepend_toks_indeed
- % {\tokspre\m_syst_helpers_scratch\t_syst_helpers_scratch
- % \ifx\dodoglobal\relax\else
- % \global\m_syst_helpers_scratch\m_syst_helpers_scratch
- % \fi}
- %
- % \def\syst_helpers_append_toks_once#1\to#2%
- % {\let\m_syst_helpers_scratch#2%
- % \t_syst_helpers_scratch\expandafter{\gobbleoneargument#1}%
- % \doifelseintoks\t_syst_helpers_scratch\m_syst_helpers_scratch
- % \donothing
- % \syst_helpers_append_toks_indeed}
- %
- % \def\syst_helpers_prepend_toks_once#1\to#2%
- % {\let\m_syst_helpers_scratch#2%
- % \t_syst_helpers_scratch\expandafter{\gobbleoneargument#1}%
- % \doifelseintoks\t_syst_helpers_scratch\m_syst_helpers_scratch
- % \donothing
- % \syst_helpers_prepend_toks_indeed}
-
- \unexpanded\def\appendtoks#1\to#2%
- {\toksapp#2{#1}%
- \ifx\dodoglobal\relax\else
- \global#2#2%
- \fi}
-
- \unexpanded\def\prependtoks#1\to#2%
- {\tokspre#2{#1}%
- \ifx\dodoglobal\relax\else
- \global#2#2%
- \fi}
-
- \def\syst_helpers_append_toks_indeed
- {\toksapp\m_syst_helpers_scratch\t_syst_helpers_scratch
- \ifx\dodoglobal\relax\else
- \global\m_syst_helpers_scratch\m_syst_helpers_scratch
- \fi}
-
- \def\syst_helpers_prepend_toks_indeed
- {\tokspre\m_syst_helpers_scratch\t_syst_helpers_scratch
- \ifx\dodoglobal\relax\else
- \global\m_syst_helpers_scratch\m_syst_helpers_scratch
- \fi}
-
- \unexpanded\def\appendtoksonce#1\to#2%
- {\let\m_syst_helpers_scratch#2%
- \t_syst_helpers_scratch{#1}%
- \doifelseintoks\t_syst_helpers_scratch\m_syst_helpers_scratch
- \donothing
- \syst_helpers_append_toks_indeed}
-
- \unexpanded\def\prependtoksonce#1\to#2%
- {\let\m_syst_helpers_scratch#2%
- \t_syst_helpers_scratch{#1}%
- \doifelseintoks\t_syst_helpers_scratch\m_syst_helpers_scratch
- \donothing
- \syst_helpers_prepend_toks_indeed}
-
-\fi
-
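
The rework above drops the \expandafter based appenders in favour of the \toksapp and \tokspre primitives. A minimal usage sketch (the register name \MyToks is made up for this example; everything else is defined above or elsewhere in ConTeXt):

\starttyping
\newtoks\MyToks
\appendtoks     \bold \to \MyToks % \MyToks now holds: \bold
\prependtoks    \em   \to \MyToks % \MyToks now holds: \em \bold
\appendtoksonce \bold \to \MyToks % \bold is already present, so nothing changes
\stoptyping

As before, a preceding \doglobal (which sets \dodoglobal) turns the assignment into a global one; that is what the \ifx\dodoglobal\relax branches above take care of.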
%D The test macro:
\unexpanded\def\doifelseintoks#1#2% #1 en #2 zijn toks
@@ -5028,6 +4986,36 @@
\let\doifintokselse\doifelseintoks
+%D Moved from \type{lxml-ini.tex} to here. This one is for generators that
+%D collect stuff piecewise, which is sometimes hard on mechanisms that
+%D grab content using delimiters:
+%D
+%D \starttyping
+%D \startcollecting
+%D \startcollect \bTABLE \stopcollect
+%D \startcollect \bTR \stopcollect
+%D \startcollect \bTD \stopcollect
+%D \startcollect foo\stopcollect
+%D \startcollect \eTD \stopcollect
+%D \startcollect \bTD \stopcollect
+%D \startcollect bar\stopcollect
+%D \startcollect \eTD \stopcollect
+%D \startcollect \eTR \stopcollect
+%D \startcollect \eTABLE \stopcollect
+%D \stopcollecting
+%D \stoptyping
+
+\newtoks \collectingtoks
+
+\unexpanded\def\startcollect #1\stopcollect {\toksapp \collectingtoks{#1}}
+\unexpanded\def\startexpandedcollect#1\stopexpandedcollect{\etoksapp\collectingtoks{#1}}
+
+\unexpanded\def\startcollecting{\collectingtoks\emptytoks}
+\unexpanded\def\stopcollecting {\the\collectingtoks}
+
+\unexpanded\def\collect {\toksapp \collectingtoks}
+\unexpanded\def\collectexpanded{\etoksapp\collectingtoks}
+
%D A nice one too:
% {\scratchtoks{abc} \removetoks b\from\scratchtoks [\the\scratchtoks]}
@@ -5044,28 +5032,24 @@
%D Also:
-\unexpanded\def\appendetoks #1\to{\normalexpanded{\appendtoks #1}\to}
-\unexpanded\def\prependetoks#1\to{\normalexpanded{\prependtoks#1}\to}
-
-\ifdefined\toksapp
-
- \def\appendetoks#1\to#2%
- {\etoksapp#2{#1}%
- \ifx\dodoglobal\relax\else
- \global#2#2%
- \fi}
+% \unexpanded\def\appendetoks #1\to{\normalexpanded{\appendtoks #1}\to}
+% \unexpanded\def\prependetoks#1\to{\normalexpanded{\prependtoks#1}\to}
- \def\prependetoks#1\to#2%
- {\etokspre#2{#1}%
- \ifx\dodoglobal\relax\else
- \global#2#2%
- \fi}
+\unexpanded\def\appendetoks#1\to#2%
+ {\etoksapp#2{#1}%
+ \ifx\dodoglobal\relax\else
+ \global#2#2%
+ \fi}
-\fi
+\unexpanded\def\prependetoks#1\to#2%
+ {\etokspre#2{#1}%
+ \ifx\dodoglobal\relax\else
+ \global#2#2%
+ \fi}
%D Hm.
-\unexpanded\def\flushtoks#1% nb: can reassing to #1 again, hence the indirectness
+\unexpanded\def\flushtoks#1% nb: can reassign to #1 again, hence the indirectness
{\t_syst_helpers_scratch#1\relax
\dodoglobal#1\emptytoks
\the\t_syst_helpers_scratch\relax}
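
The only difference between \appendtoks and \appendetoks (and their prepend counterparts) is when expansion happens: \etoksapp and \etokspre expand the argument at append time. A sketch, again with a made-up register name:

\starttyping
\newtoks\MyToks
\scratchcounter 5\relax
\appendtoks  \the\scratchcounter \to \MyToks % stores the tokens \the\scratchcounter
\appendetoks \the\scratchcounter \to \MyToks % stores their expansion: 5
\stoptyping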
diff --git a/tex/context/base/mkiv/syst-ini.mkiv b/tex/context/base/mkiv/syst-ini.mkiv
index 93c21c6ce..e165ee8a3 100644
--- a/tex/context/base/mkiv/syst-ini.mkiv
+++ b/tex/context/base/mkiv/syst-ini.mkiv
@@ -301,105 +301,6 @@
%normalprotected\def\newhelp#1#2{\newtoks#1#1\expandafter{\csname#2\endcsname}}
\normalprotected\def\newhelp#1#2{\newtoks#1#1\expandafter{\detokenize{#2}}}
-% For the moment we define some \pdf... helpers but this will move to the backend
-% module after which the official interfaces have to be used. This is needed for
-% modules not made by ctx developers.
-
-\ifdefined\pdfextension
-
- \normalprotected\def\pdfliteral {\pdfextension literal }
- \normalprotected\def\pdfcolorstack {\pdfextension colorstack }
- \normalprotected\def\pdfsetmatrix {\pdfextension setmatrix }
- \normalprotected\def\pdfsave {\pdfextension save\relax}
- \normalprotected\def\pdfrestore {\pdfextension restore\relax}
- \normalprotected\def\pdfobj {\pdfextension obj }
- \normalprotected\def\pdfrefobj {\pdfextension refobj }
- \normalprotected\def\pdfannot {\pdfextension annot }
- \normalprotected\def\pdfstartlink {\pdfextension startlink }
- \normalprotected\def\pdfendlink {\pdfextension endlink\relax}
- \normalprotected\def\pdfoutline {\pdfextension outline }
- \normalprotected\def\pdfdest {\pdfextension dest }
- \normalprotected\def\pdfthread {\pdfextension thread }
- \normalprotected\def\pdfstartthread {\pdfextension startthread }
- \normalprotected\def\pdfendthread {\pdfextension endthread\relax}
- \normalprotected\def\pdfinfo {\pdfextension info }
- \normalprotected\def\pdfcatalog {\pdfextension catalog }
- \normalprotected\def\pdfnames {\pdfextension names }
- \normalprotected\def\pdfincludechars {\pdfextension includechars }
- \normalprotected\def\pdffontattr {\pdfextension fontattr }
- \normalprotected\def\pdfmapfile {\pdfextension mapfile }
- \normalprotected\def\pdfmapline {\pdfextension mapline }
- \normalprotected\def\pdftrailer {\pdfextension trailer }
- \normalprotected\def\pdfglyphtounicode {\pdfextension glyphtounicode }
-
-\fi
-
-\ifdefined\pdffeedback
-
- \def\pdftexversion {\numexpr\pdffeedback version}
- \def\pdftexrevision {\pdffeedback revision}
- \def\pdflastlink {\numexpr\pdffeedback lastlink}
- \def\pdfretval {\numexpr\pdffeedback retval}
- \def\pdflastobj {\numexpr\pdffeedback lastobj}
- \def\pdflastannot {\numexpr\pdffeedback lastannot}
- \def\pdfxformname {\numexpr\pdffeedback xformname}
- \def\pdfcreationdate {\pdffeedback creationdate}
- \def\pdffontname {\numexpr\pdffeedback fontname}
- \def\pdffontobjnum {\numexpr\pdffeedback fontobjnum}
- \def\pdffontsize {\dimexpr\pdffeedback fontsize}
- \def\pdfpageref {\numexpr\pdffeedback pageref}
- \def\pdfcolorstackinit {\pdffeedback colorstackinit}
-
-\fi
-
-\ifdefined\pdfxform \else
- \let\pdfxform \saveboxresource
- \let\pdflastxform \lastsavedboxresourceindex
- \let\pdfrefxform \useboxresource
- \let\pdfximage \saveimageresource
- \let\pdflastximage \lastsavedimageresourceindex
- \let\pdflastximagepages \lastsavedimageresourcepages
- \let\pdfrefximage \useimageresource
- \let\pdfsavepos \savepos
- \let\pdflastxpos \lastxpos
- \let\pdflastypos \lastypos
-\fi
-
-\ifdefined\pdfvariable
-
- \edef\pdfcompresslevel {\pdfvariable compresslevel} \pdfcompresslevel 9
- \edef\pdfobjcompresslevel {\pdfvariable objcompresslevel} \pdfobjcompresslevel 1
- \edef\pdfdecimaldigits {\pdfvariable decimaldigits} \pdfdecimaldigits 6 % for ctx, 4 is default
- \edef\pdfgamma {\pdfvariable gamma} \pdfgamma 1000
- \edef\pdfimageresolution {\pdfvariable imageresolution} \pdfimageresolution 71
- \edef\pdfimageapplygamma {\pdfvariable imageapplygamma} \pdfimageapplygamma 0
- \edef\pdfimagegamma {\pdfvariable imagegamma} \pdfimagegamma 2200
- \edef\pdfimagehicolor {\pdfvariable imagehicolor} \pdfimagehicolor 1
- \edef\pdfimageaddfilename {\pdfvariable imageaddfilename} \pdfimageaddfilename 1
- \edef\pdfpkresolution {\pdfvariable pkresolution} \pdfpkresolution 72
- \edef\pdfinclusioncopyfonts {\pdfvariable inclusioncopyfonts} \pdfinclusioncopyfonts 0
- \edef\pdfinclusionerrorlevel {\pdfvariable inclusionerrorlevel} \pdfinclusionerrorlevel 0
- \edef\pdfgentounicode {\pdfvariable gentounicode} \pdfgentounicode 0
- \edef\pdfpagebox {\pdfvariable pagebox} \pdfpagebox 0
- \edef\pdfminorversion {\pdfvariable minorversion} \pdfminorversion 4
- \edef\pdfuniqueresname {\pdfvariable uniqueresname} \pdfuniqueresname 0
-
- \edef\pdfhorigin {\pdfvariable horigin} \pdfhorigin 1in
- \edef\pdfvorigin {\pdfvariable vorigin} \pdfvorigin 1in
- \edef\pdflinkmargin {\pdfvariable linkmargin} \pdflinkmargin 0pt
- \edef\pdfdestmargin {\pdfvariable destmargin} \pdfdestmargin 0pt
- \edef\pdfthreadmargin {\pdfvariable threadmargin} \pdfthreadmargin 0pt
- % \edef\pdfxformmargin {\pdfvariable xformmargin} \pdfxformmargin 0pt
-
- \edef\pdfpagesattr {\pdfvariable pagesattr}
- \edef\pdfpageattr {\pdfvariable pageattr}
- \edef\pdfpageresources {\pdfvariable pageresources}
- \edef\pdfxformattr {\pdfvariable xformattr}
- \edef\pdfxformresources {\pdfvariable xformresources}
- \edef\pdfpkmode {\pdfvariable pkmode}
-
-\fi
-
%D \macros
%D {scratchcounter,
%D scratchdimen,scratchskip,scratchmuskip,
@@ -1043,19 +944,94 @@
\spanomit \advance\mscount\minusone
\repeat}
-%D We need to make sure that we start up in \DVI\ mode, so, after testing for running
-%D \PDFTEX, we default to \DVI. Why?
+%D Backend:
-\outputmode \zerocount
-\pdfminorversion \plusseven
-\pdfgentounicode \plusone
-\pdfinclusioncopyfonts \plusone
-\pdfinclusionerrorlevel \zerocount
-\pdfdecimaldigits \plussix
-\pdfhorigin 1 true in
-\pdfvorigin \pdfhorigin
-\pdfimageresolution 300
-\pdfpkresolution 600
+% For the moment we define some \pdf... helpers but this will move to the backend
+% module after which the official interfaces have to be used. This is needed for
+% modules not made by ctx developers.
+
+\normalprotected\def\pdfliteral {\pdfextension literal }
+\normalprotected\def\pdfcolorstack {\pdfextension colorstack }
+\normalprotected\def\pdfsetmatrix {\pdfextension setmatrix }
+\normalprotected\def\pdfsave {\pdfextension save\relax}
+\normalprotected\def\pdfrestore {\pdfextension restore\relax}
+\normalprotected\def\pdfobj {\pdfextension obj }
+\normalprotected\def\pdfrefobj {\pdfextension refobj }
+\normalprotected\def\pdfannot {\pdfextension annot }
+\normalprotected\def\pdfstartlink {\pdfextension startlink }
+\normalprotected\def\pdfendlink {\pdfextension endlink\relax}
+\normalprotected\def\pdfoutline {\pdfextension outline }
+\normalprotected\def\pdfdest {\pdfextension dest }
+\normalprotected\def\pdfthread {\pdfextension thread }
+\normalprotected\def\pdfstartthread {\pdfextension startthread }
+\normalprotected\def\pdfendthread {\pdfextension endthread\relax}
+\normalprotected\def\pdfinfo {\pdfextension info }
+\normalprotected\def\pdfcatalog {\pdfextension catalog }
+\normalprotected\def\pdfnames {\pdfextension names }
+\normalprotected\def\pdfincludechars {\pdfextension includechars }
+\normalprotected\def\pdffontattr {\pdfextension fontattr }
+\normalprotected\def\pdfmapfile {\pdfextension mapfile }
+\normalprotected\def\pdfmapline {\pdfextension mapline }
+\normalprotected\def\pdftrailer {\pdfextension trailer }
+\normalprotected\def\pdfglyphtounicode {\pdfextension glyphtounicode }
+
+\def\pdftexversion {\numexpr\pdffeedback version}
+\def\pdftexrevision {\pdffeedback revision}
+\def\pdflastlink {\numexpr\pdffeedback lastlink}
+\def\pdfretval {\numexpr\pdffeedback retval}
+\def\pdflastobj {\numexpr\pdffeedback lastobj}
+\def\pdflastannot {\numexpr\pdffeedback lastannot}
+\def\pdfxformname {\numexpr\pdffeedback xformname}
+\def\pdfcreationdate {\pdffeedback creationdate}
+\def\pdffontname {\numexpr\pdffeedback fontname}
+\def\pdffontobjnum {\numexpr\pdffeedback fontobjnum}
+\def\pdffontsize {\dimexpr\pdffeedback fontsize}
+\def\pdfpageref {\numexpr\pdffeedback pageref}
+\def\pdfcolorstackinit {\pdffeedback colorstackinit}
+
+\let\pdfxform \saveboxresource
+\let\pdflastxform \lastsavedboxresourceindex
+\let\pdfrefxform \useboxresource
+\let\pdfximage \saveimageresource
+\let\pdflastximage \lastsavedimageresourceindex
+\let\pdflastximagepages \lastsavedimageresourcepages
+\let\pdfrefximage \useimageresource
+\let\pdfsavepos \savepos
+\let\pdflastxpos \lastxpos
+\let\pdflastypos \lastypos
+
+\edef\pdfcompresslevel {\pdfvariable compresslevel} \pdfcompresslevel \plusnine
+\edef\pdfobjcompresslevel {\pdfvariable objcompresslevel} \pdfobjcompresslevel \plusone
+\edef\pdfdecimaldigits {\pdfvariable decimaldigits} \pdfdecimaldigits \plussix
+\edef\pdfgamma {\pdfvariable gamma} \pdfgamma \plusthousand
+\edef\pdfimageresolution {\pdfvariable imageresolution} \pdfimageresolution 300
+\edef\pdfimageapplygamma {\pdfvariable imageapplygamma} \pdfimageapplygamma \zerocount
+\edef\pdfimagegamma {\pdfvariable imagegamma} \pdfimagegamma 2200
+\edef\pdfimagehicolor {\pdfvariable imagehicolor} \pdfimagehicolor \plusone
+\edef\pdfimageaddfilename {\pdfvariable imageaddfilename} \pdfimageaddfilename \plusone
+\edef\pdfpkresolution {\pdfvariable pkresolution} \pdfpkresolution 1200
+\edef\pdfpkfixeddpi {\pdfvariable pkfixeddpi} \pdfpkfixeddpi 1
+\edef\pdfinclusioncopyfonts {\pdfvariable inclusioncopyfonts} \pdfinclusioncopyfonts \plusone
+\edef\pdfinclusionerrorlevel {\pdfvariable inclusionerrorlevel} \pdfinclusionerrorlevel \zerocount
+\edef\pdfignoreunknownimages {\pdfvariable ignoreunknownimages} \pdfignoreunknownimages \zerocount
+\edef\pdfgentounicode {\pdfvariable gentounicode} \pdfgentounicode \plusone
+\edef\pdfpagebox {\pdfvariable pagebox} \pdfpagebox \zerocount
+\edef\pdfminorversion {\pdfvariable minorversion} \pdfminorversion \plusseven
+\edef\pdfuniqueresname {\pdfvariable uniqueresname} \pdfuniqueresname \zerocount
+
+\edef\pdfhorigin {\pdfvariable horigin} \pdfhorigin 1in
+\edef\pdfvorigin {\pdfvariable vorigin} \pdfvorigin \pdfhorigin
+\edef\pdflinkmargin {\pdfvariable linkmargin} \pdflinkmargin \zeropoint
+\edef\pdfdestmargin {\pdfvariable destmargin} \pdfdestmargin \zeropoint
+\edef\pdfthreadmargin {\pdfvariable threadmargin} \pdfthreadmargin \zeropoint
+\edef\pdfxformmargin {\pdfvariable xformmargin} \pdfxformmargin \zeropoint
+
+\edef\pdfpagesattr {\pdfvariable pagesattr}
+\edef\pdfpageattr {\pdfvariable pageattr}
+\edef\pdfpageresources {\pdfvariable pageresources}
+\edef\pdfxformattr {\pdfvariable xformattr}
+\edef\pdfxformresources {\pdfvariable xformresources}
+\edef\pdfpkmode {\pdfvariable pkmode}
\normalprotected\def\nopdfcompression {\pdfobjcompresslevel\zerocount \pdfcompresslevel\zerocount}
\normalprotected\def\maximumpdfcompression{\pdfobjcompresslevel\plusnine \pdfcompresslevel\plusnine }
@@ -1063,6 +1039,8 @@
\normalpdfcompression
+\outputmode \zerocount % we generate the format in this mode
+
%D Basic status stuff.
\newif\ifproductionrun
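
For modules that still speak the pdfTeX dialect, the compatibility layer that moved here keeps the old names working on top of \pdfextension, \pdffeedback and \pdfvariable. A hedged usage sketch (the literal operators and the catalog entry are placeholders, not part of this patch):

\starttyping
\pdfliteral{0.8 0 0 rg}              % raw code into the page stream
\pdfcatalog{/PageMode /UseOutlines}  % extend the document catalog
\nopdfcompression                    % both compression levels to zero
\stoptyping

The initializations above deliberately override some engine defaults, for instance \pdfminorversion 7, \pdfgentounicode 1 and \pdfpkresolution 1200.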
diff --git a/tex/context/base/mkiv/util-tab.lua b/tex/context/base/mkiv/util-tab.lua
index d6f3d6731..a6239adf4 100644
--- a/tex/context/base/mkiv/util-tab.lua
+++ b/tex/context/base/mkiv/util-tab.lua
@@ -833,5 +833,5 @@ end
table.serialize = serialize
if setinspector then
- setinspector("table",function(v) if type(v) == "table" then print(serialize(v,"table")) return true end end)
+ setinspector("table",function(v) if type(v) == "table" then print(serialize(v,"table",{})) return true end end)
end
diff --git a/tex/context/modules/mkiv/m-scite.mkiv b/tex/context/modules/mkiv/m-scite.mkiv
index 38c2f249e..9cf96253f 100644
--- a/tex/context/modules/mkiv/m-scite.mkiv
+++ b/tex/context/modules/mkiv/m-scite.mkiv
@@ -203,10 +203,11 @@ end
\installscitecommands
\tt
\dontcomplain
- \setcatcodetable\ctxcatcodes % needed in xml
+ \startcontextcode
\startscite
\getbuffer[lex]%
\stopscite
+ \stopcontextcode
\stop}
\unexpanded\def\scitebuffer
diff --git a/tex/context/modules/mkiv/x-mathml.lua b/tex/context/modules/mkiv/x-mathml.lua
index 50369407f..7d0b42d21 100644
--- a/tex/context/modules/mkiv/x-mathml.lua
+++ b/tex/context/modules/mkiv/x-mathml.lua
@@ -62,7 +62,7 @@ local ctx_right = context.right
-- todo: handle opening/closing mo's here ... presentation mml is such a mess ...
-characters.registerentities()
+-- characters.registerentities()
local doublebar = utfchar(0x2016)
diff --git a/tex/context/modules/mkiv/x-mathml.mkiv b/tex/context/modules/mkiv/x-mathml.mkiv
index ec56aa3df..7b474275b 100644
--- a/tex/context/modules/mkiv/x-mathml.mkiv
+++ b/tex/context/modules/mkiv/x-mathml.mkiv
@@ -39,6 +39,8 @@
\registerctxluafile{x-mathml}{}
+\setupxml[\c!entities=\v!yes] % load big entities table
+
\def\ctxmodulemathml#1{\ctxlua{moduledata.mathml.#1}}
\startxmlsetups xml:mml:define
@@ -1018,7 +1020,7 @@
\stopxmlsetups
\startxmlsetups mml:minus:body
- % we can slso use concat here
+ % we can also use concat here
\advance\mmlminuscounter\plusone
\ifnum\mmlminuscounter>\plusone
-
diff --git a/tex/context/modules/mkiv/x-set-11.mkiv b/tex/context/modules/mkiv/x-set-11.mkiv
index a75874191..91979cbbe 100644
--- a/tex/context/modules/mkiv/x-set-11.mkiv
+++ b/tex/context/modules/mkiv/x-set-11.mkiv
@@ -84,15 +84,19 @@
node: node
lpath: lpath
xmlsetup: xmlsetup
+ luafunction: luafunction
marking: markering
sectionblock: sectieblok
row: rij
column: kolom
url: url
- noargument: \texescape...
- oneargument: \texescape...\texthash1
- twoarguments: \texescape...\texthash1\texthash2
- threearguments: \texescape...\texthash1\texthash2\texthash3
+ first: eerste
+ last: laatste
+ setup: setup
+ buffer: buffer
+ true: true
+ false: false
+ category: category
\stopmessages
\startmessages english library: setup
@@ -145,15 +149,19 @@
node: node
lpath: lpath
xmlsetup: xmlsetup
+ luafunction: luafunction
marking: marking
sectionblock: sectionblock
row: row
column: column
url: url
- noargument: \texescape...
- oneargument: \texescape...\texthash1
- twoarguments: \texescape...\texthash1\texthash2
- threearguments: \texescape...\texthash1\texthash2\texthash3
+ first: first
+ last: last
+ setup: setup
+ buffer: buffer
+ true: true
+ false: false
+ category: category
\stopmessages
\startmessages german library: setup
@@ -206,15 +214,19 @@
node: node
lpath: lpath
xmlsetup: xmlsetup
+ luafunction: luafunction
marking: marking
sectionblock: sectionblock
row: row
column: column
url: url
- noargument: \texescape...
- oneargument: \texescape...\texthash1
- twoarguments: \texescape...\texthash1\texthash2
- threearguments: \texescape...\texthash1\texthash2\texthash3
+ first: first
+ last: last
+ setup: setup
+ buffer: buffer
+ true: true
+ false: false
+ category: category
\stopmessages
\startmessages czech library: setup
@@ -267,15 +279,19 @@
node: node
lpath: lpath
xmlsetup: xmlsetup
+ luafunction: luafunction
marking: marking
sectionblock: sectionblock
row: row
column: column
url: url
- noargument: \texescape...
- oneargument: \texescape...\texthash1
- twoarguments: \texescape...\texthash1\texthash2
- threearguments: \texescape...\texthash1\texthash2\texthash3
+ first: first
+ last: last
+ setup: setup
+ buffer: buffer
+ true: true
+ false: false
+ category: category
\stopmessages
\startmessages italian library: setup
@@ -328,15 +344,19 @@
node: node
lpath: lpath
xmlsetup: xmlsetup
+ luafunction: luafunction
marking: marking
sectionblock: sectionblock
row: row
column: column
url: url
- noargument: \texescape...
- oneargument: \texescape...\texthash1
- twoarguments: \texescape...\texthash1\texthash2
- threearguments: \texescape...\texthash1\texthash2\texthash3
+ first: first
+ last: last
+ setup: setup
+ buffer: buffer
+ true: true
+ false: false
+ category: category
\stopmessages
\startmessages romanian library: setup
@@ -389,15 +409,19 @@
node: node
lpath: lpath
xmlsetup: xmlsetup
+ luafunction: luafunction
marking: marking
sectionblock: sectionblock
row: row
column: column
url: url
- noargument: \texescape...
- oneargument: \texescape...\texthash1
- twoarguments: \texescape...\texthash1\texthash2
- threearguments: \texescape...\texthash1\texthash2\texthash3
+ first: first
+ last: last
+ setup: setup
+ buffer: buffer
+ true: true
+ false: false
+ category: category
\stopmessages
\startmessages french library: setup
@@ -450,15 +474,19 @@
node: node
lpath: lpath
xmlsetup: xmlsetup
+ luafunction: luafunction
marking: marking
sectionblock: sectionblock
row: row
column: column
url: url
- noargument: \texescape...
- oneargument: \texescape...\texthash1
- twoarguments: \texescape...\texthash1\texthash2
- threearguments: \texescape...\texthash1\texthash2\texthash3
+ first: first
+ last: last
+ setup: setup
+ buffer: buffer
+ true: true
+ false: false
+ category: category
\stopmessages
\unprotect
@@ -519,7 +547,7 @@
[\c!width=\hsize,
\c!height=\v!fit,
\c!align=\v!right,
- \c!offset=0.75em]
+ \c!offset=0.75\emwidth]
\popmacro\setuptext
@@ -534,11 +562,11 @@
\let\currentSETUPprefix\empty
}
\edef\currentSETUPname{\xmlatt{#1}{name}}
- \doifelse {\xmlatt{#1}{generated}} {yes} {
- \def\currentSETUPgenerated{*}
- } {
+% \doifelse {\xmlatt{#1}{generated}} {yes} {
+% \def\currentSETUPgenerated{*}
+% } {
\let\currentSETUPgenerated\empty
- }
+% }
\doifelsenothing {\xmlatt{#1}{variant}} {
\let\currentSETUPvariant\empty
} {
@@ -580,16 +608,37 @@
lxml.flush(definitions[name])
end
+ function xml.finalizers.s_count(collected)
+ local n = 0
+ for i=1,#collected do
+ local c = collected[i]
+ if c.tg == "resolve" then
+ local d = definitions[c.at.name]
+ n = n + xml.count(d,"/*")
+ else
+ n = n + 1
+ end
+ end
+ context(n)
+ end
+
\stopluacode
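
The new s_count finalizer is reached through the normal lpath function-call syntax; the typesetting setups further down use it to count arguments while expanding resolve elements to the size of the definition they point to:

\starttyping
\xdef\maximumSETUPargument{\xmlfilter{#1}{/arguments/*/s_count()}}
\stoptyping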
+% \startxmlsetups xml:setups:basics
+% \xmlinclude{#1}{include}{filename}%
+% \xmlsetsetup {#1} {*} {-}
+% \xmlsetsetup {#1} {
+% sequence|string|variable|assignments|keywords|content|displaymath|index|math|argument|
+% nothing|file|position|reference|csname|destination|triplet|word|
+% resolve|define|
+% parameter|constant|inherit|parameter
+% } {xml:setups:*}
+% \xmlfunction{#1}{setups_define}
+% \stopxmlsetups
+
\startxmlsetups xml:setups:basics
\xmlinclude{#1}{include}{filename}%
- \xmlsetsetup {#1} {
- sequence|string|variable|assignments|keywords|content|displaymath|index|math|argument|
- nothing|file|position|reference|csname|destination|triplet|word|
- resolve|define|
- parameter|constant|inherit|parameter
- } {xml:setups:*}
+ \xmlsetsetup {#1} {*} {xml:setups:*}
\xmlfunction{#1}{setups_define}
\stopxmlsetups
@@ -606,17 +655,21 @@
{\doifsomething{#1}
{\doonlyonce{setups:#1}
{\doglobal\prependtocommalist{setups:#1}\loadedsetups % last overloads first
+% \setupxml
+% [\c!default=\v!hidden, % ignore elements that are not defined
+% \c!compress=\v!yes]
\xmlloadonly{setups:#1}{#1}{setups}%
\xmlfilter{setups:#1}{/interface/command/command(xml:setups:register)}}}} % qualified path saves > 50% runtime
-\newif\ifshortsetup
+\newconstant\kindofsetup
-\unexpanded\def\setup {\shortsetupfalse\doshowsetup}
-\unexpanded\def\showsetup {\shortsetupfalse\doshowsetup}
-\unexpanded\def\shortsetup{\shortsetuptrue \doshowsetup}
+\unexpanded\def\basicsetup{\kindofsetup\zerocount\doshowsetup}
+\unexpanded\def\shortsetup{\kindofsetup\plusone \doshowsetup}
+\unexpanded\def\setup {\kindofsetup\plustwo \doshowsetup}
+\unexpanded\def\showsetup {\kindofsetup\plustwo \doshowsetup}
-%unexpanded\def\showsetupinlist#1#2#3{\shortsetupfalse\showsetupindeed{#3}\par}
-\unexpanded\def\showsetupinlist#1#2#3{\shortsetupfalse\xmlsetup{#3}{xml:setups:typeset}\par}
+%unexpanded\def\showsetupinlist#1#2#3{\kindofsetup\plustwo\showsetupindeed{#3}\par}
+\unexpanded\def\showsetupinlist#1#2#3{\kindofsetup\plustwo\xmlsetup{#3}{xml:setups:typeset}\par}
% todo: only references in lists
@@ -636,7 +689,8 @@
\def\showsetupindeed#1%
{\startelement[setup][name=#1]%
- \startelement[noexport][comment={setup definition #1}]
+ \startelement[noexport][comment={setup definition #1}]%
+% \nospaces\plusone
\xmlsetup{\rawsynonymname{texcommand}{stp:x:#1}}{xml:setups:typeset}
% \xmlfilterlist{\loadedsetups}{/interface/command['#1' == (@type=='environment' and '\e!start' or '') .. @name]/command(xml:setups:typeset)}%
\stopelement
@@ -649,10 +703,9 @@
%D Typesetting:
-\setupxml
- [\c!default=\v!hidden, % ignore elements that are not defined
- \c!compress=\v!yes, % strip comment
- \c!entities=\v!yes] % replace entities
+% \setupxml
+% [\c!default=\v!hidden, % ignore elements that are not defined
+% \c!compress=\v!yes]
\newcounter\currentSETUPargument
\newcounter\maximumSETUPargument
@@ -660,70 +713,159 @@
\def\currentSETUPwidth{0pt}
\startxmlsetups xml:setups:typeset
- \getvalue{\e!start setuptext}
- \tttf
- \nohyphens
- \veryraggedright
- \doglobal\newcounter\currentSETUPargument
- \xdef\maximumSETUPargument{\xmlcount{#1}{/arguments/*}}
- \edef\currentSETUPhash{\xmlatt{#1}{hash}}
+ \doifelsenothing {#1} {
+ \xmlsetup{#1}{xml:setups:typeset:nop}
+ } {
+ \xmlsetup{#1}{xml:setups:typeset:yes}
+ }
+\stopxmlsetups
+
+\startxmlsetups xml:setups:typeset:nop
+ \blank
+ \type {MISSING SETUP}
+ \blank
+\stopxmlsetups
+
+\startxmlsetups xml:setups:typeset:line
+ \ttbf
+ \nohyphens
+ \edef\currentSETUPhash{\xmlatt{#1}{hash}}
+ \bgroup
+ \enablemode[setups-pass-one]%
+ \doif {\xmlatt{#1}{generated}} {yes} {
+ \ttbs
+ }
+ \letterbackslash
+ \doif {\xmlatt{#1}{type}} {environment} {
+ \doifsomethingelse {\xmlatt{#1}{begin}} {
+ \xmllastatt
+ } {
+ \e!start
+ }
+ }
+ \xmldoifelseempty{#1}{/sequence} {
+ \xmlatt{#1}{name}
+ } {
+ \xmlfilter{#1}{/sequence/first()}
+ }
+ \ignorespaces
+ \egroup
+ \xmldoif{#1}{/arguments} {
+ \bgroup
+ \enablemode[setups-pass-one]
+ \doglobal\newcounter\currentSETUPargument
+ \ignorespaces
+ \xmlfilter{#1}{/arguments/text()}
+ \egroup
+ }
+ \doif {\xmlatt{#1}{type}} {environment} {
\bgroup
\enablemode[setups-pass-one]%
+ \hskip.5em\unknown\hskip.5em
\doif {\xmlatt{#1}{generated}} {yes} {
- \ttsl
+ \bssl
}
\letterbackslash
- \doif {\xmlatt{#1}{type}} {environment} {
- \doifsomethingelse {\xmlatt{#1}{begin}} {
- \xmllastatt
- } {
- \e!start
- }
+ \doifsomethingelse {\xmlatt{#1}{end}} {
+ \xmllastatt
+ } {
+ \e!stop
+ }
+ \xmldoifelseempty{#1}{/sequence} {
+ \xmlatt{#1}{name}
+ } {
+ \xmlfilter{#1}{/sequence/first()}
+ }
+ \ignorespaces
+ \egroup
+ }
+\stopxmlsetups
+
+\startxmlsetups xml:setups:typeset:raw
+ \tttf
+ \nohyphens
+ \veryraggedright
+ \doglobal\newcounter\currentSETUPargument
+ \xdef\maximumSETUPargument{\xmlfilter{#1}{/arguments/*/s_count()}}
+ \edef\currentSETUPhash{\xmlatt{#1}{hash}}
+ \bgroup
+ \enablemode[setups-pass-one]%
+ \doif {\xmlatt{#1}{generated}} {yes} {
+ \ttsl
+ }
+ \letterbackslash
+ \doif {\xmlatt{#1}{type}} {environment} {
+ \doifsomethingelse {\xmlatt{#1}{begin}} {
+ \xmllastatt
+ } {
+ \e!start
}
+ }
+ \xmldoifelseempty{#1}{/sequence} {
+ \xmlatt{#1}{name}
+ } {
\xmlfilter{#1}{/sequence/first()}
+ }
+ \ignorespaces
+ \egroup
+ \xmldoif{#1}{/arguments} {
+ \bgroup
+ \enablemode[setups-pass-one]
+ \doglobal\newcounter\currentSETUPargument
\ignorespaces
+ \xmlfilter{#1}{/arguments/text()}
\egroup
- \ifshortsetup
- % nothing
- \else
- \xmldoif{#1}{/arguments} {
- \bgroup
- \enablemode[setups-pass-one]
- \doglobal\newcounter\currentSETUPargument
- \ignorespaces
- \xmlfilter{#1}{/arguments/text()}
- \egroup
+ }
+ \doif {\xmlatt{#1}{type}} {environment} {
+ \bgroup
+ \enablemode[setups-pass-one]%
+ \hskip.5em\unknown\hskip.5em
+ \doif {\xmlatt{#1}{generated}} {yes} {
+ \ttsl
}
- \doif {\xmlatt{#1}{type}} {environment} {
- \bgroup
- \enablemode[setups-pass-one]%
- \hskip.5em\unknown\hskip.5em
- \doif {\xmlatt{#1}{generated}} {yes} {
- \ttsl
- }
- \letterbackslash
- \doifsomethingelse {\xmlatt{#1}{end}} {
- \xmllastatt
- } {
- \e!stop
- }
- \xmlfilter{#1}{/sequence/first()}
- \ignorespaces
- \egroup
+ \letterbackslash
+ \doifsomethingelse {\xmlatt{#1}{end}} {
+ \xmllastatt
+ } {
+ \e!stop
}
- \endgraf
- \xmldoif{#1}{/arguments} {
- \bgroup
- \enablemode[setups-pass-two]
- \doglobal\newcounter\currentSETUPargument
- %\blank[\v!line] % packed mode (we could do \startunpacked ...)
- \godown[.75\lineheight]
- \switchtobodyfont[\v!small]
- \ignorespaces\xmlfilter{#1}{/arguments/text()}\endgraf
- \egroup
+ \xmldoifelseempty{#1}{/sequence} {
+ \xmlatt{#1}{name}
+ } {
+ \xmlfilter{#1}{/sequence/first()}
}
- \fi
- \getvalue{\e!stop setuptext}
+ \ignorespaces
+ \egroup
+ }
+\stopxmlsetups
+
+\startxmlsetups xml:setups:typeset:detail
+ \xmldoif{#1}{/arguments} {
+ \bgroup
+ \enablemode[setups-pass-two]
+ \doglobal\newcounter\currentSETUPargument
+ %\blank[\v!line] % packed mode (we could do \startunpacked ...)
+ \godown[.75\lineheight]
+ \switchtobodyfont[\v!small]
+ \ignorespaces\xmlfilter{#1}{/arguments/text()}\endgraf
+ \egroup
+ }
+\stopxmlsetups
+
+\startxmlsetups xml:setups:typeset:yes
+ \ifcase\kindofsetup
+ \xmlsetup{#1}{xml:setups:typeset:line}
+ \or
+ \getvalue{\e!start setuptext}
+ \xmlsetup{#1}{xml:setups:typeset:raw}
+ \getvalue{\e!stop setuptext}
+ \or
+ \getvalue{\e!start setuptext}
+ \xmlsetup{#1}{xml:setups:typeset:raw}
+ \endgraf
+ \xmlsetup{#1}{xml:setups:typeset:detail}
+ \getvalue{\e!stop setuptext}
+ \fi
\stopxmlsetups
\setupsetup
@@ -869,6 +1011,10 @@
\xmlmapvalue{setups:method}{apply}{->}
\xmlmapvalue{setups:method}{none} {}
+\startxmlsetups xml:setups:constant:value
+ \c!setup!reserved!{\xmlatt{#1}{type}}
+\stopxmlsetups
+
\startxmlsetups xml:setups:constant
\doifelsemode {setups-pass-one} {
} {
@@ -913,31 +1059,62 @@
\blank[\v!halfline]
\ignorespaces}
-\c!setup!definereserved {cd:command} {\c!setup!internal!{\getmessage{setup}{command}}}
-\c!setup!definereserved {cd:dimension} {\c!setup!internal!{\getmessage{setup}{dimension}}}
-\c!setup!definereserved {cd:file} {\c!setup!internal!{\getmessage{setup}{file}}}
-\c!setup!definereserved {cd:name} {\c!setup!internal!{\getmessage{setup}{identifier}}}
-\c!setup!definereserved {cd:character} {\c!setup!internal!{\getmessage{setup}{character}}}
-\c!setup!definereserved {cd:mark} {\c!setup!internal!{\getmessage{setup}{mark}}}
-\c!setup!definereserved {cd:number} {\c!setup!internal!{\getmessage{setup}{number}}}
-\c!setup!definereserved {cd:reference} {\c!setup!internal!{\getmessage{setup}{reference}}}
-\c!setup!definereserved {cd:plural} {\c!setup!internal!{\getmessage{setup}{plural}}}
-\c!setup!definereserved {cd:singular} {\c!setup!internal!{\getmessage{setup}{singular}}}
-\c!setup!definereserved {cd:text} {\c!setup!internal!{\getmessage{setup}{text}}}
-\c!setup!definereserved {cd:formula} {\c!setup!internal!{\getmessage{setup}{formula}}}
-\c!setup!definereserved {cd:file} {\c!setup!internal!{\getmessage{setup}{file}}}
-\c!setup!definereserved {cd:matrix} {\c!setup!internal!{\getmessage{setup}{matrix}}}
-\c!setup!definereserved {cd:list} {\c!setup!internal!{\getmessage{setup}{list}}}
-\c!setup!definereserved {cd:section} {\c!setup!internal!{\getmessage{setup}{section}}}
-\c!setup!definereserved {cd:language} {\c!setup!internal!{\getmessage{setup}{language}}}
-\c!setup!definereserved {cd:section} {\c!setup!internal!{\getmessage{setup}{section}}}
-
-\c!setup!definereserved {cd:noargument} {\c!setup!command! {}}
-\c!setup!definereserved {cd:oneargument} {\c!setup!command! {\#1}}
-\c!setup!definereserved {cd:twoarguments} {\c!setup!command! {\#1\#2}}
-\c!setup!definereserved {cd:threearguments} {\c!setup!command! {\#1\#2\#3}}
-
-\c!setup!definereserved {cd:sign} {[-+]}
+\c!setup!definereserved {cd:command} {\c!setup!internal!{\getmessage{setup}{command}}}
+\c!setup!definereserved {cd:dimension} {\c!setup!internal!{\getmessage{setup}{dimension}}}
+\c!setup!definereserved {cd:file} {\c!setup!internal!{\getmessage{setup}{file}}}
+\c!setup!definereserved {cd:buffer} {\c!setup!internal!{\getmessage{setup}{buffer}}}
+\c!setup!definereserved {cd:name} {\c!setup!internal!{\getmessage{setup}{identifier}}}
+\c!setup!definereserved {cd:character} {\c!setup!internal!{\getmessage{setup}{character}}}
+\c!setup!definereserved {cd:mark} {\c!setup!internal!{\getmessage{setup}{mark}}}
+\c!setup!definereserved {cd:number} {\c!setup!internal!{\getmessage{setup}{number}}}
+\c!setup!definereserved {cd:first} {\c!setup!internal!{\getmessage{setup}{first}}}
+\c!setup!definereserved {cd:last} {\c!setup!internal!{\getmessage{setup}{last}}}
+\c!setup!definereserved {cd:reference} {\c!setup!internal!{\getmessage{setup}{reference}}}
+\c!setup!definereserved {cd:plural} {\c!setup!internal!{\getmessage{setup}{plural}}}
+\c!setup!definereserved {cd:singular} {\c!setup!internal!{\getmessage{setup}{singular}}}
+\c!setup!definereserved {cd:text} {\c!setup!internal!{\getmessage{setup}{text}}}
+\c!setup!definereserved {cd:formula} {\c!setup!internal!{\getmessage{setup}{formula}}}
+\c!setup!definereserved {cd:file} {\c!setup!internal!{\getmessage{setup}{file}}}
+\c!setup!definereserved {cd:matrix} {\c!setup!internal!{\getmessage{setup}{matrix}}}
+\c!setup!definereserved {cd:list} {\c!setup!internal!{\getmessage{setup}{list}}}
+\c!setup!definereserved {cd:section} {\c!setup!internal!{\getmessage{setup}{section}}}
+\c!setup!definereserved {cd:language} {\c!setup!internal!{\getmessage{setup}{language}}}
+\c!setup!definereserved {cd:section} {\c!setup!internal!{\getmessage{setup}{section}}}
+\c!setup!definereserved {cd:language} {\c!setup!internal!{\getmessage{setup}{language}}}
+\c!setup!definereserved {cd:processor} {\c!setup!internal!{\getmessage{setup}{processor}}}
+\c!setup!definereserved {cd:style} {\c!setup!internal!{\getmessage{setup}{style}}}
+\c!setup!definereserved {cd:font} {\c!setup!internal!{\getmessage{setup}{font}}}
+\c!setup!definereserved {cd:character} {\c!setup!internal!{\getmessage{setup}{character}}}
+\c!setup!definereserved {cd:userdata} {\c!setup!internal!{\getmessage{setup}{userdata}}}
+\c!setup!definereserved {cd:key} {\c!setup!internal!{\getmessage{setup}{key}}}
+\c!setup!definereserved {cd:value} {\c!setup!internal!{\getmessage{setup}{value}}}
+\c!setup!definereserved {cd:color} {\c!setup!internal!{\getmessage{setup}{color}}}
+\c!setup!definereserved {cd:template} {\c!setup!internal!{\getmessage{setup}{template}}}
+\c!setup!definereserved {cd:node} {\c!setup!internal!{\getmessage{setup}{node}}}
+\c!setup!definereserved {cd:lpath} {\c!setup!internal!{\getmessage{setup}{lpath}}}
+\c!setup!definereserved {cd:setup} {\c!setup!internal!{\getmessage{setup}{setup}}}
+\c!setup!definereserved {cd:xmlsetup} {\c!setup!internal!{\getmessage{setup}{xmlsetup}}}
+\c!setup!definereserved {cd:luafunction} {\c!setup!internal!{\getmessage{setup}{luafunction}}}
+\c!setup!definereserved {cd:marking} {\c!setup!internal!{\getmessage{setup}{marking}}}
+\c!setup!definereserved {cd:sectionblock} {\c!setup!internal!{\getmessage{setup}{sectionblock}}}
+\c!setup!definereserved {cd:row} {\c!setup!internal!{\getmessage{setup}{row}}}
+\c!setup!definereserved {cd:column} {\c!setup!internal!{\getmessage{setup}{column}}}
+\c!setup!definereserved {cd:url} {\c!setup!internal!{\getmessage{setup}{url}}}
+\c!setup!definereserved {cd:true} {\c!setup!internal!{\getmessage{setup}{true}}}
+\c!setup!definereserved {cd:false} {\c!setup!internal!{\getmessage{setup}{false}}}
+\c!setup!definereserved {cd:category} {\c!setup!internal!{\getmessage{setup}{category}}}
+
+\c!setup!definereserved {cd:noargument} {\c!setup!command! {\texescape}}
+\c!setup!definereserved {cd:oneargument} {\c!setup!command! {\texescape\texthash1}}
+\c!setup!definereserved {cd:twoarguments} {\c!setup!command! {\texescape\texthash1\texthash2}}
+\c!setup!definereserved {cd:threearguments} {\c!setup!command! {\texescape\texthash1\texthash2\texthash3}}
+
+\c!setup!definereserved {cd:noargumentwrapped} {\c!setup!command! {\texescape...}}
+\c!setup!definereserved {cd:oneargumentwrapped} {\c!setup!command! {\texescape...\texthash1}}
+\c!setup!definereserved {cd:twoargumentswrapped} {\c!setup!command! {\texescape...\texthash1\texthash2}}
+\c!setup!definereserved {cd:threeargumentswrapped} {\c!setup!command! {\texescape...\texthash1\texthash2\texthash3}}
+
+\c!setup!definereserved {cd:sign} {[-+]}
%D Auxiliary.
@@ -964,32 +1141,58 @@
\hss}%
\ht2\ht\strutbox
\dp4\dp\strutbox
- \hskip.5\emwidth\hsmash{\box0}\hsmash{\box4}\box2\relax
+ \hskip.5\emwidth
+ \wd0\zeropoint
+ \box0
+ \wd2\zeropoint
+ \box2
+ \box4%
\egroup
\ignorespaces}
+\unexpanded\def\showSETUPline#1%
+ {\kern.25\emwidth
+ #1%
+ \ignorespaces}
+
\unexpanded\def\showSETUPnumber
{\doglobal\increment\currentSETUPargument
- \hbox to 2em
+ \hbox to 2\emwidth
{\ifcase\maximumSETUPargument\relax
\or*\else\currentSETUPargument
\fi
\hss}}
\unexpanded\def\showSETUPassignment#1%
- {\showSETUP{#1}
- {[.\lower.5ex\hbox{=}.]}
- {[..,.\lower.5ex\hbox{=}.,..]}}
+ {\ifcase\kindofsetup
+ \showSETUPline{[.=.]}%
+ \else
+ \showSETUP{#1}
+ {[.=.]}
+ {[..,.=.,..]}%
+ \fi}
\unexpanded\def\showSETUPkeyword#1%
- {\showSETUP{#1}
- {[...]}
- {[...,...]}}
+ {\ifcase\kindofsetup
+ \showSETUPline{[...]}%
+ \else
+ \showSETUP{#1}
+ {[...]}
+ {[...,...]}%
+ \fi}
\unexpanded\def\showSETUPargument#1%
- {\showSETUP{#1}
- {\letterleftbrace..\letterrightbrace}
- {\letterleftbrace..,...,..\letterrightbrace}}
+ {\ifcase\kindofsetup
+ \showSETUPline{%
+ \letterleftbrace
+ \xmlfilter{#1}{/cd:constant/command(xml:setups:constant:value)}% always one
+ \letterrightbrace
+ }
+ \else
+ \showSETUP{#1}
+ {\letterleftbrace..\letterrightbrace}
+ {\letterleftbrace..,...,..\letterrightbrace}%
+ \fi}
\unexpanded\def\showSETUPdisplaymath#1%
{\showSETUP{#1}
@@ -1168,4 +1371,13 @@
\xmlsetup{\loadedsetups}{xml:setups:parameters:values}
\stoptexdefinition
+% official interface
+
+\unexpanded\def\cmdinternal#1%
+ {{\tttf\c!setup!reserved!{#1}}} % todo color .. highlight
+
+\let\cmdbasicsetup\basicsetup
+\let\cmdshortsetup\shortsetup
+\let\cmdfullsetup \showsetup
+
\protect \endinput
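
A usage sketch for the new rendering levels and the official aliases defined just above. Here \cmdinternal is fully defined in this module, while the braced command name passed to the other three is an assumption about what \doshowsetup (defined elsewhere) expects, and setupframed merely stands in for any documented command:

\starttyping
\cmdinternal  {cd:dimension}  % typesets the localized placeholder for a dimension
\cmdbasicsetup{setupframed}   % \kindofsetup 0: compact one-line rendering
\cmdshortsetup{setupframed}   % \kindofsetup 1: setup text without the detail pass
\cmdfullsetup {setupframed}   % \kindofsetup 2: setup text plus detailed arguments
\stoptyping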
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index 2b82ed01a..89137591a 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : c:/data/develop/context/sources/luatex-fonts-merged.lua
-- parent file : c:/data/develop/context/sources/luatex-fonts.lua
--- merge date : 01/18/16 22:21:50
+-- merge date : 01/28/16 22:35:09
do -- begin closure to overcome local limits and interference
diff --git a/tex/generic/context/luatex/luatex-gadgets.lua b/tex/generic/context/luatex/luatex-gadgets.lua
index 9bf423fda..8c835babb 100644
--- a/tex/generic/context/luatex/luatex-gadgets.lua
+++ b/tex/generic/context/luatex/luatex-gadgets.lua
@@ -73,3 +73,32 @@ function marking.remove(str)
end
end
end
+
+-- local imgscan = img.scan
+--
+-- local valid = {
+-- ["png"] = "^" .. string.char(0x89,0x50,0x4E,0x47,0x0D,0x0A,0x1A,0x0A),
+-- ["jpg"] = "^" .. string.char(0xFF,0xD8,0xFF),
+-- ["jp2"] = "^" .. string.char(0x00,0x00,0x00,0x0C,0x6A,0x50,0x20,0x20,0x0D,0x0A),
+-- ["pdf"] = "^" .. ".-%%PDF",
+-- }
+--
+-- function img.scan(t)
+-- if t and t.filename then
+-- local f = io.open(t.filename,"rb")
+-- if f then
+-- local d = f:read(4096)
+-- for k, v in next,valid do
+-- if string.find(d,v) then
+-- f:close() -- be nice
+-- return imgscan(t)
+-- end
+-- end
+-- f:close() -- be nice
+-- end
+-- end
+-- end
+--
+-- print(img.scan({filename = "hacker1b.tif"}))
+-- print(img.scan({filename = "cow.pdf"}))
+-- print(img.scan({filename = "mill.png"}))
diff --git a/web2c/contextcnf.lua b/web2c/contextcnf.lua
index af65f41d0..dee9170ef 100644
--- a/web2c/contextcnf.lua
+++ b/web2c/contextcnf.lua
@@ -83,6 +83,7 @@ return {
ENCFONTS = ".;$TEXMF/fonts/data//;$TEXMF/fonts/enc/{dvips,pdftex}//",
VFFONTS = ".;$TEXMF/fonts/{data,vf}//",
TFMFONTS = ".;$TEXMF/fonts/{data,tfm}//",
+ PKFONTS = ".;$TEXMF/fonts/{data,pk}//",
T1FONTS = ".;$TEXMF/fonts/{data,type1}//;$OSFONTDIR",
AFMFONTS = ".;$TEXMF/fonts/{data,afm}//;$OSFONTDIR",
TTFONTS = ".;$TEXMF/fonts/{data,truetype}//;$OSFONTDIR",