author     Context Git Mirror Bot <phg42.2a@gmail.com>  2015-05-09 14:15:05 +0200
committer  Context Git Mirror Bot <phg42.2a@gmail.com>  2015-05-09 14:15:05 +0200
commit     b2720858f841530581e009ae380e39b4267a3d5d (patch)
tree       4b1e59aacc68d05cbeabd0d2fb30c34dd6f6e754
parent     09c6bc4b280905c198d7e40b3b6c3addc6f975ca (diff)
download   context-b2720858f841530581e009ae380e39b4267a3d5d.tar.gz
2015-05-09 13:43:00
-rw-r--r--  context/data/scite/context/lexers/data/scite-context-data-context.lua  |    2
-rw-r--r--  context/data/scite/context/lexers/scite-context-lexer-lua.lua          |   19
-rw-r--r--  context/data/scite/context/lexers/scite-context-lexer-tex.lua          |   22
-rw-r--r--  context/data/scite/context/lexers/scite-context-lexer.lua              |  229
-rw-r--r--  context/data/scite/context/scite-context-data-context.properties       |   28
-rw-r--r--  tex/context/base/back-exp.lua                                           |  118
-rw-r--r--  tex/context/base/back-exp.mkiv                                          |    6
-rw-r--r--  tex/context/base/char-ini.lua                                           |    3
-rw-r--r--  tex/context/base/char-utf.lua                                           |   14
-rw-r--r--  tex/context/base/cont-new.mkiv                                          |    2
-rw-r--r--  tex/context/base/context-version.pdf                                    |  bin 4188 -> 4181 bytes
-rw-r--r--  tex/context/base/context.mkiv                                           |    2
-rw-r--r--  tex/context/base/core-uti.lua                                           |    2
-rw-r--r--  tex/context/base/font-odv.lua                                           |   18
-rw-r--r--  tex/context/base/lpdf-tag.lua                                           |   56
-rw-r--r--  tex/context/base/lpdf-wid.lua                                           |   44
-rw-r--r--  tex/context/base/lxml-ini.lua                                           |    1
-rw-r--r--  tex/context/base/lxml-ini.mkiv                                          |    2
-rw-r--r--  tex/context/base/lxml-tex.lua                                           |   16
-rw-r--r--  tex/context/base/m-visual.mkiv                                          |    4
-rw-r--r--  tex/context/base/math-ini.lua                                           |   37
-rw-r--r--  tex/context/base/math-ini.mkiv                                          |  171
-rw-r--r--  tex/context/base/mult-def.mkiv                                          |    6
-rw-r--r--  tex/context/base/mult-low.lua                                           |    2
-rw-r--r--  tex/context/base/publ-aut.lua                                           |  269
-rw-r--r--  tex/context/base/publ-imp-apa.mkvi                                      |   20
-rw-r--r--  tex/context/base/publ-imp-aps.mkvi                                      |   15
-rw-r--r--  tex/context/base/publ-imp-author.mkvi                                   |    4
-rw-r--r--  tex/context/base/publ-imp-cite.mkvi                                     |   13
-rw-r--r--  tex/context/base/publ-imp-default.mkvi                                  |   20
-rw-r--r--  tex/context/base/publ-imp-list.mkvi                                     |    6
-rw-r--r--  tex/context/base/publ-ini.lua                                           |   82
-rw-r--r--  tex/context/base/publ-ini.mkiv                                          |   44
-rw-r--r--  tex/context/base/publ-sor.lua                                           |  180
-rw-r--r--  tex/context/base/spac-ali.mkiv                                          |    4
-rw-r--r--  tex/context/base/status-files.pdf                                       |  bin 24439 -> 24449 bytes
-rw-r--r--  tex/context/base/status-lua.pdf                                         |  bin 250844 -> 250941 bytes
-rw-r--r--  tex/context/base/tabl-ntb.mkiv                                          |   80
-rw-r--r--  tex/context/base/x-asciimath.lua                                        |   56
-rw-r--r--  tex/context/interface/cont-cs.xml                                       |    1
-rw-r--r--  tex/context/interface/cont-de.xml                                       |    1
-rw-r--r--  tex/context/interface/cont-en.xml                                       |    1
-rw-r--r--  tex/context/interface/cont-fr.xml                                       |    1
-rw-r--r--  tex/context/interface/cont-it.xml                                       |    1
-rw-r--r--  tex/context/interface/cont-nl.xml                                       |    1
-rw-r--r--  tex/context/interface/cont-pe.xml                                       |    1
-rw-r--r--  tex/context/interface/cont-ro.xml                                       |    1
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua                      |    2
48 files changed, 1162 insertions, 445 deletions
diff --git a/context/data/scite/context/lexers/data/scite-context-data-context.lua b/context/data/scite/context/lexers/data/scite-context-data-context.lua
index 0dc4c4af0..0fe56100b 100644
--- a/context/data/scite/context/lexers/data/scite-context-data-context.lua
+++ b/context/data/scite/context/lexers/data/scite-context-data-context.lua
@@ -1,4 +1,4 @@
return {
- ["constants"]={ "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive", "plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred", "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", "maxcardminusone", "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint", "points", "halfpoint", "zeroskip", "zeromuskip", "onemuskip", "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi", "normalpagebox", "endoflinetoken", "outputnewlinechar", "emptytoks", "empty", "undefined", "voidbox", "emptybox", "emptyvbox", "emptyhbox", "bigskipamount", "medskipamount", "smallskipamount", "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion", "luatexengine", "pdftexengine", "xetexengine", "unknownengine", "activecatcode", "bgroup", "egroup", "endline", "conditionaltrue", "conditionalfalse", "attributeunsetvalue", "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle", "inicatcodes", "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes", "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes", "xmlcatcodes", "ctdcatcodes", "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode", "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode", "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode", "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode", "spaceasciicode", "hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode", "colonasciicode", "backslashasciicode", "circumflexasciicode", "underscoreasciicode", "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode", "lessthanasciicode", "morethanasciicode", "doublecommentsignal", "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode", "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode", "primeasciicode", "hyphenasciicode", "activemathcharcode", "activetabtoken", "activeformfeedtoken", "activeendoflinetoken", "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode", "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode", "vtopgroupcode", "aligngroupcode", "noaligngroupcode", "outputgroupcode", "mathgroupcode", "discretionarygroupcode", "insertgroupcode", "vcentergroupcode", "mathchoicegroupcode", "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "vadjustgroupcode", "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode", "adjustnodecode", "ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode", "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode", "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode", "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", "eofifcode", "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode", "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink", "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint", "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace", "mathsupdisplay", "mathsupnormal", 
"mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight", "muquad", "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset", "doifmode", "doifelsemode", "doifmodeelse", "doifnotmode", "startmodeset", "stopmodeset", "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifelseallmodes", "doifallmodeselse", "doifnotallmodes", "startenvironment", "stopenvironment", "environment", "startcomponent", "stopcomponent", "component", "startproduct", "stopproduct", "product", "startproject", "stopproject", "project", "starttext", "stoptext", "startnotext", "stopnotext", "startdocument", "stopdocument", "documentvariable", "setupdocument", "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule", "setupmodule", "currentmoduleparameter", "moduleparameter", "everystarttext", "startTEXpage", "stopTEXpage", "enablemode", "disablemode", "preventmode", "definemode", "globalenablemode", "globaldisablemode", "globalpreventmode", "pushmode", "popmode", "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix", "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode", "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode", "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode", "constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument", "continueifinputfile", "luastringsep", "!!bs", "!!es", "lefttorightmark", "righttoleftmark", "breakablethinspace", "nobreakspace", "nonbreakablespace", "narrownobreakspace", "zerowidthnobreakspace", "ideographicspace", "ideographichalffillspace", "twoperemspace", "threeperemspace", "fourperemspace", "fiveperemspace", "sixperemspace", "figurespace", "punctuationspace", "hairspace", "zerowidthspace", "zerowidthnonjoiner", "zerowidthjoiner", "zwnj", "zwj", "optionalspace", "asciispacechar" },
+ ["constants"]={ "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive", "plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred", "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", "maxcardminusone", "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint", "points", "halfpoint", "zeroskip", "zeromuskip", "onemuskip", "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi", "normalpagebox", "endoflinetoken", "outputnewlinechar", "emptytoks", "empty", "undefined", "voidbox", "emptybox", "emptyvbox", "emptyhbox", "bigskipamount", "medskipamount", "smallskipamount", "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion", "luatexengine", "pdftexengine", "xetexengine", "unknownengine", "activecatcode", "bgroup", "egroup", "endline", "conditionaltrue", "conditionalfalse", "attributeunsetvalue", "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle", "inicatcodes", "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes", "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes", "xmlcatcodes", "ctdcatcodes", "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode", "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode", "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode", "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode", "spaceasciicode", "hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode", "colonasciicode", "backslashasciicode", "circumflexasciicode", "underscoreasciicode", "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode", "lessthanasciicode", "morethanasciicode", "doublecommentsignal", "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode", "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode", "primeasciicode", "hyphenasciicode", "activemathcharcode", "activetabtoken", "activeformfeedtoken", "activeendoflinetoken", "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode", "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode", "vtopgroupcode", "aligngroupcode", "noaligngroupcode", "outputgroupcode", "mathgroupcode", "discretionarygroupcode", "insertgroupcode", "vcentergroupcode", "mathchoicegroupcode", "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "vadjustgroupcode", "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode", "adjustnodecode", "ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode", "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode", "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode", "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", "eofifcode", "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode", "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink", "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint", "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace", "mathsupdisplay", "mathsupnormal", 
"mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight", "muquad", "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset", "doifmode", "doifelsemode", "doifmodeelse", "doifnotmode", "startmodeset", "stopmodeset", "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifelseallmodes", "doifallmodeselse", "doifnotallmodes", "startenvironment", "stopenvironment", "environment", "startcomponent", "stopcomponent", "component", "startproduct", "stopproduct", "product", "startproject", "stopproject", "project", "starttext", "stoptext", "startnotext", "stopnotext", "startdocument", "stopdocument", "documentvariable", "setupdocument", "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule", "setupmodule", "currentmoduleparameter", "moduleparameter", "everystarttext", "everystoptext", "startTEXpage", "stopTEXpage", "enablemode", "disablemode", "preventmode", "definemode", "globalenablemode", "globaldisablemode", "globalpreventmode", "pushmode", "popmode", "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix", "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode", "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode", "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode", "constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument", "continueifinputfile", "luastringsep", "!!bs", "!!es", "lefttorightmark", "righttoleftmark", "breakablethinspace", "nobreakspace", "nonbreakablespace", "narrownobreakspace", "zerowidthnobreakspace", "ideographicspace", "ideographichalffillspace", "twoperemspace", "threeperemspace", "fourperemspace", "fiveperemspace", "sixperemspace", "figurespace", "punctuationspace", "hairspace", "zerowidthspace", "zerowidthnonjoiner", "zerowidthjoiner", "zwnj", "zwj", "optionalspace", "asciispacechar" },
["helpers"]={ "startsetups", "stopsetups", "startxmlsetups", "stopxmlsetups", "startluasetups", "stopluasetups", "starttexsetups", "stoptexsetups", "startrawsetups", "stoprawsetups", "startlocalsetups", "stoplocalsetups", "starttexdefinition", "stoptexdefinition", "starttexcode", "stoptexcode", "startcontextcode", "stopcontextcode", "startcontextdefinitioncode", "stopcontextdefinitioncode", "texdefinition", "doifelsesetups", "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "fastsetup", "doifelsecommandhandler", "doifcommandhandlerelse", "doifnotcommandhandler", "doifcommandhandler", "newmode", "setmode", "resetmode", "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode", "booleanmodevalue", "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", "newlanguage", "newfamily", "newfam", "newhelp", "then", "begcsname", "strippedcsname", "checkedstrippedcsname", "firstargumentfalse", "firstargumenttrue", "secondargumentfalse", "secondargumenttrue", "thirdargumentfalse", "thirdargumenttrue", "fourthargumentfalse", "fourthargumenttrue", "fifthargumentfalse", "fifthsargumenttrue", "sixthargumentfalse", "sixtsargumenttrue", "doglobal", "dodoglobal", "redoglobal", "resetglobal", "donothing", "dontcomplain", "forgetall", "donetrue", "donefalse", "inlineordisplaymath", "indisplaymath", "forcedisplaymath", "startforceddisplaymath", "stopforceddisplaymath", "reqno", "htdp", "unvoidbox", "hfilll", "vfilll", "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha", "currentcatcodetable", "defaultcatcodetable", "catcodetablename", "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", "pushcatcodetable", "popcatcodetable", "restorecatcodes", "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledmbox", "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", "ruledpenalty", "filledhboxb", "filledhboxr", "filledhboxg", "filledhboxc", "filledhboxm", "filledhboxy", "filledhboxk", "scratchcounter", "globalscratchcounter", "scratchdimen", "globalscratchdimen", "scratchskip", "globalscratchskip", "scratchmuskip", "globalscratchmuskip", "scratchtoks", "globalscratchtoks", "scratchbox", "globalscratchbox", "normalbaselineskip", "normallineskip", "normallineskiplimit", "availablehsize", "localhsize", "setlocalhsize", "distributedhsize", "hsizefraction", "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", "scratchhsize", "scratchvsize", "scratchxoffset", "scratchyoffset", "scratchhoffset", "scratchvoffset", "scratchxposition", "scratchyposition", "scratchtopoffset", "scratchbottomoffset", "scratchleftoffset", "scratchrightoffset", "scratchcounterone", "scratchcountertwo", "scratchcounterthree", "scratchdimenone", "scratchdimentwo", "scratchdimenthree", "scratchskipone", "scratchskiptwo", "scratchskipthree", "scratchmuskipone", 
"scratchmuskiptwo", "scratchmuskipthree", "scratchtoksone", "scratchtokstwo", "scratchtoksthree", "scratchboxone", "scratchboxtwo", "scratchboxthree", "scratchnx", "scratchny", "scratchmx", "scratchmy", "scratchunicode", "scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip", "doif", "doifnot", "doifelse", "doifinset", "doifnotinset", "doifelseinset", "doifinsetelse", "doifelsenextchar", "doifnextcharelse", "doifelsenextoptional", "doifnextoptionalelse", "doifelsenextoptionalcs", "doifnextoptionalcselse", "doifelsefastoptionalcheck", "doiffastoptionalcheckelse", "doifelsenextbgroup", "doifnextbgroupelse", "doifelsenextbgroupcs", "doifnextbgroupcselse", "doifelsenextparenthesis", "doifnextparenthesiselse", "doifelseundefined", "doifundefinedelse", "doifelsedefined", "doifdefinedelse", "doifundefined", "doifdefined", "doifelsevalue", "doifvalue", "doifnotvalue", "doifnothing", "doifsomething", "doifelsenothing", "doifnothingelse", "doifelsesomething", "doifsomethingelse", "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", "doifvaluenothingelse", "doifelsedimension", "doifdimensionelse", "doifelsenumber", "doifnumberelse", "doifnumber", "doifnotnumber", "doifelsecommon", "doifcommonelse", "doifcommon", "doifnotcommon", "doifinstring", "doifnotinstring", "doifelseinstring", "doifinstringelse", "doifelseassignment", "doifassignmentelse", "docheckassignment", "tracingall", "tracingnone", "loggingall", "removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace", "nbsp", "obeyspaces", "obeylines", "obeyedspace", "obeyedline", "obeyedtab", "obeyedpage", "normalspace", "executeifdefined", "singleexpandafter", "doubleexpandafter", "tripleexpandafter", "dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces", "wait", "writestatus", "define", "defineexpandable", "redefine", "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "freezemeasure", "measure", "measured", "installcorenamespace", "getvalue", "getuvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", "resetvalue", "undefinevalue", "ignorevalue", "setuvalue", "setuevalue", "setugvalue", "setuxvalue", "globallet", "glet", "udef", "ugdef", "uedef", "uxdef", "checked", "unique", "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters", "getdummyparameters", "dummyparameter", "directdummyparameter", "setdummyparameter", "letdummyparameter", "usedummystyleandcolor", "usedummystyleparameter", "usedummycolorparameter", "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect", "firstofoneargument", "firstoftwoarguments", "secondoftwoarguments", "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", "firstofoneunexpanded", "firstoftwounexpanded", "secondoftwounexpanded", 
"firstofthreeunexpanded", "secondofthreeunexpanded", "thirdofthreeunexpanded", "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments", "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith", "newconstant", "setnewconstant", "setconstant", "setconstantvalue", "newconditional", "settrue", "setfalse", "settruevalue", "setfalsevalue", "newmacro", "setnewmacro", "newfraction", "newsignal", "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "doquintupleargument", "dosixtupleargument", "doseventupleargument", "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", "permitspacesbetweengroups", "dontpermitspacesbetweengroups", "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", "modulonumber", "dividenumber", "getfirstcharacter", "doifelsefirstchar", "doiffirstcharelse", "startnointerference", "stopnointerference", "twodigits", "threedigits", "leftorright", "offinterlineskip", "oninterlineskip", "nointerlineskip", "strut", "halfstrut", "quarterstrut", "depthstrut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight", "ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing", "ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing", "opordspacing", "opopspacing", "opbinspacing", "oprelspacing", "opopenspacing", "opclosespacing", "oppunctspacing", "opinnerspacing", "binordspacing", "binopspacing", "binbinspacing", "binrelspacing", "binopenspacing", "binclosespacing", "binpunctspacing", "bininnerspacing", "relordspacing", "relopspacing", "relbinspacing", "relrelspacing", "relopenspacing", "relclosespacing", "relpunctspacing", "relinnerspacing", "openordspacing", "openopspacing", "openbinspacing", "openrelspacing", "openopenspacing", "openclosespacing", "openpunctspacing", "openinnerspacing", "closeordspacing", "closeopspacing", "closebinspacing", "closerelspacing", "closeopenspacing", "closeclosespacing", "closepunctspacing", "closeinnerspacing", "punctordspacing", "punctopspacing", "punctbinspacing", "punctrelspacing", "punctopenspacing", "punctclosespacing", "punctpunctspacing", "punctinnerspacing", "innerordspacing", "inneropspacing", "innerbinspacing", "innerrelspacing", "inneropenspacing", "innerclosespacing", "innerpunctspacing", "innerinnerspacing", "normalreqno", "startimath", "stopimath", "normalstartimath", "normalstopimath", "startdmath", "stopdmath", "normalstartdmath", "normalstopdmath", "uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette", "mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox", "mathtext", "setmathsmalltextbox", "setmathtextbox", "pushmathstyle", "popmathstyle", "triggerdisplaystyle", "triggertextstyle", "triggerscriptstyle", "triggerscriptscriptstyle", "triggeruncrampedstyle", "triggercrampedstyle", "triggersmallstyle", "triggeruncrampedsmallstyle", 
"triggercrampedsmallstyle", "triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle", "luaexpr", "expelsedoif", "expdoif", "expdoifnot", "expdoifelsecommon", "expdoifcommonelse", "expdoifelseinset", "expdoifinsetelse", "ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport", "ctxlua", "luacode", "lateluacode", "directluacode", "registerctxluafile", "ctxloadluafile", "luaversion", "luamajorversion", "luaminorversion", "ctxluacode", "luaconditional", "luaexpanded", "startluaparameterset", "stopluaparameterset", "luaparameterset", "definenamedlua", "obeylualines", "obeyluatokens", "startluacode", "stopluacode", "startlua", "stoplua", "startctxfunction", "stopctxfunction", "ctxfunction", "startctxfunctiondefinition", "stopctxfunctiondefinition", "installctxfunction", "carryoverpar", "assumelongusagecs", "Umathbotaccent", "righttolefthbox", "lefttorighthbox", "righttoleftvbox", "lefttorightvbox", "righttoleftvtop", "lefttorightvtop", "rtlhbox", "ltrhbox", "rtlvbox", "ltrvbox", "rtlvtop", "ltrvtop", "autodirhbox", "autodirvbox", "autodirvtop", "lefttoright", "righttoleft", "synchronizelayoutdirection", "synchronizedisplaydirection", "synchronizeinlinedirection", "lesshyphens", "morehyphens", "nohyphens", "dohyphens", "Ucheckedstartdisplaymath", "Ucheckedstopdisplaymath", "nobreak", "allowbreak", "goodbreak" },
 }
\ No newline at end of file
diff --git a/context/data/scite/context/lexers/scite-context-lexer-lua.lua b/context/data/scite/context/lexers/scite-context-lexer-lua.lua
index 3d5d18fc8..9bee74845 100644
--- a/context/data/scite/context/lexers/scite-context-lexer-lua.lua
+++ b/context/data/scite/context/lexers/scite-context-lexer-lua.lua
@@ -165,6 +165,7 @@ lexer.embed_lexer(lualexer, stringlexer, token("quote",longtwostart), token("str
local integer = P("-")^-1 * (patterns.hexadecimal + patterns.decimal)
local number = token("number", patterns.float + integer)
+ * (token("error",R("AZ","az","__")^1))^0
-- officially 127-255 are ok but not utf so useless
@@ -197,12 +198,20 @@ local gotolabel = token("keyword", P("::"))
* (spacing + shortcomment)^0
* token("keyword", P("::"))
-local p_keywords = exact_match(keywords)
-local p_functions = exact_match(functions)
-local p_constants = exact_match(constants)
+----- p_keywords = exact_match(keywords)
+----- p_functions = exact_match(functions)
+----- p_constants = exact_match(constants)
+----- p_internals = P("__")
+----- * exact_match(internals)
+
+local p_finish = #(1-R("az","AZ","__"))
+local p_keywords = lexer.helpers.utfchartabletopattern(keywords) * p_finish -- exact_match(keywords)
+local p_functions = lexer.helpers.utfchartabletopattern(functions) * p_finish -- exact_match(functions)
+local p_constants = lexer.helpers.utfchartabletopattern(constants) * p_finish -- exact_match(constants)
local p_internals = P("__")
- * exact_match(internals)
-local p_csnames = just_match(csnames)
+ * lexer.helpers.utfchartabletopattern(internals) * p_finish -- exact_match(internals)
+
+local p_csnames = lexer.helpers.utfchartabletopattern(csnames) * p_finish -- just_match(csnames)
local keyword = token("keyword", p_keywords)
local builtin = token("plain", p_functions)
local constant = token("data", p_constants)
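
The hunk above drops exact_match in favour of patterns built straight from the keyword tables plus p_finish, a lookahead that insists the next character is not part of an identifier. A minimal plain-LPeg sketch of that boundary idea, using a hypothetical three-word list (the real helpers.utfchartabletopattern additionally builds a character tree so shared prefixes and UTF-8 sequences are handled, see the lexer diff further down):

    local lpeg = require("lpeg")
    local P, R = lpeg.P, lpeg.R

    local words    = { "local", "function", "end" }   -- hypothetical keyword list
    local p_finish = #(1 - R("az","AZ","__"))         -- next char must not be a letter or underscore

    local p_words = P(false)
    for i=1,#words do
        p_words = p_words + P(words[i])               -- flat ordered choice, fine for this tiny list
    end
    p_words = p_words * p_finish

    print(lpeg.match(p_words,"end;"))      -- 4: "end" matched, ";" is an acceptable boundary
    print(lpeg.match(p_words,"endless"))   -- nil: "end" is followed by a letter, so no keyword
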
diff --git a/context/data/scite/context/lexers/scite-context-lexer-tex.lua b/context/data/scite/context/lexers/scite-context-lexer-tex.lua
index b109630ce..ad73f4217 100644
--- a/context/data/scite/context/lexers/scite-context-lexer-tex.lua
+++ b/context/data/scite/context/lexers/scite-context-lexer-tex.lua
@@ -214,10 +214,16 @@ local p_rest = any
local p_preamble = knownpreamble
local p_comment = commentline
-local p_command = backslash * knowncommand
-local p_constant = backslash * exact_match(constants)
-local p_helper = backslash * exact_match(helpers)
-local p_primitive = backslash * exact_match(primitives)
+----- p_command = backslash * knowncommand
+----- p_constant = backslash * exact_match(constants)
+----- p_helper = backslash * exact_match(helpers)
+----- p_primitive = backslash * exact_match(primitives)
+
+local p_command = backslash * lexer.helpers.utfchartabletopattern(currentcommands) * #(1-cstoken)
+local p_constant = backslash * lexer.helpers.utfchartabletopattern(constants) * #(1-cstoken)
+local p_helper = backslash * lexer.helpers.utfchartabletopattern(helpers) * #(1-cstoken)
+local p_primitive = backslash * lexer.helpers.utfchartabletopattern(primitives) * #(1-cstoken)
+
local p_ifprimitive = P("\\if") * cstoken^1
local p_csname = backslash * (cstoken^1 + P(1))
local p_grouping = S("{$}")
@@ -422,7 +428,10 @@ local metafunenvironment = metafuncall -- ( P("use") + P("reusable") + P("un
local startmetafun = P("\\start") * metafunenvironment
local stopmetafun = P("\\stop") * metafunenvironment -- todo match start
-local xmlmacro = token("embedded", P("\\xml") * R("az")^1)
+----- subsystem = token("embedded", P("\\xml") * R("az")^1 + (P("\\st") * (P("art") + P("op")) * P("xmlsetups")))
+local subsystemtags = P("xml") + P("btx") -- will be pluggable or maybe even a proper list of valid commands
+local subsystemmacro = P("\\") * (subsystemtags * R("az")^1 + (R("az")-subsystemtags)^1 * subsystemtags * R("az")^1)
+local subsystem = token("embedded", subsystemmacro)
local openargument = token("special", P("{"))
local closeargument = token("special", P("}"))
@@ -446,12 +455,13 @@ contextlexer._rules = {
{ "text", text }, -- non words
{ "comment", comment },
{ "constant", constant },
- { "xmlmacro", xmlmacro },
+ -- { "subsystem", subsystem },
{ "callers", callers },
{ "helper", helper },
{ "command", command },
{ "primitive", primitive },
{ "ifprimitive", ifprimitive },
+ { "subsystem", subsystem },
{ "reserved", reserved },
{ "csname", csname },
-- { "whatever", specialword }, -- not yet, crashes
diff --git a/context/data/scite/context/lexers/scite-context-lexer.lua b/context/data/scite/context/lexers/scite-context-lexer.lua
index 6335af911..4d401ac7b 100644
--- a/context/data/scite/context/lexers/scite-context-lexer.lua
+++ b/context/data/scite/context/lexers/scite-context-lexer.lua
@@ -13,6 +13,8 @@ local info = {
-- todo: make sure we can run in one state .. copies or shared?
-- todo: auto-nesting
+if lpeg.setmaxstack then lpeg.setmaxstack(1000) end
+
local log = false
local trace = false
local detail = false
@@ -200,11 +202,13 @@ local inspect = false -- can save some 15% (maybe easier on scintilla)
-- reload the word lists each time. (In the past I assumed a shared instance and took
-- some precautions.)
+-- todo: make sure we don't overload context definitions when used in context
+
local lpeg = require("lpeg")
local global = _G
local find, gmatch, match, lower, upper, gsub, sub, format = string.find, string.gmatch, string.match, string.lower, string.upper, string.gsub, string.sub, string.format
-local concat = table.concat
+local concat, sort = table.concat, table.sort
local type, next, setmetatable, rawset, tonumber, tostring = type, next, setmetatable, rawset, tonumber, tostring
local R, P, S, V, C, Cp, Cs, Ct, Cmt, Cc, Cf, Cg, Carg = lpeg.R, lpeg.P, lpeg.S, lpeg.V, lpeg.C, lpeg.Cp, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Cf, lpeg.Cg, lpeg.Carg
local lpegmatch = lpeg.match
@@ -263,7 +267,9 @@ end
local lexers = { }
local context = { }
+local helpers = { }
lexers.context = context
+lexers.helpers = helpers
local patterns = { }
context.patterns = patterns -- todo: lexers.patterns
@@ -279,6 +285,21 @@ if resolvers then
-- todo: set report
end
+local function sortedkeys(hash) -- simple version, good enough for here
+ local t, n = { }, 0
+ for k, v in next, hash do
+ t[#t+1] = k
+ local l = #tostring(k)
+ if l > n then
+ n = l
+ end
+ end
+ sort(t)
+ return t, n
+end
+
+helpers.sortedkeys = sortedkeys
+
local usedlexers = { }
local parent_lexer = nil
@@ -548,19 +569,6 @@ end
context.toproperty = toproperty
context.tostyles = tostyles
-local function sortedkeys(hash)
- local t, n = { }, 0
- for k, v in next, hash do
- t[#t+1] = k
- local l = #tostring(k)
- if l > n then
- n = l
- end
- end
- table.sort(t)
- return t, n
-end
-
-- If we had one instance/state of Lua as well as all regular libraries
-- preloaded we could use the context base libraries. So, let's go poor-
-- mans solution now.
@@ -1834,43 +1842,194 @@ do
context.utfchar = utfchar
- -- a helper from l-lpeg:
+ -- -- the next one is good enough for use here but not perfect (see context for a
+ -- -- better one)
+ --
+ -- local function make(t)
+ -- local p
+ -- for k, v in next, t do
+ -- if not p then
+ -- if next(v) then
+ -- p = P(k) * make(v)
+ -- else
+ -- p = P(k)
+ -- end
+ -- else
+ -- if next(v) then
+ -- p = p + P(k) * make(v)
+ -- else
+ -- p = p + P(k)
+ -- end
+ -- end
+ -- end
+ -- return p
+ -- end
+ --
+ -- function lpeg.utfchartabletopattern(list)
+ -- local tree = { }
+ -- for i=1,#list do
+ -- local t = tree
+ -- for c in gmatch(list[i],".") do
+ -- if not t[c] then
+ -- t[c] = { }
+ -- end
+ -- t = t[c]
+ -- end
+ -- end
+ -- return make(tree)
+ -- end
+
+ helpers.utfcharpattern = P(1) * R("\128\191")^0 -- unchecked but fast
+
+ local p_false = P(false)
+ local p_true = P(true)
local function make(t)
- local p
- for k, v in next, t do
- if not p then
- if next(v) then
- p = P(k) * make(v)
- else
- p = P(k)
+ local function making(t)
+ local p = p_false
+ local keys = sortedkeys(t)
+ for i=1,#keys do
+ local k = keys[i]
+ if k ~= "" then
+ local v = t[k]
+ if v == true then
+ p = p + P(k) * p_true
+ elseif v == false then
+ -- can't happen
+ else
+ p = p + P(k) * making(v)
+ end
end
- else
- if next(v) then
- p = p + P(k) * make(v)
+ end
+ if t[""] then
+ p = p + p_true
+ end
+ return p
+ end
+ local p = p_false
+ local keys = sortedkeys(t)
+ for i=1,#keys do
+ local k = keys[i]
+ if k ~= "" then
+ local v = t[k]
+ if v == true then
+ p = p + P(k) * p_true
+ elseif v == false then
+ -- can't happen
else
- p = p + P(k)
+ p = p + P(k) * making(v)
end
end
end
return p
end
- function lpeg.utfchartabletopattern(list)
+ local function collapse(t,x)
+ if type(t) ~= "table" then
+ return t, x
+ else
+ local n = next(t)
+ if n == nil then
+ return t, x
+ elseif next(t,n) == nil then
+ -- one entry
+ local k = n
+ local v = t[k]
+ if type(v) == "table" then
+ return collapse(v,x..k)
+ else
+ return v, x .. k
+ end
+ else
+ local tt = { }
+ for k, v in next, t do
+ local vv, kk = collapse(v,k)
+ tt[kk] = vv
+ end
+ return tt, x
+ end
+ end
+ end
+
+ function helpers.utfchartabletopattern(list)
local tree = { }
- for i=1,#list do
- local t = tree
- for c in gmatch(list[i],".") do
- if not t[c] then
- t[c] = { }
+ local n = #list
+ if n == 0 then
+ for s in next, list do
+ local t = tree
+ local p, pk
+ for c in gmatch(s,".") do
+ if t == true then
+ t = { [c] = true, [""] = true }
+ p[pk] = t
+ p = t
+ t = false
+ elseif t == false then
+ t = { [c] = false }
+ p[pk] = t
+ p = t
+ t = false
+ else
+ local tc = t[c]
+ if not tc then
+ tc = false
+ t[c] = false
+ end
+ p = t
+ t = tc
+ end
+ pk = c
+ end
+ if t == false then
+ p[pk] = true
+ elseif t == true then
+ -- okay
+ else
+ t[""] = true
+ end
+ end
+ else
+ for i=1,n do
+ local s = list[i]
+ local t = tree
+ local p, pk
+ for c in gmatch(s,".") do
+ if t == true then
+ t = { [c] = true, [""] = true }
+ p[pk] = t
+ p = t
+ t = false
+ elseif t == false then
+ t = { [c] = false }
+ p[pk] = t
+ p = t
+ t = false
+ else
+ local tc = t[c]
+ if not tc then
+ tc = false
+ t[c] = false
+ end
+ p = t
+ t = tc
+ end
+ pk = c
+ end
+ if t == false then
+ p[pk] = true
+ elseif t == true then
+ -- okay
+ else
+ t[""] = true
end
- t = t[c]
end
end
+ collapse(tree,"")
+ -- inspect(tree)
return make(tree)
end
- patterns.invisibles = lpeg.utfchartabletopattern {
+ patterns.invisibles = helpers.utfchartabletopattern {
utfchar(0x00A0), -- nbsp
utfchar(0x2000), -- enquad
utfchar(0x2001), -- emquad
@@ -1895,7 +2054,7 @@ do
end
--- The following helpers are not used, partyally replace by other mechanism and
+-- The following helpers are not used, partially replaced by other mechanisms and
-- when needed I'll first optimize them. I only made them somewhat more readable.
function lexers.delimited_range(chars, single_line, no_escape, balanced) -- unchanged
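
Most of the rewrite above replaces the simple recursive make with a sorted, collapsed character tree. The reason is a PEG property: an ordered choice commits to the first alternative that succeeds, so a flat list of words can miss a longer word that shares a prefix with a shorter one. A plain-LPeg illustration with two hypothetical words:

    local lpeg = require("lpeg")
    local P, R = lpeg.P, lpeg.R
    local boundary = #(1 - R("az","AZ"))

    -- flat choice: once "do" succeeds, lpeg never comes back to try "done"
    local flat = (P("do") + P("done")) * boundary
    print(lpeg.match(flat,"done "))    -- nil: "do" matched and committed, the boundary then sees "n"

    -- tree shape: shared prefix factored out, empty continuation as fallback
    local tree = P("do") * (P("ne") + P(true)) * boundary
    print(lpeg.match(tree,"done "))    -- 5
    print(lpeg.match(tree,"do "))      -- 3
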
diff --git a/context/data/scite/context/scite-context-data-context.properties b/context/data/scite/context/scite-context-data-context.properties
index 6cd49449c..717b08c80 100644
--- a/context/data/scite/context/scite-context-data-context.properties
+++ b/context/data/scite/context/scite-context-data-context.properties
@@ -51,20 +51,20 @@ stopproduct product startproject stopproject project \
starttext stoptext startnotext stopnotext startdocument \
stopdocument documentvariable setupdocument startmodule stopmodule \
usemodule usetexmodule useluamodule setupmodule currentmoduleparameter \
-moduleparameter everystarttext startTEXpage stopTEXpage enablemode \
-disablemode preventmode definemode globalenablemode globaldisablemode \
-globalpreventmode pushmode popmode typescriptone typescripttwo \
-typescriptthree mathsizesuffix mathordcode mathopcode mathbincode \
-mathrelcode mathopencode mathclosecode mathpunctcode mathalphacode \
-mathinnercode mathnothingcode mathlimopcode mathnolopcode mathboxcode \
-mathchoicecode mathaccentcode mathradicalcode constantnumber constantnumberargument \
-constantdimen constantdimenargument constantemptyargument continueifinputfile luastringsep \
-!!bs !!es lefttorightmark righttoleftmark breakablethinspace \
-nobreakspace nonbreakablespace narrownobreakspace zerowidthnobreakspace ideographicspace \
-ideographichalffillspace twoperemspace threeperemspace fourperemspace fiveperemspace \
-sixperemspace figurespace punctuationspace hairspace zerowidthspace \
-zerowidthnonjoiner zerowidthjoiner zwnj zwj optionalspace \
-asciispacechar
+moduleparameter everystarttext everystoptext startTEXpage stopTEXpage \
+enablemode disablemode preventmode definemode globalenablemode \
+globaldisablemode globalpreventmode pushmode popmode typescriptone \
+typescripttwo typescriptthree mathsizesuffix mathordcode mathopcode \
+mathbincode mathrelcode mathopencode mathclosecode mathpunctcode \
+mathalphacode mathinnercode mathnothingcode mathlimopcode mathnolopcode \
+mathboxcode mathchoicecode mathaccentcode mathradicalcode constantnumber \
+constantnumberargument constantdimen constantdimenargument constantemptyargument continueifinputfile \
+luastringsep !!bs !!es lefttorightmark righttoleftmark \
+breakablethinspace nobreakspace nonbreakablespace narrownobreakspace zerowidthnobreakspace \
+ideographicspace ideographichalffillspace twoperemspace threeperemspace fourperemspace \
+fiveperemspace sixperemspace figurespace punctuationspace hairspace \
+zerowidthspace zerowidthnonjoiner zerowidthjoiner zwnj zwj \
+optionalspace asciispacechar
keywordclass.context.helpers=\
startsetups stopsetups startxmlsetups stopxmlsetups \
diff --git a/tex/context/base/back-exp.lua b/tex/context/base/back-exp.lua
index 9e127cb09..33b6aa1e8 100644
--- a/tex/context/base/back-exp.lua
+++ b/tex/context/base/back-exp.lua
@@ -68,6 +68,7 @@ local attributes = attributes
local variables = interfaces.variables
local v_yes = variables.yes
local v_no = variables.no
+local v_hidden = variables.hidden
local implement = interfaces.implement
@@ -202,6 +203,8 @@ local p_attribute = lpeg.replacer(attribentities)
local p_stripper = lpeg.patterns.stripper
local p_escaped = lpeg.patterns.xml.escaped
+local f_tagid = formatters["%s-%04i"]
+
-- local alignmapping = {
-- flushright = "right",
-- middle = "center",
@@ -1002,10 +1005,10 @@ end
do
- local automathrows = true directives.register("backend.export.math.autorows", function(v) automathrows = v end)
- local automathapply = true directives.register("backend.export.math.autoapply", function(v) automathapply = v end)
- local automathnumber = true directives.register("backend.export.math.autonumber", function(v) automathnumber = v end)
- local automathstrip = true directives.register("backend.export.math.autostrip", function(v) automathstrip = v end)
+ local automathrows = true directives.register("export.math.autorows", function(v) automathrows = v end)
+ local automathapply = true directives.register("export.math.autoapply", function(v) automathapply = v end)
+ local automathnumber = true directives.register("export.math.autonumber", function(v) automathnumber = v end)
+ local automathstrip = true directives.register("export.math.autostrip", function(v) automathstrip = v end)
local functions = mathematics.categories.functions
@@ -1838,7 +1841,7 @@ do
result[#result+1] = "</p>\n"
end
- local function emptytag(result,element,nature,di) -- currently only break but at some point
+ local function emptytag(result,embedded,element,nature,di) -- currently only break but at some point
local a = di.attributes -- we might add detail etc
if a then -- happens seldom
if nature == "display" then
@@ -1859,7 +1862,7 @@ do
end
end
- local function begintag(result,element,nature,di,skip)
+ local function begintag(result,embedded,element,nature,di,skip)
local index = di.n
local fulltag = di.fulltag
local specification = specifications[fulltag] or { } -- we can have a dummy
@@ -1882,6 +1885,13 @@ do
elseif skip then
-- ignore
else
+
+ -- if embedded then
+ -- if element == "math" then
+ -- embedded[f_tagid(element,index)] = #result+1
+ -- end
+ -- end
+
local n = 0
local r = { } -- delay this
if detail then
@@ -1991,7 +2001,7 @@ do
end
end
- local function endtag(result,element,nature,di,skip)
+ local function endtag(result,embedded,element,nature,di,skip)
if skip == "comment" then
if show_comment then
if nature == "display" and (inline == 0 or inline == 1) then
@@ -2022,10 +2032,18 @@ do
inline = inline - 1
result[#result+1] = f_end_inline(namespaced[element])
end
+
+ -- if embedded then
+ -- if element == "math" then
+ -- local id = f_tagid(element,di.n) -- index)
+ -- local tx = concat(result,"",embedded[id],#result)
+ -- embedded[id] = "<?xml version='1.0' standalone='yes'?>" .. "\n" .. tx
+ -- end
+ -- end
end
end
- local function flushtree(result,data,nature)
+ local function flushtree(result,embedded,data,nature)
local nofdata = #data
for i=1,nofdata do
local di = data[i]
@@ -2033,7 +2051,7 @@ do
-- whatever
else
local content = di.content
--- also optimize for content == "" : trace that first
+ -- also optimize for content == "" : trace that first
if content then
-- already has breaks
local content = lpegmatch(p_entity,content)
@@ -2057,23 +2075,23 @@ do
if not element then
-- skip
elseif element == "break" then -- or element == "pagebreak"
- emptytag(result,element,nature,di)
+ emptytag(result,embedded,element,nature,di)
elseif element == "" or di.skip == "ignore" then
-- skip
else
if di.before then
- flushtree(result,di.before,nature)
+ flushtree(result,embedded,di.before,nature)
end
local natu = di.nature
local skip = di.skip
if di.breaknode then
- emptytag(result,"break","display",di)
+ emptytag(result,embedded,"break","display",di)
end
- begintag(result,element,natu,di,skip)
- flushtree(result,di.data,natu)
- endtag(result,element,natu,di,skip)
+ begintag(result,embedded,element,natu,di,skip)
+ flushtree(result,embedded,di.data,natu)
+ endtag(result,embedded,element,natu,di,skip)
if di.after then
- flushtree(result,di.after,nature)
+ flushtree(result,embedded,di.after,nature)
end
end
end
@@ -2976,14 +2994,15 @@ local htmltemplate = [[
return concat(result,"\n\n")
end
- local function allcontent(tree)
- local result = { }
- flushtree(result,tree.data,"display") -- we need to collect images
+ local function allcontent(tree,embed)
+ local result = { }
+ local embedded = embed and { }
+ flushtree(result,embedded,tree.data,"display") -- we need to collect images
result = concat(result)
-- no need to lpeg .. fast enough
result = gsub(result,"\n *\n","\n")
result = gsub(result,"\n +([^< ])","\n%1")
- return result
+ return result, embedded
end
-- local xhtmlpreamble = [[
@@ -3217,6 +3236,9 @@ local htmltemplate = [[
local addsuffix = file.addsuffix
local joinfile = file.join
+ local embedfile = false directives.register("export.embed",function(v) embedfile = v end)
+ local embedmath = false
+
local function stopexport(v)
starttiming(treehash)
@@ -3359,9 +3381,39 @@ local htmltemplate = [[
end
end
- local result = allcontent(tree)
+ local result, embedded = allcontent(tree,embedmath) -- embedfile is for testing
- local results = concat {
+ local attach = backends.nodeinjections.attachfile
+
+ if embedfile and attach then
+ -- only for testing
+ attach {
+ data = concat{ wholepreamble(true), result },
+ name = file.basename(xmlfilename),
+ registered = "export",
+ title = "raw xml export",
+ method = v_hidden,
+ mimetype = "application/mathml+xml",
+ }
+ end
+ -- if embedmath and attach then
+ -- local refs = { }
+ -- for k, v in sortedhash(embedded) do
+ -- attach {
+ -- data = v,
+ -- file = file.basename(k),
+ -- name = file.addsuffix(k,"xml"),
+ -- registered = k,
+ -- reference = k,
+ -- title = "xml export snippet: " .. k,
+ -- method = v_hidden,
+ -- mimetype = "application/mathml+xml",
+ -- }
+ -- refs[k] = 0
+ -- end
+ -- end
+
+ result = concat {
wholepreamble(true),
x_styles, -- adds to files
result,
@@ -3372,7 +3424,7 @@ local htmltemplate = [[
-- we're now ready for saving the result in the xml file
report_export("saving xml data in %a",xmlfilename)
- io.savedata(xmlfilename,results)
+ io.savedata(xmlfilename,result)
report_export("saving css image definitions in %a",imagefilename)
io.savedata(imagefilename,wrapups.allusedimages(basename))
@@ -3387,7 +3439,7 @@ local htmltemplate = [[
report_export("saving xhtml variant in %a",xhtmlfilename)
- local xmltree = cleanxhtmltree(xml.convert(results))
+ local xmltree = cleanxhtmltree(xml.convert(result))
xml.save(xmltree,xhtmlfilename)
@@ -3465,8 +3517,6 @@ local htmltemplate = [[
end
end
-
-
local function startexport(v)
if v and not exporting then
report_export("enabling export to xml")
@@ -3477,8 +3527,15 @@ local htmltemplate = [[
enableaction("math", "noads.handlers.tags")
-- appendaction("finalizers","lists","builders.paragraphs.tag")
-- enableaction("finalizers","builders.paragraphs.tag")
- luatex.registerstopactions(function() stopexport(v) end)
- exporting = true
+ luatex.registerstopactions(structurestags.finishexport)
+ exporting = v
+ end
+ end
+
+ function structurestags.finishexport()
+ if exporting then
+ stopexport(exporting)
+ exporting = false
end
end
@@ -3516,6 +3573,11 @@ implement {
}
implement {
+ name = "finishexport",
+ actions = structurestags.finishexport,
+}
+
+implement {
name = "settagitemgroup",
actions = structurestags.setitemgroup,
arguments = { "boolean", "integer", "string" }
diff --git a/tex/context/base/back-exp.mkiv b/tex/context/base/back-exp.mkiv
index bddb6de4d..a4ebe38aa 100644
--- a/tex/context/base/back-exp.mkiv
+++ b/tex/context/base/back-exp.mkiv
@@ -240,6 +240,12 @@
\doifsomething{\backendparameter\c!export}\dosynchronizeexport
\to \everystarttext
+% better (before pdf gets closed, so we can embed), but it needs testing:
+
+\appendtoks
+ \clf_finishexport
+\to \everystoptext
+
\appendtoks
\doifsomething{\backendparameter\c!export}\dosynchronizeexport % in case it is done inside \starttext
\to \everysetupdocument
diff --git a/tex/context/base/char-ini.lua b/tex/context/base/char-ini.lua
index c6192cc9b..4559fa28c 100644
--- a/tex/context/base/char-ini.lua
+++ b/tex/context/base/char-ini.lua
@@ -828,6 +828,9 @@ local categories = allocate() characters.categories = categories -- lazy table
setmetatableindex(categories, function(t,u) if u then local c = data[u] c = c and c.category or u t[u] = c return c end end)
+-- todo: overloads (these register directly in the tables as number and string) e.g. for greek
+-- todo: for string do a numeric lookup in the table itself
+
local lccodes = allocate() characters.lccodes = lccodes -- lazy table
local uccodes = allocate() characters.uccodes = uccodes -- lazy table
local shcodes = allocate() characters.shcodes = shcodes -- lazy table
diff --git a/tex/context/base/char-utf.lua b/tex/context/base/char-utf.lua
index 9ed861be0..381602ede 100644
--- a/tex/context/base/char-utf.lua
+++ b/tex/context/base/char-utf.lua
@@ -478,11 +478,15 @@ function utffilters.addgrapheme(result,first,second) -- can be U+ 0x string or u
end
end
-interfaces.implement {
- name = "addgrapheme",
- actions = utffilters.addgrapheme,
- arguments = { "string", "string", "string" }
-}
+if interfaces then -- eventually this goes to char-ctx.lua
+
+ interfaces.implement {
+ name = "addgrapheme",
+ actions = utffilters.addgrapheme,
+ arguments = { "string", "string", "string" }
+ }
+
+end
-- --
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index fc87c4863..6cb826614 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2015.05.04 19:00}
+\newcontextversion{2015.05.09 13:41}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf
index 298f490b5..39bde2fe6 100644
--- a/tex/context/base/context-version.pdf
+++ b/tex/context/base/context-version.pdf
Binary files differ
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index 9a2ae1bde..59e7faead 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -39,7 +39,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2015.05.04 19:00}
+\edef\contextversion{2015.05.09 13:41}
\edef\contextkind {beta}
%D For those who want to use this:
diff --git a/tex/context/base/core-uti.lua b/tex/context/base/core-uti.lua
index 346dec89e..a4b747680 100644
--- a/tex/context/base/core-uti.lua
+++ b/tex/context/base/core-uti.lua
@@ -38,7 +38,7 @@ local report_passes = logs.reporter("job","passes")
job = job or { }
local job = job
-job.version = 1.29
+job.version = 1.30
job.packversion = 1.02
-- some day we will implement loading of other jobs and then we need
diff --git a/tex/context/base/font-odv.lua b/tex/context/base/font-odv.lua
index ca1b9ddf0..2ef1aabe7 100644
--- a/tex/context/base/font-odv.lua
+++ b/tex/context/base/font-odv.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['font-odv'] = {
license = "see context related readme files"
}
+-- One day I'll speed this up ... char swapping and properties.
+
-- A few remarks:
--
-- This code is a partial rewrite of the code that deals with devanagari. The data and logic
@@ -155,18 +157,26 @@ replace_all_nbsp = function(head) -- delayed definition
return replace_all_nbsp(head)
end
-local fontprocesses = fonts.hashes.processes
local xprocesscharacters = nil
-xprocesscharacters = function(head,font)
- xprocesscharacters = nodes.handlers.nodepass
- return xprocesscharacters(head,font)
+if context then
+ xprocesscharacters = function(head,font)
+ xprocesscharacters = nodes.handlers.characters
+ return xprocesscharacters(head,font)
+ end
+else
+ xprocesscharacters = function(head,font)
+ xprocesscharacters = nodes.handlers.nodepass -- generic
+ return xprocesscharacters(head,font)
+ end
end
local function processcharacters(head,font)
return tonut(xprocesscharacters(tonode(head)))
end
+-- local fontprocesses = fonts.hashes.processes
+--
-- function processcharacters(head,font)
-- local processors = fontprocesses[font]
-- for i=1,#processors do
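
The devanagari code above now resolves its node processor lazily, and differently for ConTeXt versus the generic loader: the first call replaces the function with the real handler and forwards to it, so the handler does not have to exist at load time. A minimal sketch of that self-rebinding trick with a hypothetical handlers table:

    local handlers = { }                    -- the real entry appears only later

    local process
    process = function(head)
        process = handlers.characters       -- rebind once, now that it exists
        return process(head)
    end

    handlers.characters = function(head) return head .. " (processed)" end

    print(process("nodelist"))   -- first call resolves the binding, then forwards
    print(process("nodelist"))   -- later calls go straight to handlers.characters
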
diff --git a/tex/context/base/lpdf-tag.lua b/tex/context/base/lpdf-tag.lua
index dbe4d3f1e..79ccfe075 100644
--- a/tex/context/base/lpdf-tag.lua
+++ b/tex/context/base/lpdf-tag.lua
@@ -90,9 +90,23 @@ local lasttaginchain = structurestags.lastinchain
local usedmapping = { }
--- local tagsplitter = structurestags.patterns.splitter
+----- tagsplitter = structurestags.patterns.splitter
-local add_ids = false -- true
+-- local embeddedtags = false -- true will id all, for tracing
+-- local f_tagid = formatters["%s-%04i"]
+-- local embeddedfilelist = pdfarray() -- /AF crap
+--
+-- directives.register("structures.tags.embedmath",function(v)
+-- if not v then
+-- -- only enable
+-- elseif embeddedtags == true then
+-- -- already all tagged
+-- elseif embeddedtags then
+-- embeddedtags.math = true
+-- else
+-- embeddedtags = { math = true }
+-- end
+-- end)
-- function codeinjections.maptag(original,target,kind)
-- mapping[original] = { target, kind or "inline" }
@@ -110,7 +124,7 @@ local function finishstructure()
}
-- we need to split names into smaller parts (e.g. alphabetic or so)
-- we already have code for that somewhere
- if add_ids then
+ if #names > 0 then
local kids = pdfdictionary {
Limits = pdfarray { names[1], names[#names-1] },
Names = names,
@@ -130,7 +144,7 @@ local function finishstructure()
Type = pdfconstant("StructTreeRoot"),
K = pdfreference(pdfflushobject(structure_kids)),
ParentTree = pdfreference(pdfflushobject(parent_ref,parenttree)),
- IDTree = (add_ids and pdfreference(pdfflushobject(idtree))) or nil,
+ IDTree = #names > 0 and pdfreference(pdfflushobject(idtree)) or nil,
RoleMap = rolemap,
}
pdfflushobject(structure_ref,structuretree)
@@ -140,6 +154,7 @@ local function finishstructure()
Marked = pdfboolean(true),
-- UserProperties = pdfboolean(true),
-- Suspects = pdfboolean(true),
+ -- AF = #embeddedfilelist > 0 and pdfreference(pdfflushobject(embeddedfilelist)) or nil,
}
addtocatalog("MarkInfo",pdfreference(pdfflushobject(markinfo)))
--
@@ -204,13 +219,31 @@ local function makeelement(fulltag,parent)
--
usedmapping[tag] = true
--
+ -- specification.attribute is unique
+ --
+ local id = nil
+ -- local af = nil
+ -- if embeddedtags then
+ -- local tagname = specification.tagname
+ -- local tagindex = specification.tagindex
+ -- if embeddedtags == true or embeddedtags[tagname] then
+ -- id = f_tagid(tagname,tagindex)
+ -- af = job.fileobjreferences.collected[id]
+ -- if af then
+ -- local r = pdfreference(af)
+ -- af = pdfarray { r }
+ -- -- embeddedfilelist[#embeddedfilelist+1] = r
+ -- end
+ -- end
+ -- end
+ --
local k = pdfarray()
local r = pdfreserveobject()
local t = usedlabels[tag] or tag
local d = pdfdictionary {
Type = pdf_struct_element,
S = pdfconstant(t),
- ID = (add_ids and fulltag) or nil,
+ ID = id,
T = detail and detail or nil,
P = parent.pref,
Pg = pageref,
@@ -218,15 +251,22 @@ local function makeelement(fulltag,parent)
A = a and makeattribute(a) or nil,
-- Alt = " Who cares ",
-- ActualText = " Hi Hans ",
+ AF = af,
}
local s = pdfreference(pdfflushobject(d))
- if add_ids then
- names[#names+1] = fulltag
+ if id then
+ names[#names+1] = id
names[#names+1] = s
end
local kids = parent.kids
kids[#kids+1] = s
- local e = { tag = t, pref = s, kids = k, knum = r, pnum = pagenum }
+ local e = {
+ tag = t,
+ pref = s,
+ kids = k,
+ knum = r,
+ pnum = pagenum
+ }
elements[fulltag] = e
return e
end
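
A detail worth noting in the hunk above: entries such as IDTree and AF are only written when there is something to point at, via the "condition and value or nil" idiom, which simply leaves the key out of the Lua table and hence out of the PDF dictionary. A tiny plain-Lua sketch (not the real lpdf dictionary objects):

    local names = { }   -- would hold id/reference pairs when tagging produced any
    local structuretree = {
        Type   = "StructTreeRoot",
        IDTree = #names > 0 and "ref to idtree" or nil,   -- omitted when nothing was collected
    }
    print(structuretree.IDTree)   -- nil: the key is simply absent
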
diff --git a/tex/context/base/lpdf-wid.lua b/tex/context/base/lpdf-wid.lua
index 895bbd3ff..22971c2b7 100644
--- a/tex/context/base/lpdf-wid.lua
+++ b/tex/context/base/lpdf-wid.lua
@@ -177,10 +177,27 @@ local function analyzetransparency(transparencyvalue)
end
-- Attachments
+local nofattachments = 0
+local attachments = { }
+local filestreams = { }
+local referenced = { }
+local ignorereferenced = true -- fuzzy pdf spec .. twice in attachment list, can become an option
+local tobesavedobjrefs = utilities.storage.allocate()
+local collectedobjrefs = utilities.storage.allocate()
+
+local fileobjreferences = {
+ collected = collectedobjrefs,
+ tobesaved = tobesavedobjrefs,
+}
+
+job.fileobjreferences = fileobjreferences
-local nofattachments, attachments, filestreams, referenced = 0, { }, { }, { }
+local function initializer()
+ collectedobjrefs = job.fileobjreferences.collected or { }
+ tobesavedobjrefs = job.fileobjreferences.tobesaved or { }
+end
-local ignorereferenced = true -- fuzzy pdf spec .. twice in attachment list, can become an option
+job.register('job.fileobjreferences.collected', tobesavedobjrefs, initializer)
local function flushembeddedfiles()
if next(filestreams) then
@@ -209,6 +226,7 @@ function codeinjections.embedfile(specification)
local hash = specification.hash or filename
local keepdir = specification.keepdir -- can change
local usedname = specification.usedname
+ local filetype = specification.filetype
if filename == "" then
filename = nil
end
@@ -246,11 +264,20 @@ function codeinjections.embedfile(specification)
end
end
end
- usedname = usedname ~= "" and usedname or filename
+ -- needs to be cleaned up:
+ usedname = usedname ~= "" and usedname or filename or name
local basename = keepdir == true and usedname or file.basename(usedname)
-local basename = gsub(basename,"%./","")
- local savename = file.addsuffix(name ~= "" and name or basename,"txt") -- else no valid file
- local a = pdfdictionary { Type = pdfconstant("EmbeddedFile") }
+ local basename = gsub(basename,"%./","")
+ local savename = name ~= "" and name or basename
+ if not filetype or filetype == "" then
+ filetype = name and (filename and file.suffix(filename)) or "txt"
+ end
+ savename = file.addsuffix(savename,filetype) -- type is mandatory for proper working in the viewer
+ local mimetype = specification.mimetype
+ local a = pdfdictionary {
+ Type = pdfconstant("EmbeddedFile"),
+ Subtype = mimetype and mimetype ~= "" and pdfconstant(mimetype) or nil,
+ }
local f
if data then
f = pdfflushstreamobject(data,a)
@@ -265,6 +292,7 @@ local basename = gsub(basename,"%./","")
UF = pdfstring(savename),
EF = pdfdictionary { F = pdfreference(f) },
Desc = title ~= "" and pdfunicode(title) or nil,
+ -- AFRelationship = pdfconstant("Source"), -- some day maybe, not mandatory
}
local r = pdfreference(pdfflushobject(d))
filestreams[hash] = r
@@ -318,6 +346,10 @@ function nodeinjections.attachfile(specification)
aref = codeinjections.embedfile(specification)
attachments[registered] = aref
end
+ local reference = specification.reference
+ if reference and aref then
+ tobesavedobjrefs[reference] = aref[1]
+ end
if not aref then
report_attachment("skipping attachment, registered %a",registered)
-- already reported
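-- The collected/tobesaved pair above follows the usual two-pass job storage
-- pattern: whatever lands in "tobesaved" during this run is written to the
-- utility file and reappears as "collected" in the next run, where the
-- registered initializer picks it up. A rough standalone imitation in plain
-- Lua (the save step here is just a table round trip, not the real job code):

local tobesaved = { }
local collected = { }

local function save() -- roughly what the job saver stores under the key
    return { ["job.fileobjreferences.collected"] = tobesaved }
end

local function initialize(data) -- what the registered initializer would see
    collected = data["job.fileobjreferences.collected"] or { }
end

tobesaved["fig:one"] = 42   -- run 1: attachfile stores the object number
initialize(save())          -- run 2: the reference becomes available
print(collected["fig:one"]) -- 42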
diff --git a/tex/context/base/lxml-ini.lua b/tex/context/base/lxml-ini.lua
index 2f63c857f..5879de3a4 100644
--- a/tex/context/base/lxml-ini.lua
+++ b/tex/context/base/lxml-ini.lua
@@ -46,6 +46,7 @@ implement { name = "xmlattribute", actions = lxml.attribute,
implement { name = "xmlattributedef", actions = lxml.attribute, arguments = { "string", "string", "string", "string" } }
implement { name = "xmlchainatt", actions = lxml.chainattribute, arguments = { "string", "'/'", "string" } }
implement { name = "xmlchainattdef", actions = lxml.chainattribute, arguments = { "string", "'/'", "string", "string" } }
+implement { name = "xmlrefatt", actions = lxml.refatt, arguments = { "string", "string" } }
implement { name = "xmlchecknamespace", actions = xml.checknamespace, arguments = { "lxmlid", "string", "string" } }
implement { name = "xmlcommand", actions = lxml.command, arguments = { "string", "string", "string" } }
implement { name = "xmlconcat", actions = lxml.concat, arguments = { "string", "string", "string" } } -- \detokenize{#3}
diff --git a/tex/context/base/lxml-ini.mkiv b/tex/context/base/lxml-ini.mkiv
index 5ef4245a2..ac5a63f88 100644
--- a/tex/context/base/lxml-ini.mkiv
+++ b/tex/context/base/lxml-ini.mkiv
@@ -40,6 +40,7 @@
%def\xmlattributedef #1#2#3#4{\clf_xmlattributedef {#1}{#2}{#3}{#4}}
%def\xmlchainatt #1#2{\clf_xmlchainatt {#1}{#2}}
%def\xmlchainattdef #1#2#3{\clf_xmlchainattdef {#1}{#2}{#3}}
+%def\xmlrefatt #1#2{\clf_xmlrefatt {#1}{#2}}
%def\xmlchecknamespace #1#2#3{\clf_xmlchecknamespace {#1}{#2}{#3}} % element
%def\xmlcommand #1#2#3{\clf_xmlcommand {#1}{#2}{#3}}
\def\xmlconcat #1#2#3{\clf_xmlconcat {#1}{#2}{\detokenize{#3}}}
@@ -110,6 +111,7 @@
\let\xmlattributedef \clf_xmlattributedef
\let\xmlchainatt \clf_xmlchainatt
\let\xmlchainattdef \clf_xmlchainattdef
+\let\xmlrefatt \clf_xmlrefatt
\let\xmlchecknamespace \clf_xmlchecknamespace
\let\xmlcommand \clf_xmlcommand
% \xmlconcat
diff --git a/tex/context/base/lxml-tex.lua b/tex/context/base/lxml-tex.lua
index 9e540fe7c..550a06a18 100644
--- a/tex/context/base/lxml-tex.lua
+++ b/tex/context/base/lxml-tex.lua
@@ -1597,6 +1597,22 @@ function lxml.att(id,a,default)
end
end
+function lxml.refatt(id,a)
+ local e = getid(id)
+ if e then
+ local at = e.at
+ if at then
+ local str = at[a]
+ if str and str ~= "" then
+ str = gsub(str,"^#+","")
+ if str ~= "" then
+ contextsprint(notcatcodes,str)
+ end
+ end
+ end
+ end
+end
+
function lxml.name(id) -- or remapped name? -> lxml.info, combine
local e = getid(id)
if e then
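-- The new lxml.refatt only differs from lxml.att in that it strips leading
-- hash signs before printing, so href="#sec:intro" comes out as "sec:intro".
-- Just that normalization step, isolated (plain string.gsub, no xml tree):

local gsub = string.gsub

local function cleanreference(str)
    if str and str ~= "" then
        str = gsub(str,"^#+","")
        if str ~= "" then
            return str
        end
    end
end

print(cleanreference("#sec:intro")) -- sec:intro
print(cleanreference("sec:intro"))  -- sec:intro
print(cleanreference("#"))          -- nil, nothing left to typeset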
diff --git a/tex/context/base/m-visual.mkiv b/tex/context/base/m-visual.mkiv
index 3c57691ec..d50215966 100644
--- a/tex/context/base/m-visual.mkiv
+++ b/tex/context/base/m-visual.mkiv
@@ -192,8 +192,8 @@
{\dimen0\zeropoint
\getrandomcount\scratchcounter{3}{6}%
\dorecurse\scratchcounter
- {\getrandomdimen\scratchdimen{1em}{3em}%
- \mathinner{\red\fakerule\scratchdimen}%
+ {\getrandomdimen\scratchdimen{0.5em}{1.5em}%
+ \mathord{\red\fakerule\scratchdimen}%
\ifnum\recurselevel<\scratchcounter+\fi
\advance\scratchdimen\dimen0}%
=\mathinner{\red\fakerule\scratchdimen}}
diff --git a/tex/context/base/math-ini.lua b/tex/context/base/math-ini.lua
index 88159e107..e6a35c39e 100644
--- a/tex/context/base/math-ini.lua
+++ b/tex/context/base/math-ini.lua
@@ -655,24 +655,27 @@ local noffunctions = 1000 -- offset
categories.functions = functions
implement {
- name = "taggedmathfunction",
- arguments = { "string", "string", "string" },
- actions = function(tag,label,apply)
- local delta = toboolean(apply) and 1000 or 0
- if toboolean(label) then
- local n = functions[tag]
- if not n then
- noffunctions = noffunctions + 1
- functions[noffunctions] = tag
- functions[tag] = noffunctions
- texsetattribute(a_mathcategory,noffunctions + delta)
- else
- texsetattribute(a_mathcategory,n + delta)
- end
- context.mathlabeltext(tag)
+ name = "tagmfunctiontxt",
+ arguments = { "string", "conditional" },
+ actions = function(tag,apply)
+ local delta = apply and 1000 or 0
+ texsetattribute(a_mathcategory,1000 + delta)
+ end
+}
+
+implement {
+ name = "tagmfunctionlab",
+ arguments = { "string", "conditional" },
+ actions = function(tag,apply)
+ local delta = apply and 1000 or 0
+ local n = functions[tag]
+ if not n then
+ noffunctions = noffunctions + 1
+ functions[noffunctions] = tag
+ functions[tag] = noffunctions
+ texsetattribute(a_mathcategory,noffunctions + delta)
else
- texsetattribute(a_mathcategory,1000 + delta)
- context(tag)
+ texsetattribute(a_mathcategory,n + delta)
end
end
}
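-- The label variant above keeps a two-way tag <-> number table so a given
-- function name always maps to the same category number (offset 1000), and
-- "apply" adds another 1000 on top. The same bookkeeping without the
-- attribute setting, as a quick check:

local functions    = { }
local noffunctions = 1000 -- offset, as in the module

local function categoryof(tag,apply)
    local delta = apply and 1000 or 0
    local n = functions[tag]
    if not n then
        noffunctions = noffunctions + 1
        functions[noffunctions] = tag
        functions[tag] = noffunctions
        n = noffunctions
    end
    return n + delta
end

print(categoryof("sin"))      -- 1001
print(categoryof("cos"))      -- 1002
print(categoryof("sin"))      -- 1001, stable
print(categoryof("sin",true)) -- 2001, applied variant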
diff --git a/tex/context/base/math-ini.mkiv b/tex/context/base/math-ini.mkiv
index 2d6090c03..0607f008d 100644
--- a/tex/context/base/math-ini.mkiv
+++ b/tex/context/base/math-ini.mkiv
@@ -184,12 +184,103 @@
\definemathematics[\v!default] % not needed, but nicer when nesting back to normal
-% Normally this is applied to only one character.
+% Now we redefine \type {\mathematics} and \type {\m}:
+
+\unexpanded\def\mathematics
+ {\doifelsenextoptionalcs\math_m_yes\math_m_nop}
+
+\def\math_m_yes[#1]#2%
+ {\relax
+ \ifmmode
+ #2%
+ \else
+ \normalstartimath
+ \edef\currentmathematics{#1}% check for valid
+ \the\everyswitchmathematics\relax
+ #2%
+ \normalstopimath
+ \fi}
+
+\def\math_m_nop#1%
+ {\relax
+ \ifmmode
+ #1%
+ \else
+ \normalstartimath
+ #1%
+ \normalstopimath
+ \fi}
+
+\let\m\mathematics
+
+\newmuskip\defaultthickmuskip \defaultthickmuskip 5mu plus 5mu
+\newmuskip\defaultmedmuskip \defaultmedmuskip 4mu plus 2mu minus 4mu
+\newmuskip\defaultthinmuskip \defaultthinmuskip 3mu
+
+\newmuskip\halfthickmuskip \halfthickmuskip 2.5mu plus 2.5mu
+\newmuskip\halfmedmuskip \halfmedmuskip 2.0mu plus 1.0mu minus 2.0mu
+\newmuskip\halfthinmuskip \halfthinmuskip 1.5mu
+
+\newcount \defaultrelpenalty \defaultrelpenalty 500
+\newcount \defaultbinoppenalty \defaultbinoppenalty 700
+
+
+\startsetups math:spacing:default
+ \thickmuskip \defaultthickmuskip
+ \medmuskip \defaultmedmuskip
+ \thinmuskip \defaultthinmuskip
+ \relpenalty \defaultrelpenalty
+ \binoppenalty \defaultbinoppenalty
+\stopsetups
+
+\startsetups math:spacing:half
+ \thickmuskip \halfthickmuskip
+ \medmuskip \halfmedmuskip
+ \thinmuskip \halfthinmuskip
+ \relpenalty \defaultrelpenalty
+ \binoppenalty \defaultbinoppenalty
+\stopsetups
+
+\startsetups math:spacing:tight
+ \ifcase\raggedstatus
+ \thickmuskip \halfthickmuskip
+ \medmuskip \halfmedmuskip
+ \thinmuskip \halfthinmuskip
+ \else
+ \thickmuskip 1\halfthickmuskip
+ \medmuskip 1\halfmedmuskip
+ \thinmuskip 1\halfthinmuskip
+ \fi
+ \relpenalty \defaultrelpenalty
+ \binoppenalty \maxdimen
+\stopsetups
+
+\startsetups math:spacing:fixed
+ \ifcase\raggedstatus
+ \thickmuskip \halfthickmuskip
+ \medmuskip \halfmedmuskip
+ \thinmuskip \halfthinmuskip
+ \else
+ \thickmuskip 1\halfthickmuskip
+ \medmuskip 1\halfmedmuskip
+ \thinmuskip 1\halfthinmuskip
+ \fi
+ \relpenalty \maxdimen
+ \binoppenalty \maxdimen
+\stopsetups
+
+% \dorecurse{80}{test \m[i:tight]{\red \fakeformula} test }
+
+\definemathematics[i:default][\c!setups=math:spacing:equal]
+\definemathematics[i:half] [\c!setups=math:spacing:half]
+\definemathematics[i:tight] [\c!setups=math:spacing:tight]
+\definemathematics[i:fixed] [\c!setups=math:spacing:fixed]
+
+% Normally the next is applied to only one character.
%
% $ABC$ $\cal ABC$ $\mathaltcal ABC$
% todo: only in mmode
-
% these commands are semi-public but should not be used directly (lua names will change)
\unexpanded\def\math_set_attribute #1#2{\ifmmode\clf_setmathattribute{#1}{#2}\fi}
@@ -524,19 +615,32 @@
\unexpanded\def\math_tags_mn#1{\begingroup\mathupright#1\endgroup}
\unexpanded\def\math_tags_ms#1{\begingroup\mathupright#1\endgroup}
-\unexpanded\def\mfunction #1{{\mathupright\math_tags_function{#1}}}
-\unexpanded\def\mfunctionlabeltext#1{{\mathupright\math_tags_functionlabeltext{#1}}}
-
% Once this is stable we can store the number at the tex end which is
% faster. Functions getnumbers >= 1000.
-\expanded\def\math_tags_mathfunction_indeed #1{\clf_taggedmathfunction{#1}{false}{\ifconditional\c_apply_function true\else false\fi}}
-\expanded\def\math_tags_mathfunctionlabeltext_indeed#1{\clf_taggedmathfunction{#1} {true}{\ifconditional\c_apply_function true\else false\fi}}
+\setupmathematics
+ [\c!functionstyle=\mr,
+ \c!functioncolor=]
+
+\unexpanded\def\mfunction#1%
+ {\begingroup
+ \math_tags_mfunctiontxt{#1}\c_apply_function
+ \mathoptext{\usemathematicsstyleandcolor\c!functionstyle\c!functioncolor#1}%
+ \endgroup}
+
+\unexpanded\def\mfunctionlabeltext#1%
+ {\begingroup
+ \math_tags_mfunctionlab{#1}\c_apply_function
+ \mathoptext{\usemathematicsstyleandcolor\c!functionstyle\c!functioncolor\mathlabeltext{#1}}%
+ \endgroup}
+
+\let\math_tags_mfunctiontxt\gobbletwoarguments
+\let\math_tags_mfunctionlab\gobbletwoarguments
-\expanded\def\math_tags_mo_indeed#1{\begingroup \attribute\mathcategoryattribute\plusone #1\endgroup}
-\expanded\def\math_tags_mi_indeed#1{\begingroup \attribute\mathcategoryattribute\plustwo #1\endgroup}
-\expanded\def\math_tags_mn_indeed#1{\begingroup\mathupright\attribute\mathcategoryattribute\plusthree#1\endgroup}
-\expanded\def\math_tags_ms_indeed#1{\begingroup\mathupright\attribute\mathcategoryattribute\plusfour #1\endgroup}
+\unexpanded\def\math_tags_mo_indeed#1{\begingroup \attribute\mathcategoryattribute\plusone #1\endgroup}
+\unexpanded\def\math_tags_mi_indeed#1{\begingroup \attribute\mathcategoryattribute\plustwo #1\endgroup}
+\unexpanded\def\math_tags_mn_indeed#1{\begingroup\mathupright\attribute\mathcategoryattribute\plusthree#1\endgroup}
+\unexpanded\def\math_tags_ms_indeed#1{\begingroup\mathupright\attribute\mathcategoryattribute\plusfour #1\endgroup} % todo: mathoptext
\newconditional\c_apply_function
@@ -551,12 +655,12 @@
\endgroup}
\appendtoks
- \let\math_tags_function \math_tags_mathfunction_indeed
- \let\math_tags_functionlabeltext\math_tags_mathfunctionlabeltext_indeed
- \let\math_tags_mo \math_tags_mo_indeed
- \let\math_tags_mi \math_tags_mi_indeed
- \let\math_tags_mn \math_tags_mn_indeed
- \let\math_tags_ms \math_tags_ms_indeed
+ \let\math_tags_mfunctiontxt\clf_tagmfunctiontxt
+ \let\math_tags_mfunctionlab\clf_tagmfunctionlab
+ \let\math_tags_mo \math_tags_mo_indeed
+ \let\math_tags_mi \math_tags_mi_indeed
+ \let\math_tags_mn \math_tags_mn_indeed
+ \let\math_tags_ms \math_tags_ms_indeed
\to \everyenableelements
\appendtoks
@@ -570,21 +674,21 @@
% \def\mlimitsfunction #1{\mathlimopcomm{{\mr#1}}
% \def\mnolimitsfunction#1{\mathnolopcomm{{\mr#1}}
-%D Taco posted this solution as response to a mail by Olivier, so let's integrate
-%D it here.
-
-\def\currentmscaledstyle{rm} % will be plugged into the typeface text=ss option
-
-\unexpanded\def\math_function_style_opnolimits #1{\mathop{\mscaledtext{#1}}\nolimits}
-\unexpanded\def\math_function_style_mfunction #1{\mscaledtext{\math_tags_function{#1}}}
-\unexpanded\def\math_function_style_mfunctionlabeltext#1{\mscaledtext{\math_tags_functionlabeltext{#1}}}
-
-\unexpanded\def\setmathfunctionstyle#1% rm ss tt (can be made faster if needed)
- {\doifsomething{#1}
- {\def\currentmscaledstyle{#1}%
- \let\mathopnolimits \math_function_style_opnolimits
- \let\mfunction \math_function_style_mfunction
- \let\mfunctionlabeltext\math_function_style_mfunctionlabeltext}}
+% %D Taco posted this solution as response to a mail by Olivier, so let's integrate
+% %D it here.
+%
+% \def\currentmscaledstyle{rm} % will be plugged into the typeface text=ss option
+%
+% \unexpanded\def\math_function_style_opnolimits #1{\mathop{\mscaledtext{#1}}\nolimits}
+% \unexpanded\def\math_function_style_mfunction #1{\mscaledtext{\math_tags_function{#1}}}
+% \unexpanded\def\math_function_style_mfunctionlabeltext#1{\mscaledtext{\math_tags_functionlabeltext{#1}}}
+%
+% \unexpanded\def\setmathfunctionstyle#1% rm ss tt (can be made faster if needed)
+% {\doifsomething{#1}
+% {\def\currentmscaledstyle{#1}%
+% \let\mathopnolimits \math_function_style_opnolimits
+% \let\mfunction \math_function_style_mfunction
+% \let\mfunctionlabeltext\math_function_style_mfunctionlabeltext}}
\unexpanded\def\mscaledtext#1%
{\mathchoice
@@ -593,6 +697,9 @@
{\hbox{\csname\currentmscaledstyle\endcsname\tfx #1}}
{\hbox{\csname\currentmscaledstyle\endcsname\tfxx#1}}}
+\unexpanded\def\setmathfunctionstyle#1%
+ {\setupmathematics[\c!functionstyle=#1]} % for old times sake
+
%D We can force the way functions are typeset by manipulating the text option:
%D
%D \starttyping
diff --git a/tex/context/base/mult-def.mkiv b/tex/context/base/mult-def.mkiv
index 607f76a16..cea2a6a6a 100644
--- a/tex/context/base/mult-def.mkiv
+++ b/tex/context/base/mult-def.mkiv
@@ -34,6 +34,11 @@
% start todo in mult-def.lua:
+\def\c!functionstyle {functionstyle}
+\def\c!functioncolor {functioncolor}
+
+\def\v!extremestretch {extremestretch}
+
\def\v!alphabetic {alphabetic}
\def\v!Alphabetic {Alphabetic}
@@ -152,6 +157,7 @@
\def\c!etallimit {etallimit}
\def\c!etaldisplay{etaldisplay}
\def\c!etaltext {etaltext}
+\def\c!etaloption {etaloption}
\ifdefined\v!simplelist\else \def\v!simplelist{simplelist} \fi
\ifdefined\v!sorting \else \def\v!sorting {sorting} \fi
diff --git a/tex/context/base/mult-low.lua b/tex/context/base/mult-low.lua
index 1ad0f9ebb..9a05e59d9 100644
--- a/tex/context/base/mult-low.lua
+++ b/tex/context/base/mult-low.lua
@@ -102,7 +102,7 @@ return {
"startproject", "stopproject", "project",
"starttext", "stoptext", "startnotext", "stopnotext","startdocument", "stopdocument", "documentvariable", "setupdocument",
"startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule","setupmodule","currentmoduleparameter","moduleparameter",
- "everystarttext",
+ "everystarttext", "everystoptext",
--
"startTEXpage", "stopTEXpage",
-- "startMPpage", "stopMPpage", -- already catched by nested lexer
diff --git a/tex/context/base/publ-aut.lua b/tex/context/base/publ-aut.lua
index 4ab8dd4bc..4c95d0025 100644
--- a/tex/context/base/publ-aut.lua
+++ b/tex/context/base/publ-aut.lua
@@ -21,6 +21,7 @@ local formatters = string.formatters
local P, S, C, V, Cs, Ct, Cg, Cf, Cc = lpeg.P, lpeg.S, lpeg.C, lpeg.V, lpeg.Cs, lpeg.Ct, lpeg.Cg, lpeg.Cf, lpeg.Cc
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local settings_to_hash = utilities.parsers.settings_to_hash
local context = context
----- commands = commands
@@ -42,6 +43,8 @@ local trace_hashing = false trackers.register("publications.authorhash", func
local report = logs.reporter("publications","authors")
local report_cite = logs.reporter("publications","cite")
+local v_last = interfaces.variables.last
+
-- local function makesplitter(separator)
-- return Ct { "start",
-- start = (Cs((V("outer") + (1-separator))^1) + separator^1)^1,
@@ -279,11 +282,18 @@ local function splitauthorstring(str)
return { authors } -- we assume one author
end
+ -- we could cache these too but it can become messy .. leave that for later
- local authors = lpegmatch(andsplitter,str)
- for i=1,#authors do
+ local authors = lpegmatch(andsplitter,str)
+ local nofauthors = #authors
+ for i=1,nofauthors do
authors[i] = splitauthor(authors[i])
end
+ if nofauthors > 1 and authors[nofauthors].original == "others" then
+ -- only the last one is looked at
+ authors[nofauthors] = nil
+ authors.others = true
+ end
return authors
end
@@ -396,8 +406,11 @@ local function btxauthor(dataset,tag,field,settings)
return
-- error
end
+ local absmax = max
local etallimit = tonumber(settings.etallimit) or 1000
local etaldisplay = tonumber(settings.etaldisplay) or etallimit
+ local etaloption = settings_to_hash(settings.etaloption or "")
+ local etallast = etaloption[v_last]
local combiner = settings.combiner
local symbol = settings.symbol
local index = settings.index
@@ -408,16 +421,22 @@ local function btxauthor(dataset,tag,field,settings)
symbol = "."
end
local ctx_btxsetup = settings.kind == "cite" and ctx_btxciteauthorsetup or ctx_btxlistauthorsetup
- if max > etallimit and etaldisplay < max then
+ if max > etallimit and (etaldisplay+(etallast and 1 or 0)) < max then
max = etaldisplay
+ else
+ etallast = false
end
currentauthordata = split
currentauthorsymbol = symbol
- local function oneauthor(i)
+ local function oneauthor(i,last,justone)
local author = split[i]
if index then
ctx_btxstartauthor(i,1,0)
+ elseif last then
+ ctx_btxstartauthor(i,1,0)
+ ctx_btxsetconcat(0)
+ ctx_btxsetauthorvariant(combiner)
else
local state = author.state or 0
ctx_btxstartauthor(i,max,state)
@@ -445,9 +464,13 @@ local function btxauthor(dataset,tag,field,settings)
ctx_btxsetjuniors() -- (concat(juniors," "))
end
if not index and i == max then
- local overflow = #split - max
- if overflow > 0 then
- ctx_btxsetoverflow(overflow)
+ if split.others then
+ ctx_btxsetoverflow(1)
+ else
+ local overflow = #split - max
+ if overflow > 0 then
+ ctx_btxsetoverflow(overflow)
+ end
end
end
ctx_btxsetup(combiner)
@@ -455,10 +478,15 @@ local function btxauthor(dataset,tag,field,settings)
end
if index then
oneauthor(index)
+ elseif max == 1 then
+ oneauthor(1,false,true)
else
for i=1,max do
oneauthor(i)
end
+ if etallast then
+ oneauthor(absmax,true)
+ end
end
else
report("ignored field %a of tag %a, used field %a is no author",field,tag,usedfield)
@@ -483,6 +511,7 @@ implement {
{ "kind" },
{ "etallimit" },
{ "etaldisplay" },
+ { "etaloption" },
{ "symbol" },
}
}
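-- A dry run of the new etal handling: with etaloption=last the list is cut
-- off at etaldisplay names but the very last author is still shown after
-- the overflow, unless doing so would not actually shorten anything. The
-- function below only mimics the max/etallast arithmetic of btxauthor;
-- author indices stand in for the real records.

local concat = table.concat

local function etalplan(nofauthors,etallimit,etaldisplay,etallast)
    local max    = nofauthors
    local absmax = nofauthors
    if max > etallimit and (etaldisplay + (etallast and 1 or 0)) < max then
        max = etaldisplay
    else
        etallast = false
    end
    local shown = { }
    for i=1,max do shown[#shown+1] = i end
    if etallast then shown[#shown+1] = absmax end
    return concat(shown,","), max < nofauthors
end

print(etalplan(9,7,6,true))  -- 1,2,3,4,5,6,9  true  (et al. plus last author)
print(etalplan(9,7,6,false)) -- 1,2,3,4,5,6    true
print(etalplan(7,7,6,true))  -- 1,2,3,4,5,6,7  false (all shown)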
@@ -564,23 +593,27 @@ publications.authorhashers = authorhashers
-- todo: some hashing
local function name(authors)
- local n = #authors
- if n == 0 then
- return ""
- end
- local result = { }
- local nofresult = 0
- for i=1,n do
- local author = authors[i]
- local surnames = author.surnames
- if surnames and #surnames > 0 then
- for j=1,#surnames do
- nofresult = nofresult + 1
- result[nofresult] = surnames[j]
+ if type(authors) == "table" then
+ local n = #authors
+ if n == 0 then
+ return ""
+ end
+ local result = { }
+ local nofresult = 0
+ for i=1,n do
+ local author = authors[i]
+ local surnames = author.surnames
+ if surnames and #surnames > 0 then
+ for j=1,#surnames do
+ nofresult = nofresult + 1
+ result[nofresult] = surnames[j]
+ end
end
end
+ return concat(result," ")
+ else
+ return authors
end
- return concat(result," ")
end
table.setmetatableindex(authorhashers,function(t,k)
@@ -589,86 +622,94 @@ table.setmetatableindex(authorhashers,function(t,k)
end)
authorhashers.normal = function(authors)
- local n = #authors
- if n == 0 then
- return ""
- end
- local result = { }
- local nofresult = 0
- for i=1,n do
- local author = authors[i]
- local vons = author.vons
- local surnames = author.surnames
- local firstnames = author.firstnames
- local juniors = author.juniors
- if vons and #vons > 0 then
- for j=1,#vons do
- nofresult = nofresult + 1
- result[nofresult] = vons[j]
- end
+ if type(authors) == "table" then
+ local n = #authors
+ if n == 0 then
+ return ""
end
- if surnames and #surnames > 0 then
- for j=1,#surnames do
- nofresult = nofresult + 1
- result[nofresult] = surnames[j]
+ local result = { }
+ local nofresult = 0
+ for i=1,n do
+ local author = authors[i]
+ local vons = author.vons
+ local surnames = author.surnames
+ local firstnames = author.firstnames
+ local juniors = author.juniors
+ if vons and #vons > 0 then
+ for j=1,#vons do
+ nofresult = nofresult + 1
+ result[nofresult] = vons[j]
+ end
end
- end
- if firstnames and #firstnames > 0 then
- for j=1,#firstnames do
- nofresult = nofresult + 1
- result[nofresult] = firstnames[j]
+ if surnames and #surnames > 0 then
+ for j=1,#surnames do
+ nofresult = nofresult + 1
+ result[nofresult] = surnames[j]
+ end
end
- end
- if juniors and #juniors > 0 then
- for j=1,#juniors do
- nofresult = nofresult + 1
- result[nofresult] = juniors[j]
+ if firstnames and #firstnames > 0 then
+ for j=1,#firstnames do
+ nofresult = nofresult + 1
+ result[nofresult] = firstnames[j]
+ end
+ end
+ if juniors and #juniors > 0 then
+ for j=1,#juniors do
+ nofresult = nofresult + 1
+ result[nofresult] = juniors[j]
+ end
end
end
+ return concat(result," ")
+ else
+ return authors
end
- return concat(result," ")
end
authorhashers.normalshort = function(authors)
- local n = #authors
- if n == 0 then
- return ""
- end
- local result = { }
- local nofresult = 0
- for i=1,n do
- local author = authors[i]
- local vons = author.vons
- local surnames = author.surnames
- local initials = author.initials
- local juniors = author.juniors
- if vons and #vons > 0 then
- for j=1,#vons do
- nofresult = nofresult + 1
- result[nofresult] = vons[j]
- end
+ if type(authors) == "table" then
+ local n = #authors
+ if n == 0 then
+ return ""
end
- if surnames and #surnames > 0 then
- for j=1,#surnames do
- nofresult = nofresult + 1
- result[nofresult] = surnames[j]
+ local result = { }
+ local nofresult = 0
+ for i=1,n do
+ local author = authors[i]
+ local vons = author.vons
+ local surnames = author.surnames
+ local initials = author.initials
+ local juniors = author.juniors
+ if vons and #vons > 0 then
+ for j=1,#vons do
+ nofresult = nofresult + 1
+ result[nofresult] = vons[j]
+ end
end
- end
- if initials and #initials > 0 then
- initials = the_initials(initials)
- for j=1,#initials do
- nofresult = nofresult + 1
- result[nofresult] = initials[j]
+ if surnames and #surnames > 0 then
+ for j=1,#surnames do
+ nofresult = nofresult + 1
+ result[nofresult] = surnames[j]
+ end
end
- end
- if juniors and #juniors > 0 then
- for j=1,#juniors do
- nofresult = nofresult + 1
- result[nofresult] = juniors[j]
+ if initials and #initials > 0 then
+ initials = the_initials(initials)
+ for j=1,#initials do
+ nofresult = nofresult + 1
+ result[nofresult] = initials[j]
+ end
+ end
+ if juniors and #juniors > 0 then
+ for j=1,#juniors do
+ nofresult = nofresult + 1
+ result[nofresult] = juniors[j]
+ end
end
end
+ return concat(result," ")
+ else
+ return authors
end
- return concat(result," ")
end
authorhashers.normalinverted = authorhashers.normal
@@ -684,41 +725,45 @@ authorhashers.short = function(authors)
-- a short is a really dumb hardcoded kind of tag and we only support
-- this one because some users might expect it, not because it makes
-- sense
- local n = #authors
- if n == 0 then
- return "unk"
- elseif n == 1 then
- local surnames = authors[1].surnames
- if not surnames or #surnames == 0 then
- return "err"
- else
- local s = surnames[1]
- local c = lpegmatch(p_clean,s)
- if s ~= c then
- report_cite("name %a cleaned to %a for short construction",s,c)
- end
- return utfsub(c,1,3)
- end
- else
- local t = { }
- for i=1,n do
- if i > 3 then
- t[#t+1] = "+" -- indeed
- break
- end
- local surnames = authors[i].surnames
+ if type(authors) == "table" then
+ local n = #authors
+ if n == 0 then
+ return "unk"
+ elseif n == 1 then
+ local surnames = authors[1].surnames
if not surnames or #surnames == 0 then
- t[#t+1] = "?"
+ return "err"
else
local s = surnames[1]
local c = lpegmatch(p_clean,s)
if s ~= c then
report_cite("name %a cleaned to %a for short construction",s,c)
end
- t[#t+1] = utfsub(c,1,1)
+ return utfsub(c,1,3)
end
+ else
+ local t = { }
+ for i=1,n do
+ if i > 3 then
+ t[#t+1] = "+" -- indeed
+ break
+ end
+ local surnames = authors[i].surnames
+ if not surnames or #surnames == 0 then
+ t[#t+1] = "?"
+ else
+ local s = surnames[1]
+ local c = lpegmatch(p_clean,s)
+ if s ~= c then
+ report_cite("name %a cleaned to %a for short construction",s,c)
+ end
+ t[#t+1] = utfsub(c,1,1)
+ end
+ end
+ return concat(t)
end
- return concat(t)
+ else
+ return utfsub(authors,1,4)
end
end
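-- All hashers above now share the same guard: a parsed author field is a
-- table of author records, anything else is passed through (or truncated)
-- as a plain string. The shape of that guard in isolation, with made-up
-- sample records:

local concat = table.concat

local function surnamehash(authors)
    if type(authors) == "table" then
        local result = { }
        for i=1,#authors do
            local surnames = authors[i].surnames
            if surnames then
                for j=1,#surnames do
                    result[#result+1] = surnames[j]
                end
            end
        end
        return concat(result," ")
    else
        return authors -- already a string, use as-is
    end
end

print(surnamehash { { surnames = { "Knuth" } }, { surnames = { "Plass" } } })
print(surnamehash("some unparsed editor field"))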
diff --git a/tex/context/base/publ-imp-apa.mkvi b/tex/context/base/publ-imp-apa.mkvi
index 7cab4a131..fce9361f3 100644
--- a/tex/context/base/publ-imp-apa.mkvi
+++ b/tex/context/base/publ-imp-apa.mkvi
@@ -60,6 +60,7 @@
[\c!otherstext={,\nobreakspace\textellipsis\space},
\c!etallimit=7,
\c!etaldisplay=6,
+ \c!etaloption=last,
\c!authorconversion=invertedshort,
\c!separator:names:2={,\space}, % aka namesep - in this namespace
\c!separator:names:3={,\nobreakspace\textampersand\space}, % comma separated list
@@ -94,6 +95,8 @@
\definebtx
[apa:list:numbering:num]
[apa:list:numbering]
+ [\c!stopper={.},
+ \c!right={\space}]
\definebtx
[apa:list:numbering:yes]
@@ -102,10 +105,12 @@
\definebtx
[apa:list:numbering:short]
[apa:list:numbering:num]
+ [\c!stopper=]
\definebtx
- [apa:list:numbering:bib]
+ [apa:list:numbering:tag]
[apa:list:numbering:num]
+ [\c!stopper=]
% Next, we define a namespace for each category
@@ -294,13 +299,9 @@
\c!right={)}]
\definebtx
- [apa:cite:authorref]
- [apa:cite:authornum]
-
-\definebtx
[apa:cite:author:num] % todo
[apa:cite:authornum]
- [\c!left={[},
+ [\c!left={\space[},
\c!right={]}]
\definebtx
@@ -328,6 +329,7 @@
\c!separator:3={,\space\btxlabeltext{apa:and}\space}, % not \textampersand
\c!separator:4={\space\btxlabeltext{apa:and}\space}, % not \textampersand
\c!command={\language[\currentbtxlanguage]}, % BAH
+ \c!sorttype=none,
\c!style=\v!italic]
\definebtx
@@ -483,7 +485,6 @@
\setupbtxlabeltext
[fr]
[apa:and=et,
- apa:others={et al.},
apa:number={n\high{o}},
apa:edition={édition},
apa:Editor=Éditeur,
@@ -749,9 +750,10 @@
}
}
\endgroup
- \doif{\btxparameter{translate}}\v!yes {
+ % which namespace?
+ %\doif{\btxparameter{translate}}\v!yes {
\texdefinition{btx:apa:translated-title}{#title}
- }
+ %}
\stoptexdefinition
\starttexdefinition btx:apa:title
diff --git a/tex/context/base/publ-imp-aps.mkvi b/tex/context/base/publ-imp-aps.mkvi
index cd05fce7f..80df2867a 100644
--- a/tex/context/base/publ-imp-aps.mkvi
+++ b/tex/context/base/publ-imp-aps.mkvi
@@ -33,7 +33,7 @@
[aps]
[\c!default=default,
\c!specification=aps,
- \c!otherstext={\space\btxlabeltext{aps:others}},
+ \c!otherstext={\space{\it\btxlabeltext{aps:others}}},
\c!etallimit=10,
\c!etaldisplay=\btxparameter\c!etallimit,
%c!journalconversion=\v!normal,
@@ -47,7 +47,8 @@
\definebtxrendering
[aps]
- [\c!specification=aps]
+ [\c!specification=aps,
+ \c!sorttype=\v!default]
\setupbtxlist
[aps]
@@ -97,7 +98,7 @@
[aps:list:numbering:num]
\definebtx
- [aps:list:numbering:bib]
+ [aps:list:numbering:tag]
[aps:list:numbering:num]
%D In order to be able to get journals expanded (or normalized or abbreviated) you need
@@ -270,13 +271,9 @@
\c!right={)}]
\definebtx
- [aps:cite:authorref]
- [aps:cite:authornum]
-
-\definebtx
[aps:cite:author:num] % todo
[aps:cite:authornum]
- [\c!left={[},
+ [\c!left={\space[},
\c!right={]}]
\definebtx
@@ -300,6 +297,7 @@
[aps:cite:title]
[aps:cite]
[\c!command={\language[\currentbtxlanguage]}, % BAH
+ \c!sorttype=none,
\c!style=\v!italic]
\definebtx
@@ -447,7 +445,6 @@
\setupbtxlabeltext
[fr]
[aps:and=et,
- aps:others={et al.},
aps:number={n\high{o}},
aps:edition={édition},
aps:Editor=Éditeur,
diff --git a/tex/context/base/publ-imp-author.mkvi b/tex/context/base/publ-imp-author.mkvi
index 534d8ce0b..7f2c35fdd 100644
--- a/tex/context/base/publ-imp-author.mkvi
+++ b/tex/context/base/publ-imp-author.mkvi
@@ -172,9 +172,9 @@
\startsetups \s!btx:\s!list:\s!author:concat
\ifcase\currentbtxoverflow
\btxparameter{\c!separator:names:\number\currentbtxconcat}
- \else
+ \else\ifnum\currentbtxauthorindex>\plusone
\btxparameter{\c!separator:names:2}
- \fi
+ \fi\fi
\stopsetups
\startsetups \s!btx:\s!list:\s!author:others
diff --git a/tex/context/base/publ-imp-cite.mkvi b/tex/context/base/publ-imp-cite.mkvi
index 6b09eaf01..7ce9ea0da 100644
--- a/tex/context/base/publ-imp-cite.mkvi
+++ b/tex/context/base/publ-imp-cite.mkvi
@@ -206,19 +206,6 @@
\startsetups \s!btx:\s!cite:authornum
\fastsetup{\s!btx:\s!cite:normal}
\stopsetups
-\startsetups \s!btx:\s!cite:authorref
- % what a crap ... no one will ever understand this module .. it makes no
- % sense to have these tests, just let users set the right variant ..
- \doifelse{\btxparameter\c!alternative} {authoryear} {
- \fastsetup{\s!btx:\s!cite:authoryears}
- } {
- \doifelse {\btxparameter\c!alternative} {num} {
- \fastsetup{\s!btx:\s!cite:authornum}
- } {
- \fastsetup{\s!btx:\s!cite:author}
- }
- }
-\stopsetups
\startsetups \s!btx:\s!cite:num
\fastsetup{\s!btx:\s!cite:range}
\stopsetups
diff --git a/tex/context/base/publ-imp-default.mkvi b/tex/context/base/publ-imp-default.mkvi
index e51955c70..eead8a324 100644
--- a/tex/context/base/publ-imp-default.mkvi
+++ b/tex/context/base/publ-imp-default.mkvi
@@ -65,6 +65,26 @@
% List variants, some having specific settings:
\definebtx
+ [\s!default:\s!list:numbering]
+ [\s!default:\s!list]
+
+\definebtx
+ [\s!default:\s!list:numbering:num]
+ [\s!default:\s!list:numbering]
+
+\definebtx
+ [\s!default:\s!list:numbering:yes]
+ [\s!default:\s!list:numbering:num]
+
+\definebtx
+ [\s!default:\s!list:numbering:short]
+ [\s!default:\s!list:numbering:num]
+
+\definebtx
+ [\s!default:\s!list:numbering:tag]
+ [\s!default:\s!list:numbering:num]
+
+\definebtx
[\s!default:\s!list:author]
[\s!default:\s!list]
diff --git a/tex/context/base/publ-imp-list.mkvi b/tex/context/base/publ-imp-list.mkvi
index 61432486b..d34e4bc24 100644
--- a/tex/context/base/publ-imp-list.mkvi
+++ b/tex/context/base/publ-imp-list.mkvi
@@ -71,9 +71,9 @@
\btxstopstyleandcolor
\stopsetups
-\startsetups[\s!btx:\s!list:\s!numbering:bib]
- \btxstartstyleandcolor [\currentbtxspecification:\s!list:\s!numbering:bib]
- \btxusecommand [\currentbtxspecification:\s!list:\s!numbering:bib] {
+\startsetups[\s!btx:\s!list:\s!numbering:tag]
+ \btxstartstyleandcolor [\currentbtxspecification:\s!list:\s!numbering:tag]
+ \btxusecommand [\currentbtxspecification:\s!list:\s!numbering:tag] {
\fastsetup{\s!btx:\s!list:\s!numbering}
}
\btxstopstyleandcolor
diff --git a/tex/context/base/publ-ini.lua b/tex/context/base/publ-ini.lua
index 30d0c8908..5720c193a 100644
--- a/tex/context/base/publ-ini.lua
+++ b/tex/context/base/publ-ini.lua
@@ -1060,9 +1060,11 @@ do
local userdata = listentry.userdata
local btxspc = userdata and userdata.btxspc
if btxspc then
- -- this will become a specification entry
+ -- we could act on the 3rd arg returned by getcasted but in general any string will do
+ -- so we deal with it in the author hashers ... maybe some day ...
local author = getcasted(dataset,tag,field,specifications[btxspc])
- if type(author) == "table" then
+ local kind = type(author)
+ if kind == "table" or kind == "string" then
if u then
u = listentry.entries.text -- hm
else
@@ -1633,17 +1635,20 @@ do
end
end
+
+ -- tag | listindex | reference | userdata | dataindex
+
local methods = { }
lists.methods = methods
methods[v_dataset] = function(dataset,rendering,keyword)
- -- why only once unless criterium=all?
local current = datasets[dataset]
local luadata = current.luadata
local list = rendering.list
for tag, data in sortedhash(luadata) do
if not keyword or validkeyword(dataset,tag,keyword) then
- list[#list+1] = { tag, false, 0, false, false, data.index or 0}
+ local index = data.index or 0
+ list[#list+1] = { tag, index, 0, false, index }
end
end
end
@@ -1666,7 +1671,7 @@ do
local tag = u.btxref
if tag and (not keyword or validkeyword(dataset,tag,keyword)) then
local data = luadata[tag]
- list[#list+1] = { tag, listindex, 0, u, u.btxint, data and data.index or 0 }
+ list[#list+1] = { tag, listindex, 0, u, data and data.index or 0 }
end
end
end
@@ -1714,7 +1719,7 @@ do
l[#l+1] = u.btxint
else
local data = luadata[tag]
- local l = { tag, listindex, 0, u, u.btxint, data and data.index or 0 }
+ local l = { tag, listindex, 0, u, data and data.index or 0 }
list[#list+1] = l
traced[tag] = l
end
@@ -1722,7 +1727,7 @@ do
done[tag] = section
alldone[tag] = true
local data = luadata[tag]
- list[#list+1] = { tag, listindex, 0, u, u.btxint, data and data.index or 0 }
+ list[#list+1] = { tag, listindex, 0, u, data and data.index or 0 }
end
end
if tag then
@@ -2025,17 +2030,6 @@ do
if language then
ctx_btxsetlanguage(language)
end
- local bl = li[5]
- if bl and bl ~= "" then
- ctx_btxsetbacklink(bl)
- -- ctx_btxsetbacktrace(concat(li," ",5)) -- two numbers
- else
- -- nothing
- end
- local authorsuffix = detail.authorsuffix
- if authorsuffix then
- ctx_btxsetsuffix(authorsuffix)
- end
local userdata = li[4]
if userdata then
local b = userdata.btxbtx
@@ -2046,6 +2040,14 @@ do
if a then
ctx_btxsetafter(a)
end
+ local bl = userdata.btxint
+ if bl and bl ~= "" then
+ ctx_btxsetbacklink(bl)
+ end
+ end
+ local authorsuffix = detail.authorsuffix
+ if authorsuffix then
+ ctx_btxsetsuffix(authorsuffix)
end
rendering.userdata = userdata
if textmode then
@@ -2391,7 +2393,7 @@ do
tobemarked = specification.markentry and todo
--
if not found or #found == 0 then
- report("nothing found for %a",reference)
+ report("no entry %a found in dataset %a",reference,dataset)
elseif not setup then
report("invalid reference for %a",reference)
else
@@ -2816,8 +2818,10 @@ do
else
return false
end
- else
+ elseif ak and bk then
return ak < bk
+ else
+ return false
end
end
@@ -3109,7 +3113,14 @@ do
end
listvariants[v_yes] = listvariants.num
- listvariants.bib = listvariants.num
+
+ function listvariants.tag(dataset,block,tag,variant,listindex)
+ ctx_btxsetfirst(tag)
+ if trace_detail then
+ report("expanding %a list setup %a","tag",variant)
+ end
+ ctx_btxnumberingsetup(variant or "tag")
+ end
function listvariants.short(dataset,block,tag,variant,listindex)
local short = getdetail(dataset,tag,"shorthash")
@@ -3154,3 +3165,32 @@ do
end
end
+
+-- a helper
+
+do
+
+ -- local context = context
+ -- local lpegmatch = lpeg.match
+ local splitter = lpeg.tsplitat(":")
+
+ interfaces.implement {
+ name = "checkinterfacechain",
+ arguments = { "string", "string" },
+ actions = function(str,command)
+ local chain = lpegmatch(splitter,str)
+ if #chain > 0 then
+ local command = context[command]
+ local parent = ""
+ local child = chain[1]
+ command(child,parent)
+ for i=2,#chain do
+ parent = child
+ child = child .. ":" .. chain[i]
+ command(child,parent)
+ end
+ end
+ end
+ }
+
+end
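-- The helper walks a colon separated setup name and defines every ancestor
-- in the chain, so "apa:list:numbering" yields definitions for "apa",
-- "apa:list" and "apa:list:numbering", each parented to the previous one.
-- A plain Lua trace of that walk; gmatch stands in for the lpeg splitter
-- and print stands in for the \definebtx call:

local function checkchain(str,command)
    local chain = { }
    for part in str:gmatch("[^:]+") do
        chain[#chain+1] = part
    end
    if #chain > 0 then
        local parent = ""
        local child  = chain[1]
        command(child,parent)
        for i=2,#chain do
            parent = child
            child  = child .. ":" .. chain[i]
            command(child,parent)
        end
    end
end

checkchain("apa:list:numbering",function(child,parent)
    print(("define %q with parent %q"):format(child,parent))
end)
-- define "apa" with parent ""
-- define "apa:list" with parent "apa"
-- define "apa:list:numbering" with parent "apa:list"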
diff --git a/tex/context/base/publ-ini.mkiv b/tex/context/base/publ-ini.mkiv
index 7270b5f4d..a00712b10 100644
--- a/tex/context/base/publ-ini.mkiv
+++ b/tex/context/base/publ-ini.mkiv
@@ -14,6 +14,8 @@
% TODO: s! vs v! for default and neutral key/values
% todo: too many refs in list
+% todo: no need for all these %'s
+
% todo: tagging
% todo: we cannot use 'default' as this wipes metadata names (maybe no longer do that)
% todo: \v!cite => \s!cite
@@ -147,11 +149,41 @@
\installswitchcommandhandler \??btx {btx} \??btx
+% because we have lots of setups we provide a checker for sloppy users
+
+\unexpanded\def\btx_check_chain#1#2#3%
+ {\doifelsesomething{#3}
+ {\writestatus{btx #1}{defining\space"#2"\space as\space descendant\space of\space"#3"}% we're in definition regime (no space)
+ \definebtx[#2][#3]}
+ {\writestatus{btx #1}{defining\space"#2"}%
+ \definebtx[#2]}}
+
+% \unexpanded\def\btxcheckdefine#1#2{\doifelsecommandhandler\??btx{#1}\donothing{\btx_check_chain{define}{#1}{#2}}}
+% \unexpanded\def\btxchecksetup #1#2{\doifelsecommandhandler\??btx{#1}\donothing{\btx_check_chain {setup}{#1}{#2}}}
+
+\unexpanded\def\btxcheckdefine#1{\doifelsecommandhandler\??btx{#1}\gobbleoneargument{\btx_check_chain{define}{#1}}} % {#2}
+\unexpanded\def\btxchecksetup #1{\doifelsecommandhandler\??btx{#1}\gobbleoneargument{\btx_check_chain {setup}{#1}}} % {#2}
+
+% for the moment experimental:
+
+\unexpanded\def\btxenableautodefine
+ {\prependtoks
+ \clf_checkinterfacechain{\currentbtx}{btxcheckdefine}%
+ \to \everydefinebtx
+ \prependtoks
+ \ifnum\btxsetupmode=\doingrootsetupnamed
+ \clf_checkinterfacechain{\currentbtx}{btxchecksetup}%
+ \fi
+ \to \everysetupbtx
+ \let\btxenableautodefine\relax}
+
\appendtoks
\ifnum\btxsetupmode=\doingrootsetuproot
\publ_specification_set{\btxparameter\c!specification}%
\else\ifnum\btxsetupmode=\doingrootsetupnamed
- \publ_specification_set{\btxparameter\c!specification}%
+ \doifelsecommandhandler\??btx\currentbtx
+ {\publ_specification_set{\btxparameter\c!specification}}%
+ {}% maybe a warning
\fi\fi
\to \everysetupbtx
@@ -968,8 +1000,10 @@
\appendtoks
\edef\currentbtxnumbering{\btxrenderingparameter\c!numbering}%
- \edef\p_numbering{\btxrenderingparameter\c!numbering}% link to headnumber
- \ifx\p_numbering\v!no
+ \ifx\currentbtxnumbering\v!yes
+ \def\currentbtxnumbering{num}% convenient alias
+ \letbtxrenderingparameter\c!numbering\currentbtxnumbering
+ \else\ifx\currentbtxnumbering\v!no
\letlistparameter\c!headnumber\v!no
\let\currentbtxnumbering\empty
% \letlistparameter\c!textcommand\outdented % needed? we can use titlealign
@@ -978,7 +1012,7 @@
\letlistparameter\c!numbercommand\firstofoneargument % for the moment, no doubling needed
\else
\letlistparameter\c!headnumber\v!always
- \fi
+ \fi\fi
\let\currentlistmethod\s!btx
\to \everysetupbtxlistplacement
@@ -1015,6 +1049,7 @@
kind {list}%
etallimit {\btxparameter\c!etallimit}%
etaldisplay {\btxparameter\c!etaldisplay}%
+ etaloption {\btxparameter\c!etaloption}%
symbol {\btxparameter{\c!stopper:initials}}%
}%
\relax
@@ -1040,6 +1075,7 @@
kind {cite}%
etallimit {\btxparameter\c!etallimit}%
etaldisplay {\btxparameter\c!etaldisplay}%
+ etaloption {\btxparameter\c!etaloption}%
symbol {\btxparameter{\c!stopper:initials}}%
}%
\relax
diff --git a/tex/context/base/publ-sor.lua b/tex/context/base/publ-sor.lua
index c442e3953..e587d0ee9 100644
--- a/tex/context/base/publ-sor.lua
+++ b/tex/context/base/publ-sor.lua
@@ -26,6 +26,8 @@ local v_short = variables.short
local v_default = variables.default
local v_reference = variables.reference
local v_dataset = variables.dataset
+local v_list = variables.list
+local v_used = variables.used
local report = logs.reporter("publications","sorters")
@@ -225,70 +227,138 @@ local function sortsequence(dataset,list,sorttype)
end
--- index : order in dataset
--- order : order of citations
--- short : alphabetic + suffix
--- reference : order in list
--- default : automatic sorter
--- authoryear : sort order list
-
--- tag | listindex | 0 | u | u.btxint | data.index
-
-local sorters = {
- [v_short] = function(dataset,rendering,list) -- should we store it
- local shorts = rendering.shorts
- local function compare(a,b)
- local aa = a and a[1]
- local bb = b and b[1]
- if aa and bb then
- aa, bb = shorts[aa], shorts[bb]
- return aa and bb and aa < bb
- else
- return a[1] < b[1]
+-- tag | listindex | reference | userdata | dataindex
+
+-- short : short + tag index
+-- dataset : index + tag
+-- list : list + index
+-- reference : tag + index
+-- used : reference + dataset
+-- authoryear : complex sort
+
+local sorters = { }
+
+sorters[v_short] = function(dataset,rendering,list) -- should we store it
+ local shorts = rendering.shorts
+ local function compare(a,b)
+ if a and b then
+ local taga = a[1]
+ local tagb = b[1]
+ if taga and tagb then
+ local shorta = shorts[taga]
+ local shortb = shorts[tagb]
+ if shorta and shortb then
+ -- assumes ascii shorts ... no utf yet
+ return shorta < shortb
+ end
+ -- fall back on tag order
+ return taga < tagb
+ end
+ -- fall back on dataset order
+ local indexa = a[5]
+ local indexb = b[5]
+ if indexa and indexb then
+ return indexa < indexb
+ end
+ end
+ return false
+ end
+ sort(list,compare)
+end
+
+sorters[v_dataset] = function(dataset,rendering,list) -- dataset index
+ local function compare(a,b)
+ if a and b then
+ local indexa = a[5]
+ local indexb = b[5]
+ if indexa and indexb then
+ return indexa < indexb
+ end
+ local taga = a[1]
+ local tagb = b[1]
+ if taga and tagb then
+ return taga < tagb
end
end
- sort(list,compare)
- end,
- [v_reference] = function(dataset,rendering,list) -- tag
- local function compare(a,b)
- return a[1] < b[1]
+ return false
+ end
+ sort(list,compare)
+end
+
+sorters[v_list] = function(dataset,rendering,list) -- list index (normally redundant)
+ local function compare(a,b)
+ if a and b then
+ local lista = a[2]
+ local listb = b[2]
+ if lista and listb then
+ return lista < listb
+ end
+ local indexa = a[5]
+ local indexb = b[5]
+ if indexa and indexb then
+ return indexa < indexb
+ end
end
- sort(list,compare)
- end,
- [v_dataset] = function(dataset,rendering,list) -- dataset index
- local function compare(a,b)
- local aa = a and a[6]
- local bb = b and b[6]
- if aa and bb then
- return aa < bb
- else
- return a[1] < b[1]
+ return false
+ end
+ sort(list,compare)
+end
+
+sorters[v_reference] = function(dataset,rendering,list) -- tag
+ local function compare(a,b)
+ if a and b then
+ local taga = a[1]
+ local tagb = b[1]
+ if taga and tagb then
+ return taga < tagb
+ end
+ local indexa = a[5]
+ local indexb = b[5]
+ if indexa and indexb then
+ return indexa < indexb
end
end
- sort(list,compare)
- end,
- [v_default] = function(dataset,rendering,list,sorttype)
- if sorttype == "" or sorttype == v_default then -- listorder
- local function compare(a,b)
- return a[2] < b[2]
+ return false
+ end
+ sort(list,compare)
+end
+
+sorters[v_used] = function(dataset,rendering,list) -- tag
+ local function compare(a,b)
+ if a and b then
+ local referencea = a[2]
+ local referenceb = b[2]
+ if referencea and referenceb then
+ return referencea < referenceb
end
- sort(list,compare)
- else
- local valid = sortsequence(dataset,list,sorttype) -- field order
- if valid and #valid > 0 then
- -- hm, we have a complication here because a sortsequence doesn't know if there's a field
- -- so there is no real catch possible here .., anyway, we add a index as last entry when no
- -- one is set so that should be good enough (needs testing)
- for i=1,#valid do
- local v = valid[i]
- valid[i] = list[v.index]
- end
- return valid
+ local indexa = a[5]
+ local indexb = b[5]
+ if indexa and indexb then
+ return indexa < indexb
end
end
+ return false
end
-}
+ sort(list,compare)
+end
+
+sorters[v_default] = sorters[v_list]
+sorters[""] = sorters[v_list]
+
+local function anything(dataset,rendering,list,sorttype)
+ local valid = sortsequence(dataset,list,sorttype) -- field order
+ if valid and #valid > 0 then
+ -- hm, we have a complication here because a sortsequence doesn't know if there's a field
+ -- so there is no real catch possible here ... anyway, we add an index as last entry when no
+ -- one is set so that should be good enough (needs testing)
+ for i=1,#valid do
+ local v = valid[i]
+ valid[i] = list[v.index]
+ end
+ return valid
+ end
+end
-table.setmetatableindex(sorters,function(t,k) return t[v_default] end)
+table.setmetatableindex(sorters,function(t,k) return anything end)
publications.lists.sorters = sorters
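-- Each sorter above compares on its primary key and falls back to the
-- dataset index in slot 5 when that key is missing, returning false for
-- anything undecidable so table.sort stays happy; unknown sort types fall
-- through, via the metatable, to the field-sequence sorter. The comparator
-- pattern on its own, with dummy list entries in the new layout
-- { tag, listindex, reference, userdata, dataindex }:

local sort = table.sort

local list = {
    { "lamport1994", 1, 0, false, 3 },
    { "knuth1984",   2, 0, false, 7 },
    { false,         3, 0, false, 1 }, -- no tag: falls back to dataindex
}

local function bytag(a,b)
    if a and b then
        local taga, tagb = a[1], b[1]
        if taga and tagb then
            return taga < tagb
        end
        local indexa, indexb = a[5], b[5]
        if indexa and indexb then
            return indexa < indexb
        end
    end
    return false
end

sort(list,bytag)
for i=1,#list do print(list[i][1],list[i][5]) end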
diff --git a/tex/context/base/spac-ali.mkiv b/tex/context/base/spac-ali.mkiv
index 6686bce53..07d588ba7 100644
--- a/tex/context/base/spac-ali.mkiv
+++ b/tex/context/base/spac-ali.mkiv
@@ -207,6 +207,9 @@
\unexpanded\def\spac_align_set_stretch
{\emergencystretch\bodyfontsize}
+\unexpanded\def\spac_align_set_extreme_stretch
+ {\emergencystretch10\bodyfontsize}
+
% Vertical
\newconstant\c_spac_align_state_vertical
@@ -681,6 +684,7 @@
\setvalue{\??aligncommand\v!tolerant }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\spac_align_set_tolerant}}
\setvalue{\??aligncommand\v!verytolerant }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\spac_align_set_very_tolerant}}
\setvalue{\??aligncommand\v!stretch }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\spac_align_set_stretch}}
+\setvalue{\??aligncommand\v!extremestretch }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\spac_align_set_extreme_stretch}}
%D For Wolfgang:
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index b58500367..31873fcff 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index 2046da47a..3eee87cb2 100644
--- a/tex/context/base/status-lua.pdf
+++ b/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/tabl-ntb.mkiv b/tex/context/base/tabl-ntb.mkiv
index 2b6f3a3ec..fcf8ac312 100644
--- a/tex/context/base/tabl-ntb.mkiv
+++ b/tex/context/base/tabl-ntb.mkiv
@@ -235,7 +235,7 @@
\installcorenamespace{naturaltablehei}
\installcorenamespace{naturaltabledis}
\installcorenamespace{naturaltableaut}
-\installcorenamespace{naturaltablefwd} % forcedwidth
+%installcorenamespace{naturaltablefwd} % forcedwidth
\installcorenamespace{naturaltabletxt}
\installcorenamespace{naturaltablespn}
\installcorenamespace{naturaltableref}
@@ -1459,6 +1459,8 @@
\tabl_ntb_set_aut\c_tabl_ntb_col{\the\dimexpr\localwidth}%
\fi}
+\let\tabl_ntb_preroll\relax
+
\def\tabl_ntb_table_stop
{\forgetall % new, here see narrower-004.tex
%\setbox\scratchbox\hbox
@@ -1481,6 +1483,7 @@
\dorecurse\c_tabl_ntb_maximum_row
{\tabl_ntb_let_hei\recurselevel\maxdimen}%
\tabl_ntb_let_gal\zerocount
+\tabl_ntb_preroll\relax
\c_tabl_tbl_pass\plusone
\let\tabl_ntb_pass\tabl_ntb_pass_one
\let\tabl_ntb_cell_process\tabl_ntb_cell_process_a
@@ -1822,38 +1825,52 @@
\fi
\fi}
-\def\tabl_ntb_cell_process_a_check_span_two_yes
- {\iftightTBLcolspan
- \donefalse
- \else
- \ifnum\scratchcounter>\plusone
- \begingroup
- \edef\p_width{\naturaltablelocalparameter\c!width}%
- \csname\??naturaltablesqueeze\ifcsname\??naturaltablesqueeze\p_width\endcsname\p_width\fi\endcsname
- \ifdone
- \endgroup
- \edef\p_option{\naturaltablelocalparameter\c!option}%
- \ifx\p_option\v!tight\donefalse\else\donetrue\fi
- \else
- % a dimension
- \endgroup
- \donefalse
- \fi
- \else
- \edef\p_option{\naturaltablelocalparameter\c!option}%
- \ifx\p_option\v!tight\donefalse\else\donetrue\fi
- \fi
- \fi}
+% \def\tabl_ntb_cell_process_a_check_span_two_yes
+% {\iftightTBLcolspan
+% \donefalse
+% \else
+% \ifnum\scratchcounter>\plusone
+% \begingroup
+% \edef\p_width{\naturaltablelocalparameter\c!width}%
+% \csname\??naturaltablesqueeze\ifcsname\??naturaltablesqueeze\p_width\endcsname\p_width\fi\endcsname
+% \ifdone
+% \endgroup
+% \edef\p_option{\naturaltablelocalparameter\c!option}%
+% \ifx\p_option\v!tight\donefalse\else\donetrue\fi
+% \else
+% % a dimension
+% \endgroup
+% \donefalse
+% \fi
+% \else
+% \edef\p_option{\naturaltablelocalparameter\c!option}%
+% \ifx\p_option\v!tight\donefalse\else\donetrue\fi
+% \fi
+% \fi
+% \ifdone
+% \ifdim\tabl_ntb_get_wid\c_tabl_ntb_col<\wd\scratchbox
+% \tabl_ntb_set_wid\c_tabl_ntb_col{\the\wd\scratchbox}%
+% \fi
+% \fi}
+% \def\tabl_ntb_cell_process_a_check_span_two_nop
+% {\ifnum\scratchcounter>\plusone
+% \edef\p_width{\naturaltablelocalparameter\c!width}%
+% \csname\??naturaltablesqueeze\ifcsname\??naturaltablesqueeze\p_width\endcsname\p_width\fi\endcsname
+% \else
+% \donetrue
+% \fi
+% \ifdone
+% \ifdim\tabl_ntb_get_wid\c_tabl_ntb_col<\wd\scratchbox
+% \tabl_ntb_set_wid\c_tabl_ntb_col{\the\wd\scratchbox}%
+% \fi
+% \fi}
-% \let\tabl_ntb_cell_process_a_check_span_two_nop\donetrue
+\let\tabl_ntb_cell_process_a_check_span_two_yes\relax
\def\tabl_ntb_cell_process_a_check_span_two_nop
- {\ifnum\scratchcounter>\plusone
- \edef\p_width{\naturaltablelocalparameter\c!width}%
- \csname\??naturaltablesqueeze\ifcsname\??naturaltablesqueeze\p_width\endcsname\p_width\fi\endcsname
- \else
- \donetrue
+ {\ifdim\tabl_ntb_get_wid\c_tabl_ntb_col<\wd\scratchbox
+ \tabl_ntb_set_wid\c_tabl_ntb_col{\the\wd\scratchbox}%
\fi}
\unexpanded\def\tabl_ntb_cell_process_a#1#2[#3]#4% grouping added ! ! !
@@ -1878,11 +1895,6 @@
\tabl_ntb_spn_doifelse\c_tabl_ntb_col
\tabl_ntb_cell_process_a_check_span_two_yes
\tabl_ntb_cell_process_a_check_span_two_nop
- \ifdone
- \ifdim\tabl_ntb_get_wid\c_tabl_ntb_col<\wd\scratchbox
- \tabl_ntb_set_wid\c_tabl_ntb_col{\the\wd\scratchbox}%
- \fi
- \fi
\fi
\scratchcounter\numexpr\c_tabl_ntb_row+\plusone\relax
\scratchdimen\tabl_ntb_get_hei\scratchcounter\relax
diff --git a/tex/context/base/x-asciimath.lua b/tex/context/base/x-asciimath.lua
index 6030be86c..932c9a1b4 100644
--- a/tex/context/base/x-asciimath.lua
+++ b/tex/context/base/x-asciimath.lua
@@ -38,7 +38,7 @@ end
local type, rawget = type, rawget
local concat, insert, remove = table.concat, table.insert, table.remove
local rep, gmatch, gsub, find = string.rep, string.gmatch, string.gsub, string.find
-local utfchar = utf.char
+local utfchar, utfbyte = utf.char, utf.byte
local lpegmatch, patterns = lpeg.match, lpeg.patterns
local S, P, R, C, V, Cc, Ct, Cs, Carg = lpeg.S, lpeg.P, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Ct, lpeg.Cs, lpeg.Carg
@@ -140,6 +140,7 @@ local reserved = {
["times"] = { true, "×" },
["-:"] = { true, "÷" },
["@"] = { true, "∘" },
+ ["circ"] = { true, "∘" },
["o+"] = { true, "⊕" },
["ox"] = { true, "⊗" },
["o."] = { true, "⊙" },
@@ -700,8 +701,29 @@ local reserved = {
["\\frac"] = { true, "frac" },
+ -- now it gets real crazy, only these two:
+
+ ["&gt;"] = { true, ">" },
+ ["&lt;"] = { true, "<" },
+
}
+for k, v in next, characters.data do
+ local name = v.mathname
+ if name and not reserved[name] then
+ reserved[name] = { true, utfchar(k) }
+ end
+ local spec = v.mathspec
+-- if spec then
+-- for i=1,#spec do
+-- local name = spec[i].name
+-- if name and not reserved[name] then
+-- reserved[name] = { true, utfchar(k) }
+-- end
+-- end
+-- end
+end
+
local isbinary = {
["\\frac"] = true,
["\\root"] = true,
@@ -889,13 +911,13 @@ local p_spaces = patterns.whitespace
local p_utf_base = patterns.utf8character
local p_utf = C(p_utf_base)
-local p_entity = (P("&") * C((1-P(";"))^2) * P(";"))/ entities
+-- local p_entity = (P("&") * C((1-P(";"))^2) * P(";"))/ entities
-entities["gt"] = ">"
-entities["lt"] = "<"
-entities["amp"] = "&"
-entities["dquot"] = '"'
-entities["quot"] = "'"
+-- entities["gt"] = ">"
+-- entities["lt"] = "<"
+-- entities["amp"] = "&"
+-- entities["dquot"] = '"'
+-- entities["quot"] = "'"
local p_onechar = p_utf_base * P(-1)
@@ -934,8 +956,10 @@ for k, v in sortedhash(reserved) do
end
for k, v in next, entities do
- k_unicode[k] = v
- k_unicode["\\"..k] = v
+ if not k_unicode[k] then
+ k_unicode[k] = v
+ k_unicode["\\"..k] = v
+ end
end
if not find(k,"[^[a-zA-Z]+$]") then
@@ -1061,10 +1085,10 @@ local p_special =
-- open | close :: {: | :}
-local e_parser = Cs ( (
- p_entity +
- p_utf_base
-)^0 )
+-- local e_parser = Cs ( (
+-- p_entity +
+-- p_utf_base
+-- )^0 )
local u_parser = Cs ( (
@@ -1641,7 +1665,7 @@ local ctx_type = context and context.type or function() end
local ctx_inleft = context and context.inleft or function() end
local function convert(str,totex)
- local entified = lpegmatch(e_parser,str) or str -- when used in text
+ local entified = str -- lpegmatch(e_parser,str) or str -- when used in text
local unicoded = lpegmatch(u_parser,entified) or entified
local texcoded = collapse(lpegmatch(a_parser,unicoded))
if trace_mapping then
@@ -1795,7 +1819,7 @@ asciimath.cleanedup = cleanedup
local function convert(str)
if #str > 0 then
- local entified = lpegmatch(e_parser,str) or str -- when used in text
+ local entified = str -- lpegmatch(e_parser,str) or str -- when used in text
local unicoded = lpegmatch(u_parser,entified) or entified
if lpegmatch(p_onechar,unicoded) then
ctx_mathematics(unicoded)
@@ -2063,3 +2087,5 @@ end
function show.save(name)
table.save(name ~= "" and name or "dummy.lua",collected)
end
+
+-- inspect(sortedkeys(reserved))
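-- The loop above mines characters.data so that every character carrying a
-- mathname becomes an asciimath keyword mapped to its utf glyph, without
-- clobbering the hand-made entries. The same pattern against a tiny fake
-- data table (the real one is ConTeXt's character database; utf8.char needs
-- Lua 5.3+, the module itself uses LuaTeX's utf.char):

local utfchar = utf8.char

local reserved = {
    ["alpha"] = { true, "α" }, -- hand-made entry, must win
}

local fakedata = {
    [0x03B1] = { mathname = "alpha"  },
    [0x2135] = { mathname = "aleph"  },
    [0x2200] = { mathname = "forall" },
}

for k, v in next, fakedata do
    local name = v.mathname
    if name and not reserved[name] then
        reserved[name] = { true, utfchar(k) }
    end
end

for name, entry in next, reserved do
    print(name,entry[2]) -- aleph ℵ, forall ∀, alpha α (order not defined)
end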
diff --git a/tex/context/interface/cont-cs.xml b/tex/context/interface/cont-cs.xml
index afaacb709..c798ea4a9 100644
--- a/tex/context/interface/cont-cs.xml
+++ b/tex/context/interface/cont-cs.xml
@@ -7188,6 +7188,7 @@
<cd:constant type="tolerantni"/>
<cd:constant type="velmitolerantni"/>
<cd:constant type="natahnout"/>
+ <cd:constant type="extremestretch"/>
<cd:constant type="lefttoright"/>
<cd:constant type="righttoleft"/>
</cd:keywords>
diff --git a/tex/context/interface/cont-de.xml b/tex/context/interface/cont-de.xml
index e9771d07a..f40927b45 100644
--- a/tex/context/interface/cont-de.xml
+++ b/tex/context/interface/cont-de.xml
@@ -7188,6 +7188,7 @@
<cd:constant type="tolerant"/>
<cd:constant type="sehrtolerant"/>
<cd:constant type="strecken"/>
+ <cd:constant type="extremestretch"/>
<cd:constant type="lefttoright"/>
<cd:constant type="righttoleft"/>
</cd:keywords>
diff --git a/tex/context/interface/cont-en.xml b/tex/context/interface/cont-en.xml
index f00215596..651292e7f 100644
--- a/tex/context/interface/cont-en.xml
+++ b/tex/context/interface/cont-en.xml
@@ -7188,6 +7188,7 @@
<cd:constant type="tolerant"/>
<cd:constant type="verytolerant"/>
<cd:constant type="stretch"/>
+ <cd:constant type="extremestretch"/>
<cd:constant type="lefttoright"/>
<cd:constant type="righttoleft"/>
</cd:keywords>
diff --git a/tex/context/interface/cont-fr.xml b/tex/context/interface/cont-fr.xml
index 1cd7fa33a..30a89c059 100644
--- a/tex/context/interface/cont-fr.xml
+++ b/tex/context/interface/cont-fr.xml
@@ -7188,6 +7188,7 @@
<cd:constant type="tolerant"/>
<cd:constant type="trestolerant"/>
<cd:constant type="etire"/>
+ <cd:constant type="extremestretch"/>
<cd:constant type="lefttoright"/>
<cd:constant type="righttoleft"/>
</cd:keywords>
diff --git a/tex/context/interface/cont-it.xml b/tex/context/interface/cont-it.xml
index d9fe0ac97..32d19e87f 100644
--- a/tex/context/interface/cont-it.xml
+++ b/tex/context/interface/cont-it.xml
@@ -7188,6 +7188,7 @@
<cd:constant type="tollerante"/>
<cd:constant type="moltotollerante"/>
<cd:constant type="dilata"/>
+ <cd:constant type="extremestretch"/>
<cd:constant type="lefttoright"/>
<cd:constant type="righttoleft"/>
</cd:keywords>
diff --git a/tex/context/interface/cont-nl.xml b/tex/context/interface/cont-nl.xml
index 4bfad3798..6c49baccd 100644
--- a/tex/context/interface/cont-nl.xml
+++ b/tex/context/interface/cont-nl.xml
@@ -7188,6 +7188,7 @@
<cd:constant type="soepel"/>
<cd:constant type="zeersoepel"/>
<cd:constant type="rek"/>
+ <cd:constant type="extremestretch"/>
<cd:constant type="lefttoright"/>
<cd:constant type="righttoleft"/>
</cd:keywords>
diff --git a/tex/context/interface/cont-pe.xml b/tex/context/interface/cont-pe.xml
index 434a328e8..bf7d65fd0 100644
--- a/tex/context/interface/cont-pe.xml
+++ b/tex/context/interface/cont-pe.xml
@@ -7188,6 +7188,7 @@
<cd:constant type="بردبار"/>
<cd:constant type="خیلی‌بردبار"/>
<cd:constant type="بکش"/>
+ <cd:constant type="extremestretch"/>
<cd:constant type="lefttoright"/>
<cd:constant type="righttoleft"/>
</cd:keywords>
diff --git a/tex/context/interface/cont-ro.xml b/tex/context/interface/cont-ro.xml
index 31ef5d920..b81c3bc7e 100644
--- a/tex/context/interface/cont-ro.xml
+++ b/tex/context/interface/cont-ro.xml
@@ -7188,6 +7188,7 @@
<cd:constant type="tolerant"/>
<cd:constant type="foartetolerant"/>
<cd:constant type="dilatat"/>
+ <cd:constant type="extremestretch"/>
<cd:constant type="lefttoright"/>
<cd:constant type="righttoleft"/>
</cd:keywords>
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index c46c16874..006db5ad7 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 05/04/15 19:00:43
+-- merge date : 05/09/15 13:41:44
do -- begin closure to overcome local limits and interference