From 7b47df8cbaae8690f14d306c633b2b2db053f66a Mon Sep 17 00:00:00 2001 From: Marius Date: Wed, 8 Feb 2012 23:00:23 +0200 Subject: beta 2012.02.08 21:04 --- .../lexers/data/scite-context-data-context.lua | 4 +- .../data/scite/lexers/scite-context-lexer-cld.lua | 14 ++--- .../lexers/scite-context-lexer-lua-longstring.lua | 20 +++--- .../data/scite/lexers/scite-context-lexer-lua.lua | 48 +++++++------- .../data/scite/lexers/scite-context-lexer-mps.lua | 16 ++--- .../lexers/scite-context-lexer-pdf-object.lua | 15 +++-- .../scite/lexers/scite-context-lexer-pdf-xref.lua | 36 +++++------ .../data/scite/lexers/scite-context-lexer-pdf.lua | 21 +++---- .../data/scite/lexers/scite-context-lexer-tex.lua | 21 ++++--- .../data/scite/lexers/scite-context-lexer-txt.lua | 22 +++++-- .../scite/lexers/scite-context-lexer-xml-cdata.lua | 20 ++++-- .../lexers/scite-context-lexer-xml-comment.lua | 22 ++++--- .../data/scite/lexers/scite-context-lexer-xml.lua | 26 ++++---- context/data/scite/lexers/scite-context-lexer.lua | 28 +++++++-- .../scite/lexers/themes/scite-context-theme.lua | 7 ++- .../scite/scite-context-data-context.properties | 73 ++++++++++++---------- .../data/scite/scite-context-external.properties | 14 ++++- context/data/scite/scite-context.properties | 1 + context/data/scite/scite-ctx.properties | 2 +- 19 files changed, 240 insertions(+), 170 deletions(-) (limited to 'context/data') diff --git a/context/data/scite/lexers/data/scite-context-data-context.lua b/context/data/scite/lexers/data/scite-context-data-context.lua index bf7aa720f..1e5b4059c 100644 --- a/context/data/scite/lexers/data/scite-context-data-context.lua +++ b/context/data/scite/lexers/data/scite-context-data-context.lua @@ -1,4 +1,4 @@ return { - ["constants"]={ "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive", "plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred", "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint", "points", "halfpoint", "zeroskip", "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi", "normalpagebox", "endoflinetoken", "outputnewlinechar", "emptytoks", "empty", "undefined", "voidbox", "emptybox", "emptyvbox", "emptyhbox", "bigskipamount", "medskipamount", "smallskipamount", "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion", "luatexengine", "pdftexengine", "xetexengine", "unknownengine", "etexversion", "pdftexversion", "xetexversion", "xetexrevision", "activecatcode", "bgroup", "egroup", "endline", "conditionaltrue", "conditionalfalse", "attributeunsetvalue", "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle", "inicatcodes", "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes", "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes", "xmlcatcodes", "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode", "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode", "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode", "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode", "spaceasciicode", "hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode", "colonasciicode", "backslashasciicode", "circumflexasciicode", 
"underscoreasciicode", "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode", "lessthanasciicode", "morethanasciicode", "doublecommentsignal", "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode", "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode", "primeasciicode", "activemathcharcode", "activetabtoken", "activeformfeedtoken", "activeendoflinetoken", "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode", "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode", "vtopgroupcode", "aligngroupcode", "noaligngroupcode", "outputgroupcode", "mathgroupcode", "discretionarygroupcode", "insertgroupcode", "vcentergroupcode", "mathchoicegroupcode", "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "vadjustgroupcode", "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode", "adjustnodecode", "ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode", "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode", "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode", "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", "eofifcode", "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode", "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink", "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint", "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace", "mathsupdisplay", "mathsupnormal", "mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight", "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset", "doifmode", "doifmodeelse", "doifnotmode", "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifallmodeselse", "doifnotallmodes", "startenvironment", "stopenvironment", "environment", "startcomponent", "stopcomponent", "component", "startproduct", "stopproduct", "product", "startproject", "stopproject", "project", "starttext", "stoptext", "startdocument", "stopdocument", "documentvariable", "startmodule", "stopmodule", "usemodule", "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix", "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode", "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode", "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode" }, - ["helpers"]={ "startsetups", "stopsetups", "startxmlsetups", "stopxmlsetups", "startluasetups", "stopluasetups", "starttexsetups", "stoptexsetups", "startrawsetups", "stoprawsetups", "startlocalsetups", "stoplocalsetups", "starttexdefinition", "stoptexdefinition", "starttexcode", "stoptexcode", "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", "newlanguage", "newfamily", "newfam", "newhelp", "then", "donetrue", "donefalse", "htdp", "unvoidbox", "vfilll", "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha", "currentcatcodetable", "defaultcatcodetable", "catcodetablename", "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", "pushcatcodetable", "popcatcodetable", 
"restorecatcodes", "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", "ruledpenalty", "scratchcounter", "globalscratchcounter", "scratchdimen", "globalscratchdimen", "scratchskip", "globalscratchskip", "scratchmuskip", "globalscratchmuskip", "scratchtoks", "globalscratchtoks", "scratchbox", "globalscratchbox", "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", "scratchhsize", "scratchvsize", "scratchcounterone", "scratchcountertwo", "scratchcounterthree", "scratchdimenone", "scratchdimentwo", "scratchdimenthree", "scratchskipone", "scratchskiptwo", "scratchskipthree", "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree", "scratchtoksone", "scratchtokstwo", "scratchtoksthree", "scratchboxone", "scratchboxtwo", "scratchboxthree", "doif", "doifnot", "doifelse", "doifinset", "doifnotinset", "doifinsetelse", "doifnextcharelse", "doifnextoptionalelse", "doifnextbgroupelse", "doifnextparenthesiselse", "doiffastoptionalcheckelse", "doifundefinedelse", "doifdefinedelse", "doifundefined", "doifdefined", "doifelsevalue", "doifvalue", "doifnotvalue", "doifnothing", "doifsomething", "doifelsenothing", "doifsomethingelse", "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", "doifdimensionelse", "doifnumberelse", "doifcommonelse", "doifcommon", "doifnotcommon", "doifinstring", "doifnotinstring", "doifinstringelse", "doifassignmentelse", "tracingall", "tracingnone", "loggingall", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", "endgraf", "empty", "null", "space", "obeyspaces", "obeylines", "normalspace", "executeifdefined", "singleexpandafter", "doubleexpandafter", "tripleexpandafter", "dontleavehmode", "wait", "writestatus", "define", "redefine", "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "measure", "getvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", "resetvalue", "undefinevalue", "ignorevalue", "setuvalue", "setuevalue", "setugvalue", "setuxvalue", "globallet", "glet", "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect", "firstofoneargument", "firstoftwoarguments", "secondoftwoarguments", "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", "firstofoneunexpanded", "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", 
"gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments", "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "newconstant", "setnewconstant", "newconditional", "settrue", "setfalse", "setconstant", "newmacro", "setnewmacro", "newfraction", "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", "modulonumber", "dividenumber", "getfirstcharacter", "doiffirstcharelse", "startnointerference", "stopnointerference", "strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd" }, + ["constants"]={ "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive", "plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred", "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint", "points", "halfpoint", "zeroskip", "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi", "normalpagebox", "endoflinetoken", "outputnewlinechar", "emptytoks", "empty", "undefined", "voidbox", "emptybox", "emptyvbox", "emptyhbox", "bigskipamount", "medskipamount", "smallskipamount", "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion", "luatexengine", "pdftexengine", "xetexengine", "unknownengine", "etexversion", "pdftexversion", "xetexversion", "xetexrevision", "activecatcode", "bgroup", "egroup", "endline", "conditionaltrue", "conditionalfalse", "attributeunsetvalue", "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle", "inicatcodes", "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes", "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes", "xmlcatcodes", "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode", "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode", "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode", "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode", "spaceasciicode", "hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode", "colonasciicode", "backslashasciicode", "circumflexasciicode", "underscoreasciicode", "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode", "lessthanasciicode", "morethanasciicode", "doublecommentsignal", "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode", "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode", "primeasciicode", "activemathcharcode", "activetabtoken", "activeformfeedtoken", "activeendoflinetoken", "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode", "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode", "vtopgroupcode", "aligngroupcode", 
"noaligngroupcode", "outputgroupcode", "mathgroupcode", "discretionarygroupcode", "insertgroupcode", "vcentergroupcode", "mathchoicegroupcode", "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "vadjustgroupcode", "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode", "adjustnodecode", "ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode", "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode", "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode", "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", "eofifcode", "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode", "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink", "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint", "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace", "mathsupdisplay", "mathsupnormal", "mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight", "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset", "doifmode", "doifmodeelse", "doifnotmode", "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifallmodeselse", "doifnotallmodes", "startenvironment", "stopenvironment", "environment", "startcomponent", "stopcomponent", "component", "startproduct", "stopproduct", "product", "startproject", "stopproject", "project", "starttext", "stoptext", "startdocument", "stopdocument", "documentvariable", "startmodule", "stopmodule", "usemodule", "enablemode", "disablemode", "preventmode", "pushmode", "popmode", "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix", "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode", "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode", "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode", "constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument", "continueifinputfile" }, + ["helpers"]={ "startsetups", "stopsetups", "startxmlsetups", "stopxmlsetups", "startluasetups", "stopluasetups", "starttexsetups", "stoptexsetups", "startrawsetups", "stoprawsetups", "startlocalsetups", "stoplocalsetups", "starttexdefinition", "stoptexdefinition", "starttexcode", "stoptexcode", "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "newmode", "setmode", "resetmode", "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode", "booleanmodevalue", "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif", "newlanguage", "newfamily", "newfam", "newhelp", "then", "donothing", "dontcomplain", "donetrue", "donefalse", "htdp", "unvoidbox", "vfilll", "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha", "currentcatcodetable", "defaultcatcodetable", "catcodetablename", "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable", "pushcatcodetable", "popcatcodetable", "restorecatcodes", "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand", "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg", 
"ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg", "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg", "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter", "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern", "ruledhglue", "ruledvglue", "normalhglue", "normalvglue", "ruledpenalty", "scratchcounter", "globalscratchcounter", "scratchdimen", "globalscratchdimen", "scratchskip", "globalscratchskip", "scratchmuskip", "globalscratchmuskip", "scratchtoks", "globalscratchtoks", "scratchbox", "globalscratchbox", "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs", "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance", "scratchhsize", "scratchvsize", "scratchcounterone", "scratchcountertwo", "scratchcounterthree", "scratchdimenone", "scratchdimentwo", "scratchdimenthree", "scratchskipone", "scratchskiptwo", "scratchskipthree", "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree", "scratchtoksone", "scratchtokstwo", "scratchtoksthree", "scratchboxone", "scratchboxtwo", "scratchboxthree", "doif", "doifnot", "doifelse", "doifinset", "doifnotinset", "doifinsetelse", "doifnextcharelse", "doifnextoptionalelse", "doifnextbgroupelse", "doifnextparenthesiselse", "doiffastoptionalcheckelse", "doifundefinedelse", "doifdefinedelse", "doifundefined", "doifdefined", "doifelsevalue", "doifvalue", "doifnotvalue", "doifnothing", "doifsomething", "doifelsenothing", "doifsomethingelse", "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing", "doifdimensionelse", "doifnumberelse", "doifcommonelse", "doifcommon", "doifnotcommon", "doifinstring", "doifnotinstring", "doifinstringelse", "doifassignmentelse", "tracingall", "tracingnone", "loggingall", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to", "endgraf", "empty", "null", "space", "obeyspaces", "obeylines", "normalspace", "executeifdefined", "singleexpandafter", "doubleexpandafter", "tripleexpandafter", "dontleavehmode", "removelastspace", "removeunwantedspaces", "wait", "writestatus", "define", "redefine", "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "measure", "getvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue", "resetvalue", "undefinevalue", "ignorevalue", "setuvalue", "setuevalue", "setugvalue", "setuxvalue", "globallet", "glet", "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist", "processaction", "processallactions", "processfirstactioninset", "processallactionsinset", "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect", "firstofoneargument", "firstoftwoarguments", "secondoftwoarguments", "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments", "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments", "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments", "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments", "firstofoneunexpanded", "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", 
"gobbleninearguments", "gobbletenarguments", "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals", "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "newconstant", "setnewconstant", "newconditional", "settrue", "setfalse", "setconstant", "newmacro", "setnewmacro", "newfraction", "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty", "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty", "nopdfcompression", "maximumpdfcompression", "normalpdfcompression", "modulonumber", "dividenumber", "getfirstcharacter", "doiffirstcharelse", "startnointerference", "stopnointerference", "strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "begstrut", "endstrut" }, } \ No newline at end of file diff --git a/context/data/scite/lexers/scite-context-lexer-cld.lua b/context/data/scite/lexers/scite-context-lexer-cld.lua index 642c469a9..9623e2efe 100644 --- a/context/data/scite/lexers/scite-context-lexer-cld.lua +++ b/context/data/scite/lexers/scite-context-lexer-cld.lua @@ -9,14 +9,12 @@ local info = { local lexer = lexer local token = lexer.token -module(...) - -local cldlexer = _M +local cldlexer = { _NAME = "cld" } local lualexer = lexer.load('scite-context-lexer-lua') -_rules = lualexer._rules_cld -_tokenstyles = lualexer._tokenstyles -_foldsymbols = lualexer._foldsymbols -_directives = lualexer._directives +cldlexer._rules = lualexer._rules_cld +cldlexer._tokenstyles = lualexer._tokenstyles +cldlexer._foldsymbols = lualexer._foldsymbols +cldlexer._directives = lualexer._directives --- _rules[1] = { "whitespace", token(cldlexer.WHITESPACE, lexer.space^1) } +return cldlexer diff --git a/context/data/scite/lexers/scite-context-lexer-lua-longstring.lua b/context/data/scite/lexers/scite-context-lexer-lua-longstring.lua index dd58dfe3b..6cc79aeb9 100644 --- a/context/data/scite/lexers/scite-context-lexer-lua-longstring.lua +++ b/context/data/scite/lexers/scite-context-lexer-lua-longstring.lua @@ -2,21 +2,21 @@ local lexer = lexer local token = lexer.token local P = lpeg.P -module(...) +local stringlexer = { _NAME = "string" } -local stringlexer = _M +local whitespace = lexer.WHITESPACE -local whitespace = stringlexer.WHITESPACE -- triggers states +local space = lexer.space +local nospace = 1 - space -local space = lexer.space -local nospace = 1 - space +local p_spaces = token(whitespace, space ^1) +local p_string = token("string", nospace^1) -local p_spaces = token(whitespace, space ^1) -local p_string = token("string", nospace^1) - -_rules = { +stringlexer._rules = { { "whitespace", p_spaces }, { "string", p_string }, } -_tokenstyles = lexer.context.styleset +stringlexer._tokenstyles = lexer.context.styleset + +return stringlexer diff --git a/context/data/scite/lexers/scite-context-lexer-lua.lua b/context/data/scite/lexers/scite-context-lexer-lua.lua index 278c6d72f..55bc30999 100644 --- a/context/data/scite/lexers/scite-context-lexer-lua.lua +++ b/context/data/scite/lexers/scite-context-lexer-lua.lua @@ -6,22 +6,24 @@ local info = { license = "see context related readme files", } +if not lexer._CONTEXTEXTENSIONS then dofile(_LEXERHOME .. 
"/scite-context-lexer.lua") end + local lexer = lexer local token, style, colors, exact_match, no_style = lexer.token, lexer.style, lexer.colors, lexer.exact_match, lexer.style_nothing local P, R, S, C, Cg, Cb, Cs, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cg, lpeg.Cb, lpeg.Cs, lpeg.Cmt local match, find = string.match, string.find local setmetatable = setmetatable -local global = _G -- beware: all multiline is messy, so even if it's no lexer, it should be an embedded lexer -module(...) +local lualexer = { _NAME = "lua" } +local stringlexer = lexer.load("scite-context-lexer-lua-longstring") -local lualexer = _M +local whitespace = lexer.WHITESPACE -_directives = { } -- communication channel +local directives = { } -- communication channel --- this will be eextended +-- this will be extended local keywords = { 'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', -- 'goto', @@ -91,8 +93,6 @@ local longcomment = Cmt(#('[[' + ('[' * C(equals) * '[')), function(input,index, return stop and stop + 1 or #input + 1 end) -local whitespace = lualexer.WHITESPACE -- triggers states - local space = lexer.space -- S(" \n\r\t\f\v") local any = lexer.any @@ -117,18 +117,17 @@ local shortstring = token("quote", dquote) * token("string", (escaped + (1-squote))^0) * token("quote", squote) -local longstring = token("quote", longonestart) - * token("string", longonestring) - * token("quote", longonestop) - + token("quote", longtwostart) - * token("string", longtwostring) - * token("quote", longtwostop) +----- longstring = token("quote", longonestart) +----- * token("string", longonestring) +----- * token("quote", longonestop) +----- + token("quote", longtwostart) +----- * token("string", longtwostring) +----- * token("quote", longtwostop) local string = shortstring --- + longstring +----- + longstring - local longstringlexer = lexer.load("scite-context-lexer-lua-longstring") - lexer.embed_lexer(lualexer, longstringlexer, token("quote",longtwostart), token("string",longtwostring_body) * token("quote",longtwostring_end)) +lexer.embed_lexer(lualexer, stringlexer, token("quote",longtwostart), token("string",longtwostring_body) * token("quote",longtwostring_end)) local integer = P('-')^-1 * (lexer.hex_num + lexer.dec_num) local number = token("number", lexer.float + integer) @@ -160,7 +159,7 @@ local csname = token("user", exact_match(csnames )) + ( optionalspace * token("special", P(".")) * optionalspace * token("user", validword) )^1 ) -_rules = { +lualexer._rules = { { 'whitespace', spacing }, { 'keyword', keyword }, { 'function', builtin }, @@ -178,9 +177,9 @@ _rules = { { 'rest', rest }, } -_tokenstyles = lexer.context.styleset +lualexer._tokenstyles = lexer.context.styleset -_foldsymbols = { +lualexer._foldsymbols = { _patterns = { '%l+', -- '[%({%)}%[%]]', @@ -212,7 +211,7 @@ local cstoken = R("az","AZ","\127\255") + S("@!?_") local texcsname = P("\\") * cstoken^1 local commentline = P('%') * (1-S("\n\r"))^0 -local texcomment = token('comment', Cmt(commentline, function() return _directives.cld_inline end)) +local texcomment = token('comment', Cmt(commentline, function() return directives.cld_inline end)) local longthreestart = P("\\!!bs") local longthreestop = P("\\!!es") @@ -229,7 +228,12 @@ local texcommand = token("warning", texcsname) -- * (texcommand + token("string",P(1-texcommand-longthreestop)^1) - longthreestop)^0 -- we match long non-\cs sequences -- * token("quote", longthreestop) -_rules_cld = { +-- local whitespace = "whitespace" +-- local spacing = token(whitespace, 
space^1) + +lualexer._directives = directives + +lualexer._rules_cld = { { 'whitespace', spacing }, { 'texstring', texstring }, { 'texcomment', texcomment }, @@ -246,3 +250,5 @@ _rules_cld = { { 'operator', operator }, { 'rest', rest }, } + +return lualexer diff --git a/context/data/scite/lexers/scite-context-lexer-mps.lua b/context/data/scite/lexers/scite-context-lexer-mps.lua index 22338b351..00cc7f8ba 100644 --- a/context/data/scite/lexers/scite-context-lexer-mps.lua +++ b/context/data/scite/lexers/scite-context-lexer-mps.lua @@ -6,15 +6,17 @@ local info = { license = "see context related readme files", } +if not lexer._CONTEXTEXTENSIONS then dofile(_LEXERHOME .. "/scite-context-lexer.lua") end + local lexer = lexer local global, string, table, lpeg = _G, string, table, lpeg local token, exact_match = lexer.token, lexer.exact_match local P, R, S, V, C, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cmt local type = type -module(...) +local metafunlexer = { _NAME = "metafun" } -local metafunlexer = _M +local whitespace = lexer.WHITESPACE local context = lexer.context @@ -65,8 +67,6 @@ do end -local whitespace = metafunlexer.WHITESPACE -- triggers states - local space = lexer.space -- S(" \n\r\t\f\v") local any = lexer.any @@ -99,7 +99,7 @@ local special = token('special', S("#()[]{}<>=:\"")) -- or else := <> etc s local texlike = token('string', P("\\") * cstokentex^1) local extra = token('extra', S("`~%^&_-+*/\'|\\")) -_rules = { +metafunlexer._rules = { { 'whitespace', spacing }, { 'comment', comment }, { 'internal', internal }, @@ -118,9 +118,9 @@ _rules = { { 'rest', rest }, } -_tokenstyles = context.styleset +metafunlexer._tokenstyles = context.styleset -_foldsymbols = { +metafunlexer._foldsymbols = { _patterns = { "%l+", }, @@ -140,3 +140,5 @@ _foldsymbols = { ["endfor"] = -1, } } + +return metafunlexer diff --git a/context/data/scite/lexers/scite-context-lexer-pdf-object.lua b/context/data/scite/lexers/scite-context-lexer-pdf-object.lua index 1de006813..ec950c26c 100644 --- a/context/data/scite/lexers/scite-context-lexer-pdf-object.lua +++ b/context/data/scite/lexers/scite-context-lexer-pdf-object.lua @@ -9,17 +9,14 @@ local info = { local lexer = lexer local token = lexer.token local P, R, S, C, V = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.V -local global = _G -module(...) 
+local pdfobjectlexer = { _NAME = "pdfobject" } -local objectlexer = _M +local whitespace = lexer.WHITESPACE -- triggers states local context = lexer.context local patterns = context.patterns -local whitespace = objectlexer.WHITESPACE -- triggers states - local space = lexer.space local somespace = space^1 @@ -103,13 +100,15 @@ local t_object = { "object", -- weird that we need to catch the end her whatever = V("dictionary") + V("array") + constant + reference + string + unicode + number + whatsit, } -_shared = { +pdfobjectlexer._shared = { dictionary = t_dictionary, } -_rules = { +pdfobjectlexer._rules = { { 'whitespace', t_spacing }, { 'object', t_object }, } -_tokenstyles = context.styleset +pdfobjectlexer._tokenstyles = context.styleset + +return pdfobjectlexer diff --git a/context/data/scite/lexers/scite-context-lexer-pdf-xref.lua b/context/data/scite/lexers/scite-context-lexer-pdf-xref.lua index 8988fbbb4..9fd6df992 100644 --- a/context/data/scite/lexers/scite-context-lexer-pdf-xref.lua +++ b/context/data/scite/lexers/scite-context-lexer-pdf-xref.lua @@ -1,6 +1,6 @@ local info = { version = 1.002, - comment = "scintilla lpeg lexer for pdf", + comment = "scintilla lpeg lexer for pdf xref", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files", @@ -9,33 +9,31 @@ local info = { local lexer = lexer local token = lexer.token local P = lpeg.P -local global = _G -module(...) +local pdfxreflexer = { _NAME = "pdfxref" } +local pdfobjectlexer = lexer.load("scite-context-lexer-pdf-object") -local pdflexer = _M -local objectlexer = lexer.load("scite-context-lexer-pdf-object") +local context = lexer.context +local patterns = context.patterns -local context = lexer.context -local patterns = context.patterns +local whitespace = lexer.WHITESPACE -- triggers states -local whitespace = pdflexer.WHITESPACE -- triggers states +local spacing = patterns.spacing -local space = patterns.space -local spacing = patterns.spacing +local t_spacing = token(whitespace, spacing) -local t_spacing = token(whitespace, spacing) +local p_trailer = P("trailer") -local p_trailer = P("trailer") +local t_xref = token("default", (1-p_trailer)^1) + * token("keyword", p_trailer) + * t_spacing + * pdfobjectlexer._shared.dictionary -local t_xref = token("default", (1-p_trailer)^1) - * token("keyword", p_trailer) - * t_spacing - * objectlexer._shared.dictionary - -_rules = { +pdfxreflexer._rules = { { 'whitespace', t_spacing }, { 'xref', t_xref }, } -_tokenstyles = context.styleset +pdfxreflexer._tokenstyles = context.styleset + +return pdfxreflexer diff --git a/context/data/scite/lexers/scite-context-lexer-pdf.lua b/context/data/scite/lexers/scite-context-lexer-pdf.lua index ebc3fba4f..bddcc282d 100644 --- a/context/data/scite/lexers/scite-context-lexer-pdf.lua +++ b/context/data/scite/lexers/scite-context-lexer-pdf.lua @@ -6,22 +6,21 @@ local info = { license = "see context related readme files", } +if not lexer._CONTEXTEXTENSIONS then dofile(_LEXERHOME .. "/scite-context-lexer.lua") end + local lexer = lexer local token = lexer.token local P, R, S = lpeg.P, lpeg.R, lpeg.S -local global = _G -module(...) 
+local pdflexer = { "pdf" } +local pdfobjectlexer = lexer.load("scite-context-lexer-pdf-object") +local pdfxreflexer = lexer.load("scite-context-lexer-pdf-xref") -local pdflexer = _M -local objectlexer = lexer.load("scite-context-lexer-pdf-object") -local xreflexer = lexer.load("scite-context-lexer-pdf-xref") +local whitespace = lexer.WHITESPACE -- triggers states local context = lexer.context local patterns = context.patterns -local whitespace = pdflexer.WHITESPACE -- triggers states - local space = patterns.space local spacing = patterns.spacing local nospacing = patterns.nospacing @@ -51,13 +50,13 @@ local t_closeobject = token("keyword", p_endobj) local t_openxref = token("keyword", p_xref) local t_closexref = token("keyword", p_startxref) -lexer.embed_lexer(pdflexer, objectlexer, t_openobject, t_closeobject) -lexer.embed_lexer(pdflexer, xreflexer, t_openxref, t_closexref) +lexer.embed_lexer(pdflexer, pdfobjectlexer, t_openobject, t_closeobject) +lexer.embed_lexer(pdflexer, pdfxreflexer, t_openxref, t_closexref) -_rules = { +pdflexer._rules = { { 'whitespace', t_spacing }, { 'comment', t_comment }, { 'rest', t_rest }, } -_tokenstyles = context.styleset +pdflexer._tokenstyles = context.styleset diff --git a/context/data/scite/lexers/scite-context-lexer-tex.lua b/context/data/scite/lexers/scite-context-lexer-tex.lua index f0929625c..812dfbb05 100644 --- a/context/data/scite/lexers/scite-context-lexer-tex.lua +++ b/context/data/scite/lexers/scite-context-lexer-tex.lua @@ -25,13 +25,16 @@ local info = { -- local interface = lexer.get_property("keywordclass.macros.context.en","") -- it seems that whitespace triggers the lexer when embedding happens, but this - -- is quite fragile due to duplicate styles + -- is quite fragile due to duplicate styles .. lexer.WHITESPACE is a number + -- (initially) -- this lexer does not care about other macro packages (one can of course add a fake -- interface but it's not on the agenda) ]]-- +if not lexer._CONTEXTEXTENSIONS then dofile(_LEXERHOME .. "/scite-context-lexer.lua") end + local lexer = lexer local global, string, table, lpeg = _G, string, table, lpeg local token, exact_match = lexer.token, lexer.exact_match @@ -39,12 +42,14 @@ local P, R, S, V, C, Cmt, Cp, Cc, Ct = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, l local type, next = type, next local find, match, lower = string.find, string.match, string.lower -module(...) +-- module(...) -local contextlexer = _M +local contextlexer = { _NAME = "context" } local cldlexer = lexer.load('scite-context-lexer-cld') local mpslexer = lexer.load('scite-context-lexer-mps') +-- local cldlexer = lexer.load('scite-context-lexer-lua') -- test + local commands = { en = { } } local primitives = { } local helpers = { } @@ -184,7 +189,7 @@ end) local commentline = P('%') * (1-S("\n\r"))^0 local endline = S("\n\r")^1 -local whitespace = contextlexer.WHITESPACE -- triggers states +local whitespace = lexer.WHITESPACE local space = lexer.space -- S(" \n\r\t\f\v") local any = lexer.any @@ -427,7 +432,7 @@ lexer.embed_lexer(contextlexer, mpslexer, startmetafuncode, stopmetafuncode) -- Watch the text grabber, after all, we're talking mostly of text (beware, -- no punctuation here as it can be special. We might go for utf here. 
-_rules = { +contextlexer._rules = { { "whitespace", spacing }, { "preamble", preamble }, { "word", word }, @@ -450,14 +455,14 @@ _rules = { { "rest", rest }, } -_tokenstyles = context.styleset +contextlexer._tokenstyles = context.styleset local folds = { ["\\start"] = 1, ["\\stop" ] = -1, ["\\begin"] = 1, ["\\end" ] = -1, } -_foldsymbols = { +contextlexer._foldsymbols = { _patterns = { "\\start", "\\stop", -- regular environments "\\begin", "\\end", -- (moveable) blocks @@ -468,3 +473,5 @@ _foldsymbols = { ["user"] = folds, -- csname ["grouping"] = folds, } + +return contextlexer diff --git a/context/data/scite/lexers/scite-context-lexer-txt.lua b/context/data/scite/lexers/scite-context-lexer-txt.lua index 07dff2970..d92be8ed7 100644 --- a/context/data/scite/lexers/scite-context-lexer-txt.lua +++ b/context/data/scite/lexers/scite-context-lexer-txt.lua @@ -1,15 +1,26 @@ +local info = { + version = 1.002, + comment = "scintilla lpeg lexer for plain text (with spell checking)", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + +if not lexer._CONTEXTEXTENSIONS then dofile(_LEXERHOME .. "/scite-context-lexer.lua") end + local lexer = lexer local token = lexer.token local P, S, Cmt = lpeg.P, lpeg.S, lpeg.Cmt local find, match = string.find, string.match -module(...) +-- local textlexer = (_VERSION == "Lua 5.1" and (module(...) or true) and _M) or { } +-- (_VERSION == "Lua 5.1" and (module(...) or true) and _M) or { } -local textlexer = _M +local textlexer = { _NAME = "text" } local context = lexer.context -local whitespace = textlexer.WHITESPACE -- triggers states +local whitespace = lexer.WHITESPACE local space = lexer.space local any = lexer.any @@ -57,7 +68,7 @@ local t_rest = local t_spacing = token(whitespace, space^1) -_rules = { +textlexer._rules = { { "whitespace", t_spacing }, { "preamble", t_preamble }, { "word", t_word }, -- words >= 3 @@ -65,5 +76,6 @@ _rules = { { "rest", t_rest }, } -_tokenstyles = lexer.context.styleset +textlexer._tokenstyles = lexer.context.styleset +return textlexer diff --git a/context/data/scite/lexers/scite-context-lexer-xml-cdata.lua b/context/data/scite/lexers/scite-context-lexer-xml-cdata.lua index 71826099c..511465b01 100644 --- a/context/data/scite/lexers/scite-context-lexer-xml-cdata.lua +++ b/context/data/scite/lexers/scite-context-lexer-xml-cdata.lua @@ -1,12 +1,18 @@ +local info = { + version = 1.002, + comment = "scintilla lpeg lexer for xml cdata", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + local lexer = lexer local token = lexer.token local P = lpeg.P -module(...) 
+local xmlcdatalexer = { _NAME = "xmlcdata" } -local commentlexer = _M - -local whitespace = commentlexer.WHITESPACE -- triggers states +local whitespace = lexer.WHITESPACE -- triggers states local space = lexer.space local nospace = 1 - space - P("]]>") @@ -14,9 +20,11 @@ local nospace = 1 - space - P("]]>") local p_spaces = token(whitespace, space ^1) local p_cdata = token("comment", nospace^1) -_rules = { +xmlcdatalexer._rules = { { "whitespace", p_spaces }, { "cdata", p_cdata }, } -_tokenstyles = lexer.context.styleset +xmlcdatalexer._tokenstyles = lexer.context.styleset + +return xmlcdatalexer diff --git a/context/data/scite/lexers/scite-context-lexer-xml-comment.lua b/context/data/scite/lexers/scite-context-lexer-xml-comment.lua index 2d9ce66bd..151270091 100644 --- a/context/data/scite/lexers/scite-context-lexer-xml-comment.lua +++ b/context/data/scite/lexers/scite-context-lexer-xml-comment.lua @@ -1,12 +1,18 @@ +local info = { + version = 1.002, + comment = "scintilla lpeg lexer for xml comments", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files", +} + local lexer = lexer local token = lexer.token local P = lpeg.P -module(...) +local xmlcommentlexer = { _NAME = "xmlcomment" } -local commentlexer = _M - -local whitespace = commentlexer.WHITESPACE -- triggers states +local whitespace = lexer.WHITESPACE local space = lexer.space local nospace = 1 - space - P("-->") @@ -14,14 +20,14 @@ local nospace = 1 - space - P("-->") local p_spaces = token(whitespace, space ^1) local p_comment = token("comment", nospace^1) -_rules = { +xmlcommentlexer._rules = { { "whitespace", p_spaces }, { "comment", p_comment }, } -_tokenstyles = lexer.context.styleset +xmlcommentlexer._tokenstyles = lexer.context.styleset -_foldsymbols = { +xmlcommentlexer._foldsymbols = { _patterns = { "<%!%-%-", "%-%->", -- comments }, @@ -29,3 +35,5 @@ _foldsymbols = { ["" ] = -1, } } + +return xmlcommentlexer diff --git a/context/data/scite/lexers/scite-context-lexer-xml.lua b/context/data/scite/lexers/scite-context-lexer-xml.lua index 3c7e30ce7..2ca814fef 100644 --- a/context/data/scite/lexers/scite-context-lexer-xml.lua +++ b/context/data/scite/lexers/scite-context-lexer-xml.lua @@ -12,6 +12,8 @@ local info = { -- todo: parse entities in attributes +if not lexer._CONTEXTEXTENSIONS then dofile(_LEXERHOME .. "/scite-context-lexer.lua") end + local lexer = lexer local global, string, table, lpeg = _G, string, table, lpeg local token, exact_match = lexer.token, lexer.exact_match @@ -19,14 +21,13 @@ local P, R, S, V, C, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cmt local type = type local match, find = string.match, string.find -module(...) 
- -local examplelexer = _M +local xmllexer = { _NAME = "xml" } +local xmlcommentlexer = lexer.load("scite-context-lexer-xml-comment") -- indirect (some issue with the lexer framework) +local xmlcdatalexer = lexer.load("scite-context-lexer-xml-cdata") -- indirect (some issue with the lexer framework) +local whitespace = lexer.WHITESPACE -- triggers states local context = lexer.context -local whitespace = examplelexer.WHITESPACE -- triggers states - local space = lexer.space -- S(" \t\n\r\v\f") local any = lexer.any -- P(1) @@ -217,11 +218,8 @@ local p_doctype = token("command",P("")) -local commentlexer = lexer.load("scite-context-lexer-xml-comment") -- indirect (some issue with the lexer framework) -local cdatalexer = lexer.load("scite-context-lexer-xml-cdata") -- indirect (some issue with the lexer framework) - -lexer.embed_lexer(examplelexer, commentlexer, token("command",opencomment), token("command",closecomment)) -lexer.embed_lexer(examplelexer, cdatalexer, token("command",opencdata), token("command",closecdata)) +lexer.embed_lexer(xmllexer, xmlcommentlexer, token("command",opencomment), token("command",closecomment)) +lexer.embed_lexer(xmllexer, xmlcdatalexer, token("command",opencdata), token("command",closecdata)) local p_name = token("plain",name) @@ -280,7 +278,7 @@ local p_instruction = local p_invisible = token("invisible",invisibles^1) -_rules = { +xmllexer._rules = { { "whitespace", p_spacing }, { "preamble", p_preamble }, { "word", p_word }, @@ -296,9 +294,9 @@ _rules = { { "rest", p_rest }, } -_tokenstyles = context.styleset +xmllexer._tokenstyles = context.styleset -_foldsymbols = { -- somehow doesn't work yet +xmllexer._foldsymbols = { -- somehow doesn't work yet _patterns = { "[<>]", }, @@ -306,3 +304,5 @@ _foldsymbols = { -- somehow doesn't work yet ["<"] = 1, [">"] = -1, }, } + +return xmllexer diff --git a/context/data/scite/lexers/scite-context-lexer.lua b/context/data/scite/lexers/scite-context-lexer.lua index d9d5782fb..166ee96d6 100644 --- a/context/data/scite/lexers/scite-context-lexer.lua +++ b/context/data/scite/lexers/scite-context-lexer.lua @@ -9,8 +9,19 @@ local info = { -- The fold and lex functions are copied and patched from original code by Mitchell (see -- lexer.lua). All errors are mine. -- --- I'll probably make a whole copy and patch the other functions too as we need an extra --- nesting model. +-- I've considered making a whole copy and patching the other functions too as we need +-- an extra nesting model. However, I don't want to maintain too much. An unfortunate +-- change in 3.03 is that a script can no longer be specified. This means that instead +-- of loading the extensions via the properties file, we now need to load them in our +-- own lexers, unless of course we replace lexer.lua completely (which adds another +-- installation issue). The loading takes place with: +-- +-- if not lexer._CONTEXTEXTENSIONS then +-- dofile(_LEXERHOME .. "/scite-context-lexer.lua") +-- end +-- +-- So, where pre 3.03 we loaded that file and in that file the original lexing code, we +-- now do the reverse. -- -- Also needed: preamble scan once. Can be handled in caller below and _M.preamble. -- @@ -25,9 +36,8 @@ local info = { -- means that we need to have it frozen at the moment we load another lexer. Because spacing -- is used to revert to a parent lexer we need to make sure that we load children as late -- as possible in order not to get the wrong whitespace trigger. This took me quite a while --- to figure out (not being that familiar with the internals). 
BTW, if performance becomes --- an issue we can rewrite the main lex function (memorize the grammars and speed up the --- byline variant). +-- to figure out (not being that familiar with the internals). The lex and fold functions +-- have been optimized. It is a pity that there is no proper print available. -- Maybe it's safer to copy the other methods here so that we have no dependencies, apart -- from the the library. @@ -41,7 +51,9 @@ local concat = table.concat local global = _G local type, next, setmetatable, rawset = type, next, setmetatable, rawset -dofile(_LEXERHOME .. '/lexer.lua') +if not lexer then + dofile(_LEXERHOME .. '/lexer.lua') -- pre 3.03 situation +end lexer.context = lexer.context or { } local context = lexer.context @@ -49,6 +61,8 @@ local context = lexer.context context.patterns = context.patterns or { } local patterns = context.patterns +lexer._CONTEXTEXTENSIONS = true + local locations = { -- lexer.context.path, _LEXERHOME .. "/data", -- optional data directory @@ -541,6 +555,8 @@ function context.lex(text,init_style) else for style, style_num in next, lexer._TOKENS do if style_num == init_style then + -- the name of the lexers is filtered from the whitespace + -- specification local lexer_name = match(style,'^(.+)_whitespace') or lexer._NAME if lexer._INITIALRULE ~= lexer_name then grammar = hash[lexer_name] diff --git a/context/data/scite/lexers/themes/scite-context-theme.lua b/context/data/scite/lexers/themes/scite-context-theme.lua index d9617617e..240755382 100644 --- a/context/data/scite/lexers/themes/scite-context-theme.lua +++ b/context/data/scite/lexers/themes/scite-context-theme.lua @@ -17,11 +17,12 @@ if not WIN32 then font_name = '!' .. font_name end -local global = _G - -- dofile(_LEXERHOME .. '/themes/scite.lua') -- starting point so we miss nothing -module('lexer', package.seeall) +-- module('lexer', package.seeall) + +local color = lexer.color +local style = lexer.style lexer.context = lexer.context or { } lexer.context.path = context_path diff --git a/context/data/scite/scite-context-data-context.properties b/context/data/scite/scite-context-data-context.properties index 0699862b5..b5a4d1b1f 100644 --- a/context/data/scite/scite-context-data-context.properties +++ b/context/data/scite/scite-context-data-context.properties @@ -2,10 +2,14 @@ keywordclass.context.helpers=\ startsetups stopsetups startxmlsetups stopxmlsetups \ startluasetups stopluasetups starttexsetups stoptexsetups startrawsetups \ stoprawsetups startlocalsetups stoplocalsetups starttexdefinition stoptexdefinition \ -starttexcode stoptexcode newcount newdimen newskip \ -newmuskip newbox newtoks newread newwrite \ -newmarks newinsert newattribute newif newlanguage \ -newfamily newfam newhelp then donetrue \ +starttexcode stoptexcode doifsetupselse doifsetups doifnotsetups \ +setup setups texsetup xmlsetup luasetup \ +directsetup newmode setmode resetmode newsystemmode \ +setsystemmode resetsystemmode pushsystemmode popsystemmode booleanmodevalue \ +newcount newdimen newskip newmuskip newbox \ +newtoks newread newwrite newmarks newinsert \ +newattribute newif newlanguage newfamily newfam \ +newhelp then donothing dontcomplain donetrue \ donefalse htdp unvoidbox vfilll mathbox \ mathlimop mathnolop mathnothing mathalpha currentcatcodetable \ defaultcatcodetable catcodetablename newcatcodetable startcatcodetable stopcatcodetable \ @@ -36,33 +40,34 @@ doifinstringelse doifassignmentelse tracingall tracingnone loggingall \ appendtoks prependtoks appendtotoks prependtotoks to \ endgraf 
empty null space obeyspaces \ obeylines normalspace executeifdefined singleexpandafter doubleexpandafter \ -tripleexpandafter dontleavehmode wait writestatus define \ -redefine setmeasure setemeasure setgmeasure setxmeasure \ -definemeasure measure getvalue setvalue setevalue \ -setgvalue setxvalue letvalue letgvalue resetvalue \ -undefinevalue ignorevalue setuvalue setuevalue setugvalue \ -setuxvalue globallet glet getparameters geteparameters \ -getgparameters getxparameters forgetparameters processcommalist processcommacommand \ -quitcommalist quitprevcommalist processaction processallactions processfirstactioninset \ -processallactionsinset unexpanded expanded startexpanded stopexpanded \ -protected protect unprotect firstofoneargument firstoftwoarguments \ -secondoftwoarguments firstofthreearguments secondofthreearguments thirdofthreearguments firstoffourarguments \ -secondoffourarguments thirdoffourarguments fourthoffourarguments firstoffivearguments secondoffivearguments \ -thirdoffivearguments fourthoffivearguments fifthoffivearguments firstofsixarguments secondofsixarguments \ -thirdofsixarguments fourthofsixarguments fifthofsixarguments sixthofsixarguments firstofoneunexpanded \ -gobbleoneargument gobbletwoarguments gobblethreearguments gobblefourarguments gobblefivearguments \ -gobblesixarguments gobblesevenarguments gobbleeightarguments gobbleninearguments gobbletenarguments \ -gobbleoneoptional gobbletwooptionals gobblethreeoptionals gobblefouroptionals gobblefiveoptionals \ -dorecurse doloop exitloop dostepwiserecurse recurselevel \ -recursedepth dofastloopcs newconstant setnewconstant newconditional \ -settrue setfalse setconstant newmacro setnewmacro \ -newfraction dosingleempty dodoubleempty dotripleempty doquadrupleempty \ -doquintupleempty dosixtupleempty doseventupleempty dosingleargument dodoubleargument \ -dotripleargument doquadrupleargument dosinglegroupempty dodoublegroupempty dotriplegroupempty \ -doquadruplegroupempty doquintuplegroupempty nopdfcompression maximumpdfcompression normalpdfcompression \ -modulonumber dividenumber getfirstcharacter doiffirstcharelse startnointerference \ -stopnointerference strut setstrut strutbox strutht \ -strutdp strutwd +tripleexpandafter dontleavehmode removelastspace removeunwantedspaces wait \ +writestatus define redefine setmeasure setemeasure \ +setgmeasure setxmeasure definemeasure measure getvalue \ +setvalue setevalue setgvalue setxvalue letvalue \ +letgvalue resetvalue undefinevalue ignorevalue setuvalue \ +setuevalue setugvalue setuxvalue globallet glet \ +getparameters geteparameters getgparameters getxparameters forgetparameters \ +processcommalist processcommacommand quitcommalist quitprevcommalist processaction \ +processallactions processfirstactioninset processallactionsinset unexpanded expanded \ +startexpanded stopexpanded protected protect unprotect \ +firstofoneargument firstoftwoarguments secondoftwoarguments firstofthreearguments secondofthreearguments \ +thirdofthreearguments firstoffourarguments secondoffourarguments thirdoffourarguments fourthoffourarguments \ +firstoffivearguments secondoffivearguments thirdoffivearguments fourthoffivearguments fifthoffivearguments \ +firstofsixarguments secondofsixarguments thirdofsixarguments fourthofsixarguments fifthofsixarguments \ +sixthofsixarguments firstofoneunexpanded gobbleoneargument gobbletwoarguments gobblethreearguments \ +gobblefourarguments gobblefivearguments gobblesixarguments gobblesevenarguments gobbleeightarguments \ +gobbleninearguments 
gobbletenarguments gobbleoneoptional gobbletwooptionals gobblethreeoptionals \ +gobblefouroptionals gobblefiveoptionals dorecurse doloop exitloop \ +dostepwiserecurse recurselevel recursedepth dofastloopcs newconstant \ +setnewconstant newconditional settrue setfalse setconstant \ +newmacro setnewmacro newfraction dosingleempty dodoubleempty \ +dotripleempty doquadrupleempty doquintupleempty dosixtupleempty doseventupleempty \ +dosingleargument dodoubleargument dotripleargument doquadrupleargument dosinglegroupempty \ +dodoublegroupempty dotriplegroupempty doquadruplegroupempty doquintuplegroupempty nopdfcompression \ +maximumpdfcompression normalpdfcompression modulonumber dividenumber getfirstcharacter \ +doiffirstcharelse startnointerference stopnointerference strut setstrut \ +strutbox strutht strutdp strutwd begstrut \ +endstrut keywordclass.context.constants=\ zerocount minusone minustwo plusone \ @@ -114,9 +119,11 @@ doifallmodeselse doifnotallmodes startenvironment stopenvironment environment \ startcomponent stopcomponent component startproduct stopproduct \ product startproject stopproject project starttext \ stoptext startdocument stopdocument documentvariable startmodule \ -stopmodule usemodule typescriptone typescripttwo typescriptthree \ +stopmodule usemodule enablemode disablemode preventmode \ +pushmode popmode typescriptone typescripttwo typescriptthree \ mathsizesuffix mathordcode mathopcode mathbincode mathrelcode \ mathopencode mathclosecode mathpunctcode mathalphacode mathinnercode \ mathnothingcode mathlimopcode mathnolopcode mathboxcode mathchoicecode \ -mathaccentcode mathradicalcode +mathaccentcode mathradicalcode constantnumber constantnumberargument constantdimen \ +constantdimenargument constantemptyargument continueifinputfile diff --git a/context/data/scite/scite-context-external.properties b/context/data/scite/scite-context-external.properties index 723f19889..b492e2932 100644 --- a/context/data/scite/scite-context-external.properties +++ b/context/data/scite/scite-context-external.properties @@ -3,7 +3,17 @@ import $(SciteDefaultHome)/lexers/lpeg lexer.lpeg.home=$(SciteDefaultHome)/lexers -lexer.lpeg.script=$(lexer.lpeg.home)/scite-context-lexer.lua + +# # pre 3.03: +# +# lexer.lpeg.script=$(lexer.lpeg.home)/scite-context-lexer.lua +# +# # post 3.03: +# +lexer.lpeg.script=$(lexer.lpeg.home)/lexer.lua +# +# where we load the extensions in the lexers themselves. 
+ lexer.lpeg.color.theme=$(lexer.lpeg.home)/themes/scite-context-theme.lua # alas, only a few properties are passed (only indentation) @@ -20,8 +30,6 @@ if PLAT_GTK lexer.*.lpeg=lpeg -# an xml lexer will also be provided - lexer.$(file.patterns.metapost)=lpeg_scite-context-lexer-mps lexer.$(file.patterns.metafun)=lpeg_scite-context-lexer-mps lexer.$(file.patterns.context)=lpeg_scite-context-lexer-tex diff --git a/context/data/scite/scite-context.properties b/context/data/scite/scite-context.properties index a4c4f8f8e..1664affa1 100644 --- a/context/data/scite/scite-context.properties +++ b/context/data/scite/scite-context.properties @@ -38,6 +38,7 @@ output.code.page=65001 textwrapper.margin=4 textwrapper.length=68 +#~ textwrapper.length=80 # ConTeXt: suffixes (really needed) diff --git a/context/data/scite/scite-ctx.properties b/context/data/scite/scite-ctx.properties index 94a51aeb7..8b2651c0d 100644 --- a/context/data/scite/scite-ctx.properties +++ b/context/data/scite/scite-ctx.properties @@ -49,7 +49,7 @@ ctx.menulist.example=\ check=check_text|\ reset=reset_text -ctx.wraptext.length=65 +ctx.wraptext.length=80 ctx.spellcheck.language=auto ctx.spellcheck.wordsize=4 -- cgit v1.2.3