18 files changed, 956 insertions, 195 deletions
diff --git a/context/data/scite/lexers/scite-context-lexer-cld.lua b/context/data/scite/lexers/scite-context-lexer-cld.lua index 632a7672c..f81119adf 100644 --- a/context/data/scite/lexers/scite-context-lexer-cld.lua +++ b/context/data/scite/lexers/scite-context-lexer-cld.lua @@ -1,6 +1,6 @@ local info = { version = 1.002, - comment = "scintilla lpeg lexer for cld/lua", + comment = "scintilla lpeg lexer for cld", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files", diff --git a/context/data/scite/lexers/scite-context-lexer-lua.lua b/context/data/scite/lexers/scite-context-lexer-lua.lua index 2a0f48026..49799a978 100644 --- a/context/data/scite/lexers/scite-context-lexer-lua.lua +++ b/context/data/scite/lexers/scite-context-lexer-lua.lua @@ -1,13 +1,11 @@ local info = { version = 1.002, - comment = "scintilla lpeg lexer for cld/lua", + comment = "scintilla lpeg lexer for lua", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files", } --- Adapted from lua.lua by Mitchell who based it on a lexer by Peter Odding. - local lexer = lexer local token, style, colors, exact_match, no_style = lexer.token, lexer.style, lexer.colors, lexer.exact_match, lexer.style_nothing local P, R, S, C, Cg, Cb, Cs, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cg, lpeg.Cb, lpeg.Cs, lpeg.Cmt @@ -20,6 +18,8 @@ local cldlexer = _M _directives = { } -- communication channel +-- this will be eextended + local keywords = { 'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', 'if', 'in', 'local', 'nil', 'not', 'or', 'repeat', 'return', 'then', 'true', diff --git a/context/data/scite/lexers/scite-context-lexer-mps.lua b/context/data/scite/lexers/scite-context-lexer-mps.lua index 188c98c73..2d8cc3a70 100644 --- a/context/data/scite/lexers/scite-context-lexer-mps.lua +++ b/context/data/scite/lexers/scite-context-lexer-mps.lua @@ -56,6 +56,10 @@ local number = sign^-1 * ( -- at most one + digit^1 -- 10 ) +local cstokentex = R("az","AZ","\127\255") + S("@!?_") + +-- we could collapse as in tex + local spacing = token(whitespace, space^1) local rest = token('default', any) local comment = token('comment', P('%') * (1-S("\n\r"))^0) @@ -66,10 +70,10 @@ local quoted = token('quote', dquote) * token('string', P(1-dquote)^1) * token('quote', dquote) local primitive = token('primitive', exact_match(primitivecommands)) ------ csname = token('user', cstoken^1) -local identifier = token('default', cstoken^1) +local identifier = token('default', cstoken) local number = token('number', number) -local special = token('special', S("#()[]<>=:\"")) +local special = token('special', S("#()[]<>=:\"")) -- or else := <> etc split +local texlike = token('string', P("\\") * cstokentex^1) local extra = token('extra', S("`~%^&_-+/\'|\\")) _rules = { @@ -83,6 +87,7 @@ _rules = { { 'number', number }, { 'quoted', quoted }, { 'special', special }, +-- { 'texlike', texlike }, { 'extra', extra }, { 'rest', rest }, } diff --git a/context/data/scite/lexers/scite-context-lexer-tex.lua b/context/data/scite/lexers/scite-context-lexer-tex.lua index dac3ada63..340c3f75e 100644 --- a/context/data/scite/lexers/scite-context-lexer-tex.lua +++ b/context/data/scite/lexers/scite-context-lexer-tex.lua @@ -37,7 +37,7 @@ local global, string, table, lpeg = _G, string, table, lpeg local token, style, colors, exact_match, no_style = lexer.token, lexer.style, lexer.colors, 
lexer.exact_match, lexer.style_nothing local P, R, S, V, C, Cmt, Cp, Cc, Ct = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cmt, lpeg.Cp, lpeg.Cc, lpeg.Ct local type, next, pcall, loadfile, setmetatable = type, next, pcall, loadfile, setmetatable -local find, match = string.find, string.match +local find, match, lower = string.find, string.match, string.lower module(...) @@ -111,14 +111,19 @@ local knowncommand = Cmt(cstoken^1, function(_,i,s) return currentcommands[s] and i end) +local validwords = false + local knownpreamble = Cmt(P("% "), function(input,i,_) if i < 10 then + validwords = false local s, e, word = find(input,'^(.+)[\n\r]',i) -- combine with match if word then local interface = match(word,"interface=(..)") if interface then currentcommands = commands[interface] or commands.en or { } end + local language = match(word,"language=(..)") + validwords = language and lexer.context.setwordlist(language) end end return false @@ -169,22 +174,51 @@ local whitespace = contextlexer.WHITESPACE -- triggers states local space = lexer.space -- S(" \n\r\t\f\v") local any = lexer.any +local backslash = P("\\") +local hspace = S(" \t") local p_spacing = space^1 local p_rest = any local p_preamble = knownpreamble local p_comment = commentline -local p_command = P('\\') * knowncommand -local p_constant = P('\\') * exact_match(constants) -local p_helper = P('\\') * exact_match(helpers) -local p_primitive = P('\\') * exact_match(primitives) +local p_command = backslash * knowncommand +local p_constant = backslash * exact_match(constants) +local p_helper = backslash * exact_match(helpers) +local p_primitive = backslash * exact_match(primitives) local p_ifprimitive = P('\\if') * cstoken^1 -local p_csname = P('\\') * (cstoken^1 + P(1)) +local p_csname = backslash * (cstoken^1 + P(1)) local p_grouping = S("{$}") local p_special = S("#()[]<>=\"") local p_extra = S("`~%^&_-+/\'|") -local p_text = cstoken^1 +local p_text = cstoken^1 --maybe add punctuation and space + +-- no looking back = #(1-S("[=")) * cstoken^3 * #(1-S("=]")) + +local p_word = Cmt(cstoken^3, function(_,i,s) + if not validwords then + return true, { "text", i } + else + -- keys are lower + local word = validwords[s] + if word == s then + return true, { "okay", i } -- exact match + elseif word then + return true, { "warning", i } -- case issue + else + local word = validwords[lower(s)] + if word == s then + return true, { "okay", i } -- exact match + elseif word then + return true, { "warning", i } -- case issue + else + return true, { "error", i } + end + end + end +end) + +-- local p_text = (1 - p_grouping - p_special - p_extra - backslash - space + hspace)^1 -- keep key pressed at end-of syst-aux.mkiv: -- @@ -202,7 +236,6 @@ if option == 1 then p_grouping = p_grouping^1 p_special = p_special^1 p_extra = p_extra^1 - p_text = p_text^1 p_command = p_command^1 p_constant = p_constant^1 @@ -218,7 +251,6 @@ elseif option == 2 then p_grouping = (p_grouping * included)^1 p_special = (p_special * included)^1 p_extra = (p_extra * included)^1 - p_text = (p_text * included)^1 p_command = (p_command * included)^1 p_constant = (p_constant * included)^1 @@ -243,6 +275,8 @@ local grouping = token('grouping', p_grouping ) local special = token('special', p_special ) local extra = token('extra', p_extra ) local text = token('default', p_text ) +----- word = token("okay", p_word ) +local word = p_word ----- startluacode = token("grouping", P("\\startluacode")) ----- stopluacode = token("grouping", P("\\stopluacode")) @@ -261,6 +295,7 @@ end local 
function stopdisplaylua(_,i,s) local ok = luatag == s if ok then +cldlexer._directives.cld_inline = false luastatus = false end return ok @@ -298,6 +333,7 @@ local function stopinlinelua_e(_,i,s) -- } lualevel = lualevel - 1 local ok = lualevel <= 0 if ok then +cldlexer._directives.cld_inline = false luastatus = false end return ok @@ -347,7 +383,8 @@ lexer.embed_lexer(contextlexer, mpslexer, startmetafuncode, stopmetafuncode) _rules = { { "whitespace", spacing }, { "preamble", preamble }, - { "text", text }, + { "word", word }, + -- { "text", text }, { "comment", comment }, { "constant", constant }, { "helper", helper }, diff --git a/context/data/scite/lexers/scite-context-lexer.lua b/context/data/scite/lexers/scite-context-lexer.lua index 4848dc9d9..20af5d68f 100644 --- a/context/data/scite/lexers/scite-context-lexer.lua +++ b/context/data/scite/lexers/scite-context-lexer.lua @@ -24,12 +24,12 @@ local info = { -- an issue we can rewrite the main lex function (memorize the grammars and speed up the -- byline variant). -local R, P, S, Cp, Cs, Ct, Cmt, Cc = lpeg.R, lpeg.P, lpeg.S, lpeg.Cp, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc +local R, P, S, C, Cp, Cs, Ct, Cmt, Cc, Cf, Cg = lpeg.R, lpeg.P, lpeg.S, lpeg.C, lpeg.Cp, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Cf, lpeg.Cg local lpegmatch = lpeg.match local find, gmatch, match, lower, upper, gsub = string.find, string.gmatch, string.match, string.lower, string.upper, string.gsub local concat = table.concat local global = _G -local type, next, setmetatable = type, next, setmetatable +local type, next, setmetatable, rawset = type, next, setmetatable, rawset dofile(_LEXERHOME .. '/lexer.lua') @@ -42,7 +42,7 @@ local locations = { } local function collect(name) - local definitions = loadfile(name .. ".lua") + local definitions = loadfile(name .. ".luc") or loadfile(name .. ".lua") if type(definitions) == "function" then definitions = definitions() end @@ -423,11 +423,45 @@ end -- todo: keywords: one lookup and multiple matches --- function lexer.context.token(name, patt) --- return Ct(patt * Cc(name) * Cp()) --- end +function lexer.context.token(name, patt) + return Ct(patt * Cc(name) * Cp()) +end lexer.fold = lexer.context.fold lexer.lex = lexer.context.lex --- lexer.token = lexer.context.token +lexer.token = lexer.context.token lexer.exact_match = lexer.context.exact_match + +-- spell checking (we can only load lua files) + +local lists = { } + +local splitter = (Cf(Ct("") * (Cg(C((1-S(" \t\n\r"))^1 * Cc(true))) + P(1))^1,rawset) )^0 +local splitter = (Cf(Ct("") * (Cg(C(R("az","AZ","\127\255")^1) * Cc(true)) + P(1))^1,rawset) )^0 + +local function splitwords(words) + return lpegmatch(splitter,words) +end + +function lexer.context.setwordlist(tag) + if not tag or tag == "" then + return false + elseif lists[tag] ~= nil then + return lists[tag] + else + local list = collect("spell-" .. 
tag) + if not list or type(list) ~= "table" then + lists[tag] = false + return nil + elseif type(list.words) == "string" then + list = splitwords(list.words) + lists[tag] = list + return list + else + list = list.words or false + lists[tag] = list + return list + end + end +end + diff --git a/context/data/scite/lexers/themes/scite-context-theme.lua b/context/data/scite/lexers/themes/scite-context-theme.lua index 360a5d435..94f623cd8 100644 --- a/context/data/scite/lexers/themes/scite-context-theme.lua +++ b/context/data/scite/lexers/themes/scite-context-theme.lua @@ -70,7 +70,9 @@ style_char = style { fore = colors.magenta } style_class = style { fore = colors.black, bold = true } style_constant = style { fore = colors.cyan, bold = true } style_definition = style { fore = colors.black, bold = true } +style_okay = style { fore = colors.dark } style_error = style { fore = colors.red } +style_warning = style { fore = colors.orange } style_function = style { fore = colors.black, bold = true } style_operator = style { fore = colors.blue } style_preproc = style { fore = colors.yellow, bold = true } @@ -86,6 +88,8 @@ style_indentguide = style { fore = colors.linepanel, back = colors.white style_calltip = style { fore = colors.white, back = colors.tippanel } style_controlchar = style_nothing +-- only bold seems to work + lexer.context.styles = { -- ["whitespace"] = style_whitespace, @@ -108,6 +112,10 @@ lexer.context.styles = { ["extra"] = style { fore = colors.yellow }, ["quote"] = style { fore = colors.blue, bold = true }, + ["okay"] = style_okay, + ["warning"] = style_warning, + ["error"] = style_error, + } local styleset = { } diff --git a/context/data/scite/scite-context-readme.pdf b/context/data/scite/scite-context-readme.pdf Binary files differnew file mode 100644 index 000000000..9cd61ba87 --- /dev/null +++ b/context/data/scite/scite-context-readme.pdf diff --git a/context/data/scite/scite-context-readme.tex b/context/data/scite/scite-context-readme.tex index 3a6d673c5..6f32c87bc 100644 --- a/context/data/scite/scite-context-readme.tex +++ b/context/data/scite/scite-context-readme.tex @@ -1,19 +1,199 @@ +% interface=en modes=icon,screen language=uk + +\usemodule[abr-02] + +\logo [METAPOST] {MetaPost} +\logo [METAFUN] {MetaFun} + +\setupcolors + [state=start] + +\setuplayout + [footer=0pt, + width=middle, + height=middle] + +\setupbodyfont + [dejavu,11pt] + +\setuphead + [section] + [page=, + style=\bfb, + color=darkblue, + after=\blank] + +\setuptype + [color=darkblue] + +\setuptyping + [color=darkblue] + +\setuptyping + [margin=yes] + +\setupwhitespace + [big] + +\definecolor[gray][s=.2,t=.5,a=1] + +\startuseMPgraphic{TitlePage}{darkness} + StartPage ; + + numeric factor ; factor := 1/3 ; + numeric multiple ; multiple := PaperHeight/PaperWidth ; % 1.6 ; + numeric stages ; stages := multiple/16 ; % .1 ; + numeric darkness ; darkness := \MPvar{darkness} ; + + def Scaled(expr s, m) = + if m = 1 : + scaled (2*s*PaperWidth) + else : + xscaled (2*s*PaperWidth) yscaled (2*s*PaperHeight) + fi + enddef ; + + fill Page withcolor (factor*white) ; + + fill fullcircle scaled (multiple*PaperWidth) shifted llcorner Page withcolor (factor*red) ; + fill fullcircle scaled (multiple*PaperWidth) shifted ulcorner Page withcolor (factor*green) ; + fill fullcircle scaled (multiple*PaperWidth) shifted urcorner Page withcolor (factor*blue) ; + fill fullcircle scaled (multiple*PaperWidth) shifted lrcorner Page withcolor (factor*yellow) ; + + for i = llcorner Page, ulcorner Page, urcorner Page, lrcorner Page : + 
for j = 0 step stages until (10*stages-eps) : % or .8
+        fill fullcircle Scaled(j,1) shifted i withcolor transparent(1,\MPvar{darkness}*(1-j),white) ;
+      endfor ;
+    endfor ;
+
+    draw Page withpen pencircle scaled .1PaperWidth withcolor transparent(1,.5,.5white) ;
+
+    StopPage
+\stopuseMPgraphic
+
+\startmode[icon,screen]
+
+    \setuppapersize[S66][S66]
+
+    \setupbodyfont[10pt]
+
+\stopmode
+
+\startmode[icon]
+
+    \starttext
+
+    \startTEXpage
+        \useMPgraphic{TitlePage}{darkness=0.4}
+    \stopTEXpage
+
+    \stoptext
+
+\stopmode
+
 \starttext

-\subject{Installing Scite}
+% title page
+
+\definelayer
+  [TitlePage]
+  [width=\paperwidth,
+   height=\paperheight]
+
+\setlayer
+  [TitlePage]
+  {\useMPgraphic{TitlePage}{darkness=1}}
+
+\setlayerframed
+  [TitlePage]
+  [preset=rightbottom,
+   hoffset=.1\paperwidth,
+   voffset=.1\paperwidth]
+  [align=left,
+   width=\hsize,
+   frame=off,
+   foregroundcolor=gray]
+  {\definedfont[SerifBold sa 10]SciTE\endgraf
+   \definedfont[SerifBold sa 2.48]IN CONTEXT\kern.25\bodyfontsize}
+
+\startTEXpage
+    \tightlayer[TitlePage]
+\stopTEXpage
+
+% main text
+
+\subject{About \SCITE}
+
+{\em This is an updated but as yet uncorrected version.}
+
+\SCITE\ is a source code editor written by Neil Hodgson. After
+playing with several editors we decided that this editor was quite
+configurable and extendible.
+
+For a long time at \PRAGMA\ we used \TEXEDIT, an editor we'd
+written in \MODULA. It had some project management features and
+recognized the project structure in \CONTEXT\ documents. Later we
+rewrote this as a platform independent reimplementation called
+\TEXWORK\ written in \PERLTK\ (not to be confused with the editor
+with the plural name).
+
+In the beginning of the century we ran into \SCITE. Although the
+mentioned editors provide some functionality not present in
+\SCITE, we decided to use that editor because it frees us from
+maintaining our own. We ported our \TEX\ and \METAPOST\ (line based)
+syntax highlighting to \SCITE\ and got a lot of others for free.
+
+After a while I found out that there was an extension interface
+written in \LUA. I played with it and wrote a few extensions too.
+This pleasant experience later triggered the \LUATEX\ project.
+
+A decade into the century \SCITE\ got another new feature: you can
+write dynamic external lexers in \LUA\ using \LPEG. As in the
+meantime \CONTEXT\ had evolved into a \TEX/\LUA\ hybrid, it made
+sense to look into this. The result is a couple of lexers that
+suit \TEX, \METAPOST\ and \LUA\ usage in \CONTEXT\ \MKIV.
+\footnote {In the process some of the general lexing framework was
+adapted to suit our demands for speed. We ship these files as
+well.}
+
+In the \CONTEXT\ (standalone) distribution you will find the
+relevant files under:

-Scite has built-in lexers as well as external lpeg based ones. We
-can use both but for the external lexers some more work is needed
-to get them running. As they are more advanced it's worth the
-effort.
+\starttyping
+<texroot>/tex/texmf-context/context/data/scite
+\stoptyping
+
+Normally a user will not have to dive into the implementation
+details, but in principle you can tweak the properties files to
+suit your purpose.
+
+\subject{Installing \SCITE}

-First you need to install Scite. Just get the latest greatest from:
+Installing \SCITE\ is straightforward. We are most familiar with
+\MSWINDOWS\ but for other operating systems installation is not
+much different.
+
+First you need to fetch the archive from:

 \starttyping
 www.scintilla.org
 \stoptyping

-Next you need to install the lpeg lexers. These can be fetched from:
+The \MSWINDOWS\ binaries are zipped in \type {wscite.zip}, and you
+can unzip this in any directory you want, as long as you make sure
+that the binary ends up in your path or as a shortcut on your
+desktop. So, say that you install \SCITE\ in:
+
+\starttyping
+c:\data\system\scite\wscite
+\stoptyping
+
+You need to add this path to your local path definition.
+Installing \SCITE\ in some known place has the advantage that you
+can move it around. There are no special dependencies on the
+operating system.
+
+Next you need to install the lpeg lexers. \footnote {Versions
+later than 2.11 will not run on Windows 2K. In that case you need
+to comment out the external lexer import.} These can be fetched from:

 \starttyping
 code.google.com/p/scintilla
 \stoptyping

@@ -22,32 +202,37 @@ code.google.com/p/scintilla
 On windows you need to copy the \type {lexers} subfolder to the
 \type {wscite} folder. For Linux the place depends on the
 distribution.

-\subject{Extending Scite}
-
-In the \CONTEXT\ distribution you find the relevant files in:
+For \UNIX, one can take a precompiled version as well. Here we
+need to split the set of files into:

 \starttyping
-<contextroot>/tex/texmf-context/context/data/scite
+/usr/bin
+/usr/share/scite
 \stoptyping

-The easy way is to copy all the files in that path to the path where
-the global properties files lives
+The second path is hard coded in the binary.
+
+If you want to use \CONTEXT, you need to copy the relevant files from

 \starttyping
-SciteGlobal.properties
+<texroot>/tex/texmf-context/context/data/scite
 \stoptyping

-At the end of that file (on windows it is in the path where the Scite
-binary) you then add a line to the end:
+to the path where \SCITE\ keeps its property files (\type {*.properties}).
+
+There is a file called \type {SciteGlobal.properties}. At the end
+of that file (on windows this is the path where the \SCITE\ binary lives)
+you then add a line to the end:

 \starttyping
 import scite-context-user
 \stoptyping

-You need to restart Scite in order to see if things work out as expected.
+You need to restart \SCITE\ in order to see if things work out as
+expected.

-Disabling the external lexer in a recent Scite is somewhat tricky. In that
-case the end of that file looks like:
+Disabling the external lexer in a recent \SCITE\ is somewhat
+tricky. In that case the end of that file looks like:

 \starttyping
 imports.exclude=scite-context-external

@@ -57,10 +242,34 @@ import scite-context-user
 In any case you need to make sure that the user file is loaded last.

+After this, things should run as expected (given that \TEX\ runs
+at the console as well).
+
+% In order to run the commands needed, we assume that the following programs
+% are installed:
+%
+% \startitemize[packed]
+% \item tidy (for quick and dirty checking of \XML\ files)
+% \item xsltproc (for converting \XML\ files into other formats)
+% \item acrobat (for viewing files)
+% \item ghostview (for viewing files, use gv on \UNIX)
+% \item rxvt (a console, only needed on \UNIX)
+% \stopitemize
+
+\subject{Fonts}
+
+The configuration file defaults to the Dejavu fonts. These are part of the
+\CONTEXT\ standalone (minimals) distribution.
You can copy them to your
+operating system from:
+
+\starttyping
+<contextroot>/tex/texmf/fonts/truetype/public/dejavu
+\stoptyping
+
 \subject{An alternative approach}

 If for some reason you prefer not to mess with property files in the main
-Scite path, you can follow a different route and selectively copy files to
+\SCITE\ path, you can follow a different route and selectively copy files to
 places. The following files are needed for the lpeg based lexer:

@@ -85,10 +294,10 @@ The data files are needed because we cannot access property
 files from within the lexer. If we could open a file we could use the property
 files instead.

-These files go to the \type {lexers} subpath in your Scite
+These files go to the \type {lexers} subpath in your \SCITE\
 installation. Normally this sits in the binary path.

 The following files provide some extensions. On windows you can copy
-these files to the path where the scite binary lives.
+these files to the path where the \SCITE\ binary lives.

 \starttyping
 scite-ctx.lua

@@ -141,24 +350,45 @@ SciTEUser.properties
 \stoptyping

 Of course the pragma import is optional. You can comment either the
-internal or external variant but there is no reason not to keep them both.
+internal or external variant but there is no reason not to keep them both.
+
+\subject{Extensions}
+
+Just a quick note on some extensions. If you select a part of the
+text (normally you do this with the shift key pressed) and you hit
+\type {Shift-F11}, you get a menu with some options. More (robust)
+ones will be provided at some point.

 \subject{Spell checking}

-If you want to have spellchecking, you need have files with correct words
+If you want to have spell checking, you need to have files with correct words
 on each line. The first line of a file determines the language:

 \starttyping
 % language=uk
 \stoptyping

-In this case the following file is needed:
+When you use the external lexers, you need to provide some files. Given that
+you have a text file with valid words only, you can run the following script:
+
+\starttyping
+mtxrun --script scite --words nl uk
+\stoptyping
+
+This will convert files with names like \type {spell-nl.txt} into \LUA\ files
+that you need to copy to the \type {lexers/data} path. Spell checking happens
+in real time when you have the language directive (just add a bogus character
+to disable it). Wrong words are colored red, and words that might have a case
+problem are colored orange. Recognized words are greyed and words with fewer
+than three characters are ignored.
+
+In the case of internal lexers, the following file is needed:

 \starttyping
 spell-uk.txt
 \stoptyping

-This file is searched on the the path determined by the environment variable:
+This file is searched on the path determined by the environment variable:

 \starttyping
 CTXSPELLPATH
 \stoptyping

@@ -172,4 +402,394 @@ In a similar fashion you can drive the interface checking:
 % interface=nl
 \stoptyping

+\subject{Property files}
+
+The internal lexers are controlled by the property files while the
+external ones are steered with themes. Unfortunately there is
+hardly any access to properties from the external lexer code nor
+can we consult the file system and/or run programs like \type
+{mtxrun}. This means that we cannot use configuration files in the
+\CONTEXT\ distribution directly. Hopefully this changes with future
+releases.
+
+\subject{The external lexers}
+
+These are the more advanced ones. They provide more detail and the \CONTEXT\
+lexer also supports nested \METAPOST\ and \LUA.
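As an aside, a rough sketch of how that nesting is wired up: the lpeg lexer
framework offers an \type {embed_lexer} call, and the shipped \type
{scite-context-lexer-tex.lua} uses it to hook the \METAPOST\ and \LUA\ (cld)
lexers into the \TEX\ lexer. The fragment below is a simplified illustration,
not the literal shipped code; in particular the real start and stop tokens
cover the whole family of \LUA\ and \METAFUN\ environments.

\starttyping
-- simplified sketch: embed child lexers in the context (tex) lexer;
-- contextlexer, cldlexer, mpslexer and the metafun start/stop tokens
-- are assumed to be defined as in the shipped lexer files
local startluacode = token("grouping", P("\\startluacode"))
local stopluacode  = token("grouping", P("\\stopluacode"))

lexer.embed_lexer(contextlexer, cldlexer, startluacode, stopluacode)
lexer.embed_lexer(contextlexer, mpslexer, startmetafuncode, stopmetafuncode)
\stoptyping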
Currently there is no
+detailed configuration but this might change once they are stable.
+
+The external lexers operate on documents while the internal ones
+operate on lines. This can make the external lexers slow on large
+documents. We've optimized the code somewhat for speed and memory
+consumption but there's only so much one can do. While lexing, each
+change in style needs a small table, and allocating and garbage
+collecting many small tables comes at a price. Of course in
+practice this probably goes unnoticed. \footnote {I wrote the code
+in 2011 on a Dell M90 laptop that was more than five years old, so
+I suppose that speed is less of an issue now.}
+
+In principle the external lexers can be used with \type
+{textadept} which also uses \type {scintilla}. Actually, support
+for lpeg lexing originates in \type {textadept}. Currently \type
+{textadept} lacks a couple of features I like about \SCITE\ (for
+instance it has no realtime log pane) and it's also still changing.
+\footnote {A native version of \SCITE\ for \MACOSX\ is underway,
+which is a good thing.} At some point the \CONTEXT\ distribution
+will probably provide files for \type {textadept} as well.
+
+\subject{The internal lexers}
+
+\SCITE\ has quite some built-in lexers. A lexer is responsible for
+highlighting the syntax of your document. The way a \TEX\ file is
+treated is configured in the file:
+
+\starttyping
+tex.properties
+\stoptyping
+
+You can edit this file to your needs using the menu entry under
+\type {options} in the top bar. In this file, the following
+settings apply to the \TEX\ lexer:
+
+\starttyping
+lexer.tex.interface.default=0
+lexer.tex.use.keywords=1
+lexer.tex.comment.process=0
+lexer.tex.auto.if=1
+\stoptyping
+
+The option \type {lexer.tex.interface.default} determines the way
+keywords are highlighted. You can control the interface from your
+document as well, which makes more sense than editing the
+configuration file each time.
+
+\starttyping
+% interface=all|tex|nl|en|de|cz|it|ro|latex
+\stoptyping
+
+The values in the properties file and the keywords in the preamble
+line have the following meaning:
+
+\starttabulate[|lT|lT|p|]
+\NC 0 \NC all \NC all commands (preceded by a backslash) \NC \NR
+\NC 1 \NC tex \NC \TEX, \ETEX, \PDFTEX, \OMEGA\ primitives (and macros) \NC \NR
+\NC 2 \NC nl \NC the Dutch \CONTEXT\ interface \NC \NR
+\NC 3 \NC en \NC the English \CONTEXT\ interface \NC \NR
+\NC 4 \NC de \NC the German \CONTEXT\ interface \NC \NR
+\NC 5 \NC cz \NC the Czech \CONTEXT\ interface \NC \NR
+\NC 6 \NC it \NC the Italian \CONTEXT\ interface \NC \NR
+\NC 7 \NC ro \NC the Romanian \CONTEXT\ interface \NC \NR
+\NC 8 \NC latex \NC \LATEX\ (apart from packages) \NC \NR
+\stoptabulate
+
+The configuration file is set up in such a way that you can easily
+add more keywords to the lists. The keywords for the second and
+higher interfaces are defined in their own properties files. If
+you're curious about the way this is configured, you can peek into
+the property files that start with \type {scite-context}. When you
+have \CONTEXT\ installed you can generate configuration files with
+
+\starttyping
+mtxrun --script interface --scite
+\stoptyping
+
+You need to make sure that you move the result to the right place, so it is
+best not to mess around with this command and just use the files from the
+distribution.
+
+Back to the properties in \type {tex.properties}. You can disable keyword
+coloring altogether with:
+
+\starttyping
+lexer.tex.use.keywords=0
+\stoptyping
+
+but this is only handy for testing purposes.
+More interesting is that you can
+influence the way comment is treated:
+
+\starttyping
+lexer.tex.comment.process=0
+\stoptyping
+
+When set to zero, comment is not interpreted as \TEX\ code and it will come out
+in a uniform color. But, when set to one, you will get as many colors as in a
+\TEX\ source. It's a matter of taste what you choose.
+
+The lexer tries to cope with the \TEX\ syntax as well as possible and takes for
+instance care of the funny \type {^^} notation. A special treatment is
+applied to so called \type {\if}'s:
+
+\starttyping
+lexer.tex.auto.if=1
+\stoptyping
+
+This is the default setting. When set to one, all \type {\ifwhatever}'s will be
+seen as a command. When set to zero, only the primitive \type {\if}'s will be
+treated. In order not to confuse you, when this property is set to one, the
+lexer will not color an \type {\ifwhatever} that follows a \type {\newif}.
+
+\subject{The \METAPOST\ lexer}
+
+The \METAPOST\ lexer is set up slightly differently from its \TEX\ counterpart,
+first of all because \METAPOST\ is more of a language than \TEX. As with the
+\TEX\ lexer, we can control the interpretation of identifiers. The \METAPOST\
+specific configuration file is:
+
+\starttyping
+metapost.properties
+\stoptyping
+
+Here you can find properties like:
+
+\starttyping
+lexer.metapost.interface.default=1
+\stoptyping
+
+Instead of editing the configuration file you can control the lexer with the
+first line in your document:
+
+\starttyping
+% interface=none|metapost|mp|metafun
+\stoptyping
+
+The numbers and keywords have the following meaning:
+
+\starttabulate[|lT|lT|p|]
+\NC 0 \NC none \NC no highlighting of identifiers \NC \NR
+\NC 1 \NC metapost or mp \NC \METAPOST\ primitives and macros \NC \NR
+\NC 2 \NC metafun \NC \METAFUN\ macros \NC \NR
+\stoptabulate
+
+Similar to the \TEX\ lexer, you can influence the way comments are handled:
+
+\starttyping
+lexer.metapost.comment.process=1
+\stoptyping
+
+This will interpret comment as \METAPOST\ code, which is not that useful
+(as opposed to \TEX, where documentation is often coded in \TEX).
+
+The lexer will color the \METAPOST\ keywords and, when enabled, also additional
+keywords (like those of \METAFUN). The additional keywords are colored and shown
+in a slanted font.
+
+The \METAFUN\ keywords are defined in a separate file:
+
+\starttyping
+metafun-scite.properties
+\stoptyping
+
+You can either copy this file to the path where your global properties file lives,
+or put a copy in the path of your user properties file. In that case you need to
+add an entry to the file \type {SciTEUser.properties}:
+
+\starttyping
+import metafun-scite
+\stoptyping
+
+The lexer is able to recognize \type {btex}||\type {etex} and will treat anything
+in between as just text. The same happens with strings (between \type {"}). Both
+act on a per line basis.
+
+\subject{Using \ConTeXt}
+
+When \type {mtxrun} is in your path, \CONTEXT\ should run out of the box. You can
+find \type {mtxrun} in:
+
+\starttyping
+<contextroot>/tex/texmf-mswin/bin
+\stoptyping
+
+or in a similar path that suits the operating system that you use.
+
+When you hit \type{CTRL-12} your document will be processed. Take a look at
+the \type {Tools} menu to see what more is provided.
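Coming back to the spell checking mentioned earlier: the lookup that the
external lexer performs against a loaded word list boils down to the
following \LUA\ logic. This is a simplified sketch of what \type
{scite-context-lexer-tex.lua} does, not the shipped code itself; the
generated \type {spell-*.lua} files map lowercased words onto their original
casing, and words shorter than three characters never reach this test.

\starttyping
-- simplified sketch: classify a word against a word list in which the
-- keys are lowercased words and the values keep the original casing
local lower = string.lower

local function classify(validwords,s)
    if not validwords then
        return "text"        -- no list loaded, so no checking at all
    end
    local word = validwords[s] or validwords[lower(s)]
    if word == s then
        return "okay"        -- exact match: rendered in a dimmed color
    elseif word then
        return "warning"     -- known word but the casing differs: orange
    else
        return "error"       -- unknown word: red
    end
end

-- with validwords = { metapost = "MetaPost" } (a made up entry):
--   classify(validwords,"MetaPost") --> "okay"
--   classify(validwords,"metapost") --> "warning"
--   classify(validwords,"metapots") --> "error"
\stoptyping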
+ +\subject{Extensions (using \LUA)} + +When the \LUA\ extensions are loaded, you will see a message +in the log pane that looks like: + +\starttyping +- see scite-ctx.properties for configuring info + +- ctx.spellcheck.wordpath set to ENV(CTXSPELLPATH) +- ctxspellpath set to c:\data\develop\context\spell +- ctx.spellcheck.wordpath expands to c:\data\develop\context\spell + +- ctx.wraptext.length is set to 65 +- key bindings: + +Shift + F11 pop up menu with ctx options + +Ctrl + B check spelling +Ctrl + M wrap text (auto indent) +Ctrl + R reset spelling results +Ctrl + I insert template +Ctrl + E open log file + +- recognized first lines: + +xml <?xml version='1.0' language='nl' +tex % language=nl +\stoptyping + +This message tells you what extras are available. + +\subject{Templates} + +There is an experimental template mechanism. One option is to define +templates in a properties file. The property file \type +{scite-ctx-context} contains definitions like: + +\starttyping +command.25.$(file.patterns.context)=insert_template \ +$(ctx.template.list.context) + +ctx.template.list.context=\ + itemize=structure.itemize.context|\ + tabulate=structure.tabulate.context|\ + natural TABLE=structure.TABLE.context|\ + use MP graphic=graphics.usemp.context|\ + reuse MP graphic=graphics.reusemp.context|\ + typeface definition=fonts.typeface.context + +ctx.template.structure.itemize.context=\ +\startitemize\n\ +\item ?\n\ +\item ?\n\ +\item ?\n\ +\stopitemize\n +\stoptyping + +The file \type {scite-ctx-example} defines \XML\ variants: + +\starttyping +command.25.$(file.patterns.example)=insert_template \ +$(ctx.template.list.example) + +ctx.template.list.example=\ + bold=font.bold.example|\ + emphasized=font.emphasized.example|\ + |\ + inline math=math.inline.example|\ + display math=math.display.example|\ + |\ + itemize=structure.itemize.example + +ctx.template.structure.itemize.example=\ +<itemize>\n\ +<item>?</item>\n\ +<item>?</item>\n\ +<item>?</item>\n\ +</itemize>\n +\stoptyping + +For larger projects it makes sense to keep templates with the +project. In one of our projects we have a directory in the +path where the project files are kept which holds template files: + +\starttyping +..../ctx-templates/achtergronden.xml +..../ctx-templates/bewijs.xml +\stoptyping + +One could define a template menu like we did previously: + +\starttyping +ctx.templatelist.example=\ + achtergronden=mathadore.achtergronden|\ + bewijs=mathadore.bewijs|\ + +ctx.template.mathadore.achtergronden.file=smt-achtergronden.xml +ctx.template.mathadore.bewijs.file=smt-bewijs.xml +\stoptyping + +However, when no such menu is defined, we will automatically scan +the directory and build the menu without user intervention. + +\subject{Using \SCITE} + +The following keybindings are available in \SCITE. Most of this +list is taken from the on|-|line help pages. 
+ +\startbuffer[keybindings] +\starttabulate[|l|p|] +\FL +\NC \rm \bf keybinding \NC \bf meaning (taken from the \SCITE\ help file) \NC \NR +\ML +\NC \type{Ctrl+Keypad+} \NC magnify text size \NC \NR +\NC \type{Ctrl+Keypad-} \NC reduce text size \NC \NR +\NC \type{Ctrl+Keypad/} \NC restore text size to normal \NC \NR +\ML +\NC \type{Ctrl+Keypad*} \NC expand or contract a fold point \NC \NR +\ML +\NC \type{Ctrl+Tab} \NC cycle through recent files \NC \NR +\ML +\NC \type{Tab} \NC indent block \NC \NR +\NC \type{Shift+Tab} \NC dedent block \NC \NR +\ML +\NC \type{Ctrl+BackSpace} \NC delete to start of word \NC \NR +\NC \type{Ctrl+Delete} \NC delete to end of word \NC \NR +\NC \type{Ctrl+Shift+BackSpace} \NC delete to start of line \NC \NR +\NC \type{Ctrl+Shift+Delete} \NC delete to end of line \NC \NR +\ML +\NC \type{Ctrl+Home} \NC go to start of document; \type{Shift} extends selection \NC \NR +\NC \type{Ctrl+End} \NC go to end of document; \type{Shift} extends selection \NC \NR +\NC \type{Alt+Home} \NC go to start of display line; \type{Shift} extends selection \NC \NR +\NC \type{Alt+End} \NC go to end of display line; \type{Shift} extends selection \NC \NR +\ML +\NC \type{Ctrl+F2} \NC create or delete a bookmark \NC \NR +\NC \type{F2} \NC go to next bookmark \NC \NR +\ML +\NC \type{Ctrl+F3} \NC find selection \NC \NR +\NC \type{Ctrl+Shift+F3} \NC find selection backwards \NC \NR +\ML +\NC \type{Ctrl+Up} \NC scroll up \NC \NR +\NC \type{Ctrl+Down} \NC scroll down \NC \NR +\ML +\NC \type{Ctrl+C} \NC copy selection to buffer \NC \NR +\NC \type{Ctrl+V} \NC insert content of buffer \NC \NR +\NC \type{Ctrl+X} \NC copy selection to buffer and delete selection \NC \NR +\ML +\NC \type{Ctrl+L} \NC line cut \NC \NR +\NC \type{Ctrl+Shift+T} \NC line copy \NC \NR +\NC \type{Ctrl+Shift+L} \NC line delete \NC \NR +\NC \type{Ctrl+T} \NC line transpose with previous \NC \NR +\NC \type{Ctrl+D} \NC line duplicate \NC \NR +\ML +\NC \type{Ctrl+K} \NC find matching preprocessor conditional, skipping nested ones \NC \NR +\NC \type{Ctrl+Shift+K} \NC select to matching preprocessor conditional \NC \NR +\NC \type{Ctrl+J} \NC find matching preprocessor conditional backwards, skipping nested ones \NC \NR +\NC \type{Ctrl+Shift+J} \NC select to matching preprocessor conditional backwards \NC \NR +\ML +\NC \type{Ctrl+[} \NC previous paragraph; \type{Shift} extends selection \NC \NR +\NC \type{Ctrl+]} \NC next paragraph; \type{Shift} extends selection \NC \NR +\NC \type{Ctrl+Left} \NC previous word; \type{Shift} extends selection \NC \NR +\NC \type{Ctrl+Right} \NC next word; \type{Shift} extends selection \NC \NR +\NC \type{Ctrl+/} \NC previous word part; \type{Shift} extends selection \NC \NR +\NC \type{Ctrl+\ } \NC next word part; \type{Shift} extends selection \NC \NR +\LL +\stoptabulate +\stopbuffer + +\getbuffer[keybindings] + +\subject{Affiliation} + +\starttabulate[|l|l|] +\NC author \NC Hans Hagen \NC \NR +\NC copyright \NC PRAGMA ADE, Hasselt NL \NC \NR +\NC more info \NC \type {www.pragma-ade.com} \NC \NR +\NC \NC \type {www.contextgarden.net} \NC \NR +\NC version \NC \currentdate \NC \NR +\stoptabulate + +\startstandardmakeup[headerstate=none,footer=none] + + \setuptabulate[before=,after=] \getbuffer[keybindings] + + \vfill + +\stopstandardmakeup + \stoptext diff --git a/scripts/context/lua/mtx-scite.lua b/scripts/context/lua/mtx-scite.lua index 0ac09b017..c969f209e 100644 --- a/scripts/context/lua/mtx-scite.lua +++ b/scripts/context/lua/mtx-scite.lua @@ -6,17 +6,22 @@ if not modules then modules = { } end 
modules ['mtx-scite'] = { license = "see context related readme files" } --- todo: append to global properties else order of loading problem --- linux problem ... files are under root protection so we need --install +local P, R, S, C, Ct, Cf, Cc, Cg = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cf, lpeg.Cc, lpeg.Cg +local lpegmatch = lpeg.match +local format, lower, gmatch = string.format, string.lower, string.gmatch + +-- local helpinfo = [[ +-- --start [--verbose] start scite +-- --test report what will happen +-- ]] local helpinfo = [[ ---start [--verbose] start scite ---test report what will happen +--words convert spell-*.txt into spell-*.lua ]] local application = logs.application { name = "mtx-scite", - banner = "Scite Startup Script 1.00", + banner = "Scite Helper Script 1.00", helpinfo = helpinfo, } @@ -25,148 +30,200 @@ local report = application.report scripts = scripts or { } scripts.scite = scripts.scite or { } -local scitesignals = { "scite-context.rme", "context.properties" } -local screenfont = "lmtypewriter10-regular.ttf" +-- todo: append to global properties else order of loading problem +-- linux problem ... files are under root protection so we need --install +-- +-- local scitesignals = { "scite-context.rme", "context.properties" } +-- local screenfont = "lmtypewriter10-regular.ttf" -function scripts.scite.start(indeed) - local usedsignal, datapath, fullname, workname, userpath, fontpath - if os.type == "windows" then - workname = "scite.exe" - userpath = os.getenv("USERPROFILE") or "" - fontpath = os.getenv("SYSTEMROOT") - fontpath = (fontpath and file.join(fontpath,"fonts")) or "" - else - workname = "scite" - userpath = os.getenv("HOME") or "" - fontpath = "" - end - local binpaths = file.split_path(os.getenv("PATH")) or file.split_path(os.getenv("path")) - for i=1,#scitesignals do - local scitesignal = scitesignals[i] - local scitepath = resolvers.findfile(scitesignal,"other text files") or "" - if scitepath ~= "" then - scitepath = file.dirname(scitepath) -- data - if scitepath == "" then - scitepath = resolvers.cleanpath(lfs.currentdir()) - else - usedsignal, datapath = scitesignal, scitepath - break - end - end - end - if not datapath or datapath == "" then - report("invalid datapath, maybe you need to regenerate the file database") - return false - end - if not binpaths or #binpaths == 0 then - report("invalid binpath") - return false - end - for i=1,#binpaths do - local p = file.join(binpaths[i],workname) - if lfs.isfile(p) and lfs.attributes(p,"size") > 10000 then -- avoind stub - fullname = p - break - end - end - if not fullname then - report("unable to locate %s",workname) - return false - end - local properties = dir.glob(file.join(datapath,"*.properties")) - local luafiles = dir.glob(file.join(datapath,"*.lua")) - local extrafont = resolvers.findfile(screenfont,"truetype font") or "" - local pragmafound = dir.glob(file.join(datapath,"pragma.properties")) - if userpath == "" then - report("unable to figure out userpath") - return false - end - local verbose = environment.argument("verbose") - local tobecopied, logdata = { }, { } - local function check_state(fullname,newpath) - local basename = file.basename(fullname) - local destination = file.join(newpath,basename) - local pa, da = lfs.attributes(fullname), lfs.attributes(destination) - if not da then - logdata[#logdata+1] = { "new : %s", basename } - tobecopied[#tobecopied+1] = { fullname, destination } - elseif pa.modification > da.modification then - logdata[#logdata+1] = { "outdated : %s", basename 
} - tobecopied[#tobecopied+1] = { fullname, destination } - else - logdata[#logdata+1] = { "up to date : %s", basename } - end - end - for i=1,#properties do - check_state(properties[i],userpath) - end - for i=1,#luafiles do - check_state(luafiles[i],userpath) - end - if fontpath ~= "" then - check_state(extrafont,fontpath) - end - local userpropfile = "SciTEUser.properties" - if os.name ~= "windows" then - userpropfile = "." .. userpropfile +-- function scripts.scite.start(indeed) +-- local usedsignal, datapath, fullname, workname, userpath, fontpath +-- if os.type == "windows" then +-- workname = "scite.exe" +-- userpath = os.getenv("USERPROFILE") or "" +-- fontpath = os.getenv("SYSTEMROOT") +-- fontpath = (fontpath and file.join(fontpath,"fonts")) or "" +-- else +-- workname = "scite" +-- userpath = os.getenv("HOME") or "" +-- fontpath = "" +-- end +-- local binpaths = file.split_path(os.getenv("PATH")) or file.split_path(os.getenv("path")) +-- for i=1,#scitesignals do +-- local scitesignal = scitesignals[i] +-- local scitepath = resolvers.findfile(scitesignal,"other text files") or "" +-- if scitepath ~= "" then +-- scitepath = file.dirname(scitepath) -- data +-- if scitepath == "" then +-- scitepath = resolvers.cleanpath(lfs.currentdir()) +-- else +-- usedsignal, datapath = scitesignal, scitepath +-- break +-- end +-- end +-- end +-- if not datapath or datapath == "" then +-- report("invalid datapath, maybe you need to regenerate the file database") +-- return false +-- end +-- if not binpaths or #binpaths == 0 then +-- report("invalid binpath") +-- return false +-- end +-- for i=1,#binpaths do +-- local p = file.join(binpaths[i],workname) +-- if lfs.isfile(p) and lfs.attributes(p,"size") > 10000 then -- avoind stub +-- fullname = p +-- break +-- end +-- end +-- if not fullname then +-- report("unable to locate %s",workname) +-- return false +-- end +-- local properties = dir.glob(file.join(datapath,"*.properties")) +-- local luafiles = dir.glob(file.join(datapath,"*.lua")) +-- local extrafont = resolvers.findfile(screenfont,"truetype font") or "" +-- local pragmafound = dir.glob(file.join(datapath,"pragma.properties")) +-- if userpath == "" then +-- report("unable to figure out userpath") +-- return false +-- end +-- local verbose = environment.argument("verbose") +-- local tobecopied, logdata = { }, { } +-- local function check_state(fullname,newpath) +-- local basename = file.basename(fullname) +-- local destination = file.join(newpath,basename) +-- local pa, da = lfs.attributes(fullname), lfs.attributes(destination) +-- if not da then +-- logdata[#logdata+1] = { "new : %s", basename } +-- tobecopied[#tobecopied+1] = { fullname, destination } +-- elseif pa.modification > da.modification then +-- logdata[#logdata+1] = { "outdated : %s", basename } +-- tobecopied[#tobecopied+1] = { fullname, destination } +-- else +-- logdata[#logdata+1] = { "up to date : %s", basename } +-- end +-- end +-- for i=1,#properties do +-- check_state(properties[i],userpath) +-- end +-- for i=1,#luafiles do +-- check_state(luafiles[i],userpath) +-- end +-- if fontpath ~= "" then +-- check_state(extrafont,fontpath) +-- end +-- local userpropfile = "SciTEUser.properties" +-- if os.name ~= "windows" then +-- userpropfile = "." .. 
userpropfile +-- end +-- local fullpropfile = file.join(userpath,userpropfile) +-- local userpropdata = io.loaddata(fullpropfile) or "" +-- local propfiledone = false +-- if pragmafound then +-- if userpropdata == "" then +-- logdata[#logdata+1] = { "error : no user properties found on '%s'", fullpropfile } +-- elseif string.find(userpropdata,"import *pragma") then +-- logdata[#logdata+1] = { "up to date : 'import pragma' in '%s'", userpropfile } +-- else +-- logdata[#logdata+1] = { "yet unset : 'import pragma' in '%s'", userpropfile } +-- userproperties = userpropdata .. "\n\nimport pragma\n\n" +-- propfiledone = true +-- end +-- else +-- if string.find(userpropdata,"import *context") then +-- logdata[#logdata+1] = { "up to date : 'import context' in '%s'", userpropfile } +-- else +-- logdata[#logdata+1] = { "yet unset : 'import context' in '%s'", userpropfile } +-- userproperties = userpropdata .. "\n\nimport context\n\n" +-- propfiledone = true +-- end +-- end +-- if not indeed or verbose then +-- report("used signal: %s", usedsignal) +-- report("data path : %s", datapath) +-- report("full name : %s", fullname) +-- report("user path : %s", userpath) +-- report("extra font : %s", extrafont) +-- end +-- if #logdata > 0 then +-- report("") +-- for k=1,#logdata do +-- local v = logdata[k] +-- report(v[1],v[2]) +-- end +-- end +-- if indeed then +-- if #tobecopied > 0 then +-- report("warning : copying updated files") +-- for i=1,#tobecopied do +-- local what = tobecopied[i] +-- report("copying : '%s' => '%s'",what[1],what[2]) +-- file.copy(what[1],what[2]) +-- end +-- end +-- if propfiledone then +-- report("saving : '%s'",userpropfile) +-- io.savedata(fullpropfile,userpropdata) +-- end +-- os.launch(fullname) +-- end +-- end + +-- local splitter = (Cf(Ct("") * (Cg(C(R("az","AZ","\127\255")^1) * Cc(true)) + P(1))^1,rawset) )^0 +-- +-- local function splitwords(words) +-- return lpegmatch(splitter,words) -- or just split and tohash +-- end + +local function splitwords(words) + local w = { } + for s in string.gmatch(words,"[a-zA-Z\127-255]+") do + w[lower(s)] = s end - local fullpropfile = file.join(userpath,userpropfile) - local userpropdata = io.loaddata(fullpropfile) or "" - local propfiledone = false - if pragmafound then - if userpropdata == "" then - logdata[#logdata+1] = { "error : no user properties found on '%s'", fullpropfile } - elseif string.find(userpropdata,"import *pragma") then - logdata[#logdata+1] = { "up to date : 'import pragma' in '%s'", userpropfile } - else - logdata[#logdata+1] = { "yet unset : 'import pragma' in '%s'", userpropfile } - userproperties = userpropdata .. 
"\n\nimport pragma\n\n" - propfiledone = true - end - else - if string.find(userpropdata,"import *context") then - logdata[#logdata+1] = { "up to date : 'import context' in '%s'", userpropfile } + return w +end + +-- maybe: lowerkey = UpperWhatever + +function scripts.scite.words() + for i=1,#environment.files do + local tag = environment.files[i] + local tag = string.match(tag,"spell%-(..)%.") or tag + local txtname = format("spell-%s.txt",tag) + local luaname = format("spell-%s.lua",tag) + local lucname = format("spell-%s.luc",tag) + if lfs.isfile(txtname) then + report("loading %s",txtname) + local olddata = io.loaddata(txtname) or "" + local newdata = { + words = splitwords(olddata), + -- words = olddata, + source = oldname, + } + report("saving %s",luaname) + io.savedata(luaname,table.serialize(newdata,true)) + report("compiling %s",lucname) + os.execute(format("luac -s -o %s %s",lucname,luaname)) else - logdata[#logdata+1] = { "yet unset : 'import context' in '%s'", userpropfile } - userproperties = userpropdata .. "\n\nimport context\n\n" - propfiledone = true + report("no data file %s",txtname) end end - if not indeed or verbose then - report("used signal: %s", usedsignal) - report("data path : %s", datapath) - report("full name : %s", fullname) - report("user path : %s", userpath) - report("extra font : %s", extrafont) - end - if #logdata > 0 then - report("") - for k=1,#logdata do - local v = logdata[k] - report(v[1],v[2]) - end - end - if indeed then - if #tobecopied > 0 then - report("warning : copying updated files") - for i=1,#tobecopied do - local what = tobecopied[i] - report("copying : '%s' => '%s'",what[1],what[2]) - file.copy(what[1],what[2]) - end - end - if propfiledone then - report("saving : '%s'",userpropfile) - io.savedata(fullpropfile,userpropdata) - end - os.launch(fullname) - end + report("you need to move the lua files to lexers/data") end -if environment.argument("start") then - scripts.scite.start(true) -elseif environment.argument("test") then - scripts.scite.start() +-- if environment.argument("start") then +-- scripts.scite.start(true) +-- elseif environment.argument("test") then +-- scripts.scite.start() +-- else +-- application.help() +-- end + +if environment.argument("words") then + scripts.scite.words() else application.help() end + diff --git a/tex/context/base/cont-new.mkii b/tex/context/base/cont-new.mkii index b6c35d923..d90576138 100644 --- a/tex/context/base/cont-new.mkii +++ b/tex/context/base/cont-new.mkii @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. -\newcontextversion{2011.09.17 09:40} +\newcontextversion{2011.09.17 15:56} %D This file is loaded at runtime, thereby providing an %D excellent place for hacks, patches, extensions and new diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv index 9ee827ce1..a2b5e8fc9 100644 --- a/tex/context/base/cont-new.mkiv +++ b/tex/context/base/cont-new.mkiv @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. 
-\newcontextversion{2011.09.17 09:40} +\newcontextversion{2011.09.17 15:56} %D This file is loaded at runtime, thereby providing an %D excellent place for hacks, patches, extensions and new diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf Binary files differindex 781926a10..94d1b8d35 100644 --- a/tex/context/base/context-version.pdf +++ b/tex/context/base/context-version.pdf diff --git a/tex/context/base/context-version.png b/tex/context/base/context-version.png Binary files differindex ca5f0a5e2..876c4ab7d 100644 --- a/tex/context/base/context-version.png +++ b/tex/context/base/context-version.png diff --git a/tex/context/base/context.mkii b/tex/context/base/context.mkii index 04eb70d4e..d7841eb80 100644 --- a/tex/context/base/context.mkii +++ b/tex/context/base/context.mkii @@ -20,7 +20,7 @@ %D your styles an modules. \edef\contextformat {\jobname} -\edef\contextversion{2011.09.17 09:40} +\edef\contextversion{2011.09.17 15:56} %D For those who want to use this: diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv index 5bb9eb360..b8b7378f4 100644 --- a/tex/context/base/context.mkiv +++ b/tex/context/base/context.mkiv @@ -20,7 +20,7 @@ %D your styles an modules. \edef\contextformat {\jobname} -\edef\contextversion{2011.09.17 09:40} +\edef\contextversion{2011.09.17 15:56} %D For those who want to use this: diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf Binary files differindex c2bd7d806..edab0a6b8 100644 --- a/tex/context/base/status-files.pdf +++ b/tex/context/base/status-files.pdf diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf Binary files differindex 26089e691..eee3ffe94 100644 --- a/tex/context/base/status-lua.pdf +++ b/tex/context/base/status-lua.pdf diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua index d779357af..132562502 100644 --- a/tex/generic/context/luatex/luatex-fonts-merged.lua +++ b/tex/generic/context/luatex/luatex-fonts-merged.lua @@ -1,6 +1,6 @@ -- merged file : luatex-fonts-merged.lua -- parent file : luatex-fonts.lua --- merge date : 09/17/11 09:40:07 +-- merge date : 09/17/11 15:56:52 do -- begin closure to overcome local limits and interference |