path: root/context/data/scite/lexers
author     Marius <mariausol@gmail.com>  2011-10-03 15:00:13 +0300
committer  Marius <mariausol@gmail.com>  2011-10-03 15:00:13 +0300
commit     070e1ac9465175c8b3674540c243882a3b3900b5 (patch)
tree       076c007973da2d5589d56d5eb439858be972597a /context/data/scite/lexers
parent     dac8b3d4c89eab191488cf107ada8c16570579cf (diff)
download   context-070e1ac9465175c8b3674540c243882a3b3900b5.tar.gz
beta 2011.10.03 12:59
Diffstat (limited to 'context/data/scite/lexers')
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-cld.lua  |  13
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-lua.lua  |  22
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-tex.lua  |  10
-rw-r--r--  context/data/scite/lexers/scite-context-lexer.lua      | 215
4 files changed, 147 insertions, 113 deletions
diff --git a/context/data/scite/lexers/scite-context-lexer-cld.lua b/context/data/scite/lexers/scite-context-lexer-cld.lua
index f81119adf..642c469a9 100644
--- a/context/data/scite/lexers/scite-context-lexer-cld.lua
+++ b/context/data/scite/lexers/scite-context-lexer-cld.lua
@@ -7,13 +7,16 @@ local info = {
}
local lexer = lexer
+local token = lexer.token
module(...)
-local cldlexer = lexer.load('scite-context-lexer-lua')
+local cldlexer = _M
+local lualexer = lexer.load('scite-context-lexer-lua')
-_rules = cldlexer._rules_cld
-_tokenstyles = cldlexer._tokenstyles
-_foldsymbols = cldlexer._foldsymbols
+_rules = lualexer._rules_cld
+_tokenstyles = lualexer._tokenstyles
+_foldsymbols = lualexer._foldsymbols
+_directives = lualexer._directives
-_directives = cldlexer._directives
+-- _rules[1] = { "whitespace", token(cldlexer.WHITESPACE, lexer.space^1) }
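
The net effect of this first hunk is that scite-context-lexer-cld.lua becomes a thin wrapper: it keeps _M for itself and pulls the cld-specific tables from the shared Lua lexer instead of from its own module. A rough sketch of the resulting module, reassembled from the unchanged and added lines above (the info header of the real file is abbreviated here):

    local info = { } -- version/author fields omitted in this sketch

    local lexer = lexer
    local token = lexer.token

    module(...)

    local cldlexer = _M                                     -- this module
    local lualexer = lexer.load('scite-context-lexer-lua')  -- shared Lua lexer

    _rules       = lualexer._rules_cld   -- the cld flavour of the rules
    _tokenstyles = lualexer._tokenstyles
    _foldsymbols = lualexer._foldsymbols
    _directives  = lualexer._directives

    -- _rules[1] = { "whitespace", token(cldlexer.WHITESPACE, lexer.space^1) }
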
diff --git a/context/data/scite/lexers/scite-context-lexer-lua.lua b/context/data/scite/lexers/scite-context-lexer-lua.lua
index 1b55be55f..e10f49ff3 100644
--- a/context/data/scite/lexers/scite-context-lexer-lua.lua
+++ b/context/data/scite/lexers/scite-context-lexer-lua.lua
@@ -16,7 +16,7 @@ local global = _G
module(...)
-local cldlexer = _M
+local lualexer = _M
_directives = { } -- communication channel
@@ -83,7 +83,7 @@ local longcomment = Cmt(#('[[' + ('[' * C(P('=')^0) * '[')), function(input,ind
return stop and stop + 1 or #input + 1
end)
-local whitespace = cldlexer.WHITESPACE -- triggers states
+local whitespace = lualexer.WHITESPACE -- triggers states
local space = lexer.space -- S(" \n\r\t\f\v")
local any = lexer.any
@@ -147,9 +147,10 @@ _rules = {
{ 'constant', constant },
{ 'identifier', identifier },
{ 'string', string },
+ { 'number', number },
{ 'longcomment', longcomment },
{ 'shortcomment', shortcomment },
- { 'number', number },
+-- { 'number', number },
{ 'operator', operator },
{ 'rest', rest },
}
@@ -159,9 +160,10 @@ _tokenstyles = lexer.context.styleset
_foldsymbols = {
_patterns = {
'%l+',
- '[%({%)}%[%]]',
+ -- '[%({%)}%[%]]',
+ '[{}%[%]]',
},
- ['keyword'] = {
+ ['keyword'] = { -- challenge: if=0 then=1 else=-1 elseif=-1
['if'] = 1,
['end'] = -1,
['do'] = 1,
@@ -176,7 +178,7 @@ _foldsymbols = {
['['] = 1, [']'] = -1,
},
['special'] = {
- ['('] = 1, [')'] = -1,
+ -- ['('] = 1, [')'] = -1,
['{'] = 1, ['}'] = -1,
},
}
@@ -184,7 +186,7 @@ _foldsymbols = {
-- embedded in tex:
local cstoken = R("az","AZ","\127\255") + S("@!?_")
-local csnametex = P("\\") * cstoken^1
+local texcsname = P("\\") * cstoken^1
local commentline = P('%') * (1-S("\n\r"))^0
local texcomment = token('comment', Cmt(commentline, function() return _directives.cld_inline end))
@@ -197,8 +199,9 @@ local texstring = token("quote", longthreestart)
* token("string", longthreestring)
* token("quote", longthreestop)
--- local texcommand = token("user", csnametex)
---
+-- local texcommand = token("user", texcsname)
+local texcommand = token("warning", texcsname)
+
-- local texstring = token("quote", longthreestart)
-- * (texcommand + token("string",P(1-texcommand-longthreestop)^1) - longthreestop)^0 -- we match long non-\cs sequences
-- * token("quote", longthreestop)
@@ -207,6 +210,7 @@ _rules_cld = {
{ 'whitespace', spacing },
{ 'texstring', texstring },
{ 'texcomment', texcomment },
+ { 'texcommand', texcommand },
{ 'keyword', keyword },
{ 'function', builtin },
{ 'csname', csname },
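
Besides renaming cldlexer to lualexer and restricting the fold patterns to braces and brackets, this hunk activates a texcommand rule, so TeX control sequences that appear in embedded Lua are tokenized with the "warning" style. A standalone sketch of the underlying pattern, assuming plain lpeg is available outside the lexer framework:

    local lpeg = require("lpeg")
    local P, R, S, C = lpeg.P, lpeg.R, lpeg.S, lpeg.C

    -- same pattern as in the hunk above: a backslash followed by csname characters
    local cstoken   = R("az","AZ","\127\255") + S("@!?_")
    local texcsname = P("\\") * cstoken^1

    print(lpeg.match(C(texcsname), "\\starttext"))   --> \starttext
    print(lpeg.match(C(texcsname), "no backslash"))  --> nil
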
diff --git a/context/data/scite/lexers/scite-context-lexer-tex.lua b/context/data/scite/lexers/scite-context-lexer-tex.lua
index 8204ae3a0..2c82454d4 100644
--- a/context/data/scite/lexers/scite-context-lexer-tex.lua
+++ b/context/data/scite/lexers/scite-context-lexer-tex.lua
@@ -329,7 +329,7 @@ local function startinlinelua(_,i,s)
cldlexer._directives.cld_inline = true
lualevel = 1
return true
- else
+ else -- if luastatus == "inline" then
lualevel = lualevel + 1
return true
end
@@ -351,7 +351,7 @@ local function stopinlinelua_e(_,i,s) -- }
return false
elseif luastatus == "inline" then
lualevel = lualevel - 1
- local ok = lualevel <= 0
+ local ok = lualevel <= 0 -- was 0
if ok then
cldlexer._directives.cld_inline = false
luastatus = false
@@ -362,6 +362,12 @@ local function stopinlinelua_e(_,i,s) -- }
end
end
+contextlexer._reset_parser = function()
+ luastatus = false
+ luatag = nil
+ lualevel = 0
+end
+
local luaenvironment = P("luacode")
local inlinelua = P("\\") * (
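
The new contextlexer._reset_parser clears the inline-Lua bookkeeping (luastatus, luatag, lualevel) so that a fresh fold run does not inherit state from the previous one; the reworked folder in scite-context-lexer.lua (next file) calls it right before scanning. A minimal sketch of that contract, with mylexer as a hypothetical stand-in for a lexer module:

    -- hypothetical lexer table standing in for a real lexer module
    local parserstate = { status = false, tag = nil, level = 0 }

    local mylexer = {
        _reset_parser = function()
            -- forget whatever was tracked during the previous run
            parserstate.status = false
            parserstate.tag    = nil
            parserstate.level  = 0
        end,
    }

    -- the call site, as in fold_by_parsing below: reset only if the hook exists
    if mylexer._reset_parser then
        mylexer._reset_parser()
    end
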
diff --git a/context/data/scite/lexers/scite-context-lexer.lua b/context/data/scite/lexers/scite-context-lexer.lua
index 13b8d77a8..8573d331c 100644
--- a/context/data/scite/lexers/scite-context-lexer.lua
+++ b/context/data/scite/lexers/scite-context-lexer.lua
@@ -321,128 +321,149 @@ setmetatable(n_table, { __index = function(t,level) local v = { level
-- end
-- line_num = line_num + 1
-- end
---
--- -- not that much faster but less memory:
-
-local action_y, action_n
-local newline = P("\r\n") + S("\r\n")
-local splitlines = ( (
- (Cp() * Cs((1-newline)^1) * newline^-1) / function(p,l) action_y(p,l) end
- + ( newline ) / function() action_n() end
-) )^0
+local newline = P("\r\n") + S("\r\n")
+local p_yes = Cp() * Cs((1-newline)^1) * newline^-1
+local p_nop = newline
-function context.fold(text, start_pos, start_line, start_level)
- if text == '' then
+local function fold_by_parsing(text,start_pos,start_line,start_level,lexer)
+ local foldsymbols = lexer._foldsymbols
+ if not foldsymbols then
return { }
end
- local lexer = global._LEXER
- if lexer._fold then
- return lexer._fold(text, start_pos, start_line, start_level)
+ local patterns = foldsymbols._patterns
+ if not patterns then
+ return { }
+ end
+ local nofpatterns = #patterns
+ if nofpatterns == 0 then
+ return { }
end
local folds = { }
- if lexer._foldsymbols then
- local fold_symbols = lexer._foldsymbols
- local line_num = start_line
- local prev_level = start_level
- local current_level = prev_level
- local patterns = fold_symbols._patterns
- local nofpatterns = #patterns
- local hash = fold_symbols._hash
- if not hash then
- hash = { }
- for symbol, matches in next, fold_symbols do
- if not find(symbol,"^_") then
- for s, _ in next, matches do
- hash[s] = true
- end
+ local line_num = start_line
+ local prev_level = start_level
+ local current_level = prev_level
+ local validmatches = foldsymbols._validmatches
+ if not validmatches then
+ validmatches = { }
+ for symbol, matches in next, foldsymbols do -- whatever = { start = 1, stop = -1 }
+ if not find(symbol,"^_") then -- brrr
+ for s, _ in next, matches do
+ validmatches[s] = true
end
end
- fold_symbols._hash = hash
end
- action_y = function(pos,line) -- we can consider moving this one outside the function
- for i=1,nofpatterns do
- for s, m in gmatch(line,patterns[i]) do
- if hash[m] then
- local symbols = fold_symbols[get_style_at(start_pos + pos + s - 1)]
- if symbols then
- local l = symbols[m]
- if l then
- local t = type(l)
- if t == 'number' then
- current_level = current_level + l
- if current_level < FOLD_BASE then -- can this happen?
- current_level = FOLD_BASE
- end
- elseif t == 'function' then
- current_level = current_level + l(text, pos, line, s, match)
- if current_level < FOLD_BASE then
- current_level = FOLD_BASE
- end
- end
+ foldsymbols._validmatches = validmatches
+ end
+ local function action_y(pos,line) -- we can consider moving the local functions outside (drawback: folds is kept)
+ for i=1,nofpatterns do
+ for s, m in gmatch(line,patterns[i]) do
+ if validmatches[m] then
+ local symbols = foldsymbols[get_style_at(start_pos + pos + s - 1)]
+ if symbols then
+ local action = symbols[m]
+ if action then
+ if type(action) == 'number' then -- we could store this in validmatches if there was only one symbol category
+ current_level = current_level + action
+ else
+ current_level = current_level + action(text,pos,line,s,m)
+ end
+ if current_level < FOLD_BASE then
+ current_level = FOLD_BASE
end
end
end
end
end
- if current_level > prev_level then
- folds[line_num] = h_table[prev_level] -- { prev_level, FOLD_HEADER }
- else
- folds[line_num] = n_table[prev_level] -- { prev_level }
- end
- prev_level = current_level
- line_num = line_num + 1
end
- action_n = function() -- we can consider moving this one outside the function
- folds[line_num] = b_table[prev_level] -- { prev_level, FOLD_BLANK }
- line_num = line_num + 1
+ if current_level > prev_level then
+ folds[line_num] = h_table[prev_level] -- { prev_level, FOLD_HEADER }
+ else
+ folds[line_num] = n_table[prev_level] -- { prev_level }
end
- local lines = lpegmatch(splitlines,text)
- elseif get_property('fold.by.indentation',1) == 1 then
- local current_line = start_line
- local prev_level = start_level
- for _, line in gmatch(text,'([\t ]*)(.-)\r?\n') do
- if line ~= "" then
- local current_level = FOLD_BASE + get_indent_amount(current_line)
- if current_level > prev_level then -- next level
- local i = current_line - 1
- while true do
- local f = folds[i]
- if f and f[2] == FOLD_BLANK then
- i = i - 1
- else
- break
- end
- end
+ prev_level = current_level
+ line_num = line_num + 1
+ end
+ local function action_n()
+ folds[line_num] = b_table[prev_level] -- { prev_level, FOLD_BLANK }
+ line_num = line_num + 1
+ end
+ if lexer._reset_parser then
+ lexer._reset_parser()
+ end
+ local lpegpattern = (p_yes/action_y + p_nop/action_n)^0 -- not too efficient but indirect function calls are neither but
+ lpegmatch(lpegpattern,text) -- keys are not pressed that fast ... large files are slow anyway
+ return folds
+end
+
+local function fold_by_indentation(text,start_pos,start_line,start_level)
+ local folds = { }
+ local current_line = start_line
+ local prev_level = start_level
+ for _, line in gmatch(text,'([\t ]*)(.-)\r?\n') do
+ if line ~= "" then
+ local current_level = FOLD_BASE + get_indent_amount(current_line)
+ if current_level > prev_level then -- next level
+ local i = current_line - 1
+ while true do
local f = folds[i]
- if f then
- f[2] = FOLD_HEADER
- end -- low indent
- folds[current_line] = n_table[current_level] -- { current_level } -- high indent
- elseif current_level < prev_level then -- prev level
- local f = folds[current_line - 1]
- if f then
- f[1] = prev_level -- high indent
+ if f and f[2] == FOLD_BLANK then
+ i = i - 1
+ else
+ break
end
- folds[current_line] = n_table[current_level] -- { current_level } -- low indent
- else -- same level
- folds[current_line] = n_table[prev_level] -- { prev_level }
end
- prev_level = current_level
- else
- folds[current_line] = b_table[prev_level] -- { prev_level, FOLD_BLANK }
+ local f = folds[i]
+ if f then
+ f[2] = FOLD_HEADER
+ end -- low indent
+ folds[current_line] = n_table[current_level] -- { current_level } -- high indent
+ elseif current_level < prev_level then -- prev level
+ local f = folds[current_line - 1]
+ if f then
+ f[1] = prev_level -- high indent
+ end
+ folds[current_line] = n_table[current_level] -- { current_level } -- low indent
+ else -- same level
+ folds[current_line] = n_table[prev_level] -- { prev_level }
end
- current_line = current_line + 1
- end
- else
- for _ in gmatch(text,".-\r?\n") do
- folds[start_line] = n_table[start_level] -- { start_level }
- start_line = start_line + 1
+ prev_level = current_level
+ else
+ folds[current_line] = b_table[prev_level] -- { prev_level, FOLD_BLANK }
end
+ current_line = current_line + 1
+ end
+ return folds
+end
+
+local function fold_by_line(text,start_pos,start_line,start_level)
+ local folds = { }
+ for _ in gmatch(text,".-\r?\n") do
+ folds[start_line] = n_table[start_level] -- { start_level }
+ start_line = start_line + 1
end
return folds
end
+function context.fold(text,start_pos,start_line,start_level)
+ if text == '' then
+ return { }
+ end
+ local lexer = global._LEXER
+ local fold_by_lexer = lexer._fold
+ if fold_by_lexer then
+ return fold_by_lexer(text,start_pos,start_line,start_level,lexer)
+ elseif get_property('fold.by.parsing',1) > 0 then
+ return fold_by_parsing(text,start_pos,start_line,start_level,lexer)
+ elseif get_property('fold.by.indentation',1) > 0 then -- not that useful
+ return fold_by_indentation(text,start_pos,start_line,start_level,lexer)
+ elseif get_property('fold.by.line',1) > 0 then -- rather useless
+ return fold_by_line(text,start_pos,start_line,start_level,lexer)
+ else
+ return { }
+ end
+end
+
function context.lex(text,init_style)
local lexer = global._LEXER
local grammar = lexer._GRAMMAR