author     Marius <mariausol@gmail.com>  2012-04-20 00:40:13 +0300
committer  Marius <mariausol@gmail.com>  2012-04-20 00:40:13 +0300
commit     959400a677b3eb6ff6513a750be6dde943e62c36 (patch)
tree       91e6f0884f2b018acd879276f1c976440b7c835b
parent     0756a263c41de5279fef717c5b9cca9909308c3a (diff)
download   context-959400a677b3eb6ff6513a750be6dde943e62c36.tar.gz
beta 2012.04.19 23:18
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-cld.lua             8
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-lua-longstring.lua  3
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-lua.lua             20
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-mps.lua             2
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-pdf-object.lua      2
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-pdf-xref.lua        2
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-pdf.lua             2
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-tex.lua             55
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-txt.lua             22
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-xml-cdata.lua       2
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-xml-comment.lua     2
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-xml.lua             23
-rw-r--r--  context/data/scite/lexers/scite-context-lexer.lua                 113
-rw-r--r--  context/data/scite/lexers/themes/scite-context-theme.lua          13
-rw-r--r--  context/data/scite/scite-context.properties                       22
-rw-r--r--  context/data/scite/scite-ctx.lua                                  2
-rw-r--r--  scripts/context/lua/mtxrun.lua                                    14
-rw-r--r--  scripts/context/stubs/mswin/mtxrun.lua                            14
-rw-r--r--  scripts/context/stubs/unix/mtxrun                                 14
-rw-r--r--  tex/context/base/cont-new.mkii                                    2
-rw-r--r--  tex/context/base/cont-new.mkiv                                    2
-rw-r--r--  tex/context/base/context-version.pdf                              bin 4073 -> 4074 bytes
-rw-r--r--  tex/context/base/context-version.png                              bin 106054 -> 105915 bytes
-rw-r--r--  tex/context/base/context.mkii                                     2
-rw-r--r--  tex/context/base/context.mkiv                                     2
-rw-r--r--  tex/context/base/core-dat.lua                                     63
-rw-r--r--  tex/context/base/lxml-tab.lua                                     14
-rw-r--r--  tex/context/base/mult-sys.mkiv                                    1
-rw-r--r--  tex/context/base/page-imp.mkiv                                    12
-rw-r--r--  tex/context/base/status-files.pdf                                 bin 24323 -> 24321 bytes
-rw-r--r--  tex/context/base/status-lua.pdf                                   bin 173014 -> 173036 bytes
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua                2
32 files changed, 332 insertions, 103 deletions
diff --git a/context/data/scite/lexers/scite-context-lexer-cld.lua b/context/data/scite/lexers/scite-context-lexer-cld.lua
index 4d235b1af..4aa2901d2 100644
--- a/context/data/scite/lexers/scite-context-lexer-cld.lua
+++ b/context/data/scite/lexers/scite-context-lexer-cld.lua
@@ -9,9 +9,10 @@ local info = {
local lexer = lexer
local token = lexer.token
--- local cldlexer = { _NAME = "cld" }
-local cldlexer = { _NAME = "lua" } -- get whitespace right
-local whitespace = lexer.WHITESPACE
+local cldlexer = { _NAME = "cld", _FILENAME = "scite-context-lexer-cld" }
+local whitespace = lexer.WHITESPACE -- maybe we need to fix this
+local context = lexer.context
+
local lualexer = lexer.load('scite-context-lexer-lua')
cldlexer._rules = lualexer._rules_cld
@@ -20,4 +21,3 @@ cldlexer._foldsymbols = lualexer._foldsymbols
cldlexer._directives = lualexer._directives
return cldlexer
-
diff --git a/context/data/scite/lexers/scite-context-lexer-lua-longstring.lua b/context/data/scite/lexers/scite-context-lexer-lua-longstring.lua
index f4ac2cff0..26bdb8dbc 100644
--- a/context/data/scite/lexers/scite-context-lexer-lua-longstring.lua
+++ b/context/data/scite/lexers/scite-context-lexer-lua-longstring.lua
@@ -2,8 +2,9 @@ local lexer = lexer
local token = lexer.token
local P = lpeg.P
-local stringlexer = { _NAME = "lua-longstring" }
+local stringlexer = { _NAME = "lua-longstring", _FILENAME = "scite-context-lexer-lua-longstring" }
local whitespace = lexer.WHITESPACE
+local context = lexer.context
local space = lexer.space
local nospace = 1 - space
diff --git a/context/data/scite/lexers/scite-context-lexer-lua.lua b/context/data/scite/lexers/scite-context-lexer-lua.lua
index 473e45bb2..4f48fb2a5 100644
--- a/context/data/scite/lexers/scite-context-lexer-lua.lua
+++ b/context/data/scite/lexers/scite-context-lexer-lua.lua
@@ -15,9 +15,11 @@ local match, find = string.match, string.find
local setmetatable = setmetatable
-- beware: all multiline is messy, so even if it's no lexer, it should be an embedded lexer
+-- we probably could use a local whitespace variant but this is cleaner
-local lualexer = { _NAME = "lua" }
+local lualexer = { _NAME = "lua", _FILENAME = "scite-context-lexer-lua" }
local whitespace = lexer.WHITESPACE
+local context = lexer.context
local stringlexer = lexer.load("scite-context-lexer-lua-longstring")
@@ -129,7 +131,7 @@ local string = shortstring
lexer.embed_lexer(lualexer, stringlexer, token("quote",longtwostart), token("string",longtwostring_body) * token("quote",longtwostring_end))
-local integer = P('-')^-1 * (lexer.hex_num + lexer.dec_num)
+local integer = P("-")^-1 * (lexer.hex_num + lexer.dec_num)
local number = token("number", lexer.float + integer)
-- officially 127-255 are ok but not utf so useless
@@ -138,7 +140,11 @@ local validword = R("AZ","az","__") * R("AZ","az","__","09")^0
local identifier = token("default",validword)
-local operator = token("special", P('..') + P('~=') + S('+-*/%^#=<>;:,.{}[]()')) -- maybe split off {}[]()
+----- operator = token("special", P('..') + P('~=') + S('+-*/%^#=<>;:,.{}[]()')) -- maybe split off {}[]()
+----- operator = token("special", S('+-*/%^#=<>;:,{}[]()') + P('..') + P('.') + P('~=') ) -- maybe split off {}[]()
+local operator = token("special", S('+-*/%^#=<>;:,{}[]().') + P('~=') )
+
+local structure = token("special", S('{}[]()'))
local optionalspace = spacing^0
local hasargument = #S("{(")
@@ -162,6 +168,7 @@ local csname = token("user", exact_match(csnames ))
lualexer._rules = {
{ 'whitespace', spacing },
{ 'keyword', keyword },
+ -- { 'structure', structure },
{ 'function', builtin },
{ 'csname', csname },
{ 'constant', constant },
@@ -177,11 +184,13 @@ lualexer._rules = {
{ 'rest', rest },
}
-lualexer._tokenstyles = lexer.context.styleset
+lualexer._tokenstyles = context.styleset
lualexer._foldsymbols = {
_patterns = {
- '%l+',
+ -- '%l+', -- costly
+ -- '%l%l+',
+ '[a-z][a-z]+',
-- '[%({%)}%[%]]',
'[{}%[%]]',
},
@@ -238,6 +247,7 @@ lualexer._rules_cld = {
{ 'texstring', texstring },
{ 'texcomment', texcomment },
{ 'texcommand', texcommand },
+ -- { 'structure', structure },
{ 'keyword', keyword },
{ 'function', builtin },
{ 'csname', csname },
diff --git a/context/data/scite/lexers/scite-context-lexer-mps.lua b/context/data/scite/lexers/scite-context-lexer-mps.lua
index 5e8a440fa..e6987f6c6 100644
--- a/context/data/scite/lexers/scite-context-lexer-mps.lua
+++ b/context/data/scite/lexers/scite-context-lexer-mps.lua
@@ -14,7 +14,7 @@ local token, exact_match = lexer.token, lexer.exact_match
local P, R, S, V, C, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cmt
local type = type
-local metafunlexer = { _NAME = "mps" }
+local metafunlexer = { _NAME = "mps", _FILENAME = "scite-context-lexer-mps" }
local whitespace = lexer.WHITESPACE
local context = lexer.context
diff --git a/context/data/scite/lexers/scite-context-lexer-pdf-object.lua b/context/data/scite/lexers/scite-context-lexer-pdf-object.lua
index e98be07a7..3694b24d8 100644
--- a/context/data/scite/lexers/scite-context-lexer-pdf-object.lua
+++ b/context/data/scite/lexers/scite-context-lexer-pdf-object.lua
@@ -10,7 +10,7 @@ local lexer = lexer
local token = lexer.token
local P, R, S, C, V = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.V
-local pdfobjectlexer = { _NAME = "pdf-object" }
+local pdfobjectlexer = { _NAME = "pdf-object", _FILENAME = "scite-context-lexer-pdf-object" }
local whitespace = lexer.WHITESPACE -- triggers states
local context = lexer.context
local patterns = context.patterns
diff --git a/context/data/scite/lexers/scite-context-lexer-pdf-xref.lua b/context/data/scite/lexers/scite-context-lexer-pdf-xref.lua
index e250d3b84..827926185 100644
--- a/context/data/scite/lexers/scite-context-lexer-pdf-xref.lua
+++ b/context/data/scite/lexers/scite-context-lexer-pdf-xref.lua
@@ -10,7 +10,7 @@ local lexer = lexer
local token = lexer.token
local P = lpeg.P
-local pdfxreflexer = { _NAME = "pdf-xref" }
+local pdfxreflexer = { _NAME = "pdf-xref", _FILENAME = "scite-context-lexer-pdf-xref" }
local whitespace = lexer.WHITESPACE -- triggers states
local context = lexer.context
local patterns = context.patterns
diff --git a/context/data/scite/lexers/scite-context-lexer-pdf.lua b/context/data/scite/lexers/scite-context-lexer-pdf.lua
index 6ed7d1ecd..c89270c64 100644
--- a/context/data/scite/lexers/scite-context-lexer-pdf.lua
+++ b/context/data/scite/lexers/scite-context-lexer-pdf.lua
@@ -12,7 +12,7 @@ local lexer = lexer
local token = lexer.token
local P, R, S = lpeg.P, lpeg.R, lpeg.S
-local pdflexer = { _NAME = "pdf" }
+local pdflexer = { _NAME = "pdf", _FILENAME = "scite-context-lexer-pdf" }
local whitespace = lexer.WHITESPACE -- triggers states
local pdfobjectlexer = lexer.load("scite-context-lexer-pdf-object")
diff --git a/context/data/scite/lexers/scite-context-lexer-tex.lua b/context/data/scite/lexers/scite-context-lexer-tex.lua
index e5fbf5900..68b67488e 100644
--- a/context/data/scite/lexers/scite-context-lexer-tex.lua
+++ b/context/data/scite/lexers/scite-context-lexer-tex.lua
@@ -26,7 +26,7 @@ local info = {
-- it seems that whitespace triggers the lexer when embedding happens, but this
-- is quite fragile due to duplicate styles .. lexer.WHITESPACE is a number
- -- (initially)
+ -- (initially) ... _NAME vs filename (but we don't want to overwrite files)
-- this lexer does not care about other macro packages (one can of course add a fake
-- interface but it's not on the agenda)
@@ -40,14 +40,16 @@ local global, string, table, lpeg = _G, string, table, lpeg
local token, exact_match = lexer.token, lexer.exact_match
local P, R, S, V, C, Cmt, Cp, Cc, Ct = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cmt, lpeg.Cp, lpeg.Cc, lpeg.Ct
local type, next = type, next
-local find, match, lower = string.find, string.match, string.lower
+local find, match, lower, upper = string.find, string.match, string.lower, string.upper
-- module(...)
-local contextlexer = { _NAME = "tex" }
+local contextlexer = { _NAME = "tex", _FILENAME = "scite-context-lexer-tex" }
local whitespace = lexer.WHITESPACE
+local context = lexer.context
local cldlexer = lexer.load('scite-context-lexer-cld')
+----- cldlexer = lexer.load('scite-context-lexer-lua')
local mpslexer = lexer.load('scite-context-lexer-mps')
local commands = { en = { } }
@@ -55,8 +57,6 @@ local primitives = { }
local helpers = { }
local constants = { }
-local context = lexer.context
-
do -- todo: only once, store in global
local definitions = context.loaddefinitions("scite-context-data-interfaces")
@@ -125,6 +125,7 @@ local wordpattern = context.patterns.wordpattern
local iwordpattern = context.patterns.iwordpattern
local invisibles = context.patterns.invisibles
local checkedword = context.checkedword
+local styleofword = context.styleofword
local setwordlist = context.setwordlist
local validwords = false
@@ -219,38 +220,21 @@ local p_unit = P("pt") + P("bp") + P("sp") + P("mm") + P("cm") +
-- no looking back = #(1-S("[=")) * cstoken^3 * #(1-S("=]"))
--- local p_word = Cmt(wordpattern, function(_,i,s)
--- if not validwords then
--- return true, { "text", i }
+-- This one gives stack overflows:
+--
+-- local p_word = Cmt(iwordpattern, function(_,i,s)
+-- if validwords then
+-- return checkedword(validwords,s,i)
-- else
--- -- keys are lower
--- local word = validwords[s]
--- if word == s then
--- return true, { "okay", i } -- exact match
--- elseif word then
--- return true, { "warning", i } -- case issue
--- else
--- local word = validwords[lower(s)]
--- if word == s then
--- return true, { "okay", i } -- exact match
--- elseif word then
--- return true, { "warning", i } -- case issue
--- else
--- return true, { "error", i }
--- end
--- end
+-- return true, { "text", i }
-- end
-- end)
+--
+-- So we use this one instead:
-local p_word = Cmt(iwordpattern, function(_,i,s)
- if validwords then
- return checkedword(validwords,s,i)
- else
- return true, { "text", i }
- end
-end)
+local p_word = Ct( iwordpattern / function(s) return styleofword(validwords,s) end * Cp() ) -- the function can be inlined
--- local p_text = (1 - p_grouping - p_special - p_extra - backslash - space + hspace)^1
+----- p_text = (1 - p_grouping - p_special - p_extra - backslash - space + hspace)^1
-- keep key pressed at end-of syst-aux.mkiv:
--
@@ -415,7 +399,7 @@ local stopmetafun = P("\\stop") * metafunenvironment
local openargument = token("special", P("{"))
local closeargument = token("special", P("}"))
-local argumentcontent = token("default",(1-P("}"))^0)
+local argumentcontent = token("default",(1-P("}"))^0) -- maybe space needs a treatment
local metafunarguments = (spacing^0 * openargument * argumentcontent * closeargument)^-2
@@ -454,6 +438,11 @@ contextlexer._rules = {
}
contextlexer._tokenstyles = context.styleset
+-- contextlexer._tokenstyles = context.stylesetcopy() -- experiment
+
+-- contextlexer._tokenstyles[#contextlexer._tokenstyles + 1] = { cldlexer._NAME..'_whitespace', lexer.style_whitespace }
+-- contextlexer._tokenstyles[#contextlexer._tokenstyles + 1] = { mpslexer._NAME..'_whitespace', lexer.style_whitespace }
+
local folds = {
["\\start"] = 1, ["\\stop" ] = -1,
diff --git a/context/data/scite/lexers/scite-context-lexer-txt.lua b/context/data/scite/lexers/scite-context-lexer-txt.lua
index f607ee1eb..012167aeb 100644
--- a/context/data/scite/lexers/scite-context-lexer-txt.lua
+++ b/context/data/scite/lexers/scite-context-lexer-txt.lua
@@ -10,10 +10,10 @@ if not lexer._CONTEXTEXTENSIONS then require("scite-context-lexer") end
local lexer = lexer
local token = lexer.token
-local P, S, Cmt = lpeg.P, lpeg.S, lpeg.Cmt
+local P, S, Cmt, Cp, Ct = lpeg.P, lpeg.S, lpeg.Cmt, lpeg.Cp, lpeg.Ct
local find, match = string.find, string.match
-local textlexer = { _NAME = "txt" }
+local textlexer = { _NAME = "txt", _FILENAME = "scite-context-lexer-txt" }
local whitespace = lexer.WHITESPACE
local context = lexer.context
@@ -23,6 +23,7 @@ local any = lexer.any
local wordtoken = context.patterns.wordtoken
local wordpattern = context.patterns.wordpattern
local checkedword = context.checkedword
+local styleofword = context.styleofword
local setwordlist = context.setwordlist
local validwords = false
@@ -51,14 +52,17 @@ end)
local t_preamble =
token("preamble", p_preamble)
+-- local t_word =
+-- Cmt(wordpattern, function(_,i,s)
+-- if validwords then
+-- return checkedword(validwords,s,i)
+-- else
+-- return true, { "text", i }
+-- end
+-- end)
+
local t_word =
- Cmt(wordpattern, function(_,i,s)
- if validwords then
- return checkedword(validwords,s,i)
- else
- return true, { "text", i }
- end
- end)
+ Ct( wordpattern / function(s) return styleofword(validwords,s) end * Cp() ) -- the function can be inlined
local t_text =
token("default", wordtoken^1)
diff --git a/context/data/scite/lexers/scite-context-lexer-xml-cdata.lua b/context/data/scite/lexers/scite-context-lexer-xml-cdata.lua
index 7dfaed5bc..97253e140 100644
--- a/context/data/scite/lexers/scite-context-lexer-xml-cdata.lua
+++ b/context/data/scite/lexers/scite-context-lexer-xml-cdata.lua
@@ -10,7 +10,7 @@ local lexer = lexer
local token = lexer.token
local P = lpeg.P
-local xmlcdatalexer = { _NAME = "xml-cdata" }
+local xmlcdatalexer = { _NAME = "xml-cdata", _FILENAME = "scite-context-lexer-xml-cdata" }
local whitespace = lexer.WHITESPACE -- triggers states
local context = lexer.context
diff --git a/context/data/scite/lexers/scite-context-lexer-xml-comment.lua b/context/data/scite/lexers/scite-context-lexer-xml-comment.lua
index f2e24fa90..eab3b2a61 100644
--- a/context/data/scite/lexers/scite-context-lexer-xml-comment.lua
+++ b/context/data/scite/lexers/scite-context-lexer-xml-comment.lua
@@ -10,7 +10,7 @@ local lexer = lexer
local token = lexer.token
local P = lpeg.P
-local xmlcommentlexer = { _NAME = "xml-comment" }
+local xmlcommentlexer = { _NAME = "xml-comment", _FILENAME = "scite-context-lexer-xml-comment" }
local whitespace = lexer.WHITESPACE
local context = lexer.context
diff --git a/context/data/scite/lexers/scite-context-lexer-xml.lua b/context/data/scite/lexers/scite-context-lexer-xml.lua
index d938840c0..34636127f 100644
--- a/context/data/scite/lexers/scite-context-lexer-xml.lua
+++ b/context/data/scite/lexers/scite-context-lexer-xml.lua
@@ -17,11 +17,11 @@ if not lexer._CONTEXTEXTENSIONS then require("scite-context-lexer") end
local lexer = lexer
local global, string, table, lpeg = _G, string, table, lpeg
local token, exact_match = lexer.token, lexer.exact_match
-local P, R, S, V, C, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cmt
+local P, R, S, V, C, Cmt, Ct, Cp = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cmt, lpeg.Ct, lpeg.Cp
local type = type
local match, find = string.match, string.find
-local xmllexer = { _NAME = "xml" }
+local xmllexer = { _NAME = "xml", _FILENAME = "scite-context-lexer-xml" }
local whitespace = lexer.WHITESPACE -- triggers states
local context = lexer.context
@@ -68,6 +68,7 @@ local wordpattern = context.patterns.wordpattern
local iwordpattern = context.patterns.iwordpattern
local invisibles = context.patterns.invisibles
local checkedword = context.checkedword
+local styleofword = context.styleofword
local setwordlist = context.setwordlist
local validwords = false
@@ -90,15 +91,17 @@ local p_preamble = Cmt(#P("<?xml "), function(input,i,_) -- todo: utf bomb
return false
end)
+-- local p_word =
+-- Cmt(iwordpattern, function(_,i,s)
+-- if validwords then
+-- return checkedword(validwords,s,i)
+-- else
+-- return true, { "text", i } -- or default
+-- end
+-- end)
+
local p_word =
- Cmt(iwordpattern, function(_,i,s)
- if validwords then
- return checkedword(validwords,s,i)
- else
- return true, { "text", i } -- or default
--- return true, { "invisible", i }
- end
- end)
+ Ct( iwordpattern / function(s) return styleofword(validwords,s) end * Cp() ) -- the function can be inlined
local p_rest =
token("default", any)
diff --git a/context/data/scite/lexers/scite-context-lexer.lua b/context/data/scite/lexers/scite-context-lexer.lua
index d82a057b7..457b855d2 100644
--- a/context/data/scite/lexers/scite-context-lexer.lua
+++ b/context/data/scite/lexers/scite-context-lexer.lua
@@ -4,6 +4,8 @@ local info = {
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files",
+ comment = "contains copyrighted code from mitchell.att.foicica.com",
+
}
-- The fold and lex functions are copied and patched from original code by Mitchell (see
@@ -21,11 +23,12 @@ local info = {
-- end
--
-- So, where pre 3.03 we loaded that file and in that file the original lexing code, we
--- now do the reverse.
+-- now do the reverse. I also moved some helpers here because the new module structure
+-- hides some (now local) functions.
--
-- Another change has been that _LEXERHOME is no longer available. It looks like more and
-- more functionality gets dropped so maybe at some point we need to ship our own dll/so
--- files.
+-- files. For instance, I'd like to have access to the current filename etc.
--
-- An increase in the number of built in styles made our own crash (probably due to some
-- maximum being reached) so some measures has been taken. We now get pretty close to
@@ -48,11 +51,11 @@ local info = {
-- have been optimized. It is a pitty that there is no proper print available.
-- Maybe it's safer to copy the other methods here so that we have no dependencies, apart
--- from the c library.
+-- from the c library. We need to copy anyway as helpers are local
--- Something is wrong with folds in combination with scite 3.00.
+local lpeg = require 'lpeg'
-local R, P, S, C, Cp, Cs, Ct, Cmt, Cc, Cf, Cg = lpeg.R, lpeg.P, lpeg.S, lpeg.C, lpeg.Cp, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Cf, lpeg.Cg
+local R, P, S, C, V, Cp, Cs, Ct, Cmt, Cc, Cf, Cg = lpeg.R, lpeg.P, lpeg.S, lpeg.C, lpeg.V, lpeg.Cp, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Cf, lpeg.Cg
local lpegmatch = lpeg.match
local find, gmatch, match, lower, upper, gsub = string.find, string.gmatch, string.match, string.lower, string.upper, string.gsub
local concat = table.concat
@@ -60,7 +63,7 @@ local global = _G
local type, next, setmetatable, rawset = type, next, setmetatable, rawset
if lexer then
- -- we're ok
+ -- in recent c++ code the lexername and loading is hard coded
elseif _LEXERHOME then
dofile(_LEXERHOME .. '/lexer.lua') -- pre 3.03 situation
else
@@ -266,6 +269,31 @@ function context.checkedword(validwords,s,i) -- ,limit
end
end
+function context.styleofword(validwords,s) -- ,limit
+ if not validwords then
+ return "text"
+ else
+ -- keys are lower
+ local word = validwords[s]
+ if word == s then
+ return "okay" -- exact match
+ elseif word then
+ return "warning" -- case issue
+ else
+ local word = validwords[lower(s)]
+ if word == s then
+ return "okay" -- exact match
+ elseif word then
+ return "warning" -- case issue
+ elseif upper(s) == s then
+ return "warning" -- probably a logo or acronym
+ else
+ return "error"
+ end
+ end
+ end
+end
+
-- overloaded functions
local FOLD_BASE = SC_FOLDLEVELBASE
@@ -480,6 +508,79 @@ function context.fold(text,start_pos,start_line,start_level) -- hm, we had size
return { }
end
+-- The following code is mostly unchanged:
+
+local function add_rule(lexer, id, rule)
+ if not lexer._RULES then
+ lexer._RULES = {}
+ lexer._RULEORDER = {}
+ end
+ lexer._RULES[id] = rule
+ lexer._RULEORDER[#lexer._RULEORDER + 1] = id
+end
+
+local function add_style(lexer, token_name, style)
+ local len = lexer._STYLES.len
+ if len == 32 then
+ len = len + 8
+ end
+ if len >= 128 then
+ print('Too many styles defined (128 MAX)')
+ end
+ lexer._TOKENS[token_name] = len
+ lexer._STYLES[len] = style
+ lexer._STYLES.len = len + 1
+end
+
+local function join_tokens(lexer)
+ local patterns, order = lexer._RULES, lexer._RULEORDER
+ local token_rule = patterns[order[1]]
+ for i=2,#order do
+ token_rule = token_rule + patterns[order[i]]
+ end
+ lexer._TOKENRULE = token_rule
+ return lexer._TOKENRULE
+end
+
+local function add_lexer(grammar, lexer, token_rule)
+ local token_rule = join_tokens(lexer)
+ local lexer_name = lexer._NAME
+ local children = lexer._CHILDREN
+ for i=1,#children do
+ local child = children[i]
+ if child._CHILDREN then
+ add_lexer(grammar, child)
+ end
+ local child_name = child._NAME
+ local rules = child._EMBEDDEDRULES[lexer_name]
+ local rules_token_rule = grammar['__'..child_name] or rules.token_rule
+ grammar[child_name] = (-rules.end_rule * rules_token_rule)^0 * rules.end_rule^-1 * V(lexer_name)
+ local embedded_child = '_' .. child_name
+ grammar[embedded_child] = rules.start_rule * (-rules.end_rule * rules_token_rule)^0 * rules.end_rule^-1
+ token_rule = V(embedded_child) + token_rule
+ end
+ grammar['__' .. lexer_name] = token_rule
+ grammar[lexer_name] = token_rule^0
+end
+
+local function build_grammar(lexer, initial_rule)
+ local children = lexer._CHILDREN
+ if children then
+ local lexer_name = lexer._NAME
+ if not initial_rule then
+ initial_rule = lexer_name
+ end
+ local grammar = { initial_rule }
+ add_lexer(grammar, lexer)
+ lexer._INITIALRULE = initial_rule
+ lexer._GRAMMAR = Ct(P(grammar))
+ else
+ lexer._GRAMMAR = Ct(join_tokens(lexer)^0)
+ end
+end
+
+-- so far. We need these local functions in the next one.
+
function context.lex(text,init_style)
local lexer = global._LEXER
local grammar = lexer._GRAMMAR
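
Note (not part of the patch): the word-checking change in the tex, txt and xml lexers above replaces a Cmt callback, which could overflow the stack, with a plain Ct(...)/Cp() capture that routes the matched word through the new context.styleofword. The following standalone Lua sketch illustrates the shape of the result; the validwords table and the simplified iwordpattern are assumptions standing in for the real word list and context.patterns.iwordpattern.

local lpeg = require("lpeg")
local R, Ct, Cp = lpeg.R, lpeg.Ct, lpeg.Cp
local lower, upper = string.lower, string.upper

-- assumed word list: keys are known words, values the preferred spelling
local validwords = { context = "context", metapost = "MetaPost" }

-- simplified stand-in for context.patterns.iwordpattern
local iwordpattern = R("AZ","az")^2

-- same decision logic as the context.styleofword added above
local function styleofword(validwords,s)
    if not validwords then
        return "text"
    end
    local word = validwords[s]
    if word == s then
        return "okay"            -- exact match
    elseif word then
        return "warning"         -- case issue
    else
        word = validwords[lower(s)]
        if word == s then
            return "okay"
        elseif word then
            return "warning"
        elseif upper(s) == s then
            return "warning"     -- probably a logo or acronym
        else
            return "error"
        end
    end
end

-- the new pattern: each match yields a table { stylename, position-after-word }
local p_word = Ct( iwordpattern / function(s) return styleofword(validwords,s) end * Cp() )

print(lpeg.match(p_word, "context")[1])  --> okay
print(lpeg.match(p_word, "CONTEXT")[1])  --> warning (case issue)
print(lpeg.match(p_word, "contxt")[1])   --> error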
diff --git a/context/data/scite/lexers/themes/scite-context-theme.lua b/context/data/scite/lexers/themes/scite-context-theme.lua
index 6351c290d..7b305d3e5 100644
--- a/context/data/scite/lexers/themes/scite-context-theme.lua
+++ b/context/data/scite/lexers/themes/scite-context-theme.lua
@@ -148,7 +148,7 @@ lexer.style_controlchar = style_controlchar -- 38
local styles = { -- as we have globals we could do with less
-- ["whitespace"] = style_whitespace, -- not to be set!
- -- ["default"] = style_nothing,
+ ["default"] = style_nothing, -- else no good backtracking to start-of-child
-- ["number"] = style_number,
-- ["comment"] = style_comment,
-- ["keyword"] = style_keyword,
@@ -157,6 +157,7 @@ local styles = { -- as we have globals we could do with less
-- ["error"] = style_error,
-- ["label"] = style_label,
+ ["invisible"] = style_invisible,
["quote"] = style_quote,
["special"] = style_special,
["extra"] = style_extra,
@@ -166,7 +167,6 @@ local styles = { -- as we have globals we could do with less
-- ["definition"] = style_definition,
["okay"] = style_okay,
["warning"] = style_warning,
- ["invisible"] = style_invisible,
-- ["standout"] = style_standout,
["command"] = style_command,
["internal"] = style_internal,
@@ -190,6 +190,15 @@ end
context.styles = styles
context.styleset = styleset
+function context.stylesetcopy()
+ local t = { }
+ for i=1,#styleset do
+ t[i] = styleset[i]
+ end
+ t[#t+1] = { "whitespace", style_nothing }
+ return t
+end
+
-- We can be sparse if needed:
-- function context.newstyleset(list)
diff --git a/context/data/scite/scite-context.properties b/context/data/scite/scite-context.properties
index 1664affa1..4f91fae2d 100644
--- a/context/data/scite/scite-context.properties
+++ b/context/data/scite/scite-context.properties
@@ -27,6 +27,18 @@
# if PLAT_WIN
# find.command=fgrep -G -n $(find.what) $(find.files)
+# bugged: bad cursor
+#
+# technology=0
+#
+# not much difference
+#
+# buffered.draw=0
+#
+# no auto save:
+#
+# save.on.timer=0
+
# Just UTF-8
code.page=65001
@@ -46,11 +58,15 @@ file.patterns.tex=
file.patterns.latex=
file.patterns.context=*.tex;*.mkii;*.mkiv;*.mkvi;
+open.suffix.$(file.patterns.context)=.tex
+
# Example : patterns
file.patterns.xml=
file.patterns.example=*.xml;*.xsl;*.xsd;*.fo;*.exa;*.rlb;*.rlg;*.rlv;*.rng;*.xfdf;*.xslt;*.dtd;*.lmx;*.ctx;*.export;
+open.suffix.$(file.patterns.example)=.xml
+
filter.example=eXaMpLe|$(file.patterns.example)|
lexer.$(file.patterns.example)=xml
@@ -59,6 +75,8 @@ lexer.$(file.patterns.example)=xml
file.patterns.lua=*.lua;*.luc;*.cld;*.tuc;*.luj;*.tma;*.lfg
+open.suffix.$(file.patterns.lua)=.lua
+
filter.lua=Lua MkIV|$(file.patterns.lua)|
lexer.$(file.patterns.lua)=lua
@@ -642,3 +660,7 @@ style.context.35=$(styles.bracebad)
fold=1
fold.symbols=3
fold.margin.colour=#CCCCCC
+
+# testing
+
+#~ cache.layout=
diff --git a/context/data/scite/scite-ctx.lua b/context/data/scite/scite-ctx.lua
index 8db99693f..9f3526dc3 100644
--- a/context/data/scite/scite-ctx.lua
+++ b/context/data/scite/scite-ctx.lua
@@ -80,7 +80,7 @@ function traceln(str)
io.flush()
end
--- traceln("LPEG " .. tostring(lpeg))
+-- require "lpeg"
function string.grab(str,delimiter)
local list = { }
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index 87d5c4460..8f098c1e3 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -7441,7 +7441,7 @@ local grammar_unparsed_text = P { "preamble",
-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
+local function _xmlconvert_(data, settings)
settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
--
strip = settings.strip_cm_and_dt
@@ -7537,6 +7537,18 @@ local function xmlconvert(data, settings)
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
xml.convert = xmlconvert
function xml.inheritedconvert(data,xmldata) -- xmldata is parent
diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua
index 87d5c4460..8f098c1e3 100644
--- a/scripts/context/stubs/mswin/mtxrun.lua
+++ b/scripts/context/stubs/mswin/mtxrun.lua
@@ -7441,7 +7441,7 @@ local grammar_unparsed_text = P { "preamble",
-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
+local function _xmlconvert_(data, settings)
settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
--
strip = settings.strip_cm_and_dt
@@ -7537,6 +7537,18 @@ local function xmlconvert(data, settings)
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
xml.convert = xmlconvert
function xml.inheritedconvert(data,xmldata) -- xmldata is parent
diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun
index 87d5c4460..8f098c1e3 100644
--- a/scripts/context/stubs/unix/mtxrun
+++ b/scripts/context/stubs/unix/mtxrun
@@ -7441,7 +7441,7 @@ local grammar_unparsed_text = P { "preamble",
-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
+local function _xmlconvert_(data, settings)
settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
--
strip = settings.strip_cm_and_dt
@@ -7537,6 +7537,18 @@ local function xmlconvert(data, settings)
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
xml.convert = xmlconvert
function xml.inheritedconvert(data,xmldata) -- xmldata is parent
diff --git a/tex/context/base/cont-new.mkii b/tex/context/base/cont-new.mkii
index bd6a20f7a..494b78b13 100644
--- a/tex/context/base/cont-new.mkii
+++ b/tex/context/base/cont-new.mkii
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2012.04.17 22:37}
+\newcontextversion{2012.04.19 23:18}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 8ea0045c6..3cd207ede 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2012.04.17 22:37}
+\newcontextversion{2012.04.19 23:18}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf
index 7d6dac649..14d3139ce 100644
--- a/tex/context/base/context-version.pdf
+++ b/tex/context/base/context-version.pdf
Binary files differ
diff --git a/tex/context/base/context-version.png b/tex/context/base/context-version.png
index 47f40fdc4..a150ec151 100644
--- a/tex/context/base/context-version.png
+++ b/tex/context/base/context-version.png
Binary files differ
diff --git a/tex/context/base/context.mkii b/tex/context/base/context.mkii
index cf3bd5467..63c2dc542 100644
--- a/tex/context/base/context.mkii
+++ b/tex/context/base/context.mkii
@@ -20,7 +20,7 @@
%D your styles an modules.
\edef\contextformat {\jobname}
-\edef\contextversion{2012.04.17 22:37}
+\edef\contextversion{2012.04.19 23:18}
%D For those who want to use this:
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index 2c6813941..dd5905ab0 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -23,7 +23,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2012.04.17 22:37}
+\edef\contextversion{2012.04.19 23:18}
%D For those who want to use this:
diff --git a/tex/context/base/core-dat.lua b/tex/context/base/core-dat.lua
index eab238704..2317eb759 100644
--- a/tex/context/base/core-dat.lua
+++ b/tex/context/base/core-dat.lua
@@ -15,6 +15,12 @@ local tonumber = tonumber
local context, commands = context, commands
+local trace_datasets = false trackers.register("job.datasets" , function(v) trace_datasets = v end)
+local trace_pagestates = false trackers.register("job.pagestates", function(v) trace_pagestates = v end)
+
+local report_dataset = logs.reporter("dataset")
+local report_pagestate = logs.reporter("pagestate")
+
local allocate = utilities.storage.allocate
local settings_to_hash = utilities.parsers.settings_to_hash
local format = string.format
@@ -77,6 +83,11 @@ local function setdata(settings)
data.index = index
data.order = index
data.realpage = texcount.realpageno
+ if trace_datasets then
+ report_dataset("delayed: name %s, tag %s, index %s",name,tag,index)
+ end
+ elseif trace_datasets then
+ report_dataset("immediate: name %s, tag %s",name,tag)
end
return name, tag, data
end
@@ -86,22 +97,33 @@ datasets.setdata = setdata
function datasets.extend(name,tag)
local set = sets[name]
local order = set.order + 1
+ local realpage = texcount.realpageno
set.order = order
local t = tobesaved[name][tag]
- t.realpage = texcount.realpageno
+ t.realpage = realpage
t.order = order
+ if trace_datasets then
+ report_dataset("flushed: name %s, tag %s, page %s, index %s, order",name,tag,t.index or 0,order,realpage)
+ end
end
function datasets.getdata(name,tag,key,default)
local t = collected[name]
- t = t and (t[tag] or t[tonumber(tag)])
- if not t then
- -- back luck
- elseif key then
- return t[key] or default
- else
- return t
+ if t then
+ t = t[tag] or t[tonumber(tag)]
+ if t then
+ if key then
+ return t[key] or default
+ else
+ return t
+ end
+ elseif trace_datasets then
+ report_dataset("unknown: name %s, tag %s",name,tag)
+ end
+ elseif trace_datasets then
+ report_dataset("unknown: name %s",name)
end
+ return default
end
function commands.setdataset(settings)
@@ -162,21 +184,38 @@ local function setstate(settings)
else
tag = tonumber(tag) or tag -- autonumber saves keys
end
- local data = texcount.realpageno
+ local realpage = texcount.realpageno
+ local data = realpage
list[tag] = data
+ if trace_pagestates then
+ report_pagestate("setting: name %s, tag %s, preset %s",name,tag,realpage)
+ end
return name, tag, data
end
pagestates.setstate = setstate
function pagestates.extend(name,tag)
- tobesaved[name][tag] = texcount.realpageno
+ local realpage = texcount.realpageno
+ if trace_pagestates then
+ report_pagestate("synchronizing: name %s, tag %s, preset %s",name,tag,realpage)
+ end
+ tobesaved[name][tag] = realpage
end
function pagestates.realpage(name,tag,default)
local t = collected[name]
- t = t and (t[tag] or t[tonumber(tag)])
- return tonumber(t or default)
+ if t then
+ t = t[tag] or t[tonumber(tag)]
+ if t then
+ return tonumber(t or default)
+ elseif trace_pagestates then
+ report_pagestate("unknown: name %s, tag %s",name,tag)
+ end
+ elseif trace_pagestates then
+ report_pagestate("unknown: name %s",name)
+ end
+ return default
end
function commands.setpagestate(settings)
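
Note (not part of the patch): the one-liners added at the top of core-dat.lua register trackers that flip a local flag when enabled, for instance via \enabletrackers[job.datasets] or --trackers=job.datasets in a MkIV run. A minimal, self-contained sketch of that idiom follows; the toy trackers table is an assumption that only mirrors the register/enable interface of the real one.

-- toy stand-in for the real trackers table (the real one does much more)
local trackers = { hooks = { } }

function trackers.register(name, fn)
    trackers.hooks[name] = fn
end

function trackers.enable(name)
    local fn = trackers.hooks[name]
    if fn then fn(true) end
end

-- the idiom used in the patch: declare the flag and register its setter in one line
local trace_datasets = false  trackers.register("job.datasets", function(v) trace_datasets = v end)

print(trace_datasets)            --> false
trackers.enable("job.datasets")  -- roughly what enabling the tracker ends up doing
print(trace_datasets)            --> true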
diff --git a/tex/context/base/lxml-tab.lua b/tex/context/base/lxml-tab.lua
index 789f454ae..0a43f4352 100644
--- a/tex/context/base/lxml-tab.lua
+++ b/tex/context/base/lxml-tab.lua
@@ -675,7 +675,7 @@ local grammar_unparsed_text = P { "preamble",
-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
+local function _xmlconvert_(data, settings)
settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
--
strip = settings.strip_cm_and_dt
@@ -772,6 +772,18 @@ local function xmlconvert(data, settings)
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
xml.convert = xmlconvert
function xml.inheritedconvert(data,xmldata) -- xmldata is parent
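
Note (not part of the patch): the same pcall protector is applied in lxml-tab.lua and in the three generated mtxrun copies above, so malformed xml that would otherwise abort with a stack error degrades to the result of parsing an empty string. A generic, standalone sketch of that pattern; the parse function and names here are illustrative, not the mtxrun API.

-- wrap a function so that any raised error falls back to a safe default result
local function protect(fn, fallback)
    return function(...)
        local ok, result = pcall(fn, ...)
        if ok then
            return result
        else
            return fallback()
        end
    end
end

-- hypothetical parser standing in for _xmlconvert_
local function parse(data)
    assert(type(data) == "string" and #data < 10, "parser blew up")
    return { root = data }
end

local safeparse = protect(parse, function() return parse("") end)

print(safeparse("<x/>").root)            --> <x/>
print(safeparse(("<x>"):rep(100)).root)  --> (empty: the fallback result)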
diff --git a/tex/context/base/mult-sys.mkiv b/tex/context/base/mult-sys.mkiv
index 112a6c3d5..d3823ea57 100644
--- a/tex/context/base/mult-sys.mkiv
+++ b/tex/context/base/mult-sys.mkiv
@@ -531,7 +531,6 @@
\definesystemvariable {pn} % PaginaNummer
\definesystemvariable {pr} % PRogrammas
\definesystemvariable {ps} % PoSitioneren
-\definesystemvariable {pt} % PageshifT
\definesystemvariable {px} % Parallel
\definesystemvariable {py} % PropertYs
\definesystemvariable {pv} % PublicationVariable
diff --git a/tex/context/base/page-imp.mkiv b/tex/context/base/page-imp.mkiv
index 626d9042f..71c30a7e1 100644
--- a/tex/context/base/page-imp.mkiv
+++ b/tex/context/base/page-imp.mkiv
@@ -1193,11 +1193,15 @@
% #1=name #2=horizontal|vertical #3=shiftlist
+% this will move to lua
+
+\installcorenamespace {pageshift}
+
\unexpanded\def\definepageshift
{\dotripleargument\page_boxes_define_shift}
\def\page_boxes_define_shift[#1][#2][#3]%
- {\setvalue{\??pt#2:#1}{#3}}
+ {\setvalue{\??pageshift#2:#1}{#3}}
\let\page_boxes_h_shifts\empty
\let\page_boxes_v_shifts\empty
@@ -1221,7 +1225,7 @@
\getfromcommacommand[#2][#3]%
\fi
\ifx\commalistelement\empty \else
- #1\commalistelement
+ #1\commalistelement\relax % the relax is really needed as there is an \if later on
\fi
\fi}
@@ -1252,8 +1256,8 @@
\def\page_boxes_setup_shift[#1][#2][#3]% page|paper horizontal vertical
{\ifthirdargument % paper=arrange
- \edef\page_boxes_h_shifts{\ifcsname\??pt\v!horizontal:#2\endcsname\csname\??pt\v!horizontal:#2\endcsname\fi}%
- \edef\page_boxes_v_shifts{\ifcsname\??pt\v!vertical :#3\endcsname\csname\??pt\v!vertical :#3\endcsname\fi}%
+ \edef\page_boxes_h_shifts{\ifcsname\??pageshift\v!horizontal:#2\endcsname\csname\??pageshift\v!horizontal:#2\endcsname\fi}%
+ \edef\page_boxes_v_shifts{\ifcsname\??pageshift\v!vertical :#3\endcsname\csname\??pageshift\v!vertical :#3\endcsname\fi}%
\doifelse{#1}\v!page {\let\page_boxes_apply_shift_print\page_boxes_apply_shift}{\let\page_boxes_apply_shift_print\gobbleoneargument}%
\doifelse{#1}\v!paper{\let\page_boxes_apply_shift_paper\page_boxes_apply_shift}{\let\page_boxes_apply_shift_paper\gobbleoneargument}%
\else\ifsecondargument
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index c022049d5..060bb72f6 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index 850e11e23..10850803d 100644
--- a/tex/context/base/status-lua.pdf
+++ b/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index 7808e2cea..28b06e1dc 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 04/17/12 22:37:22
+-- merge date : 04/19/12 23:18:52
do -- begin closure to overcome local limits and interference