Diffstat (limited to 'tex')
-rw-r--r--  tex/context/base/bibl-bib.lua                       |   2
-rw-r--r--  tex/context/base/cont-new.mkiv                      |   2
-rw-r--r--  tex/context/base/context-version.pdf                | bin 4065 -> 4061 bytes
-rw-r--r--  tex/context/base/context.mkiv                       |   2
-rw-r--r--  tex/context/base/m-scite.mkiv                       | 269
-rw-r--r--  tex/context/base/status-files.pdf                   | bin 24624 -> 24625 bytes
-rw-r--r--  tex/context/base/status-lua.pdf                     | bin 242465 -> 242466 bytes
-rw-r--r--  tex/context/base/trac-deb.lua                       |  16
-rw-r--r--  tex/context/base/trac-log.lua                       |   3
-rw-r--r--  tex/context/base/util-sci.lua                       | 262
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua  |   2
11 files changed, 552 insertions, 6 deletions
diff --git a/tex/context/base/bibl-bib.lua b/tex/context/base/bibl-bib.lua
index 65ca1f9e1..baeb3d2f9 100644
--- a/tex/context/base/bibl-bib.lua
+++ b/tex/context/base/bibl-bib.lua
@@ -105,7 +105,7 @@
 local spacing   = space^0
 local equal     = P("=")
 local collapsed = (space^1)/ " "
-local function add(a,b) if b then return a..b else return a end end
+----- function add(a,b) if b then return a..b else return a end end
 local keyword   = C((R("az","AZ","09") + S("@_:-"))^1) -- C((1-space)^1)
 local s_quoted  = ((escape*single) + collapsed + (1-single))^0
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 30778c50c..e3df6f7bf 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
 %C therefore copyrighted by \PRAGMA. See mreadme.pdf for
 %C details.
 
-\newcontextversion{2014.04.25 00:45}
+\newcontextversion{2014.04.28 23:24}
 
 %D This file is loaded at runtime, thereby providing an excellent place for
 %D hacks, patches, extensions and new features.
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf
index d778e2a07..6450c43f1 100644
--- a/tex/context/base/context-version.pdf
+++ b/tex/context/base/context-version.pdf
Binary files differ
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index 7ac90cc8d..e1ade2ba1 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -28,7 +28,7 @@
 %D up and the dependencies are more consistent.
 
 \edef\contextformat {\jobname}
-\edef\contextversion{2014.04.25 00:45}
+\edef\contextversion{2014.04.28 23:24}
 \edef\contextkind   {beta}
 
 %D For those who want to use this:
diff --git a/tex/context/base/m-scite.mkiv b/tex/context/base/m-scite.mkiv
new file mode 100644
index 000000000..aed2c2631
--- /dev/null
+++ b/tex/context/base/m-scite.mkiv
@@ -0,0 +1,269 @@
+%D \module
+%D   [       file=m-scite,
+%D         version=2014.04.28,
+%D           title=\CONTEXT\ Extra Modules,
+%D        subtitle=\SCITE\ lexers,
+%D          author=Hans Hagen,
+%D            date=\currentdate,
+%D       copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% We can simplify the scite lexers, as long as we're able to return the
+% lexed result table and provide a lexer module with the functions that
+% the lexer expects (so I need to decipher the cxx file).
+%
+% lexer._TOKENSTYLES : table
+% lexer._CHILDREN    : flag
+% lexer._EXTRASTYLES : table
+% lexer._GRAMMAR     : flag
+%
+% lexers.load : function
+% lexers.lex  : function
+%
+% And some properties that map styles onto scintilla styling. I get the
+% impression that we end up with something simpler, a hybrid between the
+% scite lexing and the current context way, so we get an intermediate
+% step, with some penalty for context, but at least I don't have to
+% maintain two sets (three sets as we also have a line based series).
+
+% TODO: as these files are in tds we can locate them and set the lexer root
+% to that one. Currently we're on: we're on context/documents.
+
+% This is an experiment: eventually we need to hook it into the verbatim code
+% and deal with widow lines and so.
+
+\startluacode
+
+-- todo: merge with collapse
+-- todo: prehash whitespaces
+
+-- todo: hook into the pretty print code
+-- todo: a simple catcode regime with only \ { }
+
+local gsub, sub, find = string.gsub, string.sub, string.find
+local concat = table.concat
+local formatters = string.formatters
+local lpegmatch = lpeg.match
+local setmetatableindex = table.setmetatableindex
+
+local scite = require("util-sci")
+buffers.scite = scite
+
+-- context output:
+
+local f_def_color = formatters["\\definecolor[slxc%s][h=%s%s%s]%%"]
+local f_fore_none = formatters["\\def\\slx%s#1{{\\slxc%s#1}}%%"]
+local f_fore_bold = formatters["\\def\\slx%s#1{{\\slxc%s\\bf#1}}%%"]
+local f_none_bold = formatters["\\def\\slx%s#1{{\\bf#1}}%%"]
+local f_none_none = formatters["\\def\\slx%s#1{{#1}}%%"]
+local f_texstyled = formatters["\\slx%s{%s}"]
+
+local f_mapping = [[
+\let\string\slxL\string\letterleftbrace
+\let\string\slxR\string\letterrightbrace
+\let\string\slxM\string\letterdollar
+\let\string\slxV\string\letterbar
+\let\string\slxH\string\letterhash
+\let\string\slxB\string\letterbackslash
+\let\string\slxP\string\letterpercent
+\let\string\slxS\string\fixedspace
+%]]
+
+local replacer = lpeg.replacer {
+    ["{"]  = "\\slxL ",
+    ["}"]  = "\\slxR ",
+    ["$"]  = "\\slxM ",
+    ["|"]  = "\\slxV ",
+    ["#"]  = "\\slxH ",
+    ["\\"] = "\\slxB ",
+    ["%"]  = "\\slxP ",
+    [" "]  = "\\slxS ",
+}
+
+local colors = nil
+
+local function exportcolors()
+    if not colors then
+        scite.loadscitelexer()
+        local function black(f)
+            return (f[1] == f[2]) and (f[2] == f[3]) and (f[3] == '00')
+        end
+        local result, r = { f_mapping }, 1
+        for k, v in table.sortedhash(lexer.context.styles) do
+            local fore = v.fore
+            if fore and not black(fore) then
+                r = r + 1
+                result[r] = f_def_color(k,fore[1],fore[2],fore[3])
+            end
+        end
+        r = r + 1
+        result[r] = "%"
+        for k, v in table.sortedhash(lexer.context.styles) do
+            local bold = v.bold
+            local fore = v.fore
+            r = r + 1
+            if fore and not black(fore) then
+                if bold then
+                    result[r] = f_fore_bold(k,k)
+                else
+                    result[r] = f_fore_none(k,k)
+                end
+            else
+                if bold then
+                    result[r] = f_none_bold(k)
+                else
+                    result[r] = f_none_none(k)
+                end
+            end
+        end
+        colors = concat(result,"\n")
+    end
+    return colors
+end
+
+local function exportwhites()
+    return setmetatableindex(function(t,k)
+        local v = find(k,"white") and true or false
+        t[k] = v
+        return v
+    end)
+end
+
+local function exportstyled(lexer,text)
+    local result = lexer.lex(lexer,text,0)
+    local start = 1
+    local whites = exportwhites()
+    local buffer = { }
+    for i=1,#result,2 do
+        local style = result[i]
+        local position = result[i+1]
+        local txt = sub(text,start,position-1)
+        txt = lpegmatch(replacer,txt)
+        if whites[style] then
+            buffer[#buffer+1] = txt
+        else
+            buffer[#buffer+1] = f_texstyled(style,txt)
+        end
+        start = position
+    end
+    buffer = concat(buffer)
+    return buffer
+end
+
+function scite.installcommands()
+    context(exportcolors())
+end
+
+local function lexdata(data,lexname)
+    buffers.assign("lex",exportstyled(scite.loadedlexers[lexname],data or ""))
+end
+
+scite.lexdata = lexdata
+
+function scite.lexbuffer(name,lexname)
+    lexdata(buffers.getcontent(name) or "",lexname or "tex")
+end
+
+function scite.lexfile(filename,lexname)
+    lexdata(io.loaddata(filename) or "",lexname or file.suffix(filename))
+end
+
+-- html output
+
+\stopluacode
+
+% This is a preliminary interface.
+
+\unprotect
+
+\unexpanded\def\installscitecommands
+  {\ctxlua{buffers.scite.installcommands()}%
+   \let\installscitecommands\relax}
+
+\unexpanded\def\startscite{\startlines}
+\unexpanded\def\stopscite {\stoplines}
+
+\unexpanded\def\scitefile
+  {\dosingleargument\module_scite_file}
+
+\unexpanded\def\module_scite_file[#1]%
+  {\start
+   \ctxlua{buffers.scite.lexfile("#1")}%
+   \installscitecommands
+   \tt
+   \dontcomplain
+   \startscite
+   \getbuffer[lex]%
+   \stopscite
+   \stop}
+
+\unexpanded\def\scitebuffer
+  {\dodoubleargument\module_scite_buffer}
+
+\unexpanded\def\module_scite_buffer[#1][#2]%
+  {\start
+   \ifsecondargument
+     \ctxlua{buffers.scite.lexbuffer("#2","#1")}%
+   \else
+     \ctxlua{buffers.scite.lexbuffer("#1","tex")}%
+   \fi
+   \installscitecommands
+   \tt
+   \dontcomplain
+   \startscite
+   \getbuffer[lex]%
+   \stopscite
+   \stop}
+
+\protect
+
+\continueifinputfile{m-scite.mkiv}
+
+\setupbodyfont[dejavu,8pt]
+
+\setuplayout
+  [width=middle,
+   height=middle,
+   header=1cm,
+   footer=1cm,
+   topspace=1cm,
+   bottomspace=1cm,
+   backspace=1cm]
+
+\startbuffer[demo]
+\startsubsubject[title={oeps}]
+
+\startMPcode
+    draw fullcircle
+        scaled 2cm
+        withpen pencircle scaled 1mm
+        withcolor .5green;
+    draw textext (
+        lua (
+            "local function f(s) return string.upper(s) end mp.quoted(f('foo'))"
+        )
+    ) withcolor .5red ;
+\stopMPcode
+
+\startluacode
+    context("foo")
+\stopluacode
+
+\stopsubsubject
+\stopbuffer
+
+\starttext
+
+% \scitefile[../lexers/scite-context-lexer.lua] \page
+% \scitefile[t:/manuals/about/about-metafun.tex] \page
+% \scitefile[t:/sources/strc-sec.mkiv] \page
+% \scitefile[e:/tmp/mp.w] \page
+% \scitefile[t:/manuals/hybrid/tugboat.bib] \page
+\scitefile[e:/tmp/test.bib] \page
+
+% \getbuffer[demo] \scitebuffer[demo]
+
+\stoptext
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index 88d9476f1..f7a228bfc 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index 08433c7cd..547c0e785 100644
--- a/tex/context/base/status-lua.pdf
+++ b/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/trac-deb.lua b/tex/context/base/trac-deb.lua
index 059bf5c0d..af4f7c643 100644
--- a/tex/context/base/trac-deb.lua
+++ b/tex/context/base/trac-deb.lua
@@ -159,12 +159,24 @@ end
 
 -- this will work ok in >=0.79
 
+-- todo: last tex error has ! prepended
+-- todo: some nested errors have two line numbers
+-- todo: collect errorcontext in string (after code cleanup)
+-- todo: have a separate status.lualinenumber
+
+-- todo: \starttext bla \blank[foo] bla \stoptext
+
 local function processerror(offset)
     local inputstack = resolvers.inputstack
     local filename = inputstack[#inputstack] or status.filename
     local linenumber = tonumber(status.linenumber) or 0
-    -- print(status.lasterrorstring)
-    -- print(status.lastluaerrorstring)
+    --
+    -- print("[[ last tex error: " .. tostring(status.lasterrorstring) .. " ]]")
+    -- print("[[ last lua error: " .. tostring(status.lastluaerrorstring) .. " ]]")
+    -- print("[[ start errorcontext ]]")
+    -- tex.show_context()
+    -- print("\n[[ stop errorcontext ]]")
+    --
     local lasttexerror = status.lasterrorstring or "?"
     local lastluaerror = status.lastluaerrorstring or lasttexerror
     local luaerrorline = match(lastluaerror,[[lua%]?:.-(%d+)]]) or (lastluaerror and find(lastluaerror,"?:0:",1,true) and 0)
diff --git a/tex/context/base/trac-log.lua b/tex/context/base/trac-log.lua
index 9435fef9b..45cc550d4 100644
--- a/tex/context/base/trac-log.lua
+++ b/tex/context/base/trac-log.lua
@@ -6,6 +6,9 @@ if not modules then modules = { } end modules ['trac-log'] = {
     license = "see context related readme files"
 }
 
+-- In fact all writes could go through lua and we could write the console and
+-- terminal handler in lua then. Ok, maybe it's slower then, so a no-go.
+
 -- if tex and (tex.jobname or tex.formatname) then
 --
 --     -- quick hack, awaiting speedup in engine (8 -> 6.4 sec for --make with console2)
diff --git a/tex/context/base/util-sci.lua b/tex/context/base/util-sci.lua
new file mode 100644
index 000000000..98b05fe75
--- /dev/null
+++ b/tex/context/base/util-sci.lua
@@ -0,0 +1,262 @@
+local gsub, sub, find = string.gsub, string.sub, string.find
+local concat = table.concat
+local formatters = string.formatters
+local lpegmatch = lpeg.match
+local setmetatableindex = table.setmetatableindex
+
+local scite = scite or { }
+utilities.scite = scite
+
+local report = logs.reporter("scite")
+
+local lexerroot = file.dirname(resolvers.find_file("scite-context-lexer.lua"))
+
+local knownlexers = {
+    tex = "tex", mkiv = "tex", mkvi = "tex", mkxi = "tex", mkix = "tex", mkii = "tex", cld = "tex",
+    lua = "lua", lfg = "lua", lus = "lua",
+    w = "web", ww = "web",
+    c = "cpp", h = "cpp", cpp = "cpp", hpp = "cpp", cxx = "cpp", hxx = "cpp",
+    xml = "xml", lmx = "xml", ctx = "xml", xsl = "xml", xsd = "xml", rlx = "xml", css = "xml", dtd = "xml",
+    bib = "bibtex",
+    rme = "txt",
+    -- todo: pat/hyp ori
+}
+
+lexer = nil -- main lexer, global (for the moment needed for themes)
+
+local function loadscitelexer()
+    if not lexer then
+        dir.push(lexerroot)
+        lexer = dofile("scite-context-lexer.lua")
+        dofile("themes/scite-context-theme.lua")
+        dir.pop()
+    end
+    return lexer
+end
+
+local loadedlexers = setmetatableindex(function(t,k)
+    local l = knownlexers[k] or k
+    dir.push(lexerroot)
+    loadscitelexer()
+    local v = lexer.load(formatters["scite-context-lexer-%s"](l))
+    dir.pop()
+    t[l] = v
+    t[k] = v
+    return v
+end)
+
+scite.loadedlexers = loadedlexers
+scite.knownlexers = knownlexers
+scite.loadscitelexer = loadscitelexer
+
+local f_fore_bold = formatters['.%s { display: inline ; font-weight: bold ; color: #%s%s%s ; }']
+local f_fore_none = formatters['.%s { display: inline ; font-weight: normal ; color: #%s%s%s ; }']
+local f_none_bold = formatters['.%s { display: inline ; font-weight: bold ; }']
+local f_none_none = formatters['.%s { display: inline ; font-weight: normal ; }']
+local f_div_class = formatters['<div class="%s">%s</div>']
+local f_linenumber = formatters['\n<div class="linenumber">%s</div>']
+local f_div_number = formatters['.linenumber { display: inline-block ; font-weight: normal ; width: %sem ; margin-right: 2em ; padding-right: .25em ; text-align: right ; background-color: #C7C7C7 ; }']
+
+local replacer_regular = lpeg.replacer {
+    ["<"] = "&lt;",
+    [">"] = "&gt;",
+    ["&"] = "&amp;",
+}
+
+local linenumber = 0
+
+local replacer_numbered = lpeg.replacer {
+    ["<"] = "&lt;",
+    [">"] = "&gt;",
+    ["&"] = "&amp;",
+    [lpeg.patterns.newline] = function() linenumber = linenumber + 1 return f_linenumber(linenumber) end,
+}
+
+local css = nil
+
+local function exportcsslexing()
+    if not css then
+        loadscitelexer()
+        local function black(f)
+            return (f[1] == f[2]) and (f[2] == f[3]) and (f[3] == '00')
+        end
+        local result, r = { }, 0
+        for k, v in table.sortedhash(lexer.context.styles) do
+            local bold = v.bold
+            local fore = v.fore
+            r = r + 1
+            if fore and not black(fore) then
+                if bold then
+                    result[r] = f_fore_bold(k,fore[1],fore[2],fore[3])
+                else
+                    result[r] = f_fore_none(k,fore[1],fore[2],fore[3])
+                end
+            else
+                if bold then
+                    result[r] = f_none_bold(k)
+                else
+                    result[r] = f_none_none(k)
+                end
+            end
+        end
+        css = concat(result,"\n")
+    end
+    return css
+end
+
+local function exportwhites()
+    return setmetatableindex(function(t,k)
+        local v = find(k,"white") and true or false
+        t[k] = v
+        return v
+    end)
+end
+
+local function exportstyled(lexer,text,numbered)
+    local result = lexer.lex(lexer,text,0)
+    local start = 1
+    local whites = exportwhites()
+    local buffer, b = { "<pre>" }, 1
+    linenumber = 1
+    local replacer = numbered and replacer_numbered or replacer_regular
+    if numbered then
+        b = b + 1
+        buffer[b] = f_linenumber(1)
+    end
+    local n = #result
+    for i=1,n,2 do
+        local ii = i + 1
+        local style = result[i]
+        local position = result[ii]
+        local txt = sub(text,start,position-1)
+        if ii == n then
+            txt = gsub(txt,"[%s]+$","")
+        end
+        txt = lpegmatch(replacer,txt)
+        b = b + 1
+        if whites[style] then
+            buffer[b] = txt
+        else
+            buffer[b] = f_div_class(style,txt)
+        end
+        start = position
+    end
+    buffer[b+1] = "</pre>"
+    buffer = concat(buffer)
+    return buffer
+end
+
+local function exportcsslinenumber()
+    return f_div_number(#tostring(linenumber)/2+1)
+end
+
+local htmlfile = utilities.templates.replacer([[
+<?xml version="1.0"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+    <title>context util-sci web page: text</title>
+    <meta http-equiv="content-type" content="text/html; charset=UTF-8"/>
+    <style type="text/css"><!--
+%lexingstyles%
+%numberstyles%
+    --></style>
+    <body>
+%lexedcontent%
+    </body>
+</html>
+]])
+
+function scite.tohtml(data,lexname,numbered)
+    return htmlfile {
+        lexedcontent = exportstyled(loadedlexers[lexname],data or "",numbered), -- before numberstyles
+        lexingstyles = exportcsslexing(),
+        numberstyles = exportcsslinenumber(),
+    }
+end
+
+function scite.filetohtml(filename,lexname,targetname,numbered)
+    io.savedata(targetname or "util-sci.html",scite.tohtml(io.loaddata(filename),lexname or file.suffix(filename),numbered))
+end
+
+function scite.css()
+    return exportcsslexing() .. "\n" .. exportcsslinenumber()
+end
+
+function scite.html(data,lexname,numbered)
+    return exportstyled(loadedlexers[lexname],data or "",numbered)
+end
+
+local f_tree_entry = formatters['<a href="%s" class="dir-entry">%s</a>']
+
+local htmlfile = utilities.templates.replacer([[
+<?xml version="1.0"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+    <title>context util-sci web page: text</title>
+    <meta http-equiv="content-type" content="text/html; charset=UTF-8"/>
+    <style type="text/css"><!--
+%styles%
+    --></style>
+    <body>
+        <pre>
+%dirlist%
+        </pre>
+    </body>
+</html>
+]])
+
+function scite.converttree(sourceroot,targetroot,numbered)
+    if lfs.isdir(sourceroot) then
+        statistics.starttiming()
+        local skipped = { }
+        local noffiles = 0
+        dir.makedirs(targetroot)
+        local function scan(sourceroot,targetroot)
+            local tree = { }
+            for name in lfs.dir(sourceroot) do
+                if name ~= "." and name ~= ".." then
+                    local sourcename = file.join(sourceroot,name)
+                    local targetname = file.join(targetroot,name)
+                    local mode = lfs.attributes(sourcename,'mode')
+                    if mode == 'file' then
+                        local filetype = file.suffix(sourcename)
+                        local basename = file.basename(name)
+                        local targetname = file.replacesuffix(targetname,"html")
+                        if knownlexers[filetype] then
+                            report("converting file %a to %a",sourcename,targetname)
+                            scite.filetohtml(sourcename,nil,targetname,numbered)
+                            noffiles = noffiles + 1
+                            tree[#tree+1] = f_tree_entry(file.basename(targetname),basename)
+                        else
+                            skipped[filetype] = true
+                            report("no lexer for %a",sourcename)
+                        end
+                    else
+                        dir.makedirs(targetname)
+                        scan(sourcename,targetname)
+                        tree[#tree+1] = f_tree_entry(file.join(name,"files.html"),name)
+                    end
+                end
+            end
+            report("saving tree in %a",treename)
+            local htmldata = htmlfile {
+                dirlist = concat(tree,"\n"),
+                styles  = "",
+            }
+            io.savedata(file.join(targetroot,"files.html"),htmldata)
+        end
+        scan(sourceroot,targetroot)
+        if next(skipped) then
+            report("skipped filetypes: %a",table.concat(table.sortedkeys(skipped)," "))
+        end
+        statistics.stoptiming()
+        report("conversion time for %s files: %s",noffiles,statistics.elapsedtime())
+    end
+end
+
+-- scite.filetohtml("strc-sec.mkiv",nil,"e:/tmp/util-sci.html",true)
+-- scite.filetohtml("syst-aux.mkiv",nil,"e:/tmp/util-sci.html",true)
+
+-- scite.converttree("t:/texmf/tex/context","e:/tmp/html/context",true)
+
+return scite
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index 1ac353e30..dd9868626 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
 -- merged file : luatex-fonts-merged.lua
 -- parent file : luatex-fonts.lua
--- merge date  : 04/25/14 00:45:16
+-- merge date  : 04/28/14 23:24:10
 
 do -- begin closure to overcome local limits and interference
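
For orientation, here is a minimal usage sketch of the interface this commit introduces, pieced together from m-scite.mkiv and util-sci.lua above. It is not part of the commit: it assumes the module and the scite-context lexer files are installed where ConTeXt can find them, and the file names are made-up placeholders.

% A sketch only, not part of the commit; "somefile.mkiv" and
% "somefile.html" are hypothetical examples.

\usemodule[scite]    % loads m-scite.mkiv

\startbuffer[demo]
\def\foo{bar}
\stopbuffer

\starttext

% typeset an external file, the lexer being picked from the file suffix
\scitefile[somefile.mkiv] \page

% typeset a named buffer, here forcing the tex lexer
\scitebuffer[tex][demo]

% the same machinery can also write a standalone HTML page (util-sci.lua)
\startluacode
    utilities.scite.filetohtml("somefile.mkiv",nil,"somefile.html",true)
\stopluacode

\stoptext

The HTML side can also be driven outside a typesetting run, for instance via scite.converttree as hinted at in the commented examples at the end of util-sci.lua.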