From 5210d9a67dba47edb0e5c6944c4a5aa8bc6d60fb Mon Sep 17 00:00:00 2001 From: Hans Hagen Date: Fri, 26 Nov 2010 21:21:00 +0100 Subject: beta 2010.11.26 21:21 --- tex/context/base/buff-ini.lua | 565 ++---------- tex/context/base/buff-ini.mkiv | 345 ++------ tex/context/base/buff-ver.lua | 377 ++++++++ tex/context/base/buff-ver.mkiv | 1273 +++++++++------------------ tex/context/base/buff-vis.lua | 74 -- tex/context/base/buff-vis.mkiv | 24 - tex/context/base/char-utf.lua | 8 +- tex/context/base/char-utf.mkiv | 8 +- tex/context/base/cldf-com.lua | 30 + tex/context/base/cldf-ver.lua | 2 +- tex/context/base/colo-ini.mkiv | 13 +- tex/context/base/cont-new.mkiv | 10 +- tex/context/base/cont-new.tex | 2 +- tex/context/base/context.mkiv | 12 +- tex/context/base/context.tex | 2 +- tex/context/base/core-sys.mkiv | 107 ++- tex/context/base/data-env.lua | 40 +- tex/context/base/data-met.lua | 7 +- tex/context/base/data-sch.lua | 21 +- tex/context/base/data-tex.lua | 296 ++++--- tex/context/base/font-ini.mkiv | 42 +- tex/context/base/font-otn.lua | 2 +- tex/context/base/font-syn.lua | 2 +- tex/context/base/l-lpeg.lua | 82 +- tex/context/base/l-pdfview.lua | 3 +- tex/context/base/l-unicode.lua | 291 ++++-- tex/context/base/lang-ini.lua | 5 + tex/context/base/lang-ini.mkiv | 2 + tex/context/base/luat-lib.mkiv | 1 + tex/context/base/luat-mac.lua | 162 ++++ tex/context/base/lxml-dir.lua | 2 +- tex/context/base/math-ini.lua | 10 +- tex/context/base/math-ini.mkiv | 4 + tex/context/base/mlib-pdf.lua | 16 +- tex/context/base/mlib-pps.lua | 18 +- tex/context/base/mult-aux.mkiv | 48 +- tex/context/base/mult-cld.lua | 129 ++- tex/context/base/mult-ini.mkiv | 8 +- tex/context/base/node-aux.lua | 5 +- tex/context/base/node-pro.lua | 22 +- tex/context/base/node-tra.lua | 2 +- tex/context/base/node-tsk.lua | 2 +- tex/context/base/regi-ini.lua | 42 +- tex/context/base/s-abr-01.tex | 5 +- tex/context/base/scrn-int.mkiv | 1 - tex/context/base/scrp-ini.lua | 4 +- tex/context/base/strc-sec.mkiv | 2 +- tex/context/base/tabl-ntb.mkiv | 43 - tex/context/base/tabl-tab.mkiv | 507 ++++++----- tex/context/base/tabl-tbl.mkiv | 26 +- tex/context/base/util-prs.lua | 4 +- tex/context/base/util-seq.lua | 137 +-- tex/context/base/v-default.lua | 40 + tex/context/base/v-default.mkiv | 43 + tex/context/base/v-escaped.lua | 14 + tex/context/base/v-escaped.mkiv | 18 + tex/context/base/v-lua.lua | 238 +++++ tex/context/base/v-lua.mkiv | 61 ++ tex/context/base/v-nested.lua | 80 ++ tex/context/base/v-nested.mkiv | 21 + tex/context/base/v-tex.lua | 94 +- tex/context/base/v-tex.mkiv | 31 +- tex/context/base/v-xml.lua | 133 +++ tex/context/base/v-xml.mkiv | 71 ++ tex/generic/context/luatex-fonts-merged.lua | 86 +- 65 files changed, 3219 insertions(+), 2556 deletions(-) create mode 100644 tex/context/base/buff-ver.lua delete mode 100644 tex/context/base/buff-vis.lua delete mode 100644 tex/context/base/buff-vis.mkiv create mode 100644 tex/context/base/luat-mac.lua create mode 100644 tex/context/base/v-default.lua create mode 100644 tex/context/base/v-default.mkiv create mode 100644 tex/context/base/v-escaped.lua create mode 100644 tex/context/base/v-escaped.mkiv create mode 100644 tex/context/base/v-lua.lua create mode 100644 tex/context/base/v-lua.mkiv create mode 100644 tex/context/base/v-nested.lua create mode 100644 tex/context/base/v-nested.mkiv create mode 100644 tex/context/base/v-xml.lua create mode 100644 tex/context/base/v-xml.mkiv (limited to 'tex') diff --git a/tex/context/base/buff-ini.lua b/tex/context/base/buff-ini.lua index 
19e3a142b..593166038 100644 --- a/tex/context/base/buff-ini.lua +++ b/tex/context/base/buff-ini.lua @@ -6,15 +6,6 @@ if not modules then modules = { } end modules ['buff-ini'] = { license = "see context related readme files" } --- todo: deal with jobname here, or actually, "" is valid as well - --- ctx lua reference model / hooks and such --- to be optimized - --- redefine buffers.get - --- if needed we can make 'm local - local trace_run = false trackers.register("buffers.run", function(v) trace_run = v end) local trace_visualize = false trackers.register("buffers.visualize", function(v) trace_visualize = v end) @@ -22,6 +13,8 @@ local report_buffers = logs.new("buffers") local utf = unicode.utf8 +-- todo: weed the next list + local concat, texprint, texwrite = table.concat, tex.print, tex.write local utfbyte, utffind, utfgsub = utf.byte, utf.find, utf.gsub local type, next = type, next @@ -37,22 +30,15 @@ local allocate = utilities.storage.allocate local tabtospace = utilities.strings.tabtospace buffers = { - data = allocate(), - hooks = { }, - flags = { }, - commands = { }, - visualizers = { }, + data = allocate(), + flags = { }, } local buffers = buffers local context = context -local data = buffers.data -local flags = buffers.flags -local hooks = buffers.hooks -local visualizers = buffers.visualizers - -visualizers.defaultname = variables.typing +local data = buffers.data +local flags = buffers.flags function buffers.raw(name) return data[name] or { } @@ -109,40 +95,6 @@ function buffers.doifelsebuffer(name) commands.testcase(data[name] ~= nil) end --- handy helpers --- --- \sop[color] switch_of_pretty --- \bop[color] begin_of_pretty --- \eop end_of_pretty --- \obs obeyedspace --- \char special characters - -local sop = context.sop -local bop = context.bop -local eop = context.eop -local obs = context.obs -local par = context.par -local chr = context.char - -local bgroup = context.bgroup -local egroup = context.egroup - -flags.optimizeverbatim = true -flags.countemptylines = false - -local doverbatimnobreak = context.doverbatimnobreak -local doverbatimgoodbreak = context.doverbatimgoodbreak - -function buffers.verbatimbreak(n,m) - if flags.optimizeverbatim then - if n == 2 or n == m then - doverbatimnobreak() - elseif n > 1 then - doverbatimgoodbreak() - end - end -end - function buffers.strip(lines,first,last) local first, last = first or 1, last or #lines for i=first,last do @@ -209,41 +161,83 @@ function buffers.range(lines,first,last,range) -- 1,3 1,+3 fromhere,tothere return first, last end -function buffers.type(name,realign,range) - local lines = data[name] - local action = buffers.typeline +-- this will go to buff-ver.lua + +-- there is some overlap in the following + +flags.tablength = 7 + +local function flush(content,method,settings) + local tab = settings.tab + tab = tab and (tab == variables.yes and flags.tablength or tonumber(tab)) + if tab then + content = utilities.strings.tabtospace(content,tab) + end + local visualizer = settings.visualizer + if visualizer and visualizer ~= "" then + visualizers.visualize(visualizer,method,content,settings) + else -- todo: + visualizers.visualize("",method,content,settings) + end +end + +local function filter(lines,settings) -- todo: inline or display in settings + local strip = settings.strip + if strip then + lines = buffers.realign(lines,strip) + end + local line, n = 0, 0 + local first, last, m = buffers.strip(lines) + if range then + first, last = buffers.range(lines,first,last,range) + first, last = 
buffers.strip(lines,first,last) + end + local content = concat(lines,(settings.nature == "inline" and " ") or "\n",first,last) + return content, m +end + +function buffers.typestring(settings) -- todo: settings.nature = "inline" + local content = settings.data + if content and content ~= "" then + flush(content,"inline",settings) + end +end + +function buffers.typebuffer(settings) -- todo: settings.nature = "display" + local name = settings.name + local lines = name and data[name] if lines then if type(lines) == "string" then lines = splitlines(lines) data[name] = lines end - if realign then - lines = buffers.realign(lines,realign) - end - local line, n = 0, 0 - local first, last, m = buffers.strip(lines) - if range then - first, last = buffers.range(lines,first,last,range) - first, last = buffers.strip(lines,first,last) + local content, m = filter(lines,settings) + if content and content ~= "" then + flush(content,"display",settings) end - hooks.begin_of_display() - for i=first,last do - n, line = action(lines[i], n, m, line) - end - hooks.end_of_display() end end -function buffers.loaddata(filename) -- this one might go away - -- this will be cleaned up when we have split supp-fil completely - -- instead of half-half - local ok, str, n = resolvers.loaders.tex(filename) - if not str then - ok, str, n = resolvers.loaders.tex(file.addsuffix(filename,'tex')) +function buffers.typefile(settings) -- todo: settings.nature = "display" + local name = settings.name + local str = buffers.loaddata(name) + if str and str ~= "" then + local regime = settings.regime + if regime and regime ~= "" then + regimes.load(regime) + str = regimes.translate(str,regime) + end + if str and str~= "" then + local lines = splitlines(str) + local content, m = filter(lines,settings) + if content and content ~= "" then + flush(content,"display",settings) + end + end end end -function buffers.loaddata(filename) -- this one might go away +function buffers.loaddata(filename) -- this one might go away or become local local foundname = resolvers.findtexfile(filename) or "" if foundname == "" then foundname = resolvers.findtexfile(file.addsuffix(filename,'tex')) or "" @@ -255,48 +249,6 @@ function buffers.loaddata(filename) -- this one might go away end end -function buffers.typefile(name,realign,range,regime) -- still somewhat messy, since name can be be suffixless - local str = buffers.loaddata(name) - if regime and regime ~= "" then - regimes.load(regime) - str = regimes.translate(str,regime) - end - if str and str~= "" then - local lines = splitlines(str) - if realign then - lines = buffers.realign(lines,realign) - end - local line, n, action = 0, 0, buffers.typeline - local first, last, m = buffers.strip(lines) - hooks.begin_of_display() - if range then - first, last = buffers.range(lines,first,last,range) - first, last = buffers.strip(lines,first,last) - end - for i=first,last do - n, line = action(lines[i], n, m, line) - end - hooks.end_of_display() - end -end - -function buffers.typeline(str,n,m,line) - n = n + 1 - buffers.verbatimbreak(n,m) - if find(str,"%S") then - line = line + 1 - hooks.begin_of_line(line) - hooks.flush_line(hooks.line(str)) - hooks.end_of_line() - else - if flags.countemptylines then - line = line + 1 - end - hooks.empty_line(line) - end - return n, line -end - -- The optional prefix hack is there for the typesetbuffer feature and -- in mkii we needed that (this hidden feature is used in a manual). 
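For orientation, the three new entry points above take a single settings table instead of positional arguments. A minimal sketch of driving them from Lua, where only the field names come from the code in this hunk and all concrete values are invented:

-- not part of the patch: illustration of the settings-table interface

buffers.typestring {              -- inline path
    data       = [[\TEX\ is fun]],
    tab        = "yes",           -- "yes" expands to flags.tablength, a number is used as given
    visualizer = "tex",
}

buffers.typebuffer {              -- display path for a stored buffer
    name       = "demo",          -- filled earlier with buffers.set
    strip      = "auto",          -- handed to buffers.realign
    nature     = "display",       -- used by filter to pick the line separator
    visualizer = "lua",
}

buffers.typefile {                -- display path for an external file
    name       = "demo.lua",      -- located via buffers.loaddata
    regime     = "",              -- a non-empty value is passed through regimes.translate
    tab        = "4",
    visualizer = "lua",
}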
@@ -397,7 +349,7 @@ function buffers.collect(names,separator) -- no print return concat(t,separator or "\r") -- "\n" is safer due to comments and such end -function buffers.feedback(names,separator) +function buffers.feedback(names,separator) -- we can use cld -- don't change the texprint into texsprint as it fails on mp buffers -- because (penddef) becomes penddef then texprint(ctxcatcodes,splitlines(buffers.collect(names,separator))) @@ -422,271 +374,6 @@ function buffers.inspect(name) end end --- maybe just line(n,str) empty(n,str) - -visualizers.tablength = 7 -visualizers.enabletab = true -- false -visualizers.obeyspace = true - -function buffers.settablength(tablength) - visualizers.tablength = tablength and tonumber(tablength) or 7 -end - -visualizers.handlers = visualizers.handlers or { } - -local handlers = visualizers.handlers - -function buffers.newvisualizer(name) - name = lower(name) - local handler = { } - handlers[name] = handler - return handler -end - -function buffers.getvisualizer(name) - name = lower(name) - return handlers[name] or buffers.loadvisualizer(name) -end - -function buffers.loadvisualizer(name) - name = lower(name) - local hn = handlers[name] - if hn then - return hn - else - environment.loadluafile("pret-" .. name) - local hn = handlers[name] - if not hn then - -- hn = buffers.newvisualizer(name) - hn = handlers[visualizers.defaultname] - handlers[name] = hn - if trace_visualize then - report_buffers("mapping '%s' visualizer onto '%s'",name,visualizers.defaultname) - end - elseif trace_visualize then - report_buffers("loading '%s' visualizer",name) - end - return hn - end -end - --- was "default", should be set at tex end (todo) - -local default = buffers.newvisualizer(visualizers.defaultname) - -default.begin_of_display = context.doverbatimbeginofdisplay -default.end_of_display = context.doverbatimendofdisplay -default.begin_of_inline = context.doverbatimbeginofinline -default.end_of_inline = context.doverbatimendofinline -default.begin_of_line = context.doverbatimbeginofline -default.end_of_line = context.doverbatimendofline -default.empty_line = context.doverbatimemptyline - ---~ print(variables.typing) os.exit() - --- will become cleaner - -local currentvisualizer, currenthandler - -function buffers.setvisualizer(str) - currentvisualizer = lower(str) - currenthandler = handlers[currentvisualizer] - if currenthandler then - -- if trace_visualize then - -- report_buffers("enabling specific '%s' visualizer",currentvisualizer) - -- end - else - currentvisualizer = visualizers.defaultname - currenthandler = handlers.default - -- if trace_visualize then - -- report_buffers("enabling default visualizer '%s'",currentvisualizer) - -- end - end - if currenthandler.reset then - currenthandler.reset() - end -end - -function buffers.resetvisualizer() - currentvisualizer = visualizers.defaultname - currenthandler = handlers.default - if currenthandler.reset then - currenthandler.reset() - end -end - -buffers.setvisualizer(visualizers.defaultname) - -function visualizers.reset() -end - -function buffers.doifelsevisualizer(str) - commands.testcase((str ~= "") and (handlers[lower(str)] ~= nil)) -end - --- calling routines, don't change - -function hooks.begin_of_display() - (currenthandler.begin_of_display or default.begin_of_display)(currentvisualizer) -end - -function hooks.end_of_display() - (currenthandler.end_of_display or default.end_of_display)() -end - -function hooks.begin_of_inline() - (currenthandler.begin_of_inline or 
default.begin_of_inline)(currentvisualizer) -end - -function hooks.end_of_inline() - (currenthandler.end_of_inline or default.end_of_inline)() -end - -function hooks.flush_line(str,nesting) - local fl = currenthandler.flush_line - if fl then - str = gsub(str," *[\n\r]+ *"," ") ; -- semi colon needed - fl(str,nesting) - else - -- gsub done later - default.flush_line(str,nesting) - end -end - -function hooks.flush_inline(str,nesting) - hooks.begin_of_inline() - hooks.flush_line(str,nesting) - hooks.end_of_inline() -end - -function hooks.begin_of_line(n) - (currenthandler.begin_of_line or default.begin_of_line)(n) -end - -function hooks.end_of_line() - (currenthandler.end_of_line or default.end_of_line)() -end - -function hooks.empty_line() - (currenthandler.empty_line or default.empty_line)() -end - -function hooks.line(str) - if visualizers.enabletab then - str = tabtospace(str,visualizers.tablength) - else - str = gsub(str,"\t"," ") - end - return (currenthandler.line or default.line)(str) -end - -buffers.currentcolors = { } -- todo: registercurrentcolor and preset sop then ... faster or at least precreate tables - -function buffers.changestate(n, state) - if n then - if state ~= n then - if state > 0 then - sop { buffers.currentcolors[n] } - else - bop { buffers.currentcolors[n] } - end - return n - end - elseif state > 0 then - eop() - return 0 - end - return state -end - -function default.line(str) - return str -end - -function default.flush_line(str) - str = gsub(str," *[\n\r]+ *"," ") - if visualizers.obeyspace then - for c in utfcharacters(str) do - if c == " " then - obs() - else - texwrite(c) - end - end - else - texwrite(str) - end -end - --- special one - -buffers.commands.nested = "\\switchslantedtype " - -function visualizers.flushnested(str, enable) -- todo: no utf, vrb catcodes, kind of obsolete mess - str = gsub(str," *[\n\r]+ *"," ") - local c, nested, i = "", 0, 1 - local commands = buffers.commands -- otherwise wrong commands - while i < #str do -- slow - c = sub(str,i,i+1) - if c == "<<" then - nested = nested + 1 - bgroup() - if enable then - context(commands.nested) - end - i = i + 2 - elseif c == ">>" then - if nested > 0 then - nested = nested - 1 - egroup() - end - i = i + 2 - else - c = sub(str,i,i) - if c == " " then - obs() - elseif find(c,"%a") then - context(c) - else - chr(byte(c)) - end - i = i + 1 - end - end - chr(byte(sub(str,i,i))) - for i=1,#nested do - egroup() - end -end - -function buffers.finishstate(state) - if state > 0 then - eop() - return 0 - else - return state - end -end - -local opennested = rep("\\char"..byte('<').." ",2) -local closenested = rep("\\char"..byte('>').." 
",2) - -function buffers.replacenested(result) - result = gsub(result,opennested, "{") - result = gsub(result,closenested,"}") - return result -end - -function buffers.flushresult(result,nested) - if nested then - context(buffers.replacenested(concat(result))) - else - context(concat(result)) - end -end - --- new - function buffers.realign(name,forced_n) -- no, auto, local n, d if type(name) == "string" then @@ -725,111 +412,3 @@ function buffers.realign(name,forced_n) -- no, auto, end -- escapes: buffers.setescapepair("tex","/BTEX","/ETEX") - -local function flush_escaped_line(str,pattern,flushline) - while true do - local a, b, c = match(str,pattern) - if a and a ~= "" then - flushline(a) - end - if b and b ~= "" then - context(b) - end - if c then - if c == "" then - break - else - str = c - end - else - flushline(str) - break - end - end -end - -function buffers.setescapepair(name,pair) - if pair and pair ~= "" then - local visualizer = buffers.getvisualizer(name) - visualizer.normal_flush_line = visualizer.normal_flush_line or visualizer.flush_line - if pair == variables.no then - visualizer.flush_line = visualizer.normal_flush_line or visualizer.flush_line - if trace_visualize then - report_buffers("resetting escape range for visualizer '%s'",name) - end - else - local start, stop - if pair == variables.yes then - start, stop = "/BTEX", "/ETEX" - else - pair = string.split(pair,",") - start, stop = escapedpattern(pair[1] or ""), escapedpattern(pair[2] or "") - end - if start ~= "" then - local pattern - if stop == "" then - pattern = "^(.-)" .. start .. "(.*)(.*)$" - else - pattern = "^(.-)" .. start .. "(.-)" .. stop .. "(.*)$" - end - function visualizer.flush_line(str) - flush_escaped_line(str,pattern,visualizer.normal_flush_line) - end - if trace_visualize then - report_buffers("setting escape range for visualizer '%s' to %s -> %s",name,start,stop) - end - elseif trace_visualize then - report_buffers("problematic escape specification '%s' for visualizer '%s'",pair,name) - end - end - end -end - --- THIS WILL BECOME A FRAMEWORK: the problem with pretty printing is that --- we deal with snippets and therefore we need tolerant parsing - ---~ local type = type - ---~ visualizers = visualizers or { } - ---~ local function fallback(s) return s end - ---~ function visualizers.visualize(visualizer,kind,pattern) ---~ if type(visualizer) == "table" and type(kind) == "string" then ---~ kind = visualizer[kind] or visualizer.default or fallback ---~ else ---~ kind = fallback ---~ end ---~ return (lpeg.C(pattern))/kind ---~ end - ---~ local flusher = texio.write ---~ local format = string.format - ---~ local visualizer = { ---~ word = function(s) return flusher(format("\\bold{%s}",s)) end, ---~ number = function(s) return flusher(format("\\slanted{%s}",s)) end, ---~ default = function(s) return flusher(s) end, ---~ } - ---~ local word = lpeg.R("AZ","az")^1 ---~ local number = lpeg.R("09")^1 ---~ local any = lpeg.P(1) - ---~ local pattern = lpeg.P { "start", ---~ start = ( ---~ visualizers.visualize(visualizer,"word",word) + ---~ visualizers.visualize(visualizer,"number",number) + ---~ visualizers.visualize(visualizer,"default",any) ---~ )^1 ---~ } - ---~ str = [[test 123 test $oeps$]] - ---~ lpegmatch(pattern,str) - -buffers.obsolete = buffers.obsolete or { } -local obsolete = buffers.obsolete - -buffers.finish_state = buffers.finishstate obsolete.finish_state = buffers.finishstate -buffers.change_state = buffers.finishstate obsolete.change_state = buffers.finishstate diff --git 
a/tex/context/base/buff-ini.mkiv b/tex/context/base/buff-ini.mkiv index 19fabcff6..f195cd8cf 100644 --- a/tex/context/base/buff-ini.mkiv +++ b/tex/context/base/buff-ini.mkiv @@ -15,75 +15,32 @@ \registerctxluafile{buff-ini}{1.001} -% todo: move all to lua, also before and after, just context.beforebuffer() -% todo: commalist to lua end -% todo: jobname == "" so no need for testing - -% todo: -% -% \startluacode -% local locations = { } -% function document.set_number(name) -% locations[name] = { -% line = status.linenumber, -% file = status.filename -% } -% end -% function document.add_number(name) -% local b, l = buffers.raw(name), locations[name] -% if b and l then -% for i=1,#b do -% b[i] = string.gsub(b[i],"# line: ","# line: " .. l.line + 2) -% end -% end -% end -% \stopluacode -% -% \starttext -% -% \ctxlua{document.set_number("oeps")} -% \startbuffer[oeps] -% # line: -% -% test -% test -% \stopbuffer -% \ctxlua{document.add_number("oeps")} -% -% \typebuffer[oeps] -% -% \stoptext - -\ifdefined\doinitializeverbatim \else% temp hack - \def\doinitializeverbatim{\tttf} -\fi - \unprotect -\setnewconstant\buffernestmode\plusone % 0: not nested, 1: startbuffer nested, 2: all buffers nested +% number is messy and not needed as we store the number anyway +% we can get rid of \c!number -\newconditional\segmentatebuffermode % not implemented in mkiv (yet) +\newcount\nofdefinedbuffers -\def\currentbuffer{\jobname} +\let\currentbuffer\empty -\def\setcurrentbuffer#1% - {\doifelsenothing{#1}{\edef\currentbuffer{\jobname}}{\edef\currentbuffer{#1}}} +\def\doifelsebuffer#1% + {\ctxlua{buffers.doifelsebuffer("#1")}} \def\resetbuffer {\dosingleempty\doresetbuffer} \def\doresetbuffer[#1]% - {\begingroup - \setcurrentbuffer{#1}% - \ctxlua{buffers.erase("\currentbuffer")}% - \endgroup} + {\ctxlua{buffers.erase("#1")}} -\def\dostartbuffer +\unexpanded\def\dostartdefinedbuffer {\bgroup - \obeylines % nodig, anders gaat 't fout als direct \starttable (bv) + \obeylines \doquadrupleempty\dodostartbuffer} -\def\dodostartbuffer[#1][#2][#3][#4]% upward compatible +\let\dostartbuffer\dostartdefinedbuffer % used in some modules + +\def\dodostartbuffer[#1][#2][#3][#4]% [category] [name] [start] [stop] {\iffourthargument \def\next{\dododostartbuffer{#1}{#2}{#3}{#4}}% \else @@ -91,65 +48,43 @@ \fi \next} -\def\dododostartbuffer#1#2#3#4% - {%\showmessage\m!systems{15}{#2}% - \doifelsevalue{\??bu#1\c!paragraph}\v!yes - {\settrue\segmentatebuffermode} % todo in mkiv - {\doifnumberelse{\getvalue{\??bu#1\c!paragraph}} - {\settrue \segmentatebuffermode} - {\setfalse\segmentatebuffermode}}% - \doifvalue{\??bu#1\c!local}\v!yes - {\buffernestmode\plustwo}% permit nesting - \setcurrentbuffer{#2}% - \doifelsenothing{#4} - {\normalexpanded{\noexpand\setbuffercapsules{\e!start\v!buffer}{\e!stop\v!buffer}}% - \letvalue\bufferstop\relax} - %{\@EA\setbuffercapsules\@EA{\csname#3\@EA\endcsname\@EA}\@EA{\csname#4\endcsname}}% if we strip later - {\setbuffercapsules{#3}{#4}}% - \normalexpanded{\noexpand\dodowithbuffer - {\currentbuffer} - {\bufferstart} - {\bufferstop} - {\donothing} - {\egroup - \noexpand\getvalue{\bufferstop}}}} - -\letvalue{\e!start\v!buffer}\dostartbuffer - -\let\endbuffer\undefined % to please the dep parser +\def\dododostartbuffer#1#2#3#4% \donothing needed ! 
+ {\normalexpanded{\dodowithbuffer{#2}{#3}{#4}{\donothing}{\egroup\noexpand\getvalue{#4}}}} + +\setvalue{\e!start\v!buffer}% + {\bgroup + \obeylines + \dosingleempty\redostartbuffer} + +\def\redostartbuffer[#1]% + {\dododostartbuffer{}{#1}{\e!start\v!buffer}{\e!stop\v!buffer}} \def\dowithbuffer#1#2#3% name, startsequence, stopsequence, before, after - {\setbuffercapsules{#2}{#3}% - \normalexpanded{\noexpand\dodowithbuffer{#1}{\bufferstart}{\bufferstop}}} + {\normalexpanded{\dodowithbuffer{#1}{#2}{#3}}} -\long\def\dodowithbuffer#1#2#3#4#5% name, startsequence, stopsequence, before, after +\unexpanded\long\def\dodowithbuffer#1#2#3#4#5% name, startsequence, stopsequence, before, after {#4% \bgroup - \setcatcodetable \vrbcatcodes - \catcode`\\=12 \ctxlua{buffers.erase("#1")}% + \setcatcodetable \vrbcatcodes \long\def\nododowithbuffer {\egroup #5}% \long\def\dododowithbuffer##1#3% is detokenize needed? TEST - {\ctxlua - {buffers.grab("#1","#2","#3",\!!bs\detokenize{##1}\!!es)} - \dododowithbuffer - \nododowithbuffer}% + {\ctxlua{buffers.grab("#1","#2","#3",\!!bs\detokenize{##1}\!!es)} % space ? + \dododowithbuffer + \nododowithbuffer}% \dododowithbuffer} -\def\setbuffercapsules#1#2% \scantextokens not needed (had a reason at some point) - {\edef\bufferstart{#1}\edef\bufferstart{\scantextokens\expandafter{\bufferstart}}% - \edef\bufferstop {#2}\edef\bufferstop {\scantextokens\expandafter{\bufferstop }}} - \def\setbuffer {\dosingleempty\dosetbuffer} +\let\endbuffer\relax + \long\def\dosetbuffer[#1]#2\endbuffer % seldom used so we just pass #2 - {\begingroup - \setcurrentbuffer{#1}% - \ctxlua{buffers.set("\currentbuffer", \!!bs\detokenize{#2}\!!es)}% - \endgroup} + {\ctxlua{buffers.set("#1", \!!bs\detokenize{#2}\!!es)}} + +\def\namedbufferparameter#1#2{\csname\??bu#1#2\endcsname} \unexpanded\def\setupbuffer {\dodoubleempty\dosetupbuffer} @@ -161,191 +96,71 @@ \getparameters[\??bu][#1]% \fi} -\def\dodefinebuffer[#1][#2]% - {\iffirstargument % else problems - \doglobal\increment\nofdefinedbuffers - \letvalue{\??bu#1\c!number }\nofdefinedbuffers - \letvalue{\??bu#1\c!paragraph}\v!no - \setuevalue{\e!start#1}{\noexpand\dostartbuffer[#1][def-\nofdefinedbuffers][\e!start#1][\e!stop#1]}% maybe also relax stop - \setuevalue{\e!get #1}{\noexpand\dogetbuffer [#1][def-\nofdefinedbuffers]}% - \setuevalue{\e!type #1}{\noexpand\dotypebuffer [#1][def-\nofdefinedbuffers]}% - \getparameters[\??bu#1][#2]% - \fi} +\newtoks\everydefinebuffer \unexpanded\def\definebuffer {\dodoubleempty\dodefinebuffer} -\def\thebuffernumber#1% - {\csname\??bu#1\c!number\endcsname} - -\def\thedefinedbuffer#1% - {def-\csname\??bu#1\c!number\endcsname} - -\unexpanded\def\getbuffer - {\dodoubleempty\dogetbuffer} - -\def\dogetbuffer[#1][#2]% - {\ifsecondargument - \dodogetbuffer[#1][#2]% - \else - \dodogetbuffer[][#1]% - \fi} - -\def\dogetbufferasis{\ctxlua{buffers.get("\currentbuffer")}} - -\def\dodogetbuffer[#1][#2]% - {\getvalue{\??bu#1\c!before}% - \dobuffer{16}{#2}\dogetbufferasis - \getvalue{\??bu#1\c!after}} - -\unexpanded\def\typebuffer - {\dodoubleempty\dotypebuffer} - -\def\doprocessbufferverbatim - {\doinitializeverbatim - \beginofverbatimlines - \dostarttagged\t!verbatim\currentbuffer - \ctxlua{buffers.type("\currentbuffer","\typingparameter\c!strip")}% - \dostoptagged - \endofverbatimlines} - -\def\doprocessbufferlinesverbatim#1#2#3% - {#2\doprocessbufferverbatim#3} - -\def\doifelsebuffer#1% - {\ctxlua{buffers.doifelsebuffer("#1")}} - -\def\dodotypebuffer#1#2#3% see dodotypefile - {\doifelsebuffer{#3} - 
{\dosometyping{#1}{#2}{#3}\doprocessbufferverbatim\doprocessbufferlinesverbatim} - {\reporttypingerror{#3}}} - -\def\dotypefilebuffer{\dodotypebuffer{\v!file}{}{\currentbuffer}}% - -\def\dotypebuffer[#1][#2]% +\def\dodefinebuffer[#1][#2]% {\iffirstargument - \dobuffer{17}{#1}\dotypefilebuffer + \global\advance\nofdefinedbuffers\plusone + \setevalue{\??bu#1\c!number}{\number\nofdefinedbuffers}% + \def\currentbuffer{#1}% + \getparameters[\??bu#1][#2]% + \the\everydefinebuffer \else - \dobuffer{17}{#2}\dotypefilebuffer + % fatal error \fi} -\def\dobuffer#1#2#3% - {\doifelsenothing{#2} - {\dodobuffer#3\jobname} - {\processcommalist[#2]{\dodobuffer#3}}} +\def\thebuffernumber #1{\csname\??bu#1\c!number\endcsname} +\def\thedefinedbuffer#1{def-\csname\??bu#1\c!number\endcsname} + +\appendtoks + \setuevalue{\e!start\currentbuffer}% + {\noexpand\dostartdefinedbuffer + [\currentbuffer]% + [def-\number\nofdefinedbuffers]% + [\e!start\currentbuffer]% + [\e!stop\currentbuffer]}% + \setuevalue{\e!get\currentbuffer}% + {\noexpand\dogetdefinedbuffer + [\currentbuffer]% + [def-\number\nofdefinedbuffers]}% +\to \everydefinebuffer + +\def\doprocessbufferlist#1#2% + {\doifelsenothing{#1} + {\dododogetbuffer\empty} + {\processcommalist[#1]#2}} + +\unexpanded\def\getbuffer % no [settings yet] + {\dosingleempty\dogetbuffer} -\def\dodobuffer#1#2% command name - {\pushmacro\currentbuffer - \edef\currentbuffer{\ifcsname\??bu#2\c!number\endcsname def-\csname\??bu#2\c!number\endcsname\else#2\fi}% - #1% - \popmacro\currentbuffer} +\unexpanded\def\dogetbuffer[#1]% [name] + {\namedbufferparameter\empty\c!before + \doprocessbufferlist{#1}\dododogetbuffer + \namedbufferparameter\empty\c!after} -\def\processTEXbuffer{\getbuffer} % handy +\def\dogetdefinedbuffer[#1][#2]% + {\namedbufferparameter{#1}\c!before + \dododogetbuffer{#2}% + \namedbufferparameter{#1}\c!after} -% extras: +\def\dododogetbuffer#1% + {\ctxlua{buffers.get("#1")}} \def\inspectbuffer {\dosingleempty\doinspectbuffer} \def\doinspectbuffer[#1]% - {\setcurrentbuffer{#1}% - \ctxlua{buffers.inspect("\currentbuffer")}} - -% seldom used, only in a few projects that demanded more speed - -\let\usememorybuffers\relax -\let\usefilebuffers \relax - -% this features is soldom used (complex examns where we need to fetch -% special parts of a text -% -% this is not yet supported in mkiv (relatively easy to do but there -% we don't have the par tags but need to grab 'm - -\def\skippedbufferparagraphs{0} - -\let\startbufferparagraph\relax -\let\stopbufferparagraph \par % \relax - -\newcount\currentbufferparagraph - -\def\getbufferparagraphs - {\dodoubleempty\dogetbufferparagraphs} - -\def\dosetbufferoffset#1% - {\doifnumberelse{\getvalue{\??bu#1\c!paragraph}} - {\currentbufferparagraph-\getvalue{\??bu#1\c!paragraph}} - {\currentbufferparagraph \zerocount}% - \relax} - -\def\dogetbufferparagraphs[#1][#2]% - {\iffirstargument - \ifsecondargument - \dosetbufferoffset{#1}% - \doifelse{#2}\v!all - {\unexpanded\def\startbufferparagraph{\normalbufferparagraph{#1}}} - {\unexpanded\def\startbufferparagraph{\filterbufferparagraph{#1}{#2}}}% - \unexpanded\def\stopbufferparagraph{\dostopbufferparagraph{#1}}% - \def\next{\getparagraphedbuffer[#1]}% - \else - \dosetbufferoffset\empty - \unexpanded\def\startbufferparagraph{\filterbufferparagraph{}{#1}}% - \unexpanded\def\stopbufferparagraph{\dostopbufferparagraph{}}% - \def\next{\getparagraphedbuffer[]}% - \fi - \else - \dosetbufferoffset\empty - \unexpanded\def\startbufferparagraph{\normalbufferparagraph{}}% - 
\unexpanded\def\stopbufferparagraph{\dostopbufferparagraph{}}% - \def\next{\getparagraphedbuffer[]}% - \fi - \next} - -\def\dotypeparagraphbuffer{\ctxlua{buffers.get("\currentbuffer")}} - -\def\getparagraphedbuffer[#1]% - {\dobuffer{16}{#1}\dotypeparagraphbuffer} + {\ctxlua{buffers.inspect("#1")}} -\def\dostopbufferparagraph#1% - {\getvalue{\??bu#1\c!after}\par} +\definebuffer[\v!hiding] \setupbuffer[\v!hiding][\c!before=,\c!after=] -\def\dostartbufferparagraph#1% - {\par\getvalue{\??bu#1\c!before}} - -\def\normalbufferparagraph - {\advance\currentbufferparagraph \plusone - \ifnum\currentbufferparagraph>\zerocount - \expandafter\dostartbufferparagraph - \else - \expandafter\gobbleoneargument - \fi} - -\def\filterbufferparagraph#1#2% - {\advance\currentbufferparagraph \plusone - \ifcase\currentbufferparagraph - \@EA\gobblebufferparagraph - \else - \doifinsetelse{\the\currentbufferparagraph}{#2} - {\@EA\dostartbufferparagraph} - {\@EA\fakebufferparagraph}% - \fi - {#1}} - -\long\def\gobblebufferparagraph#1#2\stopbufferparagraph - {} - -\def\fakebufferparagraph#1% - {\bgroup - \unexpanded\def\stopbufferparagraph{\dostopbufferparagraph{#1}\egroup\egroup}% - \setbox\scratchbox\vbox\bgroup\dostartbufferparagraph{#1}} - -% definitions - -\definebuffer[\v!hiding] \setupbuffer[\v!hiding][\c!local=\v!yes] +\let\processTEXbuffer\getbuffer % handy synonym \setupbuffer - [\c!paragraph=\v!no, - \c!before=, + [\c!before=, \c!after=] % only mkiv: @@ -375,8 +190,6 @@ {\dosingleempty\doctxluabuffer} \def\doctxluabuffer[#1]% - {\doifelsenothing{#1} - {\ctxlua{buffers.evaluate("\jobname")}} - {\ctxlua{buffers.evaluate("#1")}}} + {\ctxlua{buffers.evaluate("#1")}} \protect \endinput diff --git a/tex/context/base/buff-ver.lua b/tex/context/base/buff-ver.lua new file mode 100644 index 000000000..9574768b5 --- /dev/null +++ b/tex/context/base/buff-ver.lua @@ -0,0 +1,377 @@ +if not modules then modules = { } end modules ['buff-ver'] = { + version = 1.001, + comment = "companion to buff-ver.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- The default visualizers have reserved names starting with v-*. Users are +-- supposed to use different names for their own variants. 
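As a concrete reading of that convention: a user variant would live in, say, myverb.lua plus myverb.mkiv, since reserved names are looked up as v-myverb.* first. A rough sketch of such a Lua side, using only newhandler, makepattern and register as defined further down in this file; the name, the rule set and the styling are invented:

-- not part of the patch: sketch of a user visualizer outside the v-* namespace

local P, V, patterns = lpeg.P, lpeg.V, lpeg.patterns
local context, verbatim = context, context.verbatim
local makepattern = visualizers.makepattern

local handler = visualizers.newhandler {
    comment = function(s)          -- how a rule hands its snippet back to TeX
        context.bgroup()
        context.em()               -- assumed: any font or color switch will do
        verbatim(s)
        context.egroup()
    end,
}

local grammar = { "visualizer",
    comment    = makepattern(handler,"comment",P("--") * (1 - patterns.newline)^0),
    space      = makepattern(handler,"space",patterns.space),
    newline    = makepattern(handler,"newline",patterns.newline),
    rest       = makepattern(handler,"default",(1 - patterns.space - patterns.newline - P("--"))^1),
    visualizer = (V("comment") + V("space") + V("newline") + V("rest"))^1,
}

visualizers.register("myverb", { parser = P(grammar), handler = handler })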
+ +local type, rawset, rawget, setmetatable, getmetatable = type, rawset, rawget, setmetatable, getmetatable +local format, lower, match = string.format, string.lower, string.match +local C, P, V, Carg = lpeg.C, lpeg.P, lpeg.V, lpeg.Carg +local patterns, lpegmatch, lpegtype = lpeg.patterns, lpeg.match, lpeg.type + +local function is_lpeg(p) + return p and lpegtype(p) == "pattern" +end + +visualizers = visualizers or { } + +local specifications = { } visualizers.specifications = specifications + +local verbatim = context.verbatim +local variables = interfaces.variables +local findfile = resolvers.findfile +local addsuffix = file.addsuffix + +local v_yes = variables.yes + +-- beware, these all get an argument (like newline) + +local doinlineverbatimnewline = context.doinlineverbatimnewline +local doinlineverbatimbeginline = context.doinlineverbatimbeginline +local doinlineverbatimemptyline = context.doinlineverbatimemptyline +local doinlineverbatimstart = context.doinlineverbatimstart +local doinlineverbatimstop = context.doinlineverbatimstop + +local dodisplayverbatimnewline = context.dodisplayverbatimnewline +local dodisplayverbatimbeginline = context.dodisplayverbatimbeginline +local dodisplayverbatimemptyline = context.dodisplayverbatimemptyline +local dodisplayverbatimstart = context.dodisplayverbatimstart +local dodisplayverbatimstop = context.dodisplayverbatimstop + +local doverbatimspace = context.doverbatimspace + +local CargOne = Carg(1) + +local function f_emptyline(s,settings) + if settings and settings.currentnature == "inline" then + doinlineverbatimemptyline() + else + dodisplayverbatimemptyline() + end +end + +local function f_beginline(s,settings) + if settings and settings.currentnature == "inline" then + doinlineverbatimbeginline() + else + dodisplayverbatimbeginline() + end +end + +local function f_newline(s,settings) + if settings and settings.currentnature == "inline" then + doinlineverbatimnewline() + else + dodisplayverbatimnewline() + end +end + +local function f_start(s,settings) + if settings and settings.currentnature == "inline" then + doinlineverbatimstart() + else + dodisplayverbatimstart() + end +end + +local function f_stop(s,settings) + if settings and settings.currentnature == "inline" then + doinlineverbatimstop() + else + dodisplayverbatimstop() + end +end + +local function f_default(s) -- (s,settings) + verbatim(s) +end + +local function f_space() -- (s,settings) + doverbatimspace() +end + +local functions = { __index = { + emptyline = f_emptyline, + newline = f_newline, + default = f_default, + beginline = f_beginline, + space = f_space, + start = f_start, + stop = f_stop, + } +} + +local handlers = { } + +function visualizers.newhandler(name,data) + local tname, tdata = type(name), type(data) + if tname == "table" then -- (data) + setmetatable(name,getmetatable(name) or functions) + return name + elseif tname == "string" then + if tdata == "string" then -- ("name","parent") + local result = { } + setmetatable(result,getmetatable(handlers[data]) or functions) + handlers[name] = result + return result + elseif tdata == "table" then -- ("name",data) + setmetatable(data,getmetatable(data) or functions) + handlers[name] = data + return data + else -- ("name") + local result = { } + setmetatable(result,functions) + handlers[name] = result + return result + end + else -- () + local result = { } + setmetatable(result,functions) + return result + end +end + +function visualizers.newgrammar(name,t) + t = t or { } + local g = visualizers.specifications[name] + g 
= g and g.grammar + if g then + for k,v in next, g do + if not t[k] then + t[k] = v + end + if is_lpeg(v) then + t[name..":"..k] = v + end + end + end + return t +end + +local fallback = context.verbatim + +local function makepattern(visualizer,kind,pattern) + if not pattern then + logs.simple("error in visualizer: %s",kind) + return patterns.alwaystrue + else + if type(visualizer) == "table" and type(kind) == "string" then + kind = visualizer[kind] or fallback + else + kind = fallback + end + return (C(pattern) * CargOne) / kind + end +end + +visualizers.pattern = makepattern +visualizers.makepattern = makepattern + +function visualizers.load(name) + if rawget(specifications,name) == nil then + name = lower(name) + local texname = findfile(format("v-%s.mkiv",name)) + local luaname = findfile(format("v-%s.lua" ,name)) + if texname == "" or luaname == "" then + -- assume a user specific file + luaname = findfile(addsuffix(name,"mkiv")) + texname = findfile(addsuffix(name,"lua" )) + end + if texname == "" or luaname == "" then + -- error message + else + lua.registercode(luaname) + context.input(texname) + end + if rawget(specifications,name) == nil then + rawset(specifications,name,false) + end + end +end + +function commands.doifelsevisualizer(name) + commands.testcase(specifications[lower(name)]) +end + +function visualizers.register(name,specification) + specifications[name] = specification + local parser, handler = specification.parser, specification.handler + local displayparser = specification.display or parser + local inlineparser = specification.inline or parser + local isparser = is_lpeg(parser) + local start, stop + if isparser then + start = makepattern(handler,"start",patterns.alwaysmatched) + stop = makepattern(handler,"stop",patterns.alwaysmatched) + end + if handler then + if isparser then + specification.display = function(content,settings) + if handler.startdisplay then handler.startdisplay(settings) end + lpegmatch(start * displayparser * stop,content,1,settings) + if handler.stopdisplay then handler.stopdisplay(settings) end + end + specification.inline = function(content,settings) + if handler.startinline then handler.startinline(settings) end + lpegmatch(start * inlineparser * stop,content,1,settings) + if handler.stopinline then handler.stopinline(settings) end + end + specification.direct = function(content,settings) + lpegmatch(parser,content,1,settings) + end + elseif parser then + specification.display = function(content,settings) + if handler.startdisplay then handler.startdisplay(settings) end + parser(content,settings) + if handler.stopdisplay then handler.stopdisplay(settings) end + end + specification.inline = function(content,settings) + if handler.startinline then handler.startinline(settings) end + parser(content,settings) + if handler.stopinline then handler.stopinline(settings) end + end + specification.direct = parser + end + elseif isparser then + specification.display = function(content,settings) + lpegmatch(start * displayparser * stop,content,1,settings) + end + specification.inline = function(content,settings) + lpegmatch(start * inlineparser * stop,content,1,settings) + end + specification.direct = function(content,settings) + lpegmatch(parser,content,1,settings) + end + elseif parser then + specification.display = parser + specification.inline = parser + specification.direct = parser + end + return specification +end + +local function getvisualizer(method,nature) + local m = specifications[method] or specifications.default + if nature then + 
return m and (m[nature] or m.parser) or nil + else + return m and m.parser or nil + end +end + +local escapepatterns = { } visualizers.escapepatterns = escapepatterns + +local function texmethod(s) + context.bgroup() + context(s) + context.egroup() +end + +local function defaultmethod(s,settings) + lpegmatch(getvisualizer("default"),s,1,settings) +end + +function visualizers.registerescapepattern(name,before,after,normalmethod,escapemethod) + local escapepattern = escapepatterns[name] + if not escapepattern then + before, after = P(before) * patterns.space^0, patterns.space^0 * P(after) + escapepattern = ( + (before / "") + * ((1 - after)^0 / (escapemethod or texmethod)) + * (after / "") + + ((1 - before)^1) / (normalmethod or defaultmethod) + )^0 + escapepatterns[name] = escapepattern + end + return escapepattern +end + +local escapedvisualizers = { } + +local function visualize(method,nature,content,settings) -- maybe also method and nature in settings + if content and content ~= "" then + local m + local e = settings.escape + if e and e ~= "" then + local newname = format("%s-%s",e,method) + local newspec = specifications[newname] + if newspec then + m = newspec + else + local start, stop + if e == v_yes then + start, stop = "/BTEX", "/ETEX" + else + start,stop = match(e,"^(.-),(.-)$") -- todo: lpeg + end + if start and stop then + local oldvisualizer = specifications[method] or specifications.default + local oldparser = oldvisualizer.direct + local newparser = visualizers.registerescapepattern(newname,start,stop,oldparser) + m = visualizers.register(newname, { + parser = newparser, + handler = oldvisualizer.handler, + }) + else + -- visualizers.register(newname,n) + specifications[newname] = m -- old spec so that we have one lookup only + end + end + else + m = specifications[method] or specifications.default + end + local n = m and m[nature] + settings.currentnature = nature or "display" -- tricky ... why sometimes no nature + if n then + n(content,settings) + else + fallback(content,1,settings) + end + end +end + +visualizers.visualize = visualize +visualizers.getvisualizer = getvisualizer + +function visualizers.visualizestring(method,content,settings) + visualize(method,"inline",content) +end + +function visualizers.visualizefile(method,name,settings) + visualize(method,"display",resolvers.loadtexfile(name),settings) +end + +function visualizers.visualizebuffer(method,name,settings) + visualize(method,"display",buffers.content(name),settings) +end + +-- -- + +local space = C(patterns.space) * CargOne / f_space +local newline = C(patterns.newline) * CargOne / f_newline +local emptyline = C(patterns.emptyline) * CargOne / f_emptyline +local beginline = C(patterns.beginline) * CargOne / f_beginline +local anything = C(patterns.somecontent^1) * CargOne / f_default + +local verbosed = (space + newline * (emptyline^0) * beginline + anything)^0 + +local function write(s,settings) -- bad name + lpegmatch(verbosed,s,1,settings or false) +end + +visualizers.write = write +visualizers.writenewline = f_newline +visualizers.writeemptyline = f_emptyline +visualizers.writespace = f_space +visualizers.writedefault = f_default + +function visualizers.writeargument(...) + context("{") -- If we didn't have tracing then we could + write(...) -- use a faster print to tex variant for the + context("}") -- { } tokens as they always have ctxcatcodes. 
+end diff --git a/tex/context/base/buff-ver.mkiv b/tex/context/base/buff-ver.mkiv index 3be410300..2c478d46d 100644 --- a/tex/context/base/buff-ver.mkiv +++ b/tex/context/base/buff-ver.mkiv @@ -13,166 +13,147 @@ \writestatus{loading}{ConTeXt Buffer Macros / Verbatim} -%D We can optimize esp the initializations a bit. +\registerctxluafile{buff-ver}{1.001} \unprotect -\ifdefined\startlinenumbering\else \let\startlinenumbering \relax \fi -\ifdefined\stoplinenumbering \else \let\stoplinenumbering \relax \fi -\ifdefined\setuplinenumbering\else \unexpanded\def\setuplinenumbering[#1]{} \fi - \definesystemattribute[verbatimline][public] \appendtoksonce \attribute\verbatimlineattribute \attributeunsetvalue \to \everyforgetall -% D \macros -% D {iflinepar} -% D -% D A careful reader will see that \type{\linepar} is reset. -% D This boolean can be used to determine if the current line is -% D the first line in a pseudo paragraph and this boolean is set -% D after each empty line. The \type{\relax} can be used to -% D determine the end of the line when one implements a scanner -% D routine. -% -% will become obsolete +%D Initializations. -\newif\iflinepar +\newtoks\everyinitializeverbatim -% \type{ char} geeft bagger +\appendtoks + \the\everyresettypesetting + \resetfontfeature + \resetcharacterspacing +\to \everyinitializeverbatim -%D We are going to embed the general verbatim support macros in -%D a proper environment. First we show the common setup -%D macro, so we know what features are supported. The options -%D are hooked into the support macros via the \type{\obey} -%D macros. +\def\setverbatimspaceskip % to be checked: must happen after font switch + {\spaceskip\fontcharwd\font`x\relax + \xspaceskip\spaceskip} -\newif\ifslantedtypeactivated -\newif\ifslantedtypepermitted +\unexpanded\def\specialcontrolspace{\hskip\zeropoint\fastcontrolspace\hskip\zeropoint} -\def\switchslantedtype - {\ifslantedtypepermitted - \ifslantedtypeactivated - \slantedtypeactivatedfalse\tttf - \else - \slantedtypeactivatedtrue\ttsl - \fi - \fi} +\setvalue{\??tp:\c!lines:\v!yes }{\obeybreakpoints} +\setvalue{\??tp:\c!lines:\v!hyphenated}{\obeyhyphens} -\def\prettyidentifier {TEX} -\def\prettypalet {} +\setvalue{\??tp:\c!space:\v!on }{\let\obeyedspace\specialcontrolspace} +\setvalue{\??tp:\c!space:\v!stretch }{\let\obeyedspace\specialstretchedspace} +\setvalue{\??tp:\c!space:\v!normal }{} + +\setvalue{\??tp:\c!blank:\v!standard }{\ctxparskip} +\setvalue{\??tp:\c!blank:\v!small }{\smallskipamount} +\setvalue{\??tp:\c!blank:\v!medium }{\medskipamount} +\setvalue{\??tp:\c!blank:\v!big }{\bigskipamount} +\setvalue{\??tp:\c!blank:\v!halfline }{.5\baselineskip} +\setvalue{\??tp:\c!blank:\v!line }{\baselineskip} +\setvalue{\??tp:\c!blank:\v!none }{\zeropoint} + +\def\secondstageinitializetype + {\let\obeyedspace\specialobeyedspace + \csname\??tp:\c!space:\typeparameter\c!space\endcsname + \relax\the\everyinitializeverbatim\relax} -\def\installprettytype - {\dodoubleargument\doinstallprettytype} +\def\secondstageinitializetyping + {\let\obeyedspace\specialobeyedspace + \csname\??tp:\c!space:\typingparameter\c!space\endcsname + \csname\??tp:\c!lines:\typingparameter\c!lines\endcsname + \relax\the\everyinitializeverbatim\relax} -\def\doinstallprettytype[#1][#2]% map #1 onto #2 - {\uppercasestring#1\to\asciia - \uppercasestring#2\to\asciib - \setevalue{\??ty\??ty\asciia}{\asciib}} +\def\firststageinitializetype + {\let\obeylines\ignorelines + \ignorehyphens % default + \dosettypeattributes\c!style\c!color + 
\setcatcodetable\vrbcatcodes} -\unexpanded\def\setupprettiesintype#1% - {\uppercasestring#1\to\ascii - \edef\prettyidentifier{\executeifdefined{\??ty\??ty\ascii}{TEX}}% - \begingroup - \ctxlua{buffers.loadvisualizer("\ascii")}% - \endgroup} +\let\doverbatimsetlinemargin\relax -\unexpanded\def\setupprettytype - {\processingverbatimtrue % will move - \ctxlua{buffers.visualizers.reset()}} +\def\dodoverbatimsetlinemargin + {\getpagestatus + \ifrightpage + \hskip\typingparameter\c!oddmargin\relax + \else + \hskip\typingparameter\c!evenmargin\relax + \fi} -\def\setverbatimspaceskip - {\setbox\scratchbox\hbox{x}% - \spaceskip\wd\scratchbox - \xspaceskip\spaceskip} +\def\docheckverbatimmargins + {\scratchskip\typingparameter\c!oddmargin\relax + \ifzeropt\scratchskip + \else + \let\doverbatimsetlinemargin\dodoverbatimsetlinemargin + \fi + \scratchskip\typingparameter\c!evenmargin\relax + \ifzeropt\scratchskip + \else + \let\doverbatimsetlinemargin\dodoverbatimsetlinemargin + \fi + \ifx\doverbatimsetlinemargin\relax + \doadaptleftskip{\typingparameter\c!margin}% + \fi} -\let\obeycharacters\relax % tab mess can go +\def\firststageinitializetyping + {\switchtobodyfont[\typingparameter\c!bodyfont]% + \docheckverbatimmargins + \dosettypingattributes\c!style\c!color + \doifsomething{\typingparameter\c!align}{\setupalign[\typingparameter\c!align]}% + \ignorehyphens} % default -\setvalue{\??tp:\c!lines:\v!yes }{\obeybreakpoints} -\setvalue{\??tp:\c!lines:\v!hyphenated}{\obeyhyphens} +%D \macros +%D {definetype,setuptype} +%D +%D Specific inline verbatim commands can be defined with the +%D following command. -%setvalue{\??tp:\c!empty:\v!yes }{\obeyemptylines} -%setvalue{\??tp:\c!empty:\v!all }{\obeyallemptylines} - -\setvalue{\??tp:\c!option:\v!none }{\let\obeycharacters\relax} -\setvalue{\??tp:\c!option:\empty }{\let\obeycharacters\relax} -\setvalue{\??tp:\c!option:\v!color }{\setupprettiesintype{\typingparameter\c!option}% - \let\obeycharacters\setupprettytype} -\setvalue{\??tp:\c!option:\v!normal }{\let\obeycharacters\setupgroupedtype} -\setvalue{\??tp:\c!option:\v!slanted }{\let\obeycharacters\setupslantedtype} -\setvalue{\??tp:\c!option:\s!unknown }{\setupprettiesintype{\typingparameter\c!option}% - \let\obeycharacters\setupprettytype} -%setvalue{\??tp:\c!option:\v!commands }{\def\obeycharacters{\setupcommandsintype}} - -\def\dosetverbatimfont - {\redoconvertfont\dosetfontattribute{\currenttypingclass\currenttyping}\c!style - \normalnoligatures\font} - -\unexpanded\def\setupcommonverbatim - {\let\prettyidentifier\s!default - % - \def\prettyidentifierfont{\typingparameter\c!icommand}% - \def\prettyvariablefont {\typingparameter\c!vcommand}% - \def\prettynaturalfont {\typingparameter\c!ccommand}% - % - \ignorehyphens % default - \getvalue{\??tp:\c!lines:\typingparameter\c!lines}% - \getvalue{\??tp:\c!empty:\typingparameter\c!empty}% - \getvalue{\??tp:\c!option:\ifcsname\??tp:\c!option:\typingparameter\c!option\endcsname\typingparameter\c!option\else\s!unknown\fi}% - \setupverbatimcolor} +\installcommandhandler{\??ty}{type}{\??ty} -\newtoks \everyinitializeverbatim +\appendtoks + \normalexpanded{\dodefinetype{\currenttype}}% +\to \everydefinetype + +\unexpanded\def\dodefinetype#1% + {\setuvalue{#1}{\dotype{#1}}} \appendtoks - \the\everyresettypesetting -\to \everyinitializeverbatim + \doinitializevisualizer{\typeparameter\c!option}% +\to \everysetuptype -\def\ignorebeginofpretty [#1]{} -\def\ignoreendofpretty {} - -\def\doverbatimbop{\bgroup\beginofpretty} -\def\doverbatimeop{\endofpretty\egroup} 
-\def\doverbatimsop{\endofpretty\egroup\bgroup\beginofpretty} - -\let\noverbatimbop\ignorebeginofpretty -\let\noverbatimeop\ignoreendofpretty -\let\noverbatimsop\ignorebeginofpretty - -\setvalue{\??tp:\c!space:\v!on }{\let\obs\fastcontrolspace} -\setvalue{\??tp:\c!space:\v!stretch}{\let\obs\specialstretchedspace - \let\specialobeyedspace\specialstretchedspace % I need to clean - \let\obeyedspace\specialstretchedspace} % up this mess. -\setvalue{\??tp:\c!space:\v!normal }{\let\obs\specialobeyedspace} - -\def\doinitializeverbatim % todo: combine all in one call is faster - {\ctxlua{buffers.visualizers.reset()}% - \executeifdefined{\??tp:\c!space:\typingparameter\c!space}{\let\obs\specialobeyedspace}% - \edef\askedverbatimtab{\typingparameter\c!tab}% - \doifelse\askedverbatimtab\v!no - {\ctxlua{buffers.settablength(1)}} - {\doifnumberelse{\askedverbatimtab} - {\ctxlua{buffers.settablength(\askedverbatimtab)}} - {\ctxlua{buffers.settablength()}}}% - \ctxlua{buffers.doifelsevisualizer("\prettyidentifier")} - {\ctxlua{buffers.setvisualizer("\prettyidentifier")}% - \let\bop\doverbatimbop - \let\eop\doverbatimeop - \let\sop\doverbatimsop}% - {\ctxlua{buffers.setvisualizer("\v!typing")}% or resetdefaultvisualizer - \let\bop\noverbatimbop - \let\eop\noverbatimeop - \let\sop\noverbatimsop}% - \relax\the\everyinitializeverbatim\relax} +%D \macros +%D {definetyping,setuptyping} +%D +%D For most users the standard \type{\start}||\type{\stop}||pair +%D will suffice, but for documentation purposes the next +%D definition command can be of use: +%D +%D \starttyping +%D \definetyping[extratyping][margin=3em] +%D +%D \startextratyping +%D these extra ones are indented by 1 em +%D \stopextratyping +%D \stoptyping +%D +%D The definitions default to the standard typing values. + +\installcommandhandler{\??tp}{typing}{\??tp} \appendtoks - \resetfontfeature - \resetcharacterspacing -\to \everyinitializeverbatim + \normalexpanded{\dodefinetyping{\currenttyping}}% +\to \everydefinetyping + +\unexpanded\def\dodefinetyping#1% + {\setvalue{\e!start#1}{\dostarttyping{#1}}% + \setvalue{\e!stop #1}{\dostoptyping {#1}}} + +\appendtoks + \doinitializevisualizer{\typingparameter\c!option}% we can check at the tex end +\to \everysetuptyping -% BEWARE: the noligatures will globally change the verbatim font's behaviour +\def\doinitializevisualizer#1% + {\ifproductionrun\ctxlua{visualizers.load("#1")}\fi} -% test case: -% % \definetype[typeTEX][option=tex] % % \typeTEX|\example---oeps|. this---ligates---again. @@ -219,33 +200,9 @@ %D %D \typebuffer \start \getbuffer \stop -\unexpanded\def\setupcommandsintype - {\ctxlua{buffers.setescapepair("\currenttyping",\!!bs\typingparameter\c!escape\!!es)}} - -\appendtoks - \setupcommandsintype -\to \everyinitializeverbatim - -\unexpanded\def\setupslantedtype - {\slantedtypepermittedtrue} - -\ifx\setupprettytype \undefined \let\setupprettytype \relax \fi -\ifx\setupslantedtype \undefined \let\setupslantedtype \relax \fi -\ifx\setupgroupedtype \undefined \let\setupgroupedtype \relax \fi -\ifx\normalnoligatures\undefined \let\normalnoligatures\gobbleoneargument \fi - -%D The verbatim commands have a rather long and turbulent -%D history. Most users of \CONTEXT\ probably will never use -%D some of the features, but I've kept in mind that when one is -%D writing a users manual, about everything can and undoubtly -%D will be subject to a verbatim treatment. 
-%D %D Verbatim command are very sensitive to argument processing, %D which is a direct result of the \CATCODES\ being fixed at -%D reading time. With our growing understanding of \TEX, -%D especially of the mechanism that can be used for looking -%D ahead and manipulating \CATCODES, the verbatim support -%D became more and more advanced and natural. +%D reading time. %D %D Typesetting inline verbatim can be accomplished by %D \type{\type}, which in this sentence was typeset by saying @@ -262,14 +219,11 @@ %D \type{<<} and \type{>>} as delimiters. This alternative can %D be used in situations where slanted typeseting is needed. -% todo: we can use \letter... here: - \def\lesscharacter {<} \def\morecharacter {>} - -\chardef\texescape = `\\ -\chardef\leftargument = `\{ -\chardef\rightargument = `\} +\let\texescape \textbackslash +\let\leftargument \textbraceleft +\let\rightargument \textbraceright %D \macros %D {type} @@ -277,17 +231,17 @@ %D We define \type{\type} as a protected command. This command %D has several invocations: grouped, wirt boundary characters, %D and with font switches. - -% \starttyping -% normal: \par \type{xx<<..xx..<> >>..>>xx} \par \type<<....>> \par \type<<..<>..>> \par -% normal: \par \type{xx<..xx.. >..>xx} \par \type{<....>} \par \type{<....>} -% \setuptype[option=slanted] -% slanted: \par \type{xx<<..sl..<> xx>>..sl..>>xx} \par \type<<..xx..>> \par \type<<..<>..>> \par -% slanted: \par \type{xx<<..sl.. xx>..sl..>>xx} \par \type<<..xx..>> \par \type<<....>> \par -% \setuptype[option=none] -% none: \par \type{xx<<..xx..<> >>..>>xx} \par \type<<....>> \par \type<<..<>..>> \par -% \stoptyping - +%D +%D \starttyping +%D normal: \par \type{xx<<..xx..<> >>..>>xx} \par \type<<....>> \par \type<<..<>..>> \par +%D normal: \par \type{xx<..xx.. >..>xx} \par \type{<....>} \par \type{<....>} +%D \setuptype[option=slanted] +%D slanted: \par \type{xx<<..sl..<> xx>>..sl..>>xx} \par \type<<..xx..>> \par \type<<..<>..>> \par +%D slanted: \par \type{xx<<..sl.. xx>..sl..>>xx} \par \type<<..xx..>> \par \type<<....>> \par +%D \setuptype[option=none] +%D none: \par \type{xx<<..xx..<> >>..>>xx} \par \type<<....>> \par \type<<..<>..>> \par +%D \stoptyping +%D %D When writing the manual to \CONTEXT\ and documenting this %D source we needed to typeset \type{<<} and \type{>>}. Because %D we wanted to do this in the natural way, we've adapted the @@ -296,7 +250,7 @@ %D further and treats the lone \type{<<} and \type{>>} a bit %D different. The \type {\null} prevents ligatures, which %D unfortunately turn up in Lucida fonts. - +%D %D The following lines show what happens when we set %D \type {option=commands}. 
%D @@ -316,127 +270,90 @@ %D \typebuffer \unexpanded\def\type{\dotype\empty} +\unexpanded\def\typ {\dotyp \empty} -\def\dotype#1% was \dotype - {\dontleavehmode \bgroup - % new, \strut enables leading space in \type { abc } at par start / begstrut - % else no hyphenation (replaced by \dontleavehmode which saves unboxing) - % \begstrut - \let\currenttypingclass\??ty - \edef\currenttyping{#1}% - \catcode`\<=\othercatcode - \catcode`\>=\othercatcode - \futurelet\next\dodotype} +\def\dotype#1% + {\dontleavehmode + \bgroup + \edef\currenttype{#1}% + \doifnextoptionalelse\redotype\dodotype} + +\def\redotype[#1]% + {\getparameters[\??ty\currenttype][#1]% + \dodotype} \def\dodotype + {\futurelet\next\dodotypeone} + +\def\dotyp#1% + {\dontleavehmode + \bgroup + \edef\currenttype{#1}% + \dolettypeparameter\v!lines\v!hyphenated + \let\specialobeyedspace\specialstretchedspace + \doifnextoptionalelse\redotype\dodotype} + +\def\dodotypeone {\ifx\next\bgroup \@EA\dodotypeA \else - \@EA\dodotypeAD + \@EA\dodotypetwo \fi} -\def\dodotypeAD +\def\dodotypetwo + {\catcode`<=\othercatcode % old precaution + \catcode`>=\othercatcode % old precaution + \futurelet\next\dodotypethree} + +\def\dodotypethree {\if\next<% - \doifelse{\typingparameter\c!option}\v!none{\@EA\dodotypeB}{\@EA\dodotypeC}% + \@EA\dodotypeB \else - \@EA\dodotypeD + \@EA\dodotypeC \fi} \def\dodotypeA - {\initializetype % probably too much - \verbatimcolor - \dosetverbatimfont - \setcatcodetable \typcatcodesa - \dodotypeAA} - -\def\dodotypeAA#1% - {\doinitializeverbatim - \def\obs{\obeyedspace}% - \dostarttagged\t!verbatim\currenttyping - \ctxlua{buffers.hooks.flush_inline(\!!bs\detokenize{#1}\!!es)}% - \dostoptagged - \egroup} + {\firststageinitializetype + \setcatcodetable\typcatcodesa + \dodotypenormal} \def\dodotypeB#1% - {\initializetype - \verbatimcolor - \dosetverbatimfont - \setcatcodetable \typcatcodesb - \dodotypeBB} - -\def\dodotypeBB#1% - {\doinitializeverbatim - \dostarttagged\t!verbatim\currenttyping - \ctxlua{buffers.visualizers.flushnested(\!!bs\detokenize{#1}\!!es,false)}% - \dostoptagged - \egroup - \gobbleoneargument} % grab last > + {\firststageinitializetype + \setcatcodetable\typcatcodesb + \dodotypenested} \def\dodotypeC#1% - {\initializetype - \verbatimcolor - \dosetverbatimfont - \setcatcodetable \typcatcodesb - \dodotypeCC} - -\def\dodotypeCC#1% - {\doinitializeverbatim - \ifx\obeycharacters\setupprettytype % temp hack, we need a proper signal - \dostarttagged\t!verbatim\currenttyping - \ctxlua{buffers.hooks.flush_inline([\!!bs\detokenize{#1}\!!es,true)}% - \dostoptagged - \else - \def\obs{\obeyedspace}% - \ctxlua{buffers.visualizers.flushnested(\!!bs\detokenize{#1}\!!es,true)}% - \fi - \egroup - \gobbleoneargument} % grab last > + {\firststageinitializetype + \setcatcodetable\typcatcodesc + \def\next##1#1{\dodotypenormal{##1}}% + \next} + +\def\dodotypenormal#1% + {\secondstageinitializetype + \dostarttagged\t!verbatim\currenttype + \ctxlua{buffers.typestring { + data = \!!bs\detokenize{#1}\!!es, + tab = "\typeparameter\c!tab", + visualizer = "\typeparameter\c!option", + }}% + \dostoptagged + \egroup} -\def\dodotypeD#1% - {\initializetype - \verbatimcolor - \dosetverbatimfont - \setcatcodetable \typcatcodesc % was a - \def\dodotypeDD##1#1{\dodotypeAA{##1}}% - \dodotypeDD} - -\def\dodotypeDD#1% - {\doinitializeverbatim - \dostarttagged\t!verbatim\currenttyping - \ctxlua{buffers.hooks.flush_inline(\!!bs\detokenize{#1}\!!es,true)}% +\def\dodotypenested#1% + {\secondstageinitializetype + 
\dostarttagged\t!verbatim\currenttype + \ctxlua{buffers.typestring { + data = \!!bs\detokenize{#1}\!!es, + tab = "\typeparameter\c!tab", + visualizer = "nested", % we force a special visualizer + option = "\typeparameter\c!option", % extra visualizer (maybe: nested,\typeparameter\c!option) + }}% \dostoptagged \egroup \gobbleoneargument} % grab last > -%D The neccessary initializations are done by calling -%D \type{\initializetype} which in return calls for the support -%D macro \type{\setupinlineverbatim}. - -\def\initializetype - {\let\obeylines\ignorelines - \setupcommonverbatim - %\dosetverbatimfont - %\setverbatimspaceskip - %\setupcopyverbatim % not needed - \setcatcodetable\vrbcatcodes} - %D \macros -%D {setuptype} -%D -%D Some characteristics of \type{\type} can be set up by: - -\unexpanded\def\setuptype - {\dodoubleempty\dosetuptype} - -\def\dosetuptype[#1][#2]% - {\ifsecondargument - \getparameters[\??ty#1][#2]% - \else - \getparameters[\??ty][#1]% - \fi} - -%D \macros -%D {typ,obeyhyphens,obeybreakpoints} +%D {obeyhyphens,obeybreakpoints} %D %D Although it's not clear from the macros, one character %D trait of this macros, which are build on top of the support @@ -464,12 +381,6 @@ \let\controlspace\specialcontrolspace \spaceskip.5em\relax} -\unexpanded\def\typ - {\bgroup - \let\@@tylines\v!hyphenated - \let\specialobeyedspace\specialstretchedspace - \futurelet\next\dodotype} - %D \macros %D {tex,arg,mat,dis} %D @@ -480,36 +391,9 @@ %D but we've decided not to use that slow and sometimes %D troublesome solution. Instead we stick to some 'old' %D \CONTEXT\ macros for typesetting typical \TEX\ characters. -%D -%D The next implementation is more clear but less versatile, -%D so we treated it for a beter one. -%D -%D \starttyping -%D \def\dospecialtype#1#2% -%D {\bgroup -%D \initializetype -%D \catcode`\{=\begingroupcatcode -%D \catcode`\}=\endgroupcatcode -%D \def\dospecialtype% -%D {\def\dospecialtype{#2\egroup}% -%D \bgroup -%D \aftergroup\dospecialtype -%D #1}% -%D \afterassignment\dospecialtype -%D \let\next=} -%D -%D \unexpanded\def\tex{\dospecialtype\texescape\relax} -%D \unexpanded\def\arg{\dospecialtype\leftargument\rightargument} -%D \unexpanded\def\mat{\dospecialtype\$\$} -%D \unexpanded\def\dis{\dospecialtype{\$\$}{\$\$}} -%D \stoptyping \def\setgroupedtype - {\let\currenttypingclass\??ty - \initializetype - \verbatimcolor - \dosetverbatimfont - %\setcatcodetable \typcatcodesa + {\firststageinitializetype \catcode`\{=\begingroupcatcode \catcode`\}=\endgroupcatcode} @@ -523,105 +407,17 @@ \let\normaltextmat\mat \let\normaltextdis\dis -\def\astype - {\groupedcommand\dorawtype\relax} - -\def\dorawtype - {\let\currenttypingclass\??ty - \normalverbatimcolor % \verbatimcolor - \dosetverbatimfont} - -%D \macros -%D {starttyping} -%D -%D Display verbatim is realized far more easy, which is mostly -%D due to the fact that we use \type{\stop...} as delimiter. -%D The implementation inherits some features, for instance the -%D support of linenumbering, which can best be studied in the -%D documented support module. 
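For reference, the inline commands above hand their argument to the Lua side as one settings table; expanded, such a call comes down to something like the following (the field values are only an illustration, not taken from a real run):

buffers.typestring {
    data       = [[\framed{x}]], -- the detokenized argument, delimited by \!!bs ... \!!es
    tab        = "yes",          -- \typeparameter\c!tab
    visualizer = "",             -- \typeparameter\c!option
}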
- -\let\currenttyping \empty -\let\currenttypingclass\??ty % saveguard - -\def\typingparameter#1% - {\ifcsname\currenttypingclass\currenttyping#1\endcsname - \csname\currenttypingclass\currenttyping#1\endcsname - \else\ifcsname\currenttypingclass#1\endcsname - \csname\currenttypingclass#1\endcsname - \fi\fi} - -\def\settypingparameter#1#2% - {\setvalue{\currenttypingclass\currenttyping#1}{#2}} - -\def\setxtypingparameter#1#2% - {\setxvalue{\currenttypingclass\currenttyping#1}{#2}} - -\setvalue{\??tp:\c!blank:\v!standard}{\ctxparskip} -\setvalue{\??tp:\c!blank:\v!small }{\smallskipamount} -\setvalue{\??tp:\c!blank:\v!medium }{\medskipamount} -\setvalue{\??tp:\c!blank:\v!big }{\bigskipamount} -\setvalue{\??tp:\c!blank:\v!halfline}{.5\baselineskip} -\setvalue{\??tp:\c!blank:\v!line }{\baselineskip} -\setvalue{\??tp:\c!blank:\v!none }{\zeropoint} - -\def\doopenupverbatimlineindeed - {\getpagestatus - \ifrightpage - \hskip\typingparameter\c!oddmargin\relax - \else - \hskip\typingparameter\c!evenmargin\relax - \fi} - -\def\initializetyping - {%\donefalse - \switchtobodyfont[\typingparameter\c!bodyfont]% - \donefalse - \scratchskip\typingparameter\c!oddmargin\relax - \ifzeropt\scratchskip\else\donetrue\fi - \scratchskip\typingparameter\c!evenmargin\relax - \ifzeropt\scratchskip\else\donetrue\fi - \ifdone - \let\doopenupverbatimline\doopenupverbatimlineindeed - \else - \doadaptleftskip{\typingparameter\c!margin}% - \fi - % no symbolic blanks ! - \edef\!!stringa{\executeifdefined{\??bo\typingparameter\c!blank}{\typingparameter\c!blank}}% - \scratchskip\executeifdefined{\??tp:\c!blank:\!!stringa}\!!stringa\relax - \ifgridsnapping - % this will be adapted - \ifdim\scratchskip=.5\baselineskip\relax - \edef\verbatimbaselineskip{\the\scratchskip}% new - \else - \edef\verbatimbaselineskip{\the\baselineskip}% - \fi - \else - \edef\verbatimbaselineskip{\the\scratchskip}% - \fi -\doifsomething{\typingparameter\c!align}{\setupalign[\typingparameter\c!align]}% - \setupcommonverbatim} +\def\astype{\groupedcommand{\dosettypeattributes\c!style\c!color}{}} %D The basic display verbatim commands are defined in an %D indirect way. As we will see, they are a specific case of a %D more general mechanism. 
-\newif\ifoptimizeverbatim \optimizeverbatimtrue - -\let \beginofverbatimlines \relax -\let \endofverbatimlines \relax - -\def\doverbatimnobreak - {\ifoptimizeverbatim\penalty500 \fi} - -\def\doverbatimgoodbreak - {\ifoptimizeverbatim\penalty\linepenalty\fi} - -% we need this hack because otherwise verbatim skips -% the first line (everything after the initial command) +\let\beginofverbatimlines\relax +\let\endofverbatimlines \relax \def\dostarttyping#1% tricky non standard lookahead {\bgroup - \let\currenttypingclass\??tp \edef\currenttyping{#1}% \obeylines \futurelet\nexttoken\dodostarttyping} @@ -636,40 +432,37 @@ \def\nododostarttyping {\dododostarttyping[]} -\def\dotypefileverbatim - {\doinitializeverbatim - \beginofverbatimlines - \dostarttagged\t!verbatimblock\currenttyping - \ctxlua{buffers.typefile("\readfilename","\typingparameter\c!strip","\typingparameter\c!range","\currentregime")}% - \dostoptagged - \endofverbatimlines} - -\def\dotypefilelinesverbatim#1#2% - {#1\dotypefileverbatim#2} +\def\dododostarttyping[#1]% + {\typingparameter\c!before + \startpacked[\v!blank] + \doifassignmentelse{#1} + {\setuptyping[\currenttyping][#1]} + {\doifinset\v!continue{#1}{\setuptyping[\currenttyping][\c!continue=\v!yes]}}% + \dosetuptypelinenumbering + \firststageinitializetyping + \normalexpanded{\dotypeblockverbatim{\e!start\currenttyping}{\e!stop\currenttyping}}} \unexpanded\def\dotypeblockverbatim#1#2% - {\dowithbuffer{_typing_}{#1}{#2} - {} - {\doinitializeverbatim - \beginofverbatimlines - \dostarttagged\t!verbatimblock\currenttyping - \ctxlua{buffers.type("_typing_","\typingparameter\c!strip","\typingparameter\c!range")}% - \dostoptagged - \endofverbatimlines - \csname#2\endcsname}} + {\dowithbuffer{_typing_}{#1}{#2}{}{\dodotypeblockverbatim{#1}{#2}}} -\def\dododostarttyping[#1]% - {\typingparameter\c!before - \startpacked % includes \bgroup - \dosetuptypelinenumbering{#1}% - \initializetyping - \dosetverbatimfont - \startverbatimcolor - \normalexpanded{\dotypeblockverbatim{\e!start\currenttyping}{\e!stop\currenttyping}}} % was s!start +\def\dodotypeblockverbatim#1#2% + {\secondstageinitializetyping + \beginofverbatimlines + \dostarttagged\t!verbatimblock\currenttyping + \ctxlua{buffers.typebuffer { + name = "_typing_", + strip = "\typingparameter\c!strip", + range = "\typingparameter\c!range", + tab = "\typingparameter\c!tab", + visualizer = "\typingparameter\c!option", + escape = \!!bs\typingparameter\c!escape\!!es, + }}% + \dostoptagged + \endofverbatimlines + \csname#2\endcsname} \def\dostoptyping#1% hm, currenttyping - {\stopverbatimcolor - \stoppacked % includes \egroup + {\stoppacked \typingparameter\c!after \normalexpanded{\egroup\checknextindentation[\typingparameter\c!indentnext]}% \dorechecknextindentation} @@ -710,8 +503,7 @@ %D %D \starttext %D -%D % \typefile[file][range={3,6}]{whatever.tex} -%D +%D % \typefile[file][range={3,6}] {whatever.tex} %D % \typefile[file][range={3,+2}]{whatever.tex} %D %D \typefile[file][range={label:start:one,label:stop:one}]{whatever.tex} @@ -719,128 +511,6 @@ %D \stoptext %D \stoptyping -%D \macros -%D {setuptyping} -%D -%D The setup of typing accepts two arguments. The optional -%D first one identifies the user defined ones. If only one -%D argument is given, the values apply to both the standard -%D command \type{\starttyping} and \type{\typefile}. 
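The display case follows the same pattern: \dodotypeblockverbatim collects everything up to the matching stop command in the temporary buffer _typing_ and flushes it with one Lua call. Spelled out with illustrative values (roughly the defaults set further down in this file), that call amounts to:

buffers.typebuffer {
    name       = "_typing_",
    strip      = "no",    -- \typingparameter\c!strip, "auto" or a number are also meant to work
    range      = "",      -- e.g. "3,6" or "3,+2"
    tab        = "ascii", -- \typingparameter\c!tab
    visualizer = "none",  -- \typingparameter\c!option
    escape     = [[]],    -- \typingparameter\c!escape, e.g. "BTEX,ETEX"
}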
- -\def\dosetuptyping[#1][#2]% - {\ifsecondargument - \getparameters[\??tp#1][#2]% - \else - \getparameters[\??tp][#1]% - \fi} - -\unexpanded\def\setuptyping - {\dodoubleempty\dosetuptyping} - -%D \macros -%D {definetype} -%D -%D Specific inline verbatim commands can be defined with the -%D following command. - -\unexpanded\def\definetype - {\dodoubleempty\dodefinetype} - -\def\dodefinetype[#1][#2]% - {\setuvalue{#1}{\dotype{#1}}% - \getparameters[\??ty#1][#2]} - -%D \macros -%D {definetyping} -%D -%D For most users the standard \type{\start}||\type{\stop}||pair -%D will suffice, but for documentation purposes the next -%D definition command can be of use: -%D -%D \starttyping -%D \definetyping[extratyping][margin=3em] -%D -%D \startextratyping -%D these extra ones are indented by 1 em -%D \stopextratyping -%D \stoptyping -%D -%D The definitions default to the standard typing values. - -% TODO: parent etc ! - -\def\presettyping[#1][#2]% brrr also use parent here - {\copyparameters[\??tp#1][\??tp][\c!color,\c!style]% - \getparameters [\??tp#1][#2]} - -\def\dodefinetyping[#1][#2]% - {\setvalue{\e!start#1}{\dostarttyping{#1}}% - \setvalue{\e!stop #1}{\dostoptyping {#1}}% - \presettyping[#1][#2]} - -\unexpanded\def\definetyping - {\dodoubleempty\dodefinetyping} - -%D We can use some core color commands. These are faster than -%D the standard color switching ones and work ok on a line by -%D line basis. -%D -%D \starttyping -%D \unexpanded\def\setupverbatimcolor% -%D {\edef\prettypalet{\prettyidentifier\typingparameter\c!palet}% -%D \def\beginofpretty[##1]{\startcolormode{\prettypalet:##1}}% -%D \def\endofpretty {\stopcolormode}} -%D \stoptyping -%D -%D Since we support a global color too, the folowing -%D definition is better: - -\def\normalbeginofpretty [#1]{\startcolor[\prettypalet:#1]}% -\def\normalendofpretty {\stopcolor} -\def\normalstartverbatimcolor{\startcolor[\typingparameter\c!color]}% -\def\normalstopverbatimcolor {\stopcolor} -\def\normalverbatimcolor {\getvalue{\typingparameter\c!color}}% command ! - -\unexpanded\def\setupnormalprettyverbatim - {\edef\prettypalet{\prettyidentifier\prettypalet}% - \let\beginofpretty \normalbeginofpretty - \let\endofpretty \normalendofpretty - \let\startverbatimcolor\normalstartverbatimcolor - \let\stopverbatimcolor \normalstopverbatimcolor - \let\verbatimcolor \normalverbatimcolor} - -\unexpanded\def\setupignoreprettyverbatim - {\let\prettypalet \empty - \let\beginofpretty \ignorebeginofpretty - \let\endofpretty \ignoreendofpretty - \let\startverbatimcolor\normalstartverbatimcolor - \let\stopverbatimcolor \normalstopverbatimcolor - \let\verbatimcolor \normalverbatimcolor} - -\unexpanded\def\setupverbatimcolor - {\edef\prettypalet{\typingparameter\c!palet}% - \ifx\prettypalet\empty - \setupignoreprettyverbatim - \else - \setupnormalprettyverbatim - \fi} - -\let\beginofpretty \ignorebeginofpretty -\let\endofpretty \ignoreendofpretty -\let\prettypalet \empty -\let\startverbatimcolor\relax -\let\stopverbatimcolor \relax -\let\verbatimcolor \relax - -%D \macros -%D {EveryPar, EveryLine, iflinepar} -%D -%D One of the features of these commands is the support of -%D \type{\EveryPar}, \type{\EveryLine} and \type{\iflinepar}. -%D In the documentation of the verbatim support module we give -%D some examples of line- and paragraph numbering using these -%D macros. - %D \macros %D {typefile} %D @@ -849,118 +519,92 @@ %D setup values are inherited from display verbatim. 
%D The implementation of \type{\typefile} is straightforward: -% new feature (not yet 100\% ok) -% -% \setuptyping[file][numbering=file] -% -% \typefile[start=2,nlines=3]{zapf} -% \typefile[start=continue,nlines=13]{zapf} -% \typefile{zapf} -% -% \setuptyping[file][numbering=line] -% -% \typefile[start=4,step=3]{zapf} -% \typefile{zapf} +% [category] [settings] {name} % for historic reasons, all filenames are {} \def\typefile {\dodoubleempty\dotypefile} \def\dotypefile[#1][#2]#3% - {\ifsecondargument - \dodotypefile[#1][#2]{#3}% + {\bgroup + \ifsecondargument + \setuptyping[#1][#2]% + \dododotypefile{#1}{#3}% \else\iffirstargument \doifassignmentelse{#1} - {\dodotypefile[\v!file][#1]{#3}} - {\dodotypefile[#1][]{#3}}% + {\setuptyping[\v!file][#1] + \dododotypefile\v!file{#3}} + {\dododotypefile{#1}{#3}}% \else - \dodotypefile[\v!file][]{#3}% - \fi\fi} - -\def\dosetuptypelinenumbering#1% fuzzy - {%\ifcsname\currenttypingclass\currenttyping\c!start\endcsname \else - % \setuptyping[\currenttyping][\c!start=1,\c!stop=,\c!step=1,\c!continue=\v!no,\c!nlines=]% - %\fi - \doifassignmentelse{#1}{\setuptyping[\currenttyping][#1]}\donothing - \doifelse{\typingparameter\c!numbering}\v!file - {% kind of special: filters lines ! - \setuplinenumbering[\currenttyping][\c!method=\v!file]% - \donetrue} - {\doifelse{\typingparameter\c!numbering}\v!line - {\doifinset\v!continue{#1}{\setuptyping[\currenttyping][\c!continue=\v!yes]}% fails: \settypingparameter\c!continue{\v!yes} - \donetrue} - {\donefalse}}% - \ifdone - \edef\beginofverbatimlines{\noexpand\startlinenumbering - [\currenttyping]% - [\c!continue=\typingparameter\c!continue, - \c!method=\v!type, - \c!start=\typingparameter\c!start, - \c!stop=\typingparameter\c!stop, % ? - \c!step=\typingparameter\c!step]% - }% - \def\endofverbatimlines{\stoplinenumbering}% - \fi} - -\def\reporttypingerror#1% temp hack - {\blank - \dontleavehmode\hbox\bgroup - \expanded{\defconvertedargument\noexpand\ascii{#1}}% - \tttf[\makemessage\m!verbatims1\ascii]% - \showmessage\m!verbatims1\ascii - \egroup - \blank} - -\def\dosometyping#1#2#3#4#5% - {\bgroup - \let\currenttypingclass\??tp - \edef\currenttyping{#1}% - \typingparameter\c!before - \startpacked % includes \bgroup - \dosetuptypelinenumbering{#2}% - \doifinset{\typingparameter\c!option}{\v!commands,\v!slanted,\v!normal} - {\setuptyping[#1][\c!option=\v!none]}% - \doif{\typingparameter\c!option}\v!color - {\expandafter\aftersplitstring#3\at.\to\prettyidentifier - \settypingparameter\c!option{\prettyidentifier}}% - \initializetyping - \dosetverbatimfont - \startverbatimcolor - \scratchcounter - \ifcsname\currenttypingclass#3\v!global\c!start\endcsname - \numexpr\csname\currenttypingclass#3\v!global\c!start\endcsname+\plusone\relax - \else - \plusone - \fi - \setxvalue{\currenttypingclass#3\v!global\c!start}{\the\scratchcounter}% no direct setxvalue as it defines beforehand - \doifelsenothing{\typingparameter\c!start} - {#4} - {\doif{\typingparameter\c!start}\v!continue - {\setevalue{\currenttypingclass#1\c!start}% - {\getvalue{\currenttypingclass#3\v!global\c!start}}}% - \doifelsenothing{\typingparameter\c!stop} - {\doifelsenothing{\typingparameter\c!nlines} - {#4} - {\setxvalue{\currenttypingclass#3\v!global\c!start}% - {\the\numexpr\typingparameter\c!start+\typingparameter\c!nlines+\minusone\relax}% - #5{\typingparameter\c!start}{\getvalue{\currenttypingclass#3\v!global\c!start}}}}% - {#5{\typingparameter\c!start}{\typingparameter\c!stop}}}% - \stopverbatimcolor - \stoppacked - \typingparameter\c!after + 
\dododotypefile\v!file{#3}% + \fi\fi \egroup} +\def\dododotypefile#1#2% + {\doifelsetypingfile{#2} + {\dodododotypefile{#1}\readfilename} + {\reporttypingerror{#2}}} + \def\doifelsetypingfile#1% sets \readfilename (we will make this proper mkiv i.e. less messy) {\doiflocfileelse{#1} {\firstoftwoarguments} {\doifinputfileelse{#1} -% {\def\readfilename{\pathplusfile\filepath{#1}}\firstoftwoarguments} % messy, looks wrong too {\def\readfilename{#1}\firstoftwoarguments} % messy, looks wrong too {\secondoftwoarguments}}} -\def\dodotypefile[#1][#2]#3% - {\doifelsetypingfile{#3} - {\dosometyping{#1}{#2}{#3}\dotypefileverbatim\dotypefilelinesverbatim} - {\reporttypingerror{#3}}} +\def\dodododotypefile#1#2% category name + {\edef\currenttyping{#1}% + \typingparameter\c!before + \startpacked[\v!blank] + \dosetuptypelinenumbering + \firststageinitializetyping + \secondstageinitializetyping + \beginofverbatimlines + \dostarttagged\t!verbatimblock\currenttyping + \ctxlua{buffers.typefile { + name = "#2", + strip = "\typingparameter\c!strip", + range = "\typingparameter\c!range", + regime = "\currentregime", + tab = "\typingparameter\c!tab", + visualizer = "\typingparameter\c!option", + }}% + \dostoptagged + \endofverbatimlines + \stoppacked + \typingparameter\c!after} + +%D Line numbering: + +\newconditional\verbatimnumberinglines + +\def\beginofverbatimlinenumbering + {\startlinenumbering + [\currenttyping]% + [\c!continue=\typingparameter\c!continue, + \c!method=\v!type, + \c!start=\typingparameter\c!start, + \c!stop=\typingparameter\c!stop, % ? + \c!step=\typingparameter\c!step]} + +\def\endofverbatimlinenumbering + {\stoplinenumbering} + +\def\dosetuptypelinenumbering + {\ifdefined\startlinenumbering + \edef\currenttypingnumbering{\typingparameter\c!numbering}% + \ifx\currenttypingnumbering\v!file + \setuplinenumbering[\currenttyping][\c!method=\v!file]% + \settrue\verbatimnumberinglines + \else\ifx\currenttypingnumbering\v!line + \settrue\verbatimnumberinglines + \else + \setfalse\verbatimnumberinglines + \fi\fi + \ifconditional\verbatimnumberinglines + \let\beginofverbatimlines\beginofverbatimlinenumbering + \let\endofverbatimlines \endofverbatimlinenumbering + \fi + \fi} %D \macros %D {filename} @@ -975,9 +619,12 @@ \unexpanded\def\filename#1{{\tttf\hyphenatedfilename{#1}}} -%D And a bonus macro: +%D \macros +%D {verbatim} +%d +%D And a bonus macro, an unexpanded detokenize: -\def\verbatim#1{\defconvertedargument\ascii{#1}\ascii} +\unexpanded\def\verbatim#1{\detokenize{#1}} %D The setups for display verbatim and file verbatim are %D shared. 
One can adapt the extra defined typing environments, @@ -987,224 +634,182 @@ \setuptyping [ \c!before=\blank, \c!after=\blank, - \c!bodyfont=, - \c!color=, + %\c!bodyfont=, + %\c!color=, \c!space=\v!off, \c!page=\v!no, \c!tab=\s!ascii, \c!option=\v!none, - \c!palet=colorpretty, \c!text=\v!no, \c!style=\tttf, - \c!icommand=\ttsl, - \c!vcommand=, - \c!ccommand=\tttf, \c!indentnext=\v!yes, \c!margin=\!!zeropoint, \c!evenmargin=\!!zeropoint, \c!oddmargin=\!!zeropoint, \c!blank=\v!line, - \c!escape=, % yes | no | START,STOP BTEX,ETEX + %\c!escape=, % yes | no | {START,STOP} | default when yes: {BTEX,ETEX} \c!numbering=\v!no, - \c!lines=, - \c!range=, - \c!empty=, + %\c!lines=, + %\c!range=, \c!start=1, - \c!stop=, + %\c!stop=, \c!step=1, - \c!continue=, - \c!strip=\v!no, % auto or number - \c!nlines=] + %\c!continue=, + \c!strip=\v!no] %auto or number \definetyping[\v!typing] -\presettyping[\v!file][] - -% \setuptyping % not needed -% [\v!file] -% [\c!start=1, -% \c!stop=, -% \c!step=1, -% \c!continue=, -% \c!nlines=] +\setuptyping[\v!file] [\s!parent=\??tp\v!typing] % we don't want \start..\stop overload +\setuptyping[\v!buffer][\s!parent=\??tp\v!file] % we don't want \start..\stop overload %D The setups for inline verbatim default to: \setuptype [ \c!space=\v!off, - \c!color=, + %\c!color=, \c!style=\tt\tf, % \tttf gives problems with {\tx \type...} + %\c!option=\v!normal, \c!page=\v!no, - \c!tab=\v!yes, - \c!palet=colorpretty, - \c!option=\v!normal] - -%D Beware: only a few are currently (re)implemented in \MKIV. - -\definetyping[RAW] [\c!option=RAW] -\definetyping[MP] [\c!option=MP] % done -\definetyping[PL] [\c!option=PL] -\definetyping[PM] [\c!option=PL] -\definetyping[JS] [\c!option=JS] -\definetyping[JV] [\c!option=JV] -\definetyping[SQL] [\c!option=SQL] -\definetyping[TEX] [\c!option=TEX] % done -\definetyping[PAS] [\c!option=PAS] -\definetyping[PASCAL][\c!option=PAS] -\definetyping[MOD] [\c!option=PAS] -\definetyping[MODULA][\c!option=PAS] -\definetyping[DELPHI][\c!option=PAS] -\definetyping[EIFFEL][\c!option=EIF] -\definetyping[XML] [\c!option=XML] -\definetyping[LUA] [\c!option=LUA] % done - -\installprettytype [RAW] [RAW] - -\installprettytype [TEX] [TEX] - -\installprettytype [PERL] [PL] -\installprettytype [PL] [PL] -\installprettytype [PM] [PL] - -\installprettytype [METAPOST] [MP] -\installprettytype [METAFONT] [MP] -\installprettytype [MP] [MP] -\installprettytype [MF] [MP] - -\installprettytype [JAVASCRIPT] [JS] -\installprettytype [JAVA] [JV] -\installprettytype [JS] [JS] -\installprettytype [JV] [JV] - -\installprettytype [SQL] [SQL] - -\installprettytype [PASCAL] [PAS] -\installprettytype [PAS] [PAS] -\installprettytype [MODULA] [PAS] -\installprettytype [MOD] [PAS] - -\installprettytype [EIFFEL] [EIF] -\installprettytype [EIF] [EIF] -\installprettytype [E] [EIF] - -\installprettytype [XML] [XML] - -\installprettytype [LUA] [LUA] - -%D We use the \CONTEXT\ color system for switching to and from -%D color mode. We can always redefine these colors afterwards. 
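For files, \dodododotypefile above assembles the same kind of table; the only extra field is the current input regime (presumably used to recode the file before it is visualized). A sketch with made-up values:

buffers.typefile {
    name       = "whatever.tex",
    strip      = "no",
    range      = "3,+2",  -- "3,6", "3,+2" or label based, as in the examples earlier on
    regime     = "utf",   -- \currentregime
    tab        = "ascii",
    visualizer = "tex",   -- \typingparameter\c!option
}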
- -\definecolor [colorprettyone] [r=.9, g=.0, b=.0] % red -\definecolor [colorprettytwo] [r=.0, g=.8, b=.0] % green -\definecolor [colorprettythree] [r=.0, g=.0, b=.9] % blue -\definecolor [colorprettyfour] [r=.8, g=.8, b=.6] % yellow - -\definecolor [grayprettyone] [s=.30] -\definecolor [grayprettytwo] [s=.45] -\definecolor [grayprettythree] [s=.60] -\definecolor [grayprettyfour] [s=.75] - -\definepalet - [colorpretty] - [ prettyone=colorprettyone, - prettytwo=colorprettytwo, - prettythree=colorprettythree, - prettyfour=colorprettyfour] - -\definepalet - [graypretty] - [ prettyone=grayprettyone, - prettytwo=grayprettytwo, - prettythree=grayprettythree, - prettyfour=grayprettyfour] - -\definepalet [TEXcolorpretty] [colorpretty] -\definepalet [TEXgraypretty] [graypretty] -\definepalet [PLcolorpretty] [colorpretty] -\definepalet [PLgraypretty] [graypretty] -\definepalet [PMcolorpretty] [colorpretty] -\definepalet [PMgraypretty] [graypretty] -\definepalet [MPcolorpretty] [colorpretty] -\definepalet [MPgraypretty] [graypretty] -\definepalet [JVcolorpretty] [colorpretty] -\definepalet [JVgraypretty] [graypretty] -\definepalet [JScolorpretty] [colorpretty] -\definepalet [JSgraypretty] [graypretty] -\definepalet [SQLcolorpretty] [colorpretty] -\definepalet [SQLgraypretty] [graypretty] -\definepalet [PAScolorpretty] [colorpretty] -\definepalet [PASgraypretty] [graypretty] -\definepalet [EIFcolorpretty] [colorpretty] -\definepalet [EIFgraypretty] [graypretty] -\definepalet [XMLcolorpretty] [colorpretty] -\definepalet [XMLgraypretty] [graypretty] -\definepalet [LUAcolorpretty] [colorpretty] -\definepalet [LUAgraypretty] [graypretty] - -% patched from verb-ini (todo) - -% \let\beginverbatimline\relax -% \let\endverbatimline \relax - -% \appendtoks whatever\par\to\everyverbatimbeginofdisplay -% \appendtoks whatever\to\everyverbatimendofdisplay -% \appendtoks [\to\everyverbatimbeginofinline -% \appendtoks ]\to\everyverbatimendofinline - -\let\doopenupverbatimline\empty - -\newtoks\everyverbatimbeginofdisplay -\newtoks\everyverbatimendofdisplay -\newtoks\everyverbatimbeginofinline -\newtoks\everyverbatimendofinline - -\let\currentverbatimpretty\empty - -\def\doverbatimbeginofdisplay#1% - {\edef\currentverbatimpretty{#1}% - \the\everyverbatimbeginofdisplay} - -\def\doverbatimendofdisplay - {\the\everyverbatimendofdisplay} - -\def\doverbatimbeginofinline#1% - {\edef\currentverbatimpretty{#1}% - \the\everyverbatimbeginofinline} - -\def\doverbatimendofinline - {\the\everyverbatimendofinline} + \c!tab=\v!yes] + +%D Buffers + +% [name] [settings] | [name] | [settings] + +\unexpanded\def\typebuffer + {\dodoubleempty\dotypebuffer} + +\unexpanded\def\dotypedefinedbuffer + {\dotripleempty\dodotypedefinedbuffer} + +\appendtoks + \setuevalue{\e!type\currentbuffer}% + {\noexpand\dotypedefinedbuffer + [\v!buffer]% + [def-\number\nofdefinedbuffers]}% +\to \everydefinebuffer + +\def\dotypebuffer[#1][#2]% + {\bgroup + \ifsecondargument + \setuptyping[\v!buffer][#2]% + \processcommalist[#1]{\dododotypebuffer\v!buffer}% [name] [settings] + \else\iffirstargument + \doifassignmentelse{#1} + {\setuptyping[\v!buffer][#1]% + \dododotypebuffer\v!buffer\empty}% [settings] + {\processcommalist[#1]{\dododotypebuffer\v!buffer}}% [name] + \else + \dododotypebuffer\v!buffer\empty% [] + \fi\fi + \egroup} + +\def\dodotypedefinedbuffer[#1][#2][#3]% category name settings + {\bgroup + \ifthirdargument + \setuptyping[#1][#3]% + \fi + \dododotypebuffer{#1}{#2}% + \egroup} + +\def\dododotypebuffer#1#2% category name + 
{\edef\currenttyping{#1}% + \typingparameter\c!before + \startpacked[\v!blank] + \dosetuptypelinenumbering + \firststageinitializetyping + \secondstageinitializetyping + \beginofverbatimlines + \dostarttagged\t!verbatim{#1}% + \ctxlua{buffers.typebuffer { + name = "#2", + strip = "\typingparameter\c!strip", + range = "\typingparameter\c!range", + regime = "\currentregime", + tab = "\typingparameter\c!tab", + visualizer = "\typingparameter\c!option", + }}% + \dostoptagged + \endofverbatimlines + \stoppacked + \typingparameter\c!after} + +% process buffers .. settings + +\unexpanded\def\processbuffer + {\dodoubleempty\doprocessbuffer} + +\def\doprocessbuffer[#1][#2]% + {\bgroup + \ifsecondargument + \setuptyping[\v!buffer][#2]% + \processcommalist[#1]{\dodoprocessbuffer\v!buffer}% [name] [settings] + \else\iffirstargument + \doifassignmentelse{#1} + {\setuptyping[\v!buffer][#1]% + \dodoprocessbuffer\v!buffer\empty}% [settings] + {\processcommalist[#1]{\dodoprocessbuffer\v!buffer}}% [name] + \else + \dodoprocessbuffer\v!buffer\empty% [] + \fi\fi + \egroup} + +% get : before/after of buffer +% typing : before/after of typing +% process : no before/after (special case anyway) + +\def\dodoprocessbuffer#1#2% + {\edef\currenttyping{#1}% + \ctxlua{buffers.typebuffer { + name = "#2", + strip = "\typingparameter\c!strip", + tab = "\typingparameter\c!tab", + visualizer = "\typingparameter\c!option", + }}} + +% line numbering, keep broken lines together \newcount\nofverbatimlines -\def\doverbatimbeginofline#1% linenumber - {\global\advance\nofverbatimlines\plusone - \attribute\verbatimlineattribute\nofverbatimlines - \bgroup % due to pretty status - \iflinepar\else\EveryPar{}\fi - \noindent % was wrong: \dontleavehmode - \xdef\dokeepverbatimlinedata % hm, still needed? - {\parindent \the\parindent - \hangindent\the\hangindent - \hangafter \the\hangafter - \leftskip \the\leftskip - \rightskip \the\rightskip}% - \egroup - \dokeepverbatimlinedata - \doopenupverbatimline +\def\doverbatimbeginofline + {\ifconditional\verbatimnumberinglines + \global\advance\nofverbatimlines\plusone + \attribute\verbatimlineattribute\nofverbatimlines + \fi + \noindent + \doverbatimsetlinemargin \the\everyline\strut - \dostarttagged\t!verbatimline\empty - }%\beginverbatimline} + \dostarttagged\t!verbatimline\empty} \def\doverbatimendofline - {%\endverbatimline - \dostoptagged - \global\lineparfalse - \obeyedline\par - \attribute\verbatimlineattribute\attributeunsetvalue} + {\dostoptagged + \obeyedline % still needed? 
+ \par + \ifconditional\verbatimnumberinglines + \attribute\verbatimlineattribute\attributeunsetvalue + \fi} \def\doverbatimemptyline - {\strut - \par - \global\linepartrue} + {\ifconditional\verbatimnumberinglines + \par\strut\par % this will be an option where we use a signal instead of a strut + \else + \blank[\typingparameter\c!blank]% + \fi} + +% hooks: + +\def\doinlineverbatimstart {} +\def\doinlineverbatimstop {} +\def\doinlineverbatimbeginline {} +\def\doinlineverbatimnewline {\obeyedspace} +\def\doinlineverbatimemptyline {\obeyedspace} + +\def\dodisplayverbatimstart {\doverbatimbeginofline} +\def\dodisplayverbatimstop {\doverbatimendofline} +\def\dodisplayverbatimbeginline {\doverbatimbeginofline} +\def\dodisplayverbatimnewline {\doverbatimendofline\par} +\def\dodisplayverbatimemptyline {\doverbatimemptyline} + +\def\doverbatimspace {\obeyedspace} \protect \endinput diff --git a/tex/context/base/buff-vis.lua b/tex/context/base/buff-vis.lua deleted file mode 100644 index 1a97f2591..000000000 --- a/tex/context/base/buff-vis.lua +++ /dev/null @@ -1,74 +0,0 @@ -if not modules then modules = { } end modules ['buff-vis'] = { - version = 1.001, - comment = "companion to buff-vis.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local type = type -local format = string.format -local C, P, V, patterns, lpegmatch = lpeg.C, lpeg.P, lpeg.V, lpeg.patterns, lpeg.match - -visualizers = visualizers or { } - -local patterns = { } visualizers.patterns = patterns - -local fallback = context.verbatim - -function visualizers.pattern(visualizer,kind,pattern) - if type(visualizer) == "table" and type(kind) == "string" then - kind = visualizer[kind] or visualizer.default or fallback - else - kind = fallback - end - return C(pattern)/kind -end - -setmetatable(patterns, { - __index = function(t,k) - local v = require(format("v-%s.lua",k)) or false - context.input(format("v-%s.mkiv",k)) - t[k] = v - return v - end -} ) - -local function visualizestring(method,content) - if content and content ~= "" then - lpegmatch(patterns[method],content) - end -end - -visualizers.visualizestring = visualizestring - -function visualizers.visualizefile(method,name) - visualizestring(method,resolvers.loadtexfile(name)) -end - -function visualizers.visualizebuffer(method,name) - lpegmatch(method,buffers.content(name)) -end - -local visualizer = { - start = function() context.startSnippet() end, - stop = function() context.stopSnippet() end , - default = context.verbatim, -} - -local patterns = lpeg.patterns -local pattern = visualizers.pattern - -local texvisualizer = P { "process", - process = - V("start") * V("content") * V("stop"), - start = - pattern(visualizer,"start",patterns.beginofstring), - stop = - pattern(visualizer,"stop",patterns.endofstring), - content = ( - pattern(visualizer,"default",patterns.anything) - )^1 -} - -return texvisualizer diff --git a/tex/context/base/buff-vis.mkiv b/tex/context/base/buff-vis.mkiv deleted file mode 100644 index 7739dc21f..000000000 --- a/tex/context/base/buff-vis.mkiv +++ /dev/null @@ -1,24 +0,0 @@ -%D \module -%D [ file=buff-vis, -%D version=2010.10.19, % replaces old visualizers as we now have lpeg -%D title=\CONTEXT\ Buffer Macros, -%D subtitle=Visualizers, -%D author=Hans Hagen, -%D date=\currentdate, -%D copyright=PRAGMA ADE] -%C -%C This module is part of the \CONTEXT\ macro||package and is -%C therefore copyrighted by \PRAGMA. See mreadme.pdf for -%C details. 
- -\writestatus{loading}{ConTeXt Buffer Macros / Visualizers} - -\registerctxluafile{buff-vis}{1.001} - -\unprotect - -\definestartstop - [Snippet] - [\c!style=\tt] - -\protect \endinput diff --git a/tex/context/base/char-utf.lua b/tex/context/base/char-utf.lua index 87ed861ed..27e87d29c 100644 --- a/tex/context/base/char-utf.lua +++ b/tex/context/base/char-utf.lua @@ -43,9 +43,6 @@ local filters = characters.filters filters.utf = filters.utf or { } local utffilters = characters.filters.utf -utffilters.collapsing = true -utffilters.expanding = true - --[[ldx--

<p>It only makes sense to collapse at runtime, since we don't expect source code to depend on collapsing.</p>

@@ -169,7 +166,7 @@ not collecting tokens is not only faster but also saves garbage collecting. -- lpeg variant is not faster function utffilters.collapse(str) -- not really tested (we could preallocate a table) - if utffilters.collapsing and str then + if str and str ~= "" then local nstr = #str if nstr > 1 then if initialize then -- saves a call @@ -248,6 +245,9 @@ function utffilters.collapse(str) -- not really tested (we could preallocate a t return str end +utilities.sequencers.appendaction (resolvers.openers.textfileactions,"system","characters.filters.utf.collapse") +utilities.sequencers.disableaction(resolvers.openers.textfileactions,"characters.filters.utf.collapse") + --[[ldx--

<p>Next we implement some commands that are used in the user interface.</p>

--ldx]]-- diff --git a/tex/context/base/char-utf.mkiv b/tex/context/base/char-utf.mkiv index 8992b098e..b59d2f569 100644 --- a/tex/context/base/char-utf.mkiv +++ b/tex/context/base/char-utf.mkiv @@ -28,11 +28,11 @@ %D since the source files are rather simple, we postpone the %D initialization till runtime. +% resolvers.filters.install('utf',characters.filters.utf.collapse) + \appendtoks - \ctxlua { - characters.filters.utf.collapsing = true - resolvers.filters.install('utf',characters.filters.utf.collapse) - }% + \ctxlua{utilities.sequencers.enableaction + (resolvers.openers.textfileactions,"characters.filters.utf.collapse")}% \to \everyjob %D The next one influences input parsing. diff --git a/tex/context/base/cldf-com.lua b/tex/context/base/cldf-com.lua index 5ab1d8c8d..9d03a450a 100644 --- a/tex/context/base/cldf-com.lua +++ b/tex/context/base/cldf-com.lua @@ -48,3 +48,33 @@ function context.concat(t,separator) end end end + +function context.char(k) -- todo: if catcode == letter or other then just the utf + if type(k) == "table" then + for i=1,#k do + context(format([[\char%s\relax]],k[i])) + end + elseif k then + context(format([[\char%s\relax]],k)) + end +end + +function context.utfchar(k) + context(utfchar(k)) +end + +function context.chardef(cs,u) + context(format([[\chardef\%s=%s\relax]],k)) +end + +function context.par() + context([[\par]]) -- no need to add {} there +end + +function context.bgroup() + context("{") +end + +function context.egroup() + context("}") +end diff --git a/tex/context/base/cldf-ver.lua b/tex/context/base/cldf-ver.lua index 8607cb578..120bc468a 100644 --- a/tex/context/base/cldf-ver.lua +++ b/tex/context/base/cldf-ver.lua @@ -9,7 +9,7 @@ if not modules then modules = { } end modules ['cldf-ver'] = { -- tex.print == newline local concat, tohandle = table.concat, table.tohandle -local splitlines = string.splitlines +local find, splitlines = string.find, string.splitlines local tostring, type = tostring, type local context = context diff --git a/tex/context/base/colo-ini.mkiv b/tex/context/base/colo-ini.mkiv index 406cc619d..af3c1ebf4 100644 --- a/tex/context/base/colo-ini.mkiv +++ b/tex/context/base/colo-ini.mkiv @@ -140,7 +140,7 @@ %D \showsetup{color} %D \showsetup{graycolor} - \def\switchtocolor [#1]{\getvalue{#1}} + \def\switchtocolor [#1]{\csname#1\endcsname} \unexpanded\def\color [#1]{\groupedcommand{\doactivatecolor{#1}}{}} \unexpanded\def\startcolor [#1]{\begingroup\doactivatecolor{#1}} \unexpanded\def\stopcolor {\endgroup} @@ -149,10 +149,13 @@ \unexpanded\def\fastcolored [#1]#2{\begingroup\dodefinefastcolor[@colored@][#1]\doactivatecolor{@colored@}#2\endgroup} \def\predefinecolor [#1]{\flushatshipout{\hbox{\color[#1]{}}}} \def\predefineindexcolor[#1]{\flushatshipout{\hbox{\color[#1]{}}}} - \unexpanded\def\startcolorpage {\startcolor[\ifx\maintextcolor\empty\defaulttextcolor\else\maintextcolor\fi]} - \unexpanded\def\stopcolorpage {\stopcolor} - \unexpanded\def\startraster [#1]{\dosetrastercolor{#1}} - \unexpanded\def\stopraster {} + +% some of this will go away + +\unexpanded\def\startcolorpage {\startcolor[\ifx\maintextcolor\empty\defaulttextcolor\else\maintextcolor\fi]} +\unexpanded\def\stopcolorpage {\stopcolor} +\unexpanded\def\startraster [#1]{\dosetrastercolor{#1}} +\unexpanded\def\stopraster {} \def\raster [#1]{\groupedcommand{\dosetrastercolor{#1}}{}} \def\faststartcolor [#1]{\doactivatecolor{#1}} \def\faststopcolor {} diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv index e400dfaa0..9e97001b7 100644 --- 
a/tex/context/base/cont-new.mkiv +++ b/tex/context/base/cont-new.mkiv @@ -39,10 +39,10 @@ % for a while as these can be used in user modules -\startluacode - jobregisters = logs.obsolete("jobregisters", "structures.registers") - buffers.finish_state = logs.obsolete("buffers.finish_state","buffers.finishstate") - buffers.change_state = logs.obsolete("buffers.change_state","buffers.finishstate") -\stopluacode +% \startluacode +% jobregisters = logs.obsolete("jobregisters", "structures.registers") +% buffers.finish_state = logs.obsolete("buffers.finish_state","buffers.finishstate") +% buffers.change_state = logs.obsolete("buffers.change_state","buffers.finishstate") +% \stopluacode \protect \endinput diff --git a/tex/context/base/cont-new.tex b/tex/context/base/cont-new.tex index f024ae024..42d506e11 100644 --- a/tex/context/base/cont-new.tex +++ b/tex/context/base/cont-new.tex @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. -\newcontextversion{2010.11.20 12:51} +\newcontextversion{2010.11.26 21:21} %D This file is loaded at runtime, thereby providing an %D excellent place for hacks, patches, extensions and new diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv index c888140b0..4e790adad 100644 --- a/tex/context/base/context.mkiv +++ b/tex/context/base/context.mkiv @@ -232,7 +232,17 @@ \loadmarkfile{buff-ini} \loadmarkfile{buff-ver} -\loadmarkfile{buff-vis} + +\loadmarkfile{v-default} % preloaded as otherwise spurious spaces inline due to loading +\loadmarkfile{v-nested} % preloaded as otherwise spurious spaces inline due to loading +\loadmarkfile{v-escaped} % for completeness +\loadmarkfile{v-tex} % optional as also runtime if not loaded +\loadmarkfile{v-mp} % optional as also runtime if not loaded +\loadmarkfile{v-lua} % optional as also runtime if not loaded +\loadmarkfile{v-xml} % optional as also runtime if not loaded + +\loadmarkfile{v-parsed-xml} % optional +\loadmarkfile{v-parsed-lua} % optional \loadmarkfile{strc-blk} diff --git a/tex/context/base/context.tex b/tex/context/base/context.tex index e88fbee62..11e0e02e1 100644 --- a/tex/context/base/context.tex +++ b/tex/context/base/context.tex @@ -20,7 +20,7 @@ %D your styles an modules. 
\edef\contextformat {\jobname} -\edef\contextversion{2010.11.20 12:51} +\edef\contextversion{2010.11.26 21:21} %D For those who want to use this: diff --git a/tex/context/base/core-sys.mkiv b/tex/context/base/core-sys.mkiv index f5bb45e35..cdca352bd 100644 --- a/tex/context/base/core-sys.mkiv +++ b/tex/context/base/core-sys.mkiv @@ -194,48 +194,79 @@ \definecomplexorsimple\start \definecomplexorsimple\stop -\def\dododefinestartstop[#1][#2]% todo: use indirect commands - {\getparameters - [\??be#1] - [\c!before=, - \c!after=, - \c!inbetween=, - \c!commands=, - \c!style=, - #2]% - \setuvalue{#1}% - {\groupedcommand - {\getvalue{\??be#1\c!commands}% - \dostarttagged\t!construct{#1}% - \dostartattributes{\??be#1}\c!style\c!color} - {\dostopattributes - \dostoptagged - \getvalue{\??be#1\c!inbetween}}}% - \setvalue{\e!start#1}% - {\getvalue{\??be#1\c!before}% - \bgroup - \getvalue{\??be#1\c!commands}% - \dostarttagged\t!construct{#1}% - \dostartattributes{\??be#1}\c!style\c!color\empty}% - \setvalue{\e!stop#1}% - {\dostopattributes - \dostoptagged - \egroup - \getvalue{\??be#1\c!after}}} +% \def\dododefinestartstop[#1][#2]% todo: use indirect commands +% {\getparameters +% [\??be#1] +% [\c!before=, +% \c!after=, +% \c!inbetween=, +% \c!commands=, +% \c!style=, +% #2]% +% \setuvalue{#1}% +% {\groupedcommand +% {\getvalue{\??be#1\c!commands}% +% \dostarttagged\t!construct{#1}% +% \dostartattributes{\??be#1}\c!style\c!color} +% {\dostopattributes +% \dostoptagged +% \getvalue{\??be#1\c!inbetween}}}% +% \setuvalue{\e!start#1}% +% {\getvalue{\??be#1\c!before}% +% \bgroup +% \getvalue{\??be#1\c!commands}% +% \dostarttagged\t!construct{#1}% +% \dostartattributes{\??be#1}\c!style\c!color\empty}% +% \setuvalue{\e!stop#1}% +% {\dostopattributes +% \dostoptagged +% \egroup +% \getvalue{\??be#1\c!after}}} +% +% \def\dodefinestartstop[#1][#2]% +% {\def\docommand##1{\dododefinestartstop[##1][#2]}% +% \processcommalist[#1]\docommand} +% +% \unexpanded\def\definestartstop +% {\dodoubleargument\dodefinestartstop} +% +% \def\dosetupstartstop[#1][#2]% +% {\def\docommand##1{\getparameters[\??be##1][#2]}% +% \processcommalist[#1]\docommand} +% +% \unexpanded\def\setupstartstop +% {\dodoubleargument\dosetupstartstop} + -\def\dodefinestartstop[#1][#2]% - {\def\docommand##1{\dododefinestartstop[##1][#2]}% - \processcommalist[#1]\docommand} +% \c!before \c!after \c!inbetween \c!commands \c!style \c!color -\unexpanded\def\definestartstop - {\dodoubleargument\dodefinestartstop} +\installcommandhandler{\??be}{startstop}{\??be} -\def\dosetupstartstop[#1][#2]% - {\def\docommand##1{\getparameters[\??be##1][#2]}% - \processcommalist[#1]\docommand} +\appendtoks + \normalexpanded{\dodefinestartstop{\currentstartstop}}% +\to \everydefinestartstop -\unexpanded\def\setupstartstop - {\dodoubleargument\dosetupstartstop} +\unexpanded\def\dodefinestartstop#1% + {\setuvalue{#1}% + {\groupedcommand + {\def\currentstartstop{#1}% + \startstopparameter\c!commands + \dostarttagged\t!construct\currentstartstop + \dosetstartstopattributes\c!style\c!color} + {\def\currentstartstop{#1}% + \dostoptagged + \startstopparameter\c!inbetween}}% + \setuvalue{\e!start#1}% + {\namedstartstopparameter{#1}\c!before + \bgroup + \def\currentstartstop{#1}% + \startstopparameter\c!commands + \dostarttagged\t!construct\currentstartstop + \dosetstartstopattributes\c!style\c!color}% + \setuvalue{\e!stop#1}% + {\dostoptagged + \egroup + \namedstartstopparameter{#1}\c!after}} % \docommand kan niet worden gebruikt omdat deze macro % soms lokaal wordt gebruikt diff 
--git a/tex/context/base/data-env.lua b/tex/context/base/data-env.lua index c6035d799..d2c38262d 100644 --- a/tex/context/base/data-env.lua +++ b/tex/context/base/data-env.lua @@ -18,7 +18,7 @@ local suffixes = allocate() resolvers.suffixes = suffixes local dangerous = allocate() resolvers.dangerous = dangerous local suffixmap = allocate() resolvers.suffixmap = suffixmap -local relations = allocate { +local relations = allocate { -- todo: handlers also here core = { ofm = { names = { "ofm", "omega font metric", "omega font metrics" }, @@ -88,7 +88,7 @@ local relations = allocate { tex = { names = { "tex" }, variable = 'TEXINPUTS', - suffixes = { 'tex', "mkiv", "mkii" }, + suffixes = { 'tex', "mkiv", "mkiv", "mkii" }, }, icc = { names = { "icc", "icc profile", "icc profiles" }, @@ -202,29 +202,33 @@ resolvers.relations = relations -- formats: maps a format onto a variable -for category, categories in next, relations do - for name, relation in next, categories do - local rn = relation.names - local rv = relation.variable - local rs = relation.suffixes - if rn and rv then - for i=1,#rn do - local rni = lower(gsub(rn[i]," ","")) - formats[rni] = rv - if rs then - suffixes[rni] = rs - for i=1,#rs do - local rsi = rs[i] - suffixmap[rsi] = rni +function resolvers.updaterelations() + for category, categories in next, relations do + for name, relation in next, categories do + local rn = relation.names + local rv = relation.variable + local rs = relation.suffixes + if rn and rv then + for i=1,#rn do + local rni = lower(gsub(rn[i]," ","")) + formats[rni] = rv + if rs then + suffixes[rni] = rs + for i=1,#rs do + local rsi = rs[i] + suffixmap[rsi] = rni + end end end end - end - if rs then + if rs then + end end end end +resolvers.updaterelations() -- push this in the metatable -> newindex + local function simplified(t,k) return rawget(t,lower(gsub(k," ",""))) end diff --git a/tex/context/base/data-met.lua b/tex/context/base/data-met.lua index b51c8b57b..c9c3cde5e 100644 --- a/tex/context/base/data-met.lua +++ b/tex/context/base/data-met.lua @@ -21,7 +21,7 @@ resolvers.locators = allocate { notfound = { nil } } -- locate databases resolvers.hashers = allocate { notfound = { nil } } -- load databases resolvers.generators = allocate { notfound = { nil } } -- generate databases -function resolvers.splitmethod(filename) +function resolvers.splitmethod(filename) -- todo: trigger by suffix if not filename then return { } -- safeguard elseif type(filename) == "table" then @@ -40,10 +40,13 @@ function resolvers.methodhandler(what, filename, filetype) -- ... 
local resolver = resolvers[what] if resolver[scheme] then if trace_locating then - report_resolvers("handler '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification)) + report_resolvers("using special handler for '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification)) end return resolver[scheme](filename,filetype) else + if trace_locating then + report_resolvers("no handler for '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification)) + end return resolver.tex(filename,filetype) -- todo: specification end end diff --git a/tex/context/base/data-sch.lua b/tex/context/base/data-sch.lua index c8d209798..655cd8209 100644 --- a/tex/context/base/data-sch.lua +++ b/tex/context/base/data-sch.lua @@ -23,7 +23,7 @@ schemes.threshold = 24 * 60 * 60 directives.register("schemes.threshold", function(v) schemes.threshold = tonumber(v) or schemes.threshold end) -local cached, loaded, reused = { }, { }, { } +local cached, loaded, reused, thresholds = { }, { }, { }, { } function schemes.curl(name,cachename) -- will use sockets instead or the curl library local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name -- no protocol .. "://" @@ -35,7 +35,8 @@ function schemes.fetch(protocol,name,handler) local cachename = caches.setfirstwritablefile(cleanname,"schemes") if not cached[name] then statistics.starttiming(schemes) - if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification) > schemes.threshold) then + if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification) > + (thresholds[protocol] or schemes.threshold)) then cached[name] = cachename if handler then if trace_schemes then @@ -86,14 +87,15 @@ function loaders.schemes(protocol,filename) return loaders.generic(protocol,filename) end --- could be metatable +-- could be metatable and proper subtables -function schemes.install(protocol,handler) - loaded [protocol] = 0 - reused [protocol] = 0 - finders[protocol] = function (filename,filetype) return finders.schemes(protocol,filename,handler) end - openers[protocol] = function (filename) return openers.schemes(protocol,filename) end - loaders[protocol] = function (filename) return loaders.schemes(protocol,filename) end +function schemes.install(protocol,handler,threshold) + loaded [protocol] = 0 + reused [protocol] = 0 + finders [protocol] = function (filename,filetype) return finders.schemes(protocol,filename,handler) end + openers [protocol] = function (filename) return openers.schemes(protocol,filename) end + loaders [protocol] = function (filename) return loaders.schemes(protocol,filename) end + thresholds[protocol] = threshold or schemes.threshold end local function http_handler(protocol,name,cachename) @@ -109,6 +111,7 @@ local function http_handler(protocol,name,cachename) os.remove(cachename) os.rename(tempname,cachename) end + return cachename end schemes.install('http',http_handler) diff --git a/tex/context/base/data-tex.lua b/tex/context/base/data-tex.lua index bd6eb0599..3c42882fd 100644 --- a/tex/context/base/data-tex.lua +++ b/tex/context/base/data-tex.lua @@ -36,125 +36,216 @@ function finders.generic(tag,filename,filetype) end end ---~ local lpegmatch = lpeg.match ---~ local getlines = lpeg.Ct(lpeg.patterns.textline) +-- -- keep this one as reference as it's the first version +-- +-- resolvers.filters = resolvers.filters or { } +-- +-- local input_translator, utf_translator, user_translator = nil, nil, nil +-- +-- function 
resolvers.filters.install(name,func) +-- if name == "input" then input_translator = func +-- elseif name == "utf" then utf_translator = func +-- elseif name == "user" then user_translator = func end +-- end +-- +-- function openers.textopener(filename,file_handle,tag) +-- local u = unicode.utftype(file_handle) +-- local t = { } +-- if u > 0 then +-- if trace_locating then +-- report_resolvers("%s opener, file '%s' opened using method '%s'",tag,filename,unicode.utfname[u]) +-- end +-- local l +-- local data = file_handle:read("*a") +-- if u > 2 then +-- l = unicode.utf32_to_utf8(data,u==4) +-- elseif u > 1 then +-- l = unicode.utf16_to_utf8(data,u==2) +-- else +-- l = string.splitlines(data) +-- end +-- file_handle:close() +-- t = { +-- utftype = u, -- may go away +-- lines = l, +-- current = 0, -- line number, not really needed +-- handle = nil, +-- noflines = #l, +-- close = function() +-- if trace_locating then +-- report_resolvers("%s closer, file '%s' closed",tag,filename) +-- end +-- logs.show_close(filename) +-- t = nil +-- end, +-- reader = function(self) +-- self = self or t +-- local current, lines = self.current, self.lines +-- if current >= #lines then +-- return nil +-- else +-- current = current + 1 +-- self.current = current +-- local line = lines[current] +-- if not line then +-- return nil +-- elseif line == "" then +-- return "" +-- else +-- if input_translator then +-- line = input_translator(line) +-- end +-- if utf_translator then +-- line = utf_translator(line) +-- end +-- if user_translator then +-- line = user_translator(line) +-- end +-- return line +-- end +-- end +-- end +-- } +-- else +-- if trace_locating then +-- report_resolvers("%s opener, file '%s' opened",tag,filename) +-- end +-- -- todo: file;name -> freeze / eerste regel scannen -> freeze +-- --~ local data = lpegmatch(getlines,file_handle:read("*a")) +-- --~ local n = 0 +-- t = { +-- reader = function() -- self +-- local line = file_handle:read() +-- --~ n = n + 1 +-- --~ local line = data[n] +-- --~ print(line) +-- if not line then +-- return nil +-- elseif line == "" then +-- return "" +-- else +-- if input_translator then +-- line = input_translator(line) +-- end +-- if utf_translator then +-- line = utf_translator(line) +-- end +-- if user_translator then +-- line = user_translator(line) +-- end +-- return line +-- end +-- end, +-- close = function() +-- if trace_locating then +-- report_resolvers("%s closer, file '%s' closed",tag,filename) +-- end +-- logs.show_close(filename) +-- file_handle:close() +-- t = nil +-- collectgarbage("step") -- saves some memory, maybe checkgarbage but no # +-- end, +-- handle = function() +-- return file_handle +-- end, +-- noflines = function() +-- t.noflines = io.noflines(file_handle) +-- return t.noflines +-- end +-- } +-- end +-- return t +-- end -resolvers.filters = resolvers.filters or { } -local input_translator, utf_translator, user_translator = nil, nil, nil +-- the main text reader -- -function resolvers.filters.install(name,func) - if name == "input" then input_translator = func - elseif name == "utf" then utf_translator = func - elseif name == "user" then user_translator = func end -end +local sequencers = utilities.sequencers + +local fileprocessor = nil +local lineprocessor = nil + +local textfileactions = sequencers.reset { + arguments = "str,filename", + returnvalues = "str", + results = "str", +} + +local textlineactions = sequencers.reset { + arguments = "str,filename,linenumber", + returnvalues = "str", + results = "str", +} + 
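These two sequencers replace the removed input/utf/user translator slots: file actions run once per file, line actions once per line, and both are recompiled on demand when an action list is marked dirty. Once the system and user groups are appended (directly below), a filter can be registered by name; a purely hypothetical example:

-- hypothetical user action: normalize line endings before any other
-- filter sees the file content; it must return the (possibly adapted) string
function mynormalizeendings(str,filename)
    return (string.gsub(str,"\r\n","\n"))
end

utilities.sequencers.appendaction(resolvers.openers.textfileactions,"user","mynormalizeendings")

The collapse filter registered in char-utf.lua earlier in this patch uses the same call shape, only in the system group and disabled until \everyjob enables it.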
+openers.textfileactions = textfileactions +openers.textlineactions = textlineactions + +sequencers.appendgroup(textfileactions,"system") +sequencers.appendgroup(textfileactions,"user") + +sequencers.appendgroup(textlineactions,"system") +sequencers.appendgroup(textlineactions,"user") function openers.textopener(filename,file_handle,tag) - local u = unicode.utftype(file_handle) - local t = { } - if u > 0 then - if trace_locating then - report_resolvers("%s opener, file '%s' opened using method '%s'",tag,filename,unicode.utfname[u]) - end - local l - if u > 2 then - l = unicode.utf32_to_utf8(file_handle:read("*a"),u==4) - else - l = unicode.utf16_to_utf8(file_handle:read("*a"),u==2) - end - file_handle:close() - t = { - utftype = u, -- may go away - lines = l, - current = 0, -- line number, not really needed - handle = nil, - noflines = #l, - close = function() - if trace_locating then - report_resolvers("%s closer, file '%s' closed",tag,filename) - end - logs.show_close(filename) - t = nil - end, - reader = function(self) - self = self or t - local current, lines = self.current, self.lines - if current >= #lines then - return nil - else - current = current + 1 - self.current = current - local line = lines[current] - if not line then - return nil - elseif line == "" then - return "" - else - if input_translator then - line = input_translator(line) - end - if utf_translator then - line = utf_translator(line) - end - if user_translator then - line = user_translator(line) - end - return line - end - end + if trace_locating then + report_resolvers("%s opener, file '%s' opened using method '%s'",tag,filename,unicode.utfname[u]) + end + if textfileactions.dirty then + fileprocessor = sequencers.compile(textfileactions) + end + local lines = io.loaddata(filename) + local kind = unicode.filetype(lines) + if kind == "utf-16-be" then + lines = unicode.utf16_to_utf8_be(lines) + elseif kind == "utf-16-le" then + lines = unicode.utf16_to_utf8_le(lines) + elseif kind == "utf-32-be" then + lines = unicode.utf32_to_utf8_be(lines) + elseif kind == "utf-32-le" then + lines = unicode.utf32_to_utf8_le(lines) + else -- utf8 or unknown + lines = fileprocessor(lines,filename) or lines + lines = string.splitlines(lines) + end + local t = { + lines = lines, + current = 0, + handle = nil, + noflines = #lines, + close = function() + if trace_locating then + report_resolvers("%s closer, file '%s' closed",tag,filename) end - } - else - if trace_locating then - report_resolvers("%s opener, file '%s' opened",tag,filename) - end - -- todo: file;name -> freeze / eerste regel scannen -> freeze - --~ local data = lpegmatch(getlines,file_handle:read("*a")) - --~ local n = 0 - t = { - reader = function() -- self - local line = file_handle:read() - --~ n = n + 1 - --~ local line = data[n] - --~ print(line) + logs.show_close(filename) + t = nil + end, + reader = function(self) + self = self or t + local current, noflines = self.current, self.noflines + if current >= noflines then + return nil + else + current = current + 1 + self.current = current + local line = lines[current] if not line then return nil elseif line == "" then return "" else - if input_translator then - line = input_translator(line) - end - if utf_translator then - line = utf_translator(line) + if textlineactions.dirty then + lineprocessor = sequencers.compile(textlineactions) end - if user_translator then - line = user_translator(line) - end - return line - end - end, - close = function() - if trace_locating then - report_resolvers("%s closer, file '%s' 
closed",tag,filename) + return lineprocessor(line,filename,current) or line end - logs.show_close(filename) - file_handle:close() - t = nil - collectgarbage("step") -- saves some memory, maybe checkgarbage but no # - end, - handle = function() - return file_handle - end, - noflines = function() - t.noflines = io.noflines(file_handle) - return t.noflines end - } - end + end + } return t end +-- -- -- + function openers.generic(tag,filename) if filename and filename ~= "" then local f = io.open(filename,"r") @@ -226,6 +317,7 @@ function resolvers.openfile(filename) end function resolvers.loadtexfile(filename, filetype) + -- todo: apply filters local ok, data, size = resolvers.loadbinfile(filename, filetype) return data or "" end diff --git a/tex/context/base/font-ini.mkiv b/tex/context/base/font-ini.mkiv index b362b35a0..3a479497b 100644 --- a/tex/context/base/font-ini.mkiv +++ b/tex/context/base/font-ini.mkiv @@ -3206,12 +3206,37 @@ \getvalue\emphasizedtypeface \fi\fi\fi} -\def\emphasistypeface{\doemphasistypeface\sl\it} -\def\emphasisboldface{\doemphasistypeface\bs\bi} +\unexpanded\def\emphasistypeface{\doemphasistypeface\sl\it} +\unexpanded\def\emphasisboldface{\doemphasistypeface\bs\bi} + +\unexpanded\def\normalboldface + {\relax\ifx\fontalternative\c!it \bi + \else \ifx\fontalternative\c!sl \bs + \else \bf \fi\fi} + +\unexpanded\def\normaltypeface + {\relax + \ifx\fontalternative\c!bi \it \else + \ifx\fontalternative\c!bs \sl \else + \tf \fi\fi} + +\let\typeface\normaltypeface +\let\boldface\normalboldface + +\unexpanded\def\swaptypeface + {\relax + \ifx\fontalternative\c!it \tf \else + \ifx\fontalternative\c!sl \tf \else + \ifx\fontalternative\c!bf \emphasisboldface \else + \ifx\fontalternative\c!bs \bf \else + \ifx\fontalternative\c!bi \bf \else + \emphasistypeface \fi\fi\fi\fi\fi} %D To be set with the default body font environment: \type %D {em} being \type {slanted} or \type {italic}. +% maybe a \csname...\fontalternative\endcsname + \newconditional\emneeded \newtoks\everyemphasized @@ -3238,8 +3263,7 @@ \emphasistypeface \fi\fi\fi\fi\fi \the\everyemphasized - \ifconditional\emneeded\relax - \else + \ifconditional\emneeded\else \expandafter\aftergroup \fi \emphasiscorrection} @@ -3250,7 +3274,7 @@ % \setupbodyfontenvironment [default] [em={\italic\color[red]}] %D The next feature was not present in previous versions. It -%D takes care of \type {\em \bf ...} sitiations. +%D takes care of \type {\em \bf ...} situations. 
\def\setemphasisboldface {\let\savedemphasisboldface\bf @@ -3274,10 +3298,10 @@ \let\italiccorrection=\/ % tex primitive -\def\emphasiscorrection - {\ifhmode - \expandafter\emphasislook - \fi} +\def\emphasiscorrection % not in raw alignment groups, else omit problem + {\ifhmode\ifnum\currentgrouptype=\@@aligngroup\else + \expandafter\expandafter\expandafter\emphasislook + \fi\fi} \def\emphasislook {\begingroup diff --git a/tex/context/base/font-otn.lua b/tex/context/base/font-otn.lua index 41169618f..29fed2944 100644 --- a/tex/context/base/font-otn.lua +++ b/tex/context/base/font-otn.lua @@ -239,7 +239,7 @@ local contextsetups = specifiers.contextsetups local contextnumbers = specifiers.contextnumbers local contextmerged = specifiers.contextmerged --- we cannot optimize with "start = first_character(head)" because then we don't +-- we cannot optimize with "start = first_glyph(head)" because then we don't -- know which rlmode we're in which messes up cursive handling later on -- -- head is always a whatsit so we can safely assume that head is not changed diff --git a/tex/context/base/font-syn.lua b/tex/context/base/font-syn.lua index a0dbd4f7e..c6f7b0393 100644 --- a/tex/context/base/font-syn.lua +++ b/tex/context/base/font-syn.lua @@ -57,7 +57,7 @@ names.cache = containers.define("fonts","data",names.version,true)

A few helpers.

--ldx]]-- -local P, C, Cc, Cs, Carg = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Carg +local P, C, Cc, Cs = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cs -- what to do with 'thin' diff --git a/tex/context/base/l-lpeg.lua b/tex/context/base/l-lpeg.lua index ff49101eb..940652ee3 100644 --- a/tex/context/base/l-lpeg.lua +++ b/tex/context/base/l-lpeg.lua @@ -10,6 +10,9 @@ local lpeg = require("lpeg") local type = type +-- Beware, we predefine a bunch of patterns here and one reason for doing so +-- is that we get consistent behaviour in some of the visualizers. + lpeg.patterns = lpeg.patterns or { } -- so that we can share local patterns = lpeg.patterns @@ -26,19 +29,38 @@ local alwaysmatched = P(true) patterns.anything = anything patterns.endofstring = endofstring patterns.beginofstring = alwaysmatched +patterns.alwaysmatched = alwaysmatched local digit, sign = R('09'), S('+-') local cr, lf, crlf = P("\r"), P("\n"), P("\r\n") +local newline = crlf + cr + lf local utf8next = R("\128\191") local escaped = P("\\") * anything local squote = P("'") local dquote = P('"') +local space = P(" ") + +patterns.somecontent = (anything - newline - space)^1 +patterns.beginline = #(1-newline) + +local utfbom_32_be = P('\000\000\254\255') +local utfbom_32_le = P('\255\254\000\000') +local utfbom_16_be = P('\255\254') +local utfbom_16_le = P('\254\255') +local utfbom_8 = P('\239\187\191') +local utfbom = utfbom_32_be + utfbom_32_le + + utfbom_16_be + utfbom_16_le + + utfbom_8 +local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le" + + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le" + + utfbom_8 / "utf-8" + alwaysmatched / "unknown" patterns.utf8one = R("\000\127") patterns.utf8two = R("\194\223") * utf8next patterns.utf8three = R("\224\239") * utf8next * utf8next patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next -patterns.utfbom = P('\000\000\254\255') + P('\255\254\000\000') + P('\255\254') + P('\254\255') + P('\239\187\191') +patterns.utfbom = utfbom +patterns.utftype = utftype local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false) @@ -64,24 +86,30 @@ patterns.hexadecimal = P("0x") * R("09","AF","af")^1 patterns.lowercase = R("az") patterns.uppercase = R("AZ") patterns.letter = patterns.lowercase + patterns.uppercase -patterns.space = P(" ") +patterns.space = space patterns.tab = P("\t") patterns.spaceortab = patterns.space + patterns.tab patterns.eol = S("\n\r") patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto) -patterns.newline = crlf + cr + lf -patterns.nonspace = 1 - patterns.space +patterns.newline = newline +patterns.emptyline = newline^1 patterns.nonspacer = 1 - patterns.spacer patterns.whitespace = patterns.eol + patterns.spacer patterns.nonwhitespace = 1 - patterns.whitespace +patterns.equal = P("=") patterns.comma = P(",") patterns.commaspacer = P(",") * patterns.spacer^0 patterns.period = P(".") +patterns.colon = P(":") +patterns.semicolon = P(";") +patterns.underscore = P("_") patterns.escaped = escaped patterns.squote = squote patterns.dquote = dquote -patterns.undouble = (dquote/"") * ((escaped + (1-dquote))^0) * (dquote/"") -patterns.unsingle = (squote/"") * ((escaped + (1-squote))^0) * (squote/"") +patterns.nosquote = (escaped + (1-squote))^0 +patterns.nodquote = (escaped + (1-dquote))^0 +patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"") +patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"") 
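
The BOM patterns defined above are shared with l-unicode.lua further down, where unicode.filetype simply matches lpeg.patterns.utftype against the start of the data to decide which converter to run. A small sketch of what the pattern yields; nothing in it is part of the patch:

-- sketch: classifying data by its (optional) byte order mark
local lpegmatch = lpeg.match
local utftype   = lpeg.patterns.utftype

print(lpegmatch(utftype,"\239\187\191some utf-8 with a bom")) -- "utf-8"
print(lpegmatch(utftype,"plain ascii, no bom"))               -- "unknown"

-- wrapped the same way as in l-unicode.lua below:
-- unicode.filetype(data) --> "utf-8" | "utf-16-be" | "utf-16-le" | "utf-32-be" | "utf-32-le" | "unknown"
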
patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble patterns.unspacer = ((patterns.spacer^1)/"")^0 @@ -104,19 +132,6 @@ function lpeg.splitter(pattern, action) return (((1-P(pattern))^1)/action+1)^0 end -local spacing = patterns.spacer^0 * patterns.newline -- sort of strip -local empty = spacing * Cc("") -local nonempty = Cs((1-spacing)^1) * spacing^-1 -local content = (empty + nonempty)^1 - -local capture = Ct(content^0) - -function string.splitlines(str) - return match(capture,str) -end - -patterns.textline = content - local splitters_s, splitters_m = { }, { } local function splitat(separator,single) @@ -163,6 +178,35 @@ function string.split(str,separator) return match(c,str) end +local spacing = patterns.spacer^0 * newline -- sort of strip +local empty = spacing * Cc("") +local nonempty = Cs((1-spacing)^1) * spacing^-1 +local content = (empty + nonempty)^1 + +patterns.textline = content + +--~ local linesplitter = Ct(content^0) +--~ +--~ function string.splitlines(str) +--~ return match(linesplitter,str) +--~ end + +local linesplitter = Ct(splitat(newline)) + +patterns.linesplitter = linesplitter + +function string.splitlines(str) + return match(linesplitter,str) +end + +local utflinesplitter = utfbom^-1 * Ct(splitat(newline)) + +patterns.utflinesplitter = utflinesplitter + +function string.utfsplitlines(str) + return match(utflinesplitter,str) +end + --~ lpeg.splitters = cache -- no longer public local cache = { } diff --git a/tex/context/base/l-pdfview.lua b/tex/context/base/l-pdfview.lua index c16df036c..85545cd10 100644 --- a/tex/context/base/l-pdfview.lua +++ b/tex/context/base/l-pdfview.lua @@ -26,7 +26,8 @@ local allcalls = { } if os.type == "windows" then - opencalls['okular'] = 'start "test" "c:/program files/kde/bin/okular.exe" --unique' -- todo: get focus +--~ opencalls['okular'] = 'start "test" "c:/program files/kde/bin/okular.exe" --unique' -- todo: get focus + opencalls['okular'] = 'start "test" "c:/data/system/kde/bin/okular.exe" --unique' -- todo: get focus else opencalls['okular'] = 'okular --unique' end diff --git a/tex/context/base/l-unicode.lua b/tex/context/base/l-unicode.lua index a97f01d1e..445909d88 100644 --- a/tex/context/base/l-unicode.lua +++ b/tex/context/base/l-unicode.lua @@ -36,6 +36,8 @@ utf = utf or unicode.utf8 local concat, utfchar, utfgsub = table.concat, utf.char, utf.gsub local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format +local utfsplitlines = string.utfsplitlines + -- 0 EF BB BF UTF-8 -- 1 FF FE UTF-16-little-endian -- 2 FE FF UTF-16-big-endian @@ -80,111 +82,234 @@ function unicode.utftype(f) end end -function unicode.utf16_to_utf8(str, endian) -- maybe a gsub is faster or an lpeg - local result, tmp, n, m, p, r, t = { }, { }, 0, 0, 0, 0, 0 -- we reuse tmp - -- lf | cr | crlf / (cr:13, lf:10) - local function doit() - if n == 10 then - if p ~= 13 then - if t > 0 then +--~ function unicode.utf16_to_utf8(str, endian) -- maybe a gsub is faster or an lpeg +--~ local result, tmp, n, m, p, r, t = { }, { }, 0, 0, 0, 0, 0 -- we reuse tmp +--~ -- lf | cr | crlf / (cr:13, lf:10) +--~ local function doit() -- inline this +--~ if n == 10 then +--~ if p ~= 13 then +--~ if t > 0 then +--~ r = r + 1 +--~ result[r] = concat(tmp,"",1,t) +--~ t = 0 +--~ end +--~ p = 0 +--~ end +--~ elseif n == 13 then +--~ if t > 0 then +--~ r = r + 1 +--~ result[r] = concat(tmp,"",1,t) +--~ t = 0 +--~ end +--~ p = n +--~ else +--~ t = t + 1 +--~ tmp[t] = 
utfchar(n) +--~ p = 0 +--~ end +--~ end +--~ for l,r in bytepairs(str) do +--~ if r then +--~ if endian then -- maybe make two loops +--~ n = 256*l + r +--~ else +--~ n = 256*r + l +--~ end +--~ if m > 0 then +--~ n = (m-0xD800)*0x400 + (n-0xDC00) + 0x10000 +--~ m = 0 +--~ doit() +--~ elseif n >= 0xD800 and n <= 0xDBFF then +--~ m = n +--~ else +--~ doit() +--~ end +--~ end +--~ end +--~ if t > 0 then +--~ r = r + 1 +--~ result[r] = concat(tmp,"",1,t) -- we reused tmp, hence t +--~ end +--~ return result +--~ end + +--~ function unicode.utf32_to_utf8(str, endian) +--~ local result, tmp, n, m, p, r, t = { }, { }, 0, -1, 0, 0, 0 +--~ -- lf | cr | crlf / (cr:13, lf:10) +--~ local function doit() -- inline this +--~ if n == 10 then +--~ if p ~= 13 then +--~ if t > 0 then +--~ r = r + 1 +--~ result[r] = concat(tmp,"",1,t) +--~ t = 0 +--~ end +--~ p = 0 +--~ end +--~ elseif n == 13 then +--~ if t > 0 then +--~ r = r + 1 +--~ result[r] = concat(tmp,"",1,t) +--~ t = 0 +--~ end +--~ p = n +--~ else +--~ t = t + 1 +--~ tmp[t] = utfchar(n) +--~ p = 0 +--~ end +--~ end +--~ for a,b in bytepairs(str) do +--~ if a and b then +--~ if m < 0 then +--~ if endian then -- maybe make two loops +--~ m = 256*256*256*a + 256*256*b +--~ else +--~ m = 256*b + a +--~ end +--~ else +--~ if endian then -- maybe make two loops +--~ n = m + 256*a + b +--~ else +--~ n = m + 256*256*256*b + 256*256*a +--~ end +--~ m = -1 +--~ doit() +--~ end +--~ else +--~ break +--~ end +--~ end +--~ if #tmp > 0 then +--~ r = r + 1 +--~ result[r] = concat(tmp,"",1,t) -- we reused tmp, hence t +--~ end +--~ return result +--~ end + +local function utf16_to_utf8_be(t) + if type(t) == "string" then + t = utfsplitlines(str) + end + local result = { } -- we reuse result + for i=1,#t do + local r, more = 0, 0 + for left, right in bytepairs(t[i]) do + if right then + local now = 256*left + right + if more > 0 then + now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 + more = 0 + r = r + 1 + result[r] = utfchar(now) + elseif now >= 0xD800 and now <= 0xDBFF then + more = now + else r = r + 1 - result[r] = concat(tmp,"",1,t) - t = 0 + result[r] = utfchar(now) end - p = 0 - end - elseif n == 13 then - if t > 0 then - r = r + 1 - result[r] = concat(tmp,"",1,t) - t = 0 end - p = n - else - t = t + 1 - tmp[t] = utfchar(n) - p = 0 end + t[i] = concat(result,"",1,r) -- we reused tmp, hence t end - for l,r in bytepairs(str) do - if r then - if endian then - n = 256*l + r - else - n = 256*r + l - end - if m > 0 then - n = (m-0xD800)*0x400 + (n-0xDC00) + 0x10000 - m = 0 - doit() - elseif n >= 0xD800 and n <= 0xDBFF then - m = n - else - doit() + return t +end + +local function utf16_to_utf8_le(t) + if type(t) == "string" then + t = utfsplitlines(str) + end + local result = { } -- we reuse result + for i=1,#t do + local r, more = 0, 0 + for left, right in bytepairs(t[i]) do + if right then + local now = 256*right + left + if more > 0 then + now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 + more = 0 + r = r + 1 + result[r] = utfchar(now) + elseif now >= 0xD800 and now <= 0xDBFF then + more = now + else + r = r + 1 + result[r] = utfchar(now) + end end end + t[i] = concat(result,"",1,r) -- we reused tmp, hence t end - if t > 0 then - r = r + 1 - result[r] = concat(tmp,"",1,t) - end - return result + return t end -function unicode.utf32_to_utf8(str, endian) - local result, tmp, n, m, p, r, t = { }, { }, 0, -1, 0, 0, 0 - -- lf | cr | crlf / (cr:13, lf:10) - local function doit() - if n == 10 then - if p ~= 13 then - if t > 0 then +local function 
utf32_to_utf8_be(str) + if type(t) == "string" then + t = utfsplitlines(str) + end + local result = { } -- we reuse result + for i=1,#t do + local r, more = 0, -1 + for a,b in bytepairs(str) do + if a and b then + if more < 0 then + more = 256*256*256*a + 256*256*b + else r = r + 1 - result[r] = concat(tmp,"",1,t) - t = 0 + result[t] = utfchar(more + 256*a + b) + more = -1 end - p = 0 - end - elseif n == 13 then - if t > 0 then - r = r + 1 - result[r] = concat(tmp,"",1,t) - t = 0 + else + break end - p = n - else - t = t + 1 - tmp[t] = utfchar(n) - p = 0 end + t[i] = concat(result,"",1,r) + end + return result +end + +local function utf32_to_utf8_le(str) + if type(t) == "string" then + t = utfsplitlines(str) end - for a,b in bytepairs(str) do - if a and b then - if m < 0 then - if endian then - m = 256*256*256*a + 256*256*b + local result = { } -- we reuse result + for i=1,#t do + local r, more = 0, -1 + for a,b in bytepairs(str) do + if a and b then + if more < 0 then + more = 256*b + a else - m = 256*b + a + r = r + 1 + result[t] = utfchar(more + 256*256*256*b + 256*256*a) + more = -1 end else - if endian then - n = m + 256*a + b - else - n = m + 256*256*256*b + 256*256*a - end - m = -1 - doit() + break end - else - break end - end - if #tmp > 0 then - r = r + 1 - result[r] = concat(tmp,"",1,t) + t[i] = concat(result,"",1,r) end return result end +unicode.utf32_to_utf8_be = utf32_to_utf8_be +unicode.utf32_to_utf8_le = utf32_to_utf8_le +unicode.utf16_to_utf8_be = utf16_to_utf8_be +unicode.utf16_to_utf8_le = utf16_to_utf8_le + +function unicode.utf8_to_utf8(t) + return type(t) == "string" and utfsplitlines(t) or t +end + +function unicode.utf16_to_utf8(t,endian) + return endian and utf16_to_utf8_be(t) or utf16_to_utf8_le(t) or t +end + +function unicode.utf32_to_utf8(t,endian) + return endian and utf32_to_utf8_be(t) or utf32_to_utf8_le(t) or t +end + local function little(c) local b = byte(c) if b < 0x10000 then @@ -225,3 +350,7 @@ function unicode.utfcodes(str) end --~ print(unicode.utfcodes(str)) + +function unicode.filetype(data) + return data and lpeg.match(lpeg.patterns.utftype,data) or "unknown" +end diff --git a/tex/context/base/lang-ini.lua b/tex/context/base/lang-ini.lua index 507d59a9a..8744bff95 100644 --- a/tex/context/base/lang-ini.lua +++ b/tex/context/base/lang-ini.lua @@ -16,6 +16,7 @@ if not modules then modules = { } end modules ['lang-ini'] = { --~ lang:hyphenation(string) string = lang:hyphenation() lang:clear_hyphenation() +local type, tonumber = type, tonumber local utf = unicode.utf8 local utfbyte = utf.byte local format, gsub = string.format, string.gsub @@ -160,6 +161,10 @@ function languages.installed(separator) context(concat(table.sortedkeys(registered),separator or ",")) end +function languages.current(n) + return numbers[n and tonumber(n) or tex.language] +end + function languages.associate(tag,script,language) -- not yet used associated[tag] = { script, language } end diff --git a/tex/context/base/lang-ini.mkiv b/tex/context/base/lang-ini.mkiv index 0a0cbf016..a7371e298 100644 --- a/tex/context/base/lang-ini.mkiv +++ b/tex/context/base/lang-ini.mkiv @@ -13,6 +13,8 @@ %D This module needs a further cleanup (real split between ii/iv). +% \ctxlua{tex.sprint(languages.numbers[tex.count.mainlanguagenumber])} + %D This module implements the (for the moment still simple) %D multi||language support of \CONTEXT, which should not be %D confused with the multi||lingual interface. 
This support diff --git a/tex/context/base/luat-lib.mkiv b/tex/context/base/luat-lib.mkiv index 2f73fae3d..b02a88b51 100644 --- a/tex/context/base/luat-lib.mkiv +++ b/tex/context/base/luat-lib.mkiv @@ -62,6 +62,7 @@ \registerctxluafile{luat-exe}{1.001} \registerctxluafile{luat-iop}{1.001} \registerctxluafile{luat-bwc}{1.001} +\registerctxluafile{luat-mac}{1.001} \registerctxluafile{lxml-tab}{1.001} \registerctxluafile{lxml-lpt}{1.001} diff --git a/tex/context/base/luat-mac.lua b/tex/context/base/luat-mac.lua new file mode 100644 index 000000000..f4f7779da --- /dev/null +++ b/tex/context/base/luat-mac.lua @@ -0,0 +1,162 @@ +if not modules then modules = { } end modules ['luat-mac'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local P, V, S, R, C, Cs = lpeg.P, lpeg.V, lpeg.S, lpeg.R, lpeg.C, lpeg.Cs +local lpegmatch, patterns = lpeg.match, lpeg.patterns + +local insert, remove = table.insert, table.remove +local rep = string.rep +local setmetatable = setmetatable + +local report_macros = logs.new("macros") + +local stack, top, n, hashes = { }, nil, 0, { } + +local function set(s) + if top then + n = n + 1 + if n > 9 then + report_macros("number of arguments > 9, ignoring %s",s) + else + local ns = #stack + local h = hashes[ns] + if not h then + h = rep("#",ns) + hashes[ns] = h + end + m = h .. n + top[s] = m + return m + end + end +end + +local function get(s) + local m = top and top[s] or s + return m +end + +local function push() + top = { } + n = 0 + local s = stack[#stack] + if s then + setmetatable(top,{ __index = s }) + end + insert(stack,top) +end + +local function pop() + top = remove(stack) +end + +local leftbrace = P("{") +local rightbrace = P("}") +local escape = P("\\") + +local space = patterns.space +local spaces = space^1 +local newline = patterns.newline +local nobrace = 1 - leftbrace - rightbrace + +local name = R("AZ","az")^1 +local variable = P("#") * name +local escapedname = escape * name +local definer = escape * (P("def") + P("egdx") * P("def")) +local startcode = P("\\starttexdefinition") +local stopcode = P("\\stoptexdefinition") +local anything = patterns.anything +local always = patterns.alwaysmatched + +local pushlocal = always / push +local poplocal = always / pop +local declaration = variable / set +local identifier = variable / get + +local grammar = { "converter", + texcode = pushlocal + * startcode + * spaces + * name + * spaces + * (declaration + (1 - newline - space))^0 + * V("texbody") + * stopcode + * poplocal, + texbody = ( V("definition") + + V("braced") + + identifier + + (1 - stopcode) + )^0, + definition = pushlocal + * definer + * escapedname + * (declaration + (1-leftbrace))^0 + * V("braced") + * poplocal, + braced = leftbrace + * ( V("definition") + + V("texcode") + + V("braced") + + identifier + + nobrace + )^0 + * rightbrace, + + pattern = V("definition") + V("texcode") + anything, + + converter = V("pattern")^1, +} + +local parser = Cs(grammar) + +local checker = P("%") * (1 - newline - P("macros"))^0 + * P("macros") * space^0 * P("=") * space^0 * C(patterns.letter^1) + +-- maybe namespace + +local macros = { } resolvers.macros = macros + +function macros.preprocessed(data) + return lpegmatch(parser,data) +end + +function macros.convertfile(oldname,newname) + local data = resolvers.loadtexfile(oldname) + data = interfaces.preprocessed(data) or "" + 
io.savedata(newname,data) +end + +function macros.version(data) + return lpegmatch(checker,data) +end + +local function handler(protocol,name,cachename) + local hashed = url.hashed(name) + local path = hashed.path + if path and path ~= "" then + local data = resolvers.loadtexfile(path) + data = lpegmatch(parser,data) or "" + io.savedata(cachename,data) + end + return cachename +end + +resolvers.schemes.install('mkvi',handler,1) -- this will cache ! + +function macros.processmkvi(str,filename) + if file.suffix(filename) == "mkvi" or lpegmatch(checker,str) == "mkvi" then + return lpegmatch(parser,str) or str + else + return str + end +end + +utilities.sequencers.appendaction(resolvers.openers.textfileactions,"system","resolvers.macros.processmkvi") +-- utilities.sequencers.disableaction(resolvers.openers.textfileactions,"resolvers.macros.processmkvi") + diff --git a/tex/context/base/lxml-dir.lua b/tex/context/base/lxml-dir.lua index bcb846bba..ecf7e3eb7 100644 --- a/tex/context/base/lxml-dir.lua +++ b/tex/context/base/lxml-dir.lua @@ -41,7 +41,7 @@ local function load_setup(filename) if fullname ~= "" then filename = fullname end - local collection = xmlparseapply({ getid(xml.load(filename)) },"directive") + local collection = xml.applylpath({ getid(xml.load(filename)) },"directive") -- is { } needed ? if collection then local valid = 0 for i=1,#collection do diff --git a/tex/context/base/math-ini.lua b/tex/context/base/math-ini.lua index b4716d06a..d84e30189 100644 --- a/tex/context/base/math-ini.lua +++ b/tex/context/base/math-ini.lua @@ -100,8 +100,14 @@ end local function mathtopaccent(class,family,slot) return format('\\Umathaccent "%X "%X "%X ',0,family,slot) -- no class end -local function mathbotaccent(class,family,slot) - return format('\\Umathbotaccent "%X "%X "%X ',0,family,slot) -- no class +if tex.luatexversion < 65 then -- this will disappear at 0.70 + local function mathbotaccent(class,family,slot) + return format('\\Umathbotaccent "%X "%X "%X ',0,family,slot) -- no class + end +else + local function mathbotaccent(class,family,slot) + return format('\\Umathaccent bottom "%X "%X "%X ',0,family,slot) -- no class + end end local function mathtopdelimiter(class,family,slot) return format('\\Udelimiterover "%X "%X ',family,slot) -- no class diff --git a/tex/context/base/math-ini.mkiv b/tex/context/base/math-ini.mkiv index e057e1cc6..bab8792f7 100644 --- a/tex/context/base/math-ini.mkiv +++ b/tex/context/base/math-ini.mkiv @@ -681,4 +681,8 @@ \def\mathoptext#1{\mathop{\text{#1}}} +% for a while: + +\def\Umathbotaccent{\Umathaccent bottom } + \protect \endinput diff --git a/tex/context/base/mlib-pdf.lua b/tex/context/base/mlib-pdf.lua index 5ca554292..80ab9b1d0 100644 --- a/tex/context/base/mlib-pdf.lua +++ b/tex/context/base/mlib-pdf.lua @@ -315,7 +315,7 @@ function metapost.flush(result,flusher,askedfig) -- pdf flusher, table en dan co postscript = object.postscript, } -- - local before, inbetween, after = nil, nil, nil + local before, inbetween, after, grouped = nil, nil, nil, false -- local cs, cr = currentobject.color, nil -- todo document why ... 
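
Back to the new luat-mac.lua above: its lpeg grammar rewrites named macro arguments into the classic numbered ones, and processmkvi hooks that rewrite into the text file actions so that files with an mkvi suffix (or a matching "% macros=mkvi" line) are converted on the fly. A sketch of the intended effect on an invented macro; the shown result is an expectation, not output taken from the patch:

-- sketch: feeding a named-argument definition through the preprocessor
local preprocessed = resolvers.macros.preprocessed

local original = [[\def\MyQuote#text{\quotation{#text}}]]
print(preprocessed(original))
-- expected to come out along the lines of:
--   \def\MyQuote#1{\quotation{#1}}
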
@@ -329,7 +329,7 @@ function metapost.flush(result,flusher,askedfig) -- pdf flusher, table en dan co -- move test to function local special = metapost.specials[prescript] if special then - currentobject, before, inbetween, after = special(currentobject.postscript,currentobject,t,flusher) + currentobject, before, inbetween, after, grouped = special(currentobject.postscript,currentobject,t,flusher) objecttype = currentobject.type end end @@ -339,7 +339,9 @@ function metapost.flush(result,flusher,askedfig) -- pdf flusher, table en dan co t[#t+1], cr = colorconverter(cs) end -- - if before then currentobject, t = before() end + if before then + currentobject, t = before() + end local ml = currentobject.miterlimit if ml and ml ~= miterlimit then miterlimit = ml @@ -426,7 +428,13 @@ function metapost.flush(result,flusher,askedfig) -- pdf flusher, table en dan co if cr then t[#t+1] = cr end - if after then currentobject, t = after() end + if after then + currentobject, t = after() + end + if grouped then + -- can be qQ'd so changes can end up in groups + miterlimit, linecap, linejoin, dashed = -1, -1, -1, false + end end end end diff --git a/tex/context/base/mlib-pps.lua b/tex/context/base/mlib-pps.lua index 6fae400fc..396853d3e 100644 --- a/tex/context/base/mlib-pps.lua +++ b/tex/context/base/mlib-pps.lua @@ -245,7 +245,7 @@ function specials.fg(specification,object,result,flusher) -- graphics object.path = nil return object, { } end - return { } , before, nil, nil -- replace { } by object for tracing + return { } , before, nil, nil, true -- replace { } by object for tracing end function specials.ps(specification,object,result) -- positions @@ -257,7 +257,7 @@ function specials.ps(specification,object,result) -- positions x = x - metapost.llx y = metapost.ury - y context.MPLIBpositionwhd(label,x,y,w,h) - return { }, nil, nil, nil + return { }, nil, nil, nil, true end local nofshades = 0 -- todo: hash resources, start at 1000 in order not to clash with older @@ -347,7 +347,7 @@ local function resources(object,name,flusher,result) return object, result end object.color, object.type = nil, nil - return object, before, nil, after + return object, before, nil, after, true end -- todo: we need a way to move/scale @@ -361,7 +361,7 @@ function specials.cs(specification,object,result,flusher) -- spot colors? 
local coordinates = { tonumber(t[5]), tonumber(t[6]), tonumber(t[7]), tonumber(t[9]), tonumber(t[10]), tonumber(t[11]) } local ca, cb, colorspace, name = checkandconvert(ca,cb) lpdf.circularshade(name,domain,ca,cb,1,colorspace,coordinates) -- backend specific (will be renamed) - return resources(object,name,flusher,result) -- object, before, nil, after + return resources(object,name,flusher,result) -- object, before, nil, after, grouped end function specials.ls(specification,object,result,flusher) @@ -373,7 +373,7 @@ function specials.ls(specification,object,result,flusher) local coordinates = { tonumber(t[5]), tonumber(t[6]), tonumber(t[8]), tonumber(t[9]) } local ca, cb, colorspace, name = checkandconvert(ca,cb) lpdf.linearshade(name,domain,ca,cb,1,colorspace,coordinates) -- backend specific (will be renamed) - return resources(object,name,flusher,result) -- object, before, nil, after + return resources(object,name,flusher,result) -- object, before, nil, after, grouped end -- no need for a before here @@ -428,7 +428,7 @@ function specials.tf(specification,object) context.MPLIBsettext(n,str) metapost.multipass = true end - return { }, nil, nil, nil + return { }, nil, nil, nil, true end local factor = 65536*(7227/7200) @@ -473,9 +473,9 @@ function specials.ts(specification,object,result,flusher) result = { "Q" } return object, result end - return { }, before, nil, nil -- replace { } by object for tracing + return { }, before, nil, nil, true -- replace { } by object for tracing else - return { }, nil, nil, nil -- replace { } by object for tracing + return { }, nil, nil, nil, true -- replace { } by object for tracing end end @@ -804,7 +804,7 @@ function specials.gt(specification,object) -- number, so that we can reorder graphics[#graphics+1] = format("\\MPLIBgraphictext{%s}",specification) metapost.intermediate.needed = true metapost.multipass = true - return { }, nil, nil, nil + return { }, nil, nil, nil, true end function metapost.intermediate.actions.makempy() diff --git a/tex/context/base/mult-aux.mkiv b/tex/context/base/mult-aux.mkiv index 628914b6d..c86c522cb 100644 --- a/tex/context/base/mult-aux.mkiv +++ b/tex/context/base/mult-aux.mkiv @@ -57,12 +57,14 @@ % todo: \def\detokenized...parameter#1{\detokenize\expandafter\expandafter\expandafter{\csname#1#2\endcsname}} % always root -\unexpanded\def\doinstallparameterhandler#1#2#3#4#5#6#7% +\unexpanded\def\doinstallparameterhandler#1#2#3#4#5#6#7#8#9% {\def#3##1{\csname#4{#1#2}{##1}\endcsname}% \def#4##1##2{\ifcsname##1##2\endcsname##1##2\else\expandafter#5\csname##1\s!parent\endcsname{##2}\fi}% \def#5##1##2{\ifx##1\relax\s!empty\else#4{##1}{##2}\fi}% \def#6##1##2{\csname#4{#1##1}{##2}\endcsname}% - \def#7##1{\detokenize\expandafter\expandafter\expandafter{\csname#1##1\endcsname}}} % always root + \def#7##1{\detokenize\expandafter\expandafter\expandafter{\csname#1##1\endcsname}}% always root + \def#8{\dosetvalue{#1}}% ##1 {##2} (braces are mandate) + \def#9{\doletvalue{#1}}}% ##1 ##2 \unexpanded\def\installparameterhandler#1#2% {%\message{\detokenize{#1}/\detokenize{#2}}% @@ -74,7 +76,9 @@ \expandafter\noexpand\csname do#2parameter\endcsname \expandafter\noexpand\csname do#2parentparameter\endcsname \expandafter\noexpand\csname named#2parameter\endcsname - \expandafter\noexpand\csname detokenized#2parameter\endcsname}} + \expandafter\noexpand\csname detokenized#2parameter\endcsname + \expandafter\noexpand\csname doset#2parameter\endcsname + \expandafter\noexpand\csname dolet#2parameter\endcsname}} 
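
The mult-cld.lua changes a little further down pass the parent table into the shared writer, indexer and caller functions, so that context, context.delayed, context.nested, context.verbatim and context.metafun can each carry their own flushing behaviour. As a usage sketch of two of those helpers; the \framed call and its settings are only an example:

-- sketch: typical use of the helpers whose plumbing changes below

-- context.delayed builds a closure; nothing is flushed until it is called
local flushlater = context.delayed.framed( { frame = "on" }, "flushed later" )
flushlater() -- at this point \framed[frame=on]{flushed later} gets flushed

-- context.nested collects the would-be flushed text and returns it as a string
local s = context.nested.framed( { frame = "on" }, "kept as a string" )
context(s) -- flush it explicitly when needed
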
\unexpanded\def\doinstallparameterhashhandler#1#2#3#4#5% {\def#3##1{#4{#1#2}{##1}}% @@ -104,18 +108,38 @@ \expandafter\noexpand\csname doset#2attributes\endcsname \expandafter\noexpand\csname #2parameterhash\endcsname}} +% \unexpanded\def\doinstalldefinehandler#1#2#3#4#5#6#7% +% {\unexpanded\def#2{\dotripleempty#5}% +% \newtoks#6% +% \newtoks#7% +% \def#5[##1][##2][##3]% [child][parent][settings] +% {\edef#4{##1}% [child][settings] +% \the#6% predefine [child] +% \ifsecondargument +% \doifassignmentelse{##2} +% {\getparameters[#1#4][\s!parent=#3,##2]} +% {\getparameters[#1#4][\s!parent=#1##2,##3]}% +% \else +% \getparameters[#1#4][\s!parent=#3]% +% \fi +% \the#7}} + \unexpanded\def\doinstalldefinehandler#1#2#3#4#5#6#7% {\unexpanded\def#2{\dotripleempty#5}% \newtoks#6% \newtoks#7% - \def#5[##1][##2][##3]% - {\edef#4{##1}% - \the#6% predefine - \ifsecondargument + \def#5[##1][##2][##3]% [child][parent][settings] + {\edef#4{##1}% % [child] [settings] + \the#6% predefine % [child][parent] + \ifthirdargument % [child] + \getparameters[#1#4][\s!parent=#1##2,##3]% + \else\ifsecondargument \doifassignmentelse{##2} {\getparameters[#1#4][\s!parent=#3,##2]} - {\getparameters[#1#4][\s!parent=#1##2,##3]}% - \fi + {\getparameters[#1#4][\s!parent=#1##2]}% + \else + \getparameters[#1#4][\s!parent=#3]% + \fi\fi \the#7}} \unexpanded\def\installdefinehandler#1#2#3% @@ -125,14 +149,14 @@ \expandafter\noexpand\csname define#2\endcsname {\noexpand#3}% root \expandafter\noexpand\csname current#2\endcsname - \expandafter\noexpand\csname dodefine#2\endcsname + \expandafter\noexpand\csname d@define#2\endcsname \expandafter\noexpand\csname everypreset#2\endcsname \expandafter\noexpand\csname everydefine#2\endcsname}} \unexpanded\def\doinstallsetuphandler#1#2#3#4#5% {\unexpanded\def#2{\dodoubleempty#4}% \newtoks#5% - \def#4[##1][##2]% + \def#4[##1][##2]% maybe helper {\ifsecondargument \def\docommand####1% we will have a simple one as well {\edef#3{####1}% @@ -151,7 +175,7 @@ {\noexpand#1}% \??aa \expandafter\noexpand\csname setup#2\endcsname \expandafter\noexpand\csname current#2\endcsname - \expandafter\noexpand\csname dosetup#2\endcsname + \expandafter\noexpand\csname d@setup#2\endcsname \expandafter\noexpand\csname everysetup#2\endcsname}} \unexpanded\def\installcommandhandler#1#2#3% \??self name \??parent (can be \??self) diff --git a/tex/context/base/mult-cld.lua b/tex/context/base/mult-cld.lua index 0542c5fcf..1154eefe3 100644 --- a/tex/context/base/mult-cld.lua +++ b/tex/context/base/mult-cld.lua @@ -19,6 +19,8 @@ if not modules then modules = { } end modules ['mult-cld'] = { -- tflush needs checking ... 
sort of weird that it's not a table +-- __flushlines is an experiment and rather ugly so it will go away + context = context or { } local context = context @@ -130,40 +132,41 @@ local emptyline = space^0 * newline^2 local endofline = space^0 * newline * space^0 local simpleline = endofline * lpeg.P(-1) -function lpeg.texlinesplitter(f_content,f_endofline,f_emptyline,f_simpleline) - local splitlines = - simpleline / (f_simpleline or f_endofline) - + ( - emptyline / f_emptyline - + endofline / f_endofline - + content / f_content - )^0 - return function(str) return lpegmatch(splitlines,str) end -end - -local function f_content(s) +local function n_content(s) flush(contentcatcodes,s) end -local function f_endofline() +local function n_endofline() texsprint(" ") end -local function f_emptyline() +local function n_emptyline() texprint("") end -local function f_simpleline() +local function n_simpleline() texprint("") end -local flushlines = lpeg.texlinesplitter(f_content,f_endofline,f_emptyline,f_simpleline) +function lpeg.texlinesplitter(f_content,f_endofline,f_emptyline,f_simpleline) + local splitlines = + simpleline / (f_simpleline or n_simpleline) + + ( + emptyline / (f_emptyline or n_emptyline) + + endofline / (f_endofline or n_emptyline) + + content / (f_content or n_content) + )^0 + return function(str) return lpegmatch(splitlines,str) end +end -context.flushlines = flushlines -- maybe context.helpers.flushtexlines +local flushlines = lpeg.texlinesplitter(n_content,n_endofline,n_emptyline,n_simpleline) + +context.__flushlines = flushlines -- maybe context.helpers.flushtexlines +context.__flush = flush -- -- -- -local function writer(command,first,...) +local function writer(parent,command,first,...) local t = { first, ... } flush(currentcatcodes,command) -- todo: ctx|prt|texcatcodes local direct = false @@ -184,6 +187,7 @@ local function writer(command,first,...) elseif typ == "string" then if processlines and find(ti,"\n") then -- we can check for ti == "\n" flush(currentcatcodes,"{") + local flushlines = parent.__flushlines or flushlines flushlines(ti) flush(currentcatcodes,"}") elseif currentcatcodes == contentcatcodes then @@ -250,28 +254,25 @@ local function writer(command,first,...) trace_context("error: '%s' gets a weird argument '%s'",command,tostring(ti)) end end - if direct then - trace_context("error: direct flushing used in '%s' without following argument",command) - end end local generics = { } context.generics = generics -local function indexer(t,k) +local function indexer(parent,k) local c = "\\" .. (generics[k] or k) local f = function(first,...) if first == nil then flush(currentcatcodes,c) else - return writer(c,first,...) + return writer(parent,c,first,...) end end - t[k] = f + parent[k] = f return f end -local function caller(t,f,a,...) - if not t then +local function caller(parent,f,a,...) + if not parent then -- so we don't need to test in the calling (slower but often no issue) (will go) elseif f ~= nil then local typ = type(f) @@ -279,6 +280,7 @@ local function caller(t,f,a,...) if a then flush(contentcatcodes,format(f,a,...)) -- was currentcatcodes elseif processlines and find(f,"\n") then + local flushlines = parent.__flushlines or flushlines flushlines(f) else flush(contentcatcodes,f) @@ -295,6 +297,7 @@ local function caller(t,f,a,...) elseif typ == "boolean" then if f then if a ~= nil then + local flushlines = parent.__flushlines or flushlines flushlines(f) -- ignore ... maybe some day else @@ -304,7 +307,7 @@ local function caller(t,f,a,...) 
else if a ~= nil then -- no command, same as context(a,...) - writer("",a,...) + writer(parent,"",a,...) else -- ignored end @@ -339,7 +342,7 @@ statistics.register("traced context", function() end end) -local tracedwriter = function(...) +local tracedwriter = function(parent,...) nofwriters = nofwriters + 1 local t, f, n = { "w : " }, flush, 0 flush = function(...) @@ -347,7 +350,7 @@ local tracedwriter = function(...) t[n] = concat({...},"",2) normalflush(...) end - normalwriter(...) + normalwriter(parent,...) flush = f currenttrace(concat(t)) end @@ -376,12 +379,14 @@ local function pushlogger(trace) insert(trace_stack,currenttrace) currenttrace = trace flush, writer = tracedflush, tracedwriter + context.__flush = flush end local function poplogger() currenttrace = remove(trace_stack) if not currenttrace then flush, writer = normalflush, normalwriter + context.__flush = flush end end @@ -393,6 +398,8 @@ local function settracing(v) end end +-- todo: share flushers so that we can define in other files + trackers.register("context.trace",settracing) context.pushlogger = pushlogger @@ -427,58 +434,26 @@ end function context.direct(first,...) if first ~= nil then - return writer("",first,...) - end -end - --- todo: use flush directly - -function context.char(k) -- todo: if catcode == letter or other then just the utf - if type(k) == "table" then - for i=1,#k do - context(format([[\char%s\relax]],k[i])) - end - elseif k then - context(format([[\char%s\relax]],k)) + return writer(context,"",first,...) end end -function context.utfchar(k) - context(utfchar(k)) -end - -function context.chardef(cs,u) - context(format([[\chardef\%s=%s\relax]],k)) -end - -function context.par() - context([[\par]]) -- no need to add {} there -end - -function context.bgroup() - context("{") -end - -function context.egroup() - context("}") -end - -- context.delayed (todo: lines) local delayed = { } context.delayed = delayed -- maybe also store them -local function indexer(t,k) +local function indexer(parent,k) local f = function(...) local a = { ... } return function() return context[k](unpack(a)) end end - t[k] = f + parent[k] = f return f end -local function caller(t,...) +local function caller(parent,...) local a = { ... } return function() return context(unpack(a)) @@ -491,7 +466,7 @@ setmetatable(delayed, { __index = indexer, __call = caller } ) local nested = { } context.nested = nested -local function indexer(t,k) +local function indexer(parent,k) local f = function(...) local t, savedflush, n = { }, flush, 0 flush = function(c,f,s,...) -- catcodes are ignored @@ -502,11 +477,11 @@ local function indexer(t,k) flush = savedflush return concat(t) end - t[k] = f + parent[k] = f return f end -local function caller(t,...) +local function caller(parent,...) local t, savedflush, n = { }, flush, 0 flush = function(c,f,s,...) -- catcodes are ignored n = n + 1 @@ -523,7 +498,7 @@ setmetatable(nested, { __index = indexer, __call = caller } ) local verbatim = { } context.verbatim = verbatim -local function indexer(t,k) +local function indexer(parent,k) local command = context[k] local f = function(...) local savedcatcodes = contentcatcodes @@ -531,14 +506,14 @@ local function indexer(t,k) command(...) contentcatcodes = savedcatcodes end - t[k] = f + parent[k] = f return f end -local function caller(t,...) +local function caller(parent,...) local savedcatcodes = contentcatcodes contentcatcodes = vrbcatcodes - defaultcaller(t,...) + defaultcaller(parent,...) 
contentcatcodes = savedcatcodes end @@ -550,8 +525,8 @@ local metafun = { } context.metafun = metafun local mpdrawing = "\\MPdrawing" -local function caller(t,f,a,...) - if not t then +local function caller(parent,f,a,...) + if not parent then -- skip elseif f then local typ = type(f) @@ -602,19 +577,19 @@ end local delayed = { } metafun.delayed = delayed -local function indexer(t,k) +local function indexer(parent,k) local f = function(...) local a = { ... } return function() return metafun[k](unpack(a)) end end - t[k] = f + parent[k] = f return f end -local function caller(t,...) +local function caller(parent,...) local a = { ... } return function() return metafun(unpack(a)) diff --git a/tex/context/base/mult-ini.mkiv b/tex/context/base/mult-ini.mkiv index b8b1c5387..eddee98a4 100644 --- a/tex/context/base/mult-ini.mkiv +++ b/tex/context/base/mult-ini.mkiv @@ -433,11 +433,8 @@ %D interface. (We no longer need the link back to the main %D internal interface.) -\def\doresetvalue#1#2% - {\dosetvalue{#1}{#2}{}} - -\def\doignorevalue#1#2#3% - {\dosetvalue{#1}{#2}{}} +\def\doresetvalue #1#2{\dosetvalue{#1}{#2}{}} +\def\doignorevalue#1#2#3{\dosetvalue{#1}{#2}{}} % \def\dosetvalue#1#2% % {\let\c!internal!\c!internal!n @@ -502,6 +499,7 @@ % \@EA\def\csname#1#3\@EA\endcsname\@EA{\csname#2#3\endcsname}% % \fi} +\def\doletvalue #1#2{\@EA \let\csname#1#2\endcsname} \def\dosetvalue #1#2{\@EA \def\csname#1#2\endcsname} \def\dosetevalue #1#2{\@EA\edef\csname#1#2\endcsname} \def\dosetgvalue #1#2{\@EA\gdef\csname#1#2\endcsname} diff --git a/tex/context/base/node-aux.lua b/tex/context/base/node-aux.lua index 1eb42d95e..2fd6fc005 100644 --- a/tex/context/base/node-aux.lua +++ b/tex/context/base/node-aux.lua @@ -22,7 +22,7 @@ local has_attribute = node.has_attribute local set_attribute = node.set_attribute local get_attribute = node.get_attribute local unset_attribute = node.unset_attribute -local first_character = node.first_character +local first_glyph = node.first_glyph or node.first_character local texbox = tex.box @@ -69,6 +69,7 @@ end nodes.set_attribute = set_attribute nodes.unset_attribute = unset_attribute nodes.has_attribute = has_attribute +nodes.first_glyph = first_glyph nodes.set_attributes = set_attributes nodes.set_unset_attributes = set_unset_attributes @@ -169,7 +170,7 @@ nodes.unsetattributes = unset_attributes function nodes.firstcharacter(n,untagged) -- tagged == subtype > 255 if untagged then - return first_character(n) + return first_glyph(n) else for g in traverse_id(glyph_code,n) do return g diff --git a/tex/context/base/node-pro.lua b/tex/context/base/node-pro.lua index 9eb431d6b..0ed510cd2 100644 --- a/tex/context/base/node-pro.lua +++ b/tex/context/base/node-pro.lua @@ -16,15 +16,15 @@ local report_nodes = logs.new("nodes") local nodes, node = nodes, node -local nodecodes = nodes.nodecodes -local glyph_code = nodecodes.glyph -local tasks = nodes.tasks +local nodecodes = nodes.nodecodes +local glyph_code = nodecodes.glyph +local tasks = nodes.tasks -local free_node = node.free -local first_character = node.first_character +local free_node = node.free +local first_glyph = node.first_glyph or node.first_character -nodes.processors = nodes.processors or { } -local processors = nodes.processors +nodes.processors = nodes.processors or { } +local processors = nodes.processors -- vbox: grouptype: vbox vtop output split_off split_keep | box_type: exactly|aditional -- hbox: grouptype: hbox adjusted_hbox(=hbox_in_vmode) | box_type: exactly|aditional @@ -67,7 +67,7 @@ processors.tracer = tracer 
processors.enabled = true -- this will become a proper state (like trackers) function processors.pre_linebreak_filter(head,groupcode,size,packtype,direction) - local first, found = first_character(head) + local first, found = first_glyph(head) if found then if trace_callbacks then local before = nodes.count(head,true) @@ -91,7 +91,7 @@ function processors.pre_linebreak_filter(head,groupcode,size,packtype,direction) end function processors.hpack_filter(head,groupcode,size,packtype,direction) - local first, found = first_character(head) + local first, found = first_glyph(head) if found then if trace_callbacks then local before = nodes.count(head,true) @@ -119,13 +119,13 @@ callbacks.register('hpack_filter' , processors.hpack_filter,"all kind of local actions = tasks.actions("finalizers",1) -- head, where --- beware, these are packaged boxes so no first_character test +-- beware, these are packaged boxes so no first_glyph test -- maybe some day a hash with valid groupcodes -- -- beware, much can pass twice, for instance vadjust passes two times function processors.post_linebreak_filter(head,groupcode) ---~ local first, found = first_character(head) +--~ local first, found = first_glyph(head) --~ if found then if trace_callbacks then local before = nodes.count(head,true) diff --git a/tex/context/base/node-tra.lua b/tex/context/base/node-tra.lua index ff038a816..27665d60b 100644 --- a/tex/context/base/node-tra.lua +++ b/tex/context/base/node-tra.lua @@ -235,7 +235,7 @@ function step_tracers.glyphs(n,i) end function step_tracers.features() - -- we cannot use first_character here as it only finds characters with subtype < 256 + -- we cannot use first_glyph here as it only finds characters with subtype < 256 local f = collection[1] while f do if f.id == glyph_code then diff --git a/tex/context/base/node-tsk.lua b/tex/context/base/node-tsk.lua index a6890c2d5..d19d1c6ae 100644 --- a/tex/context/base/node-tsk.lua +++ b/tex/context/base/node-tsk.lua @@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['node-tsk'] = { license = "see context related readme files" } --- this might move to task-* +-- this might move to task-* .. 
we already have dirty flags there local trace_tasks = false trackers.register("tasks.creation", function(v) trace_tasks = v end) diff --git a/tex/context/base/regi-ini.lua b/tex/context/base/regi-ini.lua index 561b8d8f4..eb9a58593 100644 --- a/tex/context/base/regi-ini.lua +++ b/tex/context/base/regi-ini.lua @@ -26,7 +26,11 @@ regimes.utf = regimes.utf or { } regimes.synonyms = regimes.synonyms or { } local synonyms = regimes.synonyms -storage.register("regimes/synonyms", synonyms, "regimes.synonyms") +if storage then + storage.register("regimes/synonyms", synonyms, "regimes.synonyms") +else + regimes.synonyms = { } +end -- setmetatable(regimes.data,_empty_table_) @@ -72,20 +76,44 @@ function regimes.translate(line,regime) return line end +-- function regimes.enable(regime) +-- regime = synonyms[regime] or regime +-- if data[regime] then +-- regimes.currentregime = regime +-- local translate = regimes.translate +-- resolvers.filters.install('input',function(s) +-- return translate(s,regime) +-- end) +-- else +-- regimes.disable() +-- end +-- end +-- +-- function regimes.disable() +-- regimes.currentregime = "utf" +-- resolvers.filters.install('input',nil) +-- end + +local sequencers = utilities.sequencers + +function regimes.process(s) + return translate(s,regimes.currentregime) +end + function regimes.enable(regime) regime = synonyms[regime] or regime if data[regime] then regimes.currentregime = regime - local translate = regimes.translate - resolvers.filters.install('input',function(s) - return translate(s,regime) - end) + sequencers.enableaction(resolvers.openers.textfileactions,"regimes.process") else - regimes.disable() + sequencers.disableaction(resolvers.openers.textfileactions,"regimes.process") end end function regimes.disable() regimes.currentregime = "utf" - resolvers.filters.install('input',nil) + sequencers.disableaction(resolvers.openers.textfileactions,"regimes.process") end + +utilities.sequencers.prependaction(resolvers.openers.textfileactions,"system","regimes.process") +utilities.sequencers.disableaction(resolvers.openers.textfileactions,"regimes.process") diff --git a/tex/context/base/s-abr-01.tex b/tex/context/base/s-abr-01.tex index b233eeee2..85b263468 100644 --- a/tex/context/base/s-abr-01.tex +++ b/tex/context/base/s-abr-01.tex @@ -19,10 +19,11 @@ \protect -\logo [MKI] {MkI} +\logo [MKI] {MkI} % joke \logo [MKII] {MkII} -\logo [MKIII] {MkIII} +\logo [MKIII] {MkIII} % joke \logo [MKIV] {MkIV} +\logo [MKVI] {MkVI} %logo [FGA] {fga} %logo [FGBBS] {fgbbs} diff --git a/tex/context/base/scrn-int.mkiv b/tex/context/base/scrn-int.mkiv index dc919a8aa..7f143f7b4 100644 --- a/tex/context/base/scrn-int.mkiv +++ b/tex/context/base/scrn-int.mkiv @@ -274,7 +274,6 @@ \def\dostartcomment[#1][#2][#3]% {\bgroup \doifassignmentelse{#1}{\getparameters[\??cc][#1]}{\getparameters[\??cc][\c!title=#1,#2]}% - \setcurrentbuffer{\v!comment\v!buffer}% \dostartbuffer[\v!comment\v!buffer][\v!comment\v!buffer][\e!start\v!comment][\e!stop\v!comment]} \unexpanded\def\stopcomment diff --git a/tex/context/base/scrp-ini.lua b/tex/context/base/scrp-ini.lua index 11ffd02f8..5b012890d 100644 --- a/tex/context/base/scrp-ini.lua +++ b/tex/context/base/scrp-ini.lua @@ -19,7 +19,7 @@ local allocate = utilities.storage.allocate local set_attribute = node.set_attribute local has_attribute = node.has_attribute -local first_character = node.first_character +local first_glyph = node.first_glyph or node.first_character local traverse_id = node.traverse_id local nodecodes = nodes.nodecodes @@ -284,7 +284,7 @@ end 
-- one of the time consuming functiions: function scripts.preprocess(head) - local start = first_character(head) + local start = first_glyph(head) if not start then return head, false else diff --git a/tex/context/base/strc-sec.mkiv b/tex/context/base/strc-sec.mkiv index 0d1cd17a6..6fa9348b2 100644 --- a/tex/context/base/strc-sec.mkiv +++ b/tex/context/base/strc-sec.mkiv @@ -729,7 +729,7 @@ \setvalue{\??nh:\??mk:n:\v!page }{} \setvalue{\??nh:\??mk:n:\v!reset}{\resetcurrentstructuremarks} -\setvalue{\??nh:\??mk:y:\v!page }{\resetcurrentstructuremarks} +\setvalue{\??nh:\??mk:y:\v!page }{} % to be checked: {\resetcurrentstructuremarks} \setvalue{\??nh:\??mk:y:\v!reset}{\resetcurrentstructuremarks} \def\docheckstructureheadlayout diff --git a/tex/context/base/tabl-ntb.mkiv b/tex/context/base/tabl-ntb.mkiv index e2420850d..d5eee61a0 100644 --- a/tex/context/base/tabl-ntb.mkiv +++ b/tex/context/base/tabl-ntb.mkiv @@ -1574,49 +1574,6 @@ \fi \fi} -%D Spacing: -% -% \starttabulate -% \NC text \NC text \NC \NR -% \TB[small] -% \NC text \NC text \NC \NR -% \TB[4*big] -% \NC text \NC text \NC \NR -% \stoptabulate -% -% \starttable[|||] -% \VL text \VL text \VL \AR -% \TB[small] -% \VL text \VL text \VL \AR -% \TB[4*big] -% \VL text \VL text \VL \AR -% \stoptable - -\def\complexTableTB[#1]{\TABLEnoalign{\blank[#1]}} -\def\simpleTableTB {\TABLEnoalign{\blank}} - -\def\TabulateTB - {\complexorsimpleTable{TB}} - -\def\doTableinterline% #1 - {\ifnum\currentTABLEcolumn>\maxTABLEcolumn - \chuckTABLEautorow - \else\ifnum\currentTABLEcolumn=\zerocount - \TABLEnoalign - {\globalletempty\checkTABLEautorow - \globalletempty\chuckTABLEautorow}% - \else - \setTABLEerror\TABLEmissingcolumn - \handleTABLEerror - \fi\fi - \complexorsimpleTable} % {#1} - -\def\TableHL{\doTableinterline{HL}} -\def\TableTB{\doTableinterline{TB}} - -\appendtoks\let\TB\TableTB \to\everytable -\appendtoks\let\TB\TabulateTB\to\everytabulate % strange place - % new (for Olivier Turlier) % % \defineTABLEsetup [xx] [foregroundcolor=red] diff --git a/tex/context/base/tabl-tab.mkiv b/tex/context/base/tabl-tab.mkiv index d6007372d..3c9515e60 100644 --- a/tex/context/base/tabl-tab.mkiv +++ b/tex/context/base/tabl-tab.mkiv @@ -13,6 +13,8 @@ \writestatus{loading}{ConTeXt Table Macros / TaBlE Embedding} +% Todo: consistent namespace and get rid of not used code + % In \MKIV\ the old table macros are sort of obsolete. The % color extensions have been removed and some code is stripped. 
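
A side note on the regi-ini.lua change above: regime translation is now just a named action in the text file sequence, so enabling or disabling a regime reduces to toggling that action. A short usage sketch; the cp1252 name assumes the corresponding regime vector is loaded:

-- sketch: regime switching on top of the sequencer-based text opener
regimes.enable("cp1252")  -- known regime: sets currentregime and enables "regimes.process"
-- ... file lines read from now on are translated to utf ...
regimes.disable()         -- back to utf, the action is disabled again
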
% For practical reasons the \TABLE\ macros that are used are @@ -36,7 +38,6 @@ \newdimen\tablecolumnwidthunit \newdimen\tablekernunit -\def\tablelinethicknessfactor {2} \def\tablestrutheightfactor {8} \def\tablestrutdepthfactor {3} \def\tableintercolumnspacefactor {3} @@ -44,27 +45,6 @@ \def\tablevspacefactor {2} \def\tablekernfactor {1} -\newtoks\NormalTLTU -\newtoks\NormalTSU -\newtoks\NormalTICSU -\newtoks\NormalTCWU -\newtoks\NormalTKU - -\NormalTLTU ={1in \divide \tablelinethicknessunit by 300 } -\NormalTSU ={\normalbaselineskip \divide \tablestrutunit by 11 } -\NormalTICSU={.5em plus 1fil minus .25em} -\NormalTCWU ={.5em} -\NormalTKU ={.5em} - -\def\NormalTableUnits - {\tablelinethicknessunit \the\NormalTLTU - \tablestrutunit \the\NormalTSU - \tableintercolumnspaceunit\the\NormalTICSU - \tablecolumnwidthunit \the\NormalTCWU - \tablekernunit \the\NormalTKU} - -\NormalTableUnits - \newtoks\everytable \newtoks\everytableparbox @@ -219,7 +199,7 @@ \noexpand\!!width \ifnum\!tgCode=\plusone \ifx\!tgValue\empty - \tablelinethicknessfactor + \tablevrulethicknessfactor \else \!tgValue \fi @@ -770,7 +750,7 @@ \def\tableStandardTableStrut {\tableMakeStrut {\tablestrutheightfactor\tablestrutunit} - {\tablestrutdepthfactor\tablestrutunit }} + {\tablestrutdepthfactor \tablestrutunit }} \def\tableAugmentedTableStrut#1#2% {\tableMakeStrut @@ -996,11 +976,13 @@ \def\@VLn{1} \def\@VLd{.125em} +\let\tablecurrentvrulecolor\empty + \def\do!ttInsertVrule % will be merged in 2005 {\vrule\!!width \ifnum\!tgCode=\plusone \ifx\!tgValue\empty - \tablelinethicknessfactor + \tablevrulethicknessfactor \else \!tgValue \fi @@ -1012,6 +994,9 @@ \def\!ttInsertVrule {\hfil + \ifx\tablecurrentvrulecolor\empty\else + \switchtocolor[\tablecurrentvrulecolor]% + \fi \ifcase\@VLn\or \do!ttInsertVrule \unskip @@ -1020,6 +1005,7 @@ \gdef\@VLn{1}% \unskip \fi + \global\let\tablecurrentvrulecolor\empty \hfil &} @@ -1043,26 +1029,23 @@ {\ifnum#1>\plusone \omit \global\TABLEdivisionfalse - \scratchcounter\numexpr\currentTABLEcolumn+#1-\plusone\relax % added - \ifnum\scratchcounter>\maxTABLEcolumn % added - \def\next % added - {\setTABLEerror\TABLEspanoverflow % added - \handleTABLEerror}% % added - \else % added - \def\next % added - {\global\advance\currentTABLEcolumn#1-\plusone\relax % added - \scratchcounter2\numexpr#1-\plusone\relax % changed - \!thLoop - \ifnum\scratchcounter>\plusone - \span - \omit - \advance\scratchcounter\minusone - \repeat - \span}% - \fi % added + \scratchcounter\currentTABLEcolumn % added + \advance\scratchcounter #1% % added + \advance\scratchcounter \minusone % added + \def\next % added + {\global\advance\currentTABLEcolumn #1% % added + \global\advance\currentTABLEcolumn \minusone % added + \scratchcounter#1% \mscount is in Plain + \advance\scratchcounter \minusone + \advance\scratchcounter \scratchcounter + \!thLoop + \ifnum\scratchcounter>\plusone + \spanomit \advance\scratchcounter\minusone + \repeat + \span}% \else % added \def\next % conflicts with possible next \omit % added - {\global\advance\currentTABLEcolumn\plusone}% % added + {\global\advance\currentTABLEcolumn \plusone}% % added \fi \next} % added @@ -1084,7 +1067,11 @@ \def\!ttFullHruleA {\!ttGetHalfRuleThickness + \ifx\tablecurrenthrulecolor\empty\else + \switchtocolor[\tablecurrenthrulecolor]% + \fi \hrule\!!height\dimen0\!!depth\dimen0 + \global\let\tablecurrenthrulecolor\empty %\penalty0 % removed \egroup} @@ -1094,18 +1081,22 @@ \def\!ttShortHruleA {\!ttGetHalfRuleThickness + \ifx\tablecurrenthrulecolor\empty\else + 
\switchtocolor[\tablecurrenthrulecolor]% + \fi \leaders\hrule\!!height\dimen0\!!depth\dimen0\hfill \null + \global\let\tablecurrenthrulecolor\empty \ignorespaces} \def\!ttLongHrule {\omit\span\omit\span\!ttShortHrule} \def\!ttGetHalfRuleThickness - {\dimen0= + {\dimen0=\dimexpr \ifnum\!tgCode=\plusone \ifx\!tgValue\empty - \tablelinethicknessfactor + \tablehrulethicknessfactor \else \!tgValue % user-specified integer \fi @@ -1122,10 +1113,8 @@ \def\tableRight #1{\hfill#1} \def\tableOpenUp#1#2% - {\scratchdimen\tablestrutheightfactor \points \advance\scratchdimen #1\points - \edef\tablestrutheightfactor{\withoutpt\the\scratchdimen}% - \scratchdimen\tablestrutdepthfactor \points \advance\scratchdimen #2\points - \edef\tablestrutdepthfactor{\withoutpt\the\scratchdimen}} + {\edef\tablestrutheightfactor{\withoutpt\the\dimexpr\tablestrutheightfactor\points+#1\points}% + \edef\tablestrutdepthfactor {\withoutpt\the\dimexpr\tablestrutdepthfactor \points+#2\points}} \def\tableSetTableToWidth#1% {\doifelsenothing{#1}{\!taTableSpread\emptytoks}{\!taTableSpread{to #1}}} @@ -1595,8 +1584,21 @@ %D such hacks become a second nature. However, redefining \type %D {\omit} and \type{\span} is not that easy.} +\def\doTABLEnoalign + {\let\next=} + \def\TABLEnoalign - {\noalign\bgroup\let\noalign\relax\let\next=} + {\noalign + \bgroup + \let\noalign\relax + \let\TABLEnoalign\doTABLEnoalign + \doTABLEnoalign} + +\def\startTABLEnoalign + {\TABLEnoalign\bgroup} + +\def\stopTABLEnoalign + {\iffalse{\else}\fi} %D \macros %D {starttable} @@ -1780,19 +1782,16 @@ %D Now we can start the table. +\newtoks \localtabledefinitions + \def\thirdstagestartTABLE#1% {\global\setTABLEactiontrue \setTABLEaction\TABLEunknown \setTABLEforce\TABLEunknown - \resetVLvalues + \tableresetVLvalues \appendtoks\localTABLEsetup\to\everytable \tablestandardbegin[\ifsplittables u\else b\fi]% - \defineTABLEunits - \defineTABLEsteps - \defineTABLErules - \defineTABLEdivisions - \defineTABLEshorthands - \defineTABLEendings + \the\localtabledefinitions \forgetall % added \doifsomething{#1} {\def\TABLEformat{#1}% @@ -1907,10 +1906,10 @@ \let\@@TABLEhead\empty \def\TABLEhead{\@@TABLEhead} \let\@@TABLEtail\empty \def\TABLEtail{\@@TABLEtail} -\letvalue{\e!start\v!tablehead}=\undefined -\letvalue{\e!stop \v!tablehead}=\undefined -\letvalue{\e!start\v!tabletail}=\undefined -\letvalue{\e!stop \v!tabletail}=\undefined +\letbeundefined{\e!start\v!tablehead} +\letbeundefined{\e!stop \v!tablehead} +\letbeundefined{\e!start\v!tabletail} +\letbeundefined{\e!stop \v!tabletail} \expanded {\long\def\csname\e!start\v!tablehead\endcsname##1\csname\e!stop\v!tablehead\endcsname% @@ -1935,23 +1934,6 @@ %D Redudant \type{\HL}'s are removed automatically, so %D mid||lines can be used without problems. -%D We need an alternative for the normal complex or simple -%D commands, because assignments in these system commands -%D conflict with \type{\noalign}. This alternative is about -%D as efficient as possible. - -\def\complexorsimpleTable#1#2% - {\csname\if[\noexpand#2\s!complex\else\s!simple\fi\c!Table#1\endcsname#2} - -%D The next one is used in \type{\VL} cum suis and honours -%D the next grouping. - -\def\docomplexorsimpleTable#1#2% - {\ifx\next\bgroup\@EA#2\else\@EA\dodocomplexorsimpleTable\@EA#1\@EA#2\fi} - -\def\dodocomplexorsimpleTable#1#2#3% - {\if[\noexpand#3\@EA#1\else\@EA#2\fi#3} - %D The order of the next macros is more or less random. First %D we implement error recovery. 
Errors are reported to the %D screen and log file as well as visualized in the table in @@ -2011,12 +1993,13 @@ \setnewconstant\TABLErowzero\zerocount -\unexpanded\def\defineTABLEendings - {\let\SR\TableSR - \let\FR\TableFR - \let\MR\TableMR - \let\LR\TableLR - \let\AR\TableAR} +\appendtoks + \let\SR\TableSR + \let\FR\TableFR + \let\MR\TableMR + \let\LR\TableLR + \let\AR\TableAR +\to \localtabledefinitions \unexpanded\def\TableSR {\ifnum\TABLEaction=\TABLEfirstrow @@ -2144,7 +2127,6 @@ %D one more than the number of columns. \newcount\currentTABLEcolumn -\newcount\maxTABLEcolumn % needed for reset vl properties %D While defining this macro we change the \CATCODE\ of %D \type{|}. When counting the bars, we use a non active @@ -2161,7 +2143,7 @@ \bgroup \catcode`\|=\othercatcode -\gdef\getTABLEnofcolumns#1% +\gdef\getTABLEnofcolumns#1% todo: also divert this to lua as with tabulate {\bgroup \cleanupfeatures % needed ! \@@useotherbar @@ -2177,34 +2159,46 @@ %D \sym{\type{\HC}} a horizontal colored line %D \stopitemize -\unexpanded\def\defineTABLErules - {\let\VL\TableVL - \let\VC\TableVC - \let\HL\TableHL - \let\HC\TableHC - \let\VS\TableVS - \let\VD\TableVD - \let\VT\TableVT - \let\VN\TableVN} +\newcount\tablevrulethicknessfactor +\newcount\tablehrulethicknessfactor +\newcount\tabledrulespan +\let \tablecurrentvrulecolor \empty +\let \tablecurrenthrulecolor \empty -\unexpanded\def\TableVL - {\checkTABLEautorow - \global\advance\currentTABLEcolumn \plusone - \doTableVL} - -\def\doTableVL - {\futurelet\next\dodoTableVL} +\appendtoks + \let\VL\TableVL + \let\VC\TableVC + \let\HL\TableHL + \let\HC\TableHC + \let\VS\TableVS + \let\VD\TableVD + \let\VT\TableVT + \let\VN\TableVN +\to \localtabledefinitions + +\def\tableresetVLvalues + {\global\currentTABLEcolumn\zerocount} -\def\dodoTableVL - {\docomplexorsimpleTable\complexTableVL\simpleTableVL} +\def\dotablevrulecommand#1% global assignments + {\doifnumberelse{#1} + {\global\tablevrulethicknessfactor#1\relax + \global\multiply\tablevrulethicknessfactor\@@tiVLwidth\relax} + {\xdef\tablecurrentvrulecolor{#1}}} -\def\complexTableVL[#1]% - {\scratchcounter=0#1% - \multiply\scratchcounter \@@tiVLwidth - \simpleTableVL} +\unexpanded\def\TableVL + {\checkTABLEautorow + \global\advance\currentTABLEcolumn\plusone + \dosingleempty\doTableVL} + +\def\doTableVL[#1]% + {\global\tablecurrentvrulecolor\empty + \global\tablevrulethicknessfactor\@@tiVLwidth\relax + \iffirstargument + \rawprocesscommalist[#1]\dotablevrulecommand + \fi + \normalexpanded{\noexpand\normalTABLEcomplexbar\the\tablevrulethicknessfactor} }% \relax breaks \use -\def\simpleTableVL - {\expanded{\normalTABLEcomplexbar\@@tiVLwidth\space}}% \relax breaks \use +\let\TableVC\TableVL % for mojca % \starttable[|||] % \HL @@ -2218,79 +2212,46 @@ \unexpanded\def\TableVD {\VN2} \unexpanded\def\TableVT {\VN3} \unexpanded\def\TableVN#1{\gdef\@VLn{#1}\VL} - -\def\resetVLvalues - {\global\currentTABLEcolumn\zerocount} - -\unexpanded\def\TableVC - {\checkTABLEautorow - \doTableVC} - -\def\doTableVC - {\futurelet\next\dodoTableVC} - -\def\dodoTableVC - {\docomplexorsimpleTable\complexTableVC\simpleTableVC} -\def\complexTableVC[#1]% - {\setgvalue{bVC\the\currentTABLEcolumn}{\startcolor[#1]}% - \setgvalue{eVC\the\currentTABLEcolumn}{\stopcolor}% - \simpleTableVC} - -\def\simpleTableVC - {\setgvalue{bVL\the\currentTABLEcolumn}{\getvalue{bVC\the\currentTABLEcolumn}}% - \setgvalue{eVL\the\currentTABLEcolumn}{\getvalue{eVC\the\currentTABLEcolumn}}% - \doTableVL} +\def\dotablehrulecommand#1% global assignments + 
{\doifnumberelse{#1} + {\global\tablehrulethicknessfactor#1\relax + \global\multiply\tablehrulethicknessfactor\@@tiHLheight\relax} + {\xdef\tablecurrenthrulecolor{#1}}} \unexpanded\def\TableHL {\finishTABLErow - \complexorsimpleTable{HL}} - -\def\complexTableHL[#1]% - {\TABLEnoalign - {\scratchcounter0#1% - \multiply\scratchcounter \@@tiHLheight - \edef\@@tiHLheight{\the\scratchcounter}}% - \simpleTableHL} - -\def\simpleTableHL - {\TABLEnoalign - {\nobreak - \ifnum\TABLEaction=\TABLErule - \writestatus\m!TABLE{skipping \string\HL}% \statusmessage - \else - \ifnum\TABLEaction=\TABLEmidrow - \writestatus\m!TABLE{change \string\MR\space into \string\LR/\string\SR}% - \else\ifnum\TABLEaction=\TABLEfirstrow - \writestatus\m!TABLE{change \string\MR\space into \string\SR}% - \fi\fi - \startHLcommand - \expandafter\normalTABLEfullrule\@@tiHLheight - \stopHLcommand - \globalletempty\startHLcommand - \globalletempty\stopHLcommand - \accountTABLElinewidth - \fi - \setTABLEaction\TABLErule - \nobreak}} - -\let\startHLcommand\empty -\let\stopHLcommand \empty - -\unexpanded\def\TableHC - {\complexorsimpleTable{HC}} + \startTABLEnoalign + \dosingleempty\doTableHL} -\def\complexTableHC[#1]% - {\TABLEnoalign - {\gdef\startHCcommand{\startcolor[#1]}% - \gdef\stopHCcommand {\stopcolor}}% - \simpleTableHC} +\def\doTableHL[#1]% + {\nobreak + \ifnum\TABLEaction=\TABLErule + \writestatus\m!TABLE{skipping \string\HL}% \statusmessage + \else + \ifnum\TABLEaction=\TABLEmidrow + \writestatus\m!TABLE{change \string\MR\space into \string\LR/\string\SR}% + \else\ifnum\TABLEaction=\TABLEfirstrow + \writestatus\m!TABLE{change \string\MR\space into \string\SR}% + \fi\fi + \bgroup + \global\tablehrulethicknessfactor\@@tiHLheight\relax + \iffirstargument + \global\let\tablecurrenthrulecolor\empty + \rawprocesscommalist[#1]\dotablehrulecommand + \ifx\tablecurrenthrulecolor\empty\else + \switchtocolor[\tablecurrenthrulecolor]% + \fi + \fi + \normalexpanded{\noexpand\normalTABLEfullrule\the\tablehrulethicknessfactor} % + \egroup + \accountTABLElinewidth + \fi + \setTABLEaction\TABLErule + \nobreak + \stopTABLEnoalign} -\def\simpleTableHC - {\TABLEnoalign - {\globallet\startHLcommand\startHCcommand - \globallet\stopHLcommand \stopHCcommand}% - \HL} +\let\TableHC\TableHL % for mojca %D \startitemize[3*ruim] %D \sym{\type{\NL}} a vertical skip @@ -2301,30 +2262,37 @@ %D \sym{\type{\LC}} a last column %D \stopitemize -% n+1 uitleggen +% \starttable[|||] +% \VL text \VL text \VL \AR +% \TB[small] +% \VL text \VL text \VL \AR +% \TB[4*big] +% \VL text \VL text \VL \AR +% \stoptable -\unexpanded\def\defineTABLEsteps - {\let\NL\TableNL - \let\NR\TableNR - \let\NC\TableNC - \let\FC\TableNC - \let\MC\TableNC - \let\LC\TableNC} +% n+1 uitleggen -\unexpanded\def\TableNL - {\complexorsimpleTable{NL}} +\appendtoks + \let\TB\TableTB + \let\NL\TableNL % old + \let\NR\TableNR + \let\NC\TableNC + \let\FC\TableNC + \let\MC\TableNC + \let\LC\TableNC +\to \localtabledefinitions + +\unexpanded\def\TableTB + {\finishTABLErow + \startTABLEnoalign + \dosingleempty\doTableTB} -\def\complexTableNL[#1]% - {\TABLEnoalign - {\edef\@@tiNL{#1}% - \simpleTableNL}}% +\def\doTableTB[#1]% + {\blank[\iffirstargument#1\else\@@tiNL\fi]% + \nobreak + \stopTABLEnoalign} -\def\simpleTableNL - {\TABLEnoalign - {\nobreak - \setbox0\vbox{\blank[\@@tiNL]}% - \vskip\ht0 - \nobreak}} +\let\TableNL\TableTB \unexpanded\def\TableNR {\global\currentTABLEcolumn\zerocount @@ -2347,12 +2315,13 @@ \newif\ifTABLEdivision -\unexpanded\def\defineTABLEdivisions - 
{\global\TABLEdivisionfalse % in start - \let\DL\TableDL - \let\DC\TableDC - \let\DV\TableDV - \let\DR\TableDR} +\appendtoks + \global\TABLEdivisionfalse % in start + \let\DL\TableDL + \let\DC\TableDC + \let\DV\TableDV + \let\DR\TableDR +\to \localtabledefinitions \def\checkTABLEdivision {\ifTABLEdivision \else @@ -2361,14 +2330,21 @@ \global\TABLEdivisiontrue \fi} +\def\dotabledrulecommand#1% global assignments + {\doifnumberelse{#1} + {\ifcase\tabledrulespan + \global\tabledrulespan#1\relax + \else + \global\tablehrulethicknessfactor#1\relax + \global\multiply\tablehrulethicknessfactor\@@tiVLwidth\relax + \fi} + {\xdef\tablecurrenthrulecolor{#1}}} + \unexpanded\def\TableDL {\checkTABLEdivision - \complexorsimpleTable{DL}} + \dosingleempty\doTableDL} -\def\simpleTableDL - {\complexTableDL[1]} - -\def\complexTableDL[#1]% +\def\doTableDL[#1]% {\ifnum\TABLEaction=\TABLErule \writestatus\m!TABLE{skipping \string\DL}% \else @@ -2378,12 +2354,24 @@ \writestatus\m!TABLE{change \string\MR\space into \string\SR}% \fi\fi \setTABLEaction\TABLEunknown - \ifnum#1=\plusone + \global\tablehrulethicknessfactor\@@tiHLheight\relax + \global\tabledrulespan\zerocount + \iffirstargument + \global\let\tablecurrenthrulecolor\empty + \rawprocesscommalist[#1]\dotabledrulecommand + \ifx\tablecurrenthrulecolor\empty\else + \switchtocolor[\tablecurrenthrulecolor]% + \fi + \fi + \ifcase\tabledrulespan + \global\advance\currentTABLEcolumn \plusone + \let\next\normalTABLEsinglerule + \or \global\advance\currentTABLEcolumn \plustwo \let\next\normalTABLEsinglerule \else \global\advance\currentTABLEcolumn \plusone - \def\next{\normalTABLEmultirule{#1}}% + \edef\next{\noexpand\normalTABLEmultirule{\tabledrulespan} }% \fi \next \fi} @@ -2412,25 +2400,39 @@ \def\accountTABLElinewidth {\scratchdimen\tablelinethicknessunit} -\unexpanded\def\defineTABLEshorthands - {\def\SPAN##1{\use{##1}}% - \def\TWO {\use2}% - \def\THREE {\use3}% - \def\FOUR {\use4}% - \def\FIVE {\use5}% - \def\SIX {\use6}% - \def\REF {\ReFormat}} - -\unexpanded\def\defineTABLEunits - {\processaction - [\@@tidistance] - [ \v!none=>\OpenUp00\def\LOW{\Lower6 }, - \v!small=>\OpenUp00\def\LOW{\Lower6 }, % == baseline - \v!medium=>\OpenUp11\def\LOW{\Lower7 }, - \v!big=>\OpenUp22\def\LOW{\Lower8 }]% - \doifelse\@@tidistance\v!none - {\TABLErowfactor\zerocount} - {\TABLErowfactor\plustwo }} +\def\doTableSPAN#1{\use{#1}} +\def\doTableTWO {\use2} +\def\doTableTHREE {\use3} +\def\doTableFOUR {\use4} +\def\doTableFIVE {\use5} +\def\doTableSIX {\use6} +\def\doTableREF {\ReFormat} + +\appendtoks + \let\SPAN \doTableSPAN + \let\TWO \doTableTWO + \let\THREE\doTableTHREE + \let\FOUR \doTableFOUR + \let\FIVE \doTableFIVE + \let\SIX \doTableSIX + \let\REF \doTableREF +\to \localtabledefinitions + +\setvalue{\??ti:\c!distance:\v!none }{\OpenUp00\def\LOW{\Lower6 }} +\setvalue{\??ti:\c!distance:\v!small }{\OpenUp00\def\LOW{\Lower6 }} % == baseline +\setvalue{\??ti:\c!distance:\v!medium}{\OpenUp11\def\LOW{\Lower7 }} +\setvalue{\??ti:\c!distance:\v!big }{\OpenUp22\def\LOW{\Lower8 }} + +\appendtoks + \getvalue{\??ti:\c!distance:\@@tidistance}% +\to \localtabledefinitions + +\appendtoks + \doifelse\@@tidistance\v!none + {\TABLErowfactor\zerocount} + {\TABLErowfactor\plustwo }% +\to \localtabledefinitions + \def\dohandlebar % here ? 
{\ifmmode @@ -2446,7 +2448,7 @@ \def\dosetuptables[#1]% {\getparameters[\??ti][#1]% - \processaction + \processaction % we have a command for this [\@@tialign] [ \v!right=>\def\TABLEparalignment{\raggedright}, \v!left=>\def\TABLEparalignment{\raggedleft}, @@ -2456,43 +2458,24 @@ \assignalfadimension\@@tiVL\@@tiVLwidth 246% \assignalfadimension\@@tiHL\@@tiHLheight246} -\def\localTABLEsetup - {\@@ticommands\relax - \expanded{\switchtobodyfont[\@@tibodyfont]}% - \def\tablestrutheightfactor {8}% - \def\tablestrutdepthfactor {4}% - \def\tablelinethicknessfactor{4}% - \NormalTLTU {.1pt}% - \NormalTSU {\normalbaselineskip\divide\tablestrutunit 12 }% - \NormalTableUnits} - -%D And then I wrote the tabulate environment. That -%D alternative supports setting the rule thickness and color, -%D so here is the table alternative. - -% \let\startglobalTABLEcolor\empty -% \let\stopglobalTABLEcolor \empty +\let\tablelinethicknessfactor\plusfour \def\localTABLEsetup {\@@ticommands\relax - \expanded{\switchtobodyfont[\@@tibodyfont]}% - \def\tablelinethicknessfactor{4}% - \scratchdimen\@@tirulethickness - \divide\scratchdimen \tablelinethicknessfactor - \expanded{\NormalTLTU{\the\scratchdimen}}% + \expanded{\switchtobodyfont[\@@tibodyfont]}% expanded ? + \tablelinethicknessunit\dimexpr\@@tirulethickness/\tablelinethicknessfactor\relax \doifelse\@@tiheight\v!strut - {\edef\tablestrutheightfactor{\tablestrutheightfactor}} - {\edef\tablestrutheightfactor{\@@tiheight}}% + {\let\tablestrutheightfactor\tablestrutheightfactor} + {\let\tablestrutheightfactor\@@tiheight}% \doifelse\@@tidepth\v!strut - {\edef\tablestrutdepthfactor{\tablestrutdepthfactor}} - {\edef\tablestrutdepthfactor{\@@tidepth}}% - \scratchdimen\tablestrutheightfactor\points \multiply\scratchdimen 10% - \edef\tablestrutheightfactor{\withoutpt\the\scratchdimen}% - \scratchdimen\tablestrutdepthfactor \points \multiply\scratchdimen 10% - \edef\tablestrutdepthfactor{\withoutpt\the\scratchdimen}% - % units - \NormalTSU{\normalbaselineskip\divide\tablestrutunit 12 }% - \NormalTableUnits} + {\let\tablestrutdepthfactor\tablestrutdepthfactor} + {\let\tablestrutdepthfactor\@@tidepth}% + \edef\tablestrutheightfactor{\withoutpt\the\dimexpr10\dimexpr\tablestrutheightfactor\points}% + \edef\tablestrutdepthfactor {\withoutpt\the\dimexpr10\dimexpr\tablestrutdepthfactor \points}% + \tablestrutunit\dimexpr\normalbaselineskip/12\relax % 12 is default bodyfont + \tableintercolumnspaceunit.5em plus 1fil minus .25em\relax + \tablecolumnwidthunit .5em\relax + \tablekernunit .5em\relax} %D As one can see, we didn't only add color, but also more %D control over spacing. 
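The reworked \VL, \HL and \DL macros above take an optional comma list in which a bare number acts as a rule thickness factor and any other item is treated as a color name (for \DL a first number sets the column span, a second one the thickness). The following is only a rough Lua sketch of that classification, with an invented helper name; the real work stays on the TeX side in \dotablevrulecommand, \dotablehrulecommand and \dotabledrulecommand.

-- hypothetical helper, only illustrating how an option list like [2,red]
-- or [blue] splits into a thickness factor and a color name
local function splitrulespec(spec)
    local factor, color = 1, nil
    for item in string.gmatch(spec or "","[^,]+") do
        local n = tonumber(item)
        if n then
            factor = n       -- a number scales the rule thickness
        else
            color = item     -- anything else is taken as a color name
        end
    end
    return factor, color
end

print(splitrulespec("2,red")) -- 2  red
print(splitrulespec("blue"))  -- 1  blue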
diff --git a/tex/context/base/tabl-tbl.mkiv b/tex/context/base/tabl-tbl.mkiv index 3cfda9ad7..9428ea3dc 100644 --- a/tex/context/base/tabl-tbl.mkiv +++ b/tex/context/base/tabl-tbl.mkiv @@ -849,7 +849,7 @@ % The much needed hook: -\appendtoks \optimizeverbatimfalse \to \everytabulate +% \appendtoks \optimizeverbatimfalse \to \everytabulate % Todo: proper inheritance @@ -1661,6 +1661,30 @@ %D \stoptabulate %D \stoptyping +%D Spacing: +% +% \starttabulate +% \NC text \NC text \NC \NR +% \TB[small] +% \NC text \NC text \NC \NR +% \TB[4*big] +% \NC text \NC text \NC \NR +% \stoptabulate + +\def\TabulateTB + {\startTABLEnoalign + \dosingleempty\doTabulateTB} + +\def\doTabulateTB[#1]% + {\iffirstargument + \blank[#1] + \else + \blank + \fi + \stopTABLEnoalign} + +\appendtoks\let\TB\TabulateTB\to\everytabulate + % \starttabulatie[|mc|] % \NC \digits{100.000,00} \NC\NR % \NC \digits{@10.000,00} \NC\NR diff --git a/tex/context/base/util-prs.lua b/tex/context/base/util-prs.lua index 6ca7c3396..a369552dc 100644 --- a/tex/context/base/util-prs.lua +++ b/tex/context/base/util-prs.lua @@ -50,11 +50,11 @@ local pattern_b = spaces * comma^0 * spaces * (key * ((spaces * equal * spaces * local hash = { } -local function set(key,value) -- using Carg is slower here +local function set(key,value) hash[key] = value end -local function set(key,value) -- using Carg is slower here +local function set(key,value) hash[key] = value end diff --git a/tex/context/base/util-seq.lua b/tex/context/base/util-seq.lua index 028d0cf8d..7ce2cd345 100644 --- a/tex/context/base/util-seq.lua +++ b/tex/context/base/util-seq.lua @@ -37,88 +37,95 @@ local function validaction(action) return true end -function sequencers.reset() - return { +function sequencers.reset(t) + local s = { list = { }, order = { }, kind = { }, askip = { }, gskip = { }, } + if t then + s.arguments = t.arguments + s.returnvalues = t.returnvalues + s.results = t.results + end + s.dirty = true + return s end function sequencers.prependgroup(t,group,where) - local list, order = t.list, t.order - removevalue(order,group) - insertbeforevalue(order,where,group) - list[group] = { } + if t then + local list, order = t.list, t.order + removevalue(order,group) + insertbeforevalue(order,where,group) + list[group] = { } + t.dirty = true + end end function sequencers.appendgroup(t,group,where) - local list, order = t.list, t.order - removevalue(order,group) - insertaftervalue(order,where,group) - list[group] = { } + if t then + local list, order = t.list, t.order + removevalue(order,group) + insertaftervalue(order,where,group) + list[group] = { } + t.dirty = true + end end function sequencers.prependaction(t,group,action,where,kind,force) - local g = t.list[group] - if g and (force or validaction(action)) then - removevalue(g,action) - insertbeforevalue(g,where,action) - t.kind[action] = kind + if t then + local g = t.list[group] + if g and (force or validaction(action)) then + removevalue(g,action) + insertbeforevalue(g,where,action) + t.kind[action] = kind + t.dirty = true + end end end function sequencers.appendaction(t,group,action,where,kind,force) - local g = t.list[group] - if g and (force or validaction(action)) then - removevalue(g,action) - insertaftervalue(g,where,action) - t.kind[action] = kind + if t then + local g = t.list[group] + if g and (force or validaction(action)) then + removevalue(g,action) + insertaftervalue(g,where,action) + t.kind[action] = kind + t.dirty = true + end end end -function sequencers.enableaction (t,action) t.askip[action] = false end 
-function sequencers.disableaction(t,action) t.askip[action] = true end -function sequencers.enablegroup (t,group) t.gskip[group] = false end -function sequencers.disablegroup (t,group) t.gskip[group] = true end +function sequencers.enableaction (t,action) if t then t.dirty = true t.askip[action] = false end end +function sequencers.disableaction(t,action) if t then t.dirty = true t.askip[action] = true end end +function sequencers.enablegroup (t,group) if t then t.dirty = true t.gskip[group] = false end end +function sequencers.disablegroup (t,group) if t then t.dirty = true t.gskip[group] = true end end function sequencers.setkind(t,action,kind) - t.kind[action] = kind + if t then + t.kind[action] = kind + t.dirty = true + end end function sequencers.removeaction(t,group,action,force) - local g = t.list[group] + local g = t and t.list[group] if g and (force or validaction(action)) then removevalue(g,action) + t.dirty = true end end -function sequencers.compile(t,compiler,n) - if type(t) == "string" then - -- already compiled - elseif compiler then - t = compiler(t,n) - else - t = sequencers.tostring(t) - end - return loadstring(t)() -end - local function localize(str) return (gsub(str,"%.","_")) end -local template = [[ -%s -return function(...) -%s -end]] - -function sequencers.tostring(t) +local function construct(t,nodummy) local list, order, kind, gskip, askip = t.list, t.order, t.kind, t.gskip, t.askip - local vars, calls, args, n = { }, { }, nil, 0 + local arguments, returnvalues, results = t.arguments or "...", t.returnvalues, t.results + local variables, calls, n = { }, { }, 0 for i=1,#order do local group = order[i] if not gskip[group] then @@ -128,13 +135,44 @@ function sequencers.tostring(t) if not askip[action] then local localized = localize(action) n = n + 1 - vars [n] = format("local %s = %s", localized, action) - calls[n] = format(" %s(...) 
-- %s %i", localized, group, i) + variables[n] = format("local %s = %s",localized,action) + if not returnvalues then + calls[n] = format("%s(%s)",localized,arguments) + elseif n == 1 then + calls[n] = format("local %s = %s(%s)",returnvalues,localized,arguments) + else + calls[n] = format("%s = %s(%s)",returnvalues,localized,arguments) + end end end end end - return format(template,concat(vars,"\n"),concat(calls,"\n")) + t.dirty = false + if nodummy and #calls == 0 then + return nil + else + variables = concat(variables,"\n") + calls = concat(calls,"\n") + if results then + return format("%s\nreturn function(%s)\n%s\nreturn %s\nend",variables,arguments,calls,results) + else + return format("%s\nreturn function(%s)\n%s\nend",variables,arguments,calls) + end + end +end + +sequencers.tostring = construct +sequencers.localize = localize + +function sequencers.compile(t,compiler,n) + if not t or type(t) == "string" then + -- already compiled + elseif compiler then + t = compiler(t,n) + else + t = construct(t) + end + return loadstring(t)() end -- we used to deal with tail as well but now that the lists are always @@ -151,7 +189,7 @@ return function(head%s) return head, done end]] -function sequencers.nodeprocessor(t,nofarguments) +function sequencers.nodeprocessor(t,nofarguments) -- todo: handle 'kind' in plug into tostring local list, order, kind, gskip, askip = t.list, t.order, t.kind, t.gskip, t.askip local vars, calls, args, n = { }, { }, nil, 0 if nofarguments == 0 then @@ -179,6 +217,7 @@ function sequencers.nodeprocessor(t,nofarguments) local localized = localize(action) n = n + 1 vars[n] = format("local %s = %s",localized,action) + -- only difference with tostring is kind and rets (why no return) if kind[action] == "nohead" then calls[n] = format(" ok = %s(head%s) done = done or ok -- %s %i",localized,args,group,i) else diff --git a/tex/context/base/v-default.lua b/tex/context/base/v-default.lua new file mode 100644 index 000000000..107c4c4ca --- /dev/null +++ b/tex/context/base/v-default.lua @@ -0,0 +1,40 @@ +if not modules then modules = { } end modules ['v-default'] = { + version = 1.001, + comment = "companion to v-default.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local patterns, P, V = lpeg.patterns, lpeg.P, lpeg.V +local makepattern = visualizers.makepattern + +local handler = visualizers.newhandler() + +local grammar = { "visualizer", + + -- basic + + emptyline = makepattern(handler,"emptyline",patterns.emptyline), + beginline = makepattern(handler,"beginline",patterns.beginline), + newline = makepattern(handler,"newline", patterns.newline), + space = makepattern(handler,"space", patterns.space), + default = makepattern(handler,"default", patterns.anything), + content = makepattern(handler,"default", patterns.somecontent), + + -- handy + + line = V("newline") * V("emptyline")^0 * V("beginline"), + whitespace = (V("space") + V("line"))^1, + optionalwhitespace = (V("space") + V("line"))^0, + + -- used + + pattern = V("line") + V("space") + V("content"), + visualizer = V("pattern")^1 + +} + +local parser = P(grammar) + +visualizers.register("default", { parser = parser, handler = handler, grammar = grammar }) diff --git a/tex/context/base/v-default.mkiv b/tex/context/base/v-default.mkiv new file mode 100644 index 000000000..f9a821f0f --- /dev/null +++ b/tex/context/base/v-default.mkiv @@ -0,0 +1,43 @@ +%D \module +%D [ file=v-default, +%D version=2010.10.19, +%D 
title=\CONTEXT\ Visualizer Macros, +%D subtitle=Default, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright=PRAGMA-ADE] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. + +\registerctxluafile{v-default.lua}{1.001} + +\unprotect + +\definestartstop + [DefaultSnippet] + [\c!before=\blank, + \c!after=\blank, + \c!style=\tt] + +% Name +% NamePrimitive +% NamePlain +% NameMetafun +% Group +% Boundary +% Special +% Comment +% Constructor +% Key +% Entity +% String +% Equal +% Cdata +% Instruction + +% Value +% Quote + +\protect \endinput diff --git a/tex/context/base/v-escaped.lua b/tex/context/base/v-escaped.lua new file mode 100644 index 000000000..060f48ea3 --- /dev/null +++ b/tex/context/base/v-escaped.lua @@ -0,0 +1,14 @@ +if not modules then modules = { } end modules ['v-escaped'] = { + version = 1.001, + comment = "companion to v-escaped.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +visualizers.registerescapepattern("/BTEX/ETEX","/BTEX","/ETEX") + +visualizers.register("escaped", { + parser = visualizers.escapepatterns["/BTEX/ETEX"], + handler = visualizers.newhandler(), +}) diff --git a/tex/context/base/v-escaped.mkiv b/tex/context/base/v-escaped.mkiv new file mode 100644 index 000000000..780d0b518 --- /dev/null +++ b/tex/context/base/v-escaped.mkiv @@ -0,0 +1,18 @@ +%D \module +%D [ file=v-escaped, +%D version=2010.10.19, +%D title=\CONTEXT\ Visualizer Macros, +%D subtitle=Escaped, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright=PRAGMA-ADE] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. 
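The little v-escaped.lua above is the template for hooking an escape into verbatim: registerescapepattern couples a name to its begin and end tokens, and the stored pattern is then handed to visualizers.register as the parser. Below is a hedged sketch of registering an additional escape pair in the same style; the /BLUA/ELUA name, its delimiters and the luaescaped visualizer are invented for illustration and are not part of this commit.

-- hypothetical companion to the /BTEX/ETEX registration shown above
visualizers.registerescapepattern("/BLUA/ELUA","/BLUA","/ELUA")

visualizers.register("luaescaped", {
    parser  = visualizers.escapepatterns["/BLUA/ELUA"],
    handler = visualizers.newhandler(),
})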
+ +\registerctxluafile{v-escaped.lua}{1.001} + +\unprotect + +\protect \endinput diff --git a/tex/context/base/v-lua.lua b/tex/context/base/v-lua.lua new file mode 100644 index 000000000..af1db27a3 --- /dev/null +++ b/tex/context/base/v-lua.lua @@ -0,0 +1,238 @@ +if not modules then modules = { } end modules ['v-lua'] = { + version = 1.001, + comment = "companion to v-lua.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- borrowed from scite +-- +-- depricated: +-- +-- gcinfo unpack getfenv setfenv loadlib +-- table.maxn table.getn table.setn +-- math.log10 math.mod math.modf math.fmod + +local format, tohash = string.format, table.tohash +local P, S, V, patterns = lpeg.P, lpeg.S, lpeg.V, lpeg.patterns +local C, Cs, Cg, Cb, Cmt, Carg = lpeg.C, lpeg.Cs, lpeg.Cg, lpeg.Cb, lpeg.Cmt, lpeg.Carg + +local core = tohash { + "and", "break", "do", "else", "elseif", "end", "false", "for", "function", + "if", "in", "local", "nil", "not", "or", "repeat", "return", "then", + "true", "until", "while" +} + +local base = tohash { + "assert", "collectgarbage", "dofile", "error", "loadfile", + "loadstring", "print", "rawget", "rawset", "require", "tonumber", + "tostring", "type", "_G", "getmetatable", "ipairs", "next", "pairs", + "pcall", "rawequal", "setmetatable", "xpcall", "module", "select", +} + +local libraries = { + coroutine = tohash { + "create", "resume", "status", "wrap", "yield", "running", + }, + package = tohash{ + "cpath", "loaded", "loadlib", "path", "config", "preload", "seeall", + }, + io = tohash{ + "close", "flush", "input", "lines", "open", "output", "read", "tmpfile", + "type", "write", "stdin", "stdout", "stderr", "popen", + }, + math = tohash{ + "abs", "acos", "asin", "atan", "atan2", "ceil", "cos", "deg", "exp", + "floor ", "ldexp", "log", "max", "min", "pi", "pow", "rad", "random", + "randomseed", "sin", "sqrt", "tan", "cosh", "sinh", "tanh", "huge", + }, + string = tohash{ + "byte", "char", "dump", "find", "len", "lower", "rep", "sub", "upper", + "format", "gfind", "gsub", "gmatch", "match", "reverse", + }, + table = tohash{ + "concat", "foreach", "foreachi", "sort", "insert", "remove", "pack", + "unpack", + }, + os = tohash{ + "clock", "date", "difftime", "execute", "exit", "getenv", "remove", + "rename", "setlocale", "time", "tmpname", + }, + lpeg = tohash{ + "print", "match", "locale", "type", "version", "setmaxstack", + "P", "R", "S", "C", "V", "Cs", "Ct", "Cs", "Cp", "Carg", + "Cg", "Cb", "Cmt", "Cf", "B", + }, + -- bit + -- debug +} + +local context = context +local verbatim = context.verbatim +local makepattern = visualizers.makepattern + +local LuaSnippet = context.LuaSnippet +local startLuaSnippet = context.startLuaSnippet +local stopLuaSnippet = context.stopLuaSnippet + +local LuaSnippetBoundary = verbatim.LuaSnippetBoundary +local LuaSnippetSpecial = verbatim.LuaSnippetSpecial +local LuaSnippetComment = verbatim.LuaSnippetComment +local LuaSnippetNameCore = verbatim.LuaSnippetNameCore +local LuaSnippetNameBase = verbatim.LuaSnippetNameBase +local LuaSnippetNameLibraries = verbatim.LuaSnippetNameLibraries +local LuaSnippetName = verbatim.LuaSnippetName + +local namespace + +local function visualizename_a(s) + if core[s] then + namespace = nil + LuaSnippetNameCore(s) + elseif base[s] then + namespace = nil + LuaSnippetNameBase(s) + else + namespace = libraries[s] + if namespace then + LuaSnippetNameLibraries(s) + else + LuaSnippetName(s) + end + end +end + 
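-- Aside, not part of v-lua.lua: visualizename_a above resolves a name in three
-- steps (core keyword, base function, library table) and remembers the library
-- so that a following visualizename_b can highlight the field after the period.
-- A standalone sketch of that lookup order, with the hashes cut down to a few
-- entries for illustration only:

local core      = { ["for"] = true, ["end"] = true }
local base      = { print = true, type = true }
local libraries = { string = { format = true }, table = { concat = true } }

local function classify(s)
    if core[s] then
        return "core"
    elseif base[s] then
        return "base"
    elseif libraries[s] then
        return "library"
    else
        return "name"
    end
end

print(classify("for"), classify("print"), classify("string"), classify("foo"))
-- core  base  library  name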
+local function visualizename_b(s) + if namespace and namespace[s] then + namespace = nil + LuaSnippetNameLibraries(s) + else + LuaSnippetName(s) + end +end + +local function visualizename_c(s) + LuaSnippetName(s) +end + +local handler = visualizers.newhandler { + startinline = function() LuaSnippet(false,"{") end, + stopinline = function() context("}") end, + startdisplay = function() startLuaSnippet() end, + stopdisplay = function() stopLuaSnippet() end , + boundary = function(s) LuaSnippetBoundary(s) end, + special = function(s) LuaSnippetSpecial (s) end, + comment = function(s) LuaSnippetComment (s) end, + period = function(s) verbatim(s) end, + name_a = visualizename_a, + name_b = visualizename_b, + name_c = visualizename_c, +} + +local space = patterns.space +local anything = patterns.anything +local newline = patterns.newline +local emptyline = patterns.emptyline +local beginline = patterns.beginline +local somecontent = patterns.somecontent + +local comment = P("--") +local name = (patterns.letter + patterns.underscore) + * (patterns.letter + patterns.underscore + patterns.digit)^0 +local boundary = S('()[]{}') +local special = S("-+/*^%=#") + P("..") + +-- The following longstring parser is taken from Roberto's documentation +-- that can be found at http://www.inf.puc-rio.br/~roberto/lpeg/lpeg.html. + +local equals = P("=")^0 +local open = P("[") * Cg(equals, "init") * P("[") * P("\n")^-1 +local close = P("]") * C(equals) * P("]") +local closeeq = Cmt(close * Cb("init"), function(s,i,a,b) return a == b end) +local longstring = open * Cs((P(1) - closeeq)^0) * close * Carg(1) + +--~ local simple = P ( -- here we hook into the handler but it is default so we could use that +--~ makepattern(handler,"space",space) +--~ + makepattern(handler,"newline",newline) +--~ * makepattern(handler,"emptyline",emptyline) +--~ * makepattern(handler,"beginline",beginline) +--~ + makepattern(handler,"default",anything) +--~ )^0 + +local function long(content,equals,settings) + handler.boundary(format("[%s[",equals or "")) + visualizers.write(content,settings) -- unhandled + handler.boundary(format("]%s]",equals or "")) +end + +local grammar = visualizers.newgrammar("default", { "visualizer", +--~ emptyline = +--~ makepattern(handler,"emptyline",emptyline), +--~ beginline = +--~ makepattern(handler,"beginline",beginline), +--~ newline = +--~ makepattern(handler,"newline",newline), +--~ space = +--~ makepattern(handler,"space",space), +--~ default = +--~ makepattern(handler,"default",anything), +--~ line = +--~ V("newline") * V("emptyline")^0 * V("beginline"), +--~ whitespace = +--~ (V("space") + V("line"))^1, +--~ optionalwhitespace = +--~ (V("space") + V("line"))^0, +--~ content = +--~ makepattern(handler,"default",somecontent), + + sstring = + makepattern(handler,"string",patterns.dquote) + * (V("whitespace") + makepattern(handler,"default",1-patterns.dquote))^0 + * makepattern(handler,"string",patterns.dquote), + dstring = + makepattern(handler,"string",patterns.squote) + * (V("whitespace") + makepattern(handler,"default",1-patterns.squote))^0 + * makepattern(handler,"string",patterns.squote), + longstring = + longstring / long, + comment = + makepattern(handler,"comment",comment) + * (V("space") + V("content"))^0, + longcomment = + makepattern(handler,"comment",comment) + * longstring / long, + name = + makepattern(handler,"name_a",name) + * ( V("optionalwhitespace") + * makepattern(handler,"default",patterns.period) + * V("optionalwhitespace") + * makepattern(handler,"name_b",name) + )^-1 + * ( 
V("optionalwhitespace") + * makepattern(handler,"default",patterns.period) + * V("optionalwhitespace") + * makepattern(handler,"name_c",name) + )^0, + + pattern = + V("longcomment") + + V("comment") + + V("longstring") + + V("dstring") + + V("sstring") + + V("name") + + makepattern(handler,"boundary",boundary) + + makepattern(handler,"special",special) + + + V("space") + + V("line") + + V("default"), + + visualizer = + V("pattern")^1 +} ) + +local parser = P(grammar) + +visualizers.register("lua", { parser = parser, handler = handler, grammar = grammar } ) diff --git a/tex/context/base/v-lua.mkiv b/tex/context/base/v-lua.mkiv new file mode 100644 index 000000000..6bfe4a963 --- /dev/null +++ b/tex/context/base/v-lua.mkiv @@ -0,0 +1,61 @@ +%D \module +%D [ file=v-lua, +%D version=2010.10.19, +%D title=\CONTEXT\ Visualizer Macros, +%D subtitle=\LUA, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright=PRAGMA-ADE] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. + +\registerctxluafile{v-lua.lua}{1.001} + +\unprotect + +\definestartstop + [LuaSnippet] + [DefaultSnippet] + +\definestartstop + [LuaSnippetName] + [\c!color=, + \c!style=boldface] + +\definestartstop + [LuaSnippetNameCore] + [\c!color=darkgreen, + \c!style=boldface] + +\definestartstop + [LuaSnippetNameBase] + [\c!color=darkgreen, + \c!style=boldface] + +\definestartstop + [LuaSnippetNameLibraries] + [\c!color=darkgreen, + \c!style=boldface] + +\definestartstop + [LuaSnippetBoundary] + [\c!color=darkblue, + \c!style=boldface] + +\definestartstop + [LuaSnippetSpecial] + [\c!color=darkred, + \c!style=boldface] + +\definestartstop + [LuaSnippetComment] + [\c!color=darkyellow, + \c!style=boldface] + +\definetyping + [LUA] + [\c!option=lua] + +\protect \endinput diff --git a/tex/context/base/v-nested.lua b/tex/context/base/v-nested.lua new file mode 100644 index 000000000..e37e1bb58 --- /dev/null +++ b/tex/context/base/v-nested.lua @@ -0,0 +1,80 @@ +if not modules then modules = { } end modules ['v-nested'] = { + version = 1.001, + comment = "companion to v-nested.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local lpegmatch, patterns = lpeg.match, lpeg.patterns +local P, V, Carg = lpeg.P, lpeg.V, lpeg.Carg + +local context = context +local verbatim = context.verbatim +local variables = interfaces.variables + +local makepattern = visualizers.makepattern +local getvisualizer = visualizers.getvisualizer + +local nested = nil + +local donestedtypingstart = context.donestedtypingstart +local donestedtypingstop = context.donestedtypingstop + +local v_none = variables.none +local v_slanted = variables.slanted + +local handler = visualizers.newhandler { + initialize = function(settings) + local option = settings and settings.option + if not option or option == "" then + nested = nil + elseif option == v_slanted then + nested = nil + elseif option == v_none then + nested = nil + else + nested = getvisualizer(option,"direct") + end + end, + open = function() + donestedtypingstart() + end, + close = function() + donestedtypingstop() + end, + content = function(s) + if nested then + nested(s) + else + verbatim(s) + end + end, +} + +local open = P("<<") +local close = P(">>") +local rest = (1 - open - close - patterns.space - patterns.newline)^1 + +local grammar = visualizers.newgrammar("default", { + + initialize = patterns.beginofstring 
* Carg(1) / handler.initialize, + + open = makepattern(handler,"open",open), + close = makepattern(handler,"close",close), + rest = makepattern(handler,"content",rest), + + nested = V("open") * (V("pattern")^0) * V("close"), + pattern = V("line") + V("space") + V("nested") + V("rest"), + + visualizer = V("initialize") * (V("pattern")^1) + +} ) + +local parser = P(grammar) + +visualizers.register("nested", { parser = parser, handler = handler, grammar = grammar } ) + +-- lpeg.match(parser,[[<>tf<>tf>>]]) context.par() +-- lpeg.match(parser,[[<>sl>>tf>>]]) context.par() +-- lpeg.match(parser,[[sl<>tf>>sl]]) context.par() diff --git a/tex/context/base/v-nested.mkiv b/tex/context/base/v-nested.mkiv new file mode 100644 index 000000000..aa68483a3 --- /dev/null +++ b/tex/context/base/v-nested.mkiv @@ -0,0 +1,21 @@ +%D \module +%D [ file=v-nested, +%D version=2010.10.19, +%D title=\CONTEXT\ Visualizer Macros, +%D subtitle=Nested, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright=PRAGMA-ADE] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. + +\registerctxluafile{v-nested.lua}{1.001} + +\unprotect + +\unexpanded\def\donestedtypingstart{\bgroup\swaptypeface{}} +\unexpanded\def\donestedtypingstop {\egroup{}} + +\unprotect \endinput diff --git a/tex/context/base/v-tex.lua b/tex/context/base/v-tex.lua index 0a97d28d6..0977ca571 100644 --- a/tex/context/base/v-tex.lua +++ b/tex/context/base/v-tex.lua @@ -1,55 +1,71 @@ if not modules then modules = { } end modules ['v-tex'] = { version = 1.001, - comment = "companion to buff-vis.mkiv", + comment = "companion to v-tex.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -local verbatim = context.verbatim local P, S, V, patterns = lpeg.P, lpeg.S, lpeg.V, lpeg.patterns -local visualizer = { - start = function() context.startTexSnippet() end, - stop = function() context.stopTexSnippet() end , - name = function(s) verbatim.TexSnippetName(s) end, - group = function(s) verbatim.TexSnippetGroup(s) end, - boundary = function(s) verbatim.TexSnippetBoundary(s) end, - special = function(s) verbatim.TexSnippetSpecial(s) end, - comment = function(s) verbatim.TexSnippetComment(s) end, - default = function(s) verbatim(s) end, +local context = context +local verbatim = context.verbatim +local makepattern = visualizers.makepattern + +local TexSnippet = context.TexSnippet +local startTexSnippet = context.startTexSnippet +local stopTexSnippet = context.stopTexSnippet + +local TexSnippetName = verbatim.TexSnippetName +local TexSnippetGroup = verbatim.TexSnippetGroup +local TexSnippetBoundary = verbatim.TexSnippetBoundary +local TexSnippetSpecial = verbatim.TexSnippetSpecial +local TexSnippetComment = verbatim.TexSnippetComment + +local handler = visualizers.newhandler { + startinline = function() TexSnippet(false,"{") end, + stopinline = function() context("}") end, + startdisplay = function() startTexSnippet() end, + stopdisplay = function() stopTexSnippet() end , + name = function(s) TexSnippetName(s) end, + group = function(s) TexSnippetGroup(s) end, + boundary = function(s) TexSnippetBoundary(s) end, + special = function(s) TexSnippetSpecial(s) end, + comment = function(s) TexSnippetComment(s) end, } --- todo: unicode letters +-- todo: unicode letters in control sequences (slow as we need to test the nature) -local comment = S("%") -local restofline = (1-patterns.newline)^0 
-local anything = patterns.anything -local name = P("\\") * (patterns.letter + S("@!?"))^1 -local escape = P("\\") * (anything - patterns.newline)^-1 -- else we get \n -local group = S("${}") -local boundary = S('[]()<>#="') -local special = S("/^_-&+'`|") +local comment = S("%") +local name = P("\\") * (patterns.letter + S("@!?"))^1 +local escape = P("\\") * (patterns.anything - patterns.newline)^-1 -- else we get \n +local group = S("${}") +local boundary = S('[]()<>#="') +local special = S("/^_-&+'`|") local pattern = visualizers.pattern -local texvisualizer = P { "process", - process = - V("start") * V("content") * V("stop"), - start = - pattern(visualizer,"start",patterns.beginofstring), - stop = - pattern(visualizer,"stop",patterns.endofstring), - content = ( - pattern(visualizer,"comment",comment) - * pattern(visualizer,"default",restofline) - + pattern(visualizer,"name",name) - + pattern(visualizer,"name",escape) - + pattern(visualizer,"group",group) - + pattern(visualizer,"boundary",boundary) - + pattern(visualizer,"special",special) - + pattern(visualizer,"default",anything) - )^1 -} +local grammar = visualizers.newgrammar("default", { "visualizer", + + comment = makepattern(handler,"comment",comment) + * (V("space") + V("content"))^0, + name = makepattern(handler,"name",name), + escape = makepattern(handler,"name",escape), + group = makepattern(handler,"group",group), + boundary = makepattern(handler,"boundary",boundary), + special = makepattern(handler,"special",special), + + pattern = + V("comment") + V("name") + V("escape") + V("group") + V("boundary") + V("special") + + V("newline") * V("emptyline")^0 * V("beginline") + + V("space") + + V("default"), + + visualizer = + V("pattern")^1 + +} ) + +local parser = P(grammar) -return texvisualizer +visualizers.register("tex", { parser = parser, handler = handler, grammar = grammar } ) diff --git a/tex/context/base/v-tex.mkiv b/tex/context/base/v-tex.mkiv index 57aab2272..5cd5753bb 100644 --- a/tex/context/base/v-tex.mkiv +++ b/tex/context/base/v-tex.mkiv @@ -1,34 +1,51 @@ -% todo: global +%D \module +%D [ file=v-tex, +%D version=2010.10.19, +%D title=\CONTEXT\ Visualizer Macros, +%D subtitle=\TEX, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright=PRAGMA-ADE] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. 
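The v-tex.lua rewrite above shows the recipe that all these files now follow: build a handler with visualizers.newhandler, extend the default grammar with visualizers.newgrammar, wrap the rules with makepattern, and hand the result to visualizers.register; a matching \definetyping then couples a typing environment to it via the option key. As a hedged sketch of that recipe, here is a minimal visualizer that only tags hash comments and leaves everything else to the inherited default rules; the name demo and the DemoComment command it calls are invented for this example and would still need a \definestartstop on the TeX side.

-- hypothetical minimal visualizer modelled on v-tex.lua; "demo" and
-- verbatim.DemoComment are illustrative names, not part of this commit
local P, V = lpeg.P, lpeg.V
local patterns = lpeg.patterns
local makepattern = visualizers.makepattern

local handler = visualizers.newhandler {
    comment = function(s) context.verbatim.DemoComment(s) end,
}

local grammar = visualizers.newgrammar("default", { "visualizer",
    comment    = makepattern(handler,"comment",P("#") * (1-patterns.newline)^0),
    pattern    = V("comment") + V("line") + V("space") + V("default"),
    visualizer = V("pattern")^1,
} )

visualizers.register("demo", { parser = P(grammar), handler = handler, grammar = grammar })

On the TeX side such a visualizer would then be coupled to a typing environment much like the [TEX] definition above, for instance with \definetyping[DEMO][\c!option=demo] (again a made-up name).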
+ +\registerctxluafile{v-tex.lua}{1.001} \unprotect \definestartstop [TexSnippet] - [\c!style=\tt] + [DefaultSnippet] \definestartstop [TexSnippetName] [\c!color=darkgreen, - \c!style=bold] + \c!style=boldface] \definestartstop [TexSnippetGroup] [\c!color=darkred, - \c!style=bold] + \c!style=boldface] \definestartstop [TexSnippetBoundary] [\c!color=darkblue, - \c!style=bold] + \c!style=boldface] \definestartstop [TexSnippetSpecial] [\c!color=darkyellow, - \c!style=bold] + \c!style=boldface] \definestartstop [TexSnippetComment] [\c!color=darkyellow, - \c!style=bold] + \c!style=boldface] + +\definetyping + [TEX] + [\c!option=tex] \protect \endinput diff --git a/tex/context/base/v-xml.lua b/tex/context/base/v-xml.lua new file mode 100644 index 000000000..bf6945e26 --- /dev/null +++ b/tex/context/base/v-xml.lua @@ -0,0 +1,133 @@ +if not modules then modules = { } end modules ['v-xml'] = { + version = 1.001, + comment = "companion to v-xml.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local P, S, V, patterns = lpeg.P, lpeg.S, lpeg.V, lpeg.patterns + +local context = context +local verbatim = context.verbatim +local makepattern = visualizers.makepattern + +local XmlSnippet = context.XmlSnippet +local startXmlSnippet = context.startXmlSnippet +local stopXmlSnippet = context.stopXmlSnippet + +local XmlSnippetName = verbatim.XmlSnippetName +local XmlSnippetKey = verbatim.XmlSnippetKey +local XmlSnippetBoundary = verbatim.XmlSnippetBoundary +local XmlSnippetString = verbatim.XmlSnippetString +local XmlSnippetEqual = verbatim.XmlSnippetEqual +local XmlSnippetEntity = verbatim.XmlSnippetEntity +local XmlSnippetComment = verbatim.XmlSnippetComment +local XmlSnippetCdata = verbatim.XmlSnippetCdata + +local handler = visualizers.newhandler { + startinline = function() XmlSnippet(false,"{") end, + stopinline = function() context("}") end, + startdisplay = function() startXmlSnippet() end, + stopdisplay = function() stopXmlSnippet () end, + name = function(s) XmlSnippetName(s) end, + key = function(s) XmlSnippetKey(s) end, + boundary = function(s) XmlSnippetBoundary(s) end, + string = function(s) XmlSnippetString(s) end, + equal = function(s) XmlSnippetEqual(s) end, + entity = function(s) XmlSnippetEntity(s) end, + comment = function(s) XmlSnippetComment(s) end, + cdata = function(s) XmlSnippetCdata(s) end, +} + +local comment = P("--") +local name = (patterns.letter + patterns.digit + S('_-.'))^1 +local entity = P("&") * (1-P(";"))^1 * P(";") +local openbegin = P("<") +local openend = P("</") +local closebegin = P("/>") + P(">") +local closeend = P(">") +local opencomment = P("<!--") +local closecomment = P("-->") +local openinstruction = P("<?") +local closeinstruction = P("?>") +local opencdata = P("<![CDATA[") +local closecdata = P("]]>") + +local grammar = visualizers.newgrammar("default", { "visualizer", + sstring = + makepattern(handler,"string",patterns.dquote) + * (V("whitespace") + makepattern(handler,"default",1-patterns.dquote))^0 + * makepattern(handler,"string",patterns.dquote), + dstring = + makepattern(handler,"string",patterns.squote) + * (V("whitespace") + makepattern(handler,"default",1-patterns.squote))^0 + * makepattern(handler,"string",patterns.squote), + entity = + makepattern(handler,"entity",entity), + name = + makepattern(handler,"name",name) + * ( + makepattern(handler,"default",patterns.colon) + * makepattern(handler,"name",name) + )^0, + key = + makepattern(handler,"key",name) + * ( + makepattern(handler,"default",patterns.colon) + * makepattern(handler,"key",name) + )^0, + attributes = ( +
V("optionalwhitespace") + * V("key") + * V("optionalwhitespace") + * makepattern(handler,"equal",patterns.equal) + * V("optionalwhitespace") + * (V("dstring") + V("sstring")) + * V("optionalwhitespace") + )^0, + open = + makepattern(handler,"boundary",openbegin) + * V("name") + * V("optionalwhitespace") + * V("attributes") + * makepattern(handler,"boundary",closebegin), + close = + makepattern(handler,"boundary",openend) + * V("name") + * V("optionalwhitespace") + * makepattern(handler,"boundary",closeend), + comment = + makepattern(handler,"boundary",opencomment) + * (V("whitespace") + makepattern(handler,"comment",(1-closecomment)))^0 -- slow + * makepattern(handler,"boundary",closecomment), + cdata = + makepattern(handler,"boundary",opencdata) + * (V("whitespace") + makepattern(handler,"comment",(1-closecdata)))^0 -- slow + * makepattern(handler,"boundary",closecdata), + instruction = + makepattern(handler,"boundary",openinstruction) + * V("name") + * V("optionalwhitespace") + * V("attributes") + * V("optionalwhitespace") + * makepattern(handler,"boundary",closeinstruction), + + pattern = + V("comment") + + V("instruction") + + V("cdata") + + V("close") + + V("open") + + V("entity") + + V("space") + + V("line") + + V("default"), + + visualizer = + V("pattern")^1 +} ) + +local parser = P(grammar) + +visualizers.register("xml", { parser = parser, handler = handler, grammar = grammar } ) diff --git a/tex/context/base/v-xml.mkiv b/tex/context/base/v-xml.mkiv new file mode 100644 index 000000000..3cc9244f2 --- /dev/null +++ b/tex/context/base/v-xml.mkiv @@ -0,0 +1,71 @@ +%D \module +%D [ file=v-xml, +%D version=2010.10.19, +%D title=\CONTEXT\ Visualizer Macros, +%D subtitle=\XML, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright=PRAGMA-ADE] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. + +\registerctxluafile{v-xml.lua}{1.001} + +\unprotect + +\definestartstop + [XmlSnippet] + [DefaultSnippet] + +\definestartstop + [XmlSnippetName] + [\c!color=darkblue, + \c!style=boldface] + +\definestartstop + [XmlSnippetKey] + [\c!color=darkgreen, + \c!style=boldface] + +\definestartstop + [XmlSnippetBoundary] + [\c!color=darkblue, + \c!style=boldface] + +\definestartstop + [XmlSnippetEntity] + [\c!color=darkred, + \c!style=boldface] + +\definestartstop + [XmlSnippetString] + [\c!color=darkblue, + \c!style=boldface] + +\definestartstop + [XmlSnippetEqual] + [\c!color=darkblue, + \c!style=boldface] + +\definestartstop + [XmlSnippetComment] + [\c!color=darkyellow, + \c!style=boldface] + +\definestartstop + [XmlSnippetCdata] + [\c!color=darkyellow, + \c!style=boldface] + +\definestartstop + [XmlSnippetInstruction] + [\c!color=darkyellow, + \c!style=boldface] + +\definetyping + [XML] + [\c!option=xml] + +\protect \endinput diff --git a/tex/generic/context/luatex-fonts-merged.lua b/tex/generic/context/luatex-fonts-merged.lua index 0209d16bf..b50799488 100644 --- a/tex/generic/context/luatex-fonts-merged.lua +++ b/tex/generic/context/luatex-fonts-merged.lua @@ -1,6 +1,6 @@ -- merged file : luatex-fonts-merged.lua -- parent file : luatex-fonts.lua --- merge date : 11/20/10 12:51:46 +-- merge date : 11/26/10 21:21:25 do -- begin closure to overcome local limits and interference @@ -143,6 +143,9 @@ local lpeg = require("lpeg") local type = type +-- Beware, we predefine a bunch of patterns here and one reason for doing so +-- is that we get consistent behaviour in some of the visualizers. 
+ lpeg.patterns = lpeg.patterns or { } -- so that we can share local patterns = lpeg.patterns @@ -159,19 +162,38 @@ local alwaysmatched = P(true) patterns.anything = anything patterns.endofstring = endofstring patterns.beginofstring = alwaysmatched +patterns.alwaysmatched = alwaysmatched local digit, sign = R('09'), S('+-') local cr, lf, crlf = P("\r"), P("\n"), P("\r\n") +local newline = crlf + cr + lf local utf8next = R("\128\191") local escaped = P("\\") * anything local squote = P("'") local dquote = P('"') +local space = P(" ") + +patterns.somecontent = (anything - newline - space)^1 +patterns.beginline = #(1-newline) + +local utfbom_32_be = P('\000\000\254\255') +local utfbom_32_le = P('\255\254\000\000') +local utfbom_16_be = P('\255\254') +local utfbom_16_le = P('\254\255') +local utfbom_8 = P('\239\187\191') +local utfbom = utfbom_32_be + utfbom_32_le + + utfbom_16_be + utfbom_16_le + + utfbom_8 +local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le" + + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le" + + utfbom_8 / "utf-8" + alwaysmatched / "unknown" patterns.utf8one = R("\000\127") patterns.utf8two = R("\194\223") * utf8next patterns.utf8three = R("\224\239") * utf8next * utf8next patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next -patterns.utfbom = P('\000\000\254\255') + P('\255\254\000\000') + P('\255\254') + P('\254\255') + P('\239\187\191') +patterns.utfbom = utfbom +patterns.utftype = utftype local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false) @@ -197,24 +219,30 @@ patterns.hexadecimal = P("0x") * R("09","AF","af")^1 patterns.lowercase = R("az") patterns.uppercase = R("AZ") patterns.letter = patterns.lowercase + patterns.uppercase -patterns.space = P(" ") +patterns.space = space patterns.tab = P("\t") patterns.spaceortab = patterns.space + patterns.tab patterns.eol = S("\n\r") patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto) -patterns.newline = crlf + cr + lf -patterns.nonspace = 1 - patterns.space +patterns.newline = newline +patterns.emptyline = newline^1 patterns.nonspacer = 1 - patterns.spacer patterns.whitespace = patterns.eol + patterns.spacer patterns.nonwhitespace = 1 - patterns.whitespace +patterns.equal = P("=") patterns.comma = P(",") patterns.commaspacer = P(",") * patterns.spacer^0 patterns.period = P(".") +patterns.colon = P(":") +patterns.semicolon = P(";") +patterns.underscore = P("_") patterns.escaped = escaped patterns.squote = squote patterns.dquote = dquote -patterns.undouble = (dquote/"") * ((escaped + (1-dquote))^0) * (dquote/"") -patterns.unsingle = (squote/"") * ((escaped + (1-squote))^0) * (squote/"") +patterns.nosquote = (escaped + (1-squote))^0 +patterns.nodquote = (escaped + (1-dquote))^0 +patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"") +patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"") patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble patterns.unspacer = ((patterns.spacer^1)/"")^0 @@ -237,19 +265,6 @@ function lpeg.splitter(pattern, action) return (((1-P(pattern))^1)/action+1)^0 end -local spacing = patterns.spacer^0 * patterns.newline -- sort of strip -local empty = spacing * Cc("") -local nonempty = Cs((1-spacing)^1) * spacing^-1 -local content = (empty + nonempty)^1 - -local capture = Ct(content^0) - -function string.splitlines(str) - return match(capture,str) -end - 
-patterns.textline = content - local splitters_s, splitters_m = { }, { } local function splitat(separator,single) @@ -296,6 +311,35 @@ function string.split(str,separator) return match(c,str) end +local spacing = patterns.spacer^0 * newline -- sort of strip +local empty = spacing * Cc("") +local nonempty = Cs((1-spacing)^1) * spacing^-1 +local content = (empty + nonempty)^1 + +patterns.textline = content + +--~ local linesplitter = Ct(content^0) +--~ +--~ function string.splitlines(str) +--~ return match(linesplitter,str) +--~ end + +local linesplitter = Ct(splitat(newline)) + +patterns.linesplitter = linesplitter + +function string.splitlines(str) + return match(linesplitter,str) +end + +local utflinesplitter = utfbom^-1 * Ct(splitat(newline)) + +patterns.utflinesplitter = utflinesplitter + +function string.utfsplitlines(str) + return match(utflinesplitter,str) +end + --~ lpeg.splitters = cache -- no longer public local cache = { } @@ -8235,7 +8279,7 @@ local contextsetups = specifiers.contextsetups local contextnumbers = specifiers.contextnumbers local contextmerged = specifiers.contextmerged --- we cannot optimize with "start = first_character(head)" because then we don't +-- we cannot optimize with "start = first_glyph(head)" because then we don't -- know which rlmode we're in which messes up cursive handling later on -- -- head is always a whatsit so we can safely assume that head is not changed
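The new splitters and BOM helpers added to the merged file above are easy to exercise. Below is a small usage sketch, assuming a LuaTeX run in which l-lpeg.lua (or this merged file) has already been loaded; the sample strings are made up.

-- usage sketch for the splitters and BOM detection added above
local patterns = lpeg.patterns

local text  = "alpha\r\nbeta\ngamma"
local lines = string.splitlines(text)
print(#lines, lines[1], lines[3])            -- 3  alpha  gamma

local utftext = "\239\187\191first\nsecond"  -- utf-8 BOM followed by two lines
print(lpeg.match(patterns.utftype,utftext))  -- utf-8
print(#string.utfsplitlines(utftext))        -- 2, the BOM is skipped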