From 6312e2b2913bc7de6f3c0ba30b993e2b4714edf1 Mon Sep 17 00:00:00 2001
From: Hans Hagen
+This is a prelude to a more extensive logging module. For the sake
+of parsing log files, in addition to the standard logging we will
+provide an XML variant.
+
+This looks pretty ugly but we need to speed things up a bit.
+--ldx]]-- + +logs.levels = { + ['error'] = 1, + ['warning'] = 2, + ['info'] = 3, + ['debug'] = 4 +} + +logs.functions = { + 'error', 'warning', 'info', 'debug', 'report', + 'start', 'stop', 'push', 'pop' +} + +logs.callbacks = { + 'start_page_number', + 'stop_page_number', + 'report_output_pages', + 'report_output_log' +} + +logs.xml = logs.xml or { } +logs.tex = logs.tex or { } + +logs.level = 0 + +do + local write_nl, write, format = texio.write_nl or print, texio.write or io.write, string.format + + if texlua then + write_nl = print + write = io.write + end + + function logs.xml.debug(category,str) + if logs.level > 3 then write_nl(format("The next variant has lazy token collecting, on a 140 page mk.tex this saves + about .25 seconds, which is understandable because we have no graphmes and + not collecting tokens is not only faster but also saves garbage collecting. +
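
As a rough illustration of the lazy collecting described above, here is a hedged, pure Lua sketch that is not the ConTeXt implementation: the replacement table is invented and plain byte iteration stands in for the utf and grapheme (cr/cg) handling, but it shows the idea of not building a token table until the first replacement is actually needed.

-- A stripped down illustration of lazy collecting: nothing is copied into
-- a table until the first character that actually needs replacing shows up,
-- and unchanged strings are returned as-is, so they produce no garbage.
-- The replacement table is made up; the real code works on grapheme data.

local replacements = { ["a"] = "A" } -- illustrative only

local function collapse(str)
    local tokens, n, done = nil, 0, false
    for c in str:gmatch(".") do        -- the real code iterates utf characters
        local r = replacements[c]
        if done then
            tokens[#tokens+1] = r or c
        elseif r then
            -- first hit: copy the n characters already passed, then keep collecting
            tokens, done = { str:sub(1,n), r }, true
        else
            n = n + 1
        end
    end
    if done then
        return table.concat(tokens)
    else
        return str                      -- fast path: nothing to collapse
    end
end

print(collapse("banana"), collapse("xyz")) -- bAnAnA  xyz
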
+ --ldx]]-- + + function characters.filters.utf.collapse(str) -- not really tested (we could preallocate a table) + if cf.collapsing and str then if #str > 1 then - if not characters.filters.utf.initialized then -- saves a call - characters.filters.utf.initialize() + if not cf.initialized then -- saves a call + cf.initialize() end - local tokens, first, done = { }, false, false - for second in string.utfcharacters(str) do - if cr[second] then - if first then - tokens[#tokens+1] = first + local tokens, first, done, n = { }, false, false, 0 + for second in su(str) do + if done then + if cr[second] then + if first then + tokens[#tokens+1] = first + end + first = cr[second] + else + local cgf = cg[first] + if cgf and cgf[second] then + first = cgf[second] + elseif first then + tokens[#tokens+1] = first + first = second + else + first = second + end end - first, done = cr[second], true else - local cgf = cg[first] - if cgf and cgf[second] then - first, done = cgf[second], true - elseif first then - tokens[#tokens+1] = first - first = second + if cr[second] then + for s in su(str) do + if n == 0 then + break + else + tokens[#tokens+1], n = s, n - 1 + end + end + if first then + tokens[#tokens+1] = first + end + first, done = cr[second], true else - first = second + local cgf = cg[first] + if cgf and cgf[second] then + for s in su(str) do + if n == 0 then + break + else + tokens[#tokens+1], n = s, n -1 + end + end + first, done = cgf[second], true + else + first, n = second, n + 1 + end end end end if done then tokens[#tokens+1] = first - return table.concat(tokens,"") + return concat(tokens,"") -- seldom called end elseif #str > 0 then return cr[str] or str @@ -187,6 +268,53 @@ do return str end + --~ not faster (0.1 seconds on a 500 k collapsable file) + --~ + --~ local specials, initials = lpeg.P(false), "" + --~ for k,v in pairs(cr) do + --~ specials, initials = specials + lpeg.P(k)/v, initials .. k:sub(1,1) + --~ end + --~ specials = lpeg.Cs(lpeg.P((1-lpeg.S(initials)) + specials)^0) + --~ local graphemes = "" + --~ for _, v in pairs(cg) do + --~ for kk, _ in pairs(v) do + --~ graphemes = graphemes .. 
kk:sub(1,1) + --~ end + --~ end + --~ graphemes = lpeg.P{ lpeg.S(graphemes) + 1 * lpeg.V(1) } + --~ + --~ function characters.filters.utf.collapse(str) + --~ if cf.collapsing and str then + --~ if #str > 1 then + --~ str = specials:match(str) + --~ if graphemes:match(str) then + --~ if not cf.initialized then -- saves a call + --~ cf.initialize() + --~ end + --~ local tokens, first, done = { }, false, false + --~ for second in su(str) do + --~ local cgf = cg[first] + --~ if cgf and cgf[second] then + --~ first, done = cgf[second], true + --~ elseif first then + --~ tokens[#tokens+1] = first + --~ first = second + --~ else + --~ first = second + --~ end + --~ end + --~ if done then + --~ tokens[#tokens+1] = first + --~ return table.concat(tokens,"") + --~ end + --~ end + --~ elseif #str > 0 then + --~ return cr[str] or str + --~ end + --~ end + --~ return str + --~ end + end --[[ldx-- diff --git a/tex/context/base/colo-new.lua b/tex/context/base/colo-new.lua index 842e9c15a..b009c5e9b 100644 --- a/tex/context/base/colo-new.lua +++ b/tex/context/base/colo-new.lua @@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['colo-ini'] = { license = "see context related readme files" } +-- split_settings -> aux.settings_to_hash + -- for the moment this looks messy but we're waiting for a pdf backend interface -- -- code collected here will move and be adapted @@ -42,7 +44,7 @@ do function backends.pdf.registerspotcolorname(name,e) if e and e ~= "" then - tex.sprint(tex.ctxcatcodes,string.format(s_template_e,name,e)) + tex.sprint(tex.ctxcatcodes,string.format(s_template_e,name,e)) -- todo in new backend: e:gsub(" ","#20") end end @@ -149,7 +151,7 @@ do elseif kind == 4 then backend.registercmykspotcolor(parent,f,d,p,v[6],v[7],v[8],v[9]) end - backends.pdf.registerspotcolorname(name,e) + backends.pdf.registerspotcolorname(parent,e) end end @@ -473,18 +475,33 @@ end -- literals needed to inject code in the mp stream, we cannot use attributes there -- since literals may have qQ's -function ctx.pdfrgbliteral(model,r,g,b) - tex.sprint(tex.ctxcatcodes,string.format("\\pdfliteral{%s}",ctx.pdfcolor(model,colors.register('color',nil,'rgb',r,g,b)))) -end -function ctx.pdfcmykliteral(model,c,m,y,k) - tex.sprint(tex.ctxcatcodes,string.format("\\pdfliteral{%s}",ctx.pdfcolor(model,colors.register('color',nil,'cmyk',c,m,y,k)))) -end -function ctx.pdfgrayliteral(model,s) - tex.sprint(tex.ctxcatcodes,string.format("\\pdfliteral{%s}",ctx.pdfcolor(model,colors.register('color',nil,'gray',s)))) -end -function ctx.pdfspotliteral(model,n,f,d,p) - tex.sprint(tex.ctxcatcodes,string.format("\\pdfliteral{%s}",ctx.pdfcolor(model,colors.register('color',nil,'spot',n,f,d,p)))) -- incorrect -end -function ctx.pdftransparencyliteral(a,t) - tex.sprint(tex.ctxcatcodes,string.format("\\pdfliteral{/Tr%s gs}",transparencies.register(nil,a,t))) +do + + local format, sprint = string.format, tex.sprint + + local intransparency = false + + function ctx.pdfrgbliteral(model,r,g,b) + sprint(tex.ctxcatcodes,format("\\pdfliteral{%s}",ctx.pdfcolor(model,colors.register('color',nil,'rgb',r,g,b)))) + end + function ctx.pdfcmykliteral(model,c,m,y,k) + sprint(tex.ctxcatcodes,format("\\pdfliteral{%s}",ctx.pdfcolor(model,colors.register('color',nil,'cmyk',c,m,y,k)))) + end + function ctx.pdfgrayliteral(model,s) + sprint(tex.ctxcatcodes,format("\\pdfliteral{%s}",ctx.pdfcolor(model,colors.register('color',nil,'gray',s)))) + end + function ctx.pdfspotliteral(model,n,f,d,p) + 
sprint(tex.ctxcatcodes,format("\\pdfliteral{%s}",ctx.pdfcolor(model,colors.register('color',nil,'spot',n,f,d,p)))) -- incorrect + end + function ctx.pdftransparencyliteral(a,t) + intransparency = true + sprint(tex.ctxcatcodes,format("\\pdfliteral{/Tr%s gs}",transparencies.register(nil,a,t))) + end + function ctx.pdffinishtransparency() + if intransparency then + intransparency = false + sprint(tex.ctxcatcodes,"\\pdfliteral{/Tr0 gs}") -- we happen to know this -) + end + end + end diff --git a/tex/context/base/colo-new.mkii b/tex/context/base/colo-new.mkii index 9bef82710..ac8b86715 100644 --- a/tex/context/base/colo-new.mkii +++ b/tex/context/base/colo-new.mkii @@ -904,6 +904,12 @@ %D page color. This macro is used in the same way as %D \type {\color}. +\def\startregistercolor[#1]% + {\permitcolormodefalse\startcolor[#1]\permitcolormodetrue} + +\def\stopregistercolor + {\permitcolormodefalse\stopcolor\permitcolormodetrue} + \def\starttextcolor[#1]% {\doifsomething{#1} {\bgroup diff --git a/tex/context/base/colo-new.mkiv b/tex/context/base/colo-new.mkiv index e7f8dfd17..38cbd7339 100644 --- a/tex/context/base/colo-new.mkiv +++ b/tex/context/base/colo-new.mkiv @@ -106,6 +106,13 @@ \csname(ts:#1)\endcsname \fi\fi} +\let\normaldoactivatecolor\doactivatecolor + +\def\doactivatecolor + {\ctxlua{colors.enabled=true}% + \let\doactivatecolor\normaldoactivatecolor + \doactivatecolor} + \def\deactivatecolor {\doresetattribute\s!color \doresetattribute\s!transparency} diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv index c97575baa..74d4173a3 100644 --- a/tex/context/base/cont-new.mkiv +++ b/tex/context/base/cont-new.mkiv @@ -19,27 +19,66 @@ \enablemode[mkiv] +% potential new defaults: + +% \setbreakpoints[compound] + \unprotect -\appendtoks - \ctxlua{garbagecollector.update()}% -\to \everyshipout +\ifx\clearmarks\undefined + \def\clearmarks {\begingroup\afterassignment\doclearmarks\scratchcounter} + \def\doclearmarks{\normalmarks\scratchcounter{}\endgroup} +\fi + +\def\resetmark#1% we cannot use \normalmarks#1{} + {\global\@EA\chardef\csname\@@mrk\string#1\endcsname\zerocount + \@EA\clearmarks\csname\@@prk\string#1\endcsname + \global\@EA\let\csname\@@trk\string#1\endcsname\empty + \global\@EA\let\csname\@@frk\string#1\endcsname\empty + \global\@EA\let\csname\@@brk\string#1\endcsname\empty + \global\@EA\let\csname\@@crk\string#1\endcsname\empty} + +% \appendtoks +% \ctxlua{garbagecollector.update()}% +% \to \everyshipout % texmf.instance will become just texmf +%D Since this can be a showstopper, we report the path at the beginning +%D as well as at the end of a run. + +\writestatus\m!lua{used config path - \ctxlua{tex.print(caches.configpath(texmf.instance))}} +\writestatus\m!lua{used cache path - \ctxlua{tex.print(caches.path)}} + +%D For the moment we report some statistics. Later this will become an option, +%D but for now we need this information. 
+ \appendtoks + \writestatus\m!lua{used config path - \ctxlua{tex.print(caches.configpath(texmf.instance))}}% + \writestatus\m!lua{used cache path - \ctxlua{tex.print(caches.path)}}% + \writestatus\m!lua{modules/dumps/instances - \ctxlua{tex.print((status.luabytecodes-500).."/"..input.storage.done.."/"..status.luastates)}}% \writestatus\m!lua{input load time - \ctxlua{input.loadtime(texmf.instance)} seconds}% \writestatus\m!lua{fonts load time - \ctxlua{input.loadtime(fonts)} seconds}% \writestatus\m!lua{xml load time - \ctxlua{input.loadtime(lxml)} seconds}% \writestatus\m!lua{mps conversion time - \ctxlua{input.loadtime(mptopdf)} seconds}% - \writestatus\m!lua{node processing time - \ctxlua{input.loadtime(nodes)} seconds}% + \writestatus\m!lua{node processing time - \ctxlua{input.loadtime(nodes)} seconds (including kernel)}% + \writestatus\m!lua{kernel processing time - \ctxlua{input.loadtime(kernel)} seconds}% \writestatus\m!lua{attribute processing time - \ctxlua{input.loadtime(attributes)} seconds}% - \writestatus\m!lua{used config path - \ctxlua{tex.print(caches.configpath(texmf.instance))}}% - \writestatus\m!lua{used cache path - \ctxlua{tex.print(caches.path)}}% - \writestatus\m!lua{modules/dumps/instances - \ctxlua{tex.print((status.luabytecodes-500).."/"..input.storage.done.."/"..status.luastates)}}% - \writestatus\m!lua{current memory usage - \ctxlua{tex.print(status.luastate_bytes)} bytes}% - \writestatus\m!lua{language load time - \ctxlua{input.loadtime(languages)} seconds (n=\ctxlua{tex.print(languages.n())})}% + \writestatus\m!lua{language load time - \ctxlua{input.loadtime(languages)} seconds (n=\ctxlua{tex.print(languages.hyphenation.n())})}% \writestatus\m!lua{loaded fonts - \ctxlua{tex.print(fonts.logger.report())}}% + \writestatus\m!lua{loaded patterns - \ctxlua{tex.print(languages.logger.report())}}% + \writestatus\m!lua{current memory usage - \ctxlua{tex.print(status.luastate_bytes)} bytes}% + \writestatus\m!lua{cleaned up reserved nodes - \ctxlua{ + tex.print(string.format("\letterpercent s nodes, \letterpercent s lists (of \letterpercent s)", nodes.cleanup_reserved(\number\topofboxstack))) + }}% +\to \everybye + +% \appendtoks +% \ctxlua{nodes.check_for_leaks()}% +% \to \everygoodbye + +\appendtoks + \writestatus{remark}{temporary fallback to base mode for tlig and trep}% end of font-otf.lua \to \everybye \def\resettimer {\ctxlua{environment.starttime = os.clock()}} @@ -49,10 +88,11 @@ %D For me. 
\def\traceluausage + {\dosingleempty\dotraceluausage} + +\def\dotraceluausage[#1]% {\ctxlua{debugger.enable()}% - \appendtoks - \ctxlua{debugger.disable() debugger.showstats(texio.write,5000)}% - \to \everybye} + \appendtoks\ctxlua{debugger.disable() debugger.showstats(print,\doifnumberelse{#1}{#1}{5000})}\to\everybye} %D Fonts (experimental AFM loading} @@ -109,9 +149,9 @@ \definestartstop[randomized][\c!before=\dosetattribute{case}\plusfour,\c!after=] -\def\WORD{\groupedcommand{\dosetattribute{case}\plusone }{}} -\def\word{\groupedcommand{\dosetattribute{case}\plustwo }{}} -\def\Word{\groupedcommand{\dosetattribute{case}\plusthree}{}} % \plusfour +\def\WORD{\groupedcommand{\setcharactercasing[\plusone ]}{}} +\def\word{\groupedcommand{\setcharactercasing[\plustwo ]}{}} +\def\Word{\groupedcommand{\setcharactercasing[\plusthree]}{}} % \plusfour \let\WORDS\WORD \let\words\word @@ -121,3 +161,17 @@ % \expanded{\defineactivecharacter \number"2000E} {\textdir TRT\relax} % \expanded{\defineactivecharacter \number"2000F} {\textdir TLT\relax} + +\startluacode + local ss = { } + function ctx.writestatus(a,b) + local s = ss[a] + if not ss[a] then + s = a:rpadd(15) .. ":" + ss[a] = s + end + texio.write_nl(s .. b) + end +\stopluacode + +\def\writestatus#1#2{\ctxlua{ctx.writestatus([[#1]],[[#2]])}} diff --git a/tex/context/base/cont-new.tex b/tex/context/base/cont-new.tex index 03b7ed27c..9e3a4eb37 100644 --- a/tex/context/base/cont-new.tex +++ b/tex/context/base/cont-new.tex @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. -\newcontextversion{2007.09.28 11:58} +\newcontextversion{2007.12.05 13:56} %D This file is loaded at runtime, thereby providing an %D excellent place for hacks, patches, extensions and new @@ -53,7 +53,7 @@ \def\floatsetupcontent {\copy\nextbox}% \def\floatsetupwidth {\wd\nextbox}% \def\floatsetupheight {\ht\nextbox}% - \def\placesetupfloat[##1]{\placefloat[##1][#2][#3]{\floatsetupcaption}{\floatsetupcontent}}% + \def\placesetupfloat[##1]{\placefloat[##1][#2][#3]{#4}{\floatsetupcontent}}% #4 and not \floatsetupcaption (unexpanded) \dowithnextbox{\setups[#1]}\vbox} \chardef\baselinegridmode=0 % option in layout / 1=permit_half_lines @@ -1040,13 +1040,15 @@ \let\normaltype\type -\beginTEX - \unexpanded\def\retype#1{\bgroup\convertargument#1\to\ascii\@EA\normaltype\@EA{\ascii}\egroup} -\endTEX - -\beginETEX - \unexpanded\def\retype#1{\scantokens{\normaltype{#1}\ignorespaces}} -\endETEX +\ifx\scantextokens\undefined + \ifx\scantokens\undefined + \unexpanded\def\retype#1{\bgroup\convertargument#1\to\ascii\@EA\normaltype\@EA{\ascii}\egroup} + \else + \unexpanded\def\retype#1{\scantokens{\normaltype{#1}\ignorespaces}\relax} + \fi +\else + \unexpanded\def\retype#1{\scantextokens{\normaltype{#1}}} +\fi \def\simplifytype{\let\type\retype} diff --git a/tex/context/base/context.tex b/tex/context/base/context.tex index 2c807f93c..c43b89b72 100644 --- a/tex/context/base/context.tex +++ b/tex/context/base/context.tex @@ -42,7 +42,7 @@ %D your styles an modules. \edef\contextformat {\jobname} -\edef\contextversion{2007.09.28 11:58} +\edef\contextversion{2007.12.05 13:56} %D For those who want to use this: @@ -372,7 +372,8 @@ \loadcorefile{page-lyr.tex} \loadcorefile{page-mak.tex} \loadcorefile{page-num.tex} -\loadcorefile{page-lin.tex} +\loadmarkfile{page-lin} +\loadcorefile{page-par.tex} \loadcorefile{page-mar.tex} \loadcorefile{core-job.tex} % why so late? 
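
The ctx.writestatus definition above caches a right padded "category:" prefix per category so that repeated status lines stay cheap and aligned. A hedged stand-alone sketch of the same idea, using plain string.format padding because rpadd is a ConTeXt string helper that stock Lua does not have; the messages in the usage lines are just examples:

-- Prefix caching as in ctx.writestatus: pad the category once, reuse it,
-- and fall back to print when texio is not available (plain Lua runs).

local prefixes = { } -- category -> padded "category       :" string

local write_nl = (texio and texio.write_nl) or print

local function writestatus(category,message)
    local s = prefixes[category]
    if not s then
        s = string.format("%-15s:",category) -- pad once, reuse afterwards
        prefixes[category] = s
    end
    write_nl(s .. message)
end

writestatus("lua","used cache path - /tmp/texmf-cache") -- path is just an example
writestatus("lua","input load time - 0.5 seconds")
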
diff --git a/tex/context/base/core-buf.lua b/tex/context/base/core-buf.lua index 6277a95ed..081655a72 100644 --- a/tex/context/base/core-buf.lua +++ b/tex/context/base/core-buf.lua @@ -162,13 +162,13 @@ function buffers.inspect(name) if v == "" then tex.sprint(tex.ctxcatcodes,"[crlf]\\par ") else - tex.sprint(tex.ctxcatcodes,(string.gsub("(.)",function(c) + tex.sprint(tex.ctxcatcodes,(buffers.data[name]:gsub("(.)",function(c) return " [" .. string.byte(c) .. "] " end)) .. "\\par") end end else - tex.sprint(tex.ctxcatcodes,(string.gsub(buffers.data[name],"(.)",function(c) + tex.sprint(tex.ctxcatcodes,(buffers.data[name]:gsub("(.)",function(c) return " [" .. string.byte(c) .. "] " end))) end @@ -354,7 +354,7 @@ buffers.open_nested = string.rep("\\char"..string.byte('<').." ",2) buffers.close_nested = string.rep("\\char"..string.byte('>').." ",2) function buffers.replace_nested(result) - return (string.gsub(string.gsub(result,buffers.open_nested,"{"),buffers.close_nested,"}")) + return (string.gsub(result:gsub(buffers.open_nested,"{"),buffers.close_nested,"}")) end function buffers.flush_result(result,nested) diff --git a/tex/context/base/core-des.tex b/tex/context/base/core-des.tex index d0152fddc..f11721c96 100644 --- a/tex/context/base/core-des.tex +++ b/tex/context/base/core-des.tex @@ -828,9 +828,7 @@ \def\do@@label[#1][#2]% {\numberparameter{#1}\c!before \numberparameter{#1}\c!command - {\doattributes{\@@thenumber{#1}}\c!headstyle\c!headcolor - {\dotextprefix{\numberparameter{#1}\c!text}% - \getvalue{\e!next#1}[#2]}}% + {\doattributes{\@@thenumber{#1}}\c!headstyle\c!headcolor{\getvalue{\e!next#1}[#2]}}% \numberparameter{#1}\c!after}% \def\do@@nextlabel[#1][#2]% diff --git a/tex/context/base/core-fig.tex b/tex/context/base/core-fig.tex index 104b753ec..27825ba5c 100644 --- a/tex/context/base/core-fig.tex +++ b/tex/context/base/core-fig.tex @@ -1002,6 +1002,14 @@ \def\dogetfiguredimensionsonly[#1][#2]% {\dogetfiguredimensions[#1][#2]% \doresetobjects} + +\def\doiffigureelse#1% + {\getfiguredimensions[#1]% + \ifcase\figurewidth + \expandafter\secondoftwoarguments + \else + \expandafter\firstoftwoarguments + \fi} %D Size determination. 
%D @@ -1408,7 +1416,7 @@ \def\doexternalfigure[#1][#2][#3]% [label][file][settings] | [file][settings] | [file][parent][settings] {\bgroup \doifelsenothing{#1} - {\framed[\c!width=\defaultfigurewidth,\c!height=\defaultfigureheight]{external\\figure}} + {\framed[\c!width=\defaultfigurewidth,\c!height=\defaultfigureheight]{external\\figure\\no name}} {\doifundefinedelse{\??ef\??ef#1} {\useexternalfigure[\s!dummy][#1][#2][#3]% \getvalue{\??ef\??ef\s!dummy}[]} % [] is dummy arg 5 diff --git a/tex/context/base/core-itm.tex b/tex/context/base/core-itm.tex index 42d45a5df..97b102e4e 100644 --- a/tex/context/base/core-itm.tex +++ b/tex/context/base/core-itm.tex @@ -214,7 +214,7 @@ \def\doinitializeitemgrouplevel#1% {\copyparameters [\??op\currentitemgroup#1][\??oo] - [\c!width,\c!factor,\c!distance,\c!align,\c!option, + [\c!width,\c!factor,\c!distance,\c!align,\c!symalign,\c!option, \c!style,\c!marstyle,\c!symstyle,\c!headstyle, \c!color,\c!marcolor,\c!symcolor,\c!headcolor, \c!beforehead,\c!afterhead,\c!before,\c!inbetween,\c!after, @@ -606,6 +606,16 @@ \ifx\startcolumns\undefined \def\startcolumns[#1]{} \fi \ifx\stopcolumns \undefined \let\stopcolumns\relax \fi +\def\dosetsymalign#1% hm, we should use one of the core-spa macros or make a helper + {\processaction + [#1] + [ \v!flushleft=>\let\symalignleft\relax, + \v!right=>\let\symalignleft\relax, + \v!flushright=>\let\symalignleft\hfill, + \v!left=>\let\symalignleft\hfill, + \v!middle=>\let\symalignleft\hfil, + \v!center=>\let\symalignleft\hfil]} + \def\redostartitemgroup[#1][#2]% {\setfalse\inlinelistitem % new, no indent (leftskip) \setfalse\concatnextitem % new, concat @@ -642,6 +652,7 @@ \let\marsymbol\relax \globallet\doitemdestination\empty \let\symsymbol\empty + \let\symalignleft\relax \the\itemgroupcommands \checkcurrentnofitems % \getitemparameter\itemlevel\empty @@ -672,6 +683,7 @@ \doadaptrightskip{\getitemparameter1\c!rightmargin}% \fi \dosetraggedcommand{\getitemparameter\itemlevel\c!align}\raggedcommand + \dosetsymalign{\getitemparameter\itemlevel\c!symalign}% \doifsomething{\getitemparameter\itemlevel\c!indenting} {% is \expanded needed? 
\expanded{\setupindenting[\getitemparameter\itemlevel\c!indenting]}}% @@ -772,11 +784,17 @@ \dontrechecknextindentation \fi \fi - \endgroup - \doglobal\decrement(\itemlevel,\itemincrement)% - \egroup - % new needed in sidefloats (surfaced in volker's proceedings) - \ifconditional\textlistitem\else\par\fi + % new test, needed in sidefloats (surfaced in volker's proceedings) + \ifconditional\textlistitem % else forgotten + \endgroup + \doglobal\decrement(\itemlevel,\itemincrement)% + \egroup + \else + \endgroup + \doglobal\decrement(\itemlevel,\itemincrement)% + \egroup + \par + \fi \dorechecknextindentation} \newtoks\itemgroupcommands @@ -917,7 +935,7 @@ \else \scratchdimen\z@ \fi - \llap{\hbox to \dimen0{\ifconditional\sublistitem\llap{+}\fi\box8\hfill}}% + \llap{\hbox to \dimen0{\ifconditional\sublistitem\llap{+}\fi\box8\hss}}% was: \hfill \hskip\scratchdimen} \def\optimizelistitemsbreak @@ -998,12 +1016,17 @@ \ifconditional\textlistitem \hbox{\ifconditional\sublistitem+\fi\box8\hskip\interwordspace}\nobreak \else\ifconditional\inlinelistitem - \hbox to \dimen0{\ifconditional\sublistitem\llap{+}\fi\box8\hfill}% + \hbox to \dimen0{\ifconditional\sublistitem\llap{+}\fi\box8\hss}% was: \hfill \else\ifconditional\txtlistitem \dodotxtitem \else % todo: align+marge binnen de hbox - \llap{\hbox to \dimen0{\ifconditional\sublistitem\llap{+}\fi\box8\hfill}}% +% \llap{\hbox to \dimen0{\ifconditional\sublistitem\llap{+}\fi\box8\hfill}}% + \llap{\hbox to \dimen0{\ifconditional\sublistitem\llap{+}\fi + \symalignleft + \box8\hfil + \hskip\getitemparameter\itemlevel\c!distance% T h + }}% \fi\fi\fi \fi \forceunexpanded % needed for m conversion (\os) / i need to look into this @@ -1249,6 +1272,7 @@ \c!distance=.5em, %\c!align=\v!normal, % definitely not \v!normal, see mails and \c!align=, % debug reports of David A & Patrick G on context list + \c!symalign=, \c!color=, \c!indenting=, % untouched if empty \c!color=, diff --git a/tex/context/base/core-lst.tex b/tex/context/base/core-lst.tex index 7c26b97f7..b73ed388a 100644 --- a/tex/context/base/core-lst.tex +++ b/tex/context/base/core-lst.tex @@ -278,7 +278,7 @@ \c!textstyle,\c!textcolor,\c!textcommand, \c!pagestyle,\c!pagecommand,\c!pagecolor, \c!numberstyle,\c!numbercolor,\c!numbercommand, -\c!headnumber, + \c!headnumber, \c!pagenumber,\c!pageboundaries,\c!margin,\c!symbol,\c!limittext, \c!aligntitle,\c!before,\c!after,\c!inbetween,\v!part\c!number,\c!label]% \getparameters[\??li#1][#3]}}% @@ -802,24 +802,45 @@ \midaligned {}} +% \def\dodofixdlistelementEFG#1#2#3#4#5#6#7#8% keep this one here as reference +% {\noindent +% \hbox +% {#1% in case E nils the strut +% \let\\=\newlineinlist +% \setbox0\hbox +% {#2{\showcontrastlocation\??ia{#8}% +% {\dostartlistattributes\c!style\c!color\empty +% \ignorespaces\dontconvertfont\setstrut +% \begstrut +% %\doifelsenothing{\listparameter\c!maxwidth} +% % {\listparameter\c!textcommand{#6}} +% % {\listparameter\c!textcommand{\limitatetext{#6}{\listparameter\c!maxwidth}{\unknown}}}% +% \limitatedlistentry{#6}% +% \endstrut % struts new +% \dostoplistattributes}}}% +% \linklisttoelement{#4}{#7}{#8}{\box0}}%{\copy0}}% +% \par % should be an option +% \listparameter\c!inbetween} + \def\dodofixdlistelementEFG#1#2#3#4#5#6#7#8% {\noindent - \hbox - {#1% in case E nils the strut - \let\\=\newlineinlist - \setbox0\hbox - {#2{\showcontrastlocation\??ia{#8}% - {\dostartlistattributes\c!style\c!color\empty - \ignorespaces\dontconvertfont\setstrut - \begstrut - %\doifelsenothing{\listparameter\c!maxwidth} - % 
{\listparameter\c!textcommand{#6}} - % {\listparameter\c!textcommand{\limitatetext{#6}{\listparameter\c!maxwidth}{\unknown}}}% - \limitatedlistentry{#6}% - \endstrut % struts new - \dostoplistattributes}}}% - \linklisttoelement{#4}{#7}{#8}{\box0}}%{\copy0}}% - \par % should be an option + \bgroup + \def\makelistelement##1##2% isolated by Wolfgang Schuster + {\doifelse{\listparameter\c!interaction}{##1} + {#2{##2}} + {\setbox0\hbox{#2{\showcontrastlocation\??ia{#8}{##2}}}% + \linklisttoelement{#4}{#7}{#8}{\box0}}}% + \makelistelement\v!no + {\let\\=\newlineinlist + #1% in case E nils the strut (still needed?) + \dostartlistattributes\c!style\c!color\empty + \ignorespaces\dontconvertfont\setstrut + \begstrut + \limitatedlistentry{#6}% + \endstrut + \dostoplistattributes}% + \egroup + \par \listparameter\c!inbetween} % better: diff --git a/tex/context/base/core-mis.tex b/tex/context/base/core-mis.tex index 9d3682d69..8459caab2 100644 --- a/tex/context/base/core-mis.tex +++ b/tex/context/base/core-mis.tex @@ -1335,6 +1335,14 @@ \c!left={\symbol[\c!leftquote]}, \c!right={\symbol[\c!rightquote]}] +\definedelimitedtext + [\v!blockquote][\v!quotation] + +\setupdelimitedtext + [\v!blockquote] + [\c!left=, + \c!right=] + \definedelimitedtext [\v!speech][\v!quotation] @@ -2605,7 +2613,7 @@ \def\dorotatenextbox#1#2% {\doifsomething{#1} - {\edef\@@rorotation{\number#1}% get rid of leading zeros and spaces + {\edef\@@rorotation{\realnumber{#1}}% get rid of leading zeros and spaces \setbox\nextbox\vbox{\flushnextbox}% not really needed \dodorotatenextbox\@@rorotation#2}% \hbox{\boxcursor\flushnextbox}} diff --git a/tex/context/base/core-new.tex b/tex/context/base/core-new.tex index 9155c9dab..1dd989c37 100644 --- a/tex/context/base/core-new.tex +++ b/tex/context/base/core-new.tex @@ -33,10 +33,6 @@ \def\dosetupsB[#1]{\cleanuplabel{#1}\processcommacommand[\cleanlabel]\dosetups} % [..] \def\dosetupsC[#1]{\cleanuplabel{#1}\dosetups\cleanlabel} % [..] -% \def\dosetups#1% the grid option will be extended to other main modes -% {\executeifdefined{\??su\ifgridsnapping\v!grid\fi:#1} -% {\executeifdefined{\??su :#1}\empty}} - \def\dosetups#1% the grid option will be extended to other main modes {\executeifdefined{\??su\ifgridsnapping\v!grid\fi:#1} {\executeifdefined{\??su :#1}\gobbleoneargument}\empty} % takes one argument @@ -44,6 +40,20 @@ \def\setupwithargument#1% the grid option will be extended to other main modes {\executeifdefined{\??su:#1}\gobbleoneargument} +% somehow fails ... 
+% +% \letvalue{\??su:..}\gobbleoneargument +% +% \def\dosetups#1% the grid option will be extended to other main modes +% {\csname \??su +% \ifcsname\??su\ifgridsnapping\v!grid\fi:#1\endcsname\v!grid:#1\else +% \ifcsname\??su :#1\endcsname :#1\else +% :..\fi\fi +% \endcsname\empty} % takes one argument +% +% \def\setupwithargument#1% the grid option will be extended to other main modes +% {\csname\??su:\ifcsname\??su:#1\endcsname#1\else..\fi\endcsname} + \let\directsetup\dosetups \def\doifsetupselse#1% to be done: grid @@ -54,7 +64,7 @@ \def\startsetups {\xxstartsetups\plusone \stopsetups } \let\stopsetups \relax \def\startlocalsetups{\xxstartsetups\plusone \stoplocalsetups} \let\stoplocalsetups\relax \def\startrawsetups {\xxstartsetups\zerocount\stoprawsetups } \let\stoprawsetups \relax -\def\startxmlsetups {\xxstartsetups\plustwo\stopxmlsetups } \let\stopxmlsetups \relax +\def\startxmlsetups {\xxstartsetups\plustwo \stopxmlsetups } \let\stopxmlsetups \relax \def\xxstartsetups#1#2% {\begingroup\chardef\setupseolmode#1\doifnextcharelse[{\startsetupsA#2}{\startsetupsB#2}} diff --git a/tex/context/base/core-ntb.tex b/tex/context/base/core-ntb.tex index a98609d9a..a57739c8b 100644 --- a/tex/context/base/core-ntb.tex +++ b/tex/context/base/core-ntb.tex @@ -23,6 +23,19 @@ %D optimizations were rejected in order not to complicate this %D module too much (and in order to prevail extensibility). +% \starttext +% \placefigure[left]{}{} +% \startlinecorrection \dontleavehmode \bTABLE +% \bTR \bTD oeps \eTD \eTR +% \eTABLE +% \stoplinecorrection +% \placefigure[right]{}{} +% \startlinecorrection \dontleavehmode \bTABLE +% \bTR \bTD oeps \eTD \eTR +% \eTABLE +% \stoplinecorrection +% \stoptext + %D To Do: %D %D \starttyping diff --git a/tex/context/base/core-pgr.tex b/tex/context/base/core-pgr.tex index ce7fb2459..1a4508b7f 100644 --- a/tex/context/base/core-pgr.tex +++ b/tex/context/base/core-pgr.tex @@ -498,6 +498,7 @@ gridtype=0, linetype=1, filltype=1, + dashtype=0, %snaptops=true, % not that nice: true/false gridcolor=red, linecolor=blue, @@ -506,7 +507,8 @@ linewidth=\linewidth, gridwidth=\linewidth, gridshift=\!!zeropoint, - lineradius=.5\bodyfontsize] + lineradius=.5\bodyfontsize, + dashtype=1] \startuseMPgraphic{mpos:par:shape} \iftracepositions show_par \else draw_par \fi ; @@ -516,6 +518,7 @@ boxgridtype := \MPvar{gridtype} ; boxlinetype := \MPvar{linetype} ; boxfilltype := \MPvar{filltype} ; + boxdashtype := \MPvar{dashtype} ; boxgridcolor := \MPvar{gridcolor} ; boxlinecolor := \MPvar{linecolor} ; boxfillcolor := \MPvar{fillcolor} ; @@ -620,7 +623,7 @@ \newcounter\textbackgrounddepth \appendtoks - \savecurrentvalue\totalnofparbackgrounds\nofparbackgrounds + \expanded{\savecurrentvalue\noexpand\totalnofparbackgrounds{\number\nofparbackgrounds}}% \to \everybye \appendtoks @@ -628,7 +631,7 @@ \to \everystarttext \ifx\totalnofparbackgrounds\undefined \newcounter\totalnofparbackgrounds \fi -\ifx\nofparbackgrounds \undefined \newcounter\nofparbackgrounds \fi +\ifx\nofparbackgrounds \undefined \newcount \nofparbackgrounds \fi \def\initializeparbackgrounds {\ifcase\totalnofparbackgrounds\else @@ -684,20 +687,18 @@ \def\dostarttextbackground[#1][#2]% {\checktextbackgrounds \def\currenttextbackground{#1}% - \doglobal\increment\nofparbackgrounds - %\edef\currentparbackground{background:\nofparbackgrounds}% - \edef\currentparbackground{pbg:\nofparbackgrounds}% - \bgroup - \increment\nofparbackgrounds - %\xdef\nextparbackground{background:\nofparbackgrounds}% - 
\xdef\nextparbackground{pbg:\nofparbackgrounds}% - \egroup + \global\advance\nofparbackgrounds\plusone + \edef\currentparbackground{pbg:\number\nofparbackgrounds}% +% \bgroup +% \advance\nofparbackgrounds\plusone +% \xdef\nextparbackground{pbg:\number\nofparbackgrounds}% +% \egroup + \xdef\nextparbackground{pbg:\number\numexpr\nofparbackgrounds+\plusone\relax}% still xdef ? % todo : \synchonizepositionpage{b:\currentparbackground}{s:\currentparbackground}% \setuptextbackground[#1][#2]% \let\dodostarttextbackground\relax \let\dodostoptextbackground \relax - \doif{\textbackgroundparameter\c!state}\v!start - {\dopresettextbackground{#1}}% + \doif{\textbackgroundparameter\c!state}\v!start{\dopresettextbackground{#1}}% \dodostarttextbackground} % todo \backgroundvariable\c!variant @@ -737,6 +738,7 @@ gridtype=\textbackgroundparameter\c!alternative, filltype=\textbackgroundparameter\c!background, linetype=\textbackgroundparameter\c!frame, + dashtype=\textbackgroundparameter{dash}, % to be internationalized gridcolor=\textbackgroundparameter\c!framecolor, linecolor=\textbackgroundparameter\c!framecolor, fillcolor=\textbackgroundparameter\c!backgroundcolor, @@ -946,7 +948,7 @@ \copyparameters[\??td#1][\??td] [\c!state,\c!location,\c!alternative,\c!mp,\c!method, \c!background,\c!backgroundcolor,\c!corner,\c!level, - \c!backgroundoffset,\c!before,\c!after,\c!align, + \c!backgroundoffset,\c!before,\c!after,\c!align,dash, % dash not yet internationalized \c!radius,\c!frame,\c!framecolor,\c!rulethickness,\c!voffset, \c!leftoffset,\c!rightoffset,\c!topoffset,\c!bottomoffset]% \getparameters[\??td#1][#2]% @@ -994,6 +996,7 @@ \c!level=-1, \c!alternative=0, \c!align=, + dash=0, % to be internationalized \c!background=\v!color, \c!backgroundcolor=lightgray, \c!backgroundoffset=\!!zeropoint, @@ -1254,90 +1257,46 @@ \newif\ifrepositionmarginbox \repositionmarginboxtrue -\newcounter\currentmarginpos - -% \def\dopositionmarginbox#1% -% {\bgroup -% \ifrepositionmarginbox -% \doglobal\increment\currentmarginpos -% \setposition{\s!margin:\currentmarginpos}% -% \scratchdimen=\MPy{\s!margin:\currentmarginpos}% -% \doglobal\increment\currentmarginpos -% \advance\scratchdimen by -\MPy{\s!margin:\currentmarginpos}% -% \advance\scratchdimen by -\strutdp -% \setbox#1=\hbox -% {\setposition{\s!margin:\currentmarginpos}\raise\scratchdimen\box#1}% -% \dp#1=\!!zeropoint -% \ht#1=\!!zeropoint -% \fi -% \vadjust{\box#1}% -% \egroup} - -% \def\dopositionmarginbox#1% how about page boundaries ! 
-% {\bgroup -% \ifrepositionmarginbox -% \doglobal\increment\currentmarginpos -% \setposition{\s!margin:\currentmarginpos}% -% \scratchdimen\MPy{\s!margin:\currentmarginpos}% -% \doglobal\increment\currentmarginpos -% \advance\scratchdimen -\MPy{\s!margin:\currentmarginpos}% -% \advance\scratchdimen -\strutdp -% % new -% \setbox#1\hbox -% {\hskip-\MPx{\s!margin:\currentmarginpos}% -% \hskip\MPx{head:\realfolio}% -% \box#1}% -% % so far -% \setbox#1\hbox -% {\setposition{\s!margin:\currentmarginpos}% -% \raise\scratchdimen\box#1}% -% \dp#1\zeropoint -% \ht#1\zeropoint -% \fi -% \graphicvadjust{\box#1}% -% \egroup} +\newcount\currentmarginpos \def\dopositionmarginbox#1% {\bgroup \ifrepositionmarginbox - \doglobal\increment\currentmarginpos - \setposition{\s!margin:\currentmarginpos}% + \global\advance\currentmarginpos\plusone + \setposition{\s!margin:\number\currentmarginpos}% \ifcase\marginrepositionmethod % nothing \or % nothing \or % stack / page check yet untested -% \scratchcounter\MPp{\s!margin:\currentmarginpos}\relax - \scratchdimen\MPy{\s!margin:\currentmarginpos}% - \doglobal\increment\currentmarginpos - \advance\scratchdimen -\MPy{\s!margin:\currentmarginpos}% + \scratchdimen\MPy{\s!margin:\number\currentmarginpos}% + \global\advance\currentmarginpos\plusone + \advance\scratchdimen -\MPy{\s!margin:\number\currentmarginpos}% \advance\scratchdimen -\strutdp -% \ifnum\scratchcounter=\MPp{\s!margin:\currentmarginpos}\relax % new \setbox#1\hbox - {\hskip-\MPx{\s!margin:\currentmarginpos}% + {\hskip-\MPx{\s!margin:\number\currentmarginpos}% \hskip\MPx{head:\realfolio}% \box#1}% % so far \setbox#1\hbox - {\setposition{\s!margin:\currentmarginpos}% + {\setposition{\s!margin:\number\currentmarginpos}% \raise\scratchdimen\box#1}% -% \fi \or % move up - \ifnum\MPp{p:\parposcounter}=\MPp{\s!margin:\currentmarginpos}\relax - \scratchdimen\dimexpr\MPy{p:\parposcounter}-\MPy{\s!margin:\currentmarginpos}\relax + \ifnum\MPp{p:\number\parposcounter}=\MPp{\s!margin:\number\currentmarginpos}\relax + \scratchdimen\dimexpr\MPy{p:\number\parposcounter}-\MPy{\s!margin:\number\currentmarginpos}\relax \expanded{\setbox#1\hbox{\raise\scratchdimen\box#1}\ht#1\the\ht#1\dp#1\the\dp#1}% \fi \or % move up, assume end of par - \ifnum\MPp{p:\parposcounter}=\MPp{\s!margin:\currentmarginpos}\relax + \ifnum\MPp{p:\number\parposcounter}=\MPp{\s!margin:\number\currentmarginpos}\relax \getnoflines\margincontentheight \advance\noflines\minusone \scratchdimen\noflines\lineheight \else - \scratchdimen\dimexpr\MPy{p:\parposcounter}-\MPy{\s!margin:\currentmarginpos}\relax + \scratchdimen\dimexpr\MPy{p:\number\parposcounter}-\MPy{\s!margin:\number\currentmarginpos}\relax \fi \expanded{\setbox#1\hbox{\raise\scratchdimen\box#1}\ht#1\the\ht#1\dp#1\the\dp#1}% \fi diff --git a/tex/context/base/core-pos.tex b/tex/context/base/core-pos.tex index 6b0e103fd..ff88efdf8 100644 --- a/tex/context/base/core-pos.tex +++ b/tex/context/base/core-pos.tex @@ -116,11 +116,11 @@ %D For postprocessing purposes, we save the number of %D positions. 
-\newcounter\currentpositions % current number of positions +\newcount\currentpositions % current number of positions \newcounter\totalnofpositions % total from previous run \appendtoks - \savecurrentvalue\totalnofpositions\currentpositions + \expanded{\savecurrentvalue\noexpand\totalnofpositions{\the\currentpositions}}% \to \everybye %D The next switch can be used to communicate a special @@ -239,7 +239,7 @@ {\printpaperwidth }% {\printpaperheight}% \fi - \doglobal\increment\currentpositions} + \global\advance\currentpositions\plusone} \def\setpositiononly#1% {\iftrialtypesetting @@ -297,7 +297,7 @@ \def\setpositiondataplus#1#2#3#4#5% {\iftrialtypesetting \else \initializenextposition - \hbox to \nextboxwd + \hbox % bug: to \nextboxwd {\edef\currentposition{#1}% \dosetpositionplus\currentposition {\the\dimexpr#2\relax}% @@ -451,20 +451,20 @@ \def\epos#1{\removelastspace\hpos{e:#1}{\strut}} \def\fpos#1% - {\setpositionplus{b:#1}\parposcounter\horizontalstrut + {\setpositionplus{b:#1}{\number\parposcounter}\horizontalstrut \ignorespaces} \def\tpos#1% {\removelastspace - \setpositionplus{e:#1}\parposcounter\horizontalstrut} + \setpositionplus{e:#1}{\number\parposcounter}\horizontalstrut} \def\ffpos#1% - {\setpositionplus{b:#1}\parposcounter\horizontalstrut\wpos{#1}% + {\setpositionplus{b:#1}{\number\parposcounter}\horizontalstrut\wpos{#1}% \ignorespaces} \def\ttpos#1% {\removelastspace - \setpositionplus{e:#1}\parposcounter\horizontalstrut} + \setpositionplus{e:#1}{\number\parposcounter}\horizontalstrut} \def\wpos#1% {\dontleavehmode\vadjust % may disappear if buried @@ -503,7 +503,7 @@ %D of them. This mechanism is activated automatically %D based on information collected in the previous pass. -\newcounter\parposcounter +\newcount\parposcounter \newif\ifpositioningpar @@ -523,12 +523,12 @@ \chardef\parposstrut=1 % 0 => no strut data, so fall backs used \def\doregisterparoptions - {\doglobal\increment\parposcounter + {\global\advance\parposcounter\plusone \begingroup \leftskip 1\leftskip \rightskip1\rightskip \setpositiondataplus - {p:\parposcounter}% % identifier + {p:\number\parposcounter}% identifier {\the\zeropoint}% {\the\strutht}% {\the\strutdp}% diff --git a/tex/context/base/core-reg.mkiv b/tex/context/base/core-reg.mkiv index d7dc9a9cb..f4c2cc64c 100644 --- a/tex/context/base/core-reg.mkiv +++ b/tex/context/base/core-reg.mkiv @@ -36,13 +36,13 @@ \doglobal\addtocommalist{#1}\allregisters} \def\mksaveregisterentry#1#2#3#4#5#6#7% class type reference key entry pagespec realpage - {\expanded{\writeutilitytua{table.insert(jr['#1'],{'#2','#3',\!!bs#4\!!es,\!!bs#5\!!es,'#6','#7'})}}} + {\expanded{\writeutilitytua{ti(jr['#1'],{'#2','#3',\!!bs#4\!!es,\!!bs#5\!!es,'#6','#7'})}}} \def\mksaveregistersee#1#2#3#4#5#6#7% class type reference key entry see pagespec - {\expanded{\writeutilitytua{table.insert(jr['#1'],{'#2','#3',\!!bs#4\!!es,\!!bs#5\!!es,'#6','#7'})}}} + {\expanded{\writeutilitytua{ti(jr['#1'],{'#2','#3',\!!bs#4\!!es,\!!bs#5\!!es,'#6','#7'})}}} \def\mksaveregistervariable#1#2#3% class type value - {\expanded{\immediatewriteutilitytua{table.insert(jr['#1'],{'#2','#3'})}}} + {\expanded{\immediatewriteutilitytua{ti(jr['#1'],{'#2','#3'})}}} % Beware, we have no filename support here. For that we need to save the resulting % tex code in a file. No big deal. 
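
The register writes above now emit ti(jr[...],{...}) instead of table.insert(jr[...],{...}) into the utility file (core-syn.mkiv below does the same for sorted lists with js). A hedged guess at what the loading side has to provide before executing those written lines; the actual loader is not part of this patch, and the class name and field values are invented:

-- Minimal environment a .tua data line would need (an assumption): the jr
-- and js tables plus a short ti alias, which keeps the written file smaller
-- and slightly faster to load than spelling out table.insert each time.

jr = jr or { }      -- register entries, one table per register class
js = js or { }      -- sorted list entries, one table per list class
ti = table.insert   -- short alias used in the written calls

jr['index'] = jr['index'] or { }

-- a written utility line then executes as plain Lua:
ti(jr['index'], { 'e', 'ref:0', 'word', 'word', '', '2' }) -- type, reference, key, entry, pagespec, realpage (invented values)
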
diff --git a/tex/context/base/core-spa.lua b/tex/context/base/core-spa.lua index 1d8616c3f..9b7486722 100644 --- a/tex/context/base/core-spa.lua +++ b/tex/context/base/core-spa.lua @@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['core-spa'] = { license = "see context related readme files" } +-- todo: test without unset + -- vertical space handler nodes.snapvalues = { } @@ -20,8 +22,6 @@ do local kern, glue, penalty, hlist = node.id('kern'), node.id('glue'), node.id('penalty'), node.id('hlist') - local penalty_node = node.new('penalty') - local has_attribute = node.has_attribute local has_field = node.has_field @@ -55,7 +55,7 @@ do -- alignment box begin_of_par vmode_par hmode_par insert penalty before_display after_display function nodes.is_display_math(head) - n = head.prev + local n = head.prev while n do local id = n.id if id == penalty then @@ -86,18 +86,6 @@ do -- helpers - function nodes.snapline(current,where) - local sn = has_attribute(current.list,snap_category) - if sn then - local sv = nodes.snapvalues[sn] - if sv then - local height, depth, lineheight = sv[1], sv[2], sv[3] - current.height = math.ceil((current.height-height)/lineheight)*lineheight + height - current.depth = math.ceil((current.depth -depth )/lineheight)*lineheight + depth - end - end - end - -- local free = node.free local line_skip = 1 @@ -112,6 +100,7 @@ do local function collapser(head,where) if head and head.next then + input.starttiming(nodes) local trace = nodes.trace_collapse local current, tail = head, nil local glue_order, glue_data = 0, nil @@ -213,8 +202,19 @@ do if trace then trace_done("before",glue_data) end glue_order, glue_data = 0, nil end - if id == hlist and where == 'hmode_par' and current.list then - nodes.snapline(current,where) -- will be inline later + if id == hlist and where == 'hmode_par' then + local list = current.list + if list then + local sn = has_attribute(list,snap_category) + if sn then + local sv = nodes.snapvalues[sn] + if sv then + local height, depth, lineheight = sv[1], sv[2], sv[3] + current.height = math.ceil((current.height-height)/lineheight)*lineheight + height + current.depth = math.ceil((current.depth -depth )/lineheight)*lineheight + depth + end + end + end end current = current.next end @@ -230,8 +230,7 @@ do head, current = nodes.remove(head, parskip, true) end if penalty_data then - local p = node.copy(penalty_node) - p.penalty = penalty_data + local p = nodes.penalty(penalty_data) if trace then trace_done("before",p) end head, head = nodes.before(head,head,p) end @@ -240,6 +239,7 @@ do head, tail = nodes.after(head,tail,glue_data) end if trace then show_tracing() end + input.stoptiming(nodes) end return head end @@ -247,15 +247,13 @@ do local head, tail = nil, nil function nodes.flush_vertical_spacing() - if head then - input.start_timing(nodes) + if head and head.next then local t = collapser(head) head = nil -- tail = nil - input.stop_timing(nodes) return t else - return nil + return head end end @@ -275,7 +273,6 @@ do tail = tt t = nil else - input.start_timing(nodes) if head then t.prev = tail tail.next = t @@ -286,7 +283,6 @@ do else t = collapser(t,where) end - input.stop_timing(nodes,where) end elseif head then t.prev = tail @@ -302,7 +298,6 @@ do function nodes.handle_vbox_spacing(t) if t and t.next then - local tail = node.slide(t) return collapser(t,'whole') else return t @@ -313,8 +308,10 @@ end -- experimental callback definitions will be moved elsewhere -callback.register('vpack_filter', nodes.handle_vbox_spacing) 
-callback.register('buildpage_filter', nodes.handle_page_spacing) +-- not yet ... we need to get rid of lastskip stuff first +-- +-- callback.register('vpack_filter', nodes.handle_vbox_spacing) +-- callback.register('buildpage_filter', nodes.handle_page_spacing) -- horizontal stuff @@ -322,13 +319,8 @@ callback.register('buildpage_filter', nodes.handle_page_spacing) do - local kern_node = node.new("kern",1) - local penalty_node = node.new("penalty") - local glue_node = node.new("glue") - local glue_spec_node = node.new("glue_spec") - - local contains = node.has_attribute - local unset = node.unset_attribute + local has_attribute = node.has_attribute + local unset = node.unset_attribute local glyph = node.id("glyph") local kern = node.id("kern") @@ -337,32 +329,9 @@ do local hlist = node.id('hlist') local vlist = node.id('vlist') ---~ function nodes.penalty(p) ---~ local n = node.copy(penalty_node) ---~ n.penalty = p ---~ return n ---~ end ---~ function nodes.kern(k) ---~ local n = node.copy(kern_node) ---~ n.kern = k ---~ return n ---~ end ---~ function nodes.glue(width,stretch,shrink) ---~ local n = node.copy(glue_node) ---~ local s = node.copy(glue_spec_node) ---~ s.width, s.stretch, s.shrink = width, stretch, shrink ---~ n.spec = s ---~ return n ---~ end ---~ function nodes.glue_spec(width,stretch,shrink) ---~ local s = node.copy(glue_spec_node) ---~ s.width, s.stretch, s.shrink = width, stretch, shrink ---~ return s ---~ end - spacings = spacings or { } spacings.mapping = spacings.mapping or { } - spacings.enabled = true + spacings.enabled = false input.storage.register(false,"spacings/mapping", spacings.mapping, "spacings.mapping") @@ -380,14 +349,16 @@ do map.left, map.right = left, right end + -- todo: no ligatures + function spacings.process(namespace,attribute,head) local done, mapping, fontids = false, spacings.mapping, fonts.tfm.id for start in node.traverse_id(glyph,head) do -- tricky since we inject - local attr = contains(start,attribute) + local attr = has_attribute(start,attribute) if attr then local map = mapping[attr] if map then - map = mapping[attr][start.char] + map = map[start.char] unset(start,attribute) if map then local kern, prev = map.left, start.prev @@ -414,7 +385,7 @@ do kerns = kerns or { } kerns.mapping = kerns.mapping or { } - kerns.enabled = true + kerns.enabled = false input.storage.register(false, "kerns/mapping", kerns.mapping, "kerns.mapping") @@ -425,12 +396,12 @@ do -- local marks = fti[font].shared.otfdata.luatex.marks -- if not marks[tchar] then - function kerns.process(namespace,attribute,head) -- todo interchar kerns / disc nodes + function kerns.process(namespace,attribute,head) -- todo interchar kerns / disc nodes / can be made faster local fti, scale = fonts.tfm.id, tex.scale local start, done, mapping, fontids, lastfont = head, false, kerns.mapping, fonts.tfm.id, nil while start do -- faster to test for attr first - local attr = contains(start,attribute) + local attr = has_attribute(start,attribute) if attr then unset(start,attribute) local krn = mapping[attr] @@ -485,38 +456,87 @@ do node.insert_before(head,start,nodes.kern(krn)) done = true elseif pid == disc then - local d = start.prev - local pre, post = d.pre, d.post - if pre then - local p = d.prev - local nn, pp = p.prev, p.next - p.prev, p.next = nil, pre -- hijack node - pre = kerns.process(namespace,attribute,p) + -- probably wrong anyway + -- currently this hooks into the node handlere before + -- hyphenation takes place, but this may change + -- + -- local d = start.prev + -- 
local pre, post = d.pre, d.post + -- if pre then + -- local p = d.prev + -- local nn, pp = p.prev, p.next + -- p.prev, p.next = nil, pre -- hijack node + -- pre = kerns.process(namespace,attribute,p) + -- pre = pre.next + -- pre.prev = nil + -- p.prev, p.next = nn, pp + -- d.pre = pre + -- end + -- if post then -- more checks needed + -- local tail = node.slide(post) + -- local nn, pp = d.next.prev, d.next.next + -- d.next.next, d.next.prev = nil, tail + -- tail.next = start.next -- hijack node + -- post = kerns.process(namespace,attribute,post) + -- tail.next = nil + -- d.next.prev, d.next.next = nn, pp + -- d.post = post + -- end + -- local prevchar, nextchar = d.prev.char, d.next.char -- == start.char + -- local tfm = fti[lastfont].characters[prevchar] + -- local ickern = tfm.kerns + -- if ickern and ickern[nextchar] then + -- krn = scale(ickern[nextchar]+fontids[lastfont].parameters[6],krn) + -- else + -- krn = scale(fontids[lastfont].parameters[6],krn) + -- end + -- node.insert_before(head,start,nodes.kern(krn)) + -- d.replace = d.replace + 1 + -- + -- untested: + -- + local disc = start.prev -- disc + local pre, post, replace = disc.pre, disc.post, disc.replace + if pre then -- must pair with start.prev + local before = node.copy(disc.prev) + pre.prev = before + before.next = pre + before.prev = nil + pre = kerns.process(namespace,attribute,before) pre = pre.next pre.prev = nil - p.prev, p.next = nn, pp - d.pre = pre + disc.pre = pre + node.free(before) end - if post then + if post then -- must pair with start + local after = node.copy(disc.next) local tail = node.slide(post) - local nn, pp = d.next.prev, d.next.next - d.next.next, d.next.prev = nil, tail - tail.next = start.next -- hijack node + tail.next = after + after.prev = tail + after.next = nil post = kerns.process(namespace,attribute,post) tail.next = nil - d.next.prev, d.next.next = nn, pp - d.post = post + disc.post = post + node.free(after) end - local prevchar, nextchar = d.prev.char, d.next.char -- == start.char - local tfm = fti[lastfont].characters[prevchar] - local ickern = tfm.kerns - if ickern and ickern[nextchar] then - krn = scale(ickern[nextchar]+fontids[lastfont].parameters[6],krn) - else - krn = scale(fontids[lastfont].parameters[6],krn) + if replace then -- must pair with start and start.prev + local before = node.copy(disc.prev) + local after = node.copy(disc.next) + local tail = node.slide(post) + replace.prev = before + before.next = replace + before.prev = nil + tail.next = after + after.prev = tail + after.next = nil + replace = kerns.process(namespace,attribute,before) + replace = replace.next + replace.prev = nil + tail.next = nil + disc.replace = replace + node.free(after) + node.free(before) end - node.insert_before(head,start,nodes.kern(krn)) - d.replace = d.replace + 1 end end elseif id == glue and start.subtype == 0 then @@ -534,7 +554,7 @@ do start.kern = scale(sk,krn) done = true end - elseif lastfont and id == hlist or id == vlist then -- todo: lookahead + elseif lastfont and (id == hlist or id == vlist) then -- todo: lookahead if start.prev then node.insert_before(head,start,nodes.kern(scale(fontids[lastfont].parameters[6],krn))) done = true @@ -562,7 +582,7 @@ do -- relocate node and attribute stuff once it's more complete !! 
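
One easy to miss change in the kerning code above is the pair of parentheses added to the hlist/vlist test: and binds tighter than or in Lua, so the old condition also fired for a vlist when no font had been seen yet. A two line stand-alone check of the difference, with made up stand-in values:

-- Demonstrates why the added parentheses matter: without them the test
-- groups as (lastfont and id == hlist) or (id == vlist).

local hlist, vlist = 0, 1          -- stand-ins for node.id('hlist') / node.id('vlist')
local lastfont, id = nil, vlist    -- no font seen yet, current node is a vlist

local old = lastfont and id == hlist or id == vlist   -- old form
local new = lastfont and (id == hlist or id == vlist) -- what the patch switches to

print(old, new) -- true  nil : the old test fires, the fixed one does not
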
cases = cases or { } - cases.enabled = true + cases.enabled = false cases.actions = { } -- hm needs to be run before glyphs: chars.plugins @@ -581,7 +601,7 @@ do local function lower(start) local data, char = characters.data, start.char if data[char] then - local ul = data[char].ulcode + local lc = data[char].lccode if lc and fonts.tfm.id[start.font].characters[lc] then start.char = lc return start, true @@ -656,7 +676,7 @@ do function cases.process(namespace,attribute,head) -- not real fast but also not used on much data local done, actions = false, cases.actions for start in node.traverse_id(glyph,head) do - local attr = contains(start,attribute) + local attr = has_attribute(start,attribute) if attr then unset(start,attribute) local action = actions[attr] @@ -676,7 +696,8 @@ do breakpoints = breakpoints or { } breakpoints.mapping = breakpoints.mapping or { } - breakpoints.enabled = true + breakpoints.methods = breakpoints.methods or { } + breakpoints.enabled = false input.storage.register(false,"breakpoints/mapping", breakpoints.mapping, "breakpoints.mapping") @@ -689,39 +710,84 @@ do mapping[char] = { kind or 1, before or 1, after or 1 } end + breakpoints.methods[1] = function(head,start) + -- no discretionary needed + -- \def\prewordbreak {\penalty\plustenthousand\hskip\zeropoint\relax} + -- \def\postwordbreak {\penalty\zerocount\hskip\zeropoint\relax} + -- texio.write_nl(string.format("injecting replacement type %s for character %s",map[1],utf.char(start.char))) + if start.prev and start.next then + node.insert_before(head,start,nodes.penalty(10000)) + node.insert_before(head,start,nodes.glue(0)) + node.insert_after(head,start,nodes.glue(0)) + node.insert_after(head,start,nodes.penalty(0)) + end + return head, start + end + breakpoints.methods[2] = function(head,start) -- ( => (- + if start.prev and start.next then + local tmp = start + start = nodes.disc() + start.prev, start.next = tmp.prev, tmp.next + tmp.prev.next, tmp.next.prev = start, start + tmp.prev, tmp.next = nil, nil + start.replace = tmp + local tmp, hyphen = node.copy(tmp), node.copy(tmp) + hyphen.char = languages.prehyphenchar(tmp.lang) + tmp.next, hyphen.prev = hyphen, tmp + start.post = tmp + node.insert_before(head,start,nodes.penalty(10000)) + node.insert_before(head,start,nodes.glue(0)) + node.insert_after(head,start,nodes.glue(0)) + node.insert_after(head,start,nodes.penalty(10000)) + end + return head, start + end + breakpoints.methods[3] = function(head,start) -- ) => -) + if start.prev and start.next then + local tmp = start + start = nodes.disc() + start.prev, start.next = tmp.prev, tmp.next + tmp.prev.next, tmp.next.prev = start, start + tmp.prev, tmp.next = nil, nil + start.replace = tmp + local tmp, hyphen = node.copy(tmp), node.copy(tmp) + hyphen.char = languages.prehyphenchar(tmp.lang) + tmp.prev, hyphen.next = hyphen, tmp + start.pre = hyphen + node.insert_before(head,start,nodes.penalty(10000)) + node.insert_before(head,start,nodes.glue(0)) + node.insert_after(head,start,nodes.glue(0)) + node.insert_after(head,start,nodes.penalty(10000)) + end + return head, start + end + function breakpoints.process(namespace,attribute,head) local done, mapping, fontids = false, breakpoints.mapping, fonts.tfm.id local start, n = head, 0 while start do local id = start.id if id == glyph then - local attr = contains(start,attribute) + local attr = has_attribute(start,attribute) if attr then - unset(start,attribute) + unset(start,attribute) -- maybe test for subtype > 256 (faster) -- look ahead and back n chars local map 
= mapping[attr] if map then - map = map[start.char] - if map then - if n >= map[2] then - local m = map[3] + local smap = map[start.char] + if smap then + if n >= smap[2] then + local m = smap[3] local next = start.next while next do -- gamble on same attribute local id = next.id if id == glyph then -- gamble on same attribute - if m == 1 then - if map[1] == 1 then - -- no discretionary needed - -- \def\prewordbreak {\penalty\plustenthousand\hskip\zeropoint\relax} - -- \def\postwordbreak {\penalty\zerocount\hskip\zeropoint\relax} - -- texio.write_nl(string.format("injecting replacement type %s for character %s",map[1],utf.char(start.char))) - local g, p = nodes.glue(0), nodes.penalty(10000) - node.insert_before(head,start,g) - node.insert_before(head,g,p) - g, p = nodes.glue(0), nodes.penalty(0) - node.insert_after(head,start,p) - node.insert_after(head,p,g) - start = g + if map[next.char] then + break + elseif m == 1 then + local method = breakpoints.methods[smap[1]] + if method then + head, start = method(head,start) done = true end break diff --git a/tex/context/base/core-spa.mkiv b/tex/context/base/core-spa.mkiv index 780fbe3dc..4fcad6b31 100644 --- a/tex/context/base/core-spa.mkiv +++ b/tex/context/base/core-spa.mkiv @@ -93,8 +93,10 @@ \endgroup \fi} -\def\setcharacterspacing[#1]% - {\dosetattribute{spacing}{\csname\??ch:#1\endcsname}} +\def\setcharacterspacing + {\ctxlua{spacings.enabled=true}% + \gdef\setcharacterspacing[##1]{\dosetattribute{spacing}{\csname\??ch:##1\endcsname}}% + \setcharacterspacing} \setvalue{\??ch:\s!reset}{\doresetattribute{spacing}} @@ -135,8 +137,10 @@ \endgroup \fi} -\def\setcharacterkerning[#1]% - {\dosetattribute{kern}{\csname\??ck:#1\endcsname}} +\def\setcharacterkerning + {\ctxlua{kerns.enabled=true}% + \gdef\setcharacterkerning[##1]{\dosetattribute{kern}{\csname\??ck:##1\endcsname}}% + \setcharacterkerning} \setvalue{\??ck:\s!reset}{\doresetattribute{kern}} @@ -152,9 +156,14 @@ \defineattribute[case] -\def\WORD{\groupedcommand{\dosetattribute{case}\plusone }{}} -\def\word{\groupedcommand{\dosetattribute{case}\plustwo }{}} -\def\Word{\groupedcommand{\dosetattribute{case}\plusthree}{}} % \plusfour +\def\setcharactercasing + {\ctxlua{cases.enabled=true}% + \gdef\setcharactercasing[##1]{\dosetattribute{case}{\number##1}}% + \setcharactercasing} + +\def\WORD{\groupedcommand{\setcharactercasing[\plusone ]}{}} +\def\word{\groupedcommand{\setcharactercasing[\plustwo ]}{}} +\def\Word{\groupedcommand{\setcharactercasing[\plusthree]}{}} % \plusfour \let\WORDS\WORD \let\words\word @@ -176,6 +185,8 @@ \definesystemvariable {bp} % BreakPoint +\exhyphenchar=\minusone % we use a different order then base tex, so we really need this + \newcount \maxbreakpointsid \def\definebreakpoints @@ -200,14 +211,22 @@ \endgroup \fi} -\def\setbreakpoints[#1]% - {\dosetattribute{breakpoint}{\csname\??bp:#1\endcsname}} +\def\setbreakpoints + {\ctxlua{breakpoints.enabled=true}% + \gdef\setbreakpoints[##1]{\dosetattribute{breakpoint}{\csname\??bp:##1\endcsname}}% + \setbreakpoints} \setvalue{\??bp:\s!reset}{\doresetattribute{breakpoint}} \definebreakpoints[compound] \installbreakpoint [compound] [\number`+] [\c!left=3,\c!right=3,\c!type=1] +\installbreakpoint [compound] [\number`-] [\c!left=3,\c!right=3,\c!type=1] +\installbreakpoint [compound] [\number`/] [\c!left=3,\c!right=3,\c!type=1] +\installbreakpoint [compound] [\number`(] [\c!left=3,\c!right=3,\c!type=2] +\installbreakpoint [compound] [\number`)] [\c!left=3,\c!right=3,\c!type=3] + +% \setbreakpoints[compound] \protect 
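
The breakpoints installed just above use the shape set up by breakpoints.setreplacement: each character maps to { kind, before, after } and kind selects an entry from breakpoints.methods (1 is a plain break opportunity, 2 turns ( into (-, 3 turns ) into -)). A hedged, node-free sketch of that lookup; the printed descriptions and the counts stand in for the real node surgery:

-- Dispatch on the { kind, before, after } mapping: a break is considered
-- only when roughly 'before' characters precede and 'after' follow.

local methods = {
    [1] = function() return "penalty + zero glue (no discretionary)" end,
    [2] = function() return "discretionary: ( becomes (-"            end,
    [3] = function() return "discretionary: ) becomes -)"            end,
}

local mapping = {
    [string.byte('+')] = { 1, 3, 3 }, -- kind, min chars before, min chars after
    [string.byte('(')] = { 2, 3, 3 },
    [string.byte(')')] = { 3, 3, 3 },
}

local function breakpoint_action(char,nbefore,nafter)
    local m = mapping[char]
    if m and nbefore >= m[2] and nafter >= m[3] then
        return methods[m[1]]()
    end
end

print(breakpoint_action(string.byte('('),4,5)) -- discretionary: ( becomes (-
print(breakpoint_action(string.byte('+'),2,5)) -- nil, too few characters before
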
\endinput diff --git a/tex/context/base/core-spa.tex b/tex/context/base/core-spa.tex index 36157135a..561c0844e 100644 --- a/tex/context/base/core-spa.tex +++ b/tex/context/base/core-spa.tex @@ -2299,16 +2299,41 @@ %D Centered looks nicer: +% \def\dosetstrut +% {\let\strut\normalstrut +% \setbox\strutbox\normalhbox +% {\normalhbox to \zeropoint +% {% \hss % new, will be option +% \vrule +% \!!width \strutwidth +% \!!height\strutheight +% \!!depth \strutdepth +% \hss}}% +% \struttotal\dimexpr\strutht+\strutdp\relax} +% +% because of all the callbacks in mkiv, we avoid unnecessary boxes ... +% maybe use an attribute so that we can tag boxes that don't need a +% treatment; tests with using an attribute so far have shown that +% it's slower because testing the attribute takes time too + \def\dosetstrut {\let\strut\normalstrut - \setbox\strutbox\normalhbox - {\normalhbox to \zeropoint - {% \hss % new, will be option - \vrule - \!!width \strutwidth - \!!height\strutheight - \!!depth \strutdepth - \hss}}% + \ifdim\strutwidth=\zeropoint + \setbox\strutbox\normalhbox + {\vrule + \!!width \zeropoint + \!!height\strutheight + \!!depth \strutdepth}% + \else + \setbox\strutbox\normalhbox + {\normalhbox to \zeropoint + {% \hss % new, will be option + \vrule + \!!width \strutwidth + \!!height\strutheight + \!!depth \strutdepth + \hss}}% + \fi \struttotal\dimexpr\strutht+\strutdp\relax} %D The dimen \type {\struttotal} holds the exact size of the @@ -2414,7 +2439,7 @@ \fi \fi\fi} -\newbox\nostrutbox \setbox\nostrutbox\normalhbox{\normalhbox{}} +\newbox\nostrutbox \setbox\nostrutbox\normalhbox{} % {\normalhbox{}} \def\setnostrut {\setbox\strutbox\copy\nostrutbox @@ -4505,6 +4530,110 @@ \space \fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi} +% moved from page-lin + +\def\installspacehandler#1#2% needs to set \obeyedspace + {\setvalue{\??sr#1}{#2}} + +\installspacehandler \v!on + {\obeyspaces + \def\obeyedspace{\mathortext\normalspace{\dontleavehmode{\tt\controlspace}}}% + \let\ =\obeyedspace} + +\installspacehandler \v!yes + {\obeyspaces + \def\obeyedspace{\mathortext\normalspace{\dontleavehmode \normalspace }}% + \let\ =\obeyedspace} + +\installspacehandler \v!off + {\normalspaces + \let\obeyedspace\normalspace + \let\ =\normalspace} + +\installspacehandler \v!fixed + {\obeyspaces + \def\obeyedspace{\mathortext\normalspace{\dontleavehmode\fixedspace}}% + \let\ =\obeyedspace} + +\def\activatespacehandler#1% + {\executeifdefined{\??sr#1}{\activatespacehandler\v!off}} + +% moved from page-lin + +%D When spacing is active we need to handle commands in +%D a special way: +%D +%D \starttyping +%D \setuplines[space=on] +%D +%D \startlines +%D Let's talk about this{\ttsl\gobbleoneargument or}that. +%D \stoplines +%D +%D \startlines +%D Let's talk about this{\getvalue{ttsl}or}that. 
+%D \stoplines +%D \stoptyping +%D +%D One can indent in several ways: +%D +%D \starttyping +%D \setupindenting[medium] \setuplines[indenting=odd] % no yes odd even +%D +%D \startlines +%D first +%D second +%D third +%D fourth +%D \stoplines +%D \stoptyping + +\def\setuplines + {\dodoubleargument\getparameters[\??rg]} + +\def\startlines + {\@@rgbefore + \pushmacro\checkindentation + \whitespace + %\page[\v!preference]} gaat mis na koppen, nieuw: later \nobreak + \begingroup + \setupindenting[\@@rgindenting]% + \typesettinglinestrue + \setupwhitespace[\v!none]% + \obeylines + \ignorespaces + \gdef\afterfirstobeyedline % tzt two pass, net als opsomming + {\gdef\afterfirstobeyedline + {\nobreak + \global\let\afterfirstobeyedline\relax}}% + \def\obeyedline + {\par + \afterfirstobeyedline + \futurelet\next\dobetweenthelines}% + \activatespacehandler\@@rgspace + \GotoPar} + +\def\stoplines + {\endgroup + \popmacro\checkindentation + \@@rgafter} + +\def\dobetweenthelines + {\doifmeaningelse\next\obeyedline\@@rginbetween\donothing} + +\setuplines + [\c!before=\blank, + \c!after=\blank, + \c!inbetween=\blank, + \c!indenting=\v!no, + \c!space=\v!default] + +\def\emptylines + {\dosingleempty\doemptylines} + +\def\doemptylines[#1]% + {\endgraf\dorecurse{\iffirstargument#1\else3\fi}\crlf} + % plugins \loadmarkfile{core-spa} diff --git a/tex/context/base/core-syn.lua b/tex/context/base/core-syn.lua index fc7b72b5d..7f6ea7614 100644 --- a/tex/context/base/core-syn.lua +++ b/tex/context/base/core-syn.lua @@ -62,7 +62,7 @@ do return split end - -- for the moment we use the old structure, some day mmiv code + -- for the moment we use the old structure, some day mkiv code -- will be different: more structure, less mess local template = { diff --git a/tex/context/base/core-syn.mkiv b/tex/context/base/core-syn.mkiv index 8996940dd..23385c9e6 100644 --- a/tex/context/base/core-syn.mkiv +++ b/tex/context/base/core-syn.mkiv @@ -36,10 +36,10 @@ \doglobal\addtocommalist{#1}\allsortedlists} \def\mksavesortedlistentry#1#2#3#4% class key entry meaning - {\immediatewriteutilitytua{table.insert(js['#1'],{'e','#1',\!!bs#2\!!es,\!!bs#3\!!es})}} + {\immediatewriteutilitytua{ti(js['#1'],{'e','#2',\!!bs#3\!!es,\!!bs#4\!!es})}} \def\mksavesortedlistvariable#1#2#3% class type value - {\immediatewriteutilitytua{table.insert(js['#1'],{'#2','#3'})}} + {\immediatewriteutilitytua{ti(js['#1'],{'#2','#3'})}} \def\mkloadsortedlist#1% class {\bgroup diff --git a/tex/context/base/core-tbl.tex b/tex/context/base/core-tbl.tex index d63aedd1a..8081ff62a 100644 --- a/tex/context/base/core-tbl.tex +++ b/tex/context/base/core-tbl.tex @@ -236,73 +236,6 @@ \def\checktabulatesetups {\getvalue{\@@tabsetups@@\tabulatecolumn}} -% \def\dodosettabulatepreamble#1#2% -% {\ifzeropt\tabulatewidth -% \ifcase\tabulatemodus\relax -% \let\preamblebox\empty -% \else -% \def\preamblebox{\autotabulatetrue}% -% \fi -% \else -% \ifcase\tabulatemodus\relax -% \edef\preamblebox{\hbox to \the\tabulatewidth}% -% \else -% \edef\preamblebox{\hsize\the\tabulatewidth}% -% \fi -% \fi -% % -% % less bytes -% % -% %\edef\preamblebox% -% % {\ifcase\tabulatewidth -% % \ifcase\tabulatemodus\relax\else\noexpand\autotabulatetrue\fi -% % \els -% % \ifcase\tabulatemodus\relax\hbox to\else\hsize\fi\the\tabulatewidth -% % \fi}% -% % -% % 0 = NC column next EQ equal column -% % 1 = RC column raw RQ equal column raw -% % 2 = HC column hook HQ equal column hook -% % some entries can be left out if we test for them being set -% \@EA\appendtoks \@EA&\@EA\hskip\pretabskip##&\to\!!toksa -% 
\appendtoks \ignorespaces\to\!!toksa -% %\@EA\appendtoks\@EA\xdef\@EA\tabulatecolumn\@EA{\tabulatecolumns}\to\!!toksa -% \@EA\appendtoks\@EA\xdef\@EA\tabulatecolumn\@EA{\the\tabulatecolumns}\to\!!toksa -% \appendtoks \checktabulatesetups\to\!!toksa -% \appendtoks \checktabulatehook\to\!!toksa -% \@EA\appendtoks \preamblebox\to\!!toksa -% \appendtoks \bgroup\bbskip\bgroup#1\to\!!toksa -% \appendtoks\ifnum\tabulatetype=\plusone \else \to\!!toksa -% \@EA\appendtoks \the\tabulatebmath\to\!!toksa -% \@EA\appendtoks \the\tabulatefont\to\!!toksa -% \@EA\appendtoks \the\tabulatesettings\to\!!toksa -% \@EA\appendtoks \the\tabulatebefore\to\!!toksa -% \appendtoks\fi \to\!!toksa -% \appendtoks \bgroup\ignorespaces\to\!!toksa -% % -% \appendtoks \tabulatehook##\to\!!toksa -% % -% %%\doifdefinedelse{\@@tabalign@@\tabulatecolumns} -% %\doifdefinedelse{\@@tabalign@@\the\tabulatecolumns} -% % {\appendtoks\handletabulatecharalign## \to\!!toksa} -% % {\appendtoks\tabulatehook ##\to \!!toksa}% -% % waarom kan ik hier geen \xx{##} geven, om een of -% % andere reden passeert dan tex de hele regel (incl \NC's) -% % als argument; elke delimiter <> space gaat trouwens fout -% \appendtoks \unskip\unskip\ifmmode\else\endgraf\fi\egroup\to\!!toksa -% \appendtoks\ifnum\tabulatetype=1 \else \to\!!toksa -% \@EA\appendtoks \the\tabulateafter\to\!!toksa -% \@EA\appendtoks \the\tabulateemath\to\!!toksa -% \appendtoks\fi \to\!!toksa -% \appendtoks #2\egroup\egroup\to\!!toksa -% \@EA\appendtoks \@EA&\@EA\hskip\postabskip##\to\!!toksa -% \appendtoks\NC\to\tabulatedummy -% \let\bbskip\empty -% \def\pretabskip{.5\tabulateunit}% -% \let\postabskip\pretabskip -% \let\gettabulateexit\dogettabulateexit -% \tabulatewidth\zeropoint} - \let\pretabrule \donothing \let\posttabrule\donothing diff --git a/tex/context/base/core-two.mkiv b/tex/context/base/core-two.mkiv index bbe00be92..d4641e024 100644 --- a/tex/context/base/core-two.mkiv +++ b/tex/context/base/core-two.mkiv @@ -20,8 +20,8 @@ \immediatewriteutilitytua{local tp = job.twopass}% \to \everyopenutilities -\def\immediatesavetwopassdata #1#2#3{\expanded{\immediatewriteutilitytua{table.insert(tp['#1'],"#3")}}} -\def\savetwopassdata #1#2#3{\expanded{\writeutilitytua {table.insert(tp['#1'],"#3")}}} +\def\immediatesavetwopassdata #1#2#3{\expanded{\immediatewriteutilitytua{ti(tp['#1'],"#3")}}} +\def\savetwopassdata #1#2#3{\expanded{\writeutilitytua {ti(tp['#1'],"#3")}}} \def\immediatesavetaggedtwopassdata#1#2#3#4{\expanded{\immediatewriteutilitytua{tp['#1']['#3']="#4"}}} \def\savetaggedtwopassdata #1#2#3#4{\expanded{\writeutilitytua {tp['#1']['#3']="#4"}}} diff --git a/tex/context/base/core-uti.mkiv b/tex/context/base/core-uti.mkiv index 88f95efed..8059ed69d 100644 --- a/tex/context/base/core-uti.mkiv +++ b/tex/context/base/core-uti.mkiv @@ -61,13 +61,14 @@ \immediatewriteutilitytua{if job and job.version and not job.version == "\utilityversion" then return end}% \immediatewriteutilitytua{if not job then job = { } end}% \immediatewriteutilitytua{job.version = "\utilityversion"}% + \immediatewriteutilitytua{local ti = table.insert}% \to \everyopenutilities \appendtoks \immediatewriteutilitytua{end}% \immediatewriteutilitytua{}% \immediatewriteutilitytua{-- end of utility file}% - %immediate\closeout\utility@tua + %\immediate\closeout\utility@tua \to \everycloseutilities % The next file can be in lua or luc format: @@ -114,4 +115,8 @@ \ctxlua{input.storage.finalize()}% \to \everyfinalizeluacode +\appendtoks + \ctxlua{nodes.cleanup_reserved()}% +\to \everydump + \protect \endinput diff 
--git a/tex/context/base/core-uti.tex b/tex/context/base/core-uti.tex index 55cebf673..b91abdd3a 100644 --- a/tex/context/base/core-uti.tex +++ b/tex/context/base/core-uti.tex @@ -277,6 +277,20 @@ % we need to pop and push, else problems with reading % utility files (toc) in xml mode and (e.g.) in a toc % entry doing a doifmode +% +% the following is not ok because we have no way to signal +% xml content (yet), so for the moment we use this: + +\appendtoks + \ifprocessingXML + \processingXMLfalse + \enableXML + \catcode`\\=\@@escape + \catcode`\{=\@@begingroup + \catcode`\}=\@@endgroup + \catcode`\%=\@@comment\relax + \fi +\to \everybeforeutilityread \long\def\doutilities#1#2#3#4#5% % introduceren in utility file {\resetutilities diff --git a/tex/context/base/enco-ini.mkiv b/tex/context/base/enco-ini.mkiv index a676c46aa..45e467252 100644 --- a/tex/context/base/enco-ini.mkiv +++ b/tex/context/base/enco-ini.mkiv @@ -21,7 +21,9 @@ characters.context.rehash() \stopruntimectxluacode -\ctxlua { characters.context.define() } % redefines all \characters +\ctxlua { + characters.context.define() +} % redefines all \characters \useencoding[032,033,037] % fallbacks for some unicode chars, todo diff --git a/tex/context/base/enco-ini.tex b/tex/context/base/enco-ini.tex index 202fa38ef..05bec2ba7 100644 --- a/tex/context/base/enco-ini.tex +++ b/tex/context/base/enco-ini.tex @@ -641,7 +641,8 @@ \pathypsettings\afterassignment\hyphenation\scratchtoks=} %D This is not needed for patterns because they are loaded grouped -%D anyway and it saves us an assignment. +%D anyway and it saves us an assignment. Can go ... no longer +%D shared patterns. \def\startpatternloading#1#2#3% % we should use \everypatternloading {\startreadingfile diff --git a/tex/context/base/font-afm.lua b/tex/context/base/font-afm.lua index e6cb9fcd6..b8c2eea59 100644 --- a/tex/context/base/font-afm.lua +++ b/tex/context/base/font-afm.lua @@ -19,7 +19,7 @@ away. fonts = fonts or { } fonts.afm = fonts.afm or { } -fonts.afm.version = 1.13 -- incrementing this number one up will force a re-cache +fonts.afm.version = 1.21 -- incrementing this number one up will force a re-cache fonts.afm.syncspace = true -- when true, nicer stretch values fonts.afm.enhance_data = true -- best leave this set to true fonts.afm.trace_features = false @@ -35,8 +35,62 @@ fonts.afm.cache = containers.define("fonts", "afm", fonts.afm.version built inIn principle we can share tfm tables when we are in node for a font, but then
+we need to define a font switch as an id/attr switch which is no fun, so in that
+case users can best use dynamic features ... so, we will not use that speedup. Okay,
+when we get rid of base mode we can optimize even further by sharing, but then we
+lose our testcases for
So far we haven't really dealt with features (or whatever we want to pass along with the font definition). We distinguish the following
+situations:
name: xetex like specs
@@ -338,7 +374,7 @@ end
fonts.define.register_split("@", fonts.define.specify.predefined)
-function fonts.define.specify.colonized(specification)
+function fonts.define.specify.colonized(specification) -- xetex mode
local list = { }
if specification.detail and specification.detail ~= "" then
local expanded_features = { }
@@ -378,29 +414,88 @@ end
fonts.define.register_split(":", fonts.define.specify.colonized)
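
For readers less familiar with the xetex-like colon syntax that colonized() accepts, the following standalone sketch shows how such a specification string can be split into a name plus a feature hash. It is only an illustration: the function name and the sample string are invented, and the real colonized() works on a specification table filled in elsewhere in the define code.

-- minimal sketch, assuming a "name:key=value;+feature;-feature" shape
local function parse_colon_spec(str)
    local name, detail = str:match("^([^:]+):?(.*)$")
    local features = { }
    for feature in detail:gmatch("[^;]+") do
        local key, value = feature:match("^(.-)=(.*)$")
        if key then
            features[key] = value
        elseif feature:sub(1,1) == "+" then
            features[feature:sub(2)] = "yes"
        elseif feature:sub(1,1) == "-" then
            features[feature:sub(2)] = "no"
        else
            features[feature] = "yes"
        end
    end
    return name, features
end

local name, features = parse_colon_spec("myfont:+liga;smcp=yes;-kern")
-- name == "myfont", features == { liga = "yes", smcp = "yes", kern = "no" }
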
-fonts.define.specify.context_setups = fonts.define.specify.context_setups or { }
+fonts.define.specify.context_setups = fonts.define.specify.context_setups or { }
+fonts.define.specify.context_numbers = fonts.define.specify.context_numbers or { }
+fonts.define.specify.synonyms = fonts.define.specify.synonyms or { }
-input.storage.register(false,"fonts/setups", fonts.define.specify.context_setups, "fonts.define.specify.context_setups")
+input.storage.register(false,"fonts/setups" , fonts.define.specify.context_setups , "fonts.define.specify.context_setups" )
+input.storage.register(false,"fonts/numbers", fonts.define.specify.context_numbers, "fonts.define.specify.context_numbers")
function fonts.define.specify.preset_context(name,features)
+ local fds = fonts.define.specify
+ local setups, numbers, synonyms = fds.context_setups, fds.context_numbers, fds.synonyms
+ local number = (setups[name] and setups[name].number) or 0
local t = aux.settings_to_hash(features)
for k,v in pairs(t) do
+ k = synonyms[k] or k
t[k] = v:is_boolean()
if type(t[k]) == "nil" then
t[k] = v
end
end
- fonts.define.specify.context_setups[name] = t
+ if number == 0 then
+ numbers[#numbers+1] = name
+ t.number = #numbers
+ else
+ t.number = number
+ end
+ setups[name] = t
+end
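
The preset_context function above turns a feature string into a hash (using the ConTeXt helper aux.settings_to_hash), applies synonyms, coerces yes/no values to booleans, and gives each named setup a stable number. A minimal standalone sketch of that bookkeeping, with a hand-rolled parser instead of the helper and invented names throughout:

local setups, numbers = { }, { }
local synonyms = { texligatures = "tlig", texquotes = "trep" }

local function preset(name, features)
    local t = { }
    for key, value in features:gmatch("([^=,]+)=([^,]+)") do
        key = synonyms[key] or key
        if value == "yes" then value = true elseif value == "no" then value = false end
        t[key] = value
    end
    local number = setups[name] and setups[name].number
    if not number then
        numbers[#numbers+1] = name
        number = #numbers
    end
    t.number = number
    setups[name] = t
    return number
end

print(preset("default",   "liga=yes,kern=yes,texligatures=yes")) -- 1
print(preset("smallcaps", "smcp=yes,script=latn"))               -- 2
print(preset("default",   "liga=yes,kern=no"))                   -- still 1
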
+
+--~ function fonts.define.specify.context_number(name)
+--~ local s = fonts.define.specify.context_setups[name]
+--~ return (s and s.number) or -1
+--~ end
+
+do
+
+ -- here we clone features according to languages
+
+ local default = 0
+ local setups = fonts.define.specify.context_setups
+ local numbers = fonts.define.specify.context_numbers
+
+ function fonts.define.specify.context_number(name)
+ local t = setups[name]
+ if not t then
+ return default
+ elseif t.auto then
+ local lng = tonumber(tex.language)
+ local tag = name .. ":" .. lng
+ local s = setups[tag]
+ if s then
+ return s.number or default
+ else
+ local script, language = languages.association(lng)
+ if t.script ~= script or t.language ~= language then
+ local s = table.fastcopy(t)
+ local n = #numbers + 1
+ setups[tag] = s
+ numbers[n] = tag
+ s.number = n
+ s.script = script
+ s.language = language
+ return n
+ else
+ setups[tag] = t
+ return t.number or default
+ end
+ end
+ else
+ return t.number or default
+ end
+ end
+
end
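
The auto branch above clones a setup per language and caches the clone under a name:number tag, so a later request for the same combination is a plain table lookup. The following self-contained sketch mimics that clone-and-cache pattern; tex.language and languages.association are LuaTeX/ConTeXt specifics, so the association is faked here and all names are illustrative.

local setups, numbers = { }, { }

local function association(lng) -- stand-in for languages.association(lng)
    if lng == 2 then return "arab", "ara" else return "latn", "dflt" end
end

local function context_number(name, lng)
    local t = setups[name]
    if not t then return 0 end
    if not t.auto then return t.number or 0 end
    local tag = name .. ":" .. lng
    local s = setups[tag]
    if s then return s.number or 0 end
    local script, language = association(lng)
    s = { }
    for k, v in pairs(t) do s[k] = v end -- shallow clone of the base setup
    s.script, s.language = script, language
    numbers[#numbers+1] = tag
    s.number = #numbers
    setups[tag] = s
    return s.number
end

setups["default"] = { auto = true, liga = true, number = 0 }
print(context_number("default", 1)) -- clones default:1 -> 1
print(context_number("default", 2)) -- clones default:2 -> 2
print(context_number("default", 1)) -- cached            -> 1
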
-function fonts.define.specify.context_tostring(name,kind,separator,yes,no,strict)
- return aux.hash_to_string(table.merged(fonts[kind].features.default or {},fonts.define.specify.context_setups[name] or {}),separator,yes,no,strict)
+function fonts.define.specify.context_tostring(name,kind,separator,yes,no,strict,omit)
+ return aux.hash_to_string(table.merged(fonts[kind].features.default or {},fonts.define.specify.context_setups[name] or {}),separator,yes,no,strict,omit)
end
function fonts.define.specify.split_context(features)
if fonts.define.specify.context_setups[features] then
return fonts.define.specify.context_setups[features]
- else
+ else -- ? ? ?
return fonts.define.specify.preset_context("***",features)
end
end
@@ -454,7 +549,7 @@ function fonts.define.read(name,size,id)
specification = fonts.define.resolve(specification)
local hash = fonts.tfm.hash_instance(specification)
if true then
- local fontdata = containers.read(fonts.cache,hash) -- for tracing purposes
+ --~ local fontdata = containers.read(fonts.cache,hash) -- for tracing purposes
end
local fontdata = fonts.tfm.internalized[hash] -- id
if not fontdata then
@@ -465,7 +560,7 @@ function fonts.define.read(name,size,id)
fonts.tfm.check_virtual_id(fontdata)
end
if true then
- fontdata = containers.write(fonts.cache,hash,fontdata) -- for tracing purposes
+ --~ fontdata = containers.write(fonts.cache,hash,fontdata) -- for tracing purposes
end
if not fonts.tfm.internalized[hash] then
fonts.tfm.id[id] = fontdata
diff --git a/tex/context/base/font-enc.lua b/tex/context/base/font-enc.lua
index 3cc6433b2..2d1005ad2 100644
--- a/tex/context/base/font-enc.lua
+++ b/tex/context/base/font-enc.lua
@@ -95,15 +95,20 @@ end
one.
--ldx]]--
-do
+-- maybe make this a function:
+
+function fonts.enc.make_unicode_vector()
local vector, hash = { }, { }
- for k,v in pairs(characters.data) do
- local a = v.adobename
- if a then
- vector[k], hash[a] = a, k
+ for code, v in pairs(characters.data) do
+ local name = v.adobename
+ if name then
+ vector[code], hash[name] = name, code
else
- vector[k] = '.notdef'
+ vector[code] = '.notdef'
end
end
+ for name, code in pairs(characters.synonyms) do
+ vector[code], hash[name] = name, code
+ end
containers.write(fonts.enc.cache, 'unicode', { name='unicode', tag='unicode', vector=vector, hash=hash })
end
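
A standalone sketch of the vector/hash construction in make_unicode_vector, with characters.data and characters.synonyms replaced by a few fake entries (the real tables are generated ConTeXt data):

local data = {
    [0x0041] = { adobename = "A" },
    [0x0042] = { adobename = "B" },
    [0x00A0] = { },                    -- no adobe name, becomes .notdef
}
local synonyms = { nonbreakingspace = 0x00A0 }

local vector, hash = { }, { }
for code, v in pairs(data) do
    local name = v.adobename
    if name then
        vector[code], hash[name] = name, code
    else
        vector[code] = ".notdef"
    end
end
for name, code in pairs(synonyms) do
    vector[code], hash[name] = name, code
end

print(vector[0x0041], hash["A"])                -- A                 65
print(vector[0x00A0], hash["nonbreakingspace"]) -- nonbreakingspace  160
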
diff --git a/tex/context/base/font-fbk.lua b/tex/context/base/font-fbk.lua
index b81b94309..e5975a38e 100644
--- a/tex/context/base/font-fbk.lua
+++ b/tex/context/base/font-fbk.lua
@@ -111,7 +111,7 @@ end
fonts.vf.aux.combine.force_composed = false
-fonts.vf.aux.combine.commands["complete-composed-characters"] = function(g,v)
+ fonts.vf.aux.combine.commands["complete-composed-characters"] = function(g,v)
local chars = g.characters
local cap_lly = chars[string.byte("X")].boundingbox[4]
local ita_cor = math.cos(math.rad(90+g.italicangle))
@@ -126,7 +126,10 @@ fonts.vf.aux.combine.commands["complete-composed-characters"] = function(g,v)
local cc = c.category
if (cc == 'll') or (cc == 'lu') or (cc == 'lt') then
local acc = s[3]
- chars[i] = table.fastcopy(chars[chr])
+ local t = table.fastcopy(chars[chr])
+t.name = ""
+t.index = i
+t.unicode = i
if chars[acc] then
local cb = chars[chr].boundingbox
local ab = chars[acc].boundingbox
@@ -139,9 +142,10 @@ fonts.vf.aux.combine.commands["complete-composed-characters"] = function(g,v)
local dd = (c_urx-c_llx)*ita_cor
if a_ury < 0 then
local dy = cap_lly-a_lly
- chars[i].commands = {
+ t.commands = {
{"push"},
{"right", dx-dd},
+ {"down", -dy}, -- added
{special, red},
{"slot", 1, acc},
{special, black},
@@ -150,7 +154,7 @@ fonts.vf.aux.combine.commands["complete-composed-characters"] = function(g,v)
}
elseif c_ury > a_lly then
local dy = cap_lly-a_lly
- chars[i].commands = {
+ t.commands = {
{"push"},
{"right", dx+dd},
{"down", -dy},
@@ -161,7 +165,7 @@ fonts.vf.aux.combine.commands["complete-composed-characters"] = function(g,v)
{"slot", 1, chr},
}
else
- chars[i].commands = {
+ t.commands = {
{"push"},
{"right", dx+dd},
{special, blue},
@@ -171,6 +175,7 @@ fonts.vf.aux.combine.commands["complete-composed-characters"] = function(g,v)
{"slot", 1, chr},
}
end
+ chars[i] = t
end
end
end
diff --git a/tex/context/base/font-ini.lua b/tex/context/base/font-ini.lua
index d4adf360b..ce4a9e431 100644
--- a/tex/context/base/font-ini.lua
+++ b/tex/context/base/font-ini.lua
@@ -34,6 +34,10 @@ fonts.triggers = {
'script'
}
+fonts.define = fonts.define or { }
+fonts.define.specify = fonts.define.specify or { }
+fonts.define.specify.synonyms = fonts.define.specify.synonyms or { }
+
-- tracing
do
@@ -42,7 +46,7 @@ do
fonts.color.trace = false
- local attribute = attributes.numbers['color'] or 4 -- we happen to know this -)
+ local attribute = attributes.numbers['color'] or 7 -- we happen to know this -)
local mapping = attributes.list[attribute]
local set_attribute = node.set_attribute
diff --git a/tex/context/base/font-ini.mkii b/tex/context/base/font-ini.mkii
index ef2ab85a7..f4ed9893e 100644
--- a/tex/context/base/font-ini.mkii
+++ b/tex/context/base/font-ini.mkii
@@ -16,4 +16,35 @@
\def\mkdefinefontfeature#1% #2
{\setvalue{\??fa#1}} % {#2}
+\def\updatefontparameters
+ {\edef\@@fontencoding{\truefontdata\fontfile \s!encoding}%
+ \edef\@@fontmapping {\truefontdata\fontfile \s!mapping }%
+ \edef\@@fonthandling{\truefontdata\somefontname\s!handling}%
+ \edef\@@fontfeatures{\truefontdata\fontfile \s!features}%
+ \edef\@@fontskewchar{\truefontdata\fontfile \s!skewchar}}
+
+\def\setfontcharacteristics
+ {\updatefontparameters % redundant, will go away, faster too
+ \fastenableencoding
+ {\ifx\@@fontencoding\empty
+ \s!default \else \@@fontencoding
+ \fi}%
+ \fastenablemapping
+ {\ifx\@@fontmapping\empty
+ \ifx\@@fontencoding\empty
+ \s!default \else \@@fontencoding
+ \fi
+ \else
+ \@@fontmapping
+ \fi}%
+ \fastenablehandling
+ {\ifx\@@fonthandling\empty
+ \s!default \else \@@fonthandling
+ \fi}%
+ {\lastfontidentifier}%
+ \the\everyfont
+ \synchronizepatternswithfont}
+
+\ifx\synchronizepatternswithfont\undefined \def\synchronizepatternswithfont{\synchronizepatterns} \fi
+
\protect \endinput
diff --git a/tex/context/base/font-ini.mkiv b/tex/context/base/font-ini.mkiv
index 45ff3480e..86b21fa58 100644
--- a/tex/context/base/font-ini.mkiv
+++ b/tex/context/base/font-ini.mkiv
@@ -22,6 +22,10 @@
\registerctxluafile{font-def}{1.001}
\registerctxluafile{font-fbk}{1.001}
+\startruntimectxluacode
+ fonts.enc.make_unicode_vector()
+\stopruntimectxluacode
+
\unprotect
\def\mkdefinefontfeature#1#2%
@@ -77,6 +81,44 @@
{\dodoubleargument\dofontfeatureslist}
\def\dofontfeatureslist[#1][#2]% todo: arg voor type
- {\ctxlua{tex.sprint(tex.ctxcatcodes,fonts.define.specify.context_tostring("#1","otf","\luaescapestring{#2}","yes","no",true))}}
+ {\ctxlua{tex.sprint(tex.ctxcatcodes,fonts.define.specify.context_tostring("#1","otf","\luaescapestring{#2}","yes","no",true,{"number"}))}}
+
+\def\definefontlocal#1%
+ {\expandafter\font\csname#1\endcsname\lastfontname\relax}
+
+\def\definefontglobal#1%
+ {\global\expandafter\font\csname#1:\endcsname\lastfontname\relax}
+
+\attribute\zerocount\zerocount % first in list, so fast match
+
+% \def\featureattribute#1{\ctxlua{tex.sprint(fonts.define.specify.context_number("#1"))}}
+% \def\setfontfeature #1{\attribute\zerocount\featureattribute{#1}\relax}
+% \def\resetfontfeature#1{\attribute\zerocount\zerocount}
+
+\let\currentfeature\empty
+
+\def\featureattribute#1{\ctxlua{tex.sprint(fonts.define.specify.context_number("#1"))}}
+\def\setfontfeature #1{\edef\currentfeature{#1}\attribute\zerocount\featureattribute{#1}\relax}
+\def\resetfontfeature#1{\let\currentfeature\empty\attribute\zerocount\zerocount}
+
+\appendtoks
+ \setfontfeature\currentfeature
+\to \everylanguage
+
+%D Simpler:
+
+\def\updatefontparameters
+ {\edef\@@fonthandling{\truefontdata\somefontname\s!handling}%
+ \edef\@@fontfeatures{\truefontdata\fontfile \s!features}%
+ \edef\@@fontskewchar{\truefontdata\fontfile \s!skewchar}}
+
+\def\setfontcharacteristics
+ {\fastenablehandling{\ifx\@@fonthandling\empty\s!default\else\@@fonthandling\fi}\lastfontidentifier
+ \the\everyfont}
+
+%D Predefined:
+
+% \installfontfeature[otf][tlig]
+% \installfontfeature[otf][trep]
\protect \endinput
diff --git a/tex/context/base/font-ini.tex b/tex/context/base/font-ini.tex
index 0b8da7756..b15e3812d 100644
--- a/tex/context/base/font-ini.tex
+++ b/tex/context/base/font-ini.tex
@@ -1240,7 +1240,6 @@
\expandafter\dogetglobalfontparameter
\fi}
-\let\@@fontresource\empty
\let\@@fontencoding\empty
\let\@@fontmapping \empty
\let\@@fonthandling\empty
@@ -1322,12 +1321,7 @@
% \definefontsynonym[Serif] [palatinonova-regular*default]
% \definefontsynonym[SerifCaps] [palatinonova-regular*default-caps]
% \stoptypescript
-%
-% \starttypescript [serif] [palatino-nova-regular] [name]
-% \definefontsynonym[Serif] [palatinonova-regular] [resource=palatinonova-regular,features=default]
-% \definefontsynonym[SerifCaps][palatinonova-regular-sc][resource=palatinonova-regular,features=default-caps] % caps
-% \stoptypescript
-%
+
% \definetypeface[mainface][rm][serif][palatino-nova-regular][default] \setupbodyfont[mainface]
%
% \starttext
@@ -1399,14 +1393,12 @@
\def\docheckfontfilename#1*#2#3*#4\relax % class overrules file
{\edef\checkedfontfile{{%
- \ifx\@@fontresource\empty#1\else\@@fontresource\fi
+ #1%
\expandafter\ifx\csname\fontclass\s!features\endcsname\empty
\ifx\@@fontfeatures\empty\ifx#2\empty\else*#2#3\fi\else*\@@fontfeatures\fi
- \else\expandafter\ifx\csname\fontclass\s!features\endcsname\relax % redundant, will go away
- \ifx\@@fontfeatures\empty\ifx#2\empty\else*#2#3\fi\else*\@@fontfeatures\fi
\else
*\csname\fontclass\s!features\endcsname
- \fi\fi
+ \fi
}}%
\doshowcheckedfontfeatures}
@@ -1468,29 +1460,28 @@
\edef\!!stringb{#2}%
\ifx\!!stringb\empty
% no prefix
- \edef\checkedfontfile{\ifx\@@fontresource\empty\!!stringa\else\@@fontresource\fi}%
+ \let\checkedfontfile\!!stringa
\doiffoundxetexfontelse{1a}{\checkedfontfile\checkedfontfeatures}
{\edef\checkedfontfile{\checkedfontfile\checkedfontfeatures}}
{\doiffoundxetexfontelse{1b}{"\checkedfontfile\checkedfontfeatures"}
{\edef\checkedfontfile{"\checkedfontfile\checkedfontfeatures"}}
{\doiffoundxetexfontelse{1c}{"[\checkedfontfile]\checkedfontfeatures"}
{\edef\checkedfontfile{"[\checkedfontfile]\checkedfontfeatures"}}
- {\edef\checkedfontfile{\checkedfontfile}}}}%
+ {}}}%
\else\ifx\!!stringa\v!file
% force file, only file check when no spaces
- \edef\checkedfontfile{\ifx\@@fontresource\empty\!!stringb\else\@@fontresource\fi}%
+ \let\checkedfontfile\!!stringb
\doiffoundxetexfontelse{2b}{"[\checkedfontfile]\checkedfontfeatures"}
{\edef\checkedfontfile{"[\checkedfontfile]\checkedfontfeatures"}}
{\doiffoundxetexfontelse{2c}{"\checkedfontfile\checkedfontfeatures"}
{\edef\checkedfontfile{"\checkedfontfile\checkedfontfeatures"}}
- {\edef\checkedfontfile{\checkedfontfile}}}%
+ {}}%
\else\ifx\!!stringa\v!name
% force name, always lookup by xetex itself, "" forces otf/ttf/type1
- \edef\checkedfontfile{\ifx\@@fontresource\empty\!!stringb\else\@@fontresource\fi}%
- \edef\checkedfontfile{"\checkedfontfile\checkedfontfeatures"}%
+ \edef\checkedfontfile{"\!!stringb\checkedfontfeatures"}%
\else
% whatever, maybe even xetex spec, forget about features
- \edef\checkedfontfile{"\ifx\@@fontresource\empty\!!stringa\!!stringb\else\@@fontresource\fi"}%
+ \edef\checkedfontfile{"\!!stringa\!!stringb"}%
\fi\fi\fi}
\def\checkfontfilename% -- todo: integrate so that we call do.. directly
@@ -1965,7 +1956,6 @@
\edef\currentfontfileencoding{\truefontdata\@@truefontname\s!encoding}%
\edef\currentfontfilemapping {\truefontdata\@@truefontname\s!mapping }%
\edef\currentfontfilehandling{\truefontdata\@@truefontname\s!handling}%
- \edef\currentfontfileresource{\truefontdata\@@truefontname\s!resource}%
\edef\currentfontfilefeatures{\truefontdata\@@truefontname\s!features}}
%D \macros
@@ -3531,37 +3521,8 @@
%D The font specific features are bound to the filename.
-\def\updatefontparameters % can be simpler for mkii (and mkiv no font encoding)
- {\edef\@@fontencoding{\truefontdata\fontfile \s!encoding}%
- \edef\@@fontmapping {\truefontdata\fontfile \s!mapping }%
- \edef\@@fonthandling{\truefontdata\somefontname\s!handling}%
- \edef\@@fontfeatures{\truefontdata\fontfile \s!features}%
- \edef\@@fontresource{\truefontdata\fontfile \s!resource}%
- \edef\@@fontskewchar{\truefontdata\fontfile \s!skewchar}}
-
-\def\setfontcharacteristics
- {\updatefontparameters % redundant, will go away, faster too
- \fastenableencoding
- {\ifx\@@fontencoding\empty
- \s!default \else \@@fontencoding
- \fi}%
- \fastenablemapping
- {\ifx\@@fontmapping\empty
- \ifx\@@fontencoding\empty
- \s!default \else \@@fontencoding
- \fi
- \else
- \@@fontmapping
- \fi}%
- \fastenablehandling
- {\ifx\@@fonthandling\empty
- \s!default \else \@@fonthandling
- \fi}%
- {\lastfontidentifier}%
- \the\everyfont
- \synchronizepatternswithfont}
-
-\ifx\synchronizepatternswithfont\undefined \def\synchronizepatternswithfont{\synchronizepatterns} \fi
+\ifx\updatefontparameters \undefined \let\updatefontparameters \relax \fi
+\ifx\setfontcharacteristics\undefined \let\setfontcharacteristics\relax \fi
%D Experimental:
diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua
index 46ec72aa0..68142e4c4 100644
--- a/tex/context/base/font-otf.lua
+++ b/tex/context/base/font-otf.lua
@@ -6,27 +6,12 @@ if not modules then modules = { } end modules ['font-otf'] = {
license = "see context related readme files"
}
+--- todo: featuredata is now indexed by kind,lookup but probably lookup is okay too
-- abvf abvs blwf blwm blws dist falt half halt jalt lfbd ljmo
-- mset opbd palt pwid qwid rand rtbd rtla ruby size tjmo twid valt vatu vert
-- vhal vjmo vkna vkrn vpal vrt2
--- otfdata zit in tfmdata / check
-
---~ function string:split_at_space()
---~ local t = { }
---~ for s in self:gmatch("(%S+)") do
---~ t[#t+1] = s
---~ end
---~ return t
---~ end
-
--- beware, the node related functions need to return head, current -- todo
--- we may move marks to components so that parsing is faster
-
--- using for i=1,#t do ... t[i] ... end is much faster than using ipairs
--- copying some functions is faster than sharing code chunks esp here
-
--[[ldx--
This module is sparsely documented because it is a moving target.
The table format of the reader changes and we experiment a lot with
@@ -34,11 +19,20 @@ different methods for supporting features.
As with the code, we may decide to store more information
in the table.
+
+Incrementing the version number will force a re-cache. We jump the
+number by one when there's a fix in the library or
+ code that results in different tables.
--ldx]]--
+--~ The node based processing functions look quite complex which is mainly due to
+--~ the fact that we need to share data and cache resolved issues (saves much memory and
+--~ is also faster). A further complication is that we support static as well as dynamic
+--~ features.
+
fonts = fonts or { }
fonts.otf = fonts.otf or { }
-fonts.otf.version = 1.64 -- incrementing this number one up will force a re-cache
+fonts.otf.version = 1.73
fonts.otf.tables = fonts.otf.tables or { }
fonts.otf.meanings = fonts.otf.meanings or { }
fonts.otf.enhance_data = false
@@ -49,6 +43,7 @@ fonts.otf.features.data = { }
fonts.otf.features.list = { } -- not (yet) used, oft fonts have gpos/gsub lists
fonts.otf.features.default = { }
fonts.otf.trace_features = false
+fonts.otf.trace_set_features = false
fonts.otf.trace_replacements = false
fonts.otf.trace_contexts = false
fonts.otf.trace_anchors = false
@@ -702,6 +697,10 @@ end
fonts.otf.enhance = fonts.otf.enhance or { }
fonts.otf.enhance.add_kerns = true
+fonts.otf.featurefiles = {
+--~ "texhistoric.fea"
+}
+
function fonts.otf.load(filename,format,sub,featurefile)
local name = file.basename(file.removesuffix(filename))
if featurefile then
@@ -729,13 +728,19 @@ function fonts.otf.load(filename,format,sub,featurefile)
end
if ff then
logs.report("load otf","loading: " .. filename)
- if featurefile then
- featurefile = input.find_file(texmf.instance,file.addsuffix(featurefile,'fea'),"FONTFEATURES")
- if featurefile and featurefile ~= "" then
- logs.report("load otf", "featurefile: " .. featurefile)
- fontforge.apply_featurefile(ff, featurefile)
+ local function load_featurefile(featurefile)
+ if featurefile then
+ featurefile = input.find_file(texmf.instance,file.addsuffix(featurefile,'fea'),"FONTFEATURES")
+ if featurefile and featurefile ~= "" then
+ logs.report("load otf", "featurefile: " .. featurefile)
+ fontforge.apply_featurefile(ff, featurefile)
+ end
end
end
+ for _, featurefile in pairs(fonts.otf.featurefiles) do
+ load_featurefile(featurefile)
+ end
+ load_featurefile(featurefile)
data = fontforge.to_table(ff)
fontforge.close(ff)
if data then
@@ -754,19 +759,19 @@ function fonts.otf.load(filename,format,sub,featurefile)
end
end
end
- if data then
- local map = data.map.map
- local backmap = data.map.backmap
- local unicodes = data.luatex.unicodes
- local glyphs = data.glyphs
- -- maybe handy some day, not used
- data.name_to_unicode = function (n) return unicodes[n] end
- data.name_to_index = function (n) return map[unicodes[n]] end
- data.index_to_name = function (i) return glyphs[i].name end
- data.unicode_to_name = function (u) return glyphs[map[u]].name end
- data.index_to_unicode = function (u) return backmap[u] end
- data.unicode_to_index = function (u) return map[u] end
- end
+--~ if data then
+--~ local map = data.map.map
+--~ local backmap = data.map.backmap
+--~ local unicodes = data.luatex.unicodes
+--~ local glyphs = data.glyphs
+--~ -- maybe handy some day, not used
+--~ data.name_to_unicode = function (n) return unicodes[n] end
+--~ data.name_to_index = function (n) return map[unicodes[n]] end
+--~ data.index_to_name = function (i) return glyphs[i].name end
+--~ data.unicode_to_name = function (u) return glyphs[map[u]].name end
+--~ data.index_to_unicode = function (u) return backmap[u] end
+--~ data.unicode_to_index = function (u) return map[u] end
+--~ end
return data
end
@@ -786,36 +791,98 @@ function fonts.otf.enhance.analyze(data,filename)
data.luatex = t
end
-function fonts.otf.load_cidmap(filename)
- local data = io.loaddata(filename)
- if data then
- local unicodes, names = { }, {}
- data = data:gsub("^(%d+)%s+(%d+)\n","")
- for a,b in data:gmatch("(%d+)%s+([%d%a]+)\n") do
- unicodes[tonumber(a)] = tonumber(b,16)
- end
- for a,b,c in data:gmatch("(%d+)%.%.(%d+)%s+([%d%a]+)%s*\n") do
- c = tonumber(c,16)
- for i=tonumber(a),tonumber(b) do
- unicodes[i] = c
- c = c + 1
- end
- end
- for a,b in data:gmatch("(%d+)%s+\/(%S+)%s*\n") do
- names[tonumber(a)] = b
- end
- local supplement, registry, ordering = filename:match("^(.-)%-(.-)%-()%.(.-)$")
- return {
- supplement = supplement,
- registry = registry,
- ordering = ordering,
- filename = filename,
- unicodes = unicodes,
- names = names
- }
- else
- return nil
+--~ function fonts.otf.load_cidmap(filename) -- lpeg
+--~ local data = io.loaddata(filename)
+--~ if data then
+--~ local unicodes, names = { }, {}
+--~ data = data:gsub("^(%d+)%s+(%d+)\n","")
+--~ for a,b in data:gmatch("(%d+)%s+([%d%a]+)\n") do
+--~ unicodes[tonumber(a)] = tonumber(b,16)
+--~ end
+--~ for a,b,c in data:gmatch("(%d+)%.%.(%d+)%s+([%d%a]+)%s*\n") do
+--~ c = tonumber(c,16)
+--~ for i=tonumber(a),tonumber(b) do
+--~ unicodes[i] = c
+--~ c = c + 1
+--~ end
+--~ end
+--~ for a,b in data:gmatch("(%d+)%s+\/(%S+)%s*\n") do
+--~ names[tonumber(a)] = b
+--~ end
+--~ local supplement, registry, ordering = filename:match("^(.-)%-(.-)%-()%.(.-)$")
+--~ return {
+--~ supplement = supplement,
+--~ registry = registry,
+--~ ordering = ordering,
+--~ filename = filename,
+--~ unicodes = unicodes,
+--~ names = names
+--~ }
+--~ else
+--~ return nil
+--~ end
+--~ end
+
+do
+ -- original string parser: 0.109, lpeg parser: 0.036 seconds for Adobe-CNS1-4.cidmap
+ --
+ -- 18964 18964 (leader)
+ -- 0 /.notdef
+ -- 1..95 0020
+ -- 99 3000
+
+ local number = lpeg.C(lpeg.R("09","af","AF")^1)
+ local space = lpeg.S(" \n\r\t")
+ local spaces = space^0
+ local period = lpeg.P(".")
+ local periods = period * period
+ local name = lpeg.P("/") * lpeg.C((1-space)^1)
+
+ local unicodes, names = { }, {}
+
+ local tonumber = tonumber
+
+ function do_one(a,b)
+ unicodes[tonumber(a)] = tonumber(b,16)
+ end
+ function do_range(a,b,c)
+ c = tonumber(c,16)
+ for i=tonumber(a),tonumber(b) do
+ unicodes[i] = c
+ c = c + 1
+ end
+ end
+ function do_name(a,b)
+ names[tonumber(a)] = b
+ end
+
+ grammar = lpeg.P { "start",
+ start = number * spaces * number * lpeg.V("series"),
+ series = (spaces * (lpeg.V("one") + lpeg.V("range") + lpeg.V("named")) )^1,
+ one = (number * spaces * number) / do_one,
+ range = (number * periods * number * spaces * number) / do_range,
+ named = (number * spaces * name) / do_name
+ }
+
+ function fonts.otf.load_cidmap(filename) -- lpeg
+ local data = io.loaddata(filename)
+ if data then
+ unicodes, names = { }, { }
+ grammar:match(data)
+ local supplement, registry, ordering = filename:match("^(.-)%-(.-)%-()%.(.-)$")
+ return {
+ supplement = supplement,
+ registry = registry,
+ ordering = ordering,
+ filename = filename,
+ unicodes = unicodes,
+ names = names
+ }
+ else
+ return nil
+ end
end
+
end
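
To see the grammar above at work on the sample format quoted in the comment (a leader line, single mappings, ranges, and named slots), here is a self-contained variant run on an inline string. It follows the same lpeg shape but declares the helpers local and pulls in lpeg explicitly, so it can be run outside the loader.

local lpeg = lpeg or require("lpeg")

local unicodes, names = { }, { }

local number  = lpeg.C(lpeg.R("09","af","AF")^1)
local space   = lpeg.S(" \n\r\t")
local spaces  = space^0
local periods = lpeg.P("..")
local name    = lpeg.P("/") * lpeg.C((1-space)^1)

local function do_one(a,b)    unicodes[tonumber(a)] = tonumber(b,16) end
local function do_range(a,b,c)
    c = tonumber(c,16)
    for i=tonumber(a),tonumber(b) do unicodes[i], c = c, c + 1 end
end
local function do_name(a,b)   names[tonumber(a)] = b end

local grammar = lpeg.P { "start",
    start  = number * spaces * number * lpeg.V("series"),
    series = (spaces * (lpeg.V("one") + lpeg.V("range") + lpeg.V("named")))^1,
    one    = (number * spaces * number) / do_one,
    range  = (number * periods * number * spaces * number) / do_range,
    named  = (number * spaces * name) / do_name,
}

grammar:match("3 3\n0 /.notdef\n1..2 0020\n")
print(names[0], unicodes[1], unicodes[2]) -- .notdef   32   33
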
fonts.otf.cidmaps = { }
@@ -951,11 +1018,19 @@ function fonts.otf.enhance.before(data,filename)
table.compact(subfont.glyphs)
end
end
+
+--~ for index, glyph in pairs(data.glyphs) do
+--~ for k,v in pairs(glyph) do
+--~ if v == 0 then glyph[k] = nil end
+--~ end
+--~ end
+
end
function fonts.otf.enhance.after(data,filename) -- to be split
if fonts.otf.enhance.add_kerns then
local glyphs, mapmap, unicodes = data.glyphs, data.map.map, data.luatex.unicodes
+ local mkdone = false
for index, glyph in pairs(data.glyphs) do
if glyph.kerns then
local mykerns = { } -- unicode indexed !
@@ -977,8 +1052,13 @@ function fonts.otf.enhance.after(data,filename) -- to be split
end
end
glyph.mykerns = mykerns
+ glyph.kerns = nil -- saves space and time
+ mkdone = true
end
end
+ if mkdone then
+ logs.report("load otf", "replacing 'kerns' tables by 'mykerns' tables")
+ end
if data.gpos then
for _, gpos in ipairs(data.gpos) do
if gpos.subtables then
@@ -990,7 +1070,8 @@ function fonts.otf.enhance.after(data,filename) -- to be split
local maxfirsts, maxseconds = table.getn(firsts), table.getn(seconds)
logs.report("load otf", string.format("adding kernclass %s with %s times %s pairs)",lookup, maxfirsts, maxseconds))
for fk, fv in pairs(firsts) do
- for first in fv:gmatch("(%S+)") do
+ -- for first in fv:gmatch("([^ ]+)") do
+ for first in fv:gmatch("[^ ]+") do
local glyph = glyphs[mapmap[unicodes[first]]]
local mykerns = glyph.mykerns
if not mykerns then
@@ -1003,7 +1084,8 @@ function fonts.otf.enhance.after(data,filename) -- to be split
mykerns[lookup] = lookupkerns
end
for sk, sv in pairs(seconds) do
- for second in sv:gmatch("(%S+)") do
+ -- for second in sv:gmatch("([^ ]+)") do
+ for second in sv:gmatch("[^ ]+") do
lookupkerns[unicodes[second]] = offsets[(fk-1) * maxseconds + sk]
end
end
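
The kern class code above addresses a flat offsets array with (fk-1) * maxseconds + sk, i.e. row-major indexing over first and second classes. A small worked example with made-up classes and values:

local firsts     = { "A V", "T" }      -- 2 first classes
local seconds    = { "o e", "A", "." } -- 3 second classes
local offsets    = { -80, -60,   0,    -- row for first class 1
                       0, -40, -20 }   -- row for first class 2
local maxseconds = #seconds

for fk = 1, #firsts do
    for sk = 1, maxseconds do
        local kern = offsets[(fk-1)*maxseconds + sk]
        print(firsts[fk], seconds[sk], kern)
    end
end
-- for instance fk = 2, sk = 2 picks offsets[(2-1)*3+2] = offsets[5] = -40,
-- the kern of the "T" class against the "A" class
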
@@ -1144,15 +1226,15 @@ function fonts.otf.analyze_unicodes(data)
return unicodes
end
-function fonts.otf.analyze_features(g)
+function fonts.otf.analyze_features(g, features)
if g then
local t, done = { }, { }
- for k,v in ipairs(g) do
- local f = v.features
+ for k=1,#g do
+ local f = features or g[k].features
if f then
- for k, v in ipairs(f) do
+ for k=1,#f do
-- scripts and tag
- local tag = v.tag
+ local tag = f[k].tag
if not done[tag] then
t[#t+1] = tag
done[tag] = true
@@ -1167,9 +1249,18 @@ function fonts.otf.analyze_features(g)
return nil
end
-function fonts.otf.valid_subtable(otfdata,language,script,kind)
- local t = otfdata.luatex.subtables
- return t[kind] and t[kind][script] and t[kind][script][language] and t[kind][script][language].lookups
+function fonts.otf.valid_subtable(otfdata,kind,script,language)
+ local tk = otfdata.luatex.subtables[kind]
+ if tk then
+ local tks = tk[script] or tk.dflt
+ if tks then
+ local tksl = tks[language] or tks.dflt
+ if tksl then
+ return tksl.lookups
+ end
+ end
+ end
+ return false
end
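
The rewritten valid_subtable falls back to the dflt entry at both the script and the language level of the nested subtables table. A standalone illustration with a fake layout (the lookup names are invented):

local subtables = {
    liga = {
        latn = { dflt = { lookups = { "ls_l_1" } }, NLD = { lookups = { "ls_l_2" } } },
        dflt = { dflt = { lookups = { "ls_l_0" } } },
    },
}

local function valid_subtable(subtables, kind, script, language)
    local tk = subtables[kind]
    if tk then
        local tks = tk[script] or tk.dflt
        if tks then
            local tksl = tks[language] or tks.dflt
            if tksl then
                return tksl.lookups
            end
        end
    end
    return false
end

print(valid_subtable(subtables, "liga", "latn", "NLD")[1])  -- ls_l_2
print(valid_subtable(subtables, "liga", "latn", "FRA")[1])  -- ls_l_1 (language fallback)
print(valid_subtable(subtables, "liga", "grek", "dflt")[1]) -- ls_l_0 (script fallback)
print(valid_subtable(subtables, "smcp", "latn", "dflt"))    -- false
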
function fonts.otf.features.register(name,default)
@@ -1177,59 +1268,70 @@ function fonts.otf.features.register(name,default)
fonts.otf.features.default[name] = default
end
-function fonts.otf.set_features(tfmdata)
+function fonts.otf.set_features(tfmdata) -- node and base, simple mapping
local shared = tfmdata.shared
local otfdata = shared.otfdata
shared.features = fonts.define.check(shared.features,fonts.otf.features.default)
local features = shared.features
-tfmdata.language = tfmdata.language or 'dflt'
-tfmdata.script = tfmdata.script or 'dflt'
+ local trace = fonts.otf.trace_features or fonts.otf.trace_set_features
+ if not tfmdata.language then tfmdata.language = 'dflt' end
+ if not tfmdata.script then tfmdata.script = 'dflt' end
if not table.is_empty(features) then
local gposlist = otfdata.luatex.gposfeatures
local gsublist = otfdata.luatex.gsubfeatures
local mode = tfmdata.mode or fonts.mode
local fi = fonts.initializers[mode]
- if fi and fi.otf then
- local function initialize(list) -- using tex lig and kerning
- if list then
- for _, f in ipairs(list) do
- local value = features[f]
- if value and fi.otf[f] then -- brr
- if fonts.otf.trace_features then
- logs.report("define otf",string.format("initializing feature %s to %s for mode %s for font %s",f,tostring(value),mode or 'unknown', tfmdata.fullname or 'unknown'))
+ if fi then -- todo: delay initialization for mode 'node'
+ local fiotf = fi.otf
+ if fiotf then
+ local done = { }
+ local function initialize(list) -- using tex lig and kerning
+ if list then
+ for i=1,#list do
+ local f = list[i]
+ local value = features[f]
+ if value and fiotf[f] then -- brr
+ if not done[f] then -- so, we can move some to triggers
+ if trace then
+ logs.report("define otf",string.format("initializing feature %s to %s for mode %s for font %s",f,tostring(value),mode or 'unknown', tfmdata.fullname or 'unknown'))
+ end
+ fiotf[f](tfmdata,value) -- can set mode (no need to pass otf)
+ mode = tfmdata.mode or fonts.mode -- keep this, mode can be set local !
+ fi = fonts.initializers[mode]
+ fiotf = fi.otf
+ done[f] = true
+ end
end
- fi.otf[f](tfmdata,value) -- can set mode (no need to pass otf)
- mode = tfmdata.mode or fonts.mode
- fi = fonts.initializers[mode]
end
end
end
+ initialize(fonts.triggers)
+ initialize(gsublist)
+ initialize(gposlist)
end
- initialize(fonts.triggers)
- initialize(gsublist)
- initialize(gposlist)
end
local fm = fonts.methods[mode]
- if fm and fm.otf then
- local function register(list) -- node manipulations
- if list then
- for _, f in ipairs(list) do
- if features[f] and fm.otf[f] then -- brr
- if fonts.otf.trace_features then
- logs.report("define otf",string.format("installing feature handler %s for mode %s for font %s",f,mode or 'unknown', tfmdata.fullname or 'unknown'))
- end
- if not shared.processors then -- maybe also predefine
- shared.processors = { fm.otf[f] }
- else
- shared.processors[#shared.processors+1] = fm.otf[f]
+ if fm then
+ local fmotf = fm.otf
+ local sp = shared.processors
+ if fmotf then
+ local function register(list) -- node manipulations
+ if list then
+ for i=1,#list do
+ local f = list[i]
+ if features[f] and fmotf[f] then -- brr
+ if trace then
+ logs.report("define otf",string.format("installing feature handler %s for mode %s for font %s",f,mode or 'unknown', tfmdata.fullname or 'unknown'))
+ end
+ sp[#sp+1] = fmotf[f]
end
end
end
end
+ register(fonts.triggers)
+ register(gsublist)
+ register(gposlist)
end
- register(fonts.triggers)
- register(gsublist)
- register(gposlist)
end
end
end
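
The reworked set_features guards each feature with a done table and re-fetches the initializer table after every call, because an initializer is allowed to switch the font mode. A compact sketch of that dispatch pattern, with invented feature names and initializers:

local initializers = {
    base = { tlig = function(fd,v) print("base tlig", v) end,
             mode = function(fd,v) fd.mode = v end },          -- switches the mode
    node = { liga = function(fd,v) print("node liga", v) end },
}

local function set_features(fontdata, features, lists)
    local mode = fontdata.mode or "base"
    local fi   = initializers[mode]
    local done = { }
    for _, list in ipairs(lists) do
        for i=1,#list do
            local f     = list[i]
            local value = features[f]
            if value and fi[f] and not done[f] then
                fi[f](fontdata, value)
                mode = fontdata.mode or mode -- may have been changed by the call
                fi   = initializers[mode]
                done[f] = true
            end
        end
    end
end

local fontdata = { mode = "base" }
set_features(fontdata,
    { tlig = true, mode = "node", liga = true },
    { { "tlig", "mode" }, { "liga", "tlig" } })
-- prints "base tlig true" and, after the mode switch, "node liga true";
-- the second occurrence of tlig is skipped because of the done table
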
@@ -1245,12 +1347,33 @@ function fonts.otf.otf_to_tfm(specification)
if not tfmdata then
local otfdata = fonts.otf.load(filename,format,sub,features and features.featurefile)
if not table.is_empty(otfdata) then
+if true then
+ otfdata._shared_ = otfdata._shared_ or { -- aggressive sharing
+ processes = { },
+ lookuptable = { },
+ featuredata = { },
+ featurecache = { },
+ }
+end
tfmdata = fonts.otf.copy_to_tfm(otfdata)
if not table.is_empty(tfmdata) then
- tfmdata.shared = tfmdata.shared or { }
tfmdata.unique = tfmdata.unique or { }
- tfmdata.shared.otfdata = otfdata
- tfmdata.shared.features = features
+ tfmdata.shared = tfmdata.shared or { } -- combine
+ local shared = tfmdata.shared
+ shared.otfdata = otfdata
+ shared.features = features
+ shared.processors = { }
+ shared.dynamics = { }
+ shared.processes = { }
+ shared.lookuptable = { }
+ shared.featuredata = { }
+ shared.featurecache = { }
+ if otfdata._shared_ then
+ shared.processes = otfdata._shared_.processes
+ shared.lookuptable = otfdata._shared_.lookuptable
+ shared.featuredata = otfdata._shared_.featuredata
+ shared.featurecache = otfdata._shared_.featurecache
+ end
fonts.otf.set_features(tfmdata)
end
end
@@ -1264,21 +1387,24 @@ function fonts.otf.features.prepare_base_kerns(tfmdata,kind,value) -- todo what
local otfdata = tfmdata.shared.otfdata
local charlist = otfdata.glyphs
local unicodes = otfdata.luatex.unicodes
- local somevalid = fonts.otf.some_valid_feature(otfdata,tfmdata.language,tfmdata.script,kind)
+ local somevalid = fonts.otf.some_valid_feature(otfdata,kind,tfmdata.script,tfmdata.language)
for _, chr in pairs(tfmdata.characters) do
local d = charlist[chr.index]
- if d and d.kerns then
- local t, done = chr.kerns or { }, false
- for _, v in pairs(d.kerns) do
- if somevalid[v.lookup] then
- local k = unicodes[v.char]
- if k > 0 then
- t[k], done = v.off, true
+ if d then
+ local dk = d.kerns
+ if dk then
+ local t, done = chr.kerns or { }, false
+ for _, v in pairs(dk) do
+ if somevalid[v.lookup] then
+ local k = unicodes[v.char]
+ if k > 0 then
+ t[k], done = v.off, true
+ end
end
end
- end
- if done then
- chr.kerns = t
+ if done then
+ chr.kerns = t -- no empty assignments
+ end
end
end
end
@@ -1290,25 +1416,32 @@ function fonts.otf.copy_to_tfm(data)
local tfm = { characters = { }, parameters = { } }
local unicodes = data.luatex.unicodes
local characters = tfm.characters
+ local parameters = tfm.parameters
local force = fonts.otf.notdef
+ local zerobox = { 0, 0, 0, 0 }
+ local glyphs = data.glyphs
for k,v in pairs(data.map.map) do
-- k = unicode, v = slot
- local d = data.glyphs[v]
- if d and (force or d.name) then
- local t = {
- index = v,
- unicode = k,
- name = d.name or ".notdef",
- boundingbox = d.boundingbox or nil,
- width = d.width or 0,
- height = d.boundingbox[4] or 0,
- depth = - d.boundingbox[2] or 0,
- class = d.class,
- }
- if d.class == "mark" then
- t.width = - t.width
- end
- characters[k] = t
+ local d = glyphs[v]
+ if d then
+ local name = d.name
+ if force or name then
+ local b = d.boundingbox or zerobox
+ local w = d.width or 0
+ if d.class == "mark" then
+ w = - w
+ end
+ characters[k] = {
+ index = v,
+ unicode = k,
+ name = name or ".notdef",
+ boundingbox = b,
+ width = w,
+ height = b[4],
+ depth = - b[2],
+ class = d.class,
+ }
+ end
end
end
local designsize = data.designsize or data.design_size or 100
@@ -1319,7 +1452,7 @@ function fonts.otf.copy_to_tfm(data)
tfm.units = data.units_per_em or 1000
-- we need a runtime lookup because of running from cdrom or zip, brrr
tfm.filename = input.findbinfile(texmf.instance,data.luatex.filename,"") or data.luatex.filename
- tfm.fullname = data.fullname or data.fontname
+ tfm.fullname = data.fontname or data.fullname
tfm.encodingbytes = 2
tfm.cidinfo = data.cidinfo
tfm.cidinfo.registry = tfm.cidinfo.registry or ""
@@ -1359,13 +1492,13 @@ function fonts.otf.copy_to_tfm(data)
end
end
spaceunits = tonumber(spaceunits) or tfm.units/2 -- 500 -- brrr
- tfm.parameters[1] = 0 -- slant
- tfm.parameters[2] = spaceunits -- space
- tfm.parameters[3] = tfm.units/2 -- 500 -- space_stretch
- tfm.parameters[4] = 2*tfm.units/3 -- 333 -- space_shrink
- tfm.parameters[5] = 4*tfm.units/5 -- 400 -- x_height
- tfm.parameters[6] = tfm.units -- 1000 -- quad
- tfm.parameters[7] = 0 -- extra_space (todo)
+ parameters[1] = 0 -- slant
+ parameters[2] = spaceunits -- space
+ parameters[3] = tfm.units/2 -- 500 -- space_stretch
+ parameters[4] = 2*tfm.units/3 -- 333 -- space_shrink
+ parameters[5] = 4*tfm.units/5 -- 400 -- x_height
+ parameters[6] = tfm.units -- 1000 -- quad
+ parameters[7] = 0 -- extra_space (todo)
if spaceunits < 2*tfm.units/5 then
-- todo: warning
end
@@ -1373,21 +1506,21 @@ function fonts.otf.copy_to_tfm(data)
tfm.ascender = math.abs(data.ascent or 0)
tfm.descender = math.abs(data.descent or 0)
if data.italicangle then -- maybe also in afm _
- tfm.parameters[1] = tfm.parameters[1] - math.round(math.tan(data.italicangle*math.pi/180))
+ parameters[1] = parameters[1] - math.round(math.tan(data.italicangle*math.pi/180))
end
if data.isfixedpitch then
- tfm.parameters[3] = 0
- tfm.parameters[4] = 0
+ parameters[3] = 0
+ parameters[4] = 0
elseif fonts.otf.syncspace then --
- tfm.parameters[3] = spaceunits/2 -- space_stretch
- tfm.parameters[4] = spaceunits/3 -- space_shrink
+ parameters[3] = spaceunits/2 -- space_stretch
+ parameters[4] = spaceunits/3 -- space_shrink
end
if data.pfminfo and data.pfminfo.os2_xheight and data.pfminfo.os2_xheight > 0 then
- tfm.parameters[5] = data.pfminfo.os2_xheight
+ parameters[5] = data.pfminfo.os2_xheight
else
local x = characters[unicodes['x']]
if x then
- tfm.parameters[5] = x.height
+ parameters[5] = x.height
end
end
-- [6]
@@ -1421,72 +1554,111 @@ function fonts.tfm.read_from_open_type(specification)
return tfmtable
end
+function fonts.otf.analyze_only(otfdata)
+ local analyze = fonts.otf.analyze_features
+ return analyze(otfdata.gpos), analyze(otfdata.gsub)
+end
+
+local a_to_script = { }
+local a_to_language = { }
+
+do
+
+ local context_setups = fonts.define.specify.context_setups
+ local context_numbers = fonts.define.specify.context_numbers
+
+ function fonts.otf.set_dynamics(tfmdata,attribute,features) --currently experimental and slow / hackery
+ local shared = tfmdata.shared
+ local dynamics = shared.dynamics
+ if dynamics then
+ features = features or context_setups[context_numbers[attribute]]
+ if features then
+ local script = features.script or 'dflt'
+ local language = features.language or 'dflt'
+ local ds = dynamics[script]
+ if not ds then
+ ds = { }
+ dynamics[script] = ds
+ end
+ local dsl = ds[language]
+ if not dsl then
+ dsl = { }
+ ds[language] = dsl
+ end
+ local dsla = dsl[attribute]
+ if dsla then
+ return dsla
+ else
+ a_to_script [attribute] = script
+ a_to_language[attribute] = language
+ dsla = { }
+ local otfdata = shared.otfdata
+ local methods = fonts.methods.node.otf
+ local initializers = fonts.initializers.node.otf
+ local gposfeatures, gsubfeatures = fonts.otf.analyze_only(otfdata,features)
+ local default = fonts.otf.features.default
+ local function register(list)
+ if list then
+ for i=1,#list do
+ local f = list[i]
+ local value = features[f] or default[f]
+ if value then
+ local i, m = initializers[f], methods[f]
+ if i then
+ i(tfmdata,value)
+ end
+ if m then
+ dsla[#dsla+1] = m
+ end
+ end
+ end
+ end
+ end
+ register(fonts.triggers)
+ register(gsubfeatures)
+ register(gposfeatures)
+ dynamics[script][language][attribute] = dsla
+ return dsla
+ end
+ end
+ end
+ return { } -- todo: false
+ end
+
+end
+
-- scripts
fonts.otf.default_language = 'latn'
fonts.otf.default_script = 'dflt'
---~ function fonts.otf.valid_feature(otfdata,language,script) -- return hash is faster
---~ local language = language or fonts.otf.default_language
---~ local script = script or fonts.otf.default_script
---~ if not (script and language) then
---~ return boolean.alwaystrue
---~ else
---~ language = string.padd(language:lower(),4)
---~ script = string.padd(script:lower (),4)
---~ local t = { }
---~ for k,v in pairs(otfdata.luatex.subtables) do
---~ local vv = v[script]
---~ if vv and vv[language] then
---~ t[k] = vv[language].valid
---~ end
---~ end
---~ local always = otfdata.luatex.always_valid -- for the moment not per feature
---~ --~ return function(kind,tag) -- is the kind test needed
---~ --~ return always[tag] or (kind and t[kind] and t[kind][tag])
---~ --~ end
---~ return function(kind,tag) -- better inline
---~ return always[tag] or (t[kind] and t[kind][tag])
---~ end
---~ end
---~ end
-
-function fonts.otf.valid_feature(otfdata,language,script,feature) -- return hash is faster
- local language = language or fonts.otf.default_language
+function fonts.otf.valid_feature(otfdata,kind,script,language) -- return hash is faster
local script = script or fonts.otf.default_script
+ local language = language or fonts.otf.default_language
if not (script and language) then
return true
else
- language = string.padd(language:lower(),4)
- script = string.padd(script:lower (),4)
---~ local t = { }
---~ for k,v in pairs(otfdata.luatex.subtables) do
---~ local vv = v[script]
---~ if vv and vv[language] then
---~ t[k] = vv[language].valid
---~ end
---~ end
- local ft = otfdata.luatex.subtables[feature]
+ script, language = script:lower(), language:lower() -- will go away, we will lowercase values
+ local ft = otfdata.luatex.subtables[kind]
local st = ft[script]
return false, otfdata.luatex.always_valid, st and st[language] and st[language].valid
end
end
-function fonts.otf.some_valid_feature(otfdata,language,script,kind)
- local language = language or fonts.otf.default_language
+function fonts.otf.some_valid_feature(otfdata,kind,script,language)
local script = script or fonts.otf.default_script
+ local language = language or fonts.otf.default_language
if not (script and language) then
return boolean.alwaystrue
else
- language = string.padd(language:lower(),4)
- script = string.padd(script:lower (),4)
+ script, language = script:lower(), language:lower() -- will go away, we will lowercase values
local t = otfdata.luatex.subtables[kind]
if t and t[script] and t[script][language] and t[script][language].valid then
return t[script][language].valid
else
return { }
end
---~ return (t and t[script][language] and t[script][language].valid) or { }
+ -- return (t and t[script] and t[script][language] and t[script][language].valid) or { }
end
end
@@ -1497,6 +1669,7 @@ function fonts.otf.features.aux.resolve_ligatures(tfmdata,ligatures,kind)
local changed = tfmdata.changed or { }
local done = { }
kind = kind or "unknown"
+ local trace = fonts.otf.trace_features
while true do
local ok = false
for k,v in pairs(ligatures) do
@@ -1507,18 +1680,22 @@ function fonts.otf.features.aux.resolve_ligatures(tfmdata,ligatures,kind)
local c, f, s = chars[v[2]], ligs[1], ligs[2]
local uf, us = unicodes[f], unicodes[s]
if changed[uf] or changed[us] then
- if fonts.otf.trace_features then
+ if trace then
logs.report("define otf",string.format("%s: %s (%s) + %s (%s) ignored",kind,f,uf,s,us))
end
else
local first, second = chars[uf], us
if first and second then
- if not first.ligatures then first.ligatures = { } end
- first.ligatures[second] = {
+ local t = first.ligatures
+ if not t then
+ t = { }
+ first.ligatures = t
+ end
+ t[second] = {
char = unicodes[c.name],
type = 0
}
- if fonts.otf.trace_features then
+ if trace then
logs.report("define otf",string.format("%s: %s (%s) + %s (%s) = %s (%s)",kind,f,uf,s,us,c.name,unicodes[c.name]))
end
end
@@ -1549,15 +1726,15 @@ function fonts.otf.features.prepare_base_substitutions(tfmdata,kind,value) -- we
local unicodes = otfdata.luatex.unicodes
local trace = fonts.otf.trace_features
local chars = tfmdata.characters
- local somevalid = fonts.otf.some_valid_feature(otfdata,tfmdata.language,tfmdata.script,kind)
+ local somevalid = fonts.otf.some_valid_feature(otfdata,kind,tfmdata.script,tfmdata.language)
tfmdata.changed = tfmdata.changed or { }
local changed = tfmdata.changed
+ local glyphs = otfdata.glyphs
for k,c in pairs(chars) do
- local o = otfdata.glyphs[c.index]
+ local o = glyphs[c.index]
if o and o.lookups then
for lookup,ps in pairs(o.lookups) do
---~ if valid(kind,lookup) then -- can be optimized for #p = 1
-if somevalid[lookup] then -- can be optimized for #p = 1
+ if somevalid[lookup] then
for i=1,#ps do
local p = ps[i]
local t = p.type
@@ -1578,7 +1755,7 @@ if somevalid[lookup] then -- can be optimized for #p = 1
end
elseif t == 'alternate' then
local pa = p.specification if pa and pa.components then
- local pc = pa.components:match("(%S+)")
+ local pc = pa.components:match("([^ ]+)")
if pc then
local upc = unicodes[pc]
if upc and chars[upc] then
@@ -1592,11 +1769,14 @@ if somevalid[lookup] then -- can be optimized for #p = 1
end
elseif t == 'ligature' and not changed[k] then
local pl = p.specification
- if pl and pl.components then
- if trace then
- logs.report("define otf",string.format("%s: %s => %s (%s)",kind,pl.components,chars[k].name,k))
+ if pl then
+ local plc = pl.components
+ if plc then
+ if trace then
+ logs.report("define otf",string.format("%s: %s => %s (%s)",kind,plc,chars[k].name,k))
+ end
+ ligatures[#ligatures+1] = { plc, k }
end
- ligatures[#ligatures+1] = { pl.components, k }
end
end
end
@@ -1642,7 +1822,7 @@ fonts.otf.features.data.tex = {
--~ 0x201D 0x2019 0x2019
--~ 0x201E 0X002C 0x002C
-function fonts.initializers.base.otf.texligatures(tfm,value)
+function fonts.initializers.base.otf.tlig(tfm,value)
local otfdata = tfm.shared.otfdata
local unicodes = otfdata.luatex.unicodes
local ligatures = { }
@@ -1656,21 +1836,20 @@ function fonts.initializers.base.otf.texligatures(tfm,value)
ligatures[#ligatures+1] = { v[2], v[1] }
end
end
- fonts.otf.features.aux.resolve_ligatures(tfm,ligatures)
+ fonts.otf.features.aux.resolve_ligatures(tfm,ligatures,'tlig')
end
-function fonts.initializers.base.otf.texquotes(tfm,value)
+function fonts.initializers.base.otf.trep(tfm,value)
tfm.characters[0x0022] = table.fastcopy(tfm.characters[0x201D])
tfm.characters[0x0027] = table.fastcopy(tfm.characters[0x2019])
tfm.characters[0x0060] = table.fastcopy(tfm.characters[0x2018])
end
-fonts.initializers.base.otf.trep = fonts.initializers.base.otf.texquotes
-fonts.initializers.base.otf.tlig = fonts.initializers.base.otf.texligatures
-
-table.insert(fonts.triggers,"texquotes")
-table.insert(fonts.triggers,"texligatures")
table.insert(fonts.triggers,"tlig")
+table.insert(fonts.triggers,"trep")
+
+fonts.define.specify.synonyms["texquotes"] = "trep"
+fonts.define.specify.synonyms["texligatures"] = "tlig"
-- Here comes the real thing ... node processing! The next section prepares
-- things. The main features (unchained by rules) have their own caches,
@@ -1680,37 +1859,37 @@ do
fonts.otf.features.prepare = { }
- -- also share vars
-
- function fonts.otf.features.prepare.feature(tfmdata,kind,value) -- check BASE VS NODE
+ function fonts.otf.features.prepare.feature(tfmdata,kind,value)
if value then
- tfmdata.unique = tfmdata.unique or { }
- tfmdata.shared = tfmdata.shared or { }
+ local language, script = tfmdata.language or "dflt", tfmdata.script or "dflt"
local shared = tfmdata.shared
- shared.featuredata = shared.featuredata or { }
- shared.featuredata[kind] = shared.featuredata[kind] or { }
- shared.featurecache = shared.featurecache or { }
- shared.featurecache[kind] = false -- signal
local otfdata = shared.otfdata
- local lookuptable = fonts.otf.valid_subtable(otfdata,tfmdata.language,tfmdata.script,kind)
- shared.lookuptable = shared.lookuptable or { }
- shared.lookuptable[kind] = lookuptable
+ local lookuptable = fonts.otf.valid_subtable(otfdata,kind,script,language)
if lookuptable then
- shared.processes = shared.processes or { }
- shared.processes[kind] = shared.processes[kind] or { }
- local processes = shared.processes[kind]
- local types = otfdata.luatex.name_to_type
- local flags = otfdata.luatex.ignore_flags
- local preparers = fonts.otf.features.prepare
- local process = fonts.otf.features.process
- for noflookups, lookupname in ipairs(lookuptable) do
- local lookuptype = types[lookupname]
- local prepare = preparers[lookuptype]
- if prepare then
- local processdata = prepare(tfmdata,kind,lookupname)
- if processdata then
- local processflags = flags[lookupname] or {false,false,false}
- processes[#processes+1] = { process[lookuptype], lookupname, processdata, processflags }
+ local fullkind = kind .. script .. language
+ if not shared.lookuptable [fullkind] then
+ --~ print(tfmdata,file.basename(tfmdata.fullname or ""),kind,script,language,lookuptable,fullkind)
+ local processes = { }
+ -- featuredata and featurecache are indexed by lookup so we can share them
+ shared.featuredata [kind] = shared.featuredata [kind] or { }
+ shared.featurecache[kind] = shared.featurecache[kind] or false -- signal
+ shared.lookuptable [fullkind] = lookuptable
+ shared.processes [fullkind] = processes
+ local types = otfdata.luatex.name_to_type
+ local flags = otfdata.luatex.ignore_flags
+ local preparers = fonts.otf.features.prepare
+ local process = fonts.otf.features.process
+ local falsetable = { false, false, false }
+ for i=1,#lookuptable do
+ local lookupname = lookuptable[i]
+ local lookuptype = types[lookupname]
+ local prepare = preparers[lookuptype]
+ if prepare then
+ local processdata = prepare(tfmdata,kind,lookupname)
+ if processdata then
+ local processflags = flags[lookupname] or falsetable --- share false table
+ processes[#processes+1] = { process[lookuptype], lookupname, processdata, processflags }
+ end
end
end
end
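
A minimal sketch of the cache layout introduced above, assuming kind "liga", script "latn" and language "dflt": feature data is shared per feature tag, while lookup tables and process lists are keyed by the concatenated feature..script..language string.

local shared = {
    featuredata  = { },   -- featuredata ["liga"]          : shared per feature
    featurecache = { },   -- featurecache["liga"]          : shared per feature
    lookuptable  = { },   -- lookuptable ["ligalatndflt"]  : per feature+script+language
    processes    = { },   -- processes   ["ligalatndflt"]  : per feature+script+language
}

local kind, script, language = "liga", "latn", "dflt"
local fullkind = kind .. script .. language               -- "ligalatndflt"

if not shared.lookuptable[fullkind] then
    shared.featuredata [kind]     = shared.featuredata [kind] or { }
    shared.featurecache[kind]     = shared.featurecache[kind] or false   -- signal
    shared.lookuptable [fullkind] = { "liga lookup 1" }   -- invented lookup name
    shared.processes   [fullkind] = { }                   -- filled by the preparers
end
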
@@ -1721,7 +1900,9 @@ do
-- helper: todo, we don't need to store non local ones for chains so we can pass the
-- validator as parameter
- function fonts.otf.features.collect_ligatures(tfmdata,kind,internal) -- ligs are spread all over the place
+ local pairs = pairs
+
+ function fonts.otf.features.collect_ligatures(tfmdata,kind) -- ligs are spread all over the place
local otfdata = tfmdata.shared.otfdata
local unicodes = tfmdata.shared.otfdata.luatex.unicodes -- actually the char index is ok too
local trace = fonts.otf.trace_features
@@ -1739,7 +1920,7 @@ do
ligatures[lookup] = t
end
local first = true
- for s in p.specification.components:gmatch("(%S+)") do
+ for s in p.specification.components:gmatch("[^ ]+") do
local u = unicodes[s]
if first then
if not t[u] then
@@ -1748,44 +1929,27 @@ do
t = t[u]
first = false
else
- if not t[1][u] then
- t[1][u] = { { } }
+ local t1 = t[1]
+ if not t1[u] then
+ t1[u] = { { } }
end
- t = t[1][u]
+ t = t1[u]
end
end
t[2] = o.unicodeenc
end
end
end
- if internal then
- local always = otfdata.luatex.always_valid
- for _,o in pairs(otfdata.glyphs) do
- if o.lookups then
- for lookup, ps in pairs(o.lookups) do
- if always[lookup] then
- collect(lookup,o,ps)
- end
- end
- end
- end
- else -- check if this valid is still ok
---~ local valid = fonts.otf.valid_feature(otfdata,tfmdata.language,tfmdata.script)
- local forced, always, okay = fonts.otf.valid_feature(otfdata,tfmdata.language,tfmdata.script,kind)
- for _,o in pairs(otfdata.glyphs) do
- if o.lookups then
---~ for lookup, ps in pairs(o.lookups) do
---~ if valid(kind,lookup) then
---~ collect(lookup,o,ps)
---~ end
---~ end
- if forced then
- for lookup, ps in pairs(o.lookups) do collect(lookup,o,ps) end
- elseif okay then
- for lookup, ps in pairs(o.lookups) do if always[lookup] or okay[lookup] then collect(lookup,o,ps) end end
- else
- for lookup, ps in pairs(o.lookups) do if always[lookup] then collect(lookup,o,ps) end end
- end
+ local forced, always, okay = fonts.otf.valid_feature(otfdata,kind,tfmdata.script,tfmdata.language)
+ for _,o in pairs(otfdata.glyphs) do
+ local lookups = o.lookups
+ if lookups then
+ if forced then
+ for lookup, ps in pairs(lookups) do collect(lookup,o,ps) end
+ elseif okay then
+ for lookup, ps in pairs(lookups) do if always[lookup] or okay[lookup] then collect(lookup,o,ps) end end
+ else
+ for lookup, ps in pairs(lookups) do if always[lookup] then collect(lookup,o,ps) end end
end
end
end
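
The nested tables built by collect_ligatures above form a small trie over the component glyphs; a sketch of its shape for an f+f+i ligature (the slot numbers are real Unicode values, the lookup name is invented):

local U_f, U_i, U_ffi = 0x66, 0x69, 0xFB03

local ligatures = { }                      -- indexed by lookup name
local t = { }
ligatures["liga lookup 1"] = t             -- invented lookup name

t[U_f] = { { } }                           -- first component
t[U_f][1][U_f] = { { } }                   -- second component hangs under slot 1
t[U_f][1][U_f][1][U_i] = { { } }           -- third component
t[U_f][1][U_f][1][U_i][2] = U_ffi          -- slot 2 carries the resulting ligature

-- gsub_ligature walks this while scanning glyph nodes:
--   ligatures = ligatures[start.char]     -- first char
--   lg        = ligatures[1][s.char]      -- each following char
--   ligatures[2]                          -- the ligature to substitute, if any
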
@@ -1855,7 +2019,8 @@ do
if p.specification and p.type == 'multiple' then
local old, new = o.unicodeenc, { }
substitutions[old] = new
- for pc in p.specification.components:gmatch("(%S+)") do
+ -- for pc in p.specification.components:gmatch("([^ ]+)") do
+ for pc in p.specification.components:gmatch("[^ ]+") do
new[#new+1] = unicodes[pc]
end
if trace then
@@ -1891,7 +2056,8 @@ do
if p.specification and p.type == 'alternate' then
local old = o.unicodeenc
local t = { }
- for pc in p.specification.components:gmatch("(%S+)") do
+ -- for pc in p.specification.components:gmatch("([^ ]+)") do
+ for pc in p.specification.components:gmatch("[^ ]+") do
t[#t+1] = unicodes[pc]
end
substitutions[old] = t
@@ -1921,7 +2087,7 @@ do
end
function fonts.otf.features.prepare.contextchain(tfmdata,kind,lookupname)
- local featuredata = tfmdata.shared.featuredata[kind]
+ local featuredata = tfmdata.shared.featuredata[kind]
local contexts = featuredata[lookupname]
if not contexts then
featuredata[lookupname] = { }
@@ -1932,16 +2098,18 @@ do
local flags = otfdata.luatex.ignore_flags
local types = otfdata.luatex.name_to_type
otfdata.luatex.covers = otfdata.luatex.covers or { }
- local cache = otfdata.luatex.covers
local characters = tfmdata.characters
+ local cache = otfdata.luatex.covers
local function uncover(covers)
+ -- lpeg hardly faster (.005 sec on mk)
if covers then
local result = { }
- for n, c in ipairs(covers) do
+ for n=1,#covers do
+ local c = covers[n]
local cc = cache[c]
if not cc then
local t = { }
- for s in c:gmatch("(%S+)") do
+ for s in c:gmatch("[^ ]+") do
t[unicodes[s]] = true
end
cache[c] = t
@@ -1959,7 +2127,9 @@ do
if not lookupdata then
logs.error("otf process", string.format("missing lookupdata table %s",lookupname))
elseif lookupdata.rules then
- for nofrules, rule in ipairs(lookupdata.rules) do
+ local rules = lookupdata.rules
+ for nofrules=1,#rules do
+ local rule = rules[nofrules]
local coverage = rule.coverage
if coverage and coverage.current then
local current = uncover(coverage.current)
@@ -2016,9 +2186,11 @@ do
local validanchors = { }
local glyphs = otfdata.glyphs
if otfdata.anchor_classes then
- for k,v in ipairs(otfdata.anchor_classes) do
- if v.lookup == lookupname then
- validanchors[v.name] = true
+ local classes = otfdata.anchor_classes
+ for k=1,#classes do
+ local class = classes[k]
+ if class.lookup == lookupname then
+ validanchors[class.name] = true
end
end
end
@@ -2067,6 +2239,7 @@ do
local featuredata = tfmdata.shared.featuredata[kind]
local kerns = featuredata[lookupname]
if not kerns then
+ local trace = fonts.otf.trace_features
featuredata[lookupname] = { }
kerns = featuredata[lookupname]
local otfdata = tfmdata.shared.otfdata
@@ -2087,25 +2260,27 @@ do
else
kerns[one] = { [two] = off }
end
- if fonts.otf.trace_features then
+ if trace then
logs.report("define otf",string.format("feature %s kern pair %s - %s",kind,one,two))
end
end
end
elseif o.kerns then
local one = o.unicodeenc
- for _, l in ipairs(o.kerns) do
- if l.lookup == lookupname then
- local char = l.char
+ local okerns = o.kerns
+ for ok=1,#okerns do
+ local k = okerns[ok]
+ if k.lookup == lookupname then
+ local char = k.char
if char then
local two = unicodes[char]
local krn = kerns[one]
if krn then
- krn[two] = l.off
+ krn[two] = k.off
else
- kerns[one] = { two = l.off }
+ kerns[one] = { [two] = k.off }
end
- if fonts.otf.trace_features then
+ if trace then
logs.report("define otf",string.format("feature %s kern pair %s - %s",kind,one,two))
end
end
@@ -2128,7 +2303,7 @@ do
else
kerns[one] = { two = specification.offsets }
end
- if fonts.otf.trace_features then
+ if trace then
logs.report("define otf",string.format("feature %s kern pair %s - %s",kind,one,two))
end
end
@@ -2232,11 +2407,11 @@ end
do
+ -- todo: use nodes helpers
+
local glyph = node.id('glyph')
local glue = node.id('glue')
- local kern_node = node.new("kern")
- local glue_node = node.new("glue")
- local glyph_node = node.new("glyph")
+ local disc = node.id('disc')
local fontdata = fonts.tfm.id
local has_attribute = node.has_attribute
@@ -2245,10 +2420,11 @@ do
local marknumber = attributes.numbers['mark'] or 200
local format = string.format
local report = logs.report
+ local scale = tex.scale
fonts.otf.features.process = { }
- -- we share aome vars here, after all, we have no nested lookups and
+ -- we share some vars here, after all, we have no nested lookups and
-- less code
local tfmdata = false
@@ -2258,22 +2434,38 @@ do
local glyphs = false
local currentfont = false
- function fonts.otf.features.process.feature(head,font,kind,attribute)
+ -- we cheat a bit and assume that a font,attr combination are kind of ranged
+
+ local context_setups = fonts.define.specify.context_setups
+ local context_numbers = fonts.define.specify.context_numbers
+
+ function fonts.otf.features.process.feature(head,font,attr,kind,attribute)
tfmdata = fontdata[font]
- otfdata = tfmdata.shared.otfdata
+ local shared = tfmdata.shared
+ otfdata = shared.otfdata
characters = tfmdata.characters
marks = otfdata.luatex.marks
glyphs = otfdata.glyphs
currentfont = font
- local lookuptable = tfmdata.shared.lookuptable[kind]
+ local script, language
+ if attr and attr > 0 then
+ local features = context_setups[context_numbers[attr]]
+ language, script = features.language or "dflt", features.script or "dflt"
+ else
+ language, script = tfmdata.language or "dflt", tfmdata.script or "dflt"
+ end
+ local fullkind = kind .. script .. language
+ local lookuptable = shared.lookuptable[fullkind]
if lookuptable then
local types = otfdata.luatex.name_to_type
local start, done, ok = head, false, false
- local processes = tfmdata.shared.processes[kind]
+ local processes = shared.processes[fullkind]
if #processes == 1 then
local p = processes[1]
- while start do
- if start.id == glyph and start.font == font and (not attribute or has_attribute(start,state,attribute)) then
+ while start do -- possibly split this loop
+ if start.id == glyph and start.subtype<256 and start.font == font and
+ (not attr or has_attribute(start,0,attr)) and -- dynamic feature
+ (not attribute or has_attribute(start,state,attribute)) then
-- we can make the p vars also global to this closure
local pp = p[3] -- all lookups
local pc = pp[start.char]
@@ -2290,7 +2482,9 @@ do
end
else
while start do
- if start.id == glyph and start.font == font and (not attribute or has_attribute(start,state,attribute)) then
+ if start.id == glyph and start.subtype<256 and start.font == font and
+ (not attr or has_attribute(start,0,attr)) and -- dynamic feature
+ (not attribute or has_attribute(start,state,attribute)) then
for i=1,#processes do local p = processes[i]
local pp = p[3]
local pc = pp[start.char]
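
A minimal sketch of the dynamic feature resolution used in the two loops above, assuming that context_numbers maps an attribute value to a setup name and that context_setups holds the per-setup script/language (values invented):

local context_numbers = { [3] = "mysetup" }                 -- attr value -> setup name (assumed)
local context_setups  = { mysetup = { script = "arab" } }   -- per-setup script/language

local function resolve(attr, tfmdata)
    if attr and attr > 0 then
        local features = context_setups[context_numbers[attr]]
        return features.language or "dflt", features.script or "dflt"
    else
        return tfmdata.language or "dflt", tfmdata.script or "dflt"
    end
end

print(resolve(3, { }))                                    -- dflt   arab
print(resolve(0, { language = "nld", script = "latn" }))  -- nld    latn
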
@@ -2316,59 +2510,92 @@ do
end
end
- -- todo: components / else subtype 0 / maybe we should be able to force this
+ -- we can assume that languages that use marks are not hyphenated
+ -- we can also assume that at most one discretionary is present
- local function toligature(start,stop,char,markflag)
+ local function toligature(start,stop,char,markflag,discfound) -- brr head
if start ~= stop then
- local deletemarks = markflag ~= "mark"
- start.components = node.copy_list(start,stop)
- node.slide(start.components)
- -- todo: components
- start.subtype = 1
- start.char = char
- local marknum = 1
- local next = start.next
- while true do
- if marks[next.char] then
- if not deletemarks then
- set_attribute(next,marknumber,marknum)
- end
- else
- marknum = marknum + 1
- end
- if next == stop then
- break
- else
- next = next.next
- end
- end
- next = stop.next
- while next do
- if next.id == glyph and next.font == currentfont and marks[next.char] then
- set_attribute(next,marknumber,marknum)
- next = next.next
- else
- break
+ if discfound then
+ local lignode = node.copy(start)
+ lignode.font = start.font
+ lignode.char = char
+ lignode.subtype = 2
+ start = node.do_ligature_n(start, stop, lignode)
+ if start.id == disc then
+ local prev = start.prev
+ start = start.next
end
- end
- local next = start.next
- while true do
- if next == stop or deletemarks or marks[next.char] then
- local crap = next
- next.prev.next = next.next
- if next.next then
- next.next.prev = next.prev
+ else
+ local deletemarks = markflag ~= "mark"
+ start.components = node.copy_list(start,stop)
+ node.slide(start.components)
+ -- todo: components
+ start.subtype = 2
+ start.char = char
+ local marknum = 1
+ local next = start.next
+ while true do
+ if marks[next.char] then
+ if not deletemarks then
+ set_attribute(next,marknumber,marknum)
+ end
+ else
+ marknum = marknum + 1
end
if next == stop then
- stop = crap.prev
- node.free(crap)
break
else
next = next.next
- node.free(crap)
end
- else
- next = next.next
+ end
+ next = stop.next
+ while next do
+ if next.id == glyph and next.font == currentfont and marks[next.char] then
+ set_attribute(next,marknumber,marknum)
+ next = next.next
+ else
+ break
+ end
+ end
+ local next = start.next
+--~ while true do
+--~ if next == stop or deletemarks or marks[next.char] then
+--~ local crap = next
+--~ next.prev.next = next.next
+--~ if next.next then
+--~ next.next.prev = next.prev
+--~ end
+--~ if next == stop then
+--~ stop = crap.prev
+--~ node.free(crap)
+--~ break
+--~ else
+--~ next = next.next
+--~ node.free(crap)
+--~ end
+--~ else
+--~ next = next.next
+--~ end
+--~ end
+ while true do
+ if next == stop or deletemarks or marks[next.char] then
+ local crap = next
+ local np, nn = next.prev, next.next
+ np.next = nn
+ if nn then
+ nn.prev = np
+ end
+ if next == stop then
+ stop = crap.prev
+ node.free(crap)
+ break
+ else
+ next = nn
+ node.free(crap)
+ end
+ else
+ next = next.next
+ end
end
end
end
@@ -2405,11 +2632,12 @@ do
if #multiples > 1 then
for k=2,#multiples do
local n = node.copy(start)
+ local sn = start.next
n.char = multiples[k]
- n.next = start.next
+ n.next = sn
n.prev = start
- if start.next then
- start.next.prev = n
+ if sn then
+ sn.prev = n
end
start.next = n
start = n
@@ -2425,23 +2653,35 @@ do
end
function fonts.otf.features.process.gsub_ligature(start,kind,lookupname,ligatures,alldata,flags)
- local s, stop = start.next, nil
- while s and s.id == glyph and s.subtype == 0 and s.font == currentfont do
- if marks[s.char] then
- s = s.next
- else
- local lg = ligatures[1][s.char]
- if not lg then
- break
+ local s, stop, discfound = start.next, nil, false
+ while s do
+ local id = s.id
+ if id == glyph and s.subtype<256 then
+ if s.font == currentfont then
+ if marks[s.char] then
+ s = s.next
+ else
+ local lg = ligatures[1][s.char]
+ if not lg then
+ break
+ else
+ stop = s
+ ligatures = lg
+ s = s.next
+ end
+ end
else
- stop = s
- ligatures = lg
- s = s.next
+ break
end
+ elseif id == disc then
+ discfound = true
+ s = s.next
+ else
+ break
end
end
if stop and ligatures[2] then
- start = toligature(start,stop,ligatures[2],flags[1])
+ start = toligature(start,stop,ligatures[2],flags[1],discfound)
if fonts.otf.trace_ligatures then
report("process otf",format("%s: inserting ligature %s (%s)",kind,start.char,utf.char(start.char)))
end
@@ -2456,9 +2696,10 @@ do
local bases = baseanchors['basechar']
if bases then
local component = start.next
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
local trace = fonts.otf.trace_anchors
local last, done = start, false
+ local factor = tfmdata.factor
while true do
local markanchors = anchors[component.char]
if markanchors then
@@ -2467,8 +2708,8 @@ do
for anchor,data in pairs(marks) do
local ba = bases[anchor]
if ba then
- local dx = tex.scale(ba.x-data.x, tfmdata.factor)
- local dy = tex.scale(ba.y-data.y, tfmdata.factor)
+ local dx = scale(ba.x-data.x, factor)
+ local dy = scale(ba.y-data.y, factor)
component.xoffset = start.xoffset - dx
component.yoffset = start.yoffset + dy
if trace then
@@ -2485,7 +2726,7 @@ do
--~ if component and component.id == kern then
--~ component = component.next
--~ end
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
-- ok
else
break
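
A worked sketch of the mark-to-base arithmetic above: anchors are stored in font units, factor converts them to scaled points, and the mark is shifted so that its anchor lands on the base anchor (all numbers invented):

local factor      = 10 * 65536 / 1000             -- e.g. a 10pt font with 1000 units per em
local base_anchor = { x = 180, y = 520 }          -- anchor on the base glyph (font units)
local mark_anchor = { x =  60, y = -20 }          -- anchor on the mark glyph (font units)

local function scale(n,m) return math.floor(n*m+0.5) end   -- assumed behaviour of tex.scale

local dx = scale(base_anchor.x - mark_anchor.x, factor)
local dy = scale(base_anchor.y - mark_anchor.y, factor)

-- the patch then positions the mark relative to the base glyph:
--   component.xoffset = start.xoffset - dx
--   component.yoffset = start.yoffset + dy
print(dx, dy)   -- 78643   353894   (scaled points)
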
@@ -2501,9 +2742,10 @@ do
local bases = baseanchors['baselig']
if bases then
local component = start.next
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
local trace = fonts.otf.trace_anchors
local last, done = start, false
+ local factor = tfmdata.factor
while true do
local markanchors = anchors[component.char]
if markanchors then
@@ -2515,8 +2757,8 @@ do
local n = has_attribute(component,marknumber)
local ban = ba[n]
if ban then
- local dx = tex.scale(ban.x-data.x, tfmdata.factor)
- local dy = tex.scale(ban.y-data.y, tfmdata.factor)
+ local dx = scale(ban.x-data.x, factor)
+ local dy = scale(ban.y-data.y, factor)
component.xoffset = start.xoffset - dx
component.yoffset = start.yoffset + dy
if trace then
@@ -2534,7 +2776,7 @@ do
--~ if component and component.id == kern then
--~ component = component.next
--~ end
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
-- ok
else
break
@@ -2551,10 +2793,11 @@ do
local bases = baseanchors['basemark']
if bases then
local component = start.next
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
local baseattr = has_attribute(start,marknumber) or 1
local trace = fonts.otf.trace_anchors
local last, done = start, false
+ local factor = tfmdata.factor
while true do
local markattr = has_attribute(component,marknumber) or 1
if baseattr == markattr then
@@ -2565,12 +2808,12 @@ do
for anchor,data in pairs(marks) do
local ba = bases[anchor]
if ba then
- local dx = tex.scale(ba.x-data.x, tfmdata.factor)
- local dy = tex.scale(ba.y-data.y, tfmdata.factor)
+ local dx = scale(ba.x-data.x, factor)
+ local dy = scale(ba.y-data.y, factor)
component.xoffset = start.xoffset - dx
component.yoffset = start.yoffset + dy
if trace then
- report("process otf",format("%s:%s:%s anchoring mark %s to basemark %s => (%s,%s) => (%s,%s)",kind,anchor,n,start.char,component.char,dx,dy,component.xoffset,component.yoffset))
+ report("process otf",format("%s:%s:%s anchoring mark %s to basemark %s => (%s,%s) => (%s,%s)",kind,anchor,markattr,start.char,component.char,dx,dy,component.xoffset,component.yoffset))
end
done = true
break
@@ -2583,7 +2826,7 @@ do
--~ if component and component.id == kern then
--~ component = component.next
--~ end
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
-- ok
else
break
@@ -2603,11 +2846,12 @@ do
local next, done, x, y, total, t, first = start.next, false, 0, 0, 0, { }, nil
local function finish()
local i = 0
+ local factor = tfmdata.factor
while first do
if characters[first.char].class == 'mark' then
first = first.next
else
- first.yoffset = tex.scale(total, tfmdata.factor)
+ first.yoffset = scale(total, factor)
if first == next then
break
else
@@ -2620,7 +2864,7 @@ do
x, y, total, t, first = 0, 0, 0, { }, nil
end
while next do
- if next.id == glyph and next.font == currentfont then
+ if next.id == glyph and next.subtype<256 and next.font == currentfont then
local nextchar = next.char
if marks[nextchar] then
next = next.next
@@ -2664,7 +2908,9 @@ do
function fonts.otf.features.process.gpos_pair(start,kind,lookupname,basekerns,kerns)
local next, prev, done = start.next, start, false
-- to be optimized
- while next and next.id == glyph and next.font == currentfont do
+ local trace = fonts.otf.trace_kerns
+ local factor = tfmdata.factor
+ while next and next.id == glyph and next.subtype<256 and next.font == currentfont do
if characters[next.char].class == 'mark' then
prev = next
next = next.next
@@ -2675,8 +2921,7 @@ do
elseif type(krn) == "table" then
local a, b = krn[1], krn[2]
if a and a.x then
- local k = node.copy(kern_node)
- k.kern = tex.scale(a.x,fontdata[currentfont].factor) -- tfmdata.factor
+ local k = nodes.kern(scale(a.x,factor))
if b and b.x then
report("otf process","we need to do something with the second kern xoff " .. b.x)
end
@@ -2684,14 +2929,13 @@ do
k.prev = prev
prev.next = k
next.prev = k
- if fonts.otf.trace_kerns then
+ if trace then
-- todo
end
end
else
- -- todo, just start, next = node.insert_before(head,next,nodes.kern(tex.scale(kern,fontdata[currentfont].factor)))
- local k = node.copy(kern_node)
- k.kern = tex.scale(krn,fontdata[currentfont].factor) -- tfmdata.factor
+ -- todo, just start, next = node.insert_before(head,next,nodes.kern(scale(kern,factor)))
+ local k = nodes.kern(scale(krn,factor))
k.next = next
k.prev = prev
prev.next = k
@@ -2759,7 +3003,8 @@ do
local lv = looks[lookups[l]]
if lv then
replacement = { }
- for c in lv[1].specification.components:gmatch("(%S+)") do
+ -- for c in lv[1].specification.components:gmatch("([^ ]+)") do
+ for c in lv[1].specification.components:gmatch("[^ ]+") do
replacement[#replacement+1] = unicodes[c]
end
cacheslot[char] = replacement
@@ -2782,11 +3027,12 @@ do
if #replacement > 1 then
for k=2,#replacement do
local n = node.copy(start)
+ local sn = start.next
n.char = replacement[k]
- n.next = start.next
+ n.next = sn
n.prev = start
- if start.next then
- start.next.prev = n
+ if sn then
+ sn.prev = n
end
start.next = n
start = n
@@ -2809,7 +3055,8 @@ do
local lv = looks[lookups[l]]
if lv then
replacement = { }
- for c in lv[1].specification.components:gmatch("(%S+)") do
+ -- for c in lv[1].specification.components:gmatch("([^ ]+)") do
+ for c in lv[1].specification.components:gmatch("[^ ]+") do
replacement[#replacement+1] = unicodes[c]
end
cacheslot[char] = replacement
@@ -2836,19 +3083,22 @@ do
if lookups then
local featurecache = fontdata[currentfont].shared.featurecache
if not featurecache[kind] then
- featurecache[kind] = fonts.otf.features.collect_ligatures(tfmdata,kind)
- -- to be tested: only collect internal
- -- featurecache[kind] = fonts.otf.features.collect_ligatures(tfmdata,kind,true) --
+ featurecache[kind] = fonts.otf.features.collect_ligatures(tfmdata,kind) -- double cached ?
end
local lookups = otfdata.luatex.internals[lookups[1]].lookups
local ligaturecache = featurecache[kind]
+ local trace = fonts.otf.trace_ligatures
for i=1,#lookups do
local ligatures = ligaturecache[lookups[i]]
if ligatures and ligatures[start.char] then
ligatures = ligatures[start.char]
- local s = start.next
+ local s, discfound = start.next, false
while s do
- if characters[s.char].class == 'mark' then
+ local id = s.id
+ if id == disc then
+ s = s.next
+ discfound = true
+ elseif characters[s.char].class == 'mark' then
s = s.next
else
local lg = ligatures[1][s.char]
@@ -2865,10 +3115,10 @@ do
end
end
if ligatures[2] then
- if fonts.otf.trace_ligatures then
+ if trace then
report("otf chain",format("%s: replacing character %s by ligature",kind,start.char))
end
- return toligature(start,stop,ligatures[2],flags[1])
+ return toligature(start,stop,ligatures[2],flags[1],discfound)
end
break
end
@@ -2879,7 +3129,7 @@ do
function chainprocs.gpos_mark2base(start,stop,kind,lookupname,sequence,lookups)
local component = start.next
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
local char = start.char
local anchortag = sequence[1][char]
if anchortag == true then
@@ -2899,6 +3149,7 @@ do
local trace = fonts.otf.trace_anchors
local last, done = start, false
local baseanchors = glyph.anchors['basechar'][anchortag]
+ local factor = tfmdata.factor
while true do
local nextchar = component.char
local charnext = characters[nextchar]
@@ -2907,8 +3158,8 @@ do
for anchor,data in pairs(markanchors) do
local ba = baseanchors[anchor]
if ba then
- local dx = tex.scale(ba.x-data.x, tfmdata.factor)
- local dy = tex.scale(ba.y-data.y, tfmdata.factor)
+ local dx = scale(ba.x-data.x, factor)
+ local dy = scale(ba.y-data.y, factor)
component.xoffset = start.xoffset - dx
component.yoffset = start.yoffset + dy
if trace then
@@ -2921,7 +3172,7 @@ do
end
last = component
component = component.next
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
-- ok
else
break
@@ -2936,7 +3187,7 @@ do
function chainprocs.gpos_mark2ligature(start,stop,kind,lookupname,sequence,lookups)
local component = start.next
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
local char = start.char
local anchortag = sequence[1][char]
if anchortag == true then
@@ -2957,6 +3208,7 @@ do
local done = false
local last = start
local baseanchors = glyph.anchors['baselig'][anchortag]
+ local factor = tfmdata.factor
while true do
local nextchar = component.char
local charnext = characters[nextchar]
@@ -2968,8 +3220,8 @@ do
local n = has_attribute(component,marknumber)
local ban = ba[n]
if ban then
- local dx = tex.scale(ban.x-data.x, tfmdata.factor)
- local dy = tex.scale(ban.y-data.y, tfmdata.factor)
+ local dx = scale(ban.x-data.x, factor)
+ local dy = scale(ban.y-data.y, factor)
component.xoffset = start.xoffset - dx
component.yoffset = start.yoffset + dy
if trace then
@@ -2983,7 +3235,7 @@ do
end
last = component
component = component.next
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
-- ok
else
break
@@ -2998,7 +3250,7 @@ do
function chainprocs.gpos_mark2mark(start,stop,kind,lookupname,sequence,lookups)
local component = start.next
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
local char = start.char
local anchortag = sequence[1][char]
if anchortag == true then
@@ -3020,6 +3272,7 @@ do
local trace = fonts.otf.trace_anchors
local last, done = false
local baseanchors = glyph.anchors['basemark'][anchortag]
+ local factor = tfmdata.factor
while true do
local nextchar = component.char
local charnext = characters[nextchar]
@@ -3028,8 +3281,8 @@ do
for anchor,data in pairs(markanchors) do
local ba = baseanchors[anchor]
if ba then
- local dx = tex.scale(ba.x-data.x, tfmdata.factor)
- local dy = tex.scale(ba.y-data.y, tfmdata.factor)
+ local dx = scale(ba.x-data.x, factor)
+ local dy = scale(ba.y-data.y, factor)
component.xoffset = start.xoffset - dx
component.yoffset = start.yoffset + dy
if trace then
@@ -3042,7 +3295,7 @@ do
end
last = component
component = component.next
- if component and component.id == glyph and component.font == currentfont and marks[component.char] then
+ if component and component.id == glyph and component.subtype<256 and component.font == currentfont and marks[component.char] then
markattr = has_attribute(component,marknumber)
if baseattr ~= markattr then
break
@@ -3089,15 +3342,22 @@ do
match = sequence[1][start.char]
else -- n = #sequence -> faster
for n=1,#sequence do
- if stop and stop.id == glyph and stop.font == currentfont then
- local char = stop.char
- local class = characters[char].class
- if class == skipmark or class == skipligature or class == skipbase then
- -- skip 'm
- elseif sequence[n][char] then
- if n < #sequence then
- stop = stop.next
+ if stop then
+ local id = stop.id
+ if id == glyph and stop.subtype<256 and stop.font == currentfont then
+ local char = stop.char
+ local class = characters[char].class
+ if class == skipmark or class == skipligature or class == skipbase then
+ -- skip 'm
+ elseif sequence[n][char] then
+ if n < #sequence then
+ stop = stop.next
+ end
+ else
+ match = false break
end
+ elseif id == disc then -- what to do with kerns?
+ stop = stop.next
else
match = false break
end
@@ -3111,11 +3371,12 @@ do
local prev = start.prev
if prev then
if #before == 1 then
- match = prev.id == glyph and prev.font == currentfont and before[1][prev.char]
+ match = prev.id == glyph and prev.subtype<256 and prev.font == currentfont and before[1][prev.char]
else
for n=#before,1 do
if prev then
- if prev.id == glyph and prev.font == currentfont then -- normal char
+ local id = prev.id
+ if id == glyph and prev.subtype<256 and prev.font == currentfont then -- normal char
local char = prev.char
local class = characters[char].class
if class == skipmark or class == skipligature or class == skipbase then
@@ -3123,6 +3384,8 @@ do
elseif not before[n][char] then
match = false break
end
+ elseif id == disc then
+ -- skip 'm
elseif not before[n][32] then
match = false break
end
@@ -3146,11 +3409,12 @@ do
local next = stop.next
if next then
if #after == 1 then
- match = next.id == glyph and next.font == currentfont and after[1][next.char]
+ match = next.id == glyph and next.subtype<256 and next.font == currentfont and after[1][next.char]
else
for n=1,#after do
if next then
- if next.id == glyph and next.font == currentfont then -- normal char
+ local id = next.id
+ if id == glyph and next.subtype<256 and next.font == currentfont then -- normal char
local char = next.char
local class = characters[char].class
if class == skipmark or class == skipligature or class == skipbase then
@@ -3158,6 +3422,8 @@ do
elseif not after[n][char] then
match = false break
end
+ elseif id == disc then
+ -- skip 'm
elseif not after[n][32] then -- brrr
match = false break
end
@@ -3180,6 +3446,7 @@ do
if match then
local trace = fonts.otf.trace_contexts
if trace then
+ local char = start.char
report("otf chain",format("%s: rule %s of %s matches %s times at char %s (%s) lookuptype %s",kind,rule,lookupname,#sequence,char,utf.char(char),lookuptype))
end
if lookups then
@@ -3214,11 +3481,12 @@ do
local prev = start.prev
if prev then
if #after == 1 then
- match = prev.id == glyph and prev.font == currentfont and after[1][prev.char]
+ match = prev.id == glyph and prev.subtype<256 and prev.font == currentfont and after[1][prev.char]
else
for n=1,#after do
if prev then
- if prev.id == glyph and prev.font == currentfont then -- normal char
+ local id = prev.id
+ if id == glyph and prev.subtype<256 and prev.font == currentfont then -- normal char
local char = prev.char
local class = characters[char].class
if class == skipmark or class == skipligature or class == skipbase then
@@ -3226,6 +3494,8 @@ do
elseif not after[n][char] then
match = false break
end
+ elseif id == disc then
+ -- skip 'm
elseif not after[n][32] then
match = false break
end
@@ -3249,11 +3519,12 @@ do
local next = stop.next
if next then
if #after == 1 then
- match = next.id == glyph and next.font == currentfont and before[1][next.char]
+ match = next.id == glyph and next.subtype<256 and next.font == currentfont and before[1][next.char]
else
for n=#before,1 do
if next then
- if next.id == glyph and next.font == currentfont then -- normal char
+ local id = next.id
+ if id == glyph and next.subtype<256 and next.font == currentfont then -- normal char
local char = next.char
local class = characters[char].class
if class == skipmark or class == skipligature or class == skipbase then
@@ -3261,6 +3532,8 @@ do
elseif not before[n][char] then
match = false break
end
+ elseif id == disc then
+ -- skip 'm
elseif not before[n][32] then -- brrr
match = false break
end
@@ -3315,113 +3588,108 @@ do
local process = fonts.otf.features.process.feature
- function fonts.methods.node.otf.aalt(head,font) return process(head,font,'aalt') end
- function fonts.methods.node.otf.afrc(head,font) return process(head,font,'afrc') end
- function fonts.methods.node.otf.akhn(head,font) return process(head,font,'akhn') end
- function fonts.methods.node.otf.c2pc(head,font) return process(head,font,'c2pc') end
- function fonts.methods.node.otf.c2sc(head,font) return process(head,font,'c2sc') end
- function fonts.methods.node.otf.calt(head,font) return process(head,font,'calt') end
- function fonts.methods.node.otf.case(head,font) return process(head,font,'case') end
- function fonts.methods.node.otf.ccmp(head,font) return process(head,font,'ccmp') end
- function fonts.methods.node.otf.clig(head,font) return process(head,font,'clig') end
- function fonts.methods.node.otf.cpsp(head,font) return process(head,font,'cpsp') end
- function fonts.methods.node.otf.cswh(head,font) return process(head,font,'cswh') end
- function fonts.methods.node.otf.curs(head,font) return process(head,font,'curs') end
- function fonts.methods.node.otf.dlig(head,font) return process(head,font,'dlig') end
- function fonts.methods.node.otf.dnom(head,font) return process(head,font,'dnom') end
- function fonts.methods.node.otf.expt(head,font) return process(head,font,'expt') end
- function fonts.methods.node.otf.fin2(head,font) return process(head,font,'fin2') end
- function fonts.methods.node.otf.fin3(head,font) return process(head,font,'fin3') end
- function fonts.methods.node.otf.fina(head,font) return process(head,font,'fina',3) end
- function fonts.methods.node.otf.frac(head,font) return process(head,font,'frac') end
- function fonts.methods.node.otf.fwid(head,font) return process(head,font,'fwid') end
- function fonts.methods.node.otf.haln(head,font) return process(head,font,'haln') end
- function fonts.methods.node.otf.hist(head,font) return process(head,font,'hist') end
- function fonts.methods.node.otf.hkna(head,font) return process(head,font,'hkna') end
- function fonts.methods.node.otf.hlig(head,font) return process(head,font,'hlig') end
- function fonts.methods.node.otf.hngl(head,font) return process(head,font,'hngl') end
- function fonts.methods.node.otf.hwid(head,font) return process(head,font,'hwid') end
- function fonts.methods.node.otf.init(head,font) return process(head,font,'init',1) end
- function fonts.methods.node.otf.isol(head,font) return process(head,font,'isol',4) end
- function fonts.methods.node.otf.ital(head,font) return process(head,font,'ital') end
- function fonts.methods.node.otf.jp78(head,font) return process(head,font,'jp78') end
- function fonts.methods.node.otf.jp83(head,font) return process(head,font,'jp83') end
- function fonts.methods.node.otf.jp90(head,font) return process(head,font,'jp90') end
- function fonts.methods.node.otf.kern(head,font) return process(head,font,'kern') end
- function fonts.methods.node.otf.liga(head,font) return process(head,font,'liga') end
- function fonts.methods.node.otf.lnum(head,font) return process(head,font,'lnum') end
- function fonts.methods.node.otf.locl(head,font) return process(head,font,'locl') end
- function fonts.methods.node.otf.mark(head,font) return process(head,font,'mark') end
- function fonts.methods.node.otf.med2(head,font) return process(head,font,'med2') end
- function fonts.methods.node.otf.medi(head,font) return process(head,font,'medi',2) end
- function fonts.methods.node.otf.mgrk(head,font) return process(head,font,'mgrk') end
- function fonts.methods.node.otf.mkmk(head,font) return process(head,font,'mkmk') end
- function fonts.methods.node.otf.nalt(head,font) return process(head,font,'nalt') end
- function fonts.methods.node.otf.nlck(head,font) return process(head,font,'nlck') end
- function fonts.methods.node.otf.nukt(head,font) return process(head,font,'nukt') end
- function fonts.methods.node.otf.numr(head,font) return process(head,font,'numr') end
- function fonts.methods.node.otf.onum(head,font) return process(head,font,'onum') end
- function fonts.methods.node.otf.ordn(head,font) return process(head,font,'ordn') end
- function fonts.methods.node.otf.ornm(head,font) return process(head,font,'ornm') end
- function fonts.methods.node.otf.pnum(head,font) return process(head,font,'pnum') end
- function fonts.methods.node.otf.pref(head,font) return process(head,font,'pref') end
- function fonts.methods.node.otf.pres(head,font) return process(head,font,'pres') end
- function fonts.methods.node.otf.pstf(head,font) return process(head,font,'pstf') end
- function fonts.methods.node.otf.rlig(head,font) return process(head,font,'rlig') end
- function fonts.methods.node.otf.rphf(head,font) return process(head,font,'rphf') end
- function fonts.methods.node.otf.salt(head,font) return process(head,font,'calt') end
- function fonts.methods.node.otf.sinf(head,font) return process(head,font,'sinf') end
- function fonts.methods.node.otf.smcp(head,font) return process(head,font,'smcp') end
- function fonts.methods.node.otf.smpl(head,font) return process(head,font,'smpl') end
- function fonts.methods.node.otf.ss01(head,font) return process(head,font,'ss01') end
- function fonts.methods.node.otf.ss02(head,font) return process(head,font,'ss02') end
- function fonts.methods.node.otf.ss03(head,font) return process(head,font,'ss03') end
- function fonts.methods.node.otf.ss04(head,font) return process(head,font,'ss04') end
- function fonts.methods.node.otf.ss05(head,font) return process(head,font,'ss05') end
- function fonts.methods.node.otf.ss06(head,font) return process(head,font,'ss06') end
- function fonts.methods.node.otf.ss07(head,font) return process(head,font,'ss07') end
- function fonts.methods.node.otf.ss08(head,font) return process(head,font,'ss08') end
- function fonts.methods.node.otf.ss09(head,font) return process(head,font,'ss09') end
- function fonts.methods.node.otf.subs(head,font) return process(head,font,'subs') end
- function fonts.methods.node.otf.sups(head,font) return process(head,font,'sups') end
- function fonts.methods.node.otf.swsh(head,font) return process(head,font,'swsh') end
- function fonts.methods.node.otf.titl(head,font) return process(head,font,'titl') end
- function fonts.methods.node.otf.tnam(head,font) return process(head,font,'tnam') end
- function fonts.methods.node.otf.tnum(head,font) return process(head,font,'tnum') end
- function fonts.methods.node.otf.trad(head,font) return process(head,font,'trad') end
- function fonts.methods.node.otf.unic(head,font) return process(head,font,'unic') end
- function fonts.methods.node.otf.zero(head,font) return process(head,font,'zero') end
+ function fonts.methods.node.otf.aalt(head,font,attr) return process(head,font,attr,'aalt') end
+ function fonts.methods.node.otf.afrc(head,font,attr) return process(head,font,attr,'afrc') end
+ function fonts.methods.node.otf.akhn(head,font,attr) return process(head,font,attr,'akhn') end
+ function fonts.methods.node.otf.c2pc(head,font,attr) return process(head,font,attr,'c2pc') end
+ function fonts.methods.node.otf.c2sc(head,font,attr) return process(head,font,attr,'c2sc') end
+ function fonts.methods.node.otf.calt(head,font,attr) return process(head,font,attr,'calt') end
+ function fonts.methods.node.otf.case(head,font,attr) return process(head,font,attr,'case') end
+ function fonts.methods.node.otf.ccmp(head,font,attr) return process(head,font,attr,'ccmp') end
+ function fonts.methods.node.otf.clig(head,font,attr) return process(head,font,attr,'clig') end
+ function fonts.methods.node.otf.cpsp(head,font,attr) return process(head,font,attr,'cpsp') end
+ function fonts.methods.node.otf.cswh(head,font,attr) return process(head,font,attr,'cswh') end
+ function fonts.methods.node.otf.curs(head,font,attr) return process(head,font,attr,'curs') end
+ function fonts.methods.node.otf.dlig(head,font,attr) return process(head,font,attr,'dlig') end
+ function fonts.methods.node.otf.dnom(head,font,attr) return process(head,font,attr,'dnom') end
+ function fonts.methods.node.otf.expt(head,font,attr) return process(head,font,attr,'expt') end
+ function fonts.methods.node.otf.fin2(head,font,attr) return process(head,font,attr,'fin2') end
+ function fonts.methods.node.otf.fin3(head,font,attr) return process(head,font,attr,'fin3') end
+ function fonts.methods.node.otf.fina(head,font,attr) return process(head,font,attr,'fina',3) end
+ function fonts.methods.node.otf.frac(head,font,attr) return process(head,font,attr,'frac') end
+ function fonts.methods.node.otf.fwid(head,font,attr) return process(head,font,attr,'fwid') end
+ function fonts.methods.node.otf.haln(head,font,attr) return process(head,font,attr,'haln') end
+ function fonts.methods.node.otf.hist(head,font,attr) return process(head,font,attr,'hist') end
+ function fonts.methods.node.otf.hkna(head,font,attr) return process(head,font,attr,'hkna') end
+ function fonts.methods.node.otf.hlig(head,font,attr) return process(head,font,attr,'hlig') end
+ function fonts.methods.node.otf.hngl(head,font,attr) return process(head,font,attr,'hngl') end
+ function fonts.methods.node.otf.hwid(head,font,attr) return process(head,font,attr,'hwid') end
+ function fonts.methods.node.otf.init(head,font,attr) return process(head,font,attr,'init',1) end
+ function fonts.methods.node.otf.isol(head,font,attr) return process(head,font,attr,'isol',4) end
+ function fonts.methods.node.otf.ital(head,font,attr) return process(head,font,attr,'ital') end
+ function fonts.methods.node.otf.jp78(head,font,attr) return process(head,font,attr,'jp78') end
+ function fonts.methods.node.otf.jp83(head,font,attr) return process(head,font,attr,'jp83') end
+ function fonts.methods.node.otf.jp90(head,font,attr) return process(head,font,attr,'jp90') end
+ function fonts.methods.node.otf.kern(head,font,attr) return process(head,font,attr,'kern') end
+ function fonts.methods.node.otf.liga(head,font,attr) return process(head,font,attr,'liga') end
+ function fonts.methods.node.otf.lnum(head,font,attr) return process(head,font,attr,'lnum') end
+ function fonts.methods.node.otf.locl(head,font,attr) return process(head,font,attr,'locl') end
+ function fonts.methods.node.otf.mark(head,font,attr) return process(head,font,attr,'mark') end
+ function fonts.methods.node.otf.med2(head,font,attr) return process(head,font,attr,'med2') end
+ function fonts.methods.node.otf.medi(head,font,attr) return process(head,font,attr,'medi',2) end
+ function fonts.methods.node.otf.mgrk(head,font,attr) return process(head,font,attr,'mgrk') end
+ function fonts.methods.node.otf.mkmk(head,font,attr) return process(head,font,attr,'mkmk') end
+ function fonts.methods.node.otf.nalt(head,font,attr) return process(head,font,attr,'nalt') end
+ function fonts.methods.node.otf.nlck(head,font,attr) return process(head,font,attr,'nlck') end
+ function fonts.methods.node.otf.nukt(head,font,attr) return process(head,font,attr,'nukt') end
+ function fonts.methods.node.otf.numr(head,font,attr) return process(head,font,attr,'numr') end
+ function fonts.methods.node.otf.onum(head,font,attr) return process(head,font,attr,'onum') end
+ function fonts.methods.node.otf.ordn(head,font,attr) return process(head,font,attr,'ordn') end
+ function fonts.methods.node.otf.ornm(head,font,attr) return process(head,font,attr,'ornm') end
+ function fonts.methods.node.otf.pnum(head,font,attr) return process(head,font,attr,'pnum') end
+ function fonts.methods.node.otf.pref(head,font,attr) return process(head,font,attr,'pref') end
+ function fonts.methods.node.otf.pres(head,font,attr) return process(head,font,attr,'pres') end
+ function fonts.methods.node.otf.pstf(head,font,attr) return process(head,font,attr,'pstf') end
+ function fonts.methods.node.otf.rlig(head,font,attr) return process(head,font,attr,'rlig') end
+ function fonts.methods.node.otf.rphf(head,font,attr) return process(head,font,attr,'rphf') end
+ function fonts.methods.node.otf.salt(head,font,attr) return process(head,font,attr,'calt') end
+ function fonts.methods.node.otf.sinf(head,font,attr) return process(head,font,attr,'sinf') end
+ function fonts.methods.node.otf.smcp(head,font,attr) return process(head,font,attr,'smcp') end
+ function fonts.methods.node.otf.smpl(head,font,attr) return process(head,font,attr,'smpl') end
+ function fonts.methods.node.otf.ss01(head,font,attr) return process(head,font,attr,'ss01') end
+ function fonts.methods.node.otf.ss02(head,font,attr) return process(head,font,attr,'ss02') end
+ function fonts.methods.node.otf.ss03(head,font,attr) return process(head,font,attr,'ss03') end
+ function fonts.methods.node.otf.ss04(head,font,attr) return process(head,font,attr,'ss04') end
+ function fonts.methods.node.otf.ss05(head,font,attr) return process(head,font,attr,'ss05') end
+ function fonts.methods.node.otf.ss06(head,font,attr) return process(head,font,attr,'ss06') end
+ function fonts.methods.node.otf.ss07(head,font,attr) return process(head,font,attr,'ss07') end
+ function fonts.methods.node.otf.ss08(head,font,attr) return process(head,font,attr,'ss08') end
+ function fonts.methods.node.otf.ss09(head,font,attr) return process(head,font,attr,'ss09') end
+ function fonts.methods.node.otf.subs(head,font,attr) return process(head,font,attr,'subs') end
+ function fonts.methods.node.otf.sups(head,font,attr) return process(head,font,attr,'sups') end
+ function fonts.methods.node.otf.swsh(head,font,attr) return process(head,font,attr,'swsh') end
+ function fonts.methods.node.otf.titl(head,font,attr) return process(head,font,attr,'titl') end
+ function fonts.methods.node.otf.tnam(head,font,attr) return process(head,font,attr,'tnam') end
+ function fonts.methods.node.otf.tnum(head,font,attr) return process(head,font,attr,'tnum') end
+ function fonts.methods.node.otf.trad(head,font,attr) return process(head,font,attr,'trad') end
+ function fonts.methods.node.otf.unic(head,font,attr) return process(head,font,attr,'unic') end
+ function fonts.methods.node.otf.zero(head,font,attr) return process(head,font,attr,'zero') end
end
---~ function fonts.initializers.node.otf.install(feature,attribute)
---~ function fonts.initializers.node.otf[feature](tfm,value) return fonts.otf.features.prepare.feature(tfm,feature,value) end
---~ function fonts.methods.node.otf[feature] (head,font) return fonts.otf.features.process.feature(head,font,feature,attribute) end
---~ end
-
-- common stuff
-function fonts.otf.features.language(tfm,value)
+function fonts.otf.features.language(tfmdata,value)
if value then
value = value:lower()
if fonts.otf.tables.languages[value] then
- tfm.language = value
+ tfmdata.language = value
end
end
end
-function fonts.otf.features.script(tfm,value)
+function fonts.otf.features.script(tfmdata,value)
if value then
value = value:lower()
if fonts.otf.tables.scripts[value] then
- tfm.script = value
+ tfmdata.script = value
end
end
end
-function fonts.otf.features.mode(tfm,value)
+function fonts.otf.features.mode(tfmdata,value)
if value then
- tfm.mode = value:lower()
+ tfmdata.mode = value:lower()
end
end
@@ -3435,10 +3703,11 @@ fonts.initializers.node.otf.script = fonts.otf.features.script
fonts.initializers.node.otf.mode = fonts.otf.features.mode
fonts.initializers.node.otf.method = fonts.otf.features.mode
-fonts.initializers.node.otf.trep = fonts.initializers.base.otf.trep
-fonts.initializers.node.otf.tlig = fonts.initializers.base.otf.tlig
-fonts.initializers.node.otf.texquotes = fonts.initializers.base.otf.texquotes
-fonts.initializers.node.otf.texligatures = fonts.initializers.base.otf.texligatures
+--~ fonts.initializers.node.otf.trep = fonts.initializers.base.otf.trep
+--~ fonts.initializers.node.otf.tlig = fonts.initializers.base.otf.tlig
+
+--~ fonts.methods.node.otf.trep = function(head,font,attr) return process(head,font,attr,'trep') end
+--~ fonts.methods.node.otf.tlig = function(head,font,attr) return process(head,font,attr,'tlig') end
-- we need this because fonts can be bugged
@@ -3553,28 +3822,47 @@ do
-- font related value, but then we also need dynamic features which is
-- somewhat slower; and .. we need a chain of them
- function fonts.initializers.node.otf.analyze(tfm,value)
- local script, language = tfm.script, tfm.language
- local action = fonts.analyzers.initializers[script]
+ local type = type
+
+ local initializers, methods = fonts.analyzers.initializers, fonts.analyzers.methods
+
+ function fonts.initializers.node.otf.analyze(tfmdata,value,attr)
+ local script, language
+ if attr and attr > 0 then
+ script, language = a_to_script[attr], a_to_language[attr]
+ else
+ script, language = tfmdata.script, tfmdata.language
+ end
+ local action = initializers[script]
if action then
if type(action) == "function" then
- return action(tfm,value)
- elseif action[language] then
- return action[language](tfm,value)
+ return action(tfmdata,value)
+ else
+ local action = action[language]
+ if action then
+ return action(tfmdata,value)
+ end
end
end
return nil
end
- function fonts.methods.node.otf.analyze(head,font)
+ function fonts.methods.node.otf.analyze(head,font,attr)
local tfmdata = fontdata[font]
- local script, language = fontdata[font].script, fontdata[font].language
- local action = fonts.analyzers.methods[script]
+ local script, language
+ if attr and attr > 0 then
+ script, language = a_to_script[attr], a_to_language[attr]
+ else
+ script, language = tfmdata.script, tfmdata.language
+ end
+ local action = methods[script]
if action then
if type(action) == "function" then
- return action(head,font)
- elseif action[language] then
- return action[language](head,font)
+ return action(head,font,attr)
+ else
+ action = action[language]
+ if action then
+ return action(head,font,attr)
+ end
end
end
return head, false
@@ -3623,8 +3911,8 @@ do
local fcs = fonts.color.set
local fcr = fonts.color.reset
- function fonts.analyzers.methods.nocolor(head,font)
- for n in nodes.traverse(glyph) do
+ function fonts.analyzers.methods.nocolor(head,font,attr)
+ for n in node.traverse(head,glyph) do
if not font or n.font == font then
fcr(n)
end
@@ -3632,7 +3920,7 @@ do
return head, true
end
- function fonts.analyzers.methods.arab(head,font) -- maybe make a special version with no trace
+ function fonts.analyzers.methods.arab(head,font,attr) -- maybe make a special version with no trace
local characters = fontdata[font].characters
local first, last, current, done = nil, nil, head, false
local trace = fonts.color.trace
@@ -3675,10 +3963,13 @@ do
--~ laststate = 0
end
while current do
- if current.id == glyph and current.font == font then
+ if current.id == glyph and current.subtype<256 and current.font == font then
done = true
local char = current.char
- if characters[char].class == "mark" then -- marks are now in components
+ local chardata = characters[char]
+ if not chardata then
+ -- troubles
+ elseif chardata.class == "mark" then -- marks are now in components
set_attribute(current,state,5) -- mark
if trace then fcs(current,"font:mark") end
elseif isol[char] then
@@ -3810,8 +4101,8 @@ do
-- will move to node-ini :
- local allowbreak = node.new("penalty") allowbreak.penalty = -100
- local nobreak = node.new("penalty") nobreak.penalty = 10000
+ local allowbreak = nodes.penalty( -100) nodes.register(allowbreak)
+ local nobreak = nodes.penalty( 10000) nodes.register(nobreak)
fonts.analyzers.methods.stretch_hang = true
@@ -3819,7 +4110,26 @@ do
-- it will become either a mkiv feature or an attribute, so this is
-- experimental
- function fonts.analyzers.methods.hang(head,font) -- maybe make a special version with no trace
+--~ local function nodes.replace(head,current,newnode)
+--~ local oldnode = current
+--~ newnode.prev, newnode.next = oldnode.prev, oldnode.next
+--~ if oldnode.prev then
+--~ old.prev.next = newnode
+--~ end
+--~ if oldnode.next then
+--~ old.next.prev = newnode
+--~ end
+--~ if head == current then
+--~ head = newnode
+--~ end
+--~ node.free(oldnode)
+--~ return head, newnode
+--~ end
+--~ if char == 0x3000 then
+--~ head, current = node.replace(head,current,nodes.glue(fontdata[font].parameter[6],0,0))
+--~ end
+
+ function fonts.analyzers.methods.hang(head,font,attr) -- maybe make a special version with no trace
local characters = fontdata[font].characters
local current, last, done, stretch, prevchinese = head, nil, false, 0, false
local trace = fonts.color.trace
@@ -3827,7 +4137,7 @@ do
stretch = fontdata[font].parameters[6]
end
while current do
- if current.id == glyph then
+ if current.id == glyph and current.subtype<256 then
if current.font == font then
if prevchinese then
local temp = current.prev
@@ -3868,7 +4178,7 @@ do
elseif hyphenation[char] then
set_attribute(current,state,3) -- xxxx
local prev, next = current.prev, current.next
- if next and next.id == glyph and hyphenation[next.char] then
+ if next and next.id == glyph and next.subtype<256 and hyphenation[next.char] then
if trace then fcs(current,"font:medi") fcs(next,"font:medi")end -- we need nice names
if prev then
if prevchinese then
@@ -3894,24 +4204,31 @@ do
return head, done
end
-
-
fonts.analyzers.methods.hani = fonts.analyzers.methods.hang
end
-- experimental and will probably change
-function fonts.install_feature(type,...)
- if fonts[type] and fonts[type].install_feature then
- fonts[type].install_feature(...)
+do
+ local process = fonts.otf.features.process.feature
+ local prepare = fonts.otf.features.prepare.feature
+ function fonts.install_feature(type,...)
+ if fonts[type] and fonts[type].install_feature then
+ fonts[type].install_feature(...)
+ end
+ end
+ function fonts.otf.install_feature(tag)
+ fonts.methods.node.otf [tag] = function(head,font,attr) return process(head,font,attr,tag) end
+ fonts.initializers.node.otf[tag] = function(tfm,value) return prepare(tfm,tag,value) end
end
-end
-function fonts.otf.install_feature(tag)
- fonts.methods.node.otf [tag] = function(head,font) return fonts.otf.features.process.feature(head,font,tag) end
- fonts.initializers.node.otf[tag] = function(tfm,value) return fonts.otf.features.prepare.feature(tfm,tag,value) end
end
+-- todo: always load texhistoric
+
+fonts.install_feature("otf","tlig")
+fonts.install_feature("otf","trep")
+
--~ exclam + quoteleft => exclamdown
--~ question + quoteleft => questiondown
@@ -3934,3 +4251,9 @@ end
--~ hyphen + hyphen => endash
--~ endash + hyphen => emdash
+-- this is a hack, currently featurefiles erase existing features
+
+fonts.initializers.node.otf.tlig = fonts.initializers.base.otf.tlig
+fonts.initializers.node.otf.trep = fonts.initializers.base.otf.trep
+fonts.methods.node.otf ['tlig'] = nil
+fonts.methods.node.otf ['trep'] = nil
diff --git a/tex/context/base/font-syn.lua b/tex/context/base/font-syn.lua
index 16910a8fd..ddc2924fc 100644
--- a/tex/context/base/font-syn.lua
+++ b/tex/context/base/font-syn.lua
@@ -127,7 +127,7 @@ function fonts.names.identify()
end
end
local function identify(completename,name,suffix)
- if not done[name] then
+ if not done[name] and io.exists(completename) then
nofread = nofread + 1
logs.info("fontnames", "identifying " .. suffix .. " font " .. completename)
logs.push()
diff --git a/tex/context/base/font-tfm.lua b/tex/context/base/font-tfm.lua
index 67a7866c5..abe3eaf36 100644
--- a/tex/context/base/font-tfm.lua
+++ b/tex/context/base/font-tfm.lua
@@ -86,6 +86,7 @@ function fonts.tfm.read_from_tfm(specification)
tfmdata.fonts = vfdata.fonts
end
end
+--~ print(table.serialize(tfmdata))
end
fonts.tfm.enhance(tfmdata,specification)
end
@@ -152,67 +153,76 @@ end
-- if t.tounicode = 1 then also characters[n].tounicode = "string"
-function fonts.tfm.scale(tfmtable, scaledpoints)
+function fonts.tfm.do_scale(tfmtable, scaledpoints)
+ -- beware: the boundingbox is passed by reference, so we must not overwrite it
+ -- in the process; numbers are of course copies
+ --
-- 65536 = 1pt
-- 1000 units per designsize (not always)
local scale, round = tex.scale, tex.round -- replaces math.floor(n*m+0.5)
- local delta
if scaledpoints < 0 then
scaledpoints = (- scaledpoints/1000) * tfmtable.designsize -- already in sp
end
- delta = scaledpoints/(tfmtable.units or 1000) -- brr, some open type fonts have 2048
+ local delta = scaledpoints/(tfmtable.units or 1000) -- brr, some open type fonts have 2048
local t = { }
t.factor = delta
for k,v in pairs(tfmtable) do
- if type(v) == "table" then
- t[k] = { }
- else
- t[k] = v
- end
+ t[k] = (type(v) == "table" and { }) or v
end
local tc = t.characters
+ local trace = fonts.trace
for k,v in pairs(tfmtable.characters) do
+ local w, h, d = v.width, v.height, v.depth
local chr = {
unicode = v.unicode,
name = v.name,
index = v.index or k,
- width = scale(v.width , delta),
- height = scale(v.height, delta),
- depth = scale(v.depth , delta),
+ width = (w == 0 and 0) or scale(w, delta),
+ height = (h == 0 and 0) or scale(h, delta),
+ depth = (d == 0 and 0) or scale(d, delta),
class = v.class
}
-if fonts.trace then
- logs.report("define font", string.format("n=%s, u=%s, i=%s, n=%s c=%s",k,v.unicode,v.index,v.name or '-',v.class or '-'))
-end
- local b = v.boundingbox -- maybe faster to have llx etc not in table
- if b then
- chr.boundingbox = scale(v.boundingbox,delta)
+ if trace then
+ logs.report("define font", string.format("n=%s, u=%s, i=%s, n=%s c=%s",k,v.unicode,v.index,v.name or '-',v.class or '-'))
end
- if v.italic then
- chr.italic = scale(v.italic,delta)
+ local vb = v.boundingbox
+ if vb then
+ chr.boundingbox = scale(vb,delta)
end
- if v.kerns then
- chr.kerns = scale(v.kerns,delta)
+ local vi = v.italic
+ if vi then
+ chr.italic = scale(vi,delta)
end
- if v.ligatures then
- local tt = { }
- for kk,vv in pairs(v.ligatures) do
- tt[kk] = vv
+ local vk = v.kerns
+ if vk then
+ chr.kerns = scale(vk,delta)
+ end
+ local vl = v.ligatures
+ if vl then
+ if true then
+ chr.ligatures = v.ligatures -- shared
+ else
+ local tt = { }
+ for i,l in pairs(vl) do
+ tt[i] = l
+ end
+ chr.ligatures = tt
end
- chr.ligatures = tt
end
- if v.commands then
+ local vc = v.commands
+ if vc then
-- we assume non scaled commands here
- local vc, tt = v.commands, { }
+ local tt = { }
for i=1,#vc do
local ivc = vc[i]
local key = ivc[1]
- if key == "right" or key == "left" then
+ if key == "right" or key == "left" or key == "down" or key == "up" then
tt[#tt+1] = { key, scale(ivc[2],delta) }
else
tt[#tt+1] = ivc -- shared since in cache and untouched
end
end
+--~ print(table.serialize(vc),table.serialize(tt))
chr.commands = tt
end
tc[k] = chr
@@ -226,19 +236,29 @@ end
end
end
-- t.encodingbytes, t.filename, t.fullname, t.name: elsewhere
- t.size = scaledpoints
- t.italicangle = tfmtable.italicangle
- t.ascender = scale(tfmtable.ascender or 0,delta)
- t.descender = scale(tfmtable.descender or 0,delta)
- t.shared = tfmtable.shared or { }
- if t.unique then
- t.unique = table.fastcopy(tfmtable.unique)
- else
- t.unique = { }
- end
+ t.size = scaledpoints
if t.fonts then
t.fonts = table.fastcopy(t.fonts) -- maybe we virtualize more afterwards
end
+ return t, delta
+end
+
+--[[ldx--
+The reason why the scaler is split is that for a while we experimented
+with a helper function. However, in practice the calls are too slow to
+make this profitable and the basic variant was just faster. A day
+wasted but an experience richer.
+--ldx]]--
+
+function fonts.tfm.scale(tfmtable, scaledpoints)
+ local scale = tex.scale
+ local t, factor = fonts.tfm.do_scale(tfmtable, scaledpoints)
+ t.factor = factor
+ t.ascender = scale(tfmtable.ascender or 0, factor)
+ t.descender = scale(tfmtable.descender or 0, factor)
+ t.shared = tfmtable.shared or { }
+ t.unique = table.fastcopy(tfmtable.unique or {})
+--~ print("scaling", t.name, t.factor) -- , fonts.tfm.hash_features(tfmtable.specification))
return t
end
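+
+-- usage sketch (not part of the patch); tfmdata stands for some previously
+-- read tfm table and is only an illustration:
+--
+--~ local t = fonts.tfm.scale(tfmdata, 12*65536) -- 12pt, since 65536 sp == 1pt
+--~ local t = fonts.tfm.scale(tfmdata, -1200) -- 1.2 times the designsize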
@@ -372,7 +392,9 @@ function fonts.initializers.common.complement(tfmdata,value) -- todo: value = la
if value then
local chr, index, data, get_virtual_id = tfmdata.characters, nil, characters.data, fonts.tfm.get_virtual_id
local selection = fonts.initializers.complements.load("LATIN") -- will be value
- for _, k in ipairs(selection) do
+ -- for _, k in ipairs(selection) do
+ for i=1,#selection do
+ local k = selection[i]
if not chr[k] then
local dk = data[k]
local vs, name = dk.specials, dk.adobename
@@ -520,8 +542,8 @@ do
local glyph = node.id('glyph')
local fontdata = fonts.tfm.id
local set_attribute = node.set_attribute
- local unset_attribute = node.unset_attribute
- local has_attribute = node.has_attribute
+-- local unset_attribute = node.unset_attribute
+-- local has_attribute = node.has_attribute
local state = attributes.numbers['state'] or 100
@@ -533,14 +555,6 @@ do
function fonts.analyzers.aux.setstate(head,font)
local characters = fontdata[font].characters
local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
- local function finish()
- if first and first == last then
- set_attribute(last,state,4) -- isol
- elseif last then
- set_attribute(last,state,3) -- fina
- end
- first, last, n = nil, nil, 0
- end
while current do
if current.id == glyph and current.font == font then
if characters[current.char].class == "mark" then
@@ -553,12 +567,21 @@ do
last, n = current, n+1
set_attribute(current,state,2) -- medi
end
- else
- finish()
+ else -- finish
+ if first and first == last then
+ set_attribute(last,state,4) -- isol
+ elseif last then
+ set_attribute(last,state,3) -- fina
+ end
+ first, last, n = nil, nil, 0
end
current = current.next
end
- finish()
+ if first and first == last then
+ set_attribute(last,state,4) -- isol
+ elseif last then
+ set_attribute(last,state,3) -- fina
+ end
return head, done
end
@@ -585,7 +608,7 @@ do
-- check if head
if last and not last.components then
last.components = current
- last.components.prev = nil
+ current.prev = nil -- last.components.prev = nil
done = true
n = 1
else
diff --git a/tex/context/base/font-vf.lua b/tex/context/base/font-vf.lua
index f0258e281..7070acca1 100644
--- a/tex/context/base/font-vf.lua
+++ b/tex/context/base/font-vf.lua
@@ -67,14 +67,37 @@ function fonts.vf.aux.combine.load(g,name)
return fonts.tfm.read_and_define(name or g.specification.name,g.specification.size)
end
+function fonts.vf.aux.combine.names(g,name,force)
+ local f, id = fonts.tfm.read_and_define(name,g.specification.size)
+ if f and id then
+ local fc, gc = f.characters, g.characters
+ g.fonts[#g.fonts+1] = { id = id } -- no need to be sparse
+ local hn = #g.fonts
+ for k, v in pairs(fc) do
+ if force or not gc[k] then
+ gc[k] = table.fastcopy(v)
+ gc[k].commands = { { 'slot', hn, k } }
+ end
+ end
+ if not g.parameters and #g.fonts > 0 then -- share this code !
+ g.parameters = table.fastcopy(f.parameters)
+ g.italicangle = f.italicangle
+ g.ascender = f.ascender
+ g.descender = f.descender
+ end
+ end
+end
+
fonts.vf.aux.combine.commands = {
- ["initialize"] = function(g,v) fonts.vf.aux.combine.assign(g, g.name) end,
- ["include-method"] = function(g,v) fonts.vf.aux.combine.process(g,fonts.vf.combinations[v[2]]) end, -- name
+ ["initialize"] = function(g,v) fonts.vf.aux.combine.assign (g,g.name) end,
+ ["include-method"] = function(g,v) fonts.vf.aux.combine.process (g,fonts.vf.combinations[v[2]]) end, -- name
["copy-parameters"] = function(g,v) fonts.vf.aux.combine.parameters(g,v[2]) end, -- name
- ["copy-range"] = function(g,v) fonts.vf.aux.combine.assign(g,v[2],v[3],v[4],v[5],true) end, -- name, from-start, from-end, to-start
- ["copy-char"] = function(g,v) fonts.vf.aux.combine.assign(g,v[2],v[3],v[3],v[4],true) end, -- name, from, to
- ["fallback-range"] = function(g,v) fonts.vf.aux.combine.assign(g,v[2],v[3],v[4],v[5],false) end, -- name, from-start, from-end, to-start
- ["fallback-char"] = function(g,v) fonts.vf.aux.combine.assign(g,v[2],v[3],v[3],v[4],false) end, -- name, from, to
+ ["copy-range"] = function(g,v) fonts.vf.aux.combine.assign (g,v[2],v[3],v[4],v[5],true) end, -- name, from-start, from-end, to-start
+ ["copy-char"] = function(g,v) fonts.vf.aux.combine.assign (g,v[2],v[3],v[3],v[4],true) end, -- name, from, to
+ ["fallback-range"] = function(g,v) fonts.vf.aux.combine.assign (g,v[2],v[3],v[4],v[5],false) end, -- name, from-start, from-end, to-start
+ ["fallback-char"] = function(g,v) fonts.vf.aux.combine.assign (g,v[2],v[3],v[3],v[4],false) end, -- name, from, to
+ ["copy_names"] = function(g,v) fonts.vf.aux.combine.names (g,v[2],true) end,
+ ["fallback_names"] = function(g,v) fonts.vf.aux.combine.names (g,v[2],false) end,
}
function fonts.vf.combine(specification,tag)
@@ -120,6 +143,15 @@ fonts.define.methods.install(
--~ }
--~ )
+--~ fonts.define.methods.install(
+--~ "lmsymbol10", {
+--~ { "fallback_names", "lmsy10.afm" } ,
+--~ { "fallback_names", "msam10.afm" } ,
+--~ { "fallback_names", "msbm10.afm" }
+--~ }
+--~ )
+--~ \font\TestFont=dummy@lmsymbol10 at 24pt
+
-- docu case
--~ fonts.define.methods.install(
diff --git a/tex/context/base/l-aux.lua b/tex/context/base/l-aux.lua
index f5aa7e67e..2c98a2f35 100644
--- a/tex/context/base/l-aux.lua
+++ b/tex/context/base/l-aux.lua
@@ -14,19 +14,19 @@ do
hash[key] = value
end
- local space = lpeg.S(' ')^0
- local equal = lpeg.S("=")^1
- local comma = lpeg.S(",")^0
- local nonspace = lpeg.P(1-lpeg.S(' '))^1
- local nonequal = lpeg.P(1-lpeg.S('='))^1
- local noncomma = lpeg.P(1-lpeg.S(','))^1
- local nonbrace = lpeg.P(1-lpeg.S('{}'))^1
- local nested = lpeg.S('{') * lpeg.C(nonbrace^1) * lpeg.S('}')
+ local space = lpeg.P(' ')
+ local equal = lpeg.P("=")
+ local comma = lpeg.P(",")
+ local lbrace = lpeg.P("{")
+ local rbrace = lpeg.P("}")
+ local nobrace = 1 - (lbrace+rbrace)
+ local nested = lpeg.P{ lbrace * (nobrace + lpeg.V(1))^0 * rbrace }
- local key = lpeg.C(nonequal)
- local value = nested + lpeg.C(noncomma)
+ local key = lpeg.C((1-equal)^1)
+ local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0)
+ local pattern = ((space^0 * key * equal * value * comma^0) / set)^1
- local pattern = ((space * key * equal * value * comma) / set)^1
+ -- "a=1, b=2, c=3, d={a{b,c}d}, e=12345, f=xx{a{b,c}d}xx, g={}" : outer {} removes, leading spaces ignored
function aux.settings_to_hash(str)
hash = { }
@@ -34,7 +34,11 @@ do
return hash
end
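+
+ -- usage sketch (illustration only, not part of the patch):
+ --
+ --~ local h = aux.settings_to_hash("a=1, b=2, d={a{b,c}d}")
+ --~ for k, v in pairs(h) do print(k,v) end -- the outer braces of d are stripped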
- local pattern = lpeg.Ct((space * value * comma)^1)
+ local seperator = comma * space^0
+ local value = lbrace * lpeg.C(nobrace^0) * rbrace + lpeg.C((1-seperator)^0)
+ local pattern = lpeg.Ct(value*(seperator*value)^0)
+
+ -- "aap, {noot}, mies" : outer {} removes, leading spaces ignored
function aux.settings_to_array(str)
return lpeg.match(pattern,str)
@@ -42,30 +46,27 @@ do
end
---~ do
---~ str = "a=1, b=2, c=3, d={abc}"
-
---~ for k,v in pairs(aux.settings_to_hash (str)) do print(k,v) end
---~ for k,v in pairs(aux.settings_to_array(str)) do print(k,v) end
---~ end
-
-function aux.hash_to_string(h,separator,yes,no,strict)
+function aux.hash_to_string(h,separator,yes,no,strict,omit)
if h then
- local t = { }
- for _,k in ipairs(table.sortedkeys(h)) do
- local v = h[k]
- if type(v) == "boolean" then
- if yes and no then
- if v then
- t[#t+1] = k .. '=' .. yes
- elseif not strict then
- t[#t+1] = k .. '=' .. no
+ local t, s = { }, table.sortedkeys(h)
+ omit = omit and table.tohash(omit)
+ for i=1,#s do
+ local key = s[i]
+ if not omit or not omit[key] then
+ local value = h[key]
+ if type(value) == "boolean" then
+ if yes and no then
+ if value then
+ t[#t+1] = key .. '=' .. yes
+ elseif not strict then
+ t[#t+1] = key .. '=' .. no
+ end
+ elseif value or not strict then
+ t[#t+1] = key .. '=' .. tostring(value)
end
- elseif v or not strict then
- t[#t+1] = k .. '=' .. tostring(v)
+ else
+ t[#t+1] = key .. '=' .. value
end
- else
- t[#t+1] = k .. '=' .. v
end
end
return table.concat(t,separator or ",")
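+
+-- usage sketch (illustration only); the table and options are made up:
+--
+--~ print(aux.hash_to_string({ a = 1, b = true, c = "x" }, ",", "yes", "no", false, { "c" }))
+--~ -- prints: a=1,b=yes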
diff --git a/tex/context/base/l-boolean.lua b/tex/context/base/l-boolean.lua
index 098f0b3a1..66c608cee 100644
--- a/tex/context/base/l-boolean.lua
+++ b/tex/context/base/l-boolean.lua
@@ -13,11 +13,12 @@ end
function toboolean(str,tolerant)
if tolerant then
- if type(str) == "string" then
+ local tstr = type(str)
+ if tstr == "string" then
return str == "true" or str == "yes" or str == "on" or str == "1"
- elseif type(str) == "number" then
+ elseif tstr == "number" then
return tonumber(str) ~= 0
- elseif type(str) == "nil" then
+ elseif tstr == "nil" then
return false
else
return str
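+
+-- usage sketch (illustration only):
+--
+--~ print(toboolean("yes",true)) -- true
+--~ print(toboolean("0",true)) -- false
+--~ print(toboolean(nil,true)) -- false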
diff --git a/tex/context/base/l-dir.lua b/tex/context/base/l-dir.lua
index df241d221..dfacfb291 100644
--- a/tex/context/base/l-dir.lua
+++ b/tex/context/base/l-dir.lua
@@ -13,15 +13,18 @@ dir = { }
if lfs then
function dir.glob_pattern(path,patt,recurse,action)
- for name in lfs.dir(path) do
- local full = path .. '/' .. name
- local mode = lfs.attributes(full,'mode')
- if mode == 'file' then
- if name:find(patt) then
- action(full)
+ local ok, scanner = xpcall(function() return lfs.dir(path) end, function() end) -- kepler safe
+ if ok and type(scanner) == "function" then
+ for name in scanner do
+ local full = path .. '/' .. name
+ local mode = lfs.attributes(full,'mode')
+ if mode == 'file' then
+ if name:find(patt) then
+ action(full)
+ end
+ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
+ dir.glob_pattern(full,patt,recurse,action)
end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- dir.glob_pattern(full,patt,recurse,action)
end
end
end
@@ -46,6 +49,30 @@ if lfs then
return t
end
+ function dir.globfiles(path,recurse,func,files)
+ if type(func) == "string" then
+ local s = func -- alas, we need this indirect way
+ func = function(name) return name:find(s) end
+ end
+ files = files or { }
+ for name in lfs.dir(path) do
+ if name:find("^%.") then
+ --- skip
+ elseif lfs.attributes(name,'mode') == "directory" then
+ if recurse then
+ dir.globfiles(path .. "/" .. name,recurse,func,files)
+ end
+ elseif func then
+ if func(name) then
+ files[#files+1] = path .. "/" .. name
+ end
+ else
+ files[#files+1] = path .. "/" .. name
+ end
+ end
+ return files
+ end
+
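+ -- usage sketch (not part of the patch); the path and pattern are made up:
+ --
+ --~ local found = dir.globfiles("tex/context/base",true,"%.lua$")
+ --~ print(#found)
+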
-- t = dir.glob("c:/data/develop/context/sources/**/????-*.tex")
-- t = dir.glob("c:/data/develop/tex/texmf/**/*.tex")
-- t = dir.glob("c:/data/develop/context/texmf/**/*.tex")
@@ -62,33 +89,23 @@ if lfs then
--~ mkdirs(".","/a/b/c")
--~ mkdirs("a","b","c")
- function dir.mkdirs(...) -- root,... or ... ; root is not split
- local pth, err = "", false
- for k,v in pairs({...}) do
- if k == 1 then
- if not lfs.isdir(v) then
- -- print("no root path " .. v)
- err = true
- else
- pth = v
- end
- elseif lfs.isdir(pth .. "/" .. v) then
- pth = pth .. "/" .. v
+ function dir.mkdirs(...)
+ local pth, err, lst = "", false, table.concat({...},"/")
+ for _, s in ipairs(lst:split("/")) do
+ if pth == "" then
+ pth = (s == "" and "/") or s
else
- for _,s in pairs(v:split("/")) do
- pth = pth .. "/" .. s
- if not lfs.isdir(pth) then
- ok = lfs.mkdir(pth)
- if not lfs.isdir(pth) then
- err = true
- end
- end
- if err then break end
- end
+ pth = pth .. "/" .. s
+ end
+ if s == "" then
+ -- can be network path
+ elseif not lfs.isdir(pth) then
+ lfs.mkdir(pth)
end
- if err then break end
end
return pth, not err
end
+ dir.makedirs = dir.mkdirs
+
end
diff --git a/tex/context/base/l-io.lua b/tex/context/base/l-io.lua
index e53b8fb6a..b7783f892 100644
--- a/tex/context/base/l-io.lua
+++ b/tex/context/base/l-io.lua
@@ -73,32 +73,53 @@ end
do
+ local sb = string.byte
+
+--~ local nextchar = {
+--~ [ 4] = function(f)
+--~ return f:read(1), f:read(1), f:read(1), f:read(1)
+--~ end,
+--~ [ 2] = function(f)
+--~ return f:read(1), f:read(1)
+--~ end,
+--~ [ 1] = function(f)
+--~ return f:read(1)
+--~ end,
+--~ [-2] = function(f)
+--~ local a = f:read(1)
+--~ local b = f:read(1)
+--~ return b, a
+--~ end,
+--~ [-4] = function(f)
+--~ local a = f:read(1)
+--~ local b = f:read(1)
+--~ local c = f:read(1)
+--~ local d = f:read(1)
+--~ return d, c, b, a
+--~ end
+--~ }
+
local nextchar = {
[ 4] = function(f)
- return f:read(1), f:read(1), f:read(1), f:read(1)
+ return f:read(1,1,1,1)
end,
[ 2] = function(f)
- return f:read(1), f:read(1)
+ return f:read(1,1)
end,
[ 1] = function(f)
return f:read(1)
end,
[-2] = function(f)
- local a = f:read(1)
- local b = f:read(1)
+ local a, b = f:read(1,1)
return b, a
end,
[-4] = function(f)
- local a = f:read(1)
- local b = f:read(1)
- local c = f:read(1)
- local c = f:read(1)
+ local a, b, c, d = f:read(1,1,1,1)
return d, c, b, a
end
}
function io.characters(f,n)
- local sb = string.byte
if f then
return nextchar[n or 1], f
else
@@ -110,12 +131,62 @@ end
do
+ local sb = string.byte
+
+--~ local nextbyte = {
+--~ [4] = function(f)
+--~ local a = f:read(1)
+--~ local b = f:read(1)
+--~ local c = f:read(1)
+--~ local d = f:read(1)
+--~ if d then
+--~ return sb(a), sb(b), sb(c), sb(d)
+--~ else
+--~ return nil, nil, nil, nil
+--~ end
+--~ end,
+--~ [2] = function(f)
+--~ local a = f:read(1)
+--~ local b = f:read(1)
+--~ if b then
+--~ return sb(a), sb(b)
+--~ else
+--~ return nil, nil
+--~ end
+--~ end,
+--~ [1] = function (f)
+--~ local a = f:read(1)
+--~ if a then
+--~ return sb(a)
+--~ else
+--~ return nil
+--~ end
+--~ end,
+--~ [-2] = function (f)
+--~ local a = f:read(1)
+--~ local b = f:read(1)
+--~ if b then
+--~ return sb(b), sb(a)
+--~ else
+--~ return nil, nil
+--~ end
+--~ end,
+--~ [-4] = function(f)
+--~ local a = f:read(1)
+--~ local b = f:read(1)
+--~ local c = f:read(1)
+--~ local d = f:read(1)
+--~ if d then
+--~ return sb(d), sb(c), sb(b), sb(a)
+--~ else
+--~ return nil, nil, nil, nil
+--~ end
+--~ end
+--~ }
+
local nextbyte = {
[4] = function(f)
- local a = f:read(1)
- local b = f:read(1)
- local c = f:read(1)
- local d = f:read(1)
+ local a, b, c, d = f:read(1,1,1,1)
if d then
return sb(a), sb(b), sb(c), sb(d)
else
@@ -123,8 +194,7 @@ do
end
end,
[2] = function(f)
- local a = f:read(1)
- local b = f:read(1)
+ local a, b = f:read(1,1)
if b then
return sb(a), sb(b)
else
@@ -140,8 +210,7 @@ do
end
end,
[-2] = function (f)
- local a = f:read(1)
- local b = f:read(1)
+ local a, b = f:read(1,1)
if b then
return sb(b), sb(a)
else
@@ -149,10 +218,7 @@ do
end
end,
[-4] = function(f)
- local a = f:read(1)
- local b = f:read(1)
- local c = f:read(1)
- local d = f:read(1)
+ local a, b, c, d = f:read(1,1,1,1)
if d then
return sb(d), sb(c), sb(b), sb(a)
else
@@ -162,7 +228,6 @@ do
}
function io.bytes(f,n)
- local sb = string.byte
if f then
return nextbyte[n or 1], f
else
@@ -171,3 +236,35 @@ do
end
end
+
+function io.ask(question,default,options)
+ while true do
+ io.write(question)
+ if options then
+ io.write(string.format(" [%s]",table.concat(options,"|")))
+ end
+ if default then
+ io.write(string.format(" [%s]",default))
+ end
+ io.write(string.format(" "))
+ local answer = io.read()
+ answer = answer:gsub("^%s*(.*)%s*$","%1")
+ if answer == "" and default then
+ return default
+ elseif not options then
+ return answer
+ else
+ for _,v in pairs(options) do
+ if v == answer then
+ return answer
+ end
+ end
+ local pattern = "^" .. answer
+ for _,v in pairs(options) do
+ if v:find(pattern) then
+ return v
+ end
+ end
+ end
+ end
+end
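+
+-- usage sketch (not part of the patch); the question and options are made up:
+--
+--~ local answer = io.ask("overwrite the file?", "no", { "yes", "no" })
+--~ print(answer)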
diff --git a/tex/context/base/l-lpeg.lua b/tex/context/base/l-lpeg.lua
new file mode 100644
index 000000000..9e589621b
--- /dev/null
+++ b/tex/context/base/l-lpeg.lua
@@ -0,0 +1,41 @@
+-- filename : l-lpeg.lua
+-- author : Hans Hagen, PRAGMA-ADE, Hasselt NL
+-- copyright: PRAGMA ADE / ConTeXt Development Team
+-- license : see context related readme files
+
+if not versions then versions = { } end versions['l-lpeg'] = 1.001
+
+--~ l-lpeg.lua :
+
+--~ lpeg.digit = lpeg.R('09')^1
+--~ lpeg.sign = lpeg.S('+-')^1
+--~ lpeg.cardinal = lpeg.P(lpeg.sign^0 * lpeg.digit^1)
+--~ lpeg.integer = lpeg.P(lpeg.sign^0 * lpeg.digit^1)
+--~ lpeg.float = lpeg.P(lpeg.sign^0 * lpeg.digit^0 * lpeg.P('.') * lpeg.digit^1)
+--~ lpeg.number = lpeg.float + lpeg.integer
+--~ lpeg.oct = lpeg.P("0") * lpeg.R('07')^1
+--~ lpeg.hex = lpeg.P("0x") * (lpeg.R('09') + lpeg.R('AF'))^1
+--~ lpeg.uppercase = lpeg.P("AZ")
+--~ lpeg.lowercase = lpeg.P("az")
+
+--~ lpeg.eol = lpeg.S('\r\n\f')^1 -- includes formfeed
+--~ lpeg.space = lpeg.S(' ')^1
+--~ lpeg.nonspace = lpeg.P(1-lpeg.space)^1
+--~ lpeg.whitespace = lpeg.S(' \r\n\f\t')^1
+--~ lpeg.nonwhitespace = lpeg.P(1-lpeg.whitespace)^1
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return lpeg.P { lpeg.P(pattern) + 1 * lpeg.V(1) }
+end
+
+function lpeg.startswith(pattern) --slightly adapted
+ return lpeg.P(pattern)
+end
+
+--~ g = lpeg.splitter(" ",function(s) ... end) -- gmatch:lpeg = 3:2
+
+function lpeg.splitter(pattern, action)
+ return (((1-lpeg.P(pattern))^1)/action+1)^0
+end
+
+
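+-- quick sketches (illustration only, not part of the module):
+--
+--~ print(lpeg.anywhere("ab"):match("xxabxx")) -- position just past the first "ab"
+--~
+--~ local words = { }
+--~ lpeg.match(lpeg.splitter(" ", function(s) words[#words+1] = s end), "aap noot mies")
+--~ print(#words) -- 3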
diff --git a/tex/context/base/l-md5.lua b/tex/context/base/l-md5.lua
index 2a24f4169..4deb9bd74 100644
--- a/tex/context/base/l-md5.lua
+++ b/tex/context/base/l-md5.lua
@@ -13,6 +13,6 @@ if md5 then do
if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
- if not md5.dec then function md5.dec(str) return convert(stt,"%03i") end end
+ if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
end end
diff --git a/tex/context/base/l-os.lua b/tex/context/base/l-os.lua
index 0c1d92911..1173a928e 100644
--- a/tex/context/base/l-os.lua
+++ b/tex/context/base/l-os.lua
@@ -25,3 +25,35 @@ end
if not os.setenv then
function os.setenv() return false end
end
+
+if not os.times then
+ -- utime = user time
+ -- stime = system time
+ -- cutime = children user time
+ -- cstime = children system time
+ function os.times()
+ return {
+ utime = os.clock(), -- user
+ stime = 0, -- system
+ cutime = 0, -- children user
+ cstime = 0, -- children system
+ }
+ end
+end
+
+if os.gettimeofday then
+ os.clock = os.gettimeofday
+end
+
+do
+ local startuptime = os.gettimeofday()
+ function os.runtime()
+ return os.gettimeofday() - startuptime
+ end
+end
+
+--~ print(os.gettimeofday()-os.time())
+--~ os.sleep(1.234)
+--~ print (">>",os.runtime())
+--~ print(os.date("%H:%M:%S",os.gettimeofday()))
+--~ print(os.date("%H:%M:%S",os.time()))
diff --git a/tex/context/base/l-table.lua b/tex/context/base/l-table.lua
index 14fad31d7..c39a72ec2 100644
--- a/tex/context/base/l-table.lua
+++ b/tex/context/base/l-table.lua
@@ -37,12 +37,17 @@ function table.sortedkeys(tab)
srt[#srt+1] = key
if kind == 3 then
-- no further check
- elseif type(key) == "string" then
- if kind == 2 then kind = 3 else kind = 1 end
- elseif type(key) == "number" then
- if kind == 1 then kind = 3 else kind = 2 end
else
- kind = 3
+ local tkey = type(key)
+ if tkey == "string" then
+ -- if kind == 2 then kind = 3 else kind = 1 end
+ kind = (kind == 2 and 3) or 1
+ elseif tkey == "number" then
+ -- if kind == 1 then kind = 3 else kind = 2 end
+ kind = (kind == 1 and 3) or 2
+ else
+ kind = 3
+ end
end
end
if kind == 0 or kind == 3 then
@@ -65,32 +70,96 @@ function table.prepend(t, list)
end
end
+--~ function table.merge(t, ...)
+--~ for _, list in ipairs({...}) do
+--~ for k,v in pairs(list) do
+--~ t[k] = v
+--~ end
+--~ end
+--~ return t
+--~ end
+
function table.merge(t, ...)
- for _, list in ipairs({...}) do
- for k,v in pairs(list) do
+ local lst = {...}
+ for i=1,#lst do
+ for k, v in pairs(lst[i]) do
t[k] = v
end
end
+ return t
end
+--~ function table.merged(...)
+--~ local tmp = { }
+--~ for _, list in ipairs({...}) do
+--~ for k,v in pairs(list) do
+--~ tmp[k] = v
+--~ end
+--~ end
+--~ return tmp
+--~ end
+
function table.merged(...)
- local tmp = { }
- for _, list in ipairs({...}) do
- for k,v in pairs(list) do
+ local tmp, lst = { }, {...}
+ for i=1,#lst do
+ for k, v in pairs(lst[i]) do
tmp[k] = v
end
end
return tmp
end
-if not table.fastcopy then
+--~ function table.imerge(t, ...)
+--~ for _, list in ipairs({...}) do
+--~ for _, v in ipairs(list) do
+--~ t[#t+1] = v
+--~ end
+--~ end
+--~ return t
+--~ end
- function table.fastcopy(old) -- fast one
+function table.imerge(t, ...)
+ local lst = {...}
+ for i=1,#lst do
+ local nst = lst[i]
+ for j=1,#nst do
+ t[#t+1] = nst[j]
+ end
+ end
+ return t
+end
+
+--~ function table.imerged(...)
+--~ local tmp = { }
+--~ for _, list in ipairs({...}) do
+--~ for _,v in pairs(list) do
+--~ tmp[#tmp+1] = v
+--~ end
+--~ end
+--~ return tmp
+--~ end
+
+function table.imerged(...)
+ local tmp, lst = { }, {...}
+ for i=1,#lst do
+ local nst = lst[i]
+ for j=1,#nst do
+ tmp[#tmp+1] = nst[j]
+ end
+ end
+ return tmp
+end
+
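+-- usage sketch (illustration only):
+--
+--~ local t = table.merged({ a = 1 }, { b = 2 }) -- t.a == 1, t.b == 2
+--~ print(table.concat(table.imerged({ 1, 2 }, { 3, 4 }),",")) -- 1,2,3,4
+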
+if not table.fastcopy then do
+
+ local type, pairs, getmetatable, setmetatable = type, pairs, getmetatable, setmetatable
+
+ local function fastcopy(old) -- fast one
if old then
local new = { }
for k,v in pairs(old) do
if type(v) == "table" then
- new[k] = table.fastcopy(v) -- was just table.copy
+ new[k] = fastcopy(v) -- was just table.copy
else
new[k] = v
end
@@ -105,11 +174,15 @@ if not table.fastcopy then
end
end
-end
+ table.fastcopy = fastcopy
+
+end end
-if not table.copy then
+if not table.copy then do
- function table.copy(t, tables) -- taken from lua wiki, slightly adapted
+ local type, pairs, getmetatable, setmetatable = type, pairs, getmetatable, setmetatable
+
+ local function copy(t, tables) -- taken from lua wiki, slightly adapted
tables = tables or { }
local tcopy = {}
if not tables[t] then
@@ -120,7 +193,7 @@ if not table.copy then
if tables[i] then
i = tables[i]
else
- i = table.copy(i, tables)
+ i = copy(i, tables)
end
end
if type(v) ~= "table" then
@@ -128,7 +201,7 @@ if not table.copy then
elseif tables[v] then
tcopy[i] = tables[v]
else
- tcopy[i] = table.copy(v, tables)
+ tcopy[i] = copy(v, tables)
end
end
local mt = getmetatable(t)
@@ -138,7 +211,9 @@ if not table.copy then
return tcopy
end
-end
+ table.copy = copy
+
+end end
 -- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack)
@@ -211,7 +286,9 @@ do
end
if n == #t then
local tt = { }
- for _,v in ipairs(t) do
+ -- for _,v in ipairs(t) do
+ for i=1,#t do
+ local v = t[i]
local tv = type(v)
if tv == "number" or tv == "boolean" then
tt[#tt+1] = tostring(v)
@@ -240,15 +317,16 @@ do
end
else
depth = ""
- if type(name) == "string" then
+ local tname = type(name)
+ if tname == "string" then
if name == "return" then
handle("return {")
else
handle(name .. "={")
end
- elseif type(name) == "number" then
+ elseif tname == "number" then
handle("[" .. name .. "]={")
- elseif type(name) == "boolean" then
+ elseif tname == "boolean" then
if name then
handle("return {")
else
@@ -263,7 +341,7 @@ do
local inline = compact and table.serialize_inline
local first, last = nil, 0 -- #root cannot be trusted here
if compact then
- for k,v in ipairs(root) do
+ for k,v in ipairs(root) do -- NOT: for k=1,#root do
if not first then first = k end
last = last + 1
end
diff --git a/tex/context/base/l-url.lua b/tex/context/base/l-url.lua
new file mode 100644
index 000000000..2e0907eb7
--- /dev/null
+++ b/tex/context/base/l-url.lua
@@ -0,0 +1,77 @@
+-- filename : l-url.lua
+-- author : Hans Hagen, PRAGMA-ADE, Hasselt NL
+-- copyright: PRAGMA ADE / ConTeXt Development Team
+-- license : see context related readme files
+
+if not versions then versions = { } end versions['l-url'] = 1.001
+if not url then url = { } end
+
+-- from the spec (on the web):
+--
+-- foo://example.com:8042/over/there?name=ferret#nose
+-- \_/ \______________/\_________/ \_________/ \__/
+-- | | | | |
+-- scheme authority path query fragment
+-- | _____________________|__
+-- / \ / \
+-- urn:example:animal:ferret:nose
+
+do
+
+ local function tochar(s)
+ return string.char(tonumber(s,16))
+ end
+
+ local colon, qmark, hash, slash, percent, endofstring = lpeg.P(":"), lpeg.P("?"), lpeg.P("#"), lpeg.P("/"), lpeg.P("%"), lpeg.P(-1)
+
+ local hexdigit = lpeg.R("09","AF","af")
+ local escaped = percent * lpeg.C(hexdigit * hexdigit) / tochar
+
+ local scheme = lpeg.Cs((escaped+(1-colon-slash-qmark-hash))^0) * colon + lpeg.Cc("")
+ local authority = slash * slash * lpeg.Cs((escaped+(1- slash-qmark-hash))^0) + lpeg.Cc("")
+ local path = lpeg.Cs((escaped+(1- qmark-hash))^0) + lpeg.Cc("")
+ local query = qmark * lpeg.Cs((escaped+(1- hash))^0) + lpeg.Cc("")
+ local fragment = hash * lpeg.Cs((escaped+(1- endofstring))^0) + lpeg.Cc("")
+
+ local parser = lpeg.Ct(scheme * authority * path * query * fragment)
+
+ function url.split(str)
+ return (type(str) == "string" and parser:match(str)) or str
+ end
+
+end
+
+function url.hashed(str)
+ str = url.split(str)
+ return { scheme = str[1], authority = str[2], path = str[3], query = str[4], fragment = str[5] }
+end
+
+function url.filename(filename)
+ local t = url.hashed(filename)
+ return (t.scheme == "file" and t.path:gsub("^/([a-zA-Z]:/)","%1")) or filename
+end
+
+--~ print(url.filename("file:///c:/oeps.txt"))
+--~ print(url.filename("c:/oeps.txt"))
+--~ print(url.filename("file:///oeps.txt"))
+--~ print(url.filename("/oeps.txt"))
+
+-- from the spec on the web (sort of):
+--~
+--~ function test(str)
+--~ print(table.serialize(url.hashed(str)))
+--~ -- print(table.serialize(url.split(str)))
+--~ end
+---~
+--~ test("%56pass%20words")
+--~ test("file:///c:/oeps.txt")
+--~ test("ftp://ftp.is.co.za/rfc/rfc1808.txt")
+--~ test("http://www.ietf.org/rfc/rfc2396.txt")
+--~ test("ldap://[2001:db8::7]/c=GB?objectClass?one#what")
+--~ test("mailto:John.Doe@example.com")
+--~ test("news:comp.infosystems.www.servers.unix")
+--~ test("tel:+1-816-555-1212")
+--~ test("telnet://192.0.2.16:80/")
+--~ test("urn:oasis:names:specification:docbook:dtd:xml:4.1.2")
+--~ test("/etc/passwords")
+--~ test("http://www.pragma-ade.com/spaced%20name")
diff --git a/tex/context/base/l-xml.lua b/tex/context/base/l-xml.lua
index 9236411f7..a15e3e81b 100644
--- a/tex/context/base/l-xml.lua
+++ b/tex/context/base/l-xml.lua
@@ -51,7 +51,8 @@ xml.xmlns = { }
do
- local parser = lpeg.P(false) -- printing shows that this has no side effects
+ local check = lpeg.P(false)
+ local parse = check
--[[ldx--
 The next function associates a namespace prefix with an url. This
@@ -63,7 +64,8 @@ do
--ldx]]--
function xml.registerns(namespace, pattern) -- pattern can be an lpeg
- parser = parser + lpeg.C(lpeg.P(pattern:lower())) / namespace
+ check = check + lpeg.C(lpeg.P(pattern:lower())) / namespace
+ parse = lpeg.P { lpeg.P(check) + 1 * lpeg.V(1) }
end
--[[ldx--
@@ -77,7 +79,7 @@ do
--ldx]]--
function xml.checkns(namespace,url)
- local ns = parser:match(url:lower())
+ local ns = parse:match(url:lower())
if ns and namespace ~= ns then
xml.xmlns[namespace] = ns
end
@@ -95,7 +97,7 @@ do
--ldx]]--
function xml.resolvens(url)
- return parser:match(url:lower()) or ""
+ return parse:match(url:lower()) or ""
end
--[[ldx--
@@ -146,11 +148,15 @@ do
local mt = { __tostring = xml.text }
+ function xml.check_error(top,toclose)
+ return ""
+ end
+
local function add_attribute(namespace,tag,value)
if tag == "xmlns" then
xmlns[#xmlns+1] = xml.resolvens(value)
at[tag] = value
- elseif ns == "xmlns" then
+ elseif namespace == "xmlns" then
xml.checkns(tag,value)
at["xmlns:" .. tag] = value
else
@@ -162,7 +168,7 @@ do
dt[#dt+1] = spacing
end
local resolved = (namespace == "" and xmlns[#xmlns]) or nsremap[namespace] or namespace
- top = { ns=namespace or "", nr=resolved, tg=tag, at=at, dt={}, __p__ = stack[#stack] }
+ top = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = stack[#stack] }
setmetatable(top, mt)
dt = top.dt
stack[#stack+1] = top
@@ -175,9 +181,9 @@ do
local toclose = remove(stack)
top = stack[#stack]
if #stack < 1 then
- errorstr = string.format("nothing to close with %s", tag)
+ errorstr = string.format("nothing to close with %s %s", tag, xml.check_error(top,toclose) or "")
elseif toclose.tg ~= tag then -- no namespace check
- errorstr = string.format("unable to close %s with %s", toclose.tg, tag)
+ errorstr = string.format("unable to close %s with %s %s", toclose.tg, tag, xml.check_error(top,toclose) or "")
end
dt = top.dt
dt[#dt+1] = toclose
@@ -193,7 +199,7 @@ do
top = stack[#stack]
setmetatable(top, mt)
dt = top.dt
- dt[#dt+1] = { ns=namespace or "", nr=resolved, tg=tag, at=at, dt={}, __p__ = top }
+ dt[#dt+1] = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top }
at = { }
if at.xmlns then
remove(xmlns)
@@ -282,14 +288,13 @@ do
-- text + comment + emptyelement + cdata + instruction + lpeg.V("parent"), -- 5.8
-- text + lpeg.V("parent") + emptyelement + comment + cdata + instruction, -- 5.5
-
local grammar = lpeg.P { "preamble",
preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * lpeg.V("parent") * trailer,
parent = beginelement * lpeg.V("children")^0 * endelement,
children = text + lpeg.V("parent") + emptyelement + comment + cdata + instruction,
}
- function xml.convert(data, no_root) -- no collapse any more
+ function xml.convert(data, no_root)
stack, top, at, xmlns, errorstr, result = {}, {}, {}, {}, nil, nil
stack[#stack+1] = top
top.dt = { }
@@ -300,7 +305,7 @@ do
errorstr = "invalid xml file"
end
if errorstr then
- result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={} } } }
+ result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={}, er = true } }, error = true }
setmetatable(stack, mt)
if xml.error_handler then xml.error_handler("load",errorstr) end
else
@@ -324,6 +329,10 @@ do
function. Maybe it will go away (when not used).
--ldx]]--
+ function xml.is_valid(root)
+ return root and root.dt and root.dt[1] and type(root.dt[1]) == "table" and not root.dt[1].er
+ end
+
function xml.package(tag,attributes,data)
local ns, tg = tag:match("^(.-):?([^:]+)$")
local t = { ns = ns, tg = tg, dt = data or "", at = attributes or {} }
@@ -331,6 +340,10 @@ do
return t
end
+ function xml.is_valid(root)
+ return root and not root.error
+ end
+
xml.error_handler = (logs and logs.report) or print
end
@@ -343,16 +356,18 @@ a filename or a file handle.
function xml.load(filename)
if type(filename) == "string" then
- local root, f = { }, io.open(filename,'r')
+ local f = io.open(filename,'r')
if f then
- root = xml.convert(f:read("*all"))
+ local root = xml.convert(f:read("*all"))
f:close()
+ return root
else
- -- if we want an error: root = xml.convert("")
+ return xml.convert("")
end
- return root -- no nil but an empty table if it fails
- else
+ elseif filename then -- filehandle
return xml.convert(filename:read("*all"))
+ else
+ return xml.convert("")
end
end
@@ -494,10 +509,10 @@ do
else
if ats then
-- handle(format("<%s:%s %s/>",ens,etg,table.concat(ats," ")))
- handle("<%" .. ens .. ":" .. etg .. table.concat(ats," ") .. "/>")
+ handle("<" .. ens .. ":" .. etg .. table.concat(ats," ") .. "/>")
else
-- handle(format("<%s:%s/>",ens,etg))
- handle("<%" .. ens .. ":" .. "/>")
+ handle("<" .. ens .. ":" .. "/>")
end
end
else
@@ -706,6 +721,8 @@ do
str = str:gsub("@([a-zA-Z%-_]+)", "(a['%1'] or '')")
str = str:gsub("position%(%)", "i")
str = str:gsub("text%(%)", "t")
+ str = str:gsub("!=", "~=")
+ str = str:gsub("([^=!~<>])=([^=!~<>])", "%1==%2")
str = str:gsub("([a-zA-Z%-_]+)%(", "functions.%1(")
return str, loadstring(string.format("return function(functions,i,a,t) return %s end", str))()
end
@@ -730,7 +747,7 @@ do
local bar = lpeg.P('|')
local hat = lpeg.P('^')
local valid = lpeg.R('az', 'AZ', '09') + lpeg.S('_-')
- local name_yes = lpeg.C(valid^1) * colon * lpeg.C(valid^1)
+ local name_yes = lpeg.C(valid^1) * colon * lpeg.C(valid^1 + star) -- permits ns:*
local name_nop = lpeg.C(lpeg.P(true)) * lpeg.C(valid^1)
local name = name_yes + name_nop
local number = lpeg.C((lpeg.S('+-')^0 * lpeg.R('09')^1)) / tonumber
@@ -851,8 +868,10 @@ do
-- root
return false
end
- elseif #map == 2 and m == 12 and map[2][1] == 20 then
- return { { 29, map[2][2], map[2][3] } }
+ elseif #map == 2 and m == 12 and map[2][1] == 20 then
+ -- return { { 29, map[2][2], map[2][3], map[2][4], map[2][5] } }
+ map[2][1] = 29
+ return { map[2] }
end
if m ~= 11 and m ~= 12 and m ~= 13 and m ~= 14 and m ~= 15 and m ~= 16 then
table.insert(map, 1, { 16 })
@@ -987,8 +1006,10 @@ do
local rootdt = root.dt
for k=1,#rootdt do
local e = rootdt[k]
- local ns, tg = e.rn or e.ns, e.tg
- if ns == action[2] and tg == action[3] then
+ local ns, tg = (e.rn or e.ns), e.tg
+ local matched = ns == action[3] and tg == action[4]
+ if not action[2] then matched = not matched end
+ if matched then
if handle(root,rootdt,k) then return false end
end
end
@@ -1001,7 +1022,8 @@ do
end
else
if (command == 16 or command == 12) and index == 1 then -- initial
- wildcard = true
+--~ wildcard = true
+ wildcard = command == 16 -- ok?
index = index + 1
action = pattern[index]
command = action and action[1] or 0 -- something is wrong
@@ -1032,7 +1054,8 @@ do
if tg then
idx = idx + 1
if command == 30 then
- local matched = ns == action[3] and tg == action[4]
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
if not action[2] then matched = not matched end
if matched then
n = n + dn
@@ -1050,20 +1073,23 @@ do
else
local matched, multiple = false, false
if command == 20 then -- match
- matched = ns == action[2] and tg == action[3]
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
if not action[2] then matched = not matched end
elseif command == 21 then -- match one of
multiple = true
- for i=2,#action,2 do
+ for i=3,#action,2 do
if ns == action[i] and tg == action[i+1] then matched = true break end
end
if not action[2] then matched = not matched end
elseif command == 22 then -- eq
- matched = ns == action[3] and tg == action[4]
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
if not action[2] then matched = not matched end
matched = matched and e.at[action[6]] == action[7]
elseif command == 23 then -- ne
- matched = ns == action[3] and tg == action[4]
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
if not action[2] then matched = not matched end
 matched = matched and e.at[action[6]] ~= action[7]
elseif command == 24 then -- one of eq
@@ -1081,18 +1107,20 @@ do
if not action[2] then matched = not matched end
matched = matched and e.at[action[#action-1]] ~= action[#action]
elseif command == 27 then -- has attribute
- local ans = action[3]
- matched = ns == action[3] and tg == action[4]
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
if not action[2] then matched = not matched end
matched = matched and e.at[action[5]]
elseif command == 28 then -- has value
local edt = e.dt
- matched = ns == action[3] and tg == action[4]
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
if not action[2] then matched = not matched end
matched = matched and edt and edt[1] == action[5]
elseif command == 31 then
local edt = e.dt
- matched = ns == action[3] and tg == action[4]
+ local tg_a = action[4]
+ if tg == tg_a then matched = ns == action[3] elseif tg_a == '*' then matched, multiple = ns == action[3], true else matched = false end
if not action[2] then matched = not matched end
if matched then
matched = action[6](functions,idx,e.at,edt[1])
@@ -1537,28 +1565,33 @@ do
end
end
- function xml.include(xmldata,element,attribute,pathlist,collapse)
- element = element or 'ctx:include'
- attribute = attribute or 'name'
- pathlist = pathlist or { '.' }
- -- todo, check op ri
+ function xml.include(xmldata,pattern,attribute,recursive,findfile)
+ -- parse="text" (default: xml), encoding="" (todo)
+ pattern = pattern or 'include'
+ attribute = attribute or 'href'
local function include(r,d,k)
- local ek = d[k]
- local name = (ek.at and ek.at[attribute]) or ""
- if name ~= "" then
- -- maybe file lookup in tree
- local fullname
- for _, path in ipairs(pathlist) do
- if path == '.' then
- fullname = name
- else
- fullname = file.join(path,name)
- end
- local f = io.open(fullname)
+ local ek, name = d[k], nil
+ if ek.at then
+ for a in attribute:gmatch("([^|]+)") do
+ name = ek.at[a]
+ if name then break end
+ end
+ end
+ if name then
+ name = (findfile and findfile(name)) or name
+ if name ~= "" then
+ local f = io.open(name)
if f then
- xml.assign(d,k,xml.load(f,collapse))
+ if ek.at["parse"] == "text" then -- for the moment hard coded
+ d[k] = xml.escaped(f:read("*all"))
+ else
+ local xi = xml.load(f)
+ if recursive then
+ xml.include(xi,pattern,attribute,recursive,findfile)
+ end
+ xml.assign(d,k,xi)
+ end
f:close()
- break
else
xml.empty(d,k)
end
@@ -1567,7 +1600,7 @@ do
xml.empty(d,k)
end
end
- while xml.each_element(xmldata, element, include) do end
+ xml.each_element(xmldata, pattern, include)
end
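+
+ -- usage sketch (not part of the patch); the tree, element name and
+ -- resolver below are made up:
+ --
+ --~ xml.include(x,"include","href|name",true,function(name) return name end)
+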
function xml.strip_whitespace(root, pattern)
@@ -1635,6 +1668,20 @@ do
end)
end
+ function xml.filters.found(root,pattern,check_content)
+ local found = false
+ traverse(root, lpath(pattern), function(r,d,k)
+ if check_content then
+ local dk = d and d[k]
+ found = dk and dk.dt and next(dk.dt) and true
+ else
+ found = true
+ end
+ return true
+ end)
+ return found
+ end
+
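+ -- usage sketch (not part of the patch); the tree and path are made up:
+ --
+ --~ local x = xml.convert("<a><b>oeps</b></a>")
+ --~ print(xml.filters.found(x,"/a/b",true))
+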
end
--[[ldx--
@@ -1648,6 +1695,7 @@ xml.index = xml.filters.index
xml.position = xml.filters.index
xml.first = xml.filters.first
xml.last = xml.filters.last
+xml.found = xml.filters.found
xml.each = xml.each_element
xml.process = xml.process_element
@@ -1696,12 +1744,46 @@ function xml.serialize_path(root,lpath,handle)
xml.serialize(dk,handle)
end
-xml.escapes = { ['&'] = '&amp;', ['<'] = '&lt;', ['>'] = '&gt;', ['"'] = '&quot;' }
-xml.unescapes = { } for k,v in pairs(xml.escapes) do xml.unescapes[v] = k end
+--~ xml.escapes = { ['&'] = '&amp;', ['<'] = '&lt;', ['>'] = '&gt;', ['"'] = '&quot;' }
+--~ xml.unescapes = { } for k,v in pairs(xml.escapes) do xml.unescapes[v] = k end
+
+--~ function xml.escaped (str) return str:gsub("(.)" , xml.escapes ) end
+--~ function xml.unescaped(str) return str:gsub("(&.-;)", xml.unescapes) end
+--~ function xml.cleansed (str) return str:gsub("<.->" , '' ) end -- "%b<>"
+
+do
+
+ -- 100 * 2500 * "oeps< oeps> oeps&" : gsub:lpeg|lpeg|lpeg
+ --
+ -- 1021:0335:0287:0247
+
+ -- 10 * 1000 * "oeps< oeps> oeps& asfjhalskfjh alskfjh alskfjh alskfjh ;al J;LSFDJ"
+ --
+ -- 1559:0257:0288:0190 (last one suggested by roberto)
+
+ -- escaped = lpeg.Cs((lpeg.S("<&>") / xml.escapes + 1)^0)
+ -- escaped = lpeg.Cs((lpeg.S("<")/"&lt;" + lpeg.S(">")/"&gt;" + lpeg.S("&")/"&amp;" + 1)^0)
+ local normal = (1 - lpeg.S("<&>"))^0
+ local special = lpeg.P("<")/"&lt;" + lpeg.P(">")/"&gt;" + lpeg.P("&")/"&amp;"
+ local escaped = lpeg.Cs(normal * (special * normal)^0)
+
+ -- 100 * 1000 * "oeps< oeps> oeps&" : gsub:lpeg == 0153:0280:0151:0080 (last one by roberto)
+
+ -- unescaped = lpeg.Cs((lpeg.P("&lt;")/"<" + lpeg.P("&gt;")/">" + lpeg.P("&amp;")/"&" + 1)^0)
+ -- unescaped = lpeg.Cs((((lpeg.P("&")/"") * (lpeg.P("lt")/"<" + lpeg.P("gt")/">" + lpeg.P("amp")/"&") * (lpeg.P(";")/"")) + 1)^0)
+ local normal = (1 - lpeg.S"&")^0
+ local special = lpeg.P("&lt;")/"<" + lpeg.P("&gt;")/">" + lpeg.P("&amp;")/"&"
+ local unescaped = lpeg.Cs(normal * (special * normal)^0)
-function xml.escaped (str) return str:gsub("(.)" , xml.escapes ) end
-function xml.unescaped(str) return str:gsub("(&.-;)", xml.unescapes) end
-function xml.cleansed (str) return str:gsub("<.->" , '' ) end -- "%b<>"
+ -- 100 * 5000 * "oeps oeps oeps " : gsub:lpeg == 623:501 msec (short tags, less difference)
+
+ local cleansed = lpeg.Cs(((lpeg.P("<") * (1-lpeg.P(">"))^0 * lpeg.P(">"))/"" + 1)^0)
+
+ function xml.escaped (str) return escaped :match(str) end
+ function xml.unescaped(str) return unescaped:match(str) end
+ function xml.cleansed (str) return cleansed :match(str) end
+
+end
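+
+-- usage sketch (illustration only):
+--
+--~ print(xml.escaped("a < b & c")) -- a &lt; b &amp; c
+--~ print(xml.unescaped("a &lt; b &amp; c")) -- a < b & c
+--~ print(xml.cleansed("a <b>c</b> d")) -- a c d
+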
function xml.join(t,separator,lastseparator)
if #t > 0 then
@@ -1806,3 +1888,10 @@ end end
--~ xml.xshow(xml.first(x,"b[@n=='03' or @n=='08']"))
--~ xml.xshow(xml.all (x,"b[number(@n)>2 and number(@n)<6]"))
--~ xml.xshow(xml.first(x,"b[find(text(),'ALSO')]"))
+
+--~ str = [[
+--~
+--~
+--~ my secret
+--~
+--~ ]]
diff --git a/tex/context/base/lang-ini.lua b/tex/context/base/lang-ini.lua
index fad3b22ec..f5091fbb2 100644
--- a/tex/context/base/lang-ini.lua
+++ b/tex/context/base/lang-ini.lua
@@ -1,3 +1,8 @@
+
+--~ lang:hyphenation(string)
+--~ string = lang:hyphenation()
+--~ lang:clear_hyphenation()
+
if not modules then modules = { } end modules ['lang-ini'] = {
version = 1.001,
comment = "companion to lang-ini.tex",
@@ -6,19 +11,46 @@ if not modules then modules = { } end modules ['lang-ini'] = {
license = "see context related readme files"
}
+if lang.use_new then lang.use_new(true) end
+
languages = languages or {}
languages.version = 1.009
-
-languages.hyphenation = languages.hyphenation or {}
-languages.hyphenation.data = languages.hyphenation.data or { }
+languages.hyphenation = languages.hyphenation or { }
+languages.hyphenation.data = languages.hyphenation.data or { }
do
-- we can consider hiding data (faster access too)
- local function filter(filename,what)
- local data = io.loaddata(input.find_file(texmf.instance,filename))
- local start, stop = data:find(string.format("\\%s%%s*(%%b{})",what or "patterns"))
- return (start and stop and data:sub(start+1,stop-1)) or ""
+ --~ local function filter(filename,what)
+ --~ local data = io.loaddata(input.find_file(texmf.instance,filename))
+ --~ local data = data:match(string.format("\\%s%%s*(%%b{})",what or "patterns"))
+ --~ return data:match("{%s*(.-)%s*}") or ""
+ --~ end
+
+ -- loading the 26 languages that we normally load in mkiv, the string based variant
+ -- takes .84 seconds (probably due to the sub's) while the lpeg variant takes .78
+ -- seconds
+
+ local leftbrace = lpeg.P("{")
+ local rightbrace = lpeg.P("}")
+ local spaces = lpeg.S(" \r\n\t\f")
+ local spacing = spaces^0
+ local validchar = 1-(spaces+rightbrace+leftbrace)
+ local validword = validchar^1
+ local content = spacing * leftbrace * spacing * lpeg.C((spacing * validword)^0) * spacing * rightbrace * lpeg.P(true)
+
+ local command = lpeg.P("\\patterns")
+ local parser = (1-command)^0 * command * content
+
+ local function filterpatterns(filename)
+ return parser:match(io.loaddata(input.find_file(texmf.instance,filename)) or "")
+ end
+
+ local command = lpeg.P("\\hyphenation")
+ local parser = (1-command)^0 * command * content
+
+ local function filterexceptions(filename)
+ return parser:match(io.loaddata(input.find_file(texmf.instance,filename)) or "")
end
local function record(tag)
@@ -32,40 +64,46 @@ do
languages.hyphenation.record = record
- function languages.hyphenation.number(tag)
+ function languages.hyphenation.define(tag)
local data = record(tag)
return data:id()
end
- function languages.hyphenation.load(tag, patterns, exceptions)
+ function languages.hyphenation.number(tag)
+ local d = languages.hyphenation.data[tag]
+ return (d and d:id()) or 0
+ end
+
+ function languages.hyphenation.load(tag, filename, filter, target)
input.starttiming(languages)
local data = record(tag)
- patterns = (patterns and input.find_file(texmf.instance,patterns )) or ""
- exceptions = (exceptions and input.find_file(texmf.instance,exceptions)) or ""
- if patterns ~= "" then
- data:patterns(filter(patterns,"patterns"))
- end
- if exceptions ~= "" then
- data:exceptions(string.split(filter(exceptions,"hyphenation"),"%s+"))
- -- local t = { }
- -- for s in string.gmatch(filter(exceptions,"hyphenation"), "(%S+)") do
- -- t[#t+1] = s
- -- end
- -- print(tag,#t)
- -- data:exceptions(t)
+ filename = (filename and filename ~= "" and input.find_file(texmf.instance,filename)) or ""
+ local ok = filename ~= ""
+ if ok then
+ lang[target](data,filter(filename))
+ else
+ lang[target](data,"")
end
languages.hyphenation.data[tag] = data
input.stoptiming(languages)
+ return ok
+ end
+
+ function languages.hyphenation.loadpatterns(tag, patterns)
+ return languages.hyphenation.load(tag, patterns, filterpatterns, "patterns")
+ end
+
+ function languages.hyphenation.loadexceptions(tag, exceptions)
+ return languages.hyphenation.load(tag, exceptions, filterexceptions, "hyphenation")
end
function languages.hyphenation.exceptions(tag, ...)
local data = record(tag)
- data:exceptions(...)
+ data:hyphenation(...)
end
function languages.hyphenation.hyphenate(tag, str)
- local data = record(tag)
- return data:hyphenate(str)
+ return lang.hyphenate(record(tag), str)
end
function languages.hyphenation.lefthyphenmin(tag, value)
@@ -79,250 +117,231 @@ do
return data:righthyphenmin()
end
- function languages.n()
+ function languages.hyphenation.n()
return table.count(languages.hyphenation.data)
end
end
--- beware, the collowing code has to be adapted, and was used in
--- experiments with loading lists of words; if we keep supporting
--- this, i will add a namespace; this will happen when the hyphenation
--- code is in place
-
-languages.dictionary = languages.dictionary or {}
-languages.dictionary.data = languages.dictionary.data or { }
-languages.dictionary.template = "words-%s.txt"
-languages.dictionary.patterns = languages.dictionary.patterns or { }
-
--- maybe not in dictionary namespace
-
-languages.dictionary.current = nil
-languages.dictionary.number = nil
-languages.dictionary.attribute = nil
-
-function languages.dictionary.set(attribute,number,name)
- if not languages.dictionary.patterns[number] then
- input.start_timing(languages)
- local fullname = string.format(languages.dictionary.template,name)
- local foundname = input.find_file(texmf.instance,fullname,'other text file')
- if foundname and foundname ~= "" then
- -- texio.write_nl(string.format("loading patterns for language %s as %s from %s",name,number,foundname))
- languages.dictionary.patterns[number] = tex.load_dict(foundname) or { }
+do
+
+ -- we can speed this one up with locals if needed
+
+ local function tolang(what)
+ if type(what) == "number" then
+ return languages.hyphenation.data[languages.numbers[what]]
+ elseif type(what) == "string" then
+ return languages.hyphenation.data[what]
else
- languages.dictionary.patterns[number] = { }
+ return what
end
- input.stop_timing(languages)
end
- languages.dictionary.attribute = attribute
- languages.dictionary.number = number
- languages.dictionary.current = languages.dictionary.patterns[number]
+
+ function languages.prehyphenchar(what)
+ return lang.prehyphenchar(tolang(what))
+ end
+ function languages.posthyphenchar(what)
+ return lang.posthyphenchar(tolang(what))
+ end
+
+ languages.tolang = tolang
+
end
-function languages.dictionary.add(word,pattern)
- if languages.dictionary.current and word and pattern then
- languages.dictionary.current[word] = pattern
+languages.registered = languages.registered or { }
+languages.associated = languages.associated or { }
+languages.numbers = languages.numbers or { }
+
+input.storage.register(false,"languages/registered",languages.registered,"languages.registered")
+input.storage.register(false,"languages/associated",languages.associated,"languages.associated")
+
+function languages.register(tag,parent,patterns,exceptions)
+ parent = parent or tag
+ languages.registered[tag] = {
+ parent = parent,
+ patterns = patterns or string.format("lang-%s.pat",parent),
+ exceptions = exceptions or string.format("lang-%s.hyp",parent),
+ loaded = false,
+ number = 0,
+ }
+end
+
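+-- usage sketch (not part of the patch); the tags below are made up and only
+-- illustrate the call convention:
+--
+--~ languages.register('nl') -- will look for lang-nl.pat and lang-nl.hyp
+--~ languages.register('nl-be','nl') -- shares the parent's pattern files
+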
+function languages.associate(tag,script,language)
+ languages.associated[tag] = { script, language }
+end
+
+function languages.association(tag)
+ if type(tag) == "number" then
+ tag = languages.numbers[tag]
+ end
+ local lat = tag and languages.associated[tag]
+ if lat then
+ return lat[1], lat[2]
+ else
+ return nil, nil
end
end
-function languages.dictionary.remove(word)
- if languages.dictionary.current and word then
- languages.dictionary.current[word] = nil
+function languages.loadable(tag)
+ local l = languages.registered[tag]
+ if l and l.patterns and input.find_file(texmf.instance,l.patterns) then
+ return true
+ else
+ return false
end
end
-function languages.dictionary.hyphenate(str)
- if languages.dictionary.current then
- local result = languages.dictionary.current[str]
- if result then
- return result
- else
- -- todo: be clever
+languages.share = false -- we don't share language numbers
+
+function languages.enable(tags)
+ -- beware: we cannot set tex.language, but need tex.normallanguage
+ for i=1,#tags do
+ local tag = tags[i]
+ local l = languages.registered[tag]
+ if l then
+ if not l.loaded then
+ local tag = l.parent
+ local number = languages.hyphenation.number(tag)
+ if languages.share and number > 0 then
+ l.number = number
+ else
+ -- we assume the same filenames
+ l.number = languages.hyphenation.define(tag)
+ languages.hyphenation.loadpatterns(tag,l.patterns)
+ languages.hyphenation.loadexceptions(tag,l.exceptions)
+ languages.numbers[l.number] = tag
+ end
+ l.loaded = true
+ end
+ if l.number > 0 then
+ return l.number
+ end
end
end
- return str
+ return 0
end
-function languages.dictionary.found(number, str)
- local patterns = languages.dictionary.patterns[number]
- return patterns and patterns[str]
+-- e['implementer']= 'imple{m}{-}{-}menter'
+-- e['manual'] = 'man{}{}{}'
+-- e['as'] = 'a-s'
+-- e['user-friendly'] = 'user=friend-ly'
+-- e['exceptionally-friendly'] = 'excep-tionally=friend-ly'
+
+function languages.hyphenation.loadwords(tag, filename)
+ local id = languages.hyphenation.number(tag)
+ if id > 0 then
+ local l = lang.new(id)
+ input.starttiming(languages)
+ local data = io.loaddata(filename) or ""
+ l:hyphenation(data)
+ input.stoptiming(languages)
+ end
end
-do
+languages.hyphenation.define ("zerolanguage")
+languages.hyphenation.loadpatterns ("zerolanguage") -- else bug
+languages.hyphenation.loadexceptions("zerolanguage") -- else bug
- local discnode = node.new('disc')
+languages.logger = languages.logger or { }
- discnode.pre = node.new('glyph')
- discnode.pre.subtype = 0
- discnode.pre.char = 45 -- will be configurable
- discnode.pre.font = 0
+function languages.logger.report()
+ local result = {}
+ for _, tag in ipairs(table.sortedkeys(languages.registered)) do
+ local l = languages.registered[tag]
+ if l.loaded then
+ local p = (l.patterns and "pat") or '-'
+ local e = (l.exceptions and "exc") or '-'
+ result[#result+1] = string.format("%s:%s:%s:%s:%s", tag, l.parent, p, e, l.number)
+ end
+ end
+ return (#result > 0 and table.concat(result," ")) or "none"
+end
- local glyph, disc, kern = node.id('glyph'), node.id('disc'), node.id('kern')
- local bynode = node.traverse
- local bychar = string.utfcharacters
+languages.words = languages.words or {}
+languages.words.data = languages.words.data or {}
+languages.words.enable = false
+languages.words.threshold = 4
- local function reconstruct(prev,str,fnt)
- local done = false
- if #str < 4 then
- -- too short
- else
- local wrd = languages.dictionary.hyphenate(str)
- if wrd == str then
- -- not found
- else
- local pre, post, after, comp = nil, nil, false, nil
- for chr in bychar(wrd) do
- if prev then
- if not comp and prev.next and prev.next.subtype > 0 then
- comp = prev.next.components
- pre = node.copy(comp)
- comp = comp.next
- post, after = nil, false
- elseif chr == '-' then
- if not comp then
- done = true
- local n = node.copy(discnode)
- n.pre.font = fnt.font
- n.pre.attr = fnt.attr
- if pre then
- pre.next = n.pre
- n.pre = pre
- pre, pos, after = nil, nil, false
- end
- n.next = prev.next
- prev.next = n
- prev = n
- else
- after = true
- end
- elseif comp then
- local g = node.copy(comp)
- comp = comp.next
- if after then
- if post then post.next = g else post = g end
- else
- if pre then pre.next = g else pre = g end
- end
- if not comp then
- done = true
- local n = node.copy(discnode)
- n.pre.font = fnt.font
- n.pre.attr = fnt.attr
- pre.next = n.pre
- n.pre = pre
- n.post = post
- n.replace = 1
- n.next = prev.next
- prev.next = n
- prev = n
- pre, pos, after = nil, nil, false
- prev = prev.next -- hm, now we get error 1
- end
- else
- prev = prev.next
- end
- else
- -- print("ERROR 1")
- end
- end
- end
+languages.words.colors = {
+ ["known"] = "green",
+ ["unknown"] = "red",
+}
+
+do
+
+ spacing = lpeg.S(" \n\r\t")
+ markup = lpeg.S("-=")
+ lbrace = lpeg.P("{")
+ rbrace = lpeg.P("}")
+ disc = (lbrace * (1-rbrace)^0 * rbrace)^1 -- or just 3 times, time this
+ word = lpeg.Cs((markup/"" + disc/"" + (1-spacing))^1)
+
+ function languages.words.load(tag, filename)
+ local filename = input.find_file(texmf.instance,filename,'other text file') or ""
+ if filename ~= "" then
+ input.starttiming(languages)
+ local data = io.loaddata(filename) or ""
+ local words = languages.words.data[tag] or {}
+ parser = (spacing + word/function(s) words[s] = true end)^0
+ parser:match(data)
+ languages.words.data[tag] = words
+ input.stoptiming(languages)
end
- return done
end
- function nodes.hyphenate_words(head) -- we forget about the very first, no head stuff here
- local cd = characters.data
- local uc = utf.char
- local n, p = head, nil
- local done, prev, str, fnt, lan = false, false, "", nil, nil
- local currentlanguage = languages.dictionary.current
- local att, patterns = languages.dictionary.attribute, languages.dictionary.patterns
- local function action() -- maybe inline
- if reconstruct(prev,str,fnt) then
- done = true
- end
- str, prev = "", false
- end
- while n do
- local id = n.id
- if id == glyph then
- local l = node.has_attribute(n,att)
- if l then
- if l ~= lan then
- if prev then action() end
- lan = l
- languages.dictionary.current = patterns[lan]
- end
- elseif prev then
- action()
- end
- if not languages.dictionary.current then
- -- skip
- elseif n.subtype > 0 then
- if not prev then
- prev, fnt = p, n
- end
- for g in bynode(n.components) do
- str = str .. uc(g.char)
- end
- else
- local code = n.char
- if cd[code].lccode then
- if not prev then
- prev, fnt = p, n
- end
- str = str .. uc(code)
- elseif prev then
- action()
- end
- end
- elseif id == kern and n.subtype == 0 and p then
- p.next = n.next
- node.free(p,n)
- n = p
- elseif prev then
- action()
- end
- p = n
- n = n.next
- end
- if prev then
- action()
- end
- languages.dictionary.current = currentlanguage
- return head
+end
+
+function languages.words.found(id, str)
+ local tag = languages.numbers[id]
+ if tag then
+ local data = languages.words.data[tag]
+ return data and (data[str] or data[str:lower()])
+ else
+ return false
end
+end
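
As an aside, here is a minimal standalone sketch of how the word parser and the case-insensitive lookup above behave; it only assumes plain Lua plus the lpeg library, and the sample word list is invented for the illustration:

    -- standalone sketch of the wordlist parser (sample data is made up)
    local lpeg = require("lpeg")

    local spacing = lpeg.S(" \n\r\t")
    local markup  = lpeg.S("-=")
    local lbrace  = lpeg.P("{")
    local rbrace  = lpeg.P("}")
    local disc    = (lbrace * (1-rbrace)^0 * rbrace)^1
    local word    = lpeg.Cs((markup/"" + disc/"" + (1-spacing))^1)

    local words  = { }
    local parser = (spacing + word/function(s) words[s] = true end)^0

    parser:match("con-text lua{}tex hyphenation")  -- markup and disc braces are stripped

    print(words["context"], words["luatex"])               -- true   true
    print(words["Context"] or words[("Context"):lower()])  -- lowercase fallback, as in words.found

In the real loader the data comes from input.find_file plus io.loaddata and the table ends up in languages.words.data[tag]; only the parsing step is shown here.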
+
+-- The following code is an adaptation of experimental code for
+-- hyphenating and spell checking.
+
+do
- function nodes.mark_words(head,attribute,found)
+ local glyph, disc, kern = node.id('glyph'), node.id('disc'), node.id('kern')
+
+ local bynode = node.traverse
+ local bychar = string.utfcharacters
+
+ function mark_words(head,found) -- can be optimized
local cd = characters.data
local uc = utf.char
- local current, start, str, att, n = head, nil, "", nil, 0
+ local current, start, str, language, n = head, nil, "", nil, 0
local function action()
- local f = found(att,str)
- if f then
- for i=1,n do
- f(start)
- start = start.next
+ if #str > 0 then
+ local f = found(language,str)
+ if f then
+ for i=1,n do
+ f(start)
+ start = start.next
+ end
end
end
str, start, n = "", nil, 0
end
- local has_attribute = node.has_attribute
while current do
local id = current.id
if id == glyph then
- local a = has_attribute(current,attribute)
+ local a = current.lang
if a then
- if a ~= att then
+ if a ~= language then
if start then
action()
end
- att = a
+ language = a
end
elseif start then
action()
- att = a
+ language = a
end
if current.subtype > 0 then
start = start or current
@@ -332,7 +351,7 @@ do
end
else
local code = current.char
- if cd[code].lccode then
+ if cd[code].uccode or cd[code].lccode then
start = start or current
n = n + 1
str = str .. uc(code)
@@ -357,34 +376,65 @@ do
return head
end
- function languages.dictionary.check(head, attribute, yes, nop)
+ languages.words.methods = { }
+ languages.words.method = 1
+
+ languages.words.methods[1] = function(head, attribute, yes, nop)
local set = node.set_attribute
local unset = node.unset_attribute
local wrong, right = false, false
if nop then wrong = function(n) set(n,attribute,nop) end end
if yes then right = function(n) set(n,attribute,yes) end end
for n in node.traverse(head) do
- unset(n,attribute)
+ unset(n,attribute) -- hm
end
- local found = languages.dictionary.found
- nodes.mark_words(head, languages.dictionary.attribute, function(att,str)
- if #str < 4 then
+ local found, done = languages.words.found, false
+ mark_words(head, function(language,str)
+ if #str < languages.words.threshold then
return false
- elseif found(att,str) then
+ elseif found(language,str) then
+ done = true
return right
else
+ done = true
return wrong
end
end)
- nodes.hyphenate_words(head)
- return head
+ return head, done
+ end
+
+ local lw = languages.words
+
+ function languages.words.check(head)
+ if head.next and lw.enable then
+ local color = attributes.numbers['color']
+ local colors = lw.colors
+ local alc = attributes.list[color]
+ return lw.methods[lw.method](head, color, alc[colors.known], alc[colors.unknown])
+ else
+ return head, false
+ end
end
end
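
The division of labour above is: mark_words collects one word per language run and hands the language number plus the string to the found callback, which either returns false (leave the word alone) or a function that is then applied to every glyph node of that word, as methods[1] does with its attribute setters. A hypothetical callback, with the per-node action left abstract, could look like this:

    -- hypothetical found-callback for mark_words (the node action is a stub)
    local function my_found(language, str)
        if #str < languages.words.threshold then
            return false                        -- too short to bother with
        elseif languages.words.found(language, str) then
            return false                        -- known word, leave it alone
        else
            return function(glyph)              -- called once per glyph node of the word
                -- e.g. node.set_attribute(glyph, some_attribute, some_value)
            end
        end
    end

    -- head = mark_words(head, my_found)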
-languages.set = languages.dictionary.set
-languages.add = languages.dictionary.add
-languages.remove = languages.dictionary.remove
-languages.hyphenate = languages.dictionary.hyphenate
-languages.found = languages.dictionary.found
-languages.check = languages.dictionary.check
+-- for the moment we hook it into the attribute handler
+
+--~ languagehacks = { }
+
+--~ function languagehacks.process(namespace,attribute,head)
+--~ return languages.check(head)
+--~ end
+
+--~ chars.plugins.language = {
+--~ namespace = languagehacks,
+--~ processor = languagehacks.process
+--~ }
+
+-- must happen at the tex end
+
+languages.associate('en','latn','eng')
+languages.associate('uk','latn','eng')
+languages.associate('nl','latn','nld')
+languages.associate('de','latn','deu')
+languages.associate('fr','latn','fra')
diff --git a/tex/context/base/lang-ini.mkii b/tex/context/base/lang-ini.mkii
index a0f6f3881..9fa912acf 100644
--- a/tex/context/base/lang-ini.mkii
+++ b/tex/context/base/lang-ini.mkii
@@ -11,4 +11,135 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-% mkiv code is experimental
+\unprotect
+
+\def\mkdoloadpatterns#1#2%
+ {\expanded{\getcommacommandsize[\getvalue{\??la#2\s!encoding}]}%
+ \ifnum\commalistsize>0
+ %\message{[nofpatterns #2: \commalistsize/\getvalue{\??la#2\s!encoding}]}%
+ \dorecurse\commalistsize
+ {\expanded{\getfromcommacommand[\getvalue{\??la#2\s!encoding}][\recurselevel]}%
+ \let\patternencoding\commalistelement
+ \expanded{\getfromcommacommand[\getvalue{\??la#2\s!mapping }][\recurselevel]}%
+ \let\patternmapping \commalistelement
+ %\message{[patterns: #1/#2/\patternencoding/\patternmapping]}%
+ \dodoloadpatterns{#1}{#2}\patternencoding\patternmapping}%
+ \else
+ %\message{[patterns: #1/#2]}%
+ \dodoloadpatterns{#1}{#2}{}{}%
+ \fi}
+
+\def\setuphyppatencoding
+ {\pathypsettings
+ \enableregime[utf]}
+
+\def\dodoloadpatterns#1#2#3#4% beware, loaded language also incremented
+ {\normallanguage\loadedlanguage % when not really needed
+ \bgroup
+ \let\synchronizepatterns\relax % needed?
+ \let\enabledmapping \empty % needed?
+ \doifelsenothing{#3}{\enableencoding[\s!default]}{\enableencoding[#3]}%
+ \doifelsenothing{#4}{\enablemapping [\s!default]}{\enablemapping [#4]}%
+ \setuphyppatencoding
+ \ifundefined{\??la\??la:\currentencoding:\currentmapping:#2}%
+ \let\doshowpatterns\relax
+ \edef\alreadyloadedlanguage
+ {\executeifdefined{\??la\??la:\currentencoding:\currentmapping:\truefilename{\f!languageprefix#2.\f!patternsextension}}\empty}%
+ \edef\alreadyloadedlanguage
+ {\executeifdefined{\??la\??la:\currentencoding:\currentmapping:\f!languageprefix#2.\f!patternsextension}\alreadyloadedlanguage}%
+ \ifx\alreadyloadedlanguage\empty
+ \letgvalue{\??la\??la:\currentencoding:\currentmapping:#2}\loadedlanguage
+ \doifundefined{\??la\??la:\s!default:\s!default:#2}{\letgvalue{\??la\??la:\s!default:\s!default:#2}\loadedlanguage}% fall back
+ \startpatternloading{\truefilename{\f!languageprefix#2.\f!patternsextension}}{#3}{#4}%
+ \readsysfile{\truefilename{\f!languageprefix#2.\f!patternsextension}}
+ {\setxvalue{\??la#1\s!patterns}{#2}%
+ \setxvalue{\??la\??la:\currentencoding:\currentmapping:\truefilename{\f!languageprefix#2.\f!patternsextension}}{\number\loadedlanguage}%
+ \xdef\preloadedpmessage{\preloadedpmessage\doshowpatterns{#2}{\number\normallanguage}{\currentencoding}{\currentmapping}}%
+ \doglobal\addtocommalist{#2}\preloadedpatterns
+ \showmessage\m!linguals1{#2,#1,\loadedlanguage,\currentencoding,\currentmapping}}
+ {\showmessage\m!linguals2{#2,#1,\loadedlanguage,\currentencoding,\currentmapping,\f!languageprefix#2.\f!patternsextension,\truefilename{\f!languageprefix#2.\f!patternsextension}}}%
+ \stoppatternloading
+ \startpatternloading{\truefilename{\f!languageprefix#2.\f!hyphensextension}}{#3}{#4}%
+ \readsysfile{\truefilename{\f!languageprefix#2.\f!hyphensextension}}
+ {\showmessage\m!linguals3{#2,#1,\loadedlanguage,\currentencoding,\currentmapping}}
+ {\showmessage\m!linguals4{#2,#1,\loadedlanguage,\currentencoding,\currentmapping}}%
+ \stoppatternloading
+ \doglobal\increment\loadedlanguage
+ % \stopencoding
+ \else % optimization, introduced 2004.08.24, while sorting out changes in tl
+ \letgvalue{\??la\??la:\currentencoding:\currentmapping:#2}\alreadyloadedlanguage
+ \doifundefined{\??la\??la:\s!default:\s!default:#2}{\letgvalue{\??la\??la:\s!default:\s!default:#2}\loadedlanguage}% fall back
+ \setxvalue{\??la#1\s!patterns}{#2}%
+ \xdef\preloadedpmessage{\preloadedpmessage\doshowpatterns{#2}{[\number\alreadyloadedlanguage]}{\currentencoding}{\currentmapping}}%
+ \doglobal\addtocommalist{#2}\preloadedpatterns
+ \showmessage\m!linguals1{#2,#1,[\alreadyloadedlanguage],\currentencoding,\currentmapping}%
+ \fi
+ \fi
+ \egroup}
+
+%D Since we can only load patterns in ini\TeX, we nil the
+%D loading before dumping (which saves a bit of memory, but
+%D strangely enough not in the format).
+
+\appendtoks
+ \gdef\doloadpatterns{\doglobal\increment\loadedlanguage\gobbletwoarguments}%
+ \globallet\dodoloadpatterns\gobblefourarguments
+\to \everydump
+
+\def\mkdoifpatternselse#1%
+ {\expanded{\doifinsetelse{#1}{\preloadedpatterns}}}
+
+\def\mksetnormallanguage#1#2% current default
+ {% called quite often, so we use \csname
+ % \def\synchronizepatterns{\setnormallanguage
+ % {\csname\??la\currentlanguage\s!patterns\endcsname}}% called often
+ % or even better, pre-expand in an ugly way:
+ \@EA\def\@EA\synchronizepatterns\@EA{\@EA\dosetnormallanguage
+ \csname\??la\currentlanguage\s!patterns\endcsname}%
+ \donefalse
+ \synchronizepatterns
+ \ifdone\else
+ \def\synchronizepatterns{\dosetnormallanguage\currentlanguage}%
+ \synchronizepatterns
+ \ifdone\else
+ \ifx\currentdefaultlanguage\empty\else
+ \@EA\def\@EA\synchronizepatterns\@EA{\@EA\dosetnormallanguage
+ \csname\??la\currentdefaultlanguage\s!patterns\endcsname}%
+ \synchronizepatterns
+ \ifdone\else
+ \dosetnormallanguage\currentdefaultlanguage
+ \synchronizepatterns
+ \fi
+ \fi
+ \fi
+ \fi}
+
+\def\dosetnormallanguage#1% #1 == \cs
+ {\dodosetnormallanguage{:\currentencoding:\currentmapping:}#1{%
+ \dodosetnormallanguage{:\currentencoding:\s!default :}#1{%
+ \dodosetnormallanguage{:\s!default :\currentmapping:}#1{%
+ \dodosetnormallanguage{:\s!default :\s!default :}#1\empty}}}}
+
+\def\dodosetnormallanguage#1#2%
+ {\ifcsname\??la\??la#1#2\endcsname
+ \edef\thenormallanguage{\csname\??la\??la#1#2\endcsname}% can be \chardef
+ \ifx\thenormallanguage\empty
+ \@EAEAEA\firstofoneargument
+ \else
+ \donetrue
+ \@EA\xdef\csname\??la\currentlanguage\s!patterns\endcsname{#2}%
+ \normallanguage\thenormallanguage\relax % \relax is needed for lookahead problems
+ \@EAEAEA\gobbleoneargument
+ \fi
+ \else
+ \@EA\firstofoneargument
+ \fi}
+
+\beginXETEX
+ \def\synchronizepatternswithfont{}
+ \def\doloadpatterns #1#2{\dodoloadpatterns{#1}{#2}\s!default\s!default}
+ \def\setnormallanguage #1{\dosetnormallanguage{:\s!default:\s!default:}#1\empty}
+ \def\setuphyppatencoding {\pathypsettings}
+\endXETEX
+
+\protect \endinput
diff --git a/tex/context/base/lang-ini.mkiv b/tex/context/base/lang-ini.mkiv
index ac87d85e7..4e4c3c81f 100644
--- a/tex/context/base/lang-ini.mkiv
+++ b/tex/context/base/lang-ini.mkiv
@@ -15,14 +15,63 @@
\registerctxluafile{lang-ini}{1.001}
-\def\synchronizepatternswithfont{}
-\def\doloadpatterns #1#2{\dodoloadpatterns{#1}{#2}\s!default\s!default}
-\def\setnormallanguage #1{\dosetnormallanguage{:\s!default:\s!default:}#1\empty}
-\def\setuphyppatencoding {\pathypsettings}
+\let\synchronizepatterns \relax % todo: cleanup
+\let\synchronizepatternswithfont\relax % todo: cleanup
-% temporarily here, awaiting new mechanisms
+\def\mkdoloadpatterns#1#2%
+ {\ctxlua{languages.register(
+ "#1",
+ "#2",
+ "\truefilename{\f!languageprefix#2.\f!patternsextension}",
+ "\truefilename{\f!languageprefix#2.\f!hyphensextension }")
+ }}
-\def\loadpatternfiles#1{\ctxlua{languages.hyphenation.load('#1', 'lang-#1.pat', 'lang-#1.hyp')}}
-\def\hyphenateword #1{\ctxlua{tex.sprint(languages.hyphenation.hyphenate("\currentlanguage",[[#1]]))}}
+\def\mkdoifpatternselse#1%
+ {\ctxlua{cs.testcase(languages.loadable("#1"))}}
+
+\def\mksetnormallanguage#1#2% current default / we can freeze the number here
+ {\normallanguage=\ctxlua{tex.sprint(languages.enable({
+ "\csname\??la#1\s!patterns\endcsname","#1",
+ "\csname\??la#2\s!patterns\endcsname","#2",
+ }))}\relax}
+
+% to be tested
+%
+% \def\mkdosetnormallanguage#1#2% current default
+% {\normallanguage=\ctxlua{tex.sprint(languages.enable({
+% "\csname\??la#1\s!patterns\endcsname","#1",
+% "\csname\??la#2\s!patterns\endcsname","#2",
+% }))}}%
+% \setxvalue{\??la\??la#1#2}{\number\normallanguage}}
+%
+% \def\mksetnormallanguage#1#2% current default / we can freeze the number here
+% {\normallanguage\executeifdefined{\??la\??la#1#2}{\mkdosetnormallanguage{#1}{#2}}}
+
+
+\def\loadspellchecklist
+ {\dodoubleempty\doloadspellchecklist}
+
+% mkiv only -- todo: internationalize command names
+
+% \loadspellchecklist[en][words-en.txt]
+% \loadspellchecklist[nl][words-nl.txt]
+% \setupspellchecking[state=start]
+
+\def\loadspellchecklist[#1][#2]%
+ {\ctxlua{languages.words.load("#1","#2")}}
+
+\def\setupspellchecking
+ {\dosingleargument\dosetupspellchecking}
+
+\def\setupspellchecking[#1]% todo colors
+ {\getparameters[\??wl][#1]%
+ \doifelse\@@wlstate\v!start
+ {\ctxlua{languages.words.enable=true }}
+ {\ctxlua{languages.words.enable=false}}}
+
+\setupspellchecking
+ [\c!state=\v!stop]
+
+\uchyph=1
\protect \endinput
diff --git a/tex/context/base/lang-ini.tex b/tex/context/base/lang-ini.tex
index 6f0352772..bdefd6a1a 100644
--- a/tex/context/base/lang-ini.tex
+++ b/tex/context/base/lang-ini.tex
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D Todo : --language=pl,en,nl : nl incs number of language
+%D This module needs a further cleanup (real split between ii/iv).
%D This module implements the (for the moment still simple)
%D multi||language support of \CONTEXT, which should not be
@@ -157,24 +157,6 @@
\normallanguage\zerocount \def\loadedlanguage{1}
-%D Let's start with setting the lowercase code of quotes, so
-%D that we get proper hyphenation in languages like Dutch,
-%D French and Czech.
-
-% elsewhere: \lccode`\'=`\'
-
-% \def\showlccodes
-% {\currentlanguage:\space
-% \dostepwiserecurse{0}{255}{1}
-% {\ifnum\lccode\recurselevel>0
-% \char\recurselevel:\the\lccode\recurselevel\relax\space
-% \fi}
-% \endgraf}
-
-% \de \hyphenatedword{Works''} \showlccodes
-% \nl \hyphenatedword{Works''} \showlccodes
-% \uk \hyphenatedword{Works''} \showlccodes
-
%D \macros
%D {currentlanguage, setupcurrentlanguage}
%D
@@ -272,8 +254,7 @@
\def\doiflanguageelse#1{\doifdefinedelse{\??la#1\c!state}}
\def\doinstalllanguage[#1][#2]%
- {%\ConvertConstantAfter\doifinstringelse{=}{#2}
- \doifassignmentelse{#2}
+ {\doifassignmentelse{#2}
{\doiflanguageelse{#1}
{\getparameters[\??la#1][#2]}
{\setvalue{\l!prefix!#1}{#1}%
@@ -326,97 +307,11 @@
\let \patternencoding \s!default
\let \patternmapping \s!default
-\def\doloadpatterns#1#2%
- {\expanded{\getcommacommandsize[\getvalue{\??la#2\s!encoding}]}%
- \ifnum\commalistsize>0
- %\message{[nofpatterns #2: \commalistsize/\getvalue{\??la#2\s!encoding}]}%
- \dorecurse\commalistsize
- {\expanded{\getfromcommacommand[\getvalue{\??la#2\s!encoding}][\recurselevel]}%
- \let\patternencoding\commalistelement
- \expanded{\getfromcommacommand[\getvalue{\??la#2\s!mapping }][\recurselevel]}%
- \let\patternmapping \commalistelement
- %\message{[patterns: #1/#2/\patternencoding/\patternmapping]}%
- \dodoloadpatterns{#1}{#2}\patternencoding\patternmapping}%
- \else
- %\message{[patterns: #1/#2]}%
- \dodoloadpatterns{#1}{#2}{}{}%
- \fi}
-
-\def\setuphyppatencoding
- {\pathypsettings
- \enableregime[utf]}
-
-\beginXETEX
- \def\synchronizepatternswithfont{}
- \def\doloadpatterns #1#2{\dodoloadpatterns{#1}{#2}\s!default\s!default}
- \def\setnormallanguage #1{\dosetnormallanguage{:\s!default:\s!default:}#1\empty}
- \def\setuphyppatencoding {\pathypsettings}
-\endXETEX
-
-\beginLUATEX
- \def\synchronizepatternswithfont{}
- \def\doloadpatterns #1#2{\dodoloadpatterns{#1}{#2}\s!default\s!default}
- \def\setnormallanguage #1{\dosetnormallanguage{:\s!default:\s!default:}#1\empty}
- \def\setuphyppatencoding {\pathypsettings}
-\endLUATEX
-
-\def\dodoloadpatterns#1#2#3#4% beware, loaded language also incr
- {\normallanguage\loadedlanguage % when not really needed
- \bgroup
- \let\synchronizepatterns\relax % needed?
- \let\enabledmapping \empty % needed?
- \doifelsenothing{#3}{\enableencoding[\s!default]}{\enableencoding[#3]}%
- \doifelsenothing{#4}{\enablemapping [\s!default]}{\enablemapping [#4]}%
- \setuphyppatencoding
- \ifundefined{\??la\??la:\currentencoding:\currentmapping:#2}%
- \let\doshowpatterns\relax
- \edef\alreadyloadedlanguage
- {\executeifdefined{\??la\??la:\currentencoding:\currentmapping:\truefilename{\f!languageprefix#2.\f!patternsextension}}\empty}%
- \edef\alreadyloadedlanguage
- {\executeifdefined{\??la\??la:\currentencoding:\currentmapping:\f!languageprefix#2.\f!patternsextension}\alreadyloadedlanguage}%
- \ifx\alreadyloadedlanguage\empty
- \letgvalue{\??la\??la:\currentencoding:\currentmapping:#2}\loadedlanguage
- \doifundefined{\??la\??la:\s!default:\s!default:#2}{\letgvalue{\??la\??la:\s!default:\s!default:#2}\loadedlanguage}% fall back
- \startpatternloading{\truefilename{\f!languageprefix#2.\f!patternsextension}}{#3}{#4}%
- \readsysfile{\truefilename{\f!languageprefix#2.\f!patternsextension}}
- {\setxvalue{\??la#1\s!patterns}{#2}%
- \setxvalue{\??la\??la:\currentencoding:\currentmapping:\truefilename{\f!languageprefix#2.\f!patternsextension}}{\number\loadedlanguage}%
- \xdef\preloadedpmessage{\preloadedpmessage\doshowpatterns{#2}{\number\normallanguage}{\currentencoding}{\currentmapping}}%
- \doglobal\addtocommalist{#2}\preloadedpatterns
- \showmessage\m!linguals1{#2,#1,\loadedlanguage,\currentencoding,\currentmapping}}
- {\showmessage\m!linguals2{#2,#1,\loadedlanguage,\currentencoding,\currentmapping,\f!languageprefix#2.\f!patternsextension,\truefilename{\f!languageprefix#2.\f!patternsextension}}}%
- \stoppatternloading
- \startpatternloading{\truefilename{\f!languageprefix#2.\f!hyphensextension}}{#3}{#4}%
- \readsysfile{\truefilename{\f!languageprefix#2.\f!hyphensextension}}
- {\showmessage\m!linguals3{#2,#1,\loadedlanguage,\currentencoding,\currentmapping}}
- {\showmessage\m!linguals4{#2,#1,\loadedlanguage,\currentencoding,\currentmapping}}%
- \stoppatternloading
- \doglobal\increment\loadedlanguage
- % \stopencoding
- \else % optimization, introduced 2004.08.24, while sorting out changes in tl
- \letgvalue{\??la\??la:\currentencoding:\currentmapping:#2}\alreadyloadedlanguage
- \doifundefined{\??la\??la:\s!default:\s!default:#2}{\letgvalue{\??la\??la:\s!default:\s!default:#2}\loadedlanguage}% fall back
- \setxvalue{\??la#1\s!patterns}{#2}%
- \xdef\preloadedpmessage{\preloadedpmessage\doshowpatterns{#2}{[\number\alreadyloadedlanguage]}{\currentencoding}{\currentmapping}}%
- \doglobal\addtocommalist{#2}\preloadedpatterns
- \showmessage\m!linguals1{#2,#1,[\alreadyloadedlanguage],\currentencoding,\currentmapping}%
- \fi
- \fi
- \egroup}
-
-\def\doifpatternselse#1%
- {\expanded{\doifinsetelse{#1}{\preloadedpatterns}}}
+\ifx\mkloadpatterns \undefined \let\mkloadpatterns \gobbletwoarguments \fi
+\ifx\mkdoifpatternselse\undefined \let\mkdoifpatternselse\gobbletwoarguments \fi
-%D Since we can only load patterns in ini\TeX, we nil the
-%D loading before dumping (which saves a bit of memory, but
-%D strangely enough not in the format).
-
-\appendtoks
- \gdef\doloadpatterns{\doglobal\increment\loadedlanguage\gobbletwoarguments}%
- \globallet\dodoloadpatterns\gobblefourarguments
-\to \everydump
-
-\loadmarkfile{lang-ini} % not yet
+\def\doloadpatterns {\mkdoloadpatterns}
+\def\doifpatternselse{\mkdoifpatternselse}
%D \macros
%D {setuplanguage}
@@ -525,45 +420,12 @@
%D We take care of local as well as standardized language
%D switching (fr and fa, de and du, but nl and nl).
-% new, encoding specific patterns
-
\ifx\synchronizepatterns \undefined \let\synchronizepatterns\relax \fi
\ifx\synchronizepatternswithfont\undefined \def\synchronizepatternswithfont{\synchronizepatterns} \fi
-\beginTEX
-
-\def\dosetnormallanguage#1#2%
- {\@EA\ifx\csname\??la\??la#1#2\endcsname\relax
- \@EA\firstofoneargument
- \else\@EA\ifx\csname\??la\??la#1#2\endcsname\empty
- \@EAEAEA\firstofoneargument
- \else
- \donetrue
- \@EA\xdef\csname\??la\currentlanguage\s!patterns\endcsname{#2}%
- \normallanguage\csname\??la\??la#1#2\endcsname\relax % \relax is needed for lookahead problems
- \@EAEAEA\gobbleoneargument
- \fi\fi}
-
-\endTEX
-
-\beginETEX
-
-\def\dosetnormallanguage#1#2%
- {\ifcsname\??la\??la#1#2\endcsname
- \edef\thenormallanguage{\csname\??la\??la#1#2\endcsname}%
- \ifx\thenormallanguage\empty
- \@EAEAEA\firstofoneargument
- \else
- \donetrue
- \@EA\xdef\csname\??la\currentlanguage\s!patterns\endcsname{#2}%
- \normallanguage\thenormallanguage\relax % \relax is needed for lookahead problems
- \@EAEAEA\gobbleoneargument
- \fi
- \else
- \@EA\firstofoneargument
- \fi}
+\ifx\mksetnormallanguage\undefined \let\mksetnormallanguage\gobbletwoarguments \fi
-\endETEX
+\def\setnormallanguage{\mksetnormallanguage}
\newevery \everylanguage \relax
\newevery \everyresetlanguagespecifics \relax
@@ -571,20 +433,6 @@
\def\disablelanguagespecifics
{\ignorecompoundcharacter}
-% \def\setnormallanguage#1%
-% {\dosetnormallanguage{:\currentencoding:\currentmapping:}{#1}{%
-% \dosetnormallanguage{:\currentencoding:\s!default :}{#1}{%
-% \dosetnormallanguage{:\s!default :\currentmapping:}{#1}{%
-% \dosetnormallanguage{:\s!default :\s!default :}{#1}\empty}}}}
-%
-% assume #1 = \cs
-
-\def\setnormallanguage#1%
- {\dosetnormallanguage{:\currentencoding:\currentmapping:}#1{%
- \dosetnormallanguage{:\currentencoding:\s!default :}#1{%
- \dosetnormallanguage{:\s!default :\currentmapping:}#1{%
- \dosetnormallanguage{:\s!default :\s!default :}#1\empty}}}}
-
\def\sethyphenationvariables
{\lefthyphenmin 0\languageparameter\s!lefthyphenmin \relax
\righthyphenmin0\languageparameter\s!righthyphenmin\relax
@@ -593,53 +441,16 @@
\def\docomplexlanguage% assumes that \currentlanguage is set
{\edef\currentdefaultlanguage{\defaultlanguage\currentlanguage}%
- % called quite often, so we use \csname
- % \def\synchronizepatterns{\setnormallanguage
- % {\csname\??la\currentlanguage\s!patterns\endcsname}}% called often
- % of even better pre-expand in an ugly way:
- \@EA\def\@EA\synchronizepatterns\@EA{\@EA\setnormallanguage
- \csname\??la\currentlanguage\s!patterns\endcsname}%
- \donefalse
- \synchronizepatterns
- \ifdone\else
- \def\synchronizepatterns{\setnormallanguage\currentlanguage}%
- \synchronizepatterns
- \ifdone\else
- \ifx\currentdefaultlanguage\empty\else
- % \def\synchronizepatterns{\setnormallanguage
- % {\csname\??la\currentdefaultlanguage\s!patterns\endcsname}}%
- \@EA\def\@EA\synchronizepatterns\@EA{\@EA\setnormallanguage
- \csname\??la\currentdefaultlanguage\s!patterns\endcsname}%
- \synchronizepatterns
- \ifdone\else
- \setnormallanguage\currentdefaultlanguage
- \synchronizepatterns
- \fi
- \fi
- \fi
- \fi
+ \mksetnormallanguage\currentlanguage\currentdefaultlanguage
\the\everylanguage
\enablelanguagespecifics[\currentlanguage]%
- % strange, what is this doing here, dangerous for {il2,ec}
- % \edef\languagemapping{\csname\??la\currentlanguage\s!mapping\endcsname}%
- % \ifx\languagemapping\empty\else
- % \fastenablemapping\languagemapping
- % \fi
\sethyphenationvariables
- %\lefthyphenmin 0\languageparameter\s!lefthyphenmin
- %\righthyphenmin0\languageparameter\s!righthyphenmin
\relax
% will be definable and move to core-spa !
\doifelse{\languageparameter\c!spacing}\v!broad
\nonfrenchspacing\frenchspacing}
-\ifx\enablelanguagespecifics\undefined
-
- \def\enablelanguagespecifics[#1]{}
-
-\fi
-
-\beginETEX
+\ifx\enablelanguagespecifics\undefined \def\enablelanguagespecifics[#1]{} \fi
\def\complexlanguage[#1]%
{\edef\askedlanguage{#1}%
@@ -655,32 +466,10 @@
\fi
\fi}
-\endETEX
-
-\beginTEX
-
-\def\complexlanguage[#1]%
- {\edef\askedlanguage{#1}%
- \ifx\askedlanguage\empty \else
- \@EA\ifx\csname\l!prefix!\askedlanguage\endcsname\relax
- \showmessage\m!linguals6{#1}%
- \else
- \edef\askedlanguage{\csname\l!prefix!\askedlanguage\endcsname}%
- \ifx\currentlanguage\askedlanguage \else
- \setcurrentlanguage\currentmainlanguage\askedlanguage
- \docomplexlanguage
- \fi
- \fi
- \fi}
-
-\endTEX
-
\let\simplelanguage\normallanguage
\definecomplexorsimple\language
-\beginETEX
-
\def\mainlanguage[#1]%
{\edef\askedlanguage{#1}%
\ifx\askedlanguage\empty \else
@@ -693,24 +482,6 @@
\fi
\fi}
-\endETEX
-
-\beginTEX
-
-\def\mainlanguage[#1]%
- {\edef\askedlanguage{#1}%
- \ifx\askedlanguage\empty \else
- \@EA\ifx\csname\l!prefix!\askedlanguage\endcsname\relax\else
- \edef\askedlanguage{\csname\l!prefix!\askedlanguage\endcsname}%
- \ifx\currentmainlanguage\askedlanguage \else
- \setcurrentlanguage\askedlanguage\askedlanguage
- \docomplexlanguage
- \fi
- \fi
- \fi}
-
-\endTEX
-
%D \macros
%D {defaultlanguage,languagedefault}
%D
@@ -729,9 +500,6 @@
\def\languagedefault#1#2%
{\csname\??la\defaultlanguage{#1}#2\endcsname}
-% \def\languageparameter#1%
-% {\csname\??la\defaultlanguage\currentlanguage#1\endcsname}
-
\def\languageparameter % @EA = speedup
{\@EA\dolanguageparameter\@EA{\defaultlanguage\currentlanguage}}
@@ -744,120 +512,27 @@
\def\defaultlanguageparameter#1%
{\csname\??la\s!default#1\endcsname}
-\beginETEX
-
- \def\dolanguageparameter#1#2%
- {\csname\??la
- \ifcsname\??la\currentlanguage#2\endcsname
- \currentlanguage
- \else\ifcsname\??la#1#2\endcsname
- \@EA\ifx\csname\??la#1#2\endcsname\empty\s!default\else#1\fi
- \else
- \s!default
- \fi\fi
- #2\endcsname}
-
- \def\dospecificlanguageparameter#1#2#3%
- {\csname\??la
- \ifcsname\??la#2#3\endcsname
- \@EA\ifx\csname\??la#2#3\endcsname\empty\s!default\else#2\fi
- \else\ifcsname\??la#1#3\endcsname
- \@EA\ifx\csname\??la#1#3\endcsname\empty\s!default\else#1\fi
- \else
- \s!default
- \fi\fi
- #3\endcsname}
-
-\endETEX
-
-\beginTEX
-
- \def\dolanguageparameter#1#2%
- {\csname\??la
- \@EA\ifx\csname\??la\currentlanguage#2\endcsname\relax
- \@EA\ifx\csname\??la#1#2\endcsname\relax
- \s!default
- \else
- \@EA\ifx\csname\??la#1#2\endcsname\empty\s!default\else#1\fi
- \fi
- \else
- \currentlanguage
- \fi
- #2\endcsname}
-
- \def\dospecificlanguageparameter#1#2#3%
- {\csname\??la
- \@EA\ifx\csname\??la#2#3\endcsname\relax
- \@EA\ifx\csname\??la#1#3\endcsname\relax
- \s!default
- \else
- \@EA\ifx\csname\??la#1#3\endcsname\empty\s!default\else#1\fi
- \fi
- \else
- \@EA\ifx\csname\??la#2#3\endcsname\empty\s!default\else#2\fi
- \fi
- #3\endcsname}
-
-\endTEX
-
-% moved
-%
-% %D \macros
-% %D {leftguillemot,rightguillemot,leftsubguillemot,rightsubguillemot,
-% %D ...single...quote,...double...quote}
-% %D
-% %D We assign logical names to all kind of quote and sentence
-% %D boundary characters.
-% %D
-% %D When using Computer Modern Roman, the next definitions
-% %D looks a bit better than the default ligatures.
-% %D
-% %D \starttyping
-% %D \def\lowerleftsingleninequote {,}
-% %D \def\lowerleftdoubleninequote {,\kern-.1em,}
-% %D \def\upperleftsingleninequote {'}
-% %D \def\upperleftdoubleninequote {''\kern-.1em}
-% %D \def\upperleftsinglesixquote {`}
-% %D \def\upperleftdoublesixquote {`\kern-.1em`}
-% %D
-% %D \def\lowerrightsingleninequote {,}
-% %D \def\lowerrightdoubleninequote {,\kern-.1em,}
-% %D \def\upperrightsingleninequote {'}
-% %D \def\upperrightdoubleninequote {''}
-% %D \def\upperrightsinglesixquote {`}
-% %D \def\upperrightdoublesixquote {\kern-.125em``}
-% %D \stoptyping
-% %D
-% %D But in other fonts, these definitions can give problems, so
-% %D we just say:
-%
-% \def\lowerleftsingleninequote {,}
-% \def\lowerleftdoubleninequote {,,}
-% \def\upperleftsingleninequote {'}
-% \def\upperleftdoubleninequote {''}
-% \def\upperleftsinglesixquote {`}
-% \def\upperleftdoublesixquote {``}
-%
-% \def\lowerrightsingleninequote {,}
-% \def\lowerrightdoubleninequote {,,}
-% \def\upperrightsingleninequote {'}
-% \def\upperrightdoubleninequote {''}
-% \def\upperrightsinglesixquote {`}
-% \def\upperrightdoublesixquote {``}
-%
-% %D Yes I know, they are ugly:
-%
-% \def\leftfakeguillemot
-% {\dontleavehmode\hbox{\raise.25ex\hbox{$\scriptscriptstyle\ll$}}}
-%
-% \def\rightfakeguillemot
-% {\hbox{\raise.25ex\hbox{$\scriptscriptstyle\gg$}}}
-%
-% \def\leftsubfakeguillemot
-% {\dontleavehmode\hbox{\raise.25ex\hbox{$\scriptscriptstyle<$}}}
-%
-% \def\rightsubfakeguillemot
-% {\hbox{\raise.25ex\hbox{$\scriptscriptstyle>$}}}
+\def\dolanguageparameter#1#2%
+ {\csname\??la
+ \ifcsname\??la\currentlanguage#2\endcsname
+ \currentlanguage
+ \else\ifcsname\??la#1#2\endcsname
+ \@EA\ifx\csname\??la#1#2\endcsname\empty\s!default\else#1\fi
+ \else
+ \s!default
+ \fi\fi
+ #2\endcsname}
+
+\def\dospecificlanguageparameter#1#2#3%
+ {\csname\??la
+ \ifcsname\??la#2#3\endcsname
+ \@EA\ifx\csname\??la#2#3\endcsname\empty\s!default\else#2\fi
+ \else\ifcsname\??la#1#3\endcsname
+ \@EA\ifx\csname\??la#1#3\endcsname\empty\s!default\else#1\fi
+ \else
+ \s!default
+ \fi\fi
+ #3\endcsname}
%D New (see nomarking and nolist):
@@ -987,13 +662,14 @@
\def\nopatterns{\normallanguage\minusone}
+%D Mark plugin:
+
+\loadmarkfile{lang-ini} % not yet
+
%D We default to the language belonging to the interface. This
%D is one of the few places outside the interface modules where
%D \type{\startinterface} is used.
-\let\normaldoublequote ="
-\let\normalforwardslash=/
-
%D We default to english:
\setupcurrentlanguage[\s!en]
@@ -1002,8 +678,4 @@
\appendtoks\showmessage\m!linguals9\currentlanguage\to\everyjob
-%D Brrr:
-
-% \ifx\@@ladefault\undefined \let\@@ladefault\s!en \fi
-
\protect \endinput
diff --git a/tex/context/base/lang-sla.tex b/tex/context/base/lang-sla.tex
index 330c6f7f5..268a9c332 100644
--- a/tex/context/base/lang-sla.tex
+++ b/tex/context/base/lang-sla.tex
@@ -20,7 +20,7 @@
%D us an email.
%D
%D \starttabulate[|lB|l|]
-%D \NC Czech \NC Tom Hidec, Petr Sojka \NC \NR
+%D \NC Czech \NC Tom Hudec, Petr Sojka \NC \NR
%D \NC Polish \NC Grzegorz Sapijaszko \NC \NR
%D \NC Croatian \NC \Zcaron eljko Vrba \NC \NR
%D \NC Slovenian \NC Mojca Miklavec \NC \NR
diff --git a/tex/context/base/luat-cbk.lua b/tex/context/base/luat-cbk.lua
index efb534d7d..a22c70acb 100644
--- a/tex/context/base/luat-cbk.lua
+++ b/tex/context/base/luat-cbk.lua
@@ -99,7 +99,8 @@ garbagecollector = { }
do
local level = 0
- collectgarbage("setstepmul", 165)
+--~ collectgarbage("setstepmul", 165)
+--~ collectgarbage("setstepmul",50)
garbagecollector.trace = false
garbagecollector.tune = false -- for the moment
diff --git a/tex/context/base/luat-inp.lua b/tex/context/base/luat-inp.lua
index 541bde5c3..ba5d97c29 100644
--- a/tex/context/base/luat-inp.lua
+++ b/tex/context/base/luat-inp.lua
@@ -19,6 +19,7 @@
-- Beware, loading and saving is overloaded in luat-tmp!
-- todo: instances.[hashes,cnffiles,configurations,522] -> ipairs (check everything, faster)
+-- todo: check escaping in find etc, too much, too slow
if not versions then versions = { } end versions['luat-inp'] = 1.001
if not environment then environment = { } end
@@ -256,31 +257,36 @@ input.settrace(tonumber(os.getenv("MTX.INPUT.TRACE") or os.getenv("MTX_INPUT_TRA
-- These functions can be used to test the performance, especially
-- loading the database files.
-function input.start_timing(instance)
- if instance then
- instance.starttime = os.clock()
- if not instance.loadtime then
- instance.loadtime = 0
+do
+ local clock = os.clock
+
+ function input.starttiming(instance)
+ if instance then
+ instance.starttime = clock()
+ if not instance.loadtime then
+ instance.loadtime = 0
+ end
end
end
-end
-function input.stop_timing(instance, report)
- if instance and instance.starttime then
- instance.stoptime = os.clock()
- local loadtime = instance.stoptime - instance.starttime
- instance.loadtime = instance.loadtime + loadtime
- if report then
- input.report('load time', string.format("%0.3f",loadtime))
+ function input.stoptiming(instance, report)
+ if instance then
+ local starttime = instance.starttime
+ if starttime then
+ local stoptime = clock()
+ local loadtime = stoptime - starttime
+ instance.stoptime = stoptime
+ instance.loadtime = instance.loadtime + loadtime
+ if report then
+ input.report('load time', string.format("%0.3f",loadtime))
+ end
+ return loadtime
+ end
end
- return loadtime
- else
return 0
end
-end
-input.stoptiming = input.stop_timing
-input.starttiming = input.start_timing
+end
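
A small usage sketch of the timing helpers above; the instance table here is just a placeholder carrying the fields that the two functions touch:

    -- placeholder instance; in practice this is the resolver instance
    local instance = { }

    input.starttiming(instance)
    -- ... some work, for instance loading a file database ...
    local loadtime = input.stoptiming(instance, true)   -- true also reports the time

    -- loadtime is this run, instance.loadtime accumulates over start/stop pairs
    print(string.format("%0.3f", instance.loadtime))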
function input.elapsedtime(instance)
return string.format("%0.3f",instance.loadtime or 0)
@@ -594,99 +600,106 @@ function input.generatedatabase(instance,specification)
return input.methodhandler('generators', instance, specification)
end
-function input.generators.tex(instance,specification)
- local tag = specification
- if not instance.lsrmode and lfs and lfs.dir then
- input.report("scanning path",specification)
- instance.files[tag] = { }
- local files = instance.files[tag]
- local n, m, r = 0, 0, 0
- local spec = specification .. '/'
- local attributes = lfs.attributes
- local directory = lfs.dir
- local small = instance.smallcache
- local function action(path)
- local mode, full
- if path then
- full = spec .. path .. '/'
- else
- full = spec
- end
- for name in directory(full) do
- if name:find("^%.") then
- -- skip
- elseif name:find("[%~%`%!%#%$%%%^%&%*%(%)%=%{%}%[%]%:%;\"\'%|%|%<%>%,%?\n\r\t]") then
- -- texio.write_nl("skipping " .. name)
- -- skip
+do
+
+ local weird = lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+
+ function input.generators.tex(instance,specification)
+ local tag = specification
+ if not instance.lsrmode and lfs and lfs.dir then
+ input.report("scanning path",specification)
+ instance.files[tag] = { }
+ local files = instance.files[tag]
+ local n, m, r = 0, 0, 0
+ local spec = specification .. '/'
+ local attributes = lfs.attributes
+ local directory = lfs.dir
+ local small = instance.smallcache
+ local function action(path)
+ local mode, full
+ if path then
+ full = spec .. path .. '/'
else
- mode = attributes(full..name,'mode')
- if mode == "directory" then
- m = m + 1
- if path then
- action(path..'/'..name)
- else
- action(name)
- end
- elseif path and mode == 'file' then
- n = n + 1
- local f = files[name]
- if f then
- if not small then
- if type(f) == 'string' then
- files[name] = { f, path }
- else
- f[#f+1] = path
- end
+ full = spec
+ end
+ for name in directory(full) do
+ if name:find("^%.") then
+ -- skip
+ -- elseif name:find("[%~%`%!%#%$%%%^%&%*%(%)%=%{%}%[%]%:%;\"\'%|%<%>%,%?\n\r\t]") then -- too much escaped
+ elseif weird:match(name) then
+ -- texio.write_nl("skipping " .. name)
+ -- skip
+ else
+ mode = attributes(full..name,'mode')
+ if mode == "directory" then
+ m = m + 1
+ if path then
+ action(path..'/'..name)
+ else
+ action(name)
end
- else
- files[name] = path
- local lower = name:lower()
- if name ~= lower then
- files["remap:"..lower] = name
- r = r + 1
+ elseif path and mode == 'file' then
+ n = n + 1
+ local f = files[name]
+ if f then
+ if not small then
+ if type(f) == 'string' then
+ files[name] = { f, path }
+ else
+ f[#f+1] = path
+ end
+ end
+ else
+ files[name] = path
+ local lower = name:lower()
+ if name ~= lower then
+ files["remap:"..lower] = name
+ r = r + 1
+ end
end
end
end
end
end
- end
- action()
- input.report(string.format("%s files found on %s directories with %s uppercase remappings",n,m,r))
- else
- local fullname = file.join(specification,input.lsrname)
- local path = '.'
- local f = io.open(fullname)
- if f then
- instance.files[tag] = { }
- local files = instance.files[tag]
- local small = instance.smallcache
- input.report("loading lsr file",fullname)
- -- for line in f:lines() do -- much slower then the next one
- for line in (f:read("*a")):gmatch("(.-)\n") do
- if line:find("^[%a%d]") then
- local fl = files[line]
- if fl then
- if not small then
- if type(fl) == 'string' then
- files[line] = { fl, path } -- table
- else
- fl[#fl+1] = path
+ action()
+ input.report(string.format("%s files found on %s directories with %s uppercase remappings",n,m,r))
+ else
+ local fullname = file.join(specification,input.lsrname)
+ local path = '.'
+ local f = io.open(fullname)
+ if f then
+ instance.files[tag] = { }
+ local files = instance.files[tag]
+ local small = instance.smallcache
+ input.report("loading lsr file",fullname)
+ -- for line in f:lines() do -- much slower than the next one
+ for line in (f:read("*a")):gmatch("(.-)\n") do
+ if line:find("^[%a%d]") then
+ local fl = files[line]
+ if fl then
+ if not small then
+ if type(fl) == 'string' then
+ files[line] = { fl, path } -- table
+ else
+ fl[#fl+1] = path
+ end
+ end
+ else
+ files[line] = path -- string
+ local lower = line:lower()
+ if line ~= lower then
+ files["remap:"..lower] = line
end
end
else
- files[line] = path -- string
- local lower = line:lower()
- if line ~= lower then
- files["remap:"..lower] = line
- end
+ path = line:match("%.%/(.-)%:$") or path -- match could be nil due to empty line
end
- else
- path = line:match("%.%/(.-)%:$") or path -- match could be nil due to empty line
end
+ f:close()
end
- f:close()
end
end
+
end
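
For orientation, the files table that both branches above end up building has this shape (all names and paths here are made up):

    -- made-up example of instance.files[tag] after scanning or lsr loading
    local files = {
        ["context.tex"]       = "tex/context/base",        -- one hit: a plain path string
        ["s-abr-01.tex"]      = { "tex/context/modules",    -- several hits: a list of paths
                                  "tex/context/extras" },   --   (unless instance.smallcache is set)
        ["MyStyle.tex"]       = "tex/context/user",
        ["remap:mystyle.tex"] = "MyStyle.tex",               -- lowercase name remapped to the real one
    }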
-- savers, todo
@@ -1109,10 +1122,168 @@ end
-- a,b,c/{p,q,r}/d/{x,y,z}//
-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+
+-- this one is better and faster, but it took me a while to realize
+-- that this kind of replacement is cleaner than messy parsing and
+-- fuzzy concatenating; we can probably gain a bit by selectively
+-- applying lpeg, but experiments with lpeg parsing this proved not to
+-- work that well; the parsing is ok, but dealing with the resulting
+-- table is a pain because we need to work inside-out recursively
+
+--~ function input.aux.splitpathexpr(str, t, validate)
+--~ -- no need for optimization, only called a few times, we can use lpeg for the sub
+--~ t = t or { }
+--~ while true do
+--~ local done = false
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("([^{},]+){([^{}]-)}", function(a,b)
+--~ local t = { }
+--~ for s in b:gmatch("([^,]+)") do
+--~ t[#t+1] = a .. s
+--~ end
+--~ ok, done = true, true
+--~ return "{" .. table.concat(t,",") .. "}"
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("{([^{}]-)}([^{},]+)", function(a,b)
+--~ local t = { }
+--~ for s in a:gmatch("([^,]+)") do
+--~ t[#t+1] = s .. b
+--~ end
+--~ ok, done = true, true
+--~ return "{" .. table.concat(t,",") .. "}"
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("([,{]){([^{}]+)}([,}])", function(a,b,c)
+--~ ok, done = true, true
+--~ return a .. b .. c
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ if not done then break end
+--~ end
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("{([^{}]-)}{([^{}]-)}", function(a,b)
+--~ local t = { }
+--~ for sa in a:gmatch("([^,]+)") do
+--~ for sb in b:gmatch("([^,]+)") do
+--~ t[#t+1] = sa .. sb
+--~ end
+--~ end
+--~ ok = true
+--~ return "{" .. table.concat(t,",") .. "}"
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ while true do
+--~ ok = false
+--~ str = str:gsub("{([^{}]-)}", function(a)
+--~ ok = true
+--~ return a
+--~ end)
+--~ if not ok then break end
+--~ end
+--~ if validate then
+--~ for s in str:gmatch("([^,]+)") do
+--~ s = validate(s)
+--~ if s then t[#t+1] = s end
+--~ end
+--~ else
+--~ for s in str:gmatch("([^,]+)") do
+--~ t[#t+1] = s
+--~ end
+--~ end
+--~ return t
+--~ end
+
+function input.aux.splitpathexpr(str, t, validate)
+ -- no need for optimization, only called a few times, we can use lpeg for the sub
+ t = t or { }
+ local concat = table.concat
+ while true do
+ local done = false
+ while true do
+ ok = false
+ str = str:gsub("([^{},]+){([^{}]-)}", function(a,b)
+ local t = { }
+ b:piecewise(",", function(s) t[#t+1] = a .. s end)
+ ok, done = true, true
+ return "{" .. concat(t,",") .. "}"
+ end)
+ if not ok then break end
+ end
+ while true do
+ ok = false
+ str = str:gsub("{([^{}]-)}([^{},]+)", function(a,b)
+ local t = { }
+ a:piecewise(",", function(s) t[#t+1] = s .. b end)
+ ok, done = true, true
+ return "{" .. concat(t,",") .. "}"
+ end)
+ if not ok then break end
+ end
+ while true do
+ ok = false
+ str = str:gsub("([,{]){([^{}]+)}([,}])", function(a,b,c)
+ ok, done = true, true
+ return a .. b .. c
+ end)
+ if not ok then break end
+ end
+ if not done then break end
+ end
+ while true do
+ ok = false
+ str = str:gsub("{([^{}]-)}{([^{}]-)}", function(a,b)
+ local t = { }
+ a:piecewise(",", function(sa)
+ b:piecewise(",", function(sb)
+ t[#t+1] = sa .. sb
+ end)
+ end)
+ ok = true
+ return "{" .. concat(t,",") .. "}"
+ end)
+ if not ok then break end
+ end
+ while true do
+ ok = false
+ str = str:gsub("{([^{}]-)}", function(a)
+ ok = true
+ return a
+ end)
+ if not ok then break end
+ end
+ if validate then
+ str:piecewise(",", function(s)
+ s = validate(s)
+ if s then t[#t+1] = s end
+ end)
+ else
+ str:piecewise(",", function(s)
+ t[#t+1] = s
+ end)
+ end
+ return t
+end
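
To make the rewriting passes above concrete, here is what two small expressions (in the spirit of the test cases listed earlier) expand to; a sketch, with the element order as it falls out of the gsub passes:

    -- expanding brace expressions into single paths
    local t = input.aux.splitpathexpr("a,b,c/{p,q,r}")
    -- t is { "a", "b", "c/p", "c/q", "c/r" }

    -- with a validator, entries can be cleaned up or rejected on the fly
    local u = input.aux.splitpathexpr("x/{one,two}//", nil, function(s)
        return s ~= "" and s        -- keep non-empty entries as-is
    end)
    -- u is { "x/one//", "x/two//" }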
function input.aux.expanded_path(instance,pathlist)
-- a previous version fed back into pathlist
- local i, n, oldlist, newlist, ok = 0, 0, { }, { }, false
+ local newlist, ok = { }, false
for _,v in ipairs(pathlist) do
if v:find("[{}]") then
ok = true
@@ -1120,45 +1291,11 @@ function input.aux.expanded_path(instance,pathlist)
end
end
if ok then
- for _,v in ipairs(pathlist) do
- oldlist[#oldlist+1] = (v:gsub("([\{\}])", function(p)
- if p == "{" then
- i = i + 1
- if i > n then n = i end
- return "<" .. (i-1) .. ">"
- else
- i = i - 1
- return "" .. i .. ">"
- end
- end))
- end
- for i=1,n do
- while true do
- local more = false
- local pattern = "^(.-)<"..(n-i)..">(.-)"..(n-i)..">(.-)$"
- local t = { }
- for _,v in ipairs(oldlist) do
- local pre, mid, post = v:match(pattern)
- if pre and mid and post then
- more = true
- for vv in string.gmatch(mid..',',"(.-),") do
- if vv == '.' then
- t[#t+1] = pre..post
- else
- t[#t+1] = pre..vv..post
- end
- end
- else
- t[#t+1] = v
- end
- end
- oldlist = t
- if not more then break end
- end
- end
- for _,v in ipairs(oldlist) do
- v = file.collapse_path(v)
- if v ~= "" and not v:find(instance.dummy_path_expr) then newlist[#newlist+1] = v end
+ for _, v in ipairs(pathlist) do
+ input.aux.splitpathexpr(v, newlist, function(s)
+ s = file.collapse_path(s)
+ return s ~= "" and not s:find(instance.dummy_path_expr) and s
+ end)
end
else
for _,v in ipairs(pathlist) do
@@ -1171,6 +1308,83 @@ function input.aux.expanded_path(instance,pathlist)
return newlist
end
+--~ old one, imperfect and not that efficient
+--~
+--~ function input.aux.expanded_path(instance,pathlist)
+--~ -- a previous version fed back into pathlist
+--~ local i, n, oldlist, newlist, ok = 0, 0, { }, { }, false
+--~ for _,v in ipairs(pathlist) do
+--~ if v:find("[{}]") then
+--~ ok = true
+--~ break
+--~ end
+--~ end
+--~ if ok then
+--~ for _,v in ipairs(pathlist) do
+--~ oldlist[#oldlist+1] = (v:gsub("([\{\}])", function(p)
+--~ if p == "{" then
+--~ i = i + 1
+--~ if i > n then n = i end
+--~ return "<" .. (i-1) .. ">"
+--~ else
+--~ i = i - 1
+--~ return "" .. i .. ">"
+--~ end
+--~ end))
+--~ end
+--~ for i=1,n do
+--~ while true do
+--~ local more = false
+--~ local pattern = "^(.-)<"..(n-i)..">(.-)"..(n-i)..">(.-)$"
+--~ local t = { }
+--~ for _,v in ipairs(oldlist) do
+--~ local pre, mid, post = v:match(pattern)
+--~ if pre and mid and post then
+--~ more = true
+--~ for vv in string.gmatch(mid..',',"(.-),") do -- (mid, "([^,]+)")
+--~ if vv == '.' then
+--~ t[#t+1] = pre..post
+--~ else
+--~ t[#t+1] = pre..vv..post
+--~ end
+--~ end
+--~ else
+--~ t[#t+1] = v
+--~ end
+--~ end
+--~ oldlist = t
+--~ if not more then break end
+--~ end
+--~ end
+--~ if true then
+--~ -- many dups are possible due to messy resolve / order can be messed up too, brr !
+--~ local ok = { }
+--~ for _,o in ipairs(oldlist) do
+--~ for v in o:gmatch("([^,]+)") do
+--~ if not ok[v] then
+--~ ok[v] = true
+--~ v = file.collapse_path(v)
+--~ if v ~= "" and not v:find(instance.dummy_path_expr) then newlist[#newlist+1] = v end
+--~ end
+--~ end
+--~ end
+--~ else
+--~ for _,v in ipairs(oldlist) do
+--~ v = file.collapse_path(v)
+--~ if v ~= "" and not v:find(instance.dummy_path_expr) then newlist[#newlist+1] = v end
+--~ end
+--~ end
+--~ else
+--~ for _,v in ipairs(pathlist) do
+--~ for vv in string.gmatch(v..',',"(.-),") do
+--~ vv = file.collapse_path(v)
+--~ if vv ~= "" then newlist[#newlist+1] = vv end
+--~ end
+--~ end
+--~ end
+--~ return newlist
+--~ end
+
--~ function input.is_readable(name) -- brrr, get rid of this
--~ return name:find("^zip##") or file.is_readable(name)
--~ end
@@ -1269,24 +1483,51 @@ function input.suffixes_of_format(str)
end
end
-function input.aux.qualified_path(filename) -- make platform dependent / not good yet
- return
- filename:find("^%.+/") or
- filename:find("^/") or
- filename:find("^%a+%:") or
- filename:find("^%a+##")
-end
+--~ function input.aux.qualified_path(filename) -- make platform dependent / not good yet
+--~ return
+--~ filename:find("^%.+/") or
+--~ filename:find("^/") or
+--~ filename:find("^%a+%:") or
+--~ filename:find("^%a+##")
+--~ end
+
+--~ function input.normalize_name(original)
+--~ -- internally we use type##spec##subspec ; this hackery slightly slows down searching
+--~ local str = original or ""
+--~ str = str:gsub("::", "##") -- :: -> ##
+--~ str = str:gsub("^(%a+)://" ,"%1##") -- zip:// -> zip##
+--~ str = str:gsub("(.+)##(.+)##/(.+)","%1##%2##%3") -- ##/spec -> ##spec
+--~ if (input.trace>1) and (original ~= str) then
+--~ input.logger('= normalizer',original.." -> "..str)
+--~ end
+--~ return str
+--~ end
+
+do -- called about 700 times for an empty doc (font initializations etc)
+ -- i need to weed the font files for redundant calls
-function input.normalize_name(original)
- -- internally we use type##spec##subspec ; this hackery slightly slows down searching
- local str = original or ""
- str = str:gsub("::", "##") -- :: -> ##
- str = str:gsub("^(%a+)://" ,"%1##") -- zip:// -> zip##
- str = str:gsub("(.+)##(.+)##/(.+)","%1##%2##%3") -- ##/spec -> ##spec
- if (input.trace>1) and (original ~= str) then
- input.logger('= normalizer',original.." -> "..str)
+ local letter = lpeg.R("az","AZ")
+ local separator = lpeg.P("##")
+
+ local qualified = lpeg.P(".")^0 * lpeg.P("/") + letter*lpeg.P(":") + letter^1*separator
+ local normalized = lpeg.Cs(
+ (letter^1*(lpeg.P("://")/"##") * (1-lpeg.P(false))^1) +
+ (lpeg.P("::")/"##" + (1-separator)^1*separator*(1-separator)^1*separator*(lpeg.P("/")/"") + 1)^0
+ )
+
+ -- ./name ../name /name c: zip## (todo: use url internally and get rid of ##)
+ function input.aux.qualified_path(filename)
+ return qualified:match(filename)
+ end
+
+ -- zip:// -> zip## ; :: -> ## ; aa##bb##/cc -> aa##bb##cc
+ function input.normalize_name(original)
+ local str = normalized:match(original or "")
+ if input.trace > 1 and original ~= str then
+ input.logger('= normalizer',original.." -> "..str)
+ end
+ return str
end
- return str
end
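
Read together with the comments in the block above, the intended behaviour comes down to a few concrete cases (the names here are invented):

    -- normalization:  zip://archive.zip##doc.tex  ->  zip##archive.zip##doc.tex
    --                 texmf::some/path            ->  texmf##some/path
    --                 aa##bb##/cc                 ->  aa##bb##cc
    print(input.normalize_name("zip://archive.zip"))                      -- zip##archive.zip

    -- qualified means: starts with ./ ../ / a drive letter like c: or type##
    print(input.aux.qualified_path("./here/file.tex") and true or false)  -- true
    print(input.aux.qualified_path("texmf##file.tex") and true or false)  -- true
    print(input.aux.qualified_path("plain/file.tex")  and true or false)  -- false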
-- split the next one up, better for jit
@@ -1651,13 +1892,13 @@ function input.automount(instance)
end
function input.load(instance)
- input.start_timing(instance)
+ input.starttiming(instance)
input.identify_cnf(instance)
input.load_cnf(instance)
input.expand_variables(instance)
input.load_hash(instance)
input.automount(instance)
- input.stop_timing(instance)
+ input.stoptiming(instance)
end
function input.for_files(instance, command, files, filetype, mustexist)
diff --git a/tex/context/base/luat-lib.tex b/tex/context/base/luat-lib.tex
index 84b5bcfff..d557ca4b6 100644
--- a/tex/context/base/luat-lib.tex
+++ b/tex/context/base/luat-lib.tex
@@ -35,6 +35,7 @@
% this will change once we have proper write overloads
\registerctxluafile{l-string} {1.001}
+\registerctxluafile{l-lpeg} {1.001}
\registerctxluafile{l-boolean}{1.001}
\registerctxluafile{l-number} {1.001}
\registerctxluafile{l-math} {1.001}
@@ -48,6 +49,7 @@
\registerctxluafile{l-unicode}{1.001}
\registerctxluafile{l-utils} {1.001}
\registerctxluafile{l-tex} {1.001}
+\registerctxluafile{l-url} {1.001}
\registerctxluafile{l-xml} {1.001}
%registerctxluafile{l-xmlctx} {1.001}
diff --git a/tex/context/base/luat-lmx.lua b/tex/context/base/luat-lmx.lua
index 672db3ca6..75a7098c1 100644
--- a/tex/context/base/luat-lmx.lua
+++ b/tex/context/base/luat-lmx.lua
@@ -32,7 +32,7 @@ end
lmx.converting = false
-function lmx.convert(template,result) -- use lpeg instead
+function lmx.convert(template,result) -- todo: use lpeg instead
if not lmx.converting then -- else, if error then again tex error and loop
local data = input.texdatablob(texmf.instance, template)
local f = false
@@ -46,7 +46,7 @@ function lmx.convert(template,result) -- use lpeg instead
return lmx.variables[str] or ""
end
function lmx.escape(str)
- return string.gsub(string.gsub(str,'&','&amp;'),'[<>"]',lmx.escapes)
+ return string.gsub(str:gsub('&','&amp;'),'[<>"]',lmx.escapes)
end
function lmx.type(str)
if str then lmx.print("<tt>" .. lmx.escape(str) .. "</tt>") end
@@ -57,18 +57,18 @@ function lmx.convert(template,result) -- use lpeg instead
function lmx.tv(str)
lmx.type(lmx.variable(str))
end
- data = string.gsub(data, "<%?lmx%-include%s+(.-)%s-%?>", function(filename)
+ data = data:gsub("<%?lmx%-include%s+(.-)%s-%?>", function(filename)
return lmx.loadedfile(filename)
end)
local definitions = { }
- data = string.gsub(data, "<%?lmx%-define%-begin%s+(%S-)%s-%?>(.-)<%?lmx%-define%-end%s-%?>", function(tag,content)
+ data = data:gsub("<%?lmx%-define%-begin%s+(%S-)%s-%?>(.-)<%?lmx%-define%-end%s-%?>", function(tag,content)
definitions[tag] = content
return ""
end)
- data = string.gsub(data, "<%?lmx%-resolve%s+(%S-)%s-%?>", function(tag)
+ data = data:gsub("<%?lmx%-resolve%s+(%S-)%s-%?>", function(tag)
return definitions[tag] or ""
end)
- data = string.gsub(data, "%c%s-(<%?lua .-%?>)%s-%c", function(lua)
+ data = data:gsub("%c%s-(<%?lua .-%?>)%s-%c", function(lua)
return "\n" .. lua .. " "
end)
data = string.gsub(data .. "","(.-)<%?lua%s+(.-)%?>", function(txt, lua)
diff --git a/tex/context/base/luat-log.lua b/tex/context/base/luat-log.lua
index faecf7e29..12cf45c48 100644
--- a/tex/context/base/luat-log.lua
+++ b/tex/context/base/luat-log.lua
@@ -45,7 +45,12 @@ logs.tex = logs.tex or { }
logs.level = 0
do
- local write_nl, write, format = texio.write_nl or print, texio.write or print, string.format
+ local write_nl, write, format = texio.write_nl or print, texio.write or io.write, string.format
+
+ if texlua then
+ write_nl = print
+ write = io.write
+ end
function logs.xml.debug(category,str)
if logs.level > 3 then write_nl(format("%s ",category,str)) end
diff --git a/tex/context/base/luat-tex.lua b/tex/context/base/luat-tex.lua
index 9127ab9da..591f3af20 100644
--- a/tex/context/base/luat-tex.lua
+++ b/tex/context/base/luat-tex.lua
@@ -92,18 +92,22 @@ if texconfig and not texlua then
else
input.logger('+ ' .. tag .. ' opener',filename)
-- todo: file;name -> freeze / scan the first line -> freeze
+ local filters = input.filters
t = {
reader = function(self)
local line = file_handle:read()
if line == "" then
return ""
- elseif input.filters.utf_translator then
- return input.filters.utf_translator(line)
- elseif input.filters.dynamic_translator then
- return input.filters.dynamic_translator(line)
- else
- return line
end
+ local translator = filters.utf_translator
+ if translator then
+ return translator(line)
+ end
+ translator = filters.dynamic_translator
+ if translator then
+ return translator(line)
+ end
+ return line
end,
close = function()
input.logger('= ' .. tag .. ' closer',filename)
@@ -254,8 +258,8 @@ if texconfig and not texlua then
function input.register_start_actions(f) table.insert(input.start_actions, f) end
function input.register_stop_actions (f) table.insert(input.stop_actions, f) end
---~ callback.register('start_run', function() for _, a in pairs(input.start_actions) do a() end end)
---~ callback.register('stop_run' , function() for _, a in pairs(input.stop_actions ) do a() end end)
+ --~ callback.register('start_run', function() for _, a in pairs(input.start_actions) do a() end end)
+ --~ callback.register('stop_run' , function() for _, a in pairs(input.stop_actions ) do a() end end)
end
@@ -397,3 +401,100 @@ function cs.testcase(b)
tex.sprint(tex.texcatcodes, "\\secondoftwoarguments")
end
end
+
+-- This is not the most ideal place, but it will do. Maybe we need to move
+-- attributes to node-att.lua.
+
+if node then
+
+ nodes = nodes or { }
+
+ do
+
+ -- just for testing
+
+ local reserved = { }
+
+ function nodes.register(n)
+ reserved[#reserved+1] = n
+ end
+
+ function nodes.cleanup_reserved(nofboxes) -- todo
+ local nr, free = #reserved, node.free
+ for i=1,nr do
+ free(reserved[i])
+ end
+ local nl, tb, flush = 0, tex.box, node.flush_list
+ if nofboxes then
+ for i=1,nofboxes do
+ local l = tb[i]
+ if l then
+ flush(l)
+ tb[i] = nil
+ nl = nl + 1
+ end
+ end
+ end
+ reserved = { }
+ return nr, nl, nofboxes
+ end
+
+ -- nodes.register = function() end
+ -- nodes.cleanup_reserved = function() end
+
+ end
+
+ do
+
+ local pdfliteral = node.new("whatsit",8) pdfliteral.next, pdfliteral.prev = nil, nil pdfliteral.mode = 1
+ local disc = node.new("disc") disc.next, disc.prev = nil, nil
+ local kern = node.new("kern",1) kern.next, kern.prev = nil, nil
+ local penalty = node.new("penalty") penalty.next, penalty.prev = nil, nil
+ local glue = node.new("glue") glue.next, glue.prev = nil, nil
+ local glue_spec = node.new("glue_spec") glue_spec.next, glue_spec.prev = nil, nil
+
+ nodes.register(pdfliteral)
+ nodes.register(disc)
+ nodes.register(kern)
+ nodes.register(penalty)
+ nodes.register(glue)
+ nodes.register(glue_spec)
+
+ local copy = node.copy
+
+ function nodes.penalty(p)
+ local n = copy(penalty)
+ n.penalty = p
+ return n
+ end
+ function nodes.kern(k)
+ local n = copy(kern)
+ n.kern = k
+ return n
+ end
+ function nodes.glue(width,stretch,shrink)
+ local n = copy(glue)
+ local s = copy(glue_spec)
+ s.width, s.stretch, s.shrink = width, stretch, shrink
+ n.spec = s
+ return n
+ end
+ function nodes.glue_spec(width,stretch,shrink)
+ local s = copy(glue_spec)
+ s.width, s.stretch, s.shrink = width, stretch, shrink
+ return s
+ end
+
+ function nodes.disc()
+ return copy(disc)
+ end
+
+ function nodes.pdfliteral(str)
+ local t = copy(pdfliteral)
+ t.data = str
+ return t
+ end
+
+ end
+
+end
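
A short usage sketch for the node constructors above; the dimensions are arbitrary example values in scaled points and the list splicing is only indicated in a comment:

    -- example values in scaled points (65536sp = 1pt)
    local p = nodes.penalty(10000)              -- penalty node
    local k = nodes.kern(2 * 65536)             -- 2pt kern
    local g = nodes.glue(65536, 32768, 16384)   -- 1pt plus .5pt minus .25pt
    local l = nodes.pdfliteral("0 g")           -- whatsit carrying pdf code

    -- typical pattern: splice a constructed node into a list after some node n
    -- p.next, n.next = n.next, p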
diff --git a/tex/context/base/luat-tmp.lua b/tex/context/base/luat-tmp.lua
index 58a195986..bd29f87f4 100644
--- a/tex/context/base/luat-tmp.lua
+++ b/tex/context/base/luat-tmp.lua
@@ -22,7 +22,7 @@ being written at the same time is small. We also need to extend
luatools with a recache feature.
--ldx]]--
-caches = caches or { }
+caches = caches or { }
dir = dir or { }
texmf = texmf or { }
@@ -34,9 +34,20 @@ caches.trace = false
caches.tree = false
caches.temp = caches.temp or os.getenv("TEXMFCACHE") or os.getenv("HOME") or os.getenv("HOMEPATH") or os.getenv("VARTEXMF") or os.getenv("TEXMFVAR") or os.getenv("TMP") or os.getenv("TEMP") or os.getenv("TMPDIR") or nil
caches.paths = caches.paths or { caches.temp }
+caches.force = false
+input.usecache = not toboolean(os.getenv("TEXMFSHARECACHE") or "false",true) -- true
+
+if caches.temp and caches.temp ~= "" and lfs.attributes(caches.temp,"mode") ~= "directory" then
+ if caches.force or io.ask(string.format("Should I create the cache path %s?",caches.temp), "no", { "yes", "no" }) == "yes" then
+ lfs.mkdirs(caches.temp)
+ end
+end
if not caches.temp or caches.temp == "" then
- print("\nFATAL ERROR: NO VALID TEMPORARY PATH\n")
+ print("\nfatal error: there is no valid cache path defined\n")
+ os.exit()
+elseif lfs.attributes(caches.temp,"mode") ~= "directory" then
+ print(string.format("\nfatal error: cache path %s is not a directory\n",caches.temp))
os.exit()
end
@@ -223,8 +234,6 @@ end
-- since we want to use the cache instead of the tree, we will now
-- reimplement the saver.
-input.usecache = true
-
function input.aux.save_data(instance, dataname, check)
for cachename, files in pairs(instance[dataname]) do
local name
@@ -420,8 +429,8 @@ end
function input.storage.dump()
for name, data in ipairs(input.storage.data) do
local evaluate, message, original, target = data[1], data[2], data[3] ,data[4]
- local name, initialize, finalize = nil, "", ""
- for str in string.gmatch(target,"([^%.]+)") do
+ local name, initialize, finalize, code = nil, "", "", ""
+ for str in target:gmatch("([^%.]+)") do
if name then
name = name .. "." .. str
else
@@ -435,15 +444,15 @@ function input.storage.dump()
input.storage.max = input.storage.max + 1
if input.storage.trace then
logs.report('storage',string.format('saving %s in slot %s',message,input.storage.max))
- lua.bytecode[input.storage.max] = loadstring(
+ code =
initialize ..
string.format("logs.report('storage','restoring %s from slot %s') ",message,input.storage.max) ..
table.serialize(original,name) ..
finalize
- )
else
- lua.bytecode[input.storage.max] = loadstring(initialize .. table.serialize(original,name) .. finalize)
+ code = initialize .. table.serialize(original,name) .. finalize
end
+ lua.bytecode[input.storage.max] = loadstring(code)
end
end
diff --git a/tex/context/base/luat-tra.lua b/tex/context/base/luat-tra.lua
index f5c077f41..7fc973195 100644
--- a/tex/context/base/luat-tra.lua
+++ b/tex/context/base/luat-tra.lua
@@ -20,8 +20,8 @@ do
if counters[f] == nil then
counters[f] = 1
--~ names[f] = debug.getinfo(2,"Sn")
---~ names[f] = debug.getinfo(2,"n")
- names[f] = debug.getinfo(f)
+ names[f] = debug.getinfo(2,"n")
+--~ names[f] = debug.getinfo(f)
else
counters[f] = counters[f] + 1
end
@@ -51,8 +51,11 @@ do
printer("\n") -- ugly but ok
for func, count in pairs(counters) do
if count > threshold then
- printer(string.format("%8i %s\n", count, getname(func)))
- total = total + count
+ local name = getname(func)
+ if name ~= "(for generator)" then
+ printer(string.format("%8i %s\n", count, getname(func)))
+ total = total + count
+ end
end
grandtotal = grandtotal + count
functions = functions + 1
@@ -78,7 +81,12 @@ do
end
function debugger.tracing()
- return tonumber((os.env['MTX.TRACE.CALLS'] or os.env['MTX_TRACE_CALLS'] or 0)) > 0
+ local n = tonumber(os.env['MTX.TRACE.CALLS']) or tonumber(os.env['MTX_TRACE_CALLS']) or 0
+ if n > 0 then
+ function debugger.tracing() return true end ; return true
+ else
+ function debugger.tracing() return false end ; return false
+ end
end
end
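+
+-- the same self redefining trick in isolation (a sketch, not part of the patch): the
+-- first call inspects the environment, later calls just return the cached answer
+--
+--~ function debugger.enabled() -- hypothetical companion, name made up
+--~     if os.getenv("MTX_DEBUG") then -- assumption: any value enables it
+--~         function debugger.enabled() return true  end ; return true
+--~     else
+--~         function debugger.enabled() return false end ; return false
+--~     end
+--~ end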
diff --git a/tex/context/base/luat-zip.lua b/tex/context/base/luat-zip.lua
index fbf00a47e..c5a63aeed 100644
--- a/tex/context/base/luat-zip.lua
+++ b/tex/context/base/luat-zip.lua
@@ -164,7 +164,7 @@ else
function input.registerzipfile(instance,zipname,tag)
if not zip.registeredfiles[zipname] then
- input.start_timing(instance)
+ input.starttiming(instance)
local z = zip.open(zipname)
if not z then
zipname = input.find_file(instance,zipname)
@@ -177,7 +177,7 @@ else
else
input.logger("? zipfile","unknown "..zipname)
end
- input.stop_timing(instance)
+ input.stoptiming(instance)
end
end
diff --git a/tex/context/base/lxml-ini.lua b/tex/context/base/lxml-ini.lua
index 67e1bc75b..69d828d3d 100644
--- a/tex/context/base/lxml-ini.lua
+++ b/tex/context/base/lxml-ini.lua
@@ -113,8 +113,8 @@ do
function lxml.verbatim(id,before,after)
local root = lxml.id(id)
- if before then tex.sprint(tex.ctxcatcodes,string.format("%s[%s]",before,id.tg)) end
- xml.serialize(root,toverbatim,nil,nil,nil,true)
+ if before then tex.sprint(tex.ctxcatcodes,string.format("%s[%s]",before,root.tg)) end
+ xml.serialize(root.dt,toverbatim,nil,nil,nil,true) -- was root
if after then tex.sprint(tex.ctxcatcodes,after) end
end
function lxml.inlineverbatim(id)
@@ -136,19 +136,36 @@ function lxml.root(id)
return lxml.loaded[id]
end
+-- redefine xml load
+
+xml.originalload = xml.load
+
+function xml.load(filename)
+ input.starttiming(lxml)
+ local x = xml.originalload(filename)
+ input.stoptiming(lxml)
+ return x
+end
+
+function lxml.filename(filename) -- some day we will do this in input, first figure out /
+ return input.find_file(texmf.instance,url.filename(filename)) or ""
+end
+
function lxml.load(id,filename)
- input.start_timing(lxml)
if texmf then
- local fullname = input.find_file(texmf.instance,filename) or ""
+ local fullname = lxml.filename(filename)
if fullname ~= "" then
filename = fullname
end
end
lxml.loaded[id] = xml.load(filename)
- input.stop_timing(lxml)
return lxml.loaded[id], filename
end
+function lxml.include(id,pattern,attribute,recurse)
+ xml.include(lxml.id(id),pattern,attribute,recurse,lxml.filename)
+end
+
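+-- typical use, mirroring the \xmlinclude macro defined in lxml-ini.tex (the names
+-- below are just the example from that file):
+--
+--~ lxml.include("main","include","filename|href",true)
+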
function lxml.utfize(id)
xml.utfize(lxml.id(id))
end
@@ -199,7 +216,8 @@ function lxml.index(id,pattern,i)
end
function lxml.attribute(id,pattern,a,default) --todo: snelle xmlatt
- tex.sprint((xml.filters.attribute(lxml.id(id),pattern,a)) or default or "")
+ local str = xml.filters.attribute(lxml.id(id),pattern,a) or ""
+ tex.sprint((str == "" and default) or str)
end
function lxml.count(id,pattern)
@@ -217,7 +235,8 @@ function lxml.tag(id)
tex.sprint(lxml.id(id).tg or "")
end
function lxml.namespace(id) -- or remapped name?
- tex.sprint(lxml.id(id).ns or "")
+ local root = lxml.id(id)
+ tex.sprint(root.rn or root.ns or "")
end
--~ function lxml.concat(id,what,separator,lastseparator)
@@ -264,6 +283,7 @@ end
lxml.trace_setups = false
function lxml.setsetup(id,pattern,setup)
+ local trace = lxml.trace_setups
if not setup or setup == "" or setup == "*" then
for rt, dt, dk in xml.elements(lxml.id(id),pattern) do
local dtdk = dt and dt[dk] or rt
@@ -273,13 +293,25 @@ function lxml.setsetup(id,pattern,setup)
else
dtdk.command = ns .. ":" .. tg
end
- if lxml.trace_setups then
- texio.write_nl(string.format("xml setup: namespace=%s, tag=%s, setup=%s",ns, tg, dtdk.command))
+ if trace then
+ texio.write_nl(string.format("lpath matched -> %s -> %s", dtdk.command, dtdk.command))
end
end
else
+ if trace then
+ texio.write_nl(string.format("lpath pattern -> %s -> %s", pattern, setup))
+ end
for rt, dt, dk in xml.elements(lxml.id(id),pattern) do
- ((dt and dt[dk]) or rt).command = setup
+ local dtdk = (dt and dt[dk]) or rt
+ dtdk.command = setup
+ if trace then
+ local ns, tg = dtdk.rn or dtdk.ns, dtdk.tg
+ if ns == "" then
+ texio.write_nl(string.format("lpath matched -> %s -> %s", tg, setup))
+ else
+ texio.write_nl(string.format("lpath matched -> %s:%s -> %s", ns, tg, setup))
+ end
+ end
end
end
end
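+
+-- a usage sketch (document id, pattern and setup names are hypothetical): the "*"
+-- form maps every matched element to a setup named after the element itself
+--
+--~ lxml.setsetup("main","*","*")
+--~ lxml.setsetup("main","b|em","xml:emphasis")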
@@ -312,7 +344,7 @@ do
traverse(root, lpath(pattern), function(r,d,k)
-- this can become pretty large
local n = #lxml.self + 1
- lxml.self[n] = d[k]
+ lxml.self[n] = (d and d[k]) or r
tex.sprint(tex.ctxcatcodes,string.format("\\xmlsetup{%s}{%s}",n,command))
end)
end
@@ -424,9 +456,9 @@ function xml.getbuffer(name) -- we need to make sure that commands are processed
end
function lxml.loadbuffer(id,name)
- input.start_timing(lxml)
+ input.starttiming(lxml)
lxml.loaded[id] = xml.convert(table.join(buffers.data[name or id] or {},""))
- input.stop_timing(lxml)
+ input.stoptiming(lxml)
return lxml.loaded[id], name or id
end
diff --git a/tex/context/base/lxml-ini.tex b/tex/context/base/lxml-ini.tex
index 0df2d302f..8f25a2d4a 100644
--- a/tex/context/base/lxml-ini.tex
+++ b/tex/context/base/lxml-ini.tex
@@ -17,38 +17,49 @@
\unprotect
-\def\xmlload #1#2{\ctxlua{lxml.load("#1","#2")}}
-\def\xmlloadbuffer #1#2{\ctxlua{lxml.loadbuffer("#1","#2")}}
-\def\xmlutfize #1{\ctxlua{lxml.utfize("#1")}}
-\def\xmlfirst #1#2{\ctxlua{lxml.first("#1","#2")}}
-\def\xmllast #1#2{\ctxlua{lxml.last("#1","#2")}}
-\def\xmlall #1#2{\ctxlua{lxml.all("#1","#2")}}
-\def\xmlfilter #1#2{\ctxlua{lxml.filter("#1","#2")}}
-\def\xmlcommand #1#2#3{\ctxlua{lxml.command("#1","#2","#3")}}
-\def\xmlnonspace #1#2{\ctxlua{lxml.nonspace("#1","#2")}}
-\def\xmltext #1#2{\ctxlua{lxml.text("#1","#2")}}
-\def\xmlcontent #1#2{\ctxlua{lxml.content("#1","#2")}}
-\def\xmlstripped #1#2{\ctxlua{lxml.stripped("#1","#2")}}
-\def\xmlstrip #1#2{\ctxlua{lxml.strip("#1","#2")}}
-\def\xmlflush #1{\ctxlua{lxml.flush("#1")}}
-\def\xmlindex #1#2#3{\ctxlua{lxml.index("#1","#2",\number#3)}}
-\def\xmlidx #1#2#3{\ctxlua{lxml.idx("#1","#2",\number#3)}}
-\def\xmlcount #1#2{\ctxlua{lxml.count("#1","#2")}}
-\def\xmlname #1{\ctxlua{lxml.name("#1")}}
-\def\xmlnamespace #1{\ctxlua{lxml.namespace("#1")}}
-\def\xmltag #1{\ctxlua{lxml.tag("#1")}}
-\def\xmlattribute #1#2#3{\ctxlua{lxml.attribute("#1","#2","#3")}}
-\def\xmlatt #1#2{\ctxlua{lxml.attribute("#1","/","#2")}}
-\def\xmlattdef #1#2#3{\ctxlua{lxml.attribute("#1","/","#2","#3")}}
-\def\xmlconcat #1#2#3{\ctxlua{lxml.concat("#1","#2",[[\detokenize{#3}]])}}
-\def\xmlsetsetup #1#2#3{\ctxlua{lxml.setsetup("#1","#2","#3")}}
-\def\xmlsetfunction#1#2#3{\ctxlua{lxml.setaction("#1","#2",#3)}}
-\def\xmlloaddirectives #1{\ctxlua{lxml.directives.load("#1")}}
-\def\xmldirectives #1{\ctxlua{lxml.directives.setups("#1")}}
-
-\def\xmlregisterns #1#2{\ctxlua{xml.registerns("#1","#2")}} % document
+\def\xmlload #1#2{\ctxlua{lxml.load("#1","#2")}}
+\def\xmlloadbuffer #1#2{\ctxlua{lxml.loadbuffer("#1","#2")}}
+\def\xmlutfize #1{\ctxlua{lxml.utfize("#1")}}
+\def\xmlfirst #1#2{\ctxlua{lxml.first("#1","#2")}}
+\def\xmllast #1#2{\ctxlua{lxml.last("#1","#2")}}
+\def\xmlall #1#2{\ctxlua{lxml.all("#1","#2")}}
+\def\xmlfilter #1#2{\ctxlua{lxml.filter("#1","#2")}}
+\def\xmlcommand #1#2#3{\ctxlua{lxml.command("#1","#2","#3")}}
+\def\xmlnonspace #1#2{\ctxlua{lxml.nonspace("#1","#2")}}
+\def\xmltext #1#2{\ctxlua{lxml.text("#1","#2")}}
+\def\xmlcontent #1#2{\ctxlua{lxml.content("#1","#2")}}
+\def\xmlstripped #1#2{\ctxlua{lxml.stripped("#1","#2")}}
+\def\xmlstrip #1#2{\ctxlua{lxml.strip("#1","#2")}}
+\def\xmlflush #1{\ctxlua{lxml.flush("#1")}}
+\def\xmlindex #1#2#3{\ctxlua{lxml.index("#1","#2",\number#3)}}
+\def\xmlidx #1#2#3{\ctxlua{lxml.idx("#1","#2",\number#3)}}
+\def\xmlcount #1#2{\ctxlua{lxml.count("#1","#2")}}
+\def\xmlname #1{\ctxlua{lxml.name("#1")}}
+\def\xmlnamespace #1{\ctxlua{lxml.namespace("#1")}}
+\def\xmltag #1{\ctxlua{lxml.tag("#1")}}
+\def\xmlattribute #1#2#3{\ctxlua{lxml.attribute("#1","#2","#3")}}
+\def\xmlattributedef#1#2#3#4{\ctxlua{lxml.attribute("#1","#2","#3","#4")}}
+\def\xmlatt #1#2{\ctxlua{lxml.attribute("#1","/","#2")}}
+\def\xmlattdef #1#2#3{\ctxlua{lxml.attribute("#1","/","#2","#3")}}
+\def\xmlconcat #1#2#3{\ctxlua{lxml.concat("#1","#2",[[\detokenize{#3}]])}}
+\def\xmlsetsetup #1#2#3{\ctxlua{lxml.setsetup("#1","#2","#3")}}
+\def\xmlsetfunction #1#2#3{\ctxlua{lxml.setaction("#1","#2",#3)}}
+\def\xmlloaddirectives #1{\ctxlua{lxml.directives.load("#1")}}
+\def\xmldirectives #1{\ctxlua{lxml.directives.setups("#1")}}
+\def\xmlregisterns #1#2{\ctxlua{xml.registerns("#1","#2")}} % document
\def\xmlchecknamespace#1#2#3{\ctxlua{xml.check_namespace(lxml.id("#1"),"#2","#3")}} % element
\def\xmlremapname #1#2#3#4{\ctxlua{xml.remapname(lxml.id("#1"),"#2","#3","#4")}} % element
+\def\xmlremapnamespace#1#2#3{\ctxlua{xml.rename_space(lxml.id("#1"),"#2","#3")}} % document
+\def\xmldelete #1#2{\ctxlua{xml.delete(lxml.id("#1"),"#2")}}
+\def\xmlinclude #1#2#3{\ctxlua{lxml.include("#1","#2","#3",true)}}
+\def\xmldoifelse #1#2{\ctxlua{cs.testcase(xml.found(lxml.id("#1"),"#2",false))}}
+\def\xmldoifelsetext #1#2{\ctxlua{cs.testcase(xml.found(lxml.id("#1"),"#2",true ))}}
+
+% \startxmlsetups xml:include
+% \xmlinclude{main}{include}{filename|href}
+% \stopxmlsetups
+%
+% \xmlprependsetup{xml:include}
\let\xmlgrab\xmlsetsetup
@@ -58,8 +69,10 @@
\newtoks \registeredxmlsetups
-\def\xmlregistersetup#1%
- {\appendtoksonce\directsetup{#1}\to\registeredxmlsetups}
+\def\xmlappendsetup #1{\appendtoksonce \directsetup{#1}\to\registeredxmlsetups} % todo: make this faster
+\def\xmlprependsetup#1{\prependtoksonce\directsetup{#1}\to\registeredxmlsetups} % todo: make this faster
+
+\let\xmlregistersetup\xmlappendsetup
\def\xmlregisteredsetups
{\the\registeredxmlsetups}
@@ -103,7 +116,7 @@
\def\xmlcdataobeyedline {\obeyedline}
\def\xmlcdataobeyedspace{\strut\obeyedspace}
\def\xmlcdatabefore {\bgroup\tt}
-\def\xmlcdataafter {\egroup x}
+\def\xmlcdataafter {\egroup}
% verbatim (todo: pre/post whitespace, maybe split verbatim and
% cdata commands), experimental:
@@ -116,20 +129,26 @@
% \def\startxmlinlineverbatim [#1]{}
% \def\stopxmlinlineverbatim {}
+% we use an xml: namespace so one has to define a suitable verbatim, say
+%
+% \definetyping[xml:verbatim][typing]
+%
+% this is experimental!
+
\def\startxmldisplayverbatim[#1]%
- {\begingroup
+ {\startpacked % \begingroup
\let\currenttypingclass\??tp
- \edef\currenttyping{#1}%
+ \edef\currenttyping{xml:#1}%
\def\stopxmldisplayverbatim
{\endofverbatimlines
- \endgroup}%
+ \stoppacked} % \endgroup
\mkinitializeverbatim
\beginofverbatimlines}
\def\startxmlinlineverbatim[#1]%
{\begingroup
\let\currenttypingclass\??ty
- \edef\currenttyping{#1}%
+ \edef\currenttyping{xml:#1}%
\let\stopxmldisplayverbatim\endgroup
\mkinitializeverbatim}
diff --git a/tex/context/base/math-ext.tex b/tex/context/base/math-ext.tex
index a5dcabd20..cf332ba00 100644
--- a/tex/context/base/math-ext.tex
+++ b/tex/context/base/math-ext.tex
@@ -10,7 +10,6 @@
%C This module is part of the \CONTEXT\ macro||package and is
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%M \input math-ext
\unprotect
@@ -252,10 +251,11 @@
%D \macros{mtharrowfill,defaultmtharrowfill}
%D
%D To extend the arrows we need to define a \quotation{math arrow
-%D fill}. This command takes 7 arguments: the first four correspond
+%D fill}. This command takes 8 arguments: the first four correspond to
%D the second argument of \tex{definematharrow} explained above. The
-%D other three specify the tail, body and head of the arrow.
-%D \tex{defaultmtharrowfill} has values tweaked to match latin modern
+%D next three specify the tail, body and head of the arrow. The last
+%D argument specifies the math style in which the arrow is drawn.
+%D \tex{defaultmtharrowfill} has values tweaked to match Latin Modern
%D fonts. For fonts that are significantly different (e.g. cows) a
%D different set of values needs to be determined.
@@ -270,11 +270,21 @@
\def\defaultmtharrowfill{\mtharrowfill 7227}
%D We now define some arrow fills that will be used for defining the
-%D arrows. \tex{leftarrowfill} and \tex{rightarrowfill} are redefined
-%D using \tex{defaultmtharrowfill}.
+%D arrows. Plain \TEX\ already defines \tex{leftarrowfill} and
+%D \tex{rightarrowfill}. The \tex{defaultmtharrowfill} command defines an
+%D arrowfill that takes an argument (so that it can also be used
+%D with over and under arrows). However, the Plain \TEX\ definitions of
+%D \tex{leftarrowfill} and \tex{rightarrowfill} do not take this extra
+%D argument. To be backward compatible with Plain \TEX, we define two
+%D arrowfills: \tex{specrightarrowfill} which takes an extra argument, and
+%D \tex{rightarrowfill} which does not.
+
+\def\specrightarrowfill {\defaultmtharrowfill \relbar \relbar \rightarrow}
+\def\specleftarrowfill {\defaultmtharrowfill \leftarrow \relbar \relbar}
+
+\def\rightarrowfill {\specrightarrowfill \textstyle}
+\def\leftarrowfill {\specleftarrowfill \textstyle}
-\def\rightarrowfill {\defaultmtharrowfill \relbar \relbar \rightarrow}
-\def\leftarrowfill {\defaultmtharrowfill \leftarrow \relbar \relbar}
\def\equalfill {\defaultmtharrowfill \Relbar \Relbar \Relbar}
\def\Rightarrowfill {\defaultmtharrowfill \Relbar \Relbar \Rightarrow}
\def\Leftarrowfill {\defaultmtharrowfill \Leftarrow \Relbar \Relbar}
@@ -303,8 +313,8 @@
%D \filename{extpfel.sty} and \filename{mathtools.sty} packages for
%D \LATEX\ (plus a few more).
-\definematharrow [xrightarrow] [0359] [\rightarrowfill]
-\definematharrow [xleftarrow] [3095] [\leftarrowfill]
+\definematharrow [xrightarrow] [0359] [\specrightarrowfill]
+\definematharrow [xleftarrow] [3095] [\specleftarrowfill]
\definematharrow [xequal] [0099] [\equalfill]
\definematharrow [xRightarrow] [0359] [\Rightarrowfill]
\definematharrow [xLeftarrow] [3095] [\Leftarrowfill]
@@ -321,7 +331,7 @@
\definematharrow [xhookrightarrow] [0395] [\hookrightfill]
\definematharrow [xrel] [0099] [\relfill]
\definematharrow [xtriplerel] [0099] [\triplerelfill]
-\definematharrow [xrightoverleftarrow] [0359,3095] [\rightarrowfill,\leftarrowfill]
+\definematharrow [xrightoverleftarrow] [0359,3095] [\specrightarrowfill,\specleftarrowfill]
\definematharrow [xleftrightharpoons] [3399,3399] [\leftharpoonupfill,\rightharpoondownfill]
\definematharrow [xrightleftharpoons] [3399,3399] [\rightharpoonupfill,\leftharpoondownfill]
@@ -394,8 +404,8 @@
%D Now we define the arrows
-\definemathoverarrow [overleftarrow] [\leftarrowfill]
-\definemathoverarrow [overrightarrow] [\rightarrowfill]
+\definemathoverarrow [overleftarrow] [\specleftarrowfill]
+\definemathoverarrow [overrightarrow] [\specrightarrowfill]
\definemathoverarrow [overleftrightarrow] [\leftrightarrowfill]
\definemathoverarrow [overtwoheadrightarrow] [\twoheadrightarrowfill]
\definemathoverarrow [overtwoheadleftarrow] [\twoheadleftarrowfill]
@@ -404,8 +414,8 @@
\definemathoverarrow [overleftharpoondown] [1pt] [\leftharpoondownfill]
\definemathoverarrow [overleftharpoonup] [\leftharpoonupfill]
-\definemathunderarrow [underleftarrow] [\leftarrowfill]
-\definemathunderarrow [underrightarrow] [\rightarrowfill]
+\definemathunderarrow [underleftarrow] [\specleftarrowfill]
+\definemathunderarrow [underrightarrow] [\specrightarrowfill]
\definemathunderarrow [underleftrightarrow] [\leftrightarrowfill]
\definemathunderarrow [undertwoheadrightarrow][\twoheadrightarrowfill]
\definemathunderarrow [undertwoheadleftarrow] [\twoheadleftarrowfill]
diff --git a/tex/context/base/meta-pdf.lua b/tex/context/base/meta-pdf.lua
index 5d9c43231..fc9ac004e 100644
--- a/tex/context/base/meta-pdf.lua
+++ b/tex/context/base/meta-pdf.lua
@@ -8,6 +8,8 @@
-- version 2 gsubbed the file into TeX code, and version 3 uses
-- the new lpeg functionality and streams the result into TeX.
+-- We will move old stuff to edu.
+
--~ old lpeg 0.4 lpeg 0.5
--~ 100 times test graphic 2.45 (T:1.07) 0.72 (T:0.24) 0.580 (0.560 no table) -- 0.54 optimized for one space (T:0.19)
--~ 100 times big graphic 10.44 4.30/3.35 nogb 2.914 (2.050 no table) -- 1.99 optimized for one space (T:0.85)
@@ -46,7 +48,7 @@ function mptopdf.parse()
mptopdf.parsers[mptopdf.parser]()
end
--- shared code
+-- old code
mptopdf.steps = { }
@@ -63,8 +65,6 @@ function mptopdf.descape(str)
return str:gsub("\\([%(%)\\])",mptopdf.descapes)
end
--- old code
-
function mptopdf.steps.descape(str)
str = str:gsub("\\(%d%d%d)",function(n)
return "\\\\char" .. tonumber(n,8) .. " "
@@ -217,10 +217,10 @@ end
function mptopdf.convertmpstopdf(name)
if mptopdf.loaded(name) then
garbagecollector.push()
- input.start_timing(mptopdf)
+ input.starttiming(mptopdf)
mptopdf.parse()
mptopdf.reset()
- input.stop_timing(mptopdf)
+ input.stoptiming(mptopdf)
garbagecollector.pop()
else
tex.print("file " .. name .. " not found")
@@ -342,18 +342,6 @@ function mp.textext(font, scale, str) -- old parser
mptopdf.resetpath()
end
-function mp.fshow(str,font,scale) -- lpeg parser
- mp.textext(font,scale,mptopdf.descape(str))
---~ local dx, dy = 0, 0
---~ if #mptopdf.stack.path > 0 then
---~ dx, dy = mptopdf.stack.path[1][1], mptopdf.stack.path[1][2]
---~ end
---~ mptopdf.flushconcat()
---~ mptopdf.texcode("\\MPStextext{"..font.."}{"..scale.."}{"..mptopdf.descape(str).."}{"..dx.."}{"..dy.."}")
---~ mptopdf.resetpath()
-end
-
-
--~ function mp.handletext(font,scale.str,dx,dy)
--~ local one, two = string.match(str, "^(%d+)::::(%d+)")
--~ if one and two then
@@ -473,6 +461,24 @@ end
-- that MetaPost produces. It's my first real lpeg code, which may
-- show. Because the parser binds to functions, we define it last.
+do -- assumes \let\c\char
+
+ local byte = string.byte
+ local digit = lpeg.R("09")
+ local spec = digit^2 * lpeg.P("::::") * digit^2
+ local text = lpeg.Cc("{") * (
+ lpeg.P("\\") * ( (digit * digit * digit) / function(n) return "c" .. tonumber(n,8) end) +
+ lpeg.P(" ") / function(n) return "\\c32" end + -- never in new mp
+ lpeg.P(1) / function(n) return "\\c" .. byte(n) end
+ ) * lpeg.Cc("}")
+ local package = lpeg.Cs(spec + text^0)
+
+ function mp.fshow(str,font,scale) -- lpeg parser
+ mp.textext(font,scale,package:match(str))
+ end
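+
+ -- what the packager produces, roughly (an illustration, not part of the patch);
+ -- each character ends up as a {\c<number>} group so that the \let\c\char set in
+ -- meta-pdf.mkiv can typeset it:
+ --
+ --~ package:match("a\\304 b") -- {\c97}{\c196}{\c32}{\c98}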
+
+end
+
do
local eol = lpeg.S('\r\n')^1
@@ -517,8 +523,10 @@ do
local concat = (lpeg.P("[") * (cnumber * sp^0)^6 * lpeg.P("]") * sp * lpeg.P("concat") ) / mp.concat
local scale = ( (cnumber * sp^0)^6 * sp * lpeg.P("concat") ) / mp.concat
- local fshow = (lpeg.P("(") * lpeg.C((1-lpeg.P(")"))^1) * lpeg.P(")") * space * lpeg.C(lpeg.P((1-space)^1)) * space * cnumber * space * lpeg.P("fshow")) / mp.fshow
local fshow = (lpeg.P("(") * lpeg.C((1-lpeg.P(")"))^1) * lpeg.P(")") * space * cstring * space * cnumber * space * lpeg.P("fshow")) / mp.fshow
+ local fshow = (lpeg.P("(") *
+ lpeg.Cs( ( lpeg.P("\\(")/"\\050" + lpeg.P("\\)")/"\\051" + (1-lpeg.P(")")) )^1 )
+ * lpeg.P(")") * space * cstring * space * cnumber * space * lpeg.P("fshow")) / mp.fshow
local setlinewidth_x = (lpeg.P("0") * sp * cnumber * sp * lpeg.P("dtransform truncate idtransform setlinewidth pop")) / mp.setlinewidth
local setlinewidth_y = (cnumber * sp * lpeg.P("0 dtransform exch truncate exch idtransform pop setlinewidth") ) / mp.setlinewidth
@@ -560,7 +568,6 @@ do
local attribute = ((cnumber * sp)^2 * lpeg.P("attribute")) / mp.attribute
local A = ((cnumber * sp)^2 * lpeg.P("A")) / mp.attribute
-
local preamble = (
prolog + setup +
boundingbox + highresboundingbox + specials + special +
@@ -570,7 +577,7 @@ do
local procset = (
lj + ml + lc +
c + l + m + n + p + r +
-A +
+ A +
R + C + G +
S + F + B + W +
vlw + hlw +
@@ -584,7 +591,7 @@ A +
local verbose = (
curveto + lineto + moveto + newpath + closepath + rlineto +
setrgbcolor + setcmykcolor + setgray +
-attribute +
+ attribute +
setlinejoin + setmiterlimit + setlinecap +
stroke + fill + clip + both +
setlinewidth_x + setlinewidth_y +
diff --git a/tex/context/base/meta-pdf.mkii b/tex/context/base/meta-pdf.mkii
index 27774b34d..97aedf97e 100644
--- a/tex/context/base/meta-pdf.mkii
+++ b/tex/context/base/meta-pdf.mkii
@@ -639,8 +639,15 @@
%D finally I saw the light. It proved that we also had to
%D take care of \type{(split arguments)}.
+% \startMPcode
+% draw btex Ga toch effe f\kern0ptietsen?{}` etex ;
+% \stopMPcode
+
+\newtoks \everyMPshowfont
+
\def\setMPfshowfont#1#2%
- {\font\temp=#1\space at #2\relax\temp}
+ {\font\temp=#1\space at #2\relax\temp
+ \the\everyMPshowfont}
\let\MPfshowcommand\empty
@@ -648,7 +655,7 @@
{\setbox\scratchbox\hbox
{\obeyMPspecials
\edef\MPtextsize{\gMPa\nofMParguments}%
- \def\do(##1){##1}%
+ \def\do(##1){##1}% only works in latest mp
\edef\MPtextdata{\dogMPa1}% beware, stack can have more
\handleMPtext}%
\setbox\scratchbox\hbox
@@ -672,6 +679,29 @@
% elsewhere we will implement \handleMPtextmptxt
+\def\doflushMPtext#1%
+ {\edef\!!stringa{#1}%
+ \@EA\dodoflushMPtext\!!stringa\relax}
+
+\def\dodoflushMPtext
+ {\afterassignment\dododoflushMPtext\let\nexttoken=}
+
+\def\dododoflushMPtext
+ {\ifx\nexttoken\relax
+ % done
+ \else\ifx\nexttoken\char
+ \@EA\@EA\@EA\dodododoflushMPtext
+ \else
+ {\nexttoken}%
+ \@EA\@EA\@EA\dodoflushMPtext
+ \fi\fi}
+
+\def\dodododoflushMPtext
+ {\afterassignment\dododododoflushMPtext\scratchcounter}
+
+\def\dododododoflushMPtext
+ {{\char\scratchcounter}\let\next\dodoflushMPtext}
+
\def\handleMPtextnormal
{\let\ \relax % mp breaks long lines and appends a \
\ifx\MPtextsize\PSnfont % round font size (to pt)
@@ -694,8 +724,8 @@
\MPfshowcommand
{\ifnum\nofMParguments=\plusone
\def\do(##1){##1}%
- \dogMPa1%
- \else
+ \doflushMPtext{\dogMPa1}%
+ \else % can't happen anymore in mp version 1+
% we need to catch ( a ) (a a a) (\123 \123 \123) etc
\scratchcounter\plusone
\def\dodo##1% Andreas Fieger's bug: (\304...)
@@ -862,7 +892,7 @@
\or
\ifx\lastMPmoveX\empty \else % we assume 0,0 rlineto
\flushMPmoveto
- \PDFcode{\!MP\lastMPmoveX\space \!MP\lastMPmoveY\space l S}%
+ \PDFcode{\!MP\lastMPmoveX\space \!MP\lastMPmoveY\space l}%
\resetMPmoveto
\fi
\or
diff --git a/tex/context/base/meta-pdf.mkiv b/tex/context/base/meta-pdf.mkiv
index d10734547..e8ce94146 100644
--- a/tex/context/base/meta-pdf.mkiv
+++ b/tex/context/base/meta-pdf.mkiv
@@ -28,7 +28,7 @@
%D Plugin.
-\def\mkconvertMPtoPDF
+\def\mkconvertMPtoPDF % watch the transparency reset
{\vbox\bgroup
\forgetall
\offinterlineskip
@@ -40,6 +40,7 @@
\PDFcomment{mps begin}%
\PDFcode{q 1 0 0 1 0 0 cm}%
\ctxlua{mptopdf.convertmpstopdf("\MPfilename")}\removeunwantedspaces
+ \dohandleMPresettransparency % a bit messy here, should be a toks
\PDFcode{Q}%
\PDFcomment{mps end}%
\stopMPresources
@@ -86,6 +87,7 @@
\executeifdefined{handleMPtext\MPtexttag}
{\setbox\scratchbox\hbox
{\font\temp=#1\space at #2\onebasepoint
+ \let\c\char
\temp
\MPfshowcommand{#3}}%
\setbox\scratchbox\hbox
@@ -113,11 +115,11 @@
%D The boundingbox.
\def\MPSboundingbox#1#2#3#4%
- {\xdef\MPllx{#1}
- \xdef\MPlly{#2}
- \xdef\MPurx{#3}
- \xdef\MPury{#4}
- \xdef\MPwidth {\the\dimexpr#3\onebasepoint-#1\onebasepoint\relax}
+ {\xdef\MPllx{#1}%
+ \xdef\MPlly{#2}%
+ \xdef\MPurx{#3}%
+ \xdef\MPury{#4}%
+ \xdef\MPwidth {\the\dimexpr#3\onebasepoint-#1\onebasepoint\relax}%
\xdef\MPheight{\the\dimexpr#4\onebasepoint-#2\onebasepoint\relax}}
\MPSboundingbox0000
diff --git a/tex/context/base/meta-pdf.tex b/tex/context/base/meta-pdf.tex
index 4dec40e70..d5b7f202f 100644
--- a/tex/context/base/meta-pdf.tex
+++ b/tex/context/base/meta-pdf.tex
@@ -13,6 +13,8 @@
%D Formerly known as supp-pdf.tex and supp-mpe.tex.
+%D We will clean up the color mess later.
+
%D These macros are written as generic as possible. Some
%D general support macro's are loaded from a small module
%D especially made for non \CONTEXT\ use. In this module I
@@ -968,19 +970,27 @@
\ifx\colorversion\undefined \else \ifnum\colorversion>\plusone
- \def\dohandleMPgraycolor #1{\ctxlua{ctx.pdfgrayliteral(\the\currentcolormodel,#1)}}
- \def\dohandleMPrgbcolor #1#2#3{\ctxlua{ctx.pdfrgbliteral (\the\currentcolormodel,#1,#2,#3)}}
- \def\dohandleMPcmykcolor#1#2#3#4{\ctxlua{ctx.pdfcmykliteral(\the\currentcolormodel,#1,#2,#3,#4)}}
- \def\dohandleMPspotcolor#1#2#3#4{\ctxlua{ctx.pdfspotliteral(\the\currentcolormodel,"#1",#2,"#3","#4")}}
-
- \def\dohandleMPgraytransparency #1#2#3{\ctxlua{ctx.pdfgrayliteral(\the\currentcolormodel,#1)}%
- \ctxlua{ctx.pdftransparencyliteral(#2,#3)}}
- \def\dohandleMPrgbtransparency #1#2#3#4#5{\ctxlua{ctx.pdfrgbliteral (\the\currentcolormodel,#1,#2,#3)}%
- \ctxlua{ctx.pdftransparencyliteral(#4,#5)}}
- \def\dohandleMPcmyktransparency#1#2#3#4#5#6{\ctxlua{ctx.pdfcmykliteral(\the\currentcolormodel,#1,#2,#3,#4)}%
- \ctxlua{ctx.pdftransparencyliteral(#5,#6)}}
- \def\dohandleMPspottransparency#1#2#3#4#5#6{\ctxlua{ctx.pdfspotliteral(\the\currentcolormodel,"#1",#2,"#3","#4")}%
- \ctxlua{ctx.pdftransparencyliteral(#5,#6)}}
+ \def\dohandleMPgraycolor #1{\ctxlua{ctx.pdffinishtransparency()
+ ctx.pdfgrayliteral(\the\currentcolormodel,#1)}}
+ \def\dohandleMPrgbcolor #1#2#3{\ctxlua{ctx.pdffinishtransparency()
+ ctx.pdfrgbliteral (\the\currentcolormodel,#1,#2,#3)}}
+ \def\dohandleMPcmykcolor#1#2#3#4{\ctxlua{ctx.pdffinishtransparency()
+ ctx.pdfcmykliteral(\the\currentcolormodel,#1,#2,#3,#4)}}
+ \def\dohandleMPspotcolor#1#2#3#4{\ctxlua{ctx.pdffinishtransparency()
+ ctx.pdfspotliteral(\the\currentcolormodel,"#1",#2,"#3","#4")}}
+
+ % we can combine the next calls
+
+ \def\dohandleMPgraytransparency #1#2#3{\ctxlua{ctx.pdfgrayliteral(\the\currentcolormodel,#1)
+ ctx.pdftransparencyliteral(#2,#3)}}
+ \def\dohandleMPrgbtransparency #1#2#3#4#5{\ctxlua{ctx.pdfrgbliteral (\the\currentcolormodel,#1,#2,#3)
+ ctx.pdftransparencyliteral(#4,#5)}}
+ \def\dohandleMPcmyktransparency#1#2#3#4#5#6{\ctxlua{ctx.pdfcmykliteral(\the\currentcolormodel,#1,#2,#3,#4)
+ ctx.pdftransparencyliteral(#5,#6)}}
+ \def\dohandleMPspottransparency#1#2#3#4#5#6{\ctxlua{ctx.pdfspotliteral(\the\currentcolormodel,"#1",#2,"#3","#4")
+ ctx.pdftransparencyliteral(#5,#6)}}
+
+ \def\dohandleMPresettransparency {\ctxlua{ctx.pdffinishtransparency()}}
\def\resolveMPgraycolor #1\to#2{\ctxlua{ctx.resolvempgraycolor("\strippedcsname#2","MPresolvedspace",\number\currentcolormodel,#1)}}
\def\resolveMPrgbcolor #1#2#3\to#4{\ctxlua{ctx.resolvemprgbcolor ("\strippedcsname#4","MPresolvedspace",\number\currentcolormodel,#1,#2,#3)}}
diff --git a/tex/context/base/mult-con.tex b/tex/context/base/mult-con.tex
index 22103e615..7a37869a9 100644
--- a/tex/context/base/mult-con.tex
+++ b/tex/context/base/mult-con.tex
@@ -502,6 +502,10 @@ subsubsubsubsubsubject: subsubsubsubsubonderwerp subsubsubsubsub
zitat citace
citazione citat
citation
+ blockquote: blokcitaat blockquote
+ blockquote blockquote
+ blockquote blockquote
+ blockquote
quote: citeer quote
zitieren citovat
menzione minicitat
@@ -1801,6 +1805,10 @@ numberwidth: nummerbreedte numberwidth
ausrichtung zarovnani
allinea aliniere
alignement
+ symalign: symuitlijnen symalign
+ symausrichtung symzarovnani
+ symallinea symaliniere
+ symalignement
urlspace: urlspatie urlspace
urlspatium prostorurl
spaziourl spatiuurl
diff --git a/tex/context/base/mult-sys.tex b/tex/context/base/mult-sys.tex
index f4f8aa5a7..69381aab3 100644
--- a/tex/context/base/mult-sys.tex
+++ b/tex/context/base/mult-sys.tex
@@ -574,6 +574,7 @@
\definesystemvariable {vn} % VoetNoten
\definesystemvariable {vt} % VerTical
\definesystemvariable {wr} % WitRuimte
+\definesystemvariable {wl} % WordList
\definesystemvariable {xf} % XML File
\definesystemvariable {xp} % XML Processing
\definesystemvariable {xy} % schaal
diff --git a/tex/context/base/node-ini.lua b/tex/context/base/node-ini.lua
index af30b3940..d2cfc17e0 100644
--- a/tex/context/base/node-ini.lua
+++ b/tex/context/base/node-ini.lua
@@ -8,14 +8,63 @@ if not modules then modules = { } end modules ['node-ini'] = {
--[[ldx--
Access to nodes is what gives LuaTeX its power. Here we
-implement a few helper functions.
+implement a few helper functions. These functions are rather optimized.
--ldx]]--
-nodes = nodes or { }
-nodes.trace = false
+nodes = nodes or { }
+nodes.trace = false
+nodes.ignore = nodes.ignore or false
-- handy helpers
+if node.protect_glyphs then
+
+ nodes.protect_glyphs = node.protect_glyphs
+ nodes.unprotect_glyphs = node.unprotect_glyphs
+
+else do
+
+ -- initial value subtype : X000 0001 = 1 = 0x01 = char
+ --
+ -- expected before linebreak : X000 0000 = 0 = 0x00 = glyph
+ -- X000 0010 = 2 = 0x02 = ligature
+ -- X000 0100 = 4 = 0x04 = ghost
+ -- X000 1010 = 10 = 0x0A = leftboundary lig
+ -- X001 0010 = 18 = 0x12 = rightboundary lig
+ -- X001 1010 = 26 = 0x1A = both boundaries lig
+ -- X000 1100 = 12 = 0x0C = leftghost
+ -- X001 0100 = 20 = 0x14 = rightghost
+
+
+ local glyph = node.id('glyph')
+ local traverse_id = node.traverse_id
+
+ function nodes.protect_glyphs(head)
+ local done = false
+ for g in traverse_id(glyph,head) do
+ local s = g.subtype
+ if s == 1 then
+ done, g.subtype = true, 256
+ elseif s <= 256 then
+ done, g.subtype = true, 256 + s
+ end
+ end
+ return done
+ end
+
+ function nodes.unprotect_glyphs(head)
+ local done = false
+ for g in traverse_id(glyph,head) do
+ local s = g.subtype
+ if s > 256 then
+ done, g.subtype = true, s - 256
+ end
+ end
+ return done
+ end
+
+end end
+
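+-- a usage sketch, assuming 'head' is a node list that contains characters: protecting
+-- shifts the glyph subtypes so the backend sees finished glyphs, unprotecting shifts
+-- them back when the characters need processing again
+--
+--~ nodes.protect_glyphs(head)   -- characters become (processed) glyphs
+--~ nodes.unprotect_glyphs(head) -- and are turned back when needed
+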
do
local remove, free = node.remove, node.free
@@ -76,44 +125,18 @@ function nodes.report(t,done)
end
end
---~ function nodes.count(stack)
---~ if stack then
---~ local n = 0
---~ for _, node in pairs(stack) do
---~ if node then
---~ local kind = node[1]
---~ if kind == 'hlist' or kind == 'vlist' then
---~ local content = node[8]
---~ if type(content) == "table" then
---~ n = n + 1 + nodes.count(content) -- self counts too
---~ else
---~ n = n + 1
---~ end
---~ elseif kind == 'inline' then
---~ n = n + nodes.count(node[4]) -- self does not count
---~ else
---~ n = n + 1
---~ end
---~ end
---~ end
---~ return n
---~ else
---~ return 0
---~ end
---~ end
-
do
local hlist, vlist = node.id('hlist'), node.id('vlist')
- function nodes.count(stack)
+ local function count(stack,flat)
local n = 0
while stack do
local id = stack.id
- if id == hlist or id == vlist then
+ if not flat and (id == hlist or id == vlist) then
local list = stack.list
if list then
- n = n + 1 + nodes.count(list) -- self counts too
+ n = n + 1 + count(list) -- self counts too
else
n = n + 1
end
@@ -125,6 +148,8 @@ do
return n
end
+ nodes.count = count
+
end
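+
+-- quick sanity check (sketch, 'somehead' being whatever list one wants to inspect):
+-- the flat variant counts only the top level, the default also descends into lists
+--
+--~ texio.write_nl(string.format("%s nested, %s flat",nodes.count(somehead),nodes.count(somehead,true)))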
--[[ldx--
@@ -147,95 +172,14 @@ original table is used.
Insertion is handled (at least in MkIV) as follows. When
we need to insert a node at a certain position, we change the node at
that position by a dummy node, tagged inline, which itself
contains the original node and one or more new nodes. Before we pass
back the list we collapse the list. Of course collapsing could be built
into the engine, but this is a not so natural extension.
When we collapse (something that we only do when really needed), we
-also ignore the empty nodes.
+also ignore the empty nodes. [This is obsolete!]
--ldx]]--
---~ function nodes.inline(...)
---~ return { 'inline', 0, nil, { ... } }
---~ end
-
---~ do
-
---~ function collapse(stack,existing_t)
---~ if stack then
---~ local t = existing_t or { }
---~ for _, node in pairs(stack) do
---~ if node then
---~ -- if node[3] then node[3][1] = nil end -- remove status bit
---~ local kind = node[1]
---~ if kind == 'inline' then
---~ collapse(node[4],t)
---~ elseif kind == 'hlist' or kind == 'vlist' then
---~ local content = node[8]
---~ if type(content) == "table" then
---~ node[8] = collapse(content)
---~ end
---~ t[#t+1] = node
---~ else
---~ t[#t+1] = node
---~ end
---~ else
---~ -- deleted node
---~ end
---~ end
---~ return t
---~ else
---~ return stack
---~ end
---~ end
-
---~ nodes.collapse = collapse
-
---~ end
-
---[[ldx--
-The following function implements a generic node processor. A
-generic processer is not that much needed, because we often need
-to act differently for horizontal or vertical lists. For instance
-counting nodes needs a different method (ok, we could add a second
-handle for catching them but it would become messy then).
---ldx]]--
-
---~ function nodes.each(stack,handle)
---~ if stack then
---~ local i = 1
---~ while true do
---~ local node = stack[i]
---~ if node then
---~ local kind = node[1]
---~ if kind == 'hlist' or kind == 'vlist' then
---~ local content = node[8]
---~ if type(content) == "table" then
---~ nodes.each(content,handle)
---~ end
---~ elseif kind == 'inline' then
---~ nodes.each(node[4],handle)
---~ else
---~ stack[i] = handle(kind,node)
---~ end
---~ end
---~ i = i + 1
---~ if i > #stack then
---~ break
---~ end
---~ end
---~ end
---~ end
-
---~ function nodes.remove(stack,id,subid) -- "whatsit", 6
---~ nodes.each(stack, function(kind,node)
---~ if kind == id and node[2] == subid then
---~ return false
---~ else
---~ return node
---~ end
---~ end)
---~ end
--[[ldx--
Serializing nodes can be handy for tracing. Also, saving and
@@ -298,64 +242,100 @@ if not fonts.tfm.id then fonts.tfm.id = { } end
do
- local glyph, hlist, vlist = node.id('glyph'), node.id('hlist'), node.id('vlist')
- local pushmarks = false
+ local glyph = node.id('glyph')
+ local has_attribute = node.has_attribute
+ local traverse_id = node.traverse_id
- function nodes.process_glyphs(head)
+ local pairs = pairs
+
+ local starttiming, stoptiming = input.starttiming, input.stoptiming
+
+ function nodes.process_characters(head)
if status.output_active then -- not ok, we need a generic blocker, pagebody ! / attr tex.attributes
- -- 25% calls
- return true
- elseif not head then
- -- 25% calls
- return true
- elseif not head.next and (head.id == hlist or head.id == vlist) then
- return head
+ return head, false -- true
else
-- either next or not, but definitely no already processed list
- input.start_timing(nodes)
- local usedfonts, found, fontdata, done = { }, false, fonts.tfm.id, false
- for n in node.traverse_id(glyph,head) do
- local font = n.font
- if not usedfonts[font] then
- local shared = fontdata[font].shared
- if shared and shared.processors then
- usedfonts[font], found = shared.processors, true
+ starttiming(nodes)
+ local usedfonts, attrfonts, done = { }, { }, false
+ -- todo: should be independent of otf
+ local set_dynamics, font_ids = fonts.otf.set_dynamics, fonts.tfm.id -- todo: font-var.lua so that we can global this one
+ local a, u, prevfont, prevattr = 0, 0, nil, 0
+ for n in traverse_id(glyph,head) do
+ local font, attr = n.font, has_attribute(n,0) -- zero attribute is reserved for fonts, preset to 0 is faster (first match)
+ if attr and attr > 0 then
+ if font ~= prevfont or attr ~= prevattr then
+ local used = attrfonts[font]
+ if not used then
+ used = { }
+ attrfonts[font] = used
+ end
+ if not used[attr] then
+ local d = set_dynamics(font_ids[font],attr) -- todo, script, language -> n.language also axis
+ if d then
+ used[attr] = d
+ a = a + 1
+ end
+ end
+ prevfont, prevattr = font, attr
+ end
+ elseif font ~= prevfont then
+ prevfont, prevattr = font, 0
+ local used = usedfonts[font]
+ if not used then
+ local data = font_ids[font]
+ if data then
+ local shared = data.shared -- we need to check shared, only when same features
+ if shared then
+ local processors = shared.processors
+ if processors and #processors > 0 then
+ usedfonts[font] = processors
+ u = u + 1
+ end
+ end
+ else
+ -- probably nullfont
+ end
end
- end
- end
- if found then
- local tail = head
- if head.next then
- tail = node.slide(head)
else
- head.prev = nil
+ prevattr = attr
end
+ end
+ -- we could combine these and just make the attribute nil
+ if u > 0 then
for font, processors in pairs(usedfonts) do
- if pushmarks then
- local h, d = fonts.pushmarks(head,font)
- head, done = head or h, done or d
- end
- for _, processor in ipairs(processors) do
- local h, d = processor(head,font)
- head, done = head or h, done or d
+ local n = #processors
+ if n == 1 then
+ local h, d = processors[1](head,font,false)
+ head, done = h or head, done or d
+ else
+ for i=1,#processors do
+ local h, d = processors[i](head,font,false)
+ head, done = h or head, done or d
+ end
end
- if pushmarks then
- local h, d = fonts.popmarks(head,font)
- head, done = head or h, done or d
+ end
+ end
+ if a > 0 then -- we need to get rid of a loop here
+ for font, dynamics in pairs(attrfonts) do
+ for attribute, processors in pairs(dynamics) do -- attr can switch in between
+ local n = #processors
+ if n == 1 then
+ local h, d = processors[1](head,font,attribute)
+ head, done = h or head, done or d
+ else
+ for i=1,n do
+ local h, d = processors[i](head,font,attribute)
+ head, done = h or head, done or d
+ end
+ end
end
end
end
- input.stop_timing(nodes)
+ stoptiming(nodes)
if nodes.trace then
nodes.report(head,done)
end
- if done then
- return head -- something changed
- elseif head then
- return true -- nothing changed
- else
- return false -- delete list
- end
+ return head, true
end
end
@@ -366,9 +346,9 @@ end
do
- local contains, set, attribute = node.has_attribute, node.set_attribute, tex.attribute
+ local has_attribute, set, attribute = node.has_attribute, node.set_attribute, tex.attribute
- function nodes.inherit_attributes(n)
+ function nodes.inherit_attributes(n) -- still ok ?
if n then
local i = 1
while true do
@@ -376,7 +356,7 @@ do
if a < 0 then
break
else
- local ai = contains(n,i)
+ local ai = has_attribute(n,i)
if not ai then
set(n,i,a)
end
@@ -400,54 +380,132 @@ function nodes.length(head)
end
end
-nodes.processors.actions = nodes.processors.actions or { }
+--~ nodes.processors.actions = nodes.processors.actions or { }
-function nodes.processors.action(head)
- if head then
- node.slide(head)
- local actions, done = nodes.processors.actions, false
- for i=1,#actions do
- local action = actions[i]
- if action then
- local h, ok = action(head)
- if ok then
- head = h
- end
- done = done or ok
- end
- end
- if done then
- return head
- else
- return true
- end
- else
- return head
- end
-end
+--~ function nodes.processors.action(head)
+--~ if head then
+--~ node.slide(head)
+--~ local done = false
+--~ local actions = nodes.processors.actions
+--~ for i=1,#actions do
+--~ local h, ok = actions[i](head)
+--~ if ok then
+--~ head, done = h, true
+--~ end
+--~ end
+--~ if done then
+--~ return head
+--~ else
+--~ return true
+--~ end
+--~ else
+--~ return head
+--~ end
+--~ end
lists = lists or { }
lists.plugins = lists.plugins or { }
-function nodes.process_lists(head)
- return nodes.process_attributes(head,lists.plugins)
-end
-
chars = chars or { }
chars.plugins = chars.plugins or { }
-function nodes.process_chars(head)
- return nodes.process_attributes(head,chars.plugins)
+--~ words = words or { }
+--~ words.plugins = words.plugins or { }
+
+callbacks.trace = false
+
+do
+
+ kernel = kernel or { }
+
+ local starttiming, stoptiming = input.starttiming, input.stoptiming
+ local hyphenate, ligaturing, kerning = lang.hyphenate, node.ligaturing, node.kerning
+
+ function kernel.hyphenation(head,tail) -- lang.hyphenate returns done
+ starttiming(kernel)
+ local done = hyphenate(head,tail)
+ stoptiming(kernel)
+ return head, tail, done
+ end
+ function kernel.ligaturing(head,tail) -- node.ligaturing returns head,tail,done
+ starttiming(kernel)
+ local head, tail, done = ligaturing(head,tail)
+ stoptiming(kernel)
+ return head, tail, done
+ end
+ function kernel.kerning(head,tail) -- node.kerning returns head,tail,done
+ starttiming(kernel)
+ local head, tail, done = kerning(head,tail)
+ stoptiming(kernel)
+ return head, tail, done
+ end
+
end
-nodes.processors.actions = { -- for the moment here, will change
- nodes.process_chars, -- attribute driven
- nodes.process_glyphs, -- font driven
- nodes.process_lists, -- attribute driven
-}
+callback.register('hyphenate' , function(head,tail) return tail end)
+callback.register('ligaturing', function(head,tail) return tail end)
+callback.register('kerning' , function(head,tail) return tail end)
-callback.register('pre_linebreak_filter', nodes.processors.action)
-callback.register('hpack_filter', nodes.processors.action)
+-- used to be a loop, this is faster, called often; todo: shift up tail or even better,
+-- handle tail everywhere; for the moment we're safe
+
+do
+
+ local charplugins, listplugins = chars.plugins, lists.plugins
+
+ nodes.processors.actions = function(head,tail) -- removed: if head ... end
+ local ok, done = false, false
+ head, ok = nodes.process_attributes(head,charplugins) ; done = done or ok -- attribute driven
+ head, tail, ok = kernel.hyphenation (head,tail) ; done = done or ok -- language driven
+ head, ok = languages.words.check (head,tail) ; done = done or ok -- language driven
+ head, ok = nodes.process_characters(head) ; done = done or ok -- font driven
+ ok = nodes.protect_glyphs (head) ; done = done or ok -- turn chars into glyphs
+ head, tail, ok = kernel.ligaturing (head,tail) ; done = done or ok -- normal ligaturing routine / needed for base mode
+ head, tail, ok = kernel.kerning (head,tail) ; done = done or ok -- normal kerning routine / needed for base mode
+ head, ok = nodes.process_attributes(head,listplugins) ; done = done or ok -- attribute driven
+ return head, done
+ end
+
+end
+
+do
+
+ local actions = nodes.processors.actions
+ local first_character = node.first_character
+ local slide = node.slide
+
+ local function tracer(what,state,head,groupcode,glyphcount)
+ texio.write_nl(string.format("%s %s: group: %s, nodes: %s",
+ (state and "Y") or "N", what, groupcode or "?", nodes.count(head,true)))
+ end
+
+ function nodes.processors.pre_linebreak_filter(head,groupcode) -- todo: tail
+ local first, found = first_character(head)
+ if found then
+ if callbacks.trace then tracer("pre_linebreak",true,head,groupcode) end
+ local head, done = actions(head,slide(head))
+ return (done and head) or true
+ else
+ if callbacks.trace then tracer("pre_linebreak",false,head,groupcode) end
+ return true
+ end
+ end
+
+ function nodes.processors.hpack_filter(head,groupcode) -- todo: tail
+ local first, found = first_character(head)
+ if found then
+ if callbacks.trace then tracer("hpack",true,head,groupcode) end
+ local head, done = actions(head,slide(head))
+ return (done and head) or true
+ end
+ if callbacks.trace then tracer("hpack",false,head,groupcode) end
+ return true
+ end
+
+end
+
+callback.register('pre_linebreak_filter', nodes.processors.pre_linebreak_filter)
+callback.register('hpack_filter' , nodes.processors.hpack_filter)
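+
+-- flipping this shows, per group, whether the processors were entered at all:
+--
+--~ callbacks.trace = true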
do
@@ -462,16 +520,40 @@ do
-- flat: don't use next, but indexes
-- verbose: also add type
+ -- can be sped up
+
+ function nodes.astable(n,sparse)
+ local f, t = node.fields(n.id,n.subtype), { }
+ for i=1,#f do
+ local v = f[i]
+ local d = n[v]
+ if d then
+ if v == "ref_count" or v == "id" then
+ -- skip
+ elseif expand[v] then -- or: type(n[v]) ~= "string" or type(n[v]) ~= "number" or type(n[v]) ~= "table"
+ t[v] = "pointer to list"
+ elseif sparse then
+ if (type(d) == "number" and d ~= 0) or (type(d) == "string" and d ~= "") then
+ t[v] = d
+ end
+ else
+ t[v] = d
+ end
+ end
+ end
+ t.type = node.type(n.id)
+ return t
+ end
function nodes.totable(n,flat,verbose)
- local function totable(n,verbose)
+ local function totable(n)
local f = node.fields(n.id,n.subtype)
local tt = { }
for _,v in ipairs(f) do
if n[v] then
if v == "ref_count" then
-- skip
- elseif expand[v] then -- or: type(n[v]) ~= "string" or type(n[v]) ~= "number"
+ elseif expand[v] then -- or: type(n[v]) ~= "string" or type(n[v]) ~= "number" or type(n[v]) ~= "table"
tt[v] = nodes.totable(n[v],flat,verbose)
else
tt[v] = n[v]
@@ -487,12 +569,12 @@ do
if flat then
local t = { }
while n do
- t[#t+1] = totable(n,verbose)
+ t[#t+1] = totable(n)
n = n.next
end
return t
else
- local t = totable(n,verbose)
+ local t = totable(n)
if n.next then
t.next = nodes.totable(n.next,flat,verbose)
end
@@ -504,11 +586,7 @@ do
end
local function key(k)
- if type(k) == "number" then
- return "["..k.."]"
- else
- return k
- end
+ return ((type(k) == "number") and "["..k.."]") or k
end
local function serialize(root,name,handle,depth,m)
@@ -518,13 +596,14 @@ do
handle(("%s%s={"):format(depth,key(name)))
else
depth = ""
- if type(name) == "string" then
+ local tname = type(name)
+ if tname == "string" then
if name == "return" then
handle("return {")
else
handle(name .. "={")
end
- elseif type(name) == "number" then
+ elseif tname == "number" then
handle("[" .. name .. "]={")
else
handle("t={")
@@ -533,7 +612,7 @@ do
if root then
local fld
if root.id then
- fld = node.fields(root.id,root.subtype)
+ fld = node.fields(root.id,root.subtype) -- we can cache these (todo)
else
fld = table.sortedkeys(root)
end
@@ -541,13 +620,23 @@ do
handle(("%s %s=%q,"):format(depth,'type',root['type']))
end
for _,k in ipairs(fld) do
- if k then
+ if k == "ref_count" then
+ -- skip
+ elseif k then
local v = root[k]
local t = type(v)
if t == "number" then
+if v == 0 then
+ -- skip
+else
handle(("%s %s=%s,"):format(depth,key(k),v))
+end
elseif t == "string" then
+if v == "" then
+ -- skip
+else
handle(("%s %s=%q,"):format(depth,key(k),v))
+end
elseif v then -- userdata or table
serialize(v,k,handle,depth,m+1)
end
@@ -585,9 +674,22 @@ do
tex.print("\\stoptyping")
end
+ function nodes.check_for_leaks(sparse)
+ local l = { }
+ local q = node.usedlist()
+ for p in node.traverse(q) do
+ local s = table.serialize(nodes.astable(p,sparse),node.type(p.id))
+ l[s] = (l[s] or 0) + 1
+ end
+ node.flush_list(q)
+ for k, v in pairs(l) do
+ texio.write_nl(string.format("%s * %s", v, k))
+ end
+ end
+
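+ -- one way to use this at the end of a run (kept inactive here; 'true' gives the
+ -- sparse view in which zero and empty fields are skipped):
+ --
+ --~ nodes.check_for_leaks(true)
+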
end
-if not node.list_has_attribute then
+if not node.list_has_attribute then -- no longer needed
function node.list_has_attribute(list,attribute)
if list and attribute then
@@ -609,377 +711,48 @@ function nodes.pack_list(head)
return t
end
--- helpers
-
do
- local kern_node = node.new("kern",1)
- local penalty_node = node.new("penalty")
- local glue_node = node.new("glue")
- local glue_spec_node = node.new("glue_spec")
+ local glue, whatsit, hlist = node.id("glue"), node.id("whatsit"), node.id("hlist")
- function nodes.penalty(p)
- local n = node.copy(penalty_node)
- n.penalty = p
- return n
- end
- function nodes.kern(k)
- local n = node.copy(kern_node)
- n.kern = k
- return n
- end
- function nodes.glue(width,stretch,shrink)
- local n = node.copy(glue_node)
- local s = node.copy(glue_spec_node)
- s.width, s.stretch, s.shrink = width, stretch, shrink
- n.spec = s
- return n
+ function nodes.leftskip(n)
+ while n do
+ local id = n.id
+ if id == glue then
+ if n.subtype == 8 then -- 7 in c/web source
+ return (n.spec and n.spec.width) or 0
+ else
+ return 0
+ end
+ elseif id == whatsit then
+ n = n.next
+ elseif id == hlist then
+ return n.width
+ else
+ break
+ end
+ end
+ return 0
end
- function nodes.glue_spec(width,stretch,shrink)
- local s = node.copy(glue_spec_node)
- s.width, s.stretch, s.shrink = width, stretch, shrink
- return s
+ function nodes.rightskip(n)
+ if n then
+ n = node.slide(n)
+ while n do
+ local id = n.id
+ if id == glue then
+ if n.subtype == 9 then -- 8 in the c/web source
+ return (n.spec and n.spec.width) or 0
+ else
+ return 0
+ end
+ elseif id == whatsit then
+ n = n.prev
+ else
+ break
+ end
+ end
+ end
+ return false
end
end
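+
+-- a sketch of the intended use, assuming 'line' is an hlist that came out of the
+-- linebreaker (e.g. as seen in a post_linebreak_filter callback):
+--
+--~ local content = line.width - nodes.leftskip(line.list) - (nodes.rightskip(line.list) or 0)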
-
--- old code
-
---~ function nodes.do_process_glyphs(stack)
---~ if not stack or #stack == 0 then
---~ return false
---~ elseif #stack == 1 then
---~ local node = stack[1]
---~ if node then
---~ local kind = node[1]
---~ if kind == 'glyph' then
---~ local tfmdata = fonts.tfm.id[node[5]] -- we can use fonts.tfm.processor_id
---~ if tfmdata and tfmdata.shared and tfmdata.shared.processors then
---~ for _, func in pairs(tfmdata.shared.processors) do -- per font
---~ func(stack,1,node)
---~ end
---~ end
---~ elseif kind == 'hlist' or kind == "vlist" then
---~ local done = nodes.do_process_glyphs(node[8])
---~ end
---~ return true
---~ else
---~ return false
---~ end
---~ else
---~ local font_ids = { }
---~ local done = false
---~ for _, v in pairs(stack) do
---~ if v then
---~ if v[1] == 'glyph' then
---~ local font_id = v[5]
---~ local tfmdata = fonts.tfm.id[font_id] -- we can use fonts.tfm.processor_id
---~ if tfmdata and tfmdata.shared and tfmdata.shared.processors then
---~ font_ids[font_id] = tfmdata.shared.processors
---~ end
---~ end
---~ end
---~ end
---~ if done then
---~ return false
---~ else
---~ -- todo: generic loop before
---~ for font_id, _ in pairs(font_ids) do
---~ for _, func in pairs(font_ids[font_id]) do -- per font
---~ local i = 1
---~ while true do
---~ local node = stack[i]
---~ if node and node[1] == 'glyph' and node[5] == font_id then
---~ i = func(stack,i,node)
---~ end
---~ if i < #stack then
---~ i = i + 1
---~ else
---~ break
---~ end
---~ end
---~ end
---~ end
---~ for i=1, #stack do
---~ local node = stack[i]
---~ if node then
---~ if node[1] == 'hlist' or node[1] == "vlist" then
---~ nodes.do_process_glyphs(node[8])
---~ end
---~ end
---~ end
---~ return true
---~ end
---~ end
---~ end
-
---~ function nodes.do_process_glyphs(stack)
---~ local function process_list(node)
---~ local done = false
---~ if node and node[1] == 'hlist' or node[1] == "vlist" then
---~ local attributes = node[3]
---~ if attributes then
---~ if not attributes[1] then
---~ nodes.do_process_glyphs(node[8])
---~ attributes[1] = 1
---~ done = true
---~ end
---~ else
---~ nodes.do_process_glyphs(node[8])
---~ node[3] = { 1 }
---~ done = true
---~ end
---~ end
---~ return done
---~ end
---~ if not stack or #stack == 0 then
---~ return false
---~ elseif #stack == 1 then
---~ return process_list(stack[1])
---~ else
---~ local font_ids, found = { }, false
---~ for _, node in ipairs(stack) do
---~ if node and node[1] == 'glyph' then
---~ local font_id = node[5]
---~ local tfmdata = fonts.tfm.id[font_id] -- we can use fonts.tfm.processor_id
---~ if tfmdata and tfmdata.shared and tfmdata.shared.processors then
---~ font_ids[font_id], found = tfmdata.shared.processors, true
---~ end
---~ end
---~ end
---~ if not found then
---~ return false
---~ else
---~ -- we need func to report a 'done'
---~ local done = false
---~ for font_id, font_func in pairs(font_ids) do
---~ for _, func in pairs(font_func) do -- per font
---~ local i = 1
---~ while true do
---~ local node = stack[i]
---~ if node and node[1] == 'glyph' and node[5] == font_id then
---~ i = func(stack,i,node)
---~ done = true
---~ end
---~ if i < #stack then
---~ i = i + 1
---~ else
---~ break
---~ end
---~ end
---~ end
---~ end
---~ for _, node in ipairs(stack) do
---~ if node then
---~ done = done or process_list(node)
---~ end
---~ end
---~ return done
---~ end
---~ end
---~ end
-
---~ function nodes.process_glyphs(t,...)
---~ input.start_timing(nodes)
---~ local done = nodes.do_process_glyphs(t)
---~ if done then
---~ t = nodes.collapse(t)
---~ end
---~ input.stop_timing(nodes)
---~ nodes.report(t,done)
---~ if done then
---~ return t
---~ else
---~ return true
---~ end
---~ end
-
---~ function nodes.do_process_glyphs(stack)
---~ local function process_list(node)
---~ local done = false
---~ if node and node[1] == 'hist' or node[1] == "vlist" then
---~ local attributes = node[3]
---~ if attributes then
---~ if attributes[1] then
---~ else
---~ local content = node[8]
---~ if type(content) == "table" then
---~ nodes.do_process_glyphs(content)
---~ end
---~ attributes[1] = 1
---~ done = true
---~ end
---~ else
---~ nodes.do_process_glyphs(node[8])
---~ node[3] = { 1 }
---~ done = true
---~ end
---~ end
---~ return done
---~ end
---~ if not stack or #stack == 0 then
---~ return false
---~ elseif #stack == 1 then
---~ return process_list(stack[1])
---~ else
---~ local font_ids, found = { }, false
---~ for _, node in ipairs(stack) do
---~ if node and node[1] == 'glyph' then
---~ local font_id = node[5]
---~ local tfmdata = fonts.tfm.id[font_id] -- we can use fonts.tfm.processor_id
---~ if tfmdata and tfmdata.shared and tfmdata.shared.processors then
---~ font_ids[font_id], found = tfmdata.shared.processors, true
---~ end
---~ end
---~ end
---~ if not found then
---~ return false
---~ else
---~ -- we need func to report a 'done'
---~ local done = false
---~ for font_id, font_func in pairs(font_ids) do
---~ for _, func in pairs(font_func) do -- per font
---~ local i = 1
---~ while true do
---~ local node = stack[i]
---~ if node and node[1] == 'glyph' and node[5] == font_id then
---~ i = func(stack,i,node)
---~ done = true
---~ end
---~ if i < #stack then
---~ i = i + 1
---~ else
---~ break
---~ end
---~ end
---~ end
---~ end
---~ for _, node in ipairs(stack) do
---~ if node then
---~ done = done or process_list(node)
---~ end
---~ end
---~ return done
---~ end
---~ end
---~ end
-
---~ function nodes.process_glyphs(t,...)
---~ if status.output_active then
---~ return true
---~ else
---~ input.start_timing(nodes)
---~ local done = nodes.do_process_glyphs(t)
---~ if done then
---~ t = nodes.collapse(t)
---~ end
---~ input.stop_timing(nodes)
---~ nodes.report(t,done)
---~ if done then
---~ return t
---~ else
---~ return true
---~ end
---~ end
---~ end
-
---~ do
-
---~ local function do_process_glyphs(stack)
---~ if not stack or #stack == 0 then
---~ return false
---~ elseif #stack == 1 and stack[1][1] ~= 'glyph' then
---~ return false
---~ else
---~ local font_ids, found = { }, false
---~ local fti = fonts.tfm.id
---~ for _, node in ipairs(stack) do
---~ if node and node[1] == 'glyph' then
---~ local font_id = node[5]
---~ local tfmdata = fti[font_id] -- we can use fonts.tfm.processor_id
---~ if tfmdata and tfmdata.shared and tfmdata.shared.processors then
---~ font_ids[font_id], found = tfmdata.shared.processors, true
---~ end
---~ end
---~ end
---~ if not found then
---~ return false
---~ else
---~ -- we need func to report a 'done'
---~ local done = false
---~ for font_id, font_func in pairs(font_ids) do
---~ for _, func in pairs(font_func) do -- per font
---~ local i = 1
---~ while true do
---~ local node = stack[i]
---~ if node and node[1] == 'glyph' and node[5] == font_id then
---~ i = func(stack,i,node)
---~ done = true
---~ end
---~ if i < #stack then
---~ i = i + 1
---~ else
---~ break
---~ end
---~ end
---~ end
---~ end
---~ for _, node in ipairs(stack) do
---~ if node then
---~ done = done or process_list(node)
---~ end
---~ end
---~ return done
---~ end
---~ end
---~ end
-
---~ local function do_collapse_glyphs(stack,existing_t)
---~ if stack then
---~ local t = existing_t or { }
---~ for _, node in pairs(stack) do
---~ if node then
---~ if node[3] then node[3][1] = nil end -- remove status bit / 1 sec faster on 15 sec
---~ if node[1] == 'inline' then
---~ local nodes = node[4]
---~ if #nodes == 1 then
---~ t[#t+1] = nodes[1]
---~ else
---~ do_collapse_glyphs(nodes,t)
---~ end
---~ else
---~ t[#t+1] = node
---~ end
---~ else
---~ -- deleted node
---~ end
---~ end
---~ return t
---~ else
---~ return stack
---~ end
---~ end
-
---~ function nodes.process_glyphs(t,...)
---~ --~ print(...)
---~ if status.output_active then -- not ok, we need a generic blocker, pagebody ! / attr tex.attibutes
---~ return true
---~ else
---~ input.start_timing(nodes)
---~ local done = do_process_glyphs(t)
---~ if done then
---~ t = do_collapse_glyphs(t)
---~ end
---~ input.stop_timing(nodes)
---~ nodes.report(t,done)
---~ if done then
---~ --~ texio.write_nl("RETURNING PROCESSED LIST")
---~ return t
---~ else
---~ --~ texio.write_nl("RETURNING SIGNAL")
---~ return true
---~ end
---~ end
---~ end
-
---~ end
diff --git a/tex/context/base/page-flt.tex b/tex/context/base/page-flt.tex
index 75531fbd5..47b5fddb1 100644
--- a/tex/context/base/page-flt.tex
+++ b/tex/context/base/page-flt.tex
@@ -366,6 +366,8 @@
{\ConvertToConstant\doifelse{#4}\v!none
{\global\nofloatcaptiontrue}
{\global\nofloatcaptionfalse}}%
+ \doif{\floatcaptionparameter\c!number}\v!none % new
+ {\global\nofloatcaptiontrue}%
\ifemptyfloatcaption \ifnofloatnumber
\global\nofloatcaptiontrue
\fi \fi}
@@ -513,20 +515,20 @@
% beware: do not set maxwidth when location=left/right
-\def\setlocalfloatdimensions#1#2#3% experimental / #3 box number #4 prefix
+\def\setlocalfloatdimensions#1%
{\global\sidefloatshift \zeropoint % duplicate
\global\sidefloatmaximum\zeropoint\relax % duplicate
\ifextrafloatactions
\ifdim\sidefloatdownshift=\zeropoint\else
- #3\setbox#2\vbox
- {\vskip\sidefloatdownshift\nointerlineskip\box#3}%
+ \global\setbox\floatbox\vbox
+ {\vskip\sidefloatdownshift\nointerlineskip\box\floatbox}%
\fi
\doifsomething{\floatparameter\c!minwidth}
{\scratchdimen\floatparameter\c!minwidth\relax
- \ifdim\wd#2<\scratchdimen
- #3\setbox#2\hbox to \scratchdimen
+ \ifdim\wd\floatbox<\scratchdimen
+ \global\setbox\floatbox\hbox to \scratchdimen
{\doifnot{\floatparameter\c!location}\v!left \hss
- \box#2%
+ \box\floatbox%
\doifnot{\floatparameter\c!location}\v!right\hss}%
\fi}%
% todo: rand / rug
@@ -537,17 +539,17 @@
{\letvalue{\??fl\currentfloat\c!maxwidth}\rightmarginwidth}}}%
\doifsomething{\floatparameter\c!maxwidth}
{\scratchdimen\floatparameter\c!maxwidth\relax
- \ifdim\wd#2>\scratchdimen
+ \ifdim\wd\floatbox>\scratchdimen
\doifcommonelse{\v!inright,\v!rightmargin,\v!rightedge,
\v!inleft,\v!leftmargin,\v!leftedge}{#1}
{\global\sidefloatmaximum\scratchdimen}
- {#3\setbox#2\hbox to \scratchdimen
+ {\global\setbox\floatbox\hbox to \scratchdimen
{\doifcommonelse{\v!right,\v!left}{#1}
{\doifnotinset\v!right{#1}\hss
- \box#2%
+ \box\floatbox
\doifnotinset\v!left{#1}\hss}%
{\doifnot{\floatparameter\c!location}\v!left\hss
- \box#2%
+ \box\floatbox
\doifnot{\floatparameter\c!location}\v!right\hss}}}%
\fi}%
\fi}
@@ -2031,7 +2033,7 @@
\par
\edef\floatcaptiondirectives{\floatparameter\c!location,\floatcaptionparameter\c!location}%
\ifparfloat\@EA\dosetparfloat\else\@EA\dosetpagfloat\fi{#1}{#2}{#3}%
- \setlocalfloatdimensions{#1}\floatbox\global % tzt arg 3/4 weg
+ \setlocalfloatdimensions{#1}%
\setbox\floatbox\hbox
{\dosavefloatdata\restoretextcolor{\box\floatbox}}%
\global\floatheight\ht\floatbox
diff --git a/tex/context/base/page-ini.tex b/tex/context/base/page-ini.tex
index 2582f2dc0..783a358c9 100644
--- a/tex/context/base/page-ini.tex
+++ b/tex/context/base/page-ini.tex
@@ -634,6 +634,7 @@
\ifx\mkprocesscolumncontents\undefined\let\mkprocesscolumncontents\gobbleoneargument\fi
\ifx\mkprocesspagecontents \undefined\let\mkprocesspagecontents \gobbleoneargument\fi
+\ifx\mkprocessboxcontents \undefined\let\mkprocessboxcontents \gobbleoneargument\fi
\def\normalejectpenalty{-\@M} \let\ejectpenalty\normalejectpenalty
\def\normalsuperpenalty{-\@MM} \let\superpenalty\normalsuperpenalty
diff --git a/tex/context/base/page-lin.lua b/tex/context/base/page-lin.lua
new file mode 100644
index 000000000..91412d84f
--- /dev/null
+++ b/tex/context/base/page-lin.lua
@@ -0,0 +1,232 @@
+if not modules then modules = { } end modules ['page-lin'] = {
+ version = 1.001,
+ comment = "companion to page-lin.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- experimental
+
+nodes = nodes or { }
+nodes.lines = nodes.lines or { }
+nodes.lines.data = nodes.lines.data or { } -- start step tag
+
+do
+
+ -- if there is demand for it, we can support multiple numbering streams
+ -- and use more than one attribute
+
+ local hlist, vlist, whatsit = node.id('hlist'), node.id('vlist'), node.id('whatsit')
+
+ local display_math = attributes.numbers['display-math'] or 121
+ local line_number = attributes.numbers['line-number'] or 131
+ local line_reference = attributes.numbers['line-reference'] or 132
+
+ local current_list = { }
+ local cross_references = { }
+ local chunksize = 250 -- not used in boxed
+
+ local has_attribute = node.has_attribute
+ local traverse_id = node.traverse_id
+ local copy = node.copy
+ local format = string.format
+ local sprint = tex.sprint
+
+ local data = nodes.lines.data
+
+ nodes.lines.scratchbox = nodes.lines.scratchbox or 0
+
+ -- cross referencing
+
+ function nodes.lines.number(n)
+ local cr = cross_references[n] or 0
+ cross_references[n] = nil
+ return cr
+ end
+
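+ -- resolve walks a (nested) node list and records, for every whatsit that
+ -- carries a line-reference attribute, the line number it ended up on, so
+ -- that nodes.lines.number can report it later
+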
+ local function resolve(n,m)
+ while n do
+ local id = n.id
+ if id == whatsit then
+ local a = has_attribute(n,line_reference)
+ if a then
+ cross_references[a] = m
+ end
+ elseif id == hlist or id == vlist then
+ resolve(n.list,m)
+ end
+ n = n.next
+ end
+ end
+
+ -- boxed variant
+
+ nodes.lines.boxed = { }
+
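+ -- register stores a start/step/tag configuration and returns its index;
+ -- that index is what the tex end uses as the value of the line-number
+ -- attribute
+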
+ function nodes.lines.boxed.register(configuration)
+ data[#data+1] = configuration
+ return #data
+ end
+ function nodes.lines.boxed.setup(n,configuration)
+ local d = data[n]
+ if d then
+ for k,v in pairs(configuration) do d[k] = v end
+ else
+ data[n] = configuration
+ end
+ return n
+ end
+
+ local leftskip = nodes.leftskip
+
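+ -- stage_one scans the lines (hlists) of box n; for each numbered line it
+ -- remembers the node and its number and prints a \makenumber call, so that
+ -- tex can typeset the numbers in a scratch box; stage_two then prepends
+ -- each typeset number to the corresponding line
+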
+ function nodes.lines.boxed.stage_one(n)
+ current_list = { }
+ local head = tex.box[n].list
+ local function check_number(n,a) -- move inline
+ local d = data[a]
+ if d then
+ local s = d.start
+ current_list[#current_list+1] = { n, s }
+ sprint(tex.ctxcatcodes, format("\\makenumber{%s}{%s}{%s}{%s}{%s}\\endgraf", d.tag or "", s, n.shift, n.width, leftskip(n.list)))
+ d.start = s + (d.step or 1)
+ end
+ end
+ for n in traverse_id(hlist,head) do -- attr test here and quit as soon as zero found
+ if n.height == 0 and n.depth == 0 then
+ -- skip funny hlists
+ else
+ local a = has_attribute(n.list,line_number)
+ if a and a > 0 then
+ if has_attribute(n,display_math) then
+ if nodes.is_display_math(n) then
+ check_number(n,a)
+ end
+ else
+ if node.first_character(n.list) then
+ check_number(n,a)
+ end
+ end
+ end
+ end
+ end
+ end
+
+ function nodes.lines.boxed.stage_two(n,m)
+ m = m or nodes.lines.scratchbox
+ local t, i = { }, 0
+ for l in traverse_id(hlist,tex.box[m].list) do
+ t[#t+1] = copy(l)
+ end
+ for _, l in ipairs(current_list) do
+ local n, m = l[1], l[2]
+ i = i + 1
+ t[i].next = n.list
+ n.list = t[i]
+ resolve(n,m)
+ end
+ end
+
+ -- flow variant
+ --
+ -- it's too hard to make this one robust, so for the moment it's not
+ -- available; todo: line refs
+
+ if false then
+
+ nodes.lines.flowed = { }
+
+ function nodes.lines.flowed.prepare()
+ for i=1,#data do
+ sprint(tex.ctxcatcodes,format("\\ctxlua{nodes.lines.flowed.prepare_a(%s)}\\ctxlua{nodes.lines.flowed.prepare_b(%s)}",i, i))
+ end
+ end
+
+ function nodes.lines.flowed.prepare_a(i)
+ local d = data[i]
+ local p = d.present
+ if p < chunksize then
+ local b = nodes.lines.scratchbox
+ sprint(tex.ctxcatcodes, format("{\\forgetall\\global\\setbox%s=\\vbox{\\unvbox%s\\relax\\offinterlineskip", b, b))
+ while p < chunksize do
+ sprint(tex.ctxcatcodes, format("\\mkmaketextlinenumber{%s}{%s}\\endgraf",d.start,1))
+ p = p + 1
+ d.start = d.start + d.step
+ end
+ d.present = p
+ sprint(tex.ctxcatcodes, "}}")
+ end
+ end
+
+ function nodes.lines.flowed.prepare_b(i)
+ local d = data[i]
+ local b = nodes.lines.scratchbox
+ local l = tex.box[b]
+ if l then
+ l = l.list
+ local n = d.numbers
+ while l do
+ if l.id == hlist then
+ local m = node.copy(l)
+ m.next = nil
+ if n then
+ n.next = m
+ else
+ d.numbers = m
+ end
+ n = m
+ end
+ l = l.next
+ end
+ end
+ tex.box[b] = nil
+ end
+
+ function nodes.lines.flowed.cleanup(i)
+ if i then
+ node.flush_list(data[i].numbers)
+ else
+ for i=1,#data do
+ node.flush_list(data[i].numbers)
+ end
+ end
+ end
+
+ function nodes.lines.flowed.apply(head)
+ local function check_number(n,a)
+ local d = data[a]
+ if d then
+ local m = d.numbers
+ if m then
+ d.numbers = m.next
+ m.next = n.list
+ n.list = m
+ d.present = d.present - 1
+ end
+ end
+ end
+ for n in node.traverse(head) do
+ local id = n.id
+ if id == hlist then
+ if n.height == 0 and n.depth == 0 then
+ -- skip funny hlists
+ else
+ local a = has_attribute(n,line_number)
+ if a and a > 0 then
+ if has_attribute(n,display_math) then
+ if nodes.is_display_math(n) then
+ check_number(n,a)
+ end
+ else
+ check_number(n,a)
+ end
+ end
+ end
+ end
+ end
+ return head, true
+ end
+
+ end
+
+end
diff --git a/tex/context/base/page-lin.mkii b/tex/context/base/page-lin.mkii
new file mode 100644
index 000000000..357283252
--- /dev/null
+++ b/tex/context/base/page-lin.mkii
@@ -0,0 +1,312 @@
+%D \module
+%D [ file=page-lin, % copied from main-001
+%D version=1997.03.31,
+%D title=\CONTEXT\ Core Macros,
+%D subtitle=Line Numbering,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{Context Core Macros / Line Numbering}
+
+\unprotect
+
+\newif\ifnumberinglines
+\newif\iftypesettinglines
+
+\newcount\linenumber
+\newcount\linestepper
+
+\chardef\linenumberlocation=0
+
+\newtoks\beforeeverylinenumbering
+\newtoks\aftereverylinenumbering
+
+% using \setlocalreference saves a hash entry
+
+\def\dodoshowlinenumber % for use elsewhere, to be extended
+ {\doplacelinenumber
+ \global\advance\linenumber \plusone}
+
+\def\completelinenumber
+ {\@@rnleft\convertnumber\@@rnconversion\linenumber\@@rnright}
+
+\def\dosetuplinenumbering[#1]%
+ {\getparameters[\??rn][\c!start=1,\c!step=1,#1]%
+ \global\linenumber\plusone}
+
+\def\setuplinenumbering
+ {\dosingleargument\dosetuplinenumbering}
+
+\def\dostartnummerenLINE
+ {\EveryPar{\placelinenumber}} % why not append to everypar ? better
+
+\def\dostopnummerenLINE
+ {\the\aftereverylinenumbering
+ \egroup}
+
+\def\dostartnummerenVERB
+ {\EveryLine{\placelinenumber}}
+
+\def\dostopnummerenVERB
+ {\the\aftereverylinenumbering
+ \egroup}
+
+\newevery \everylinenumber \relax
+
+\def\dodoplacelinenumber
+ {% beware of em's, the font is already switched !
+ \setbox\scratchbox\hbox
+ {\setbox0\hbox{\@@rncommand{\completelinenumber}}\vsmashbox0%
+ \ifcase\linenumberlocation
+ \iftypesettinglines % hack
+ \expandafter\llap
+ \else
+ \expandafter\rlap
+ \fi{\hbox to \@@rnwidth{\box0\hss}}% was \llap, still needs testing !!
+ \or
+ \inleftmargin
+ {\forgetall
+ \doifelse\@@rnwidth\v!margin
+ {\hsize\leftmarginwidth}{\hsize\@@rnwidth}%
+ \alignedline\@@rnalign\v!right{\box0\hskip\@@rndistance}}%
+ \else
+ \inrightmargin
+ {\forgetall
+ \doifelse\@@rnwidth\v!margin
+ {\hsize\rightmarginwidth}{\hsize\@@rnwidth}%
+ \alignedline\@@rnalign\v!left{\hskip\@@rndistance\box0}}%
+ \fi}%
+ \vsmashbox\scratchbox
+ \box\scratchbox
+ \the\everylinenumber}
+
+\def\complexstartlinenumbering[#1]%
+ {\doifnot{#1}\v!continue
+ {\doifnumberelse{#1}
+ {\global\linenumber#1\relax}
+ {\doifelsenothing\@@rnstart
+ {\global\linenumber\plusone}
+ {\global\linenumber\@@rnstart}}}%
+ \chardef\linenumberlocation\zerocount
+ \processaction
+ [\@@rnlocation]
+ [ \v!inmargin=>\chardef\linenumberlocation\plusone,
+ \v!inleft=>\chardef\linenumberlocation\plusone,
+ \v!inright=>\chardef\linenumberlocation\plustwo,
+ \v!margin=>\chardef\linenumberlocation\plusone]%
+ % \v!text=>\chardef\linenumberlocation\zerocount,
+ %\s!unknown=>\chardef\linenumberlocation\zerocount,
+ %\s!default=>\chardef\linenumberlocation\zerocount]%
+ \doifnot\@@rnwidth\v!margin
+ {\freezedimenmacro\@@rnwidth
+ \ifcase\linenumberlocation % text
+ \advance\leftskip\@@rnwidth\relax
+ \fi}%
+ \freezedimenmacro\@@rndistance
+ \chardef\@@rn@@rnmethod
+ \ifprocessingverbatim\zerocount\else\iftypesettinglines\plusone\else\plustwo\fi\fi
+ \processaction
+ [\@@rnmethod]
+ [ \v!type=>\chardef\@@rn@@rnmethod\zerocount,
+ \v!line=>\chardef\@@rn@@rnmethod\plusone,
+ \v!text=>\chardef\@@rn@@rnmethod\plustwo,
+ \v!file=>\chardef\@@rn@@rnmethod\plusthree]%
+ \ifcase\@@rn@@rnmethod % verbatim, line by line
+ \typesettinglinestrue
+ \let\dostartnummeren\dostartnummerenVERB
+ \let\stoplinenumbering\dostopnummerenVERB
+ \def\placelinenumber
+ {\doplacelinenumber
+ \global\advance\linenumber \plusone}%
+ \or % text, line by line
+ \let\dostartnummeren\dostartnummerenLINE
+ \let\stoplinenumbering\dostopnummerenLINE
+ \def\placelinenumber
+ {\doplacelinenumber
+ \global\advance\linenumber \plusone}%
+ \or % text, whole lot
+ \let\dostartnummeren\dostartnummerenPAR
+ \let\stoplinenumbering\dostopnummerenPAR
+ \def\placelinenumber
+ {\global\advance\linenumber \minusone
+ \doplacelinenumber}%
+ \or % verbatim, selective line by line
+ \typesettinglinestrue
+ \let\dostartnummeren\dostartnummerenVERB
+ \let\stoplinenumbering\dostopnummerenVERB
+ \def\placelinenumber
+ {\global\linenumber\verbatimlinenumber
+ \global\advance\linenumber \minusone
+ \doplacelinenumber}%
+ \fi
+ \dostartnummeren}
+
+\def\startlinenumbering
+ {\bgroup
+ \the\beforeeverylinenumbering
+ \numberinglinestrue
+ \complexorsimpleempty\startlinenumbering}
+
+\def\donoplacelinenumber
+ {\the\everylinenumber}
+
+\def\doplacelinenumber
+ {\ifnum\linenumber<\@@rnstart\relax
+ \donoplacelinenumber
+ \else
+ \ifnum\numexpr(\linenumber/\@@rnstep)*\@@rnstep\relax=\linenumber
+ \doattributes\??rn\c!style\c!color\dodoplacelinenumber
+ \else
+ \donoplacelinenumber
+ \fi
+ \fi}
+
+\def\someline[#1]%
+ {\dolinereference0[#1]\ignorespaces}
+
+\def\startline[#1]%
+ {\dolinereference1[#1]\ignorespaces}
+
+\def\stopline[#1]%
+ {\removelastspace\dolinereference2[#1]}
+
+\def\inline#1[#2]%
+ {\doifelsenothing{#1}
+ {\doifinstringelse{--}\currenttextreference
+ {\in{\leftlabeltext\v!lines}{\rightlabeltext\v!lines}[\@@rnprefix#2]}
+ {\in{\leftlabeltext\v!line }{\rightlabeltext\v!line }[\@@rnprefix#2]}}
+ {\in{#1}[\@@rnprefix#2]}}
+
+\def\dostartnummerenPAR
+ {\beginofshapebox
+ \doglobal\newcounter\linereference}
+
+% reconsider localcrossref
+
+\def\setlinereference#1#2#3#4%
+ {\setxvalue{lrf:#1}{\noexpand\dogetlinereference{#2}{#3}{#4}}}
+
+\def\getlinereference#1%
+ {\getvalue{lrf:#1}}
+
+\def\dogetlinereference#1#2#3%
+ {\edef\linereferencename{#1}%
+ \edef\linereferenceline{#2}%
+ \edef\linereferenceplus{#3}}
+
+% 1 xxx xxx xxx xxx xxx xxx xxx
+% 2 xxx yyy yyy yyy yyy yyy yyy <= start y
+% 3 yyy yyy yyy yyy yyy yyy yyy
+% 4 yyy yyy yyy yyy yyy xxx xxx <= stop y
+% 5 xxx xxx xxx xxx xxx xxx xxx
+
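+% the reference index is smuggled into the line as extra rule depth (strut
+% depth plus the index in scaled points); \dostopnummerenPAR recovers it from
+% the depth of the reshaped lines
+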
+\def\dolinereference#1[#2]%
+ {\bgroup
+ \dimen0=\strutdp
+ \doif\@@rnreferencing\v!on
+ {\doglobal\increment\linereference
+ % start 1=>(n=y,l=0,p=1)
+ % stop 2=>(n=y,l=0,p=2)
+ \setlinereference{\linereference}{\@@rnprefix#2}{0}{#1}%
+ \advance\dimen0 \linereference sp}%
+ \prewordbreak
+ \vrule \!!width \zeropoint \!!depth \dimen0 \!!height \zeropoint
+ \prewordbreak
+ \egroup}
+
+\def\dostopnummerenPAR % dp's -> openstrutdepth
+ {\endofshapebox
+ \checkreferences
+ \linestepper\zerocount
+ \reshapebox{\global\advance\linestepper \plusone}%
+ \global\advance\linenumber \linestepper
+ \doifelse\@@rnreferencing\v!on
+ {\reshapebox % We are going back!
+ {\global\advance\linenumber \minusone
+ \dimen0=\dp\shapebox
+ \advance\dimen0 -\strutdp\relax
+ \ifdim\dimen0>\zeropoint
+ % 1=>4 | 2=>4 1=>2
+ % start 1=>(n=y,l=2,p=1)
+ % stop 2=>(n=y,l=4,p=2)
+ \dostepwiserecurse\plusone{\number\dimen0}\plusone
+ {\getlinereference\recurselevel
+ \setlinereference\recurselevel
+ {\linereferencename}{\the\linenumber}{\linereferenceplus}}%
+ \fi}%
+ \global\advance\linenumber \linestepper
+ \ifnum\linereference>\zerocount % otherwise a strange loop in paragraphs+recurse
+ \dorecurse\linereference
+ {\getlinereference\recurselevel
+ \ifnum\linereferenceplus=2 % stop
+ % ref y: text = 4 / can this bypass the reference mechanism?
+ \expanded{\setlocalcrossreference
+ {\referenceprefix\linereferencename}{}{}{\linereferenceline}}%
+ \fi}%
+ \dorecurse\linereference
+ {\getlinereference\recurselevel
+ \ifnum\linereferenceplus<2 % start / lone
+ \ifnum\linereferenceplus=1 % start
+ \getreferenceelements\linereferencename % text = 4
+ \ifnum\linereferenceline<0\currenttextreference\relax % 0 prevents error
+ \edef\linereferenceline{\linereferenceline--\currenttextreference}%
+ \fi
+ \fi
+ \expanded{\setlocalcrossreference
+ {\referenceprefix\linereferencename}{}{}{\linereferenceline}}%
+ \fi}%
+ \global\let\scratchline\linenumber % We are going back!
+ \reshapebox
+ {\doglobal\decrement\scratchline
+ \hbox
+ {\dorecurse\linereference
+ {\getlinereference\recurselevel
+ \getreferenceelements\linereferencename
+ \beforesplitstring\currenttextreference--\at--\to\firstline
+ \ifnum\firstline=\scratchline\relax
+ % better a rawtextreference, i.e. expanded
+ % \textreference[\linereferencename]{\currenttextreference}%
+ \rawtextreference\s!lin\linereferencename\currenttextreference
+ \expanded{\setlocalcrossreference
+ {\referenceprefix\linereferencename}{}{}{0}}% ==done
+ \fi}%
+ \dimen0\dp\shapebox
+ \advance\dimen0 -\strutdp\relax
+ \ifdim\dimen0>\zeropoint
+ \dp\shapebox\strutdp
+ \fi
+ \placelinenumber\box\shapebox}}% no \strut !
+ \else
+ \reshapebox{\hbox{\placelinenumber\box\shapebox}}% no \strut !
+ \fi}
+ {\reshapebox{\global\advance\linenumber \minusone}%
+ \global\advance\linenumber \linestepper
+ \reshapebox{\hbox{\placelinenumber\box\shapebox}}}% no \strut !
+ \global\advance\linenumber \linestepper
+ \flushshapebox
+ \the\aftereverylinenumbering
+ \egroup}
+
+\setuplinenumbering
+ [\c!method=,
+ \c!conversion=\v!numbers,
+ \c!start=1,
+ \c!step=1,
+ \c!location=\v!margin,
+ \c!style=,
+ \c!color=,
+ \c!prefix=,
+ \c!referencing=\v!on,
+ \c!width=\ifcase\linenumberlocation2em\else\v!margin\fi,
+ \c!left=,
+ \c!right=,
+ \c!command=,
+ \c!distance=\zeropoint,
+ \c!align=\ifcase\linenumberlocation\v!right\or\v!right\or\v!left\fi]
+
+\protect \endinput
diff --git a/tex/context/base/page-lin.mkiv b/tex/context/base/page-lin.mkiv
new file mode 100644
index 000000000..d442bbfeb
--- /dev/null
+++ b/tex/context/base/page-lin.mkiv
@@ -0,0 +1,424 @@
+%D \module
+%D [ file=page-lin,
+%D version=2007.11.29,
+%D title=\CONTEXT\ Core Macros,
+%D subtitle=Line Numbering,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{Context Core Macros / Line Numbering}
+
+\unprotect
+
+% low level interface
+
+\defineattribute[line-number]
+\defineattribute[line-reference]
+
+\registerctxluafile{page-lin}{1.001}
+% \ctxluafileload{page-lin}{1.001}
+
+\appendtoksonce\doresetattribute{line-number}\to\everyforgetall
+\appendtoksonce\dosetattribute{display-math}{1}\to\everybeforedisplayformula
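+
+% the display-math attribute lets the lua end recognize lines that belong to
+% a display formula and treat them specially when numbering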
+
+\newbox \linenumberscratchbox
+\newcount\linenumberchunk
+\newcount\linerefcounter
+
+\newconditional\tracelinenumbering
+
+\def\mkaddtextlinenumbers#1#2#3% box col max
+ {\bgroup
+ \ifcase#3\relax
+ \let\makenumber\mkskiplinenumber
+ \or
+ \let\makenumber\mkleftlinenumber
+ \else\ifcase#2\relax
+ \let\makenumber\mkskiplinenumber
+ \or
+ \let\makenumber\mkleftlinenumber
+ \else
+ \let\makenumber\mkrightlinenumber
+ \fi\fi
+ \mkprocesstextlinenumbers{#1}%
+ \egroup}
+
+\def\mkprocesstextlinenumbers#1%
+ {\setbox\linenumberscratchbox\vbox{\forgetall\offinterlineskip\ctxlua{nodes.lines.boxed.stage_one(\number#1)}}%
+ \ctxlua{nodes.lines.boxed.stage_two(\number#1,\number\linenumberscratchbox)}}% can move to lua code
+
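+% stage one typesets the numbers in \linenumberscratchbox, after which stage
+% two (in lua) prepends each number to the matching line of the processed box
+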
+% id nr shift width leftskip
+
+\def\mkskiplinenumber #1#2#3#4#5{}
+\def\mkleftlinenumber #1#2#3#4#5{\hbox{\llap{#2\quad\hskip#3\scaledpoint}}}
+\def\mkrightlinenumber#1#2#3#4#5{\hbox{\rlap{\hskip#4\scaledpoint\hskip#3\scaledpoint\quad#2}}}
+
+\def\makenumber#1#2{\hbox{\llap{#1\quad\hskip#2\scaledpoint}}\endgraf}%
+
+\def\mkdoprocesspagecontents #1{\mkaddtextlinenumbers{#1}\plusone \plusone}
+\def\mkdoprocessboxcontents #1{\mkaddtextlinenumbers{#1}\plusone \plusone}
+\def\mkdoprocesscolumncontents#1{\mkaddtextlinenumbers{#1}\currentcolumn\nofcolumns}
+
+\def\mkcurrentstart{0}
+\def\mkcurrentstep {1}
+
+\def\mkdefinetextlinenumbering#1%
+ {\begingroup
+ \scratchcounter\ctxlua{tex.sprint(nodes.lines.boxed.register({start=\mkcurrentstart,step=\mkcurrentstep,tag="#1"}))}%
+ \setxvalue{ln:c:#1}{\number\scratchcounter}%
+ \endgroup}
+
+\def\mkstarttextlinenumbering#1#2%
+ {\globallet\mkprocesspagecontents \mkdoprocesspagecontents
+ \globallet\mkprocesscolumncontents\mkdoprocesscolumncontents
+ \ifcase#2\relax
+ % continue
+ \or
+ \mkdefinetextlinenumbering{#1}% restart
+ \fi
+ \dosetattribute{line-number}{\getvalue{ln:c:#1}}}
+
+\def\mksetuptextlinenumbering#1%
+ {\ctxlua{nodes.lines.boxed.setup(\getvalue{ln:c:#1},{start=\mkcurrentstart,step=\mkcurrentstep,tag="#1"})}}
+
+\def\mkstoptextlinenumbering
+ {\doresetattribute{line-number}}
+
+\def\mksomelinereference#1#2#3%
+ {\dontleavehmode\begingroup
+ \global\advance\linerefcounter\plusone
+ \dosetattribute{line-reference}\linerefcounter
+ #3\rawtextreference\s!lin{#2}{\noexpand\ctxlua{tex.sprint(nodes.lines.number(\the\linerefcounter))}}%
+ \endgroup}
+
+\def\mkstartlinereference#1{\mksomelinereference{#1}{lr:b:#1}{}\ignorespaces}
+\def\mkstoplinereference #1{\removeunwantedspaces\mksomelinereference{#1}{lr:e:#1}{}}
+
+\def\mklinestartreference#1[#2]{\in{#1}[lr:b:#2]} % not interfaced
+\def\mklinestopreference #1[#2]{\in{#1}[lr:e:#2]} % not interfaced
+
+% high level interface
+
+\newif\ifnumberinglines
+\newif\iftypesettinglines
+
+\let\currentlinenumbering\empty
+
+\chardef\linenumbermode = 1 % 0=continue, 1=restart
+\chardef\linenumberlocation = 1 % 0=middle, 1=left, 2=right, 3=inner, 4=outer, 5=text
+\chardef\linenumberalignment = 5 % 0=middle, 1=left, 2=right, 5=auto
+
+\newevery \beforeeverylinenumbering \relax
+\newevery \aftereverylinenumbering \relax
+\newevery \everylinenumber \relax
+
+\newdimen\linenumberwidth
+\newdimen\linenumberdistance
+
+\def\definelinenumbering
+ {\dosingleempty\dodefinelinenumbering}
+
+\def\dodefinelinenumbering[#1]%
+ {\def\currentlinenumbering{#1}%
+ \mkdefinetextlinenumbering\currentlinenumbering}
+
+\def\setuplinenumbering
+ {\dodoubleempty\dosetuplinenumbering}
+
+\def\dosetuplinenumbering[#1][#2]%
+ {\ifsecondargument
+ \def\currentlinenumbering{#1}%
+ \getparameters[\??rn#1][#2]%
+ \else
+ \let\currentlinenumbering\empty
+ \getparameters[\??rn][#1]%
+ \fi
+ \mksetuptextlinenumbering\currentlinenumbering}
+
+\def\linenumberparameter#1%
+ {\csname\??rn\ifcsname\??rn\currentlinenumbering#1\endcsname\currentlinenumbering\fi#1\endcsname}
+
+\def\linenumberattributes
+ {\doattributes{\??rn\ifcsname\??rn\currentlinenumbering\c!style\endcsname\currentlinenumbering\fi}}
+
+\definelinenumbering
+
+\setuplinenumbering
+ [\c!conversion=\v!numbers,
+ \c!start=1,
+ \c!step=1,
+ \c!continue=\v!no,
+ \c!location=\v!left,
+ \c!style=,
+ \c!color=,
+ \c!width=2em,
+ \c!left=,
+ \c!right=,
+ \c!command=,
+ \c!distance=\zeropoint,
+ \c!align=\v!auto]
+
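+% a minimal usage sketch, based on the keys set up above (step and location
+% values are just examples):
+%
+% \setuplinenumbering[step=5,location=left]
+% \startlinenumbering
+% some numbered lines ...
+% \stoplinenumbering
+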
+\def\startlinenumbering
+ {\dodoubleempty\dostartlinenumbering}
+
+% no intermediate changes in values are supported; define a class instead,
+% otherwise each range would need its own number
+
+\def\mkcurrentstart{\linenumberparameter\c!start}
+\def\mkcurrentstep {\linenumberparameter\c!step }
+
+% todo: text
+
+\expandafter\chardef\csname\??rn:l:\v!middle \endcsname \zerocount
+\expandafter\chardef\csname\??rn:l:\v!left \endcsname \plusone
+\expandafter\chardef\csname\??rn:l:\v!margin \endcsname \plusone
+\expandafter\chardef\csname\??rn:l:\v!inmargin\endcsname \plusone
+\expandafter\chardef\csname\??rn:l:\v!inleft \endcsname \plusone
+\expandafter\chardef\csname\??rn:l:\v!right \endcsname \plustwo
+\expandafter\chardef\csname\??rn:l:\v!inright \endcsname \plustwo
+\expandafter\chardef\csname\??rn:l:\v!inner \endcsname \plusthree
+\expandafter\chardef\csname\??rn:l:\v!outer \endcsname \plusfour
+\expandafter\chardef\csname\??rn:l:\v!text \endcsname \plusfive
+
+\expandafter\chardef\csname\??rn:a:\v!middle \endcsname \zerocount
+\expandafter\chardef\csname\??rn:a:\v!right \endcsname \plusone
+\expandafter\chardef\csname\??rn:a:\v!flushleft \endcsname \plusone
+\expandafter\chardef\csname\??rn:a:\v!left \endcsname \plustwo
+\expandafter\chardef\csname\??rn:a:\v!flushright\endcsname \plustwo
+\expandafter\chardef\csname\??rn:a:\v!auto \endcsname \plusfive
+
+\def\dostartlinenumbering[#1][#2]% todo: c!continue
+ {\begingroup
+ \chardef\linenumbermode\plusone
+ \let\currentlinenumbering\empty
+ \ifsecondargument
+ \doif{#2}\v!continue{\chardef\linenumbermode\zerocount}%
+ \else\iffirstargument
+ \doifelse{#1}\v!continue
+ {\chardef\linenumbermode\zerocount}
+ {\def\currentlinenumbering{#1}}%
+ \fi\fi
+ \doif{\linenumberparameter\c!continue}\v!yes
+ {\chardef\linenumbermode\zerocount}%
+ \numberinglinestrue
+ \the\beforeeverylinenumbering
+ \mkstarttextlinenumbering\currentlinenumbering\linenumbermode}
+
+\def\stoplinenumbering
+ {\mkstoptextlinenumbering
+ \the\aftereverylinenumbering
+ \endgroup}
+
+% number placement
+
+\let\mkskiplinenumber \gobblefivearguments
+
+\def\mkdoinnerlinenumber{\doifoddpageelse\mkdoleftlinenumber\mkdorightlinenumber}
+\def\mkdoouterlinenumber{\doifoddpageelse\mkdorightlinenumber\mkdoleftlinenumber}
+
+\def\mkleftlinenumber
+ {\ifcase\linenumberlocation
+ \expandafter\mkdoleftlinenumber
+ \or
+ \expandafter\mkdoleftlinenumber
+ \or
+ \expandafter\mkdoleftlinenumber
+ \or
+ \expandafter\mkdoinnerlinenumber
+ \or
+ \expandafter\mkdoouterlinenumber
+ \fi}
+
+\def\mkrightlinenumber
+ {\ifcase\linenumberlocation
+ \expandafter\mkdorightlinenumber
+ \or
+ \expandafter\mkdorightlinenumber
+ \or
+ \expandafter\mkdorightlinenumber
+ \or
+ \expandafter\mkdoouterlinenumber
+ \or
+ \expandafter\mkdoinnerlinenumber
+ \fi}
+
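+% the following definition overrides the simpler \mkaddtextlinenumbers given
+% earlier; it also adapts the number location to the current column
+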
+\def\mkaddtextlinenumbers#1#2#3% box col max
+ {\bgroup
+ \ifcase#3\relax
+ \let\makenumber\mkskiplinenumber
+ \or
+ \let\makenumber\mkleftlinenumber
+ \else\ifcase#2\relax
+ \let\makenumber\mkskiplinenumber
+ \or
+ \let\makenumber\mkdoleftlinenumber
+ \ifcase\linenumberlocation\or
+ \chardef\linenumberlocation\plusone
+ \or
+ \chardef\linenumberlocation\plustwo
+ \or
+ \chardef\linenumberlocation\plusone
+ \or
+ \chardef\linenumberlocation\plusone
+ \or
+ \chardef\linenumberlocation\plusone
+ \fi
+ \else
+ \let\makenumber\mkdorightlinenumber
+ \ifcase\linenumberlocation\or
+ \chardef\linenumberlocation\plustwo
+ \or
+ \chardef\linenumberlocation\plusone
+ \or
+ \chardef\linenumberlocation\plustwo
+ \or
+ \chardef\linenumberlocation\plustwo
+ \fi
+ \fi\fi
+ \mkprocesstextlinenumbers{#1}%
+ \egroup}
+
+\def\mkdoleftlinenumber #1#2#3#4#5%
+ {\hbox{\llap{\dosomelinenumber{#1}{2}{#2}{#5}\hskip#3\scaledpoint}}}
+\def\mkdorightlinenumber#1#2#3#4#5%
+ {\hbox{\rlap{\hskip#4\scaledpoint\hskip#3\scaledpoint\dosomelinenumber{#1}{1}{#2}{#5}}}}
+
+\def\dosomelinenumber#1#2#3#4% tag 1=left|2=right linenumber leftskip
+ {\begingroup
+ \def\currentlinenumbering{#1}%
+ \chardef\linenumberlocation \executeifdefined{\??rn:l:\linenumberparameter\c!location}\plusone % left
+ \chardef\linenumberalignment\executeifdefined{\??rn:a:\linenumberparameter\c!align }\plusfive % auto
+ \doifelse{\linenumberparameter\c!width}\v!margin
+ {\linenumberwidth\leftmarginwidth}
+ {\linenumberwidth\linenumberparameter\c!width}%
+ \linenumberdistance\linenumberparameter\c!distance\relax
+ \ifcase#2\relax\or\hskip\linenumberdistance\fi\relax
+ \ifnum\linenumberlocation=\plusfive
+ \scratchdimen\dimexpr#4\scaledpoint-\linenumberdistance\relax
+ \chardef\linenumberlocation\plusone
+ \else
+ \scratchdimen\zeropoint
+ \fi
+ \ifcase\linenumberalignment
+ \chardef\linenumberlocation\zerocount % middle
+ \or
+ \chardef\linenumberlocation\plusone % left
+ \or
+ \chardef\linenumberlocation\plustwo % right
+ \fi
+ \ifconditional\tracelinenumbering\ruledhbox\else\hbox\fi to \linenumberwidth
+ {\ifcase\linenumberlocation
+ \hss % middle
+ \or
+ % left
+ \or
+ \hss % right
+ \or
+ \doifoddpageelse\relax\hss % inner
+ \or
+ \doifoddpageelse\hss\relax % outer
+ \fi
+ \linenumberattributes\c!style\c!color
+ {\linenumberparameter\c!command
+ {\linenumberparameter\c!left
+ \convertnumber{\linenumberparameter\c!conversion}{#3}%
+ \linenumberparameter\c!right}}%
+ \ifcase\linenumberlocation
+ \hss % middle
+ \or
+ \hss % left
+ \or
+ % right
+ \or
+ \doifoddpageelse\hss\relax % inner
+ \or
+ \doifoddpageelse\relax\hss % outer
+ \fi}%
+ \ifcase#2\relax\or\or\hskip\linenumberdistance\fi\relax
+ \hskip-\scratchdimen
+ \the\everylinenumber
+ \endgroup}
+
+% left right inner outer
+
+% align: \alignedline\@@rnalign\v!right{\box0\hskip\@@rndistance}
+
+% referencing
+
+\def\someline [#1]{\mkstartlinereference{#1}\mkstoplinereference{#1}}
+\def\startline[#1]{\mkstartlinereference{#1}}
+\def\stopline [#1]{\mkstoplinereference {#1}}
+
+\def\mkshowstartlinereference#1%
+ {\ifconditional\tracelinenumbering
+ \setbox\scratchbox\hbox{\llap
+ {\vrule\!!width\onepoint\!!depth\strutdp\!!height.8\strutht\raise.85\strutht\hbox{\llap{\tt\txx#1}}}}%
+ \smashbox\scratchbox\box\scratchbox
+ \fi}
+\def\mkshowstoplinereference#1%
+ {\ifconditional\tracelinenumbering
+ \setbox\scratchbox\hbox{\rlap
+ {\raise.85\strutht\hbox{\rlap{\tt\txx#1}}\vrule\!!width\onepoint\!!depth\strutdp\!!height.8\strutht}}%
+ \smashbox\scratchbox\box\scratchbox
+ \fi}
+
+\def\mkstartlinereference#1{\mksomelinereference{#1}{lr:b:#1}{\mkshowstartlinereference{#1}}\ignorespaces}
+\def\mkstoplinereference #1{\removeunwantedspaces\mksomelinereference{#1}{lr:e:#1}{\mkshowstoplinereference{#1}}}
+
+% eventually we will do this in lua
+
+\def\doifelsesamelinereference#1#2#3%
+ {\doifreferencefoundelse{lr:b:#1}
+ {\let\fline\currenttextreference
+ \doifreferencefoundelse{lr:e:#1}
+ {\let\tline\currenttextreference
+ \ifx\fline\tline#2\else#3\fi}
+ {#2}}
+ {#2}}
+
+\def\inline#1[#2]%
+ {\doifelsenothing{#1}
+ {\doifelsesamelinereference{#2}
+ {\in{\leftlabeltext\v!line}{\rightlabeltext\v!line}[lr:b:#2]}
+ {\in{\leftlabeltext\v!lines}{}[lr:b:#2]--\in{}{\rightlabeltext\v!lines}[lr:e:#2]}}
+ {\doifelsesamelinereference{#2}
+ {\in{#1}[lr:b:#2]}
+ {\in{#1}[lr:b:#2]--\in[lr:e:#2]}}}
+
+\protect \endinput
+
+\iffalse % \iftrue
+
+ \appendtoks\ctxlua{nodes.lines.flowed.prepare()}\to\everyshipout
+ \appendtoks\ctxlua{nodes.lines.flowed.cleanup()}\to\everybye
+
+ \def\mkstarttextlinenumbering#1#2%
+ {\ctxlua{nodes.lines.flowed.prepare(#1)}%
+ \dosetattribute{line-number}{#1}}
+
+ \def\mkstoptextlinenumbering
+ {\doresetattribute{line-number}}
+
+ \def\mkmaketextlinenumber#1#2%
+ {\hbox \bgroup
+ \ifcase#2\relax
+ %
+ \or
+ \llap{#1\quad\hskip#2\scaledpoint}%
+ \else
+ \rlap{\hskip\textwidth\quad#1\hskip#2\scaledpoint}%
+ \fi
+ \egroup}
+
+ \ctxlua{
+ nodes.lines.scratchbox = \number\linenumberscratchbox ;
+ % callback.register('vpack_filter', nodes.lines.flowed.apply)
+ callback.register('post_linebreak_filter', nodes.lines.flowed.apply)
+ }
+
+\fi
diff --git a/tex/context/base/page-lin.tex b/tex/context/base/page-lin.tex
deleted file mode 100644
index 66a09527c..000000000
--- a/tex/context/base/page-lin.tex
+++ /dev/null
@@ -1,459 +0,0 @@
-%D \module
-%D [ file=page-lin, % copied from main-001
-%D version=1997.03.31,
-%D title=\CONTEXT\ Core Macros,
-%D subtitle=Line Numbering,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-\writestatus{loading}{Context Core Macros / Line Numbering}
-
-\unprotect
-
-\newif\ifnumberinglines
-\newif\iftypesettinglines
-
-\newcount\linenumber
-\newcount\linestepper
-
-\chardef\linenumberlocation=0
-
-\newtoks\beforeeverylinenumbering
-\newtoks\aftereverylinenumbering
-
-\def\setuplines
- {\dodoubleargument\getparameters[\??rg]}
-
-\def\startlines
- {\@@rgbefore
- \pushmacro\checkindentation
- \whitespace
- %\page[\v!preference]} gaat mis na koppen, nieuw: later \nobreak
- \begingroup
- \setupindenting[\@@rgindenting]%
- \typesettinglinestrue
- \setupwhitespace[\v!none]%
- \obeylines
- \ignorespaces
- \gdef\afterfirstobeyedline % tzt two pass, net als opsomming
- {\gdef\afterfirstobeyedline
- {\nobreak
- \global\let\afterfirstobeyedline\relax}}%
- \def\obeyedline
- {\par
- \afterfirstobeyedline
- \futurelet\next\dobetweenthelines}%
- \activatespacehandler\@@rgspace
- \GotoPar}
-
-\def\stoplines
- {\endgroup
- \popmacro\checkindentation
- \@@rgafter}
-
-%D When spacing is active we need to handle commands in
-%D a special way:
-%D
-%D \starttyping
-%D \setuplines[space=on]
-%D
-%D \startlines
-%D Let's talk about this{\ttsl\gobbleoneargument or}that.
-%D \stoplines
-%D
-%D \startlines
-%D Let's talk about this{\getvalue{ttsl}or}that.
-%D \stoplines
-%D \stoptyping
-%D
-%D One can indent in several ways:
-%D
-%D \starttyping
-%D \setupindenting[medium] \setuplines[indenting=odd] % no yes odd even
-%D
-%D \startlines
-%D first
-%D second
-%D third
-%D fourth
-%D \stoplines
-%D \stoptyping
-
-% this is not the natural place
-
-\def\installspacehandler#1#2% needs to set \obeyedspace
- {\setvalue{\??sr#1}{#2}}
-
-\installspacehandler \v!on
- {\obeyspaces
- \def\obeyedspace{\mathortext\normalspace{\dontleavehmode{\tt\controlspace}}}%
- \let\ =\obeyedspace}
-
-\installspacehandler \v!yes
- {\obeyspaces
- \def\obeyedspace{\mathortext\normalspace{\dontleavehmode \normalspace }}%
- \let\ =\obeyedspace}
-
-\installspacehandler \v!off
- {\normalspaces
- \let\obeyedspace\normalspace
- \let\ =\normalspace}
-
-\installspacehandler \v!fixed
- {\obeyspaces
- \def\obeyedspace{\mathortext\normalspace{\dontleavehmode\fixedspace}}%
- \let\ =\obeyedspace}
-
-\def\activatespacehandler#1%
- {\executeifdefined{\??sr#1}{\activatespacehandler\v!off}}
-
-\def\dobetweenthelines
- {\doifmeaningelse\next\obeyedline\@@rginbetween\donothing}
-
-% het gebruik van \setlocalreference scheelt een hash entry
-
-\def\dodoshowlinenumber % for use elsewhere, to be extended
- {\doplacelinenumber
- \global\advance\linenumber \plusone}
-
-\def\completelinenumber
- {\@@rnleft\convertnumber\@@rnconversion\linenumber\@@rnright}
-
-\def\dosetuplinenumbering[#1]%
- {\getparameters[\??rn][\c!start=1,\c!step=1,#1]%
- \global\linenumber\plusone}
-
-\def\setuplinenumbering
- {\dosingleargument\dosetuplinenumbering}
-
-\def\dostartnummerenLINE
- {\EveryPar{\placelinenumber}} % why not append to everypar ? better
-
-\def\dostopnummerenLINE
- {\the\aftereverylinenumbering
- \egroup}
-
-\def\dostartnummerenVERB
- {\EveryLine{\placelinenumber}}
-
-\def\dostopnummerenVERB
- {\the\aftereverylinenumbering
- \egroup}
-
-\newevery \everylinenumber \relax
-
-\def\dodoplacelinenumber
- {% beware of em's, the font is already switched !
- \setbox\scratchbox\hbox
- {\setbox0\hbox{\@@rncommand{\completelinenumber}}\vsmashbox0%
- \ifcase\linenumberlocation
- \iftypesettinglines % hack
- \expandafter\llap
- \else
- \expandafter\rlap
- \fi{\hbox to \@@rnwidth{\box0\hss}}% was \llap, nog testen !!
- \or
- \inleftmargin
- {\forgetall
- \doifelse\@@rnwidth\v!margin
- {\hsize\leftmarginwidth}{\hsize\@@rnwidth}%
- \alignedline\@@rnalign\v!right{\box0\hskip\@@rndistance}}%
- \else
- \inrightmargin
- {\forgetall
- \doifelse\@@rnwidth\v!margin
- {\hsize\rightmarginwidth}{\hsize\@@rnwidth}%
- \alignedline\@@rnalign\v!left{\hskip\@@rndistance\box0}}%
- \fi}%
- \vsmashbox\scratchbox
- \box\scratchbox
- \the\everylinenumber}
-
-\def\complexstartlinenumbering[#1]%
- {\doifnot{#1}\v!continue
- {\doifnumberelse{#1}
- {\global\linenumber#1\relax}
- {\doifelsenothing\@@rnstart
- {\global\linenumber\plusone}
- {\global\linenumber\@@rnstart}}}%
- \chardef\linenumberlocation\zerocount
- \processaction
- [\@@rnlocation]
- [ \v!inmargin=>\chardef\linenumberlocation\plusone,
- \v!inleft=>\chardef\linenumberlocation\plusone,
- \v!inright=>\chardef\linenumberlocation\plustwo,
- \v!margin=>\chardef\linenumberlocation\plusone]%
- % \v!text=>\chardef\linenumberlocation\zerocount,
- %\s!unknown=>\chardef\linenumberlocation\zerocount,
- %\s!default=>\chardef\linenumberlocation\zerocount]%
- \doifnot\@@rnwidth\v!margin
- {\freezedimenmacro\@@rnwidth
- \ifcase\linenumberlocation % text
- \advance\leftskip\@@rnwidth\relax
- \fi}%
- \freezedimenmacro\@@rndistance
- \chardef\@@rn@@rnmethod
- \ifprocessingverbatim\zerocount\else\iftypesettinglines\plusone\else\plustwo\fi\fi
- \processaction
- [\@@rnmethod]
- [ \v!type=>\chardef\@@rn@@rnmethod\zerocount,
- \v!line=>\chardef\@@rn@@rnmethod\plusone,
- \v!text=>\chardef\@@rn@@rnmethod\plustwo,
- \v!file=>\chardef\@@rn@@rnmethod\plusthree]%
- \ifcase\@@rn@@rnmethod % verbatim, line by line
- \typesettinglinestrue
- \let\dostartnummeren\dostartnummerenVERB
- \let\stoplinenumbering\dostopnummerenVERB
- \def\placelinenumber
- {\doplacelinenumber
- \global\advance\linenumber \plusone}%
- \or % text, line by line
- \let\dostartnummeren\dostartnummerenLINE
- \let\stoplinenumbering\dostopnummerenLINE
- \def\placelinenumber
- {\doplacelinenumber
- \global\advance\linenumber \plusone}%
- \or % text, whole lot
- \let\dostartnummeren\dostartnummerenPAR
- \let\stoplinenumbering\dostopnummerenPAR
- \def\placelinenumber
- {\global\advance\linenumber \minusone
- \doplacelinenumber}%
- \or % verbatim, selective line by line
- \typesettinglinestrue
- \let\dostartnummeren\dostartnummerenVERB
- \let\stoplinenumbering\dostopnummerenVERB
- \def\placelinenumber
- {\global\linenumber\verbatimlinenumber
- \global\advance\linenumber \minusone
- \doplacelinenumber}%
- \fi
- \dostartnummeren}
-
-\def\startlinenumbering
- {\bgroup
- \the\beforeeverylinenumbering
- \numberinglinestrue
- \complexorsimpleempty\startlinenumbering}
-
-\def\donoplacelinenumber
- {\the\everylinenumber}
-
-\def\doplacelinenumber
- {\ifnum\linenumber<\@@rnstart\relax
- \donoplacelinenumber
- \else
- \ifnum\numexpr(\linenumber/\@@rnstep)*\@@rnstep\relax=\linenumber
- \doattributes\??rn\c!style\c!color\dodoplacelinenumber
- \else
- \donoplacelinenumber
- \fi
- \fi}
-
-\def\someline[#1]%
- {\dolinereference0[#1]\ignorespaces}
-
-\def\startline[#1]%
- {\dolinereference1[#1]\ignorespaces}
-
-\def\stopline[#1]%
- {\removelastspace\dolinereference2[#1]}
-
-\def\inline#1[#2]%
- {\doifelsenothing{#1}
- {\doifinstringelse{--}\currenttextreference
- {\in{\leftlabeltext\v!lines}{\rightlabeltext\v!lines}[\@@rnprefix#2]}
- {\in{\leftlabeltext\v!line }{\rightlabeltext\v!line }[\@@rnprefix#2]}}
- {\in{#1}[\@@rnprefix#2]}}
-
-\def\dostartnummerenPAR
- {\beginofshapebox
- \doglobal\newcounter\linereference}
-
-% localcrossref heroverwegen
-
-\def\setlinereference#1#2#3#4%
- {\setxvalue{lrf:#1}{\noexpand\dogetlinereference{#2}{#3}{#4}}}
-
-\def\getlinereference#1%
- {\getvalue{lrf:#1}}
-
-\def\dogetlinereference#1#2#3%
- {\edef\linereferencename{#1}%
- \edef\linereferenceline{#2}%
- \edef\linereferenceplus{#3}}
-
-% 1 xxx xxx xxx xxx xxx xxx xxx
-% 2 xxx yyy yyy yyy yyy yyy yyy <= start y
-% 3 yyy yyy yyy yyy yyy yyy yyy
-% 4 yyy yyy yyy yyy yyy xxx xxx <= stop y
-% 5 xxx xxx xxx xxx xxx xxx xxx
-
-\def\dolinereference#1[#2]%
- {\bgroup
- \dimen0=\strutdp
- \doif\@@rnreferencing\v!on
- {\doglobal\increment\linereference
- % start 1=>(n=y,l=0,p=1)
- % stop 2=>(n=y,l=0,p=2)
- \setlinereference{\linereference}{\@@rnprefix#2}{0}{#1}%
- \advance\dimen0 \linereference sp}%
- \prewordbreak
- \vrule \!!width \zeropoint \!!depth \dimen0 \!!height \zeropoint
- \prewordbreak
- \egroup}
-
-\def\dostopnummerenPAR % dp's -> openstrutdepth
- {\endofshapebox
- \checkreferences
- \linestepper\zerocount
- \reshapebox{\global\advance\linestepper \plusone}%
- \global\advance\linenumber \linestepper
- \doifelse\@@rnreferencing\v!on
- {\reshapebox % We are going back!
- {\global\advance\linenumber \minusone
- \dimen0=\dp\shapebox
- \advance\dimen0 -\strutdp\relax
- \ifdim\dimen0>\zeropoint
- % 1=>4 | 2=>4 1=>2
- % start 1=>(n=y,l=2,p=1)
- % stop 2=>(n=y,l=4,p=2)
- \dostepwiserecurse\plusone{\number\dimen0}\plusone
- {\getlinereference\recurselevel
- \setlinereference\recurselevel
- {\linereferencename}{\the\linenumber}{\linereferenceplus}}%
- \fi}%
- \global\advance\linenumber \linestepper
- \ifnum\linereference>\zerocount % anders vreemde loop in paragraphs+recurse
- \dorecurse\linereference
- {\getlinereference\recurselevel
- \ifnum\linereferenceplus=2 % stop
- % ref y: text = 4 / Kan dit buiten referentie mechanisme om?
- \expanded{\setlocalcrossreference
- {\referenceprefix\linereferencename}{}{}{\linereferenceline}}%
- \fi}%
- \dorecurse\linereference
- {\getlinereference\recurselevel
- \ifnum\linereferenceplus<2 % start / lone
- \ifnum\linereferenceplus=1 % start
- \getreferenceelements\linereferencename % text = 4
- \ifnum\linereferenceline<0\currenttextreference\relax % 0 prevents error
- \edef\linereferenceline{\linereferenceline--\currenttextreference}%
- \fi
- \fi
- \expanded{\setlocalcrossreference
- {\referenceprefix\linereferencename}{}{}{\linereferenceline}}%
- \fi}%
- \global\let\scratchline\linenumber % We are going back!
- \reshapebox
- {\doglobal\decrement\scratchline
- \hbox
- {\dorecurse\linereference
- {\getlinereference\recurselevel
- \getreferenceelements\linereferencename
- \beforesplitstring\currenttextreference--\at--\to\firstline
- \ifnum\firstline=\scratchline\relax
- % beter een rawtextreference, i.e. expanded
- % \textreference[\linereferencename]{\currenttextreference}%
- \rawtextreference\s!lin\linereferencename\currenttextreference
- \expanded{\setlocalcrossreference
- {\referenceprefix\linereferencename}{}{}{0}}% ==done
- \fi}%
- \dimen0\dp\shapebox
- \advance\dimen0 -\strutdp\relax
- \ifdim\dimen0>\zeropoint
- \dp\shapebox\strutdp
- \fi
- \placelinenumber\box\shapebox}}% no \strut !
- \else
- \reshapebox{\hbox{\placelinenumber\box\shapebox}}% no \strut !
- \fi}
- {\reshapebox{\global\advance\linenumber \minusone}%
- \global\advance\linenumber \linestepper
- \reshapebox{\hbox{\placelinenumber\box\shapebox}}}% no \strut !
- \global\advance\linenumber \linestepper
- \flushshapebox
- \the\aftereverylinenumbering
- \egroup}
-
-\def\emptylines
- {\dosingleempty\doemptylines}
-
-\def\doemptylines[#1]%
- {\endgraf\dorecurse{\iffirstargument#1\else3\fi}\crlf}
-
-\newcount\internalparagraphnumber
-
-\def\setupparagraphnumbering
- {\dosingleempty\dosetupparagraphnumbering}
-
-\def\dosetupparagraphnumbering[#1]%
- {\getparameters
- [\??ph][#1]%
- \processaction
- [\@@phstate]
- [\v!start=>\let\showparagraphnumber\doshowparagraphnumberA,
- \v!stop=>\let\showparagraphnumber\relax,
- \v!line=>\let\showparagraphnumber\doshowparagraphnumberB,
- \v!reset=>\global\internalparagraphnumber\zerocount
- \let\showparagraphnumber\doshowparagraphnumberA]}
-
-\def\dodoshowparagraphnumber
- {\global\advance\internalparagraphnumber \plusone
- \inleftmargin % \tf normalizes em
- {\tf{\doattributes\??ph\c!style\c!color{\the\internalparagraphnumber}}%
- \kern\@@phdistance}}
-
-\def\doshowparagraphnumberA
- {\ifprocessingverbatim
- \iflinepar\dodoshowparagraphnumber\fi
- \else
- \dodoshowparagraphnumber
- \fi}
-
-\def\doshowparagraphnumberB
- {\ifnumberinglines
- \doshowparagraphnumberA
- \fi}
-
-\setuplinenumbering
- [\c!method=,
- \c!conversion=\v!numbers,
- \c!start=1,
- \c!step=1,
- \c!location=\v!margin,
- \c!style=,
- \c!color=,
- \c!width=2em,
- \c!prefix=,
- \c!referencing=\v!on]
-
-% new
-
-\setuplinenumbering
- [\c!width=\ifcase\linenumberlocation2em\else\v!margin\fi,
- \c!left=,
- \c!right=,
- \c!command=,
- \c!distance=\zeropoint,
- \c!align=\ifcase\linenumberlocation\v!right\or\v!right\or\v!left\fi]
-
-\setupparagraphnumbering
- [\c!state=\v!stop,
- \c!style=,
- \c!color=,
- \c!distance=\ifcase\linenumberlocation2em\else\!!zeropoint\fi]
-
-\setuplines
- [\c!before=\blank,
- \c!after=\blank,
- \c!inbetween=\blank,
- \c!indenting=\v!no,
- \c!space=\v!default]
-
-\protect \endinput
diff --git a/tex/context/base/page-mul.tex b/tex/context/base/page-mul.tex
index 8efa1af18..5cd66a420 100644
--- a/tex/context/base/page-mul.tex
+++ b/tex/context/base/page-mul.tex
@@ -580,7 +580,6 @@
\edef\maxcolumndepth{\the\dp\currentcolumnbox}%
\fi}}
-
\chardef\multicolumntopflushmethod\plusone % 0: no correction, 1: correction when topstuff, 2: correction, 3: correction++
\chardef\multicolumntopalignmethod\plustwo % 0: nothing, 1: force grid, 2: follow grid
@@ -603,6 +602,7 @@
\setfalse\someprecolumncontent % will be set elsewhere
\else
\settrue\someprecolumncontent
+\mkprocessboxcontents\precolumnbox
\fi
\forgetall
\setmulticolumnsout
diff --git a/tex/context/base/page-par.tex b/tex/context/base/page-par.tex
new file mode 100644
index 000000000..fa1723d37
--- /dev/null
+++ b/tex/context/base/page-par.tex
@@ -0,0 +1,58 @@
+%D \module
+%D [ file=page-par, % copied from page-lin
+%D version=1997.03.31,
+%D title=\CONTEXT\ Core Macros,
+%D subtitle=Paragraph Numbering,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA / Hans Hagen \& Ton Otten}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{Context Core Macros / Paragraph Numbering}
+
+\unprotect
+
+\newcount\internalparagraphnumber
+
+\def\setupparagraphnumbering
+ {\dosingleempty\dosetupparagraphnumbering}
+
+\def\dosetupparagraphnumbering[#1]%
+ {\getparameters
+ [\??ph][#1]%
+ \processaction
+ [\@@phstate]
+ [\v!start=>\let\showparagraphnumber\doshowparagraphnumberA,
+ \v!stop=>\let\showparagraphnumber\relax,
+ \v!line=>\let\showparagraphnumber\doshowparagraphnumberB,
+ \v!reset=>\global\internalparagraphnumber\zerocount
+ \let\showparagraphnumber\doshowparagraphnumberA]}
+
+\def\dodoshowparagraphnumber
+ {\global\advance\internalparagraphnumber \plusone
+ \inleftmargin % \tf normalizes em
+ {\tf{\doattributes\??ph\c!style\c!color{\the\internalparagraphnumber}}%
+ \kern\@@phdistance}}
+
+\def\doshowparagraphnumberA
+ {\ifprocessingverbatim
+ \iflinepar\dodoshowparagraphnumber\fi
+ \else
+ \dodoshowparagraphnumber
+ \fi}
+
+\def\doshowparagraphnumberB
+ {\ifnumberinglines
+ \doshowparagraphnumberA
+ \fi}
+
+\setupparagraphnumbering
+ [\c!state=\v!stop,
+ \c!style=,
+ \c!color=,
+ \c!distance=\ifcase\linenumberlocation2em\else\!!zeropoint\fi] % will change
+
+\protect \endinput
diff --git a/tex/context/base/regi-ini.lua b/tex/context/base/regi-ini.lua
index 07dd54c1f..a12fd24a2 100644
--- a/tex/context/base/regi-ini.lua
+++ b/tex/context/base/regi-ini.lua
@@ -57,18 +57,21 @@ function regimes.load(regime)
end
function regimes.translate(line,regime)
- if regime and line and regimes.utf[regime] then
- return line:gsub("(.)", regimes.utf[regime])
- else
- return line
+ if regime and line then
+ local rur = regimes.utf[regime]
+ if rur then
+ return line:gsub("(.)", rur) -- () redundant
+ end
end
+ return line
end
function regimes.enable(regime)
if regimes.data[regime] then
regimes.currentregime = regime
+ local translate = regimes.translate
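+ -- the closure below captures this regime, so switching regimes means
+ -- calling enable again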
input.filters.dynamic_translator = function(s)
- return regimes.translate(s,regimes.currentregime)
+ return translate(s,regime)
end
else
regimes.disable()
diff --git a/tex/context/base/regi-ini.mkii b/tex/context/base/regi-ini.mkii
index e7c2a6792..a5b2cf177 100644
--- a/tex/context/base/regi-ini.mkii
+++ b/tex/context/base/regi-ini.mkii
@@ -45,7 +45,14 @@
\let\mkwalkregime \gobbleoneargument
\let\mkautosetregime\gobbletwoarguments
- \def\mkenableregime#1{\XeTeXinputencoding "#1"\relax}
+ % \def\mkenableregime#1%
+ % {\XeTeXinputencoding "#1"\relax}
+
+ \def\mkenableregime#1%
+ {\doifelse{#1}{utf}%
+ {\writestatus\m!regime{mapping utf to utf-8}%
+ \XeTeXinputencoding{utf-8}}
+ {\XeTeXinputencoding{#1}}}
\endXETEX
diff --git a/tex/context/base/s-abr-01.tex b/tex/context/base/s-abr-01.tex
index 614895f2d..101d9bcdb 100644
--- a/tex/context/base/s-abr-01.tex
+++ b/tex/context/base/s-abr-01.tex
@@ -42,6 +42,7 @@
\logo [BLUESKY] {BlueSky}
\logo [BMP] {bmp}
\logo [BSD] {bsd}
+\logo [CCODE] {c}
\logo [CALCMATH] {CalcMath}
\logo [CD] {cd}
\logo [CDROM] {cdrom}
@@ -144,10 +145,12 @@
\logo [METATEX] {Meta\TeX}
\logo [MIKTEX] {Mik\TeX}
\logo [MLTEX] {ml\TeX}
+\logo [METATYPE] {MetaType1}
\logo [MODULA] {Modula}
\logo [MOV] {mov}
\logo [MPS] {mps}
\logo [MPTOPDF] {mptopdf}
+\logo [MPLIB] {mplib}
\logo [MSDOS] {msdos}
\logo [MSWINDOWS] {MS~Windows}
\logo [MTXRUN] {mtxrun}
@@ -216,6 +219,7 @@
\logo [TEXNL] {tex-nl}
\logo [TEXSHOW] {\TeX show}
\logo [TEXSPELL] {\TeX spell}
+\logo [TEXGYRE] {\TeX\ Gyre}
\logo [TEXSYNC] {texsync}
\logo [TEXTMATE] {TextMate}
\logo [TEXTOOLS] {\TeX tools}
diff --git a/tex/context/base/sort-ini.mkii b/tex/context/base/sort-ini.mkii
index f9e813d5c..6c904e8cc 100644
--- a/tex/context/base/sort-ini.mkii
+++ b/tex/context/base/sort-ini.mkii
@@ -124,6 +124,10 @@
\readsysfile{\f!sortprefix lan}\donothing\donothing
\egroup}}}
+\prependtoks
+ \savesortdefinitions
+\to \everysavesortkeys
+
% \defineregister[one]
% \defineregister[two] \setupregister[two][language=cz]
%
diff --git a/tex/context/base/spec-tpd.tex b/tex/context/base/spec-tpd.tex
index 068ac8dfe..d2a1d73a7 100644
--- a/tex/context/base/spec-tpd.tex
+++ b/tex/context/base/spec-tpd.tex
@@ -539,7 +539,7 @@
\definespecial\dostartnonecolormode{\doPDFstartnonecolormode}
\definespecial\doregisternonecolor {\doPDFregisternonecolor}
-\def\doPDFregisterspotcolorname#1#2%
+\def\doPDFregisterspotcolorname#1#2% no need for escape in luatex
{\bgroup
\let\ascii\empty
\def\docommand##1%
diff --git a/tex/context/base/supp-pdf.tex b/tex/context/base/supp-pdf.tex
index 63dfb1f69..61f7b32e2 100644
--- a/tex/context/base/supp-pdf.tex
+++ b/tex/context/base/supp-pdf.tex
@@ -602,86 +602,8 @@
\expandafter\dohandleMPsequenceC
\fi#1}
-%\def\dohandleMPsequenceA#1 %
-% {\setMPargument{#1}%
-% \handleMPsequence}
-
\let\dohandleMPsequenceA\setMPsequence
-% \def\dohandleMPsequenceB#1 %
-% {\edef\somestring{#1}%
-% \ifx\somestring\PSmoveto
-% \edef\lastMPmoveX{\gMPa1}%
-% \edef\lastMPmoveY{\gMPa2}%
-% \PDFcode{\!MPgMPa1 \!MPgMPa2 m}%
-% \resetMPstack
-% \else\ifx\somestring\PSnewpath
-% \let\handleMPsequence\handleMPpath
-% \else\ifx\somestring\PSgsave
-% \PDFcode{q}%
-% \resetMPstack
-% \else\ifx\somestring\PSgrestore
-% \PDFcode{Q}%
-% \resetMPstack
-% \else\ifx\somestring\PSdtransform % == setlinewidth
-% \let\handleMPsequence\handleMPdtransform
-% % after that we will encounter more tokens until setlinewidth+pop
-% % or pop+setlinewidth which we catch next; we explicitly need to
-% % reset the stack since [] n setdash may follow; a more clever
-% % approach would be to read on till the condition is met, but it's
-% % the only pop / setlinewidth we will encounter so ...
-% \else\ifx\somestring\PSsetlinewidth
-% % already handled in dtransform
-% \resetMPstack
-% \else\ifx\somestring\PSpop
-% % already handled in dtransform
-% \resetMPstack
-% \else\ifx\somestring\PSconcat
-% \cleanupMPconcat
-% \PDFcode{\gMPa1 \gMPa2 \gMPa3 \gMPa4 \gMPa5 \gMPa6 cm}%
-% \resetMPstack
-% \else\ifx\somestring\PSsetrgbcolor
-% \handleMPrgbcolor
-% \resetMPstack
-% \else\ifx\somestring\PSsetcmykcolor
-% \handleMPcmykcolor
-% \resetMPstack
-% \else\ifx\somestring\PSsetgray
-% \handleMPgraycolor
-% \resetMPstack
-% \else\ifx\somestring\PStranslate
-% \PDFcode{1 0 0 1 \gMPa1 \gMPa2 cm}%
-% \resetMPstack
-% \else\ifx\somestring\PSsetdash
-% \handleMPsetdash
-% \resetMPstack
-% \else\ifx\somestring\PSsetlinejoin
-% \PDFcode{\gMPa1 j}%
-% \resetMPstack
-% \else\ifx\somestring\PSsetmiterlimit
-% \PDFcode{\gMPa1 M}%
-% \resetMPstack
-% \else\ifx\somestring\PSfshow
-% \PDFcode{n}%
-% \handleMPfshow
-% \resetMPstack
-% \else\ifx\somestring\PSsetlinecap
-% \PDFcode{\gMPa1 J}%
-% \resetMPstack
-% \else\ifx\somestring\PSrlineto
-% \PDFcode{\!MP\lastMPmoveX\space\!MP\lastMPmoveY\space l S}%
-% \resetMPstack
-% \else\ifx\somestring\PSscale
-% \PDFcode{\gMPa1 0 0 \gMPa2 0 0 cm}%
-% \resetMPstack
-% \else\ifx\somestring\PSspecial
-% \handleMPspecialcommand
-% \resetMPstack
-% \else
-% \handleMPgraphic% {#1}%
-% \fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi
-% \handleMPsequence}
-
\def\installMPSkeywordN#1#2%
{\expandafter\def\csname\@@MP:N:#1\endcsname{#2}}
@@ -969,11 +891,48 @@
%D finally I saw the light. It proved that we also had to
%D take care of \type{(split arguments)}.
+% \def\setMPfshowfont#1#2%
+% {\font\temp=#1\space at #2\relax\temp}
+
+% \startMPcode
+% draw btex Ga toch effe f\kern0ptietsen?{}` etex ;
+% \stopMPcode
+
+\newtoks \everyMPshowfont
+
\def\setMPfshowfont#1#2%
- {\font\temp=#1\space at #2\relax\temp}
+ {\font\temp=#1\space at #2\relax\temp
+ \the\everyMPshowfont}
\let\MPfshowcommand\empty
+%D The next hackery handles characters one by one. We only support this
+%D for the latest and greatest \METAPOST\ binaries, the ones that escape
+%D problematic chars.
+
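+%D The loop grabs one token at a time: the \type{\relax} appended by
+%D \type{\doflushMPtext} ends the scan, a \type{\char} is followed by a
+%D number that is read into \type{\scratchcounter} and typeset, and any
+%D other token is typeset as it is.
+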
+\def\doflushMPtext#1%
+ {\edef\!!stringa{#1}%
+ \@EA\dodoflushMPtext\!!stringa\relax}
+
+\def\dodoflushMPtext
+ {\afterassignment\dododoflushMPtext\let\nexttoken=}
+
+\def\dododoflushMPtext
+ {\ifx\nexttoken\relax
+ % done
+ \else\ifx\nexttoken\char
+ \@EA\@EA\@EA\dodododoflushMPtext
+ \else
+ {\nexttoken}%
+ \@EA\@EA\@EA\dodoflushMPtext
+ \fi\fi}
+
+\def\dodododoflushMPtext
+ {\afterassignment\dododododoflushMPtext\scratchcounter}
+
+\def\dododododoflushMPtext
+ {{\char\scratchcounter}\let\next\dodoflushMPtext}
+
\def\dohandleMPfshow
{\bgroup
\setbox\scratchbox\hbox
@@ -1002,7 +961,7 @@
\MPfshowcommand
{\ifnum\nofMParguments=1
\def\do(##1){##1}%
- \dogMPa1%
+ \doflushMPtext{\dogMPa1}% only latest mp gets this treatment
\else
% we need to catch ( a ) (a a a) (\123 \123 \123) etc
\scratchcounter1
@@ -1162,7 +1121,7 @@
\or
\PDFcode{\!MPgMPs1 \!MPgMPs2 \!MPgMPs3 \!MPgMPs4 \!MPgMPs5 \!MPgMPs6 c}%
\or
- \PDFcode{\!MP\lastMPmoveX\space\!MP\lastMPmoveY\space l S}%
+ \PDFcode{\!MP\lastMPmoveX\space\!MP\lastMPmoveY\space l}%
\or
\edef\lastMPmoveX{\gMPs1}% evt \!MP here
\edef\lastMPmoveY{\gMPs2}%
@@ -2086,7 +2045,7 @@
{\ifcase\finiMPpath
\chardef\finiMPpath2
\let\handleMPsequence\processMPpath
- \fi}
+ \fi}
\installMPSkeywordP \PSstroke
{\ifcase\finiMPpath
\chardef\finiMPpath1
diff --git a/tex/context/base/syst-con.lua b/tex/context/base/syst-con.lua
index 9f35d68b6..519808e17 100644
--- a/tex/context/base/syst-con.lua
+++ b/tex/context/base/syst-con.lua
@@ -16,13 +16,20 @@ the top of 's char range but outside the unicode range.
do
local char, flush, format = unicode.utf8.char, tex.sprint, string.format
+ function converters.hexstringtonumber(n) flush(tonumber(n,16)) end
+ function converters.octstringtonumber(n) flush(tonumber(n, 8)) end
+ function converters.rawcharacter (n) flush(char(0x110000+n)) end
+
function converters.lchexnumber (n) flush(format("%x" ,n)) end
function converters.uchexnumber (n) flush(format("%X" ,n)) end
function converters.lchexnumbers (n) flush(format("%02x",n)) end
function converters.uchexnumbers (n) flush(format("%02X",n)) end
function converters.octnumber (n) flush(format("%03o",n)) end
- function converters.hexstringtonumber(n) flush(tonumber(n,16)) end
- function converters.octstringtonumber(n) flush(tonumber(n, 8)) end
- function converters.rawcharacter (n) flush(char(0x110000+n)) end
+
+ function converters.lchexnumber (n) flush(("%x" ):format(n)) end
+ function converters.uchexnumber (n) flush(("%X" ):format(n)) end
+ function converters.lchexnumbers (n) flush(("%02x"):format(n)) end
+ function converters.uchexnumbers (n) flush(("%02X"):format(n)) end
+ function converters.octnumber (n) flush(("%03o"):format(n)) end
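+
+ -- note: these string method variants override the format() based
+ -- definitions above; the duplicate set is presumably kept for comparison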
end
diff --git a/tex/context/base/syst-etx.tex b/tex/context/base/syst-etx.tex
index 093c3d17f..5d7ab9a65 100644
--- a/tex/context/base/syst-etx.tex
+++ b/tex/context/base/syst-etx.tex
@@ -209,6 +209,8 @@
\def\newmarks {\myalloc@8\marks \mathchardef\@@maxallocation}
\def\newlanguage{\myalloc@9\language\chardef \@@minallocation}
+\def\topofboxstack{\number\count24 }
+
%D Since in \CONTEXT\ we only have one math family left we
%D redefine \type {\newfam}.
diff --git a/tex/context/base/syst-mtx.tex b/tex/context/base/syst-mtx.tex
index 4e5e2ef26..0abd89e57 100644
--- a/tex/context/base/syst-mtx.tex
+++ b/tex/context/base/syst-mtx.tex
@@ -43,6 +43,8 @@
\def\newfam#1{\chardef#1=15 }
+\def\topofboxstack{\number\count24 }
+
\count18=1
\mathchardef\@@minallocation = 16
diff --git a/tex/context/base/syst-omg.tex b/tex/context/base/syst-omg.tex
index 1bedb195b..0aa409ccb 100644
--- a/tex/context/base/syst-omg.tex
+++ b/tex/context/base/syst-omg.tex
@@ -31,6 +31,8 @@
\def\newfam#1{\chardef#1=15 }
+\def\topofboxstack{\number\count24 }
+
\count18=1
\mathchardef\@@minallocation = 16
diff --git a/tex/context/base/thrd-trg.tex b/tex/context/base/thrd-trg.tex
index 3fce672d5..dda81a8af 100644
--- a/tex/context/base/thrd-trg.tex
+++ b/tex/context/base/thrd-trg.tex
@@ -6,6 +6,12 @@
\unprotect
+% compare: \number 0.5 \number -0.5 \number 1.5 \number -1.5
+%
+% so we need:
+
+\def\realnumber#1{\withoutpt\the\dimexpr#1\s!pt\relax} % brrr
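+%
+% e.g. \realnumber{0.50} and \realnumber{.5} both expand to 0.5, so fractional
+% angles end up with one canonical \csname key, which plain \number cannot
+% guarantee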
+
\chardef \@iv = 4
\chardef \@xc = 90 % was \nin@ty
\chardef \@clxx = 180
@@ -51,22 +57,22 @@
%D calculations.
\def\calculatesin#1%
- {{\expandafter\ifx\csname sin \number#1\endcsname\relax
+ {{\expandafter\ifx\csname sin \realnumber{#1}\endcsname\relax
\!!dimena#1\onepoint
\tg@@sin
- \expandafter\xdef\csname sin \number#1\endcsname{\withoutpt\the\!!dimena}%
+ \expandafter\xdef\csname sin \realnumber{#1}\endcsname{\withoutpt\the\!!dimena}%
\fi}}
\def\calculatecos#1%
- {{\expandafter\ifx\csname cos \number#1\endcsname\relax
+ {{\expandafter\ifx\csname cos \realnumber{#1}\endcsname\relax
\!!dimena\@xc\onepoint
\advance\!!dimena-#1\onepoint
\tg@@sin
- \expandafter\xdef\csname cos \number#1\endcsname{\withoutpt\the\!!dimena}%
+ \expandafter\xdef\csname cos \realnumber{#1}\endcsname{\withoutpt\the\!!dimena}%
\fi}}
\def\calculatetan#1%
- {{\expandafter\ifx\csname tan \number#1\endcsname\relax
+ {{\expandafter\ifx\csname tan \realnumber{#1}\endcsname\relax
\calculatesin{#1}%
\calculatecos{#1}%
\!!dimena\calculatedcos{#1}\onepoint
@@ -74,40 +80,40 @@
\!!dimenb\calculatedsin{#1}\onepoint
\!!dimenb\@xvi@k\!!dimenb
\divide\!!dimenb\!!dimena
- \expandafter\xdef\csname tan \number#1\endcsname{\withoutpt\the\!!dimenb}%
+ \expandafter\xdef\csname tan \realnumber{#1}\endcsname{\withoutpt\the\!!dimenb}%
\fi}}
%D The results are accessed with:
-\def\calculatedsin#1{\csname sin \number#1\endcsname}
-\def\calculatedcos#1{\csname cos \number#1\endcsname}
-\def\calculatedtan#1{\csname tan \number#1\endcsname}
+\def\calculatedsin#1{\csname sin \realnumber{#1}\endcsname}
+\def\calculatedcos#1{\csname cos \realnumber{#1}\endcsname}
+\def\calculatedtan#1{\csname tan \realnumber{#1}\endcsname}
%D A safer implementation would be:
-\def\calculatedsin#1{\executeifdefined{sin \number#1}\!!zerocount}
-\def\calculatedcos#1{\executeifdefined{cos \number#1}\!!plusone}
-\def\calculatedtan#1{\executeifdefined{tan \number#1}\!!zerocount}
+\def\calculatedsin#1{\executeifdefined{sin \realnumber{#1}}\!!zerocount}
+\def\calculatedcos#1{\executeifdefined{cos \realnumber{#1}}\!!plusone }
+\def\calculatedtan#1{\executeifdefined{tan \realnumber{#1}}\!!zerocount}
%D A few values are predefined, although on today's systems there
%D is no real reason for that. I've added the 270 ones and changed
%D the -90 tan. Also, I prefer text constants (\type {\!!..}) over
%D counters (\type {\..}).
-\expandafter\let\csname sin 0\endcsname\!!zerocount
-\expandafter\let\csname cos 0\endcsname\!!plusone
-\expandafter\let\csname sin 90\endcsname\!!plusone
-\expandafter\let\csname cos 90\endcsname\!!zerocount
-\expandafter\let\csname sin 180\endcsname\!!zerocount
-\expandafter\let\csname cos 180\endcsname\!!minusone
-\expandafter\let\csname sin 270\endcsname\!!minusone
-\expandafter\let\csname cos 270\endcsname\!!zerocount
+\expandafter\let\csname sin \realnumber{ 0}\endcsname\!!zerocount
+\expandafter\let\csname cos \realnumber{ 0}\endcsname\!!plusone
+\expandafter\let\csname sin \realnumber{ 90}\endcsname\!!plusone
+\expandafter\let\csname cos \realnumber{ 90}\endcsname\!!zerocount
+\expandafter\let\csname sin \realnumber{180}\endcsname\!!zerocount
+\expandafter\let\csname cos \realnumber{180}\endcsname\!!minusone
+\expandafter\let\csname sin \realnumber{270}\endcsname\!!minusone
+\expandafter\let\csname cos \realnumber{270}\endcsname\!!zerocount
-\expandafter\let\csname sin -90\endcsname\!!minusone
-\expandafter\let\csname cos -90\endcsname\!!zerocount
+\expandafter\let\csname sin \realnumber{-90}\endcsname\!!minusone
+\expandafter\let\csname cos \realnumber{-90}\endcsname\!!zerocount
-\expandafter\def\csname tan 90\endcsname{\writestatus\m!systems{infinite tan +90}}
-\expandafter\def\csname tan -90\endcsname{\writestatus\m!systems{infinite tan -90}}
+\expandafter\def\csname tan \realnumber{ 90}\endcsname{\writestatus\m!systems{infinite tan +90}}
+\expandafter\def\csname tan \realnumber{-90}\endcsname{\writestatus\m!systems{infinite tan -90}}
%D Usage: \type {\calculatesin{10}} and \type {\calculatedsin{10}}
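The \calculate... macros above memoize each result in a \csname sin <value>\endcsname control sequence, so a given angle is computed only once and \calculatedsin then simply looks the value up. A minimal Lua sketch of the same memoization pattern, with an ordinary table standing in for the csname cache:

-- minimal sketch of the memoization idea; the real macros store strings
-- in csnames, here a table keyed by the (stringified) angle is used
local sincache = { }

local function calculatedsin(angle)
    local key = tostring(angle)       -- crude stand-in for \realnumber
    local s = sincache[key]
    if not s then
        s = math.sin(math.rad(angle)) -- compute once ...
        sincache[key] = s             -- ... and reuse afterwards
    end
    return s
end

print(calculatedsin(10), calculatedsin(10)) -- second call hits the cache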
diff --git a/tex/context/base/type-enc.tex b/tex/context/base/type-enc.tex
index 799eedbd5..d43f84326 100644
--- a/tex/context/base/type-enc.tex
+++ b/tex/context/base/type-enc.tex
@@ -30,7 +30,6 @@
% fallbacks, no math in latin modern
- \definefontsynonym[lmdunh10][cmdunh10]
\definefontsynonym[lmff10] [cmff10]
\definefontsynonym[lmfi10] [cmfi10]
\definefontsynonym[lmfib8] [cmfib8]
diff --git a/tex/context/base/type-one.tex b/tex/context/base/type-one.tex
index b724466a8..bc0d45027 100644
--- a/tex/context/base/type-one.tex
+++ b/tex/context/base/type-one.tex
@@ -246,6 +246,21 @@
\stoptypescript
\starttypescript [math] [modern,latin-modern]
+ \definefontsynonym [LMMathRoman5-Regular] [rm-lmr5]
+ \definefontsynonym [LMMathRoman6-Regular] [rm-lmr6]
+ \definefontsynonym [LMMathRoman7-Regular] [rm-lmr7]
+ \definefontsynonym [LMMathRoman8-Regular] [rm-lmr8]
+ \definefontsynonym [LMMathRoman9-Regular] [rm-lmr9]
+ \definefontsynonym [LMMathRoman10-Regular] [rm-lmr10]
+ \definefontsynonym [LMMathRoman12-Regular] [rm-lmr12]
+ \definefontsynonym [LMMathRoman17-Regular] [rm-lmr17]
+ \definefontsynonym [LMMathRoman5-Bold] [rm-lmbx5]
+ \definefontsynonym [LMMathRoman6-Bold] [rm-lmbx6]
+ \definefontsynonym [LMMathRoman7-Bold] [rm-lmbx7]
+ \definefontsynonym [LMMathRoman8-Bold] [rm-lmbx8]
+ \definefontsynonym [LMMathRoman9-Bold] [rm-lmbx9]
+ \definefontsynonym [LMMathRoman10-Bold] [rm-lmbx10]
+ \definefontsynonym [LMMathRoman12-Bold] [rm-lmbx12]
\definefontsynonym [LMMathSymbols5-BoldItalic] [lmbsy5]
\definefontsynonym [LMMathSymbols7-BoldItalic] [lmbsy7]
\definefontsynonym [LMMathSymbols10-BoldItalic][lmbsy10]
diff --git a/tex/context/base/type-otf.tex b/tex/context/base/type-otf.tex
index 7bfd1ee02..3e0f75961 100644
--- a/tex/context/base/type-otf.tex
+++ b/tex/context/base/type-otf.tex
@@ -188,6 +188,21 @@
\stoptypescript
\starttypescript [math] [modern,latin-modern]
+ \definefontsynonym [LMMathRoman5-Regular] [rm-lmr5]
+ \definefontsynonym [LMMathRoman6-Regular] [rm-lmr6]
+ \definefontsynonym [LMMathRoman7-Regular] [rm-lmr7]
+ \definefontsynonym [LMMathRoman8-Regular] [rm-lmr8]
+ \definefontsynonym [LMMathRoman9-Regular] [rm-lmr9]
+ \definefontsynonym [LMMathRoman10-Regular] [rm-lmr10]
+ \definefontsynonym [LMMathRoman12-Regular] [rm-lmr12]
+ \definefontsynonym [LMMathRoman17-Regular] [rm-lmr17]
+ \definefontsynonym [LMMathRoman5-Bold] [rm-lmbx5]
+ \definefontsynonym [LMMathRoman6-Bold] [rm-lmbx6]
+ \definefontsynonym [LMMathRoman7-Bold] [rm-lmbx7]
+ \definefontsynonym [LMMathRoman8-Bold] [rm-lmbx8]
+ \definefontsynonym [LMMathRoman9-Bold] [rm-lmbx9]
+ \definefontsynonym [LMMathRoman10-Bold] [rm-lmbx10]
+ \definefontsynonym [LMMathRoman12-Bold] [rm-lmbx12]
\definefontsynonym [LMMathSymbols5-BoldItalic] [lmbsy5]
\definefontsynonym [LMMathSymbols7-BoldItalic] [lmbsy7]
\definefontsynonym [LMMathSymbols10-BoldItalic][lmbsy10]
diff --git a/tex/context/base/type-tmf.tex b/tex/context/base/type-tmf.tex
index f4445209c..9783ad736 100644
--- a/tex/context/base/type-tmf.tex
+++ b/tex/context/base/type-tmf.tex
@@ -82,7 +82,7 @@
\stoptypescript
\starttypescript [math] [modern,computer-modern,latin-modern] [name]
- \definefontsynonym [MathRoman] [LMRoman-Regular]
+ \definefontsynonym [MathRoman] [LMMathRoman-Regular]
\definefontsynonym [MathExtension] [LMMathExtension-Regular]
\definefontsynonym [MathItalic] [LMMathItalic-Italic]
\definefontsynonym [MathSymbol] [LMMathSymbols-Italic]
@@ -111,14 +111,14 @@
\stoptypescript
\starttypescript [boldmath] [modern,computer-modern,latin-modern] [name]
- \definefontsynonym [MathRoman] [LMRoman-Bold]
+ \definefontsynonym [MathRoman] [LMMathRoman-Bold]
\definefontsynonym [MathExtension] [LMMathExtension-Regular]
\definefontsynonym [MathItalic] [LMMathItalic-BoldItalic]
\definefontsynonym [MathSymbol] [LMMathSymbols-BoldItalic]
\stoptypescript
\starttypescript [bfmath] [modern,computer-modern,latin-modern] [name]
- \definefontsynonym [MathRomanBold] [LMRoman-Bold]
+ \definefontsynonym [MathRomanBold] [LMMathRoman-Bold]
\definefontsynonym [MathExtension] [LMMathExtension-Regular]
\definefontsynonym [MathItalicBold] [LMMathItalic-BoldItalic]
\definefontsynonym [MathSymbolBold] [LMMathSymbols-BoldItalic]
@@ -250,6 +250,8 @@
\stoptypescript
\starttypescript [math] [modern,latin-modern]
+ \definefontsynonym [LMMathRoman-Regular] [LMMathRoman10-Regular]
+ \definefontsynonym [LMMathRoman-Bold] [LMMathRoman10-Bold]
\definefontsynonym [LMMathSymbols-BoldItalic] [LMMathSymbols10-BoldItalic]
\definefontsynonym [LMMathSymbols-Italic] [LMMathSymbols10-Italic]
\definefontsynonym [LMMathExtension-Regular] [LMMathExtension10-Regular]
@@ -257,6 +259,21 @@
\definefontsynonym [LMMathItalic-BoldItalic] [LMMathItalic10-BoldItalic]
\stoptypescript
+% can boldmath and bfmath be joined?
+\starttypescript [boldmath] [modern,computer-modern,latin-modern] [name]
+ \definefontsynonym [LMMathRoman-Bold] [LMMathRoman10-Bold]
+ \definefontsynonym [LMMathExtension-Regular] [LMMathExtension10-Regular]
+ \definefontsynonym [LMMathItalic-BoldItalic] [LMMathItalic10-BoldItalic]
+ \definefontsynonym [LMMathSymbols-BoldItalic] [LMMathSymbols10-BoldItalic]
+\stoptypescript
+
+\starttypescript [bfmath] [modern,computer-modern,latin-modern] [name]
+ \definefontsynonym [LMMathRoman-Bold] [LMMathRoman10-Bold]
+ \definefontsynonym [LMMathExtension-Regular] [LMMathExtension10-Regular]
+ \definefontsynonym [LMMathItalic-BoldItalic] [LMMathItalic10-BoldItalic]
+ \definefontsynonym [LMMathSymbols-BoldItalic] [LMMathSymbols10-BoldItalic]
+\stoptypescript
+
\starttypescript [serif] [modern,latin-modern,computer-modern]
\definefontsynonym [cmr5] [LMRoman5-Regular]
\definefontsynonym [cmr6] [LMRoman6-Regular]
diff --git a/tex/context/base/type-xtx.tex b/tex/context/base/type-xtx.tex
index be833bb2a..32ff858d1 100644
--- a/tex/context/base/type-xtx.tex
+++ b/tex/context/base/type-xtx.tex
@@ -12,7 +12,7 @@
%C details.
%D Here are some fonts definitions that can get you started with
-%D \XETEX (for more details see Adam's MyWay documents).
+%D \XETEX\ (for more details see Adam's MyWay documents).
%D
%D The typescripts in this file are mostly independent of the other
%D typescript files. Generally, you can speed things up a lot by
@@ -36,7 +36,7 @@
%D
%D \starttyping
%D \definetypeface[basic][rm][Xserif][Baskerville]
-%D \definetypeface[basic][ss][Xsans] [Optima Regular][default][encoding=uc,rscale=.87]
+%D \definetypeface[basic][ss][Xsans] [Optima Regular][default][features=default,rscale=.87]
%D \definetypeface[basic][tt][Xmono] [Courier] [default]
%D \stoptyping
%D
@@ -108,10 +108,12 @@
%D HH: todo, define feature set switch mapping=tex-text
-\definefontsynonym[Dummy] [name:\typescripttwo\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[DummyItalic] [name:\typescripttwo/I\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[DummyBold] [name:\typescripttwo/B\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[DummyBoldItalic][name:\typescripttwo/BI\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[Dummy] [name:\typescripttwo] [features=default]
+\definefontsynonym[DummyItalic] [name:\typescripttwo/I] [features=default]
+\definefontsynonym[DummyBold] [name:\typescripttwo/B] [features=default]
+\definefontsynonym[DummyBoldItalic][name:\typescripttwo/BI][features=default]
+
+\definefontsynonym[DummyCaps] [name:\typescripttwo] [features=smallcaps]
\definefontsynonym[Serif] [Dummy]
\definefontsynonym[SerifBold] [DummyBold]
@@ -119,16 +121,18 @@
\definefontsynonym[SerifBoldItalic] [DummyBoldItalic]
\definefontsynonym[SerifSlanted] [DummyItalic]
\definefontsynonym[SerifBoldSlanted][DummyBoldItalic]
-\definefontsynonym[SerifCaps] [Dummy]
+\definefontsynonym[SerifCaps] [DummyCaps]
\stoptypescript
\starttypescript[Xsans][all][name]
-\definefontsynonym[DummySans] [name:\typescripttwo\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[DummySansItalic] [name:\typescripttwo/I\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[DummySansBold] [name:\typescripttwo/B\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[DummySansBoldItalic][name:\typescripttwo/BI\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[DummySans] [name:\typescripttwo] [features=default]
+\definefontsynonym[DummySansItalic] [name:\typescripttwo/I] [features=default]
+\definefontsynonym[DummySansBold] [name:\typescripttwo/B] [features=default]
+\definefontsynonym[DummySansBoldItalic][name:\typescripttwo/BI][features=default]
+
+\definefontsynonym[DummySansCaps] [name:\typescripttwo] [features=smallcaps]
\definefontsynonym[Sans] [DummySans]
\definefontsynonym[SansBold] [DummySansBold]
@@ -136,16 +140,18 @@
\definefontsynonym[SansBoldItalic] [DummySansBoldItalic]
\definefontsynonym[SansSlanted] [DummySansItalic]
\definefontsynonym[SansBoldSlanted][DummySansBoldItalic]
-\definefontsynonym[SansCaps] [DummySans]
+\definefontsynonym[SansCaps] [DummySansCaps]
\stoptypescript
\starttypescript[Xmono][all][name]
-\definefontsynonym[DummyMono] [name:\typescripttwo] [encoding=uc]
-\definefontsynonym[DummyMonoItalic] [name:\typescripttwo/I] [encoding=uc]
-\definefontsynonym[DummyMonoBold] [name:\typescripttwo/B] [encoding=uc]
-\definefontsynonym[DummyMonoBoldItalic][name:\typescripttwo/BI][encoding=uc]
+\definefontsynonym[DummyMono] [name:\typescripttwo]
+\definefontsynonym[DummyMonoItalic] [name:\typescripttwo/I]
+\definefontsynonym[DummyMonoBold] [name:\typescripttwo/B]
+\definefontsynonym[DummyMonoBoldItalic][name:\typescripttwo/BI]
+
+% TODO: smallcaps without other features
\definefontsynonym[Mono] [DummyMono]
\definefontsynonym[MonoBold] [DummyMonoBold]
@@ -204,10 +210,10 @@
\starttypescript[serif][times][uc]
-\definefontsynonym[Times-Roman] [name:Times Roman\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[Times-Italic] [name:Times Italic\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[Times-Bold] [name:Times Bold\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[Times-BoldItalic][name:Times Bold Italic\xetexcolon mapping=tex-text;][encoding=uc]
+\definefontsynonym[Times-Roman] [name:Times Roman] [features=default]
+\definefontsynonym[Times-Italic] [name:Times Italic] [features=default]
+\definefontsynonym[Times-Bold] [name:Times Bold] [features=default]
+\definefontsynonym[Times-BoldItalic][name:Times Bold Italic][features=default]
\stoptypescript
@@ -215,14 +221,14 @@
\starttypescript[serif][palatino][uc]
-\definefontsynonym[Palatino] [name:Book Antiqua\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[Palatino-Italic] [name:Book Antiqua Italic\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[Palatino-Bold] [name:Book Antiqua Bold\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[Palatino-BoldItalic] [name:Book Antiqua Bold Italic\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[Palatino] [name:Book Antiqua] [features=default]
+\definefontsynonym[Palatino-Italic] [name:Book Antiqua Italic] [features=default]
+\definefontsynonym[Palatino-Bold] [name:Book Antiqua Bold] [features=default]
+\definefontsynonym[Palatino-BoldItalic] [name:Book Antiqua Bold Italic][features=default]
-\definefontsynonym[Palatino-Slanted] [Palatino-Italic]
-\definefontsynonym[Palatino-BoldSlanted][Palatino-BoldItalic]
-\definefontsynonym[Palatino-Caps] [Palatino]
+\definefontsynonym[Palatino-Slanted] [Palatino-Italic]
+\definefontsynonym[Palatino-BoldSlanted] [Palatino-BoldItalic]
+\definefontsynonym[Palatino-Caps] [Palatino]
\stoptypescript
@@ -231,10 +237,10 @@
\starttypescript[sans][helvetica][uc]
-\definefontsynonym[Helvetica] [name:Helvetica Neue\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[Helvetica-Oblique] [name:Helvetica Neue Italic\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[Helvetica-Bold] [name:Helvetica Neue Bold\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[Helvetica-BoldOblique][name:Helvetica Neue Bold Italic\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[Helvetica] [name:Helvetica Neue] [features=default]
+\definefontsynonym[Helvetica-Oblique] [name:Helvetica Neue Italic] [features=default]
+\definefontsynonym[Helvetica-Bold] [name:Helvetica Neue Bold] [features=default]
+\definefontsynonym[Helvetica-BoldOblique][name:Helvetica Neue Bold Italic][features=default]
\stoptypescript
@@ -244,9 +250,9 @@
\starttypescript[mono][courier][uc]
-\definefontsynonym[Courier] [name:Courier\xetexcolon mapping=tex-text] [encoding=uc]
+\definefontsynonym[Courier] [name:Courier]
\definefontsynonym[Courier-Oblique] [Courier]
-\definefontsynonym[Courier-Bold] [name:Courier Bold\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[Courier-Bold] [name:Courier Bold]
\definefontsynonym[Courier-BoldOblique][Courier-Bold]
\stoptypescript
@@ -284,8 +290,8 @@
\starttypescript[sans][lucidagrande][uc]
-\definefontsynonym[LucidaGrande] [name:Lucida Grande\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[LucidaGrandeBold][name:Lucida Grande Bold\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[LucidaGrande] [name:Lucida Grande] [features=default]
+\definefontsynonym[LucidaGrandeBold][name:Lucida Grande Bold][features=default]
\stoptypescript
@@ -302,11 +308,11 @@
\stoptypescript
\starttypescript[sans][optima][uc]
-\definefontsynonym[Optima] [name:Optima Regular\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[OptimaItalic] [name:Optima Italic\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[OptimaBold] [name:Optima Bold\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[OptimaBoldItalic][name:Optima Bold Italic\xetexcolon mapping=tex-text][encoding=uc]
-\definefontsynonym[OptimaBlack] [name:Optima ExtraBlack\xetexcolon mapping=tex-text] [encoding=uc]
+\definefontsynonym[Optima] [name:Optima Regular] [features=default]
+\definefontsynonym[OptimaItalic] [name:Optima Italic] [features=default]
+\definefontsynonym[OptimaBold] [name:Optima Bold] [features=default]
+\definefontsynonym[OptimaBoldItalic][name:Optima Bold Italic][features=default]
+\definefontsynonym[OptimaBlack] [name:Optima ExtraBlack] [features=default]
\stoptypescript
\starttypescript[sans][optima][name]
@@ -323,12 +329,12 @@
\starttypescript[sans][gillsans,gillsanslt][uc]
-\definefontsynonym[GillSans] [name:Gill Sans\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[GillSansItalic] [name:Gill Sans Italic\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[GillSansBold] [name:Gill Sans Bold\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[GillSansBoldItalic] [name:Gill Sans Bold Italic\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[GillSansLight] [name:Gill Sans Light\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[GillSansLightItalic][name:Gill Sans Light Italic\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[GillSans] [name:Gill Sans] [features=default]
+\definefontsynonym[GillSansItalic] [name:Gill Sans Italic] [features=default]
+\definefontsynonym[GillSansBold] [name:Gill Sans Bold] [features=default]
+\definefontsynonym[GillSansBoldItalic] [name:Gill Sans Bold Italic] [features=default]
+\definefontsynonym[GillSansLight] [name:Gill Sans Light] [features=default]
+\definefontsynonym[GillSansLightItalic][name:Gill Sans Light Italic][features=default]
\stoptypescript
@@ -430,10 +436,10 @@
\starttypescript[serif][timesnewroman][uc]
-\definefontsynonym[MSTimes] [name:Times New Roman\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[MSTimesItalic] [name:Times New Roman Italic\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[MSTimesBold] [name:Times New Roman Bold\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[MSTimesBoldItalic][name:Times New Roman Bold Italic\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[MSTimes] [name:Times New Roman] [features=default]
+\definefontsynonym[MSTimesItalic] [name:Times New Roman Italic] [features=default]
+\definefontsynonym[MSTimesBold] [name:Times New Roman Bold] [features=default]
+\definefontsynonym[MSTimesBoldItalic][name:Times New Roman Bold Italic][features=default]
\stoptypescript
@@ -451,10 +457,10 @@
\starttypescript[sans][arial][uc]
-\definefontsynonym[Arial] [name:Arial\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[ArialItalic] [name:Arial Italic\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[ArialBold] [name:Arial Bold\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[ArialBoldItalic][name:Arial Bold Italic\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[Arial] [name:Arial] [features=default]
+\definefontsynonym[ArialItalic] [name:Arial Italic] [features=default]
+\definefontsynonym[ArialBold] [name:Arial Bold] [features=default]
+\definefontsynonym[ArialBoldItalic][name:Arial Bold Italic][features=default]
\stoptypescript
@@ -476,10 +482,10 @@
\starttypescript [serif] [lucida] [uc]
- \definefontsynonym [LucidaBright] [name:Lucida Bright\xetexcolon mapping=tex-text] [encoding=uc]
- \definefontsynonym [LucidaBright-Demi] [name:Lucida Bright Demibold\xetexcolon mapping=tex-text][encoding=uc]
- \definefontsynonym [LucidaBright-DemiItalic] [name:Lucida Bright Demibold\xetexcolon mapping=tex-text][encoding=uc]
- \definefontsynonym [LucidaBright-Italic] [name:Lucida Bright\xetexcolon mapping=tex-text] [encoding=uc]
+ \definefontsynonym [LucidaBright] [name:Lucida Bright] [features=default]
+ \definefontsynonym [LucidaBright-Demi] [name:Lucida Bright Demibold][features=default]
+ \definefontsynonym [LucidaBright-DemiItalic] [name:Lucida Bright Demibold][features=default]
+ \definefontsynonym [LucidaBright-Italic] [name:Lucida Bright] [features=default]
\definefontsynonym [LucidaBrightSmallcaps] [LucidaBright]
\definefontsynonym [LucidaBrightSmallcaps-Demi][LucidaBright-Demi]
@@ -488,10 +494,10 @@
\stoptypescript
\starttypescript [sans] [lucida] [uc]
- \definefontsynonym [LucidaSans] [name:Lucida Sans Regular\xetexcolon mapping=tex-text] [encoding=uc]
- \definefontsynonym [LucidaSans-Demi] [name:Lucida Sans Demibold Roman\xetexcolon mapping=tex-text] [encoding=uc]
- \definefontsynonym [LucidaSans-DemiItalic][name:Lucida Sans Demibold Italic\xetexcolon mapping=tex-text][encoding=uc]
- \definefontsynonym [LucidaSans-Italic] [name:Lucida Sans Italic\xetexcolon mapping=tex-text] [encoding=uc]
+ \definefontsynonym [LucidaSans] [name:Lucida Sans Regular] [features=default]
+ \definefontsynonym [LucidaSans-Demi] [name:Lucida Sans Demibold Roman] [features=default]
+ \definefontsynonym [LucidaSans-DemiItalic][name:Lucida Sans Demibold Italic][features=default]
+ \definefontsynonym [LucidaSans-Italic] [name:Lucida Sans Italic] [features=default]
\definefontsynonym [LucidaSans-Bold] [LucidaSans-Demi]
\definefontsynonym [LucidaSans-BoldItalic][LucidaSans-DemiItalic]
@@ -509,7 +515,7 @@
\starttypescript [calligraphy] [lucida] [uc]
- \definefontsynonym[LucidaCalligraphy-Italic][name:Lucida Calligraphy Italic\xetexcolon mapping=tex-text][encoding=uc]
+ \definefontsynonym[LucidaCalligraphy-Italic][name:Lucida Calligraphy Italic][features=default]
\stoptypescript
@@ -517,16 +523,16 @@
\starttypescript[handwriting][lucida][uc]
- \definefontsynonym[LucidaHandwriting-Italic][name:Lucida Handwriting Italic\xetexcolon mapping=tex-text][encoding=uc]
+ \definefontsynonym[LucidaHandwriting-Italic][name:Lucida Handwriting Italic][features=default]
\stoptypescript
\starttypescript[fax][lucida][uc]
- \definefontsynonym[LucidaFax] [name:Lucida Fax Regular\xetexcolon mapping=tex-text] [encoding=uc]
- \definefontsynonym[LucidaFax-Demi] [name:Lucida Fax Demibold\xetexcolon mapping=tex-text] [encoding=uc]
- \definefontsynonym[LucidaFax-DemiItalic][name:Lucida Fax Demibold Italic\xetexcolon mapping=tex-text][encoding=uc]
- \definefontsynonym[LucidaFax-Italic] [name:Lucida Fax Italic\xetexcolon mapping=tex-text] [encoding=uc]
+ \definefontsynonym[LucidaFax] [name:Lucida Fax Regular] [features=default]
+ \definefontsynonym[LucidaFax-Demi] [name:Lucida Fax Demibold] [features=default]
+ \definefontsynonym[LucidaFax-DemiItalic][name:Lucida Fax Demibold Italic][features=default]
+ \definefontsynonym[LucidaFax-Italic] [name:Lucida Fax Italic] [features=default]
\stoptypescript
@@ -536,8 +542,8 @@
\starttypescript[serif][gentium][uc]
-\definefontsynonym[Gentium] [name:Gentium\xetexcolon mapping=tex-text] [encoding=uc]
-\definefontsynonym[GentiumItalic][name:Gentium Italic\xetexcolon mapping=tex-text][encoding=uc]
+\definefontsynonym[Gentium] [name:Gentium] [features=default]
+\definefontsynonym[GentiumItalic][name:Gentium Italic][features=default]
\stoptypescript
diff --git a/tex/context/base/unic-ini.tex b/tex/context/base/unic-ini.tex
index f0f219182..1b2af197b 100644
--- a/tex/context/base/unic-ini.tex
+++ b/tex/context/base/unic-ini.tex
@@ -233,16 +233,13 @@
% beware, this may change: #1 rawchar (=> `#1 and such, saves tokens)
\def\utftwouniglph#1#2%
- {\@EA\unicodechar\@EA{\the\numexpr\utf@a*(#1-\utf@d)+%
- `#2-\utf@g\relax}}
+ {\@EA\unicodechar\@EA{\the\numexpr\utf@a*(#1-\utf@d)+`#2-\utf@g\relax}}
\def\utfthreeuniglph#1#2#3%
- {\@EA\unicodechar\@EA{\the\numexpr\utf@b*(#1-\utf@e)+%
- \utf@a*(`#2-\utf@g)+`#3-\utf@g\relax}}
+ {\@EA\unicodechar\@EA{\the\numexpr\utf@b*(#1-\utf@e)+\utf@a*(`#2-\utf@g)+`#3-\utf@g\relax}}
\def\utffouruniglph#1#2#3#4%
- {\@EA\unicodechar\@EA{\the\numexpr\utf@c*(#1-\utf@f)+%
- \utf@b*(`#2-\utf@g)+\utf@a*(`#3-\utf@g)+`#4-\utf@g\relax}}
+ {\@EA\unicodechar\@EA{\the\numexpr\utf@c*(#1-\utf@f)+\utf@b*(`#2-\utf@g)+\utf@a*(`#3-\utf@g)+`#4-\utf@g\relax}}
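The collapsed definitions compute the code point directly from the raw UTF-8 bytes; the \utf@a, \utf@b and \utf@c registers appear to be powers of 64 and \utf@d, \utf@e, \utf@f, \utf@g the lead and continuation byte bases (they are defined elsewhere, not in this patch). A hedged Lua sketch of the same arithmetic for the two and three byte cases, with those constants spelled out as numbers:

-- sketch of the arithmetic behind the uniglph macros, assuming the usual
-- UTF-8 layout; b1..b3 are raw byte values (numbers, not characters)
local function utftwo(b1, b2)
    return 0x40 * (b1 - 0xC0) + (b2 - 0x80)
end

local function utfthree(b1, b2, b3)
    return 0x1000 * (b1 - 0xE0) + 0x40 * (b2 - 0x80) + (b3 - 0x80)
end

print(utftwo(0xC3, 0xA9))          -- 0xE9, LATIN SMALL LETTER E WITH ACUTE
print(utfthree(0xE2, 0x82, 0xAC))  -- 0x20AC, EURO SIGN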
% \def\keeputfcharacters
% {\def\utftwouniglph ##1##2{\rawcharacter{##1}\string##2}%
@@ -749,9 +746,7 @@
\endXETEX
\beginTEX
-
\def\numbertoutf#1{[\number#1]}
-
\endTEX
\def\uchartoutf#1#2%
diff --git a/tex/context/base/x-cml.mkiv b/tex/context/base/x-cml.mkiv
index 12c4600f0..372165092 100644
--- a/tex/context/base/x-cml.mkiv
+++ b/tex/context/base/x-cml.mkiv
@@ -20,16 +20,10 @@
\unprotect
\startxmlsetups xml:cml:process
- \xmlstrip {\xmldocument} {cml:chem|cml:ichem|cml:dchem}
- \xmlstrip {\xmldocument} {cml:reaction}
- \xmlstrip {\xmldocument} {cml:molecule}
- \xmlstrip {\xmldocument} {cml:ion}
- \xmlstrip {\xmldocument} {cml:structure}
- \xmlgrab {\xmldocument} {cml:chem|cml:dchem|cml:ichem|cml:reaction|cml:molecule|cml:ion|cml:atom} {*}
- \xmlgrab {\xmldocument} {cml:structure|cml:component|cml:forever} {*}
-% \xmlgrab {\xmldocument} {cml:*} {*}
+ \xmlstrip {\xmldocument} {cml:chem|cml:ichem|cml:dchem|cml:reaction|cml:molecule|cml:ion|cml:structure}
+ \xmlgrab {\xmldocument} {cml:*} {*}
\xmlgrab {\xmldocument} {cml:gives|cml:equilibrium|cml:mesomeric} {cml:arrow}
\xmlgrab {\xmldocument} {cml:plus|cml:minus|cml:equal} {cml:operator}
\xmlgrab {\xmldocument} {cml:bond|cml:singlebond|cml:doublebound|cml:triplebond} {cml:bond}
diff --git a/tex/context/base/x-fo.tex b/tex/context/base/x-fo.tex
index 1ce18e509..9c09fe0db 100644
--- a/tex/context/base/x-fo.tex
+++ b/tex/context/base/x-fo.tex
@@ -2470,7 +2470,7 @@ leader-pattern-width=12pt,
\startsetups fo:position:absolute:stop
\egroup
% perhaps postpone till the otr, so that text/realfolio is solved
- \edef\FOpartag{p:\parposcounter}
+ \edef\FOpartag{p:\number\parposcounter}
\edef\FOtxttag{text:\realfolio}
\FOcontainerWW\MPplus\FOpartag{1}{0pt}
\FOcontainerHH\zeropoint % todo: add anchors to each 'object'
diff --git a/tex/context/base/x-mml.mkiv b/tex/context/base/x-mml.mkiv
index a0731b45d..8cbf07f08 100644
--- a/tex/context/base/x-mml.mkiv
+++ b/tex/context/base/x-mml.mkiv
@@ -26,8 +26,9 @@
% \xmlregistersetup{xml:mml:process}
+
\startxmlsetups xml:mml:process
- \xmlutfize {\xmldocument}
+% \xmlutfize {\xmldocument}
\xmlgrab {\xmldocument} {mml:*} {*}
\stopxmlsetups
diff --git a/tex/context/base/x-newmml.tex b/tex/context/base/x-newmml.tex
index cd44914d4..0d17aa4a5 100644
--- a/tex/context/base/x-newmml.tex
+++ b/tex/context/base/x-newmml.tex
@@ -548,7 +548,7 @@
\def\MMLccartesian#1%
{\def\MMLcsep{+}#1\getXMLentity{imaginaryi}}
-% float will be obsolete, an is replace by e-notation
+% float will be obsolete, and is replaced by e-notation
\def\MMLcfloat#1%
{\doifelse\@@MMLfloatsymbol\v!no
diff --git a/tex/context/base/xtag-exp.tex b/tex/context/base/xtag-exp.tex
index 7f289dbf5..14c3c7a71 100644
--- a/tex/context/base/xtag-exp.tex
+++ b/tex/context/base/xtag-exp.tex
@@ -27,8 +27,7 @@
\def\startXMLreading
{\increment \XMLfilenesting \relax
- \ifnum\XMLfilenesting=1
-%\ifcase\XMLfilenesting\or
+ \ifcase\XMLfilenesting\or
\ifprocessingXML
\let\restoreXMLelements\relax
\else
@@ -38,8 +37,7 @@
\fi}
\def\stopXMLreading
- {\ifnum\XMLfilenesting=1
-%\ifcase\XMLfilenesting\or
+ {\ifcase\XMLfilenesting\or
\restoreXMLelements
\let\restoreXMLelements\relax
\fi
diff --git a/tex/context/interface/cont-cz.xml b/tex/context/interface/cont-cz.xml
index 11174cbbe..c82e72d0c 100644
--- a/tex/context/interface/cont-cz.xml
+++ b/tex/context/interface/cont-cz.xml
@@ -18,7 +18,10 @@
+
+
+
@@ -4224,6 +4227,7 @@
+
@@ -6065,6 +6069,9 @@
+
+
+
diff --git a/tex/context/interface/cont-de.xml b/tex/context/interface/cont-de.xml
index 5cae23ebe..3b2638f60 100644
--- a/tex/context/interface/cont-de.xml
+++ b/tex/context/interface/cont-de.xml
@@ -18,7 +18,10 @@
+
+
+
@@ -4224,6 +4227,7 @@
+
@@ -6065,6 +6069,9 @@
+
+
+
diff --git a/tex/context/interface/cont-en.xml b/tex/context/interface/cont-en.xml
index 9ff000b2c..69884654e 100644
--- a/tex/context/interface/cont-en.xml
+++ b/tex/context/interface/cont-en.xml
@@ -18,7 +18,10 @@
+
+
+
@@ -4224,6 +4227,7 @@
+
@@ -6065,6 +6069,9 @@
+
+
+
diff --git a/tex/context/interface/cont-fr.xml b/tex/context/interface/cont-fr.xml
index aed3c6efb..b74b60e4f 100644
--- a/tex/context/interface/cont-fr.xml
+++ b/tex/context/interface/cont-fr.xml
@@ -18,7 +18,10 @@
+
+
+
@@ -4224,6 +4227,7 @@
+
@@ -6065,6 +6069,9 @@
+
+
+
diff --git a/tex/context/interface/cont-it.xml b/tex/context/interface/cont-it.xml
index 0b5c91052..fbdb3acd2 100644
--- a/tex/context/interface/cont-it.xml
+++ b/tex/context/interface/cont-it.xml
@@ -18,7 +18,10 @@
+
+
+
@@ -4224,6 +4227,7 @@
+
@@ -6065,6 +6069,9 @@
+
+
+
diff --git a/tex/context/interface/cont-nl.xml b/tex/context/interface/cont-nl.xml
index 02399e877..84d9a4b17 100644
--- a/tex/context/interface/cont-nl.xml
+++ b/tex/context/interface/cont-nl.xml
@@ -18,7 +18,10 @@
+
+
+
@@ -4224,6 +4227,7 @@
+
@@ -6065,6 +6069,9 @@
+
+
+
diff --git a/tex/context/interface/cont-ro.xml b/tex/context/interface/cont-ro.xml
index 0f34ae02a..0d2b5af05 100644
--- a/tex/context/interface/cont-ro.xml
+++ b/tex/context/interface/cont-ro.xml
@@ -18,7 +18,10 @@
+
+
+
@@ -4224,6 +4227,7 @@
+
@@ -6065,6 +6069,9 @@
+
+
+
diff --git a/tex/context/interface/keys-cz.xml b/tex/context/interface/keys-cz.xml
index 068d08fec..077d6f2b4 100644
--- a/tex/context/interface/keys-cz.xml
+++ b/tex/context/interface/keys-cz.xml
@@ -1,6 +1,6 @@
-
+
@@ -125,6 +125,7 @@
+
@@ -446,6 +447,7 @@
+
diff --git a/tex/context/interface/keys-de.xml b/tex/context/interface/keys-de.xml
index a7488a838..6a02edfe4 100644
--- a/tex/context/interface/keys-de.xml
+++ b/tex/context/interface/keys-de.xml
@@ -1,6 +1,6 @@
-
+
@@ -125,6 +125,7 @@
+
@@ -446,6 +447,7 @@
+
diff --git a/tex/context/interface/keys-en.xml b/tex/context/interface/keys-en.xml
index a9a2d6262..2bc21ab38 100644
--- a/tex/context/interface/keys-en.xml
+++ b/tex/context/interface/keys-en.xml
@@ -1,6 +1,6 @@
-
+
@@ -125,6 +125,7 @@
+
@@ -446,6 +447,7 @@
+
diff --git a/tex/context/interface/keys-fr.xml b/tex/context/interface/keys-fr.xml
index cbbcd926c..dee31e9e3 100644
--- a/tex/context/interface/keys-fr.xml
+++ b/tex/context/interface/keys-fr.xml
@@ -1,6 +1,6 @@
-
+
@@ -125,6 +125,7 @@
+
@@ -446,6 +447,7 @@
+
diff --git a/tex/context/interface/keys-it.xml b/tex/context/interface/keys-it.xml
index d2061a6c7..bf14c9594 100644
--- a/tex/context/interface/keys-it.xml
+++ b/tex/context/interface/keys-it.xml
@@ -1,6 +1,6 @@
-
+
@@ -125,6 +125,7 @@
+
@@ -446,6 +447,7 @@
+
diff --git a/tex/context/interface/keys-nl.xml b/tex/context/interface/keys-nl.xml
index d38260a5e..859ee5984 100644
--- a/tex/context/interface/keys-nl.xml
+++ b/tex/context/interface/keys-nl.xml
@@ -1,6 +1,6 @@
-
+
@@ -125,6 +125,7 @@
+
@@ -446,6 +447,7 @@
+
diff --git a/tex/context/interface/keys-ro.xml b/tex/context/interface/keys-ro.xml
index d08f55ab3..6c8083631 100644
--- a/tex/context/interface/keys-ro.xml
+++ b/tex/context/interface/keys-ro.xml
@@ -1,6 +1,6 @@
-
+
@@ -125,6 +125,7 @@
+
@@ -446,6 +447,7 @@
+
diff --git a/tex/context/sample/sample.tex b/tex/context/sample/sample.tex
index a173072aa..3a433183e 100644
--- a/tex/context/sample/sample.tex
+++ b/tex/context/sample/sample.tex
@@ -36,6 +36,8 @@ used in testing bibliographic references and citations.
\NC linden.tex \NC Eugene Linden \NC The Winds of Change, Climate, Weather, and the
Destruction of Civilizations, \endgraf
Simon \& Schuster, 2006, p.106 \NC \NR
+\NC weisman.tex \NC Alan Weisman \NC The World Without Us, \endgraf
+ Thomas Dunne Books, 2007, p.160 \NC \NR
\stoptabulate
% Tufte: This quote will always produce hyphenated text, apart from the content,
@@ -52,4 +54,7 @@ used in testing bibliographic references and citations.
% The Universe in a Nutshell: a beautifully designed book (companion of A Short History
% of Time)
+% The World Without Us: A properly typeset, very readable book. Read it and you'll look at
+% the world around you differently (and a bit more frightened).
+
\stoptext
diff --git a/tex/context/sample/weisman.tex b/tex/context/sample/weisman.tex
new file mode 100644
index 000000000..7526d407a
--- /dev/null
+++ b/tex/context/sample/weisman.tex
@@ -0,0 +1,5 @@
+Since the mid-1990s, humans have taken an unprecedented step in Earthly
+annals by introducing not just exotic flora or fauna from one ecosystem
+into another, but actually inserting exotic genes into the operating
+systems of individual plants and animals, where they're intended to do
+exactly the same thing: copy themselves, over and over.
diff --git a/tex/context/test/x-cml-test.xml b/tex/context/test/x-cml-test.xml
index a177e5e2a..b7a3893de 100644
--- a/tex/context/test/x-cml-test.xml
+++ b/tex/context/test/x-cml-test.xml
@@ -1,5 +1,12 @@
+
+
+ H
+ O
+
+
+
diff --git a/tex/generic/context/mptopdf.tex b/tex/generic/context/mptopdf.tex
index 84a8aa380..bb9f27d0c 100644
--- a/tex/generic/context/mptopdf.tex
+++ b/tex/generic/context/mptopdf.tex
@@ -112,6 +112,7 @@
\def\processMPfile#1 %
{\pdfoutput=1
+ \pdfpkresolution600
\pdfcompresslevel=9
\chardef\makeMPintoPDFobject=1
\hsize=100in
@@ -143,4 +144,32 @@
%D file can be converted to \EPS\ using for instance the
%D \PDFTOPS\ program (in \WEBC) or \GHOSTSCRIPT.
+%D A few helpers:
+
+{\catcode`\.=12
+ \catcode`\p=12
+ \catcode`\t=12
+ \gdef\WITHOUTPT#1pt{#1}}
+
+\def\withoutpt#1%
+ {\expandafter\WITHOUTPT#1}
+
+\def\negatecolorcomponent#1% #1 = \macro
+ {\scratchdimen1pt\advance\scratchdimen-#1\onepoint
+ \ifdim\scratchdimen<\zeropoint\scratchdimen\zeropoint\fi
+ \edef#1{\withoutpt\the\scratchdimen}}
+
+\let\negatedcolorcomponent\firstofoneargument
+
+\def\negatedcolorcomponent#1%
+ {\ifdim\dimexpr1pt-#1pt\relax<\zeropoint
+ 0pt%
+ \else
+ \expandafter\withoutpt\the\dimexpr1pt-#1pt\relax
+ \fi}
+
+\def\negatecolorcomponent#1% #1 = \macro
+ {\edef#1{\negatedcolorcomponent{#1}}}
+
+
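The \negatedcolorcomponent helper computes 1 minus the component and clamps the result at zero; the subtraction is done in dimen arithmetic, with \withoutpt stripping the pt unit off again. A minimal Lua sketch of that clamp (the real macro works on textual representations, this one on plain numbers):

-- minimal sketch of the clamp performed by \negatedcolorcomponent; the
-- TeX version does the same with dimens and then strips the pt unit
local function negatedcolorcomponent(c)
    local v = 1 - c
    return v < 0 and 0 or v
end

print(negatedcolorcomponent(0.3))  -- 0.7
print(negatedcolorcomponent(1.2))  -- clamped to 0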
\dump
--
cgit v1.2.3