Diffstat (limited to 'tex')
38 files changed, 708 insertions, 184 deletions
diff --git a/tex/context/base/bibl-tra.mkiv b/tex/context/base/bibl-tra.mkiv index 08d8eb6fe..914470fbe 100644 --- a/tex/context/base/bibl-tra.mkiv +++ b/tex/context/base/bibl-tra.mkiv @@ -339,19 +339,32 @@ %D because similar numbers can be confusing. So, for the moment this is not %D supported in \MKIV. (So no: see reference [3-5,9] in "some other document") -\def\usepublications[#1]% +\unexpanded\def\usepublications[#1]% {\processcommalist[#1]\dousepublications} \def\dousepublications#1% {\doonlyonce{#1.\f!bibextension}{\dodousepublications{#1}}} +% \def\dodousepublications#1% brr, this par stuff +% {\let\@@savedpar\par +% \let\par\ignorespaces +% \ifhmode\kern\zeropoint\fi +% \readfile{#1.\f!bibextension} +% {\showmessage\m!publications{4}{#1.\f!bibextension}} +% {\showmessage\m!publications{2}{#1.\f!bibextension}}% +% \ifhmode\removeunwantedspaces\fi +% \let\par\@@savedpar} + \def\dodousepublications#1% brr, this par stuff {\let\@@savedpar\par \let\par\ignorespaces \ifhmode\kern\zeropoint\fi + \pushcatcodetable + \setcatcodetable\ctxcatcodes \readfile{#1.\f!bibextension} {\showmessage\m!publications{4}{#1.\f!bibextension}} {\showmessage\m!publications{2}{#1.\f!bibextension}}% + \popcatcodetable \ifhmode\removeunwantedspaces\fi \let\par\@@savedpar} diff --git a/tex/context/base/char-def.lua b/tex/context/base/char-def.lua index 71110ab00..118fbc701 100644 --- a/tex/context/base/char-def.lua +++ b/tex/context/base/char-def.lua @@ -417,6 +417,7 @@ characters.data={ direction="es", linebreak="hy", mathsymbol=0x2212, + mathfiller="relfill", mathextensible='h', unicodeslot=0x002D, }, @@ -601,6 +602,7 @@ characters.data={ name="Relbar", }, }, + mathfiller="equalfill", mathextensible='h', unicodeslot=0x003D, }, @@ -59502,6 +59504,7 @@ characters.data={ description="LEFTWARDS TWO HEADED ARROW", direction="on", linebreak="al", + mathfiller="twoheadleftarrowfill", mathextensible="l", mathclass="relation", mathname="twoheadleftarrow", @@ -59522,6 +59525,7 @@ characters.data={ description="RIGHTWARDS TWO HEADED ARROW", direction="on", linebreak="al", + mathfiller="twoheadrightarrowfill", mathextensible="r", mathclass="relation", mathname="twoheadrightarrow", @@ -59582,6 +59586,7 @@ characters.data={ description="RIGHTWARDS ARROW FROM BAR", direction="on", linebreak="al", + mathfiller="mapstofill", mathextensible="r", mathclass="relation", mathname="mapsto", @@ -59614,6 +59619,7 @@ characters.data={ direction="on", linebreak="al", mathextensible="l", + mathfiller="hookleftarrowfill", mathclass="relation", mathname="hookleftarrow", unicodeslot=0x21A9, @@ -59623,6 +59629,7 @@ characters.data={ description="RIGHTWARDS ARROW WITH HOOK", direction="on", linebreak="al", + mathfiller="hookrightarrowfill", mathextensible="r", mathclass="relation", mathname="hookrightarrow", @@ -59820,6 +59827,8 @@ characters.data={ description="LEFTWARDS HARPOON WITH BARB UPWARDS", direction="on", linebreak="al", + mathfiller="leftharpoonupfill", + mathextensible="l", mathclass="relation", mathname="leftharpoonup", unicodeslot=0x21BC, @@ -59829,6 +59838,8 @@ characters.data={ description="LEFTWARDS HARPOON WITH BARB DOWNWARDS", direction="on", linebreak="al", + mathfiller="leftharpoondownfill", + mathextensible="l", mathclass="relation", mathname="leftharpoondown", unicodeslot=0x21BD, @@ -59865,6 +59876,8 @@ characters.data={ description="RIGHTWARDS HARPOON WITH BARB UPWARDS", direction="on", linebreak="al", + mathfiller="rightharpoonupfill", + mathextensible="r", mathclass="relation", mathname="rightharpoonup", 
unicodeslot=0x21C0, @@ -59874,6 +59887,9 @@ characters.data={ description="RIGHTWARDS HARPOON WITH BARB DOWNWARDS", direction="on", linebreak="al", + mathfiller="rightharpoondownfill", + mathextensible="r", + mathclass="relation", mathclass="relation", mathname="rightharpoondown", unicodeslot=0x21C1, @@ -59902,6 +59918,7 @@ characters.data={ description="RIGHTWARDS ARROW OVER LEFTWARDS ARROW", direction="on", linebreak="al", + mathfiller="rightoverleftarrowfill", mathextensible="h", mathclass="relation", mathname="rightleftarrows", @@ -59924,6 +59941,7 @@ characters.data={ description="LEFTWARDS ARROW OVER RIGHTWARDS ARROW", direction="on", linebreak="al", + mathfiller="lefgtoverrightarrowfill", mathextensible="h", mathclass="relation", mathname="leftrightarrows", @@ -59974,6 +59992,8 @@ characters.data={ description="LEFTWARDS HARPOON OVER RIGHTWARDS HARPOON", direction="on", linebreak="al", + mathfiller="leftrightharpoonsfill", + mathextensible="h", mathclass="relation", mathname="leftrightharpoons", unicodeslot=0x21CB, @@ -59983,6 +60003,8 @@ characters.data={ description="RIGHTWARDS HARPOON OVER LEFTWARDS HARPOON", direction="on", linebreak="al", + mathfiller="rightleftharpoonsfill", + mathextensible="h", mathclass="relation", mathname="rightleftharpoons", unicodeslot=0x21CC, @@ -61614,8 +61636,9 @@ characters.data={ direction="on", linebreak="al", mathclass="relation", - mathname="nequiv", mathextensible='h', + mathfiller="triplerelfill", + mathname="nequiv", specials={ "char", 0x2261, 0x0338 }, unicodeslot=0x2262, }, @@ -72851,6 +72874,7 @@ characters.data={ direction="on", linebreak="al", mathextensible="h", + mathfiller="leftrightarrowfill", mathclass="relation", mathname="longleftrightarrow", unicodeslot=0x27F7, @@ -72861,6 +72885,7 @@ characters.data={ direction="on", linebreak="al", mathextensible="l", + mathfiller="Leftarrowfill", mathclass="relation", mathname="Longleftarrow", unicodeslot=0x27F8, @@ -72871,6 +72896,7 @@ characters.data={ direction="on", linebreak="al", mathextensible="r", + mathfiller="Rightarrowfill", mathclass="relation", mathname="Longrightarrow", unicodeslot=0x27F9, @@ -72881,6 +72907,7 @@ characters.data={ direction="on", linebreak="al", mathextensible="h", + mathfiller="Leftrightarrowfill", mathclass="relation", mathname="Longleftrightarrow", unicodeslot=0x27FA, diff --git a/tex/context/base/cont-new.mkii b/tex/context/base/cont-new.mkii index 0ced751ef..3ce67992e 100644 --- a/tex/context/base/cont-new.mkii +++ b/tex/context/base/cont-new.mkii @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. -\newcontextversion{2013.01.10 01:04} +\newcontextversion{2013.01.13 23:10} %D This file is loaded at runtime, thereby providing an %D excellent place for hacks, patches, extensions and new diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv index f6097bdf4..18153995f 100644 --- a/tex/context/base/cont-new.mkiv +++ b/tex/context/base/cont-new.mkiv @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. -\newcontextversion{2013.01.10 01:04} +\newcontextversion{2013.01.13 23:10} %D This file is loaded at runtime, thereby providing an excellent place for %D hacks, patches, extensions and new features. 
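The char-def.lua hunks above attach a mathfiller field (and, for the harpoons, a mathextensible direction) to existing entries in characters.data. A minimal Lua sketch of how such a field can be inspected once that table is loaded; the helper name collectmathfillers is illustrative only and not part of ConTeXt:

local function collectmathfillers(data)
    -- walk the character database and keep the slots that now carry a filler name
    local result = { }
    for slot, chr in next, data do
        if chr.mathfiller then
            result[#result+1] = { slot = slot, name = chr.mathname, filler = chr.mathfiller }
        end
    end
    table.sort(result, function(a,b) return a.slot < b.slot end)
    return result
end

-- for _, t in ipairs(collectmathfillers(characters.data)) do
--     print(string.format("U+%05X  %-20s  %s", t.slot, t.name or "-", t.filler))
-- end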
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf Binary files differnew file mode 100644 index 000000000..43f4790bd --- /dev/null +++ b/tex/context/base/context-version.pdf diff --git a/tex/context/base/context-version.png b/tex/context/base/context-version.png Binary files differindex cc76b02df..3157fb849 100644 --- a/tex/context/base/context-version.png +++ b/tex/context/base/context-version.png diff --git a/tex/context/base/context.mkii b/tex/context/base/context.mkii index b2e6fa6ed..6abbe0653 100644 --- a/tex/context/base/context.mkii +++ b/tex/context/base/context.mkii @@ -20,7 +20,7 @@ %D your styles an modules. \edef\contextformat {\jobname} -\edef\contextversion{2013.01.10 01:04} +\edef\contextversion{2013.01.13 23:10} %D For those who want to use this: diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv index 97b71f5a4..ded53e1cf 100644 --- a/tex/context/base/context.mkiv +++ b/tex/context/base/context.mkiv @@ -25,7 +25,7 @@ %D up and the dependencies are more consistent. \edef\contextformat {\jobname} -\edef\contextversion{2013.01.10 01:04} +\edef\contextversion{2013.01.13 23:10} %D For those who want to use this: @@ -233,6 +233,7 @@ \loadmarkfile{spac-ali} \loadmarkfile{spac-hor} +\loadmarkfile{spac-flr} \loadmarkfile{spac-ver} \loadmarkfile{spac-lin} \loadmarkfile{spac-pag} @@ -415,7 +416,8 @@ \loadmarkfile{math-for} \loadmarkfile{math-def} \loadmarkfile{math-ali} -\loadmarkfile{math-arr} +%loadmarkfile{math-arr} +\loadmkvifile{math-stc} \loadmarkfile{math-frc} \loadmarkfile{math-scr} \loadmarkfile{math-int} diff --git a/tex/context/base/data-ini.lua b/tex/context/base/data-ini.lua index 37b4f62ca..cad3eac14 100644 --- a/tex/context/base/data-ini.lua +++ b/tex/context/base/data-ini.lua @@ -32,7 +32,7 @@ local resolvers = resolvers texconfig.kpse_init = false texconfig.shell_escape = 't' -if kpse and kpse.default_texmfcnf then +if not (environment and environment.default_texmfcnf) and kpse and kpse.default_texmfcnf then local default_texmfcnf = kpse.default_texmfcnf() -- looks more like context: default_texmfcnf = gsub(default_texmfcnf,"$SELFAUTOLOC","selfautoloc:") diff --git a/tex/context/base/data-lua.lua b/tex/context/base/data-lua.lua index de20f4820..49033461e 100644 --- a/tex/context/base/data-lua.lua +++ b/tex/context/base/data-lua.lua @@ -46,7 +46,7 @@ local clibextras = { } local pattern = Cs(P("!")^0 / "" * (P("/") * P(-1) / "/" + P("/")^1 / "/" + 1)^0) -local function cleanpath(path) --hm, don't we have a helper for this? +local function cleanpath(path) -- hm, don't we have a helper for this? return resolvers.resolve(lpegmatch(pattern,path)) end diff --git a/tex/context/base/data-res.lua b/tex/context/base/data-res.lua index 41a590228..02ef35f5f 100644 --- a/tex/context/base/data-res.lua +++ b/tex/context/base/data-res.lua @@ -863,7 +863,7 @@ local function collect_files(names) if dname == "" or find(dname,"^%.") then dname = false else - dname = gsub(dname,"*","%.*") + dname = gsub(dname,"%*",".*") dname = "/" .. dname .. 
"$" end local hashes = instance.hashes diff --git a/tex/context/base/file-job.lua b/tex/context/base/file-job.lua index 572697443..533103ec6 100644 --- a/tex/context/base/file-job.lua +++ b/tex/context/base/file-job.lua @@ -913,7 +913,7 @@ function commands.logoptions() end function commands.doifelsecontinuewithfile(inpname) - local continue = addsuffix(inpname,"tex") == addsuffix(environment.jobname,"tex") + local continue = addsuffix(inpname,"tex") == addsuffix(environment.inputfilename,"tex") if continue then report_system("continuing input file %q",inpname) end diff --git a/tex/context/base/font-con.lua b/tex/context/base/font-con.lua index 516dffe98..58af5a04f 100644 --- a/tex/context/base/font-con.lua +++ b/tex/context/base/font-con.lua @@ -419,6 +419,8 @@ function constructors.scale(tfmdata,specification) target.psname = psname target.name = name -- + -- inspect(properties) + -- properties.fontname = fontname properties.fullname = fullname properties.filename = filename diff --git a/tex/context/base/font-otn.lua b/tex/context/base/font-otn.lua index 5dba663e4..2c6016427 100644 --- a/tex/context/base/font-otn.lua +++ b/tex/context/base/font-otn.lua @@ -250,9 +250,6 @@ local handlers = { } local rlmode = 0 local featurevalue = false --- we cannot optimize with "start = first_glyph(head)" because then we don't --- know which rlmode we're in which messes up cursive handling later on --- -- head is always a whatsit so we can safely assume that head is not changed -- we use this for special testing and documentation @@ -2206,7 +2203,7 @@ local function featuresprocessor(head,font,attr) -- font interactions and then we do need the full sweeps. -- Keeping track of the headnode is needed for devanagari (I generalized it a bit - -- so that multiple cases are also covered. + -- so that multiple cases are also covered.) 
for s=1,#sequences do local dataset = datasets[s] diff --git a/tex/context/base/l-file.lua b/tex/context/base/l-file.lua index f34bed5fd..2a3d6991e 100644 --- a/tex/context/base/l-file.lua +++ b/tex/context/base/l-file.lua @@ -33,7 +33,8 @@ local noslashes = 1-slashes local name = noperiod^1 local suffix = period/"" * (1-period-slashes)^1 * -1 -local pattern = C((noslashes^0 * slashes^1)^1) +----- pattern = C((noslashes^0 * slashes^1)^1) +local pattern = C((1 - (slashes^1 * noslashes^1 * -1))^1) * P(1) -- there must be a more efficient way local function pathpart(name,default) return name and lpegmatch(pattern,name) or default or "" @@ -45,6 +46,13 @@ local function basename(name) return name and lpegmatch(pattern,name) or name end +-- print(pathpart("file")) +-- print(pathpart("dir/file")) +-- print(pathpart("/dir/file")) +-- print(basename("file")) +-- print(basename("dir/file")) +-- print(basename("/dir/file")) + local pattern = (noslashes^0 * slashes^1)^0 * Cs((1-suffix)^1) * suffix^0 local function nameonly(name) @@ -69,7 +77,7 @@ file.extname = suffixonly -- obsolete -- actually these are schemes local drive = C(R("az","AZ")) * colon -local path = C(((1-slashes)^0 * slashes)^0) +local path = C((noslashes^0 * slashes)^0) local suffix = period * C(P(1-period)^0 * P(-1)) local base = C((1-suffix)^0) local rest = C(P(1)^0) @@ -98,9 +106,14 @@ function file.splitbase(str) return str and lpegmatch(pattern_d,str) -- returns path, base+suffix end -function file.nametotable(str,splitdrive) -- returns table +---- stripslash = C((1 - P("/")^1*P(-1))^0) + +function file.nametotable(str,splitdrive) if str then local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str) + -- if path ~= "" then + -- path = lpegmatch(stripslash,path) -- unfortunate hack, maybe this becomes default + -- end if splitdrive then return { path = path, @@ -121,6 +134,20 @@ function file.nametotable(str,splitdrive) -- returns table end end +-- print(file.splitname("file")) +-- print(file.splitname("dir/file")) +-- print(file.splitname("/dir/file")) +-- print(file.splitname("file")) +-- print(file.splitname("dir/file")) +-- print(file.splitname("/dir/file")) + +-- inspect(file.nametotable("file.ext")) +-- inspect(file.nametotable("dir/file.ext")) +-- inspect(file.nametotable("/dir/file.ext")) +-- inspect(file.nametotable("file.ext")) +-- inspect(file.nametotable("dir/file.ext")) +-- inspect(file.nametotable("/dir/file.ext")) + local pattern = Cs(((period * noperiod^1 * -1)/"" + 1)^1) function file.removesuffix(name) diff --git a/tex/context/base/l-lpeg.lua b/tex/context/base/l-lpeg.lua index 407532ebf..8b5389aa1 100644 --- a/tex/context/base/l-lpeg.lua +++ b/tex/context/base/l-lpeg.lua @@ -53,8 +53,10 @@ local report = texio and texio.write_nl or print -- function lpeg.Cmt (l) local p = lpcmt (l) report("LPEG Cmt =") lpprint(l) return p end -- function lpeg.Carg (l) local p = lpcarg(l) report("LPEG Carg =") lpprint(l) return p end -local type, next = type, next +local type, next, tostring = type, next, tostring local byte, char, gmatch, format = string.byte, string.char, string.gmatch, string.format +----- mod, div = math.mod, math.div +local floor = math.floor -- Beware, we predefine a bunch of patterns here and one reason for doing so -- is that we get consistent behaviour in some of the visualizers. 
@@ -778,3 +780,44 @@ end -- faster than find(str,"[\n\r]") when match and # > 7 and always faster when # > 3 patterns.containseol = lpeg.finder(eol) -- (1-eol)^0 * eol + +-- The next pattern^n variant is based on an approach suggested +-- by Roberto: constructing a big repetition in chunks. +-- +-- Being sparse is not needed, and only complicate matters and +-- the number of redundant entries is not that large. + +local function nextstep(n,step,result) + local m = n % step -- mod(n,step) + local d = floor(n/step) -- div(n,step) + if d > 0 then + local v = V(tostring(step)) + local s = result.start + for i=1,d do + if s then + s = v * s + else + s = v + end + end + result.start = s + end + if step > 1 and result.start then + local v = V(tostring(step/2)) + result[tostring(step)] = v * v + end + if step > 0 then + return nextstep(m,step/2,result) + else + return result + end +end + +function lpeg.times(pattern,n) + return P(nextstep(n,2^16,{ "start", ["1"] = pattern })) +end + +-- local p = lpeg.Cs((1 - lpeg.times(lpeg.P("AB"),25))^1) +-- local s = "12" .. string.rep("AB",20) .. "34" .. string.rep("AB",30) .. "56" +-- inspect(p) +-- print(lpeg.match(p,s)) diff --git a/tex/context/base/l-string.lua b/tex/context/base/l-string.lua index 7a919d189..828285ca1 100644 --- a/tex/context/base/l-string.lua +++ b/tex/context/base/l-string.lua @@ -49,7 +49,7 @@ end -- print(string.unquoted('"test"')) function string.quoted(str) - return format("%q",str) -- always " + return format("%q",str) -- always double quote end function string.count(str,pattern) -- variant 3 diff --git a/tex/context/base/luat-env.lua b/tex/context/base/luat-env.lua index 7bce0052d..a00acdc63 100644 --- a/tex/context/base/luat-env.lua +++ b/tex/context/base/luat-env.lua @@ -335,8 +335,8 @@ function environment.loadluafile(filename, version) local lucname, luaname, chunk local basename = file.removesuffix(filename) if basename == filename then - luaname = fiule.addsuffix(basename,luasuffixes.lua) - lucname = fiule.addsuffix(basename,luasuffixes.luc) + luaname = file.addsuffix(basename,luasuffixes.lua) + lucname = file.addsuffix(basename,luasuffixes.luc) else luaname = basename -- forced suffix lucname = nil diff --git a/tex/context/base/luat-fmt.lua b/tex/context/base/luat-fmt.lua index 2d2614ecb..af34fe8ce 100644 --- a/tex/context/base/luat-fmt.lua +++ b/tex/context/base/luat-fmt.lua @@ -6,9 +6,9 @@ if not modules then modules = { } end modules ['luat-fmt'] = { license = "see context related readme files" } - local format = string.format -local quoted = string.quoted +local concat = table.concat +local quoted = string.quoted local luasuffixes = utilities.lua.suffixes local report_format = logs.reporter("resolvers","formats") @@ -16,14 +16,17 @@ local report_format = logs.reporter("resolvers","formats") local function primaryflags() -- not yet ok local trackers = environment.argument("trackers") local directives = environment.argument("directives") - local flags = "" + local flags = { } if trackers and trackers ~= "" then - flags = flags .. "--trackers=" .. quoted(trackers) + flags = { "--trackers=" .. quoted(trackers) } end if directives and directives ~= "" then - flags = flags .. "--directives=" .. quoted(directives) + flags = { "--directives=" .. 
quoted(directives) } + end + if environment.argument("jit") then + flags = { "--jiton" } end - return flags + return concat(flags," ") end function environment.make_format(name) diff --git a/tex/context/base/math-arr.mkiv b/tex/context/base/math-arr.mkiv index e0ef6095e..6824c362e 100644 --- a/tex/context/base/math-arr.mkiv +++ b/tex/context/base/math-arr.mkiv @@ -11,6 +11,10 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. +%D We keep this file around as reference of his things were done in the +%D past. You can still load this module but it has been replaced by more +%D modern code. + \writestatus{loading}{ConTeXt Math Macros / Arrows} \unprotect diff --git a/tex/context/base/math-def.mkiv b/tex/context/base/math-def.mkiv index 61153cfdd..0d80b3de0 100644 --- a/tex/context/base/math-def.mkiv +++ b/tex/context/base/math-def.mkiv @@ -108,7 +108,7 @@ \installcorenamespace{mathbig} -\def\choosemathbig#1#2% so we accent \big{||} as well +\unexpanded\def\choosemathbig#1#2% so we accent \big{||} as well {{\hbox{$% \ifcase\bigmathdelimitermethod \doleftbigmath#2\relax diff --git a/tex/context/base/meta-ini.mkiv b/tex/context/base/meta-ini.mkiv index 5ac7908d5..aea43ee87 100644 --- a/tex/context/base/meta-ini.mkiv +++ b/tex/context/base/meta-ini.mkiv @@ -849,7 +849,7 @@ \fi} \def\meta_start_code_instance#1#2\stopMPcode - {\meta_begin_graphic_group{#1::\s!dummy}% name does not matter + {\meta_begin_graphic_group{#1}% \meta_enable_include \meta_process_graphic{#2}% \meta_end_graphic_group} @@ -861,22 +861,25 @@ \let\stopMPcode\relax \unexpanded\def\MPcode - {\dosinglegroupempty\meta_code} + {\dodoublegroupempty\meta_code} \def\meta_code - {\iffirstargument + {\ifsecondargument \expandafter\meta_code_instance \else \expandafter\meta_code_standard \fi} \def\meta_code_instance#1#2% - {\meta_begin_graphic_group{#1::\s!dummy}% name does not matter + {\meta_begin_graphic_group{#1}% + \meta_enable_include \meta_process_graphic{#2}% \meta_end_graphic_group} -\def\meta_code_standard#1% #2 - {\meta_process_graphic} +\def\meta_code_standard#1#2% + {\let\currentMPinstance\defaultMPinstance + \meta_enable_include + \meta_process_graphic{#1}} % a bit nasty (also needed for compatibility: diff --git a/tex/context/base/mlib-pdf.mkiv b/tex/context/base/mlib-pdf.mkiv index 044c416a2..4deb26203 100644 --- a/tex/context/base/mlib-pdf.mkiv +++ b/tex/context/base/mlib-pdf.mkiv @@ -140,15 +140,17 @@ \let\stopMPLIBtoPDF \directstopMPLIBtoPDF \meta_start_current_graphic \forgetall - \normalexpanded{\noexpand\ctxlua{metapost.graphic( - "\currentMPinstance", - "\currentMPformat", - \!!bs#2;\!!es, - \!!bs\meta_flush_current_initializations;\!!es, - \!!bs\meta_flush_current_preamble;\!!es, - \!!bs\meta_flush_current_instance\!!es, - "all" - )}}% + \normalexpanded{\noexpand\ctxlua{metapost.graphic { + instance = "\currentMPinstance", + format = "\currentMPformat", + data = \!!bs#2;\!!es, + initializations = \!!bs\meta_flush_current_initializations\!!es, + extensions = \!!bs\meta_flush_current_extensions\!!es, + inclusions = \!!bs\meta_flush_current_userinclusions\!!es, + definitions = \!!bs\meta_flush_current_definitions\!!es, + figure = "all", + method = "\MPinstanceparameter\c!method", + }}}% \meta_stop_current_graphic \meta_end_graphic_group} diff --git a/tex/context/base/mlib-pps.lua b/tex/context/base/mlib-pps.lua index 4756690be..43a548c65 100644 --- a/tex/context/base/mlib-pps.lua +++ b/tex/context/base/mlib-pps.lua @@ -6,7 +6,8 @@ if not modules then modules = { } end modules ['mlib-pps'] = { 
license = "see context related readme files", } --- todo: report max textexts +-- todo: make a hashed textext variant where we only process the text once (normally +-- we cannot assume that no macros are involved which influence a next textext local format, gmatch, match, split = string.format, string.gmatch, string.match, string.split local tonumber, type = tonumber, type @@ -215,10 +216,14 @@ local current_format, current_graphic, current_initializations metapost.multipass = false -local textexts = { } +local textexts = { } -- all boxes, optionally with a different color +local texslots = { } -- references to textexts in order or usage +local texorder = { } -- references to textexts by mp index +local textrial = 0 +local texfinal = 0 local scratchbox = 0 -local function freeboxes() -- todo: mp direct list ipv box +local function freeboxes() for n, box in next, textexts do local tn = textexts[n] if tn then @@ -231,6 +236,10 @@ local function freeboxes() -- todo: mp direct list ipv box end end textexts = { } + texslots = { } + texorder = { } + textrial = 0 + texfinal = 0 end metapost.resettextexts = freeboxes @@ -480,7 +489,8 @@ local f_text_data = formatters["mfun_tt_w[%i] := %f ; mfun_tt_h[%i] := %f ; mfu function metapost.textextsdata() local t, nt, n = { }, 0, 0 - for n, box in next, textexts do + for n=1,#texorder do + local box = textexts[texorder[n]] if box then local wd, ht, dp = box.width/factor, box.height/factor, box.depth/factor if trace_textexts then @@ -492,6 +502,7 @@ function metapost.textextsdata() break end end +-- inspect(t) return t end @@ -765,13 +776,17 @@ local basepoints = number.dimenfactors["bp"] local function cm(object) local op = object.path - local first, second, fourth = op[1], op[2], op[4] - local tx, ty = first.x_coord , first.y_coord - local sx, sy = second.x_coord - tx, fourth.y_coord - ty - local rx, ry = second.y_coord - ty, fourth.x_coord - tx - if sx == 0 then sx = 0.00001 end - if sy == 0 then sy = 0.00001 end - return sx,rx,ry,sy,tx,ty + if op then + local first, second, fourth = op[1], op[2], op[4] + local tx, ty = first.x_coord , first.y_coord + local sx, sy = second.x_coord - tx, fourth.y_coord - ty + local rx, ry = second.y_coord - ty, fourth.x_coord - tx + if sx == 0 then sx = 0.00001 end + if sy == 0 then sy = 0.00001 end + return sx, rx, ry, sy, tx, ty + else + return 1, 0, 0, 1, 0, 0 -- weird case + end end -- color @@ -782,25 +797,115 @@ end -- text -local tx_done = { } +-- local tx_done = { } +-- +-- local function tx_reset() +-- tx_done = { } +-- end +-- +-- local function tx_analyze(object,prescript) -- todo: hash content and reuse them +-- local tx_stage = prescript.tx_stage +-- if tx_stage then +-- local tx_number = tonumber(prescript.tx_number) +-- if not tx_done[tx_number] then +-- tx_done[tx_number] = true +-- if trace_textexts then +-- report_textexts("setting %s %s (first pass)",tx_stage,tx_number) +-- end +-- local s = object.postscript or "" +-- local c = object.color -- only simple ones, no transparency +-- local a = prescript.tr_alternative +-- local t = prescript.tr_transparency +-- if not c then +-- -- no color +-- elseif #c == 1 then +-- if a and t then +-- s = format("\\directcolored[s=%f,a=%f,t=%f]%s",c[1],a,t,s) +-- else +-- s = format("\\directcolored[s=%f]%s",c[1],s) +-- end +-- elseif #c == 3 then +-- if a and t then +-- s = format("\\directcolored[r=%f,g=%f,b=%f,a=%f,t=%f]%s",c[1],c[2],c[3],a,t,s) +-- else +-- s = format("\\directcolored[r=%f,g=%f,b=%f]%s",c[1],c[2],c[3],s) +-- end +-- elseif #c == 4 then +-- if a 
and t then +-- s = format("\\directcolored[c=%f,m=%f,y=%f,k=%f,a=%f,t=%f]%s",c[1],c[2],c[3],c[4],a,t,s) +-- else +-- s = format("\\directcolored[c=%f,m=%f,y=%f,k=%f]%s",c[1],c[2],c[3],c[4],s) +-- end +-- end +-- context.MPLIBsettext(tx_number,s) -- combine colored in here, saves call +-- metapost.multipass = true +-- end +-- end +-- end +-- +-- local function tx_process(object,prescript,before,after) +-- local tx_number = prescript.tx_number +-- if tx_number then +-- tx_number = tonumber(tx_number) +-- local tx_stage = prescript.tx_stage +-- if tx_stage == "final" then -- redundant test +-- if trace_textexts then +-- report_textexts("processing %s (second pass)",tx_number) +-- end +-- local sx,rx,ry,sy,tx,ty = cm(object) -- outside function ! +-- before[#before+1] = function() +-- -- flush always happens, we can have a special flush function injected before +-- local box = textexts[tx_number] +-- if box then +-- context.MPLIBgettextscaledcm(tx_number, +-- format("%f",sx), -- bah ... %s no longer checks +-- format("%f",rx), -- bah ... %s no longer checks +-- format("%f",ry), -- bah ... %s no longer checks +-- format("%f",sy), -- bah ... %s no longer checks +-- format("%f",tx), -- bah ... %s no longer checks +-- format("%f",ty), -- bah ... %s no longer checks +-- sxsy(box.width,box.height,box.depth)) +-- else +-- report_textexts("unknown %s",tx_number) +-- end +-- end +-- if not trace_textexts then +-- object.path = false -- else: keep it +-- end +-- object.color = false +-- object.grouped = true +-- end +-- end +-- end + +-- experiment + +local tx_hash = { } +local tx_last = 0 local function tx_reset() - tx_done = { } + tx_hash = { } + tx_last = 0 end +local fmt = formatters["%s %s %s % t"] + local function tx_analyze(object,prescript) -- todo: hash content and reuse them local tx_stage = prescript.tx_stage - if tx_stage then + if tx_stage == "trial" then + textrial = textrial + 1 local tx_number = tonumber(prescript.tx_number) - if not tx_done[tx_number] then - tx_done[tx_number] = true - if trace_textexts then - report_textexts("setting %s %s (first pass)",tx_stage,tx_number) - end - local s = object.postscript or "" - local c = object.color -- only simple ones, no transparency - local a = prescript.tr_alternative - local t = prescript.tr_transparency + local s = object.postscript or "" + local c = object.color -- only simple ones, no transparency + local a = prescript.tr_alternative + local t = prescript.tr_transparency + local h = fmt(tx_number,a or "?",t or "?",c) + local n = tx_hash[h] -- todo: hashed variant with s (nicer for similar labels) + if not n then + tx_last = tx_last + 1 + -- if trace_textexts then + -- report_textexts("setting %s %s (first pass)",tx_stage,tx_number) + -- end if not c then -- no color elseif #c == 1 then @@ -822,8 +927,36 @@ local function tx_analyze(object,prescript) -- todo: hash content and reuse them s = format("\\directcolored[c=%f,m=%f,y=%f,k=%f]%s",c[1],c[2],c[3],c[4],s) end end - context.MPLIBsettext(tx_number,s) -- combine colored in here, saves call + context.MPLIBsettext(tx_last,s) + metapost.multipass = true + tx_hash[h] = tx_last + texslots[textrial] = tx_last + texorder[tx_number] = tx_last + if trace_textexts then + report_textexts("stage: %s, usage: %s, number: %s, new: %s, hash: %s",tx_stage,textrial,tx_number,tx_last,h) + end + else + texslots[textrial] = n + if trace_textexts then + report_textexts("stage: %s, usage: %s, number: %s, old: %s, hash: %s",tx_stage,textrial,tx_number,n,h) + end + end + elseif tx_stage == "extra" then + 
textrial = textrial + 1 + local tx_number = tonumber(prescript.tx_number) + if not texorder[tx_number] then + local s = object.postscript or "" + tx_last = tx_last + 1 + -- if trace_textexts then + -- report_textexts("setting %s %s (first pass)",tx_stage,tx_number) + -- end + context.MPLIBsettext(tx_last,s) metapost.multipass = true + texslots[textrial] = tx_last + texorder[tx_number] = tx_last + if trace_textexts then + report_textexts("stage: %s, usage: %s, number: %s, extra: %s",tx_stage,textrial,tx_number,tx_last) + end end end end @@ -833,18 +966,21 @@ local function tx_process(object,prescript,before,after) if tx_number then tx_number = tonumber(tx_number) local tx_stage = prescript.tx_stage - if tx_stage == "final" then -- redundant test + if tx_stage == "final" then + texfinal = texfinal + 1 + local n = texslots[texfinal] + -- if trace_textexts then + -- report_textexts("processing %s (second pass)",tx_number) + -- end if trace_textexts then - report_textexts("processing %s (second pass)",tx_number) + report_textexts("stage: %s, usage: %s, number: %s, use: %s",tx_stage,texfinal,tx_number,n) end - -- before[#before+1] = f_cm(cm(object)) - local sx,rx,ry,sy,tx,ty = cm(object) - before[#before+1] = function() - -- flush always happens, we can have a special flush function injected before - local box = textexts[tx_number] - if box then - -- context.MPLIBgettextscaled(tx_number,sxsy(box.width,box.height,box.depth)) - context.MPLIBgettextscaledcm(tx_number, + local sx, rx, ry, sy, tx, ty = cm(object) -- needs to be frozen outside the function + local box = textexts[n] + if box then + before[#before+1] = function() + -- flush always happens, we can have a special flush function injected before + context.MPLIBgettextscaledcm(n, format("%f",sx), -- bah ... %s no longer checks format("%f",rx), -- bah ... %s no longer checks format("%f",ry), -- bah ... %s no longer checks @@ -852,11 +988,12 @@ local function tx_process(object,prescript,before,after) format("%f",tx), -- bah ... %s no longer checks format("%f",ty), -- bah ... %s no longer checks sxsy(box.width,box.height,box.depth)) - else + end + else + before[#before+1] = function() report_textexts("unknown %s",tx_number) end end - -- before[#before+1] = "Q" if not trace_textexts then object.path = false -- else: keep it end @@ -958,7 +1095,7 @@ local function sh_process(object,prescript,before,after) end before[#before+1], after[#after+1] = "q /Pattern cs", format("W n /%s sh Q",name) -- false, not nil, else mt triggered - object.colored = false + object.colored = false -- hm, not object.color ? 
object.type = false object.grouped = true end diff --git a/tex/context/base/node-aux.lua b/tex/context/base/node-aux.lua index 466b7991d..ebfd3976d 100644 --- a/tex/context/base/node-aux.lua +++ b/tex/context/base/node-aux.lua @@ -204,23 +204,23 @@ function nodes.firstcharinbox(n) return 0 end ---~ local function firstline(n) ---~ while n do ---~ local id = n.id ---~ if id == hlist_code then ---~ if n.subtype == line_code then ---~ return n ---~ else ---~ return firstline(n.list) ---~ end ---~ elseif id == vlist_code then ---~ return firstline(n.list) ---~ end ---~ n = n.next ---~ end ---~ end - ---~ nodes.firstline = firstline +-- local function firstline(n) +-- while n do +-- local id = n.id +-- if id == hlist_code then +-- if n.subtype == line_code then +-- return n +-- else +-- return firstline(n.list) +-- end +-- elseif id == vlist_code then +-- return firstline(n.list) +-- end +-- n = n.next +-- end +-- end + +-- nodes.firstline = firstline -- this depends on fonts, so we have a funny dependency ... will be -- sorted out .. we could make tonodes a plugin into this diff --git a/tex/context/base/node-fnt.lua b/tex/context/base/node-fnt.lua index 543f64acb..49e1029e7 100644 --- a/tex/context/base/node-fnt.lua +++ b/tex/context/base/node-fnt.lua @@ -41,7 +41,7 @@ local setmetatableindex = table.setmetatableindex -- potential speedup: check for subtype < 256 so that we can remove that test -- elsewhere, danger: injected nodes will not be dealt with but that does not --- happen often; we could consider processing sublists but that might need mor +-- happen often; we could consider processing sublists but that might need more -- checking later on; the current approach also permits variants local run = 0 @@ -96,7 +96,7 @@ function handlers.characters(head) end end for n in traverse_id(glyph_code,head) do --- if n.subtype<256 then +-- if n.subtype<256 then -- all are 1 local font = n.font local attr = has_attribute(n,0) or 0 -- zero attribute is reserved for fonts in context if font ~= prevfont or attr ~= prevattr then diff --git a/tex/context/base/node-pro.lua b/tex/context/base/node-pro.lua index 42c6a8543..adca502db 100644 --- a/tex/context/base/node-pro.lua +++ b/tex/context/base/node-pro.lua @@ -67,7 +67,7 @@ processors.tracer = tracer processors.enabled = true -- this will become a proper state (like trackers) function processors.pre_linebreak_filter(head,groupcode,size,packtype,direction) - local first, found = first_glyph(head) + local first, found = first_glyph(head) -- they really need to be glyphs if found then if trace_callbacks then local before = nodes.count(head,true) @@ -78,10 +78,10 @@ function processors.pre_linebreak_filter(head,groupcode,size,packtype,direction) else tracer("pre_linebreak","unchanged",head,groupcode,before,after,true) end - return (done and head) or true + return done and head or true else local head, done = actions(head,groupcode,size,packtype,direction) -- todo : pass first - return (done and head) or true + return done and head or true end elseif trace_callbacks then local n = nodes.count(head,false) @@ -94,7 +94,7 @@ local enabled = true function processors.hpack_filter(head,groupcode,size,packtype,direction) if enabled then - local first, found = first_glyph(head) + local first, found = first_glyph(head) -- they really need to be glyphs if found then if trace_callbacks then local before = nodes.count(head,true) @@ -127,8 +127,8 @@ function nodes.fasthpack(...) 
-- todo: pass explicit arguments return hp, b end -callbacks.register('pre_linebreak_filter', processors.pre_linebreak_filter,"all kind of horizontal manipulations (before par break)") -callbacks.register('hpack_filter' , processors.hpack_filter,"all kind of horizontal manipulations") +callbacks.register('pre_linebreak_filter', processors.pre_linebreak_filter, "all kind of horizontal manipulations (before par break)") +callbacks.register('hpack_filter' , processors.hpack_filter, "all kind of horizontal manipulations (before hbox creation)") local actions = tasks.actions("finalizers") -- head, where @@ -140,27 +140,20 @@ local actions = tasks.actions("finalizers") -- head, where -- something weird here .. group mvl when making a vbox function processors.post_linebreak_filter(head,groupcode) ---~ local first, found = first_glyph(head) ---~ if found then - if trace_callbacks then - local before = nodes.count(head,true) - local head, done = actions(head,groupcode) - local after = nodes.count(head,true) - if done then - tracer("post_linebreak","changed",head,groupcode,before,after,true) - else - tracer("post_linebreak","unchanged",head,groupcode,before,after,true) - end - return (done and head) or true + if trace_callbacks then + local before = nodes.count(head,true) + local head, done = actions(head,groupcode) + local after = nodes.count(head,true) + if done then + tracer("post_linebreak","changed",head,groupcode,before,after,true) else - local head, done = actions(head,groupcode) - return (done and head) or true + tracer("post_linebreak","unchanged",head,groupcode,before,after,true) end ---~ elseif trace_callbacks then ---~ local n = nodes.count(head,false) ---~ tracer("post_linebreak","no chars",head,groupcode,n,n) ---~ end ---~ return true + return (done and head) or true + else + local head, done = actions(head,groupcode) + return (done and head) or true + end end callbacks.register('post_linebreak_filter', processors.post_linebreak_filter,"all kind of horizontal manipulations (after par break)") diff --git a/tex/context/base/page-mix.mkiv b/tex/context/base/page-mix.mkiv index 97774681c..3335c87e2 100644 --- a/tex/context/base/page-mix.mkiv +++ b/tex/context/base/page-mix.mkiv @@ -414,6 +414,13 @@ \let\strc_itemgroups_stop_columns\page_mix_fast_columns_stop +\setupmixedcolumns + [\s!itemgroupcolumns] + [\c!grid=\itemgroupparameter\c!grid] + +\setupitemgroups + [\c!grid=\v!tolerant] + %D The common initialization: \def\page_mix_initialize_columns diff --git a/tex/context/base/scrp-ini.lua b/tex/context/base/scrp-ini.lua index 0b8c49370..16d33d5d7 100644 --- a/tex/context/base/scrp-ini.lua +++ b/tex/context/base/scrp-ini.lua @@ -378,8 +378,8 @@ end -- we can have a fonts.hashes.originals -function scripts.preprocess(head) -- we could probably pass the first glyph (as it's already known) - local start = first_glyph(head) +function scripts.preprocess(head) + local start = first_glyph(head) -- we already have glyphs here (subtype 1) if not start then return head, false else diff --git a/tex/context/base/spac-flr.mkiv b/tex/context/base/spac-flr.mkiv new file mode 100644 index 000000000..0e685b644 --- /dev/null +++ b/tex/context/base/spac-flr.mkiv @@ -0,0 +1,112 @@ +%D \module +%D [ file=spac-fil, +%D version=2013.01.13, +%D title=\CONTEXT\ Spacing Macros, +%D subtitle=Fillers, +%D author={Hans Hagen and Wolfgang Schuster}, +%D date=\currentdate, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted 
by \PRAGMA. See mreadme.pdf for +%C details. + +\writestatus{loading}{ConTeXt Spacing Macros / Fillers} + +\unprotect + +%D A bit more extensive variant of a prototype posted by Wolfgang to the +%D development list. Instead of dealing with a few leader types it makes +%D sense to support them all as well as simple rule fillers. Eventually we +%D can also use in for the mathfillers. We distinguish between alternatives +%D and with them methods, and a checker is provided for use in applying +%D e.g.\ fillers in lists. + +\installcorenamespace{filler} +\installcorenamespace{filleralternative} +\installcorenamespace{fillerleadermethod} + +\installcommandhandler \??filler {filler} \??filler + +\let\setupfillers\setupfiller + +\unexpanded\def\filler + {\dosingleempty\spac_fillers_indeed} + +\unexpanded\def\checkedfiller#1% + {\ifcsname\namedfillerhash{#1}\s!parent\endcsname + \spac_fillers_indeed[#1]% + \expandafter\gobbleoneargument + \else + \expandafter\firstofoneargument + \fi{#1}} + +\def\spac_fillers_indeed[#1]% + {\removeunwantedspaces + \begingroup + \edef\currentfiller{#1}% + \scratchdimen\fillerparameter\c!leftmargin\relax + \ifdim\scratchdimen=\zeropoint\else + \hskip\scratchdimen + \fi + \fillerparameter\c!left\relax + \expandcheckedcsname\??filleralternative{\fillerparameter\c!alternative}\s!unknown\relax + \fillerparameter\c!right\relax + \scratchdimen\fillerparameter\c!rightmargin\relax + \ifdim\scratchdimen=\zeropoint\else + \hskip\scratchdimen + \fi + \endgroup + \ignorespaces} + +\setvalue{\??filleralternative\s!unknown}% + {} + +\setvalue{\??filleralternative\v!symbol}% + {\expandcheckedcsname\??fillerleadermethod{\fillerparameter\c!method}\v!local + \simplealignedbox + {\fillerparameter\c!width}% + {\fillerparameter\c!align}% + {\fillerparameter\c!symbol}% + \hfill} + +\setvalue{\??filleralternative\c!stretch}% + {\hfill} + +\setvalue{\??filleralternative\c!rule}% + {\expandcheckedcsname\??fillerleadermethod{\fillerparameter\c!method}\v!local + \hrule + \!!height\fillerparameter\c!height + \!!depth \fillerparameter\c!depth + \hfill} + +\letvalue{\??fillerleadermethod\s!local }\normalleaders % overflow ends up inbetween (current box) +\letvalue{\??fillerleadermethod\v!global}\normalgleaders % overflow ends up inbetween (outermost box) +\letvalue{\??fillerleadermethod\v!middle}\normalcleaders % overflow ends up before, after (current box) +\letvalue{\??fillerleadermethod\v!broad }\normalxleaders % overflow ends up before, inbetween, after (current box) + +\setupfillers + [\c!width=\emwidth, + \c!symbol=., + \c!align=\v!middle, + \c!height=.1\exheight, + \c!depth=\zeropoint, + \c!leftmargin=\zeropoint, + \c!rightmargin=\zeropoint, + \c!alternative=\v!symbol, + \c!method=\s!local] + +% maybe box -> symbol + +\protect \endinput + +% \definefiller[test-a][alternative=stretch] +% \definefiller[test-b][alternative=symbol,symbol=!] 
+% \definefiller[test-c][alternative=rule,height=.1ex,leftmargin=.5em,rightmargin=.25em] + +% \starttext +% text\filler[test-a]text \par +% text\filler[test-b]text \par +% text\filler[test-c]text \par +% text\checkedfiller{<nothing>}text \par +% \stoptext diff --git a/tex/context/base/spac-ver.lua b/tex/context/base/spac-ver.lua index 696635bfc..68554acb9 100644 --- a/tex/context/base/spac-ver.lua +++ b/tex/context/base/spac-ver.lua @@ -101,12 +101,14 @@ local whatsit_code = nodecodes.whatsit local userskip_code = skipcodes.userskip -builders.vspacing = builders.vspacing or { } -local vspacing = builders.vspacing -vspacing.data = vspacing.data or { } +local vspacing = builders.vspacing or { } +builders.vspacing = vspacing -vspacing.data.snapmethods = vspacing.data.snapmethods or { } -local snapmethods = vspacing.data.snapmethods --maybe some older code can go +local vspacingdata = vspacing.data or { } +vspacing.data = vspacingdata + +vspacingdata.snapmethods = vspacingdata.snapmethods or { } +local snapmethods = vspacingdata.snapmethods --maybe some older code can go storage.register("builders/vspacing/data/snapmethods", snapmethods, "builders.vspacing.data.snapmethods") @@ -554,18 +556,18 @@ function vspacing.tocategory(str) end end -vspacing.data.map = vspacing.data.map or { } -- allocate ? -vspacing.data.skip = vspacing.data.skip or { } -- allocate ? +vspacingdata.map = vspacingdata.map or { } -- allocate ? +vspacingdata.skip = vspacingdata.skip or { } -- allocate ? -storage.register("builders/vspacing/data/map", vspacing.data.map, "builders.vspacing.data.map") -storage.register("builders/vspacing/data/skip", vspacing.data.skip, "builders.vspacing.data.skip") +storage.register("builders/vspacing/data/map", vspacingdata.map, "builders.vspacing.data.map") +storage.register("builders/vspacing/data/skip", vspacingdata.skip, "builders.vspacing.data.skip") do -- todo: interface.variables vspacing.fixed = false - local map = vspacing.data.map - local skip = vspacing.data.skip + local map = vspacingdata.map + local skip = vspacingdata.skip local multiplier = C(S("+-")^0 * R("09")^1) * P("*") local category = P(":") * C(P(1)^1) @@ -1244,23 +1246,21 @@ end function vspacing.pagehandler(newhead,where) -- local newhead = texlists.contrib_head if newhead then - local newtail = find_node_tail(newhead) + local newtail = find_node_tail(newhead) -- best pass that tail, known anyway local flush = false stackhack = true -- todo: only when grid snapping once enabled for n in traverse_nodes(newhead) do -- we could just look for glue nodes local id = n.id - if id == glue_code then - if n.subtype == userskip_code then - if has_attribute(n,a_skipcategory) then - stackhack = true - else - flush = true - end + if id ~= glue_code then + flush = true + elseif n.subtype == userskip_code then + if has_attribute(n,a_skipcategory) then + stackhack = true else - -- tricky + flush = true end else - flush = true + -- tricky end end if flush then diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf Binary files differindex 29bcc4b27..05e831c58 100644 --- a/tex/context/base/status-files.pdf +++ b/tex/context/base/status-files.pdf diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf Binary files differindex de709d451..e045bc941 100644 --- a/tex/context/base/status-lua.pdf +++ b/tex/context/base/status-lua.pdf diff --git a/tex/context/base/trac-inf.lua b/tex/context/base/trac-inf.lua index 699e2b668..5575639eb 100644 --- a/tex/context/base/trac-inf.lua +++ 
b/tex/context/base/trac-inf.lua @@ -12,6 +12,7 @@ if not modules then modules = { } end modules ['trac-inf'] = { -- and rawget. local format, lower = string.format, string.lower +local concat = table.concat local clock = os.gettimeofday or os.clock -- should go in environment local write_nl = texio and texio.write_nl or print @@ -121,6 +122,14 @@ function statistics.show(reporter) local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0 return format("%s direct, %s indirect, %s total", total-indirect, indirect, total) end) + if jit then + local status = { jit.status() } + if status[1] then + register("luajit status", function() + return concat(status," ",2) + end) + end + end collectgarbage("collect") register("current memory usage", statistics.memused) register("runtime",statistics.runtime) diff --git a/tex/context/base/type-ini.mkvi b/tex/context/base/type-ini.mkvi index 8345a97f9..ddf7cad8f 100644 --- a/tex/context/base/type-ini.mkvi +++ b/tex/context/base/type-ini.mkvi @@ -460,10 +460,6 @@ \def\font_typefaces_define_b[#name][#style][#fontshape][#fontname][#dummya][#dummyb]% {\font_typefaces_define_a[#name][#style][#fontshape][#fontname][\s!default][#dummyb]} -% \def\font_typefaces_define_c[#name][#style][#settings][#dummya][#dummyb][#dummyc]% misuse for settings -% {\font_typefaces_define_indeed[#name][#style]% -% \getparameters[\??tf#name#style][#settings]} % not used - \def\font_typefaces_define_c[#name][#style][#dummya][#dummyb][#dummyc][#dummyd]% {\font_typefaces_define_indeed[#name][#style]} @@ -561,7 +557,7 @@ %D \inherittypeface[palatino] % == [rm,ss,tt,mm] %D \stoptyping -\def\inherittypeface +\unexpanded\def\inherittypeface {\dotripleempty\font_typescripts_inherit_indeed} \def\font_typescripts_inherit_indeed[#name][#styles][#parentclass]% diff --git a/tex/context/base/util-mrg.lua b/tex/context/base/util-mrg.lua index 8d6c5dd31..fad7ef0a1 100644 --- a/tex/context/base/util-mrg.lua +++ b/tex/context/base/util-mrg.lua @@ -12,7 +12,7 @@ local gsub, format = string.gsub, string.format local concat = table.concat local type, next = type, next -utilities = utilities or {} +utilities = utilities or { } local merger = utilities.merger or { } utilities.merger = merger utilities.report = logs and logs.reporter("system") or print @@ -41,6 +41,13 @@ local m_faked = "-- " .. m_begin_merge .. "\n\n" .. "-- " .. m_end_merge .. "\n\n" +local m_report = [[ +-- used libraries : %s +-- skipped libraries : %s +-- original bytes : %s +-- stripped bytes : %s +]] + local function self_fake() return m_faked end @@ -59,18 +66,79 @@ local function self_load(name) return data or "" end +-- -- saves some 20K .. scite comments +-- data = gsub(data,"%-%-~[^\n\r]*[\r\n]","") +-- -- saves some 20K .. 
ldx comments +-- data = gsub(data,"%-%-%[%[ldx%-%-.-%-%-ldx%]%]%-%-","") + +local P, R, S, V, Ct, C, Cs, Cc, Cp, Cmt, Cb, Cg = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cp, lpeg.Cmt, lpeg.Cb, lpeg.Cg +local lpegmatch, patterns = lpeg.match, lpeg.patterns + +local equals = P("=")^0 +local open = P("[") * Cg(equals,"init") * P("[") * P("\n")^-1 +local close = P("]") * C(equals) * P("]") +local closeeq = Cmt(close * Cb("init"), function(s,i,a,b) return a == b end) +local longstring = open * (1 - closeeq)^0 * close + +local space = patterns.space +local eol = patterns.newline +local quoted = patterns.quoted +local emptyline = space^0 * eol +local operator1 = P("<=") + P(">=") + P("~=") + P("..") + S("/^<>=*+%%") +local operator2 = S("*+/") +local operator3 = S("-") +local separator = S(",;") + +local ignore = (P("]") * space^1 * P("=") * space^1 * P("]")) / "]=[" + + (P("=") * space^1 * P("{")) / "={" + + (P("(") * space^1) / "(" + + (P("{") * (space+eol)^1 * P("}")) / "{}" +local strings = quoted -- / function (s) print("<<"..s..">>") return s end +local longcmt = (emptyline^0 * P("--") * longstring * emptyline^0) / "" +local longstr = longstring +local comment = emptyline^0 * P("--") * P("-")^0 * (1-eol)^0 * emptyline^1 / "\n" +local pack = ((eol+space)^0 / "") * operator1 * ((eol+space)^0 / "") + + ((eol+space)^0 / "") * operator2 * ((space)^0 / "") + + ((eol+space)^1 / "") * operator3 * ((space)^1 / "") + + ((space)^0 / "") * separator * ((space)^0 / "") +local lines = emptyline^2 / "\n" +local spaces = (space * space) / " " +----- spaces = ((space+eol)^1 ) / " " + +local compact = Cs ( ( + ignore + + strings + + longcmt + + longstr + + comment + + pack + + lines + + spaces + + 1 +)^1 ) + +local strip = Cs((emptyline^2/"\n" + 1)^0) + +local function self_compact(data) + if merger.strip_comment then + local before = #data + data = lpeg.match(compact,data) + data = lpeg.match(strip,data) + -- data = string.strip(data) + local after = #data + local delta = before - after + utilities.report("merge: %s bytes compacted to %s (%s bytes stripped)",before,after,delta) + data = format("-- original size: %s, stripped down to: %s\n\n%s",before,after,data) + return data, delta + else + return data, 0 + end +end + local function self_save(name, data) if data ~= "" then - if merger.strip_comment then - local n = #data - -- saves some 20K .. scite comments - data = gsub(data,"%-%-~[^\n\r]*[\r\n]","") - -- saves some 20K .. ldx comments - data = gsub(data,"%-%-%[%[ldx%-%-.-%-%-ldx%]%]%-%-","") - utilities.report("merge: %s bytes of comment stripped, %s bytes of code left",n-#data,#data) - end io.savedata(name,data) - utilities.report("merge: saving %s",name) + utilities.report("merge: saving %s bytes in %s",#data,name) end end @@ -97,27 +165,32 @@ local function self_libs(libs,list) end if foundpath then utilities.report("merge: using library path %s",foundpath) - local right, wrong = { }, { } + local right, wrong, original, stripped = { }, { }, 0, 0 for i=1,#libs do local lib = libs[i] local fullname = foundpath .. "/" .. 
lib if lfs.isfile(fullname) then utilities.report("merge: using library %s",fullname) + local data = io.loaddata(fullname,true) + original = original + #data + local data, delta = self_compact(data) right[#right+1] = lib result[#result+1] = m_begin_closure - result[#result+1] = io.loaddata(fullname,true) + result[#result+1] = data result[#result+1] = m_end_closure + stripped = stripped + delta else utilities.report("merge: skipping library %s",fullname) wrong[#wrong+1] = lib end end - if #right > 0 then - utilities.report("merge: used libraries: %s",concat(right," ")) - end - if #wrong > 0 then - utilities.report("merge: skipped libraries: %s",concat(wrong," ")) - end + right = #right > 0 and concat(right," ") or "-" + wrong = #wrong > 0 and concat(wrong," ") or "-" + utilities.report("merge: used libraries: %s",right) + utilities.report("merge: skipped libraries: %s",wrong) + utilities.report("merge: original bytes: %s",original) + utilities.report("merge: stripped bytes: %s",stripped) + result[#result+1] = format(m_report,right,wrong,original,stripped) else utilities.report("merge: no valid library path found") end diff --git a/tex/context/base/util-seq.lua b/tex/context/base/util-seq.lua index 711424a2c..0bf056365 100644 --- a/tex/context/base/util-seq.lua +++ b/tex/context/base/util-seq.lua @@ -235,6 +235,7 @@ local function construct(t) t.compiled = format("%s\nreturn function(%s)\n%s\nend",variables,arguments,calls) end end +-- print(t.compiled) return t.compiled -- also stored so that we can trace end @@ -273,7 +274,7 @@ sequencers.compile = compile -- todo: use sequencer (can have arguments and returnvalues etc now) -local template = [[ +local template_yes = [[ %s return function(head%s) local ok, done = false, false @@ -281,6 +282,11 @@ return function(head%s) return head, done end]] +local template_nop = [[ +return function() + return false, false +end]] + function sequencers.nodeprocessor(t,nofarguments) -- todo: handle 'kind' in plug into tostring local list, order, kind, gskip, askip = t.list, t.order, t.kind, t.gskip, t.askip local vars, calls, args, n = { }, { }, nil, 0 @@ -319,7 +325,6 @@ function sequencers.nodeprocessor(t,nofarguments) -- todo: handle 'kind' in plug end end end - local processor = format(template,concat(vars,"\n"),args,concat(calls,"\n")) - -- print(processor) + local processor = #calls > 0 and format(template_yes,concat(vars,"\n"),args,concat(calls,"\n")) or template_nop return processor end diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua index 828169554..60e2a7942 100644 --- a/tex/generic/context/luatex/luatex-fonts-merged.lua +++ b/tex/generic/context/luatex/luatex-fonts-merged.lua @@ -1,6 +1,6 @@ -- merged file : luatex-fonts-merged.lua -- parent file : luatex-fonts.lua --- merge date : 01/10/13 01:04:42 +-- merge date : 01/13/13 23:10:29 do -- begin closure to overcome local limits and interference @@ -59,8 +59,10 @@ local report = texio and texio.write_nl or print -- function lpeg.Cmt (l) local p = lpcmt (l) report("LPEG Cmt =") lpprint(l) return p end -- function lpeg.Carg (l) local p = lpcarg(l) report("LPEG Carg =") lpprint(l) return p end -local type, next = type, next +local type, next, tostring = type, next, tostring local byte, char, gmatch, format = string.byte, string.char, string.gmatch, string.format +----- mod, div = math.mod, math.div +local floor = math.floor -- Beware, we predefine a bunch of patterns here and one reason for doing so -- is that we get consistent 
behaviour in some of the visualizers. @@ -785,6 +787,47 @@ end patterns.containseol = lpeg.finder(eol) -- (1-eol)^0 * eol +-- The next pattern^n variant is based on an approach suggested +-- by Roberto: constructing a big repetition in chunks. +-- +-- Being sparse is not needed, and only complicate matters and +-- the number of redundant entries is not that large. + +local function nextstep(n,step,result) + local m = n % step -- mod(n,step) + local d = floor(n/step) -- div(n,step) + if d > 0 then + local v = V(tostring(step)) + local s = result.start + for i=1,d do + if s then + s = v * s + else + s = v + end + end + result.start = s + end + if step > 1 and result.start then + local v = V(tostring(step/2)) + result[tostring(step)] = v * v + end + if step > 0 then + return nextstep(m,step/2,result) + else + return result + end +end + +function lpeg.times(pattern,n) + return P(nextstep(n,2^16,{ "start", ["1"] = pattern })) +end + +-- local p = lpeg.Cs((1 - lpeg.times(lpeg.P("AB"),25))^1) +-- local s = "12" .. string.rep("AB",20) .. "34" .. string.rep("AB",30) .. "56" +-- inspect(p) +-- print(lpeg.match(p,s)) + end -- closure do -- begin closure to overcome local limits and interference @@ -856,7 +899,7 @@ end -- print(string.unquoted('"test"')) function string.quoted(str) - return format("%q",str) -- always " + return format("%q",str) -- always double quote end function string.count(str,pattern) -- variant 3 @@ -2193,7 +2236,8 @@ local noslashes = 1-slashes local name = noperiod^1 local suffix = period/"" * (1-period-slashes)^1 * -1 -local pattern = C((noslashes^0 * slashes^1)^1) +----- pattern = C((noslashes^0 * slashes^1)^1) +local pattern = C((1 - (slashes^1 * noslashes^1 * -1))^1) * P(1) -- there must be a more efficient way local function pathpart(name,default) return name and lpegmatch(pattern,name) or default or "" @@ -2205,6 +2249,13 @@ local function basename(name) return name and lpegmatch(pattern,name) or name end +-- print(pathpart("file")) +-- print(pathpart("dir/file")) +-- print(pathpart("/dir/file")) +-- print(basename("file")) +-- print(basename("dir/file")) +-- print(basename("/dir/file")) + local pattern = (noslashes^0 * slashes^1)^0 * Cs((1-suffix)^1) * suffix^0 local function nameonly(name) @@ -2229,7 +2280,7 @@ file.extname = suffixonly -- obsolete -- actually these are schemes local drive = C(R("az","AZ")) * colon -local path = C(((1-slashes)^0 * slashes)^0) +local path = C((noslashes^0 * slashes)^0) local suffix = period * C(P(1-period)^0 * P(-1)) local base = C((1-suffix)^0) local rest = C(P(1)^0) @@ -2258,9 +2309,14 @@ function file.splitbase(str) return str and lpegmatch(pattern_d,str) -- returns path, base+suffix end -function file.nametotable(str,splitdrive) -- returns table +---- stripslash = C((1 - P("/")^1*P(-1))^0) + +function file.nametotable(str,splitdrive) if str then local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str) + -- if path ~= "" then + -- path = lpegmatch(stripslash,path) -- unfortunate hack, maybe this becomes default + -- end if splitdrive then return { path = path, @@ -2281,6 +2337,20 @@ function file.nametotable(str,splitdrive) -- returns table end end +-- print(file.splitname("file")) +-- print(file.splitname("dir/file")) +-- print(file.splitname("/dir/file")) +-- print(file.splitname("file")) +-- print(file.splitname("dir/file")) +-- print(file.splitname("/dir/file")) + +-- inspect(file.nametotable("file.ext")) +-- inspect(file.nametotable("dir/file.ext")) +-- inspect(file.nametotable("/dir/file.ext")) +-- 
inspect(file.nametotable("file.ext")) +-- inspect(file.nametotable("dir/file.ext")) +-- inspect(file.nametotable("/dir/file.ext")) + local pattern = Cs(((period * noperiod^1 * -1)/"" + 1)^1) function file.removesuffix(name) @@ -3993,6 +4063,8 @@ function constructors.scale(tfmdata,specification) target.psname = psname target.name = name -- + -- inspect(properties) + -- properties.fontname = fontname properties.fullname = fullname properties.filename = filename @@ -9508,9 +9580,6 @@ local handlers = { } local rlmode = 0 local featurevalue = false --- we cannot optimize with "start = first_glyph(head)" because then we don't --- know which rlmode we're in which messes up cursive handling later on --- -- head is always a whatsit so we can safely assume that head is not changed -- we use this for special testing and documentation @@ -11464,7 +11533,7 @@ local function featuresprocessor(head,font,attr) -- font interactions and then we do need the full sweeps. -- Keeping track of the headnode is needed for devanagari (I generalized it a bit - -- so that multiple cases are also covered. + -- so that multiple cases are also covered.) for s=1,#sequences do local dataset = datasets[s]
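The self_compact step added to util-mrg.lua earlier in this diff is assembled from small LPeg pieces. The blank-line collapsing part can be exercised on its own; a standalone sketch, assuming only the stock lpeg module, with simplified space/eol definitions standing in for ConTeXt's shared patterns table:

local lpeg = require("lpeg")
local P, S, Cs = lpeg.P, lpeg.S, lpeg.Cs

local space     = S(" \t")
local eol       = P("\r\n") + P("\n") + P("\r")
local emptyline = space^0 * eol

-- two or more empty lines in a row collapse into a single newline, the rest is kept
local strip = Cs((emptyline^2 / "\n" + P(1))^0)

local sample = "local a = 1\n\n\n\nlocal b = 2\n"
print(lpeg.match(strip, sample)) -- prints the two statements with the blank lines removed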