diff options
Diffstat (limited to 'tex/context/base/mkxl')
67 files changed, 2541 insertions, 1057 deletions
diff --git a/tex/context/base/mkxl/attr-ini.lmt b/tex/context/base/mkxl/attr-ini.lmt index 8b2ec8911..32fc36cdd 100644 --- a/tex/context/base/mkxl/attr-ini.lmt +++ b/tex/context/base/mkxl/attr-ini.lmt @@ -10,10 +10,8 @@ local next, type = next, type local osexit = os.exit local sortedhash = table.sortedhash ---[[ldx-- -<p>We start with a registration system for atributes so that we can use the -symbolic names later on.</p> ---ldx]]-- +-- We start with a registration system for atributes so that we can use the symbolic +-- names later on. local nodes = nodes local context = context @@ -71,17 +69,13 @@ trackers.register("attributes.values", function(v) trace_values = v end) -- end -- end ---[[ldx-- -<p>We reserve this one as we really want it to be always set (faster).</p> ---ldx]]-- +-- We reserve this one as we really want it to be always set (faster). names[0], numbers["fontdynamic"] = "fontdynamic", 0 ---[[ldx-- -<p>private attributes are used by the system and public ones are for users. We use dedicated -ranges of numbers for them. Of course a the <l n='context'/> end a private attribute can be -accessible too, so a private attribute can have a public appearance.</p> ---ldx]]-- +-- Private attributes are used by the system and public ones are for users. We use +-- dedicated ranges of numbers for them. Of course a the TeX end a private attribute +-- can be accessible too, so a private attribute can have a public appearance. 
sharedstorage.attributes_last_private = sharedstorage.attributes_last_private or 15 -- very private sharedstorage.attributes_last_public = sharedstorage.attributes_last_public or 1024 -- less private diff --git a/tex/context/base/mkxl/char-tex.lmt b/tex/context/base/mkxl/char-tex.lmt index 31023136d..0ac297d59 100644 --- a/tex/context/base/mkxl/char-tex.lmt +++ b/tex/context/base/mkxl/char-tex.lmt @@ -46,17 +46,14 @@ local trace_defining = false trackers.register("characters.defining", fu local report_defining = logs.reporter("characters") ---[[ldx-- -<p>In order to deal with 8-bit output, we need to find a way to go from <l n='utf'/> to -8-bit. This is handled in the <l n='luatex'/> engine itself.</p> - -<p>This leaves us problems with characters that are specific to <l n='tex'/> like -<type>{}</type>, <type>$</type> and alike. We can remap some chars that tex input files -are sensitive for to a private area (while writing to a utility file) and revert then -to their original slot when we read in such a file. Instead of reverting, we can (when -we resolve characters to glyphs) map them to their right glyph there. For this purpose -we can use the private planes 0x0F0000 and 0x100000.</p> ---ldx]]-- +-- In order to deal with 8-bit output, we need to find a way to go from UTF to +-- 8-bit. This is handled in the 32 bit engine itself. This leaves us problems with +-- characters that are specific to TeX, like curly braces and dollars. We can remap +-- some chars that tex input files are sensitive for to a private area (while +-- writing to a utility file) and revert then to their original slot when we read in +-- such a file. Instead of reverting, we can (when we resolve characters to glyphs) +-- map them to their right glyph there. For this purpose we can use the private +-- planes 0x0F0000 and 0x100000. 
local low = allocate() local high = allocate() @@ -106,21 +103,6 @@ private.escape = utf.remapper(escapes) -- maybe: ,"dynamic" private.replace = utf.remapper(low) -- maybe: ,"dynamic" private.revert = utf.remapper(high) -- maybe: ,"dynamic" ---[[ldx-- -<p>We get a more efficient variant of this when we integrate -replacements in collapser. This more or less renders the previous -private code redundant. The following code is equivalent but the -first snippet uses the relocated dollars.</p> - -<typing> -[x] [$x$] -</typing> ---ldx]]-- - --- using the tree-lpeg-mapper would be nice but we also need to deal with end-of-string --- cases: "\"\i" and don't want "\relax" to be seen as \r e lax" (for which we need to mess --- with spaces - local accentmapping = allocate { ['"'] = { [""] = "¨", A = "Ä", a = "ä", @@ -288,12 +270,12 @@ local commandmapping = allocate { texcharacters.commandmapping = commandmapping -local ligaturemapping = allocate { - ["''"] = "”", - ["``"] = "“", - ["--"] = "–", - ["---"] = "—", -} +-- local ligaturemapping = allocate { +-- ["''"] = "”", +-- ["``"] = "“", +-- ["--"] = "–", +-- ["---"] = "—", +-- } -- Older accent handling code can be found in char-def.lua but in the meantime -- we moved on. First the one with commands: @@ -321,9 +303,9 @@ local function toutfpattern() hash["{\\"..k.."}"] = v hash["{\\"..k.." 
}"] = v end - for k, v in next, ligaturemapping do - hash[k] = v - end + -- for k, v in next, ligaturemapping do + -- hash[k] = v + -- end untex = utfchartabletopattern(hash) / hash end return untex @@ -376,9 +358,9 @@ local function toutfpattern() for k, v in next, commandmapping do hash[k] = v end - for k, v in next, ligaturemapping do - hash[k] = v - end + -- for k, v in next, ligaturemapping do + -- hash[k] = v + -- end untex = utfchartabletopattern(hash) / hash end return untex @@ -580,10 +562,8 @@ implement { -- a waste of scanner but consistent actions = texcharacters.defineaccents } ---[[ldx-- -<p>Instead of using a <l n='tex'/> file to define the named glyphs, we -use the table. After all, we have this information available anyway.</p> ---ldx]]-- +-- Instead of using a TeX file to define the named glyphs, we use the table. After +-- all, we have this information available anyway. local function to_number(s) local n = tonumber(s) @@ -878,10 +858,6 @@ function characters.setactivecatcodes(cct) tex.catcodetable = saved end ---[[ldx-- -<p>Setting the lccodes is also done in a loop over the data table.</p> ---ldx]]-- - implement { name = "chardescription", arguments = "integer", diff --git a/tex/context/base/mkxl/cont-new.mkxl b/tex/context/base/mkxl/cont-new.mkxl index 9a6fc93da..53ccef0b6 100644 --- a/tex/context/base/mkxl/cont-new.mkxl +++ b/tex/context/base/mkxl/cont-new.mkxl @@ -13,7 +13,7 @@ % \normalend % uncomment this to get the real base runtime -\newcontextversion{2023.03.20 15:42} +\newcontextversion{2023.04.01 09:28} %D This file is loaded at runtime, thereby providing an excellent place for hacks, %D patches, extensions and new features. There can be local overloads in cont-loc diff --git a/tex/context/base/mkxl/context.mkxl b/tex/context/base/mkxl/context.mkxl index 1a07772eb..6f4b7d052 100644 --- a/tex/context/base/mkxl/context.mkxl +++ b/tex/context/base/mkxl/context.mkxl @@ -29,7 +29,7 @@ %D {YYYY.MM.DD HH:MM} format. 
\immutable\edef\contextformat {\jobname} -\immutable\edef\contextversion{2023.03.20 15:42} +\immutable\edef\contextversion{2023.04.01 09:28} %overloadmode 1 % check frozen / warning %overloadmode 2 % check frozen / error @@ -215,8 +215,9 @@ \loadmkxlfile{unic-ini} -\loadmkxlfile{core-two} +%loadmkxlfile{core-two} % retired, not in testsuite, not on garden, not in styles \loadmkxlfile{core-dat} +\loadmkxlfile{core-pag} \loadmkxlfile{colo-ini} \loadmkxlfile{colo-nod} @@ -647,26 +648,26 @@ % we will definitely freeze mkiv and then use lmt files for futher development % of lmtx. We also no longer use the macro feature to replace 5.3 compatible % function calls by native 5.4 features as lmt files assume 5.4 anyway. This -% makes format generation a little faster (not that it's that slow). It might \ +% makes format generation a little faster (not that it's that slow). It might % take a while before we dealt with all of them because I'll also clean them -% up a bit when doing. +% up a bit when doing. Some will probably always be shared, like char-def.lua. 
% % % luat-bas.mkxl l-macro-imp-optimize % this is no longer used -% c:/data/develop/context/sources/buff-imp-default.lua -% c:/data/develop/context/sources/buff-imp-escaped.lua -% c:/data/develop/context/sources/buff-imp-lua.lua -% c:/data/develop/context/sources/buff-imp-mp.lua -% c:/data/develop/context/sources/buff-imp-nested.lua -% c:/data/develop/context/sources/buff-imp-parsed-xml.lua -% c:/data/develop/context/sources/buff-imp-tex.lua -% c:/data/develop/context/sources/buff-imp-xml.lua - % c:/data/develop/context/sources/buff-par.lua % c:/data/develop/context/sources/buff-ver.lua +% +% c:/data/develop/context/sources/buff-imp-default.lua % shared +% c:/data/develop/context/sources/buff-imp-escaped.lua % shared +% c:/data/develop/context/sources/buff-imp-lua.lua % shared +% c:/data/develop/context/sources/buff-imp-mp.lua % shared +% c:/data/develop/context/sources/buff-imp-nested.lua % shared +% c:/data/develop/context/sources/buff-imp-parsed-xml.lua % shared +% c:/data/develop/context/sources/buff-imp-tex.lua % shared +% c:/data/develop/context/sources/buff-imp-xml.lua % shared % c:/data/develop/context/sources/char-cjk.lua -% c:/data/develop/context/sources/char-def.lua +% c:/data/develop/context/sources/char-def.lua % shared data file, a real big one % c:/data/develop/context/sources/char-enc.lua % c:/data/develop/context/sources/char-ent.lua % c:/data/develop/context/sources/char-fio.lua @@ -680,7 +681,7 @@ % c:/data/develop/context/sources/cldf-com.lua % c:/data/develop/context/sources/cldf-ini.lua -% c:/data/develop/context/sources/cldf-prs.lua % use in chemistry +% c:/data/develop/context/sources/cldf-prs.lua % used in chemistry % c:/data/develop/context/sources/cldf-scn.lua % c:/data/develop/context/sources/cldf-stp.lua % c:/data/develop/context/sources/cldf-ver.lua @@ -690,8 +691,6 @@ % c:/data/develop/context/sources/core-con.lua % c:/data/develop/context/sources/core-ctx.lua -% c:/data/develop/context/sources/core-dat.lua -% 
c:/data/develop/context/sources/core-two.lua % data... @@ -700,7 +699,7 @@ % c:/data/develop/context/sources/file-res.lua % c:/data/develop/context/sources/font-afk.lua -% c:/data/develop/context/sources/font-agl.lua +% c:/data/develop/context/sources/font-agl.lua % shared data file % c:/data/develop/context/sources/font-aux.lua % c:/data/develop/context/sources/font-cid.lua % c:/data/develop/context/sources/font-enc.lua @@ -724,16 +723,16 @@ % c:/data/develop/context/sources/font-trt.lua % c:/data/develop/context/sources/font-web.lua % proof of concept, never used -% c:/data/develop/context/sources/font-imp-combining.lua % shared, like typescript -% c:/data/develop/context/sources/font-imp-dimensions.lua % idem -% c:/data/develop/context/sources/font-imp-italics.lua % idem -% c:/data/develop/context/sources/font-imp-notused.lua % idem -% c:/data/develop/context/sources/font-imp-properties.lua % idem -% c:/data/develop/context/sources/font-imp-reorder.lua % idem -% c:/data/develop/context/sources/font-imp-spacekerns.lua % idem -% c:/data/develop/context/sources/font-imp-tex.lua % idem -% c:/data/develop/context/sources/font-imp-tweaks.lua % idem -% c:/data/develop/context/sources/font-imp-unicode.lua % idem +% c:/data/develop/context/sources/font-imp-combining.lua % shared +% c:/data/develop/context/sources/font-imp-dimensions.lua % shared +% c:/data/develop/context/sources/font-imp-italics.lua % shared +% c:/data/develop/context/sources/font-imp-notused.lua % shared +% c:/data/develop/context/sources/font-imp-properties.lua % shared +% c:/data/develop/context/sources/font-imp-reorder.lua % shared +% c:/data/develop/context/sources/font-imp-spacekerns.lua % shared +% c:/data/develop/context/sources/font-imp-tex.lua % shared +% c:/data/develop/context/sources/font-imp-tweaks.lua % shared +% c:/data/develop/context/sources/font-imp-unicode.lua % shared % c:/data/develop/context/sources/good-ctx.lua % c:/data/develop/context/sources/good-ini.lua @@ -749,26 +748,26 @@ 
% c:/data/develop/context/sources/java-ini.lua -% c:/data/develop/context/sources/lang-cnt.lua -% c:/data/develop/context/sources/lang-def.lua % these are data files -% c:/data/develop/context/sources/lang-txt.lua % these are data files +% c:/data/develop/context/sources/lang-cnt.lua % shared data file +% c:/data/develop/context/sources/lang-def.lua % shared data file +% c:/data/develop/context/sources/lang-txt.lua % shared data file % c:/data/develop/context/sources/lang-wrd.lua % c:/data/develop/context/sources/luat-exe.lua % c:/data/develop/context/sources/luat-iop.lua % c:/data/develop/context/sources/luat-mac.lua % will become lmt -% c:/data/develop/context/sources/lxml-aux.lua -% c:/data/develop/context/sources/lxml-css.lua -% c:/data/develop/context/sources/lxml-dir.lua -% c:/data/develop/context/sources/lxml-ent.lua -% c:/data/develop/context/sources/lxml-ini.lua -% c:/data/develop/context/sources/lxml-lpt.lua -% c:/data/develop/context/sources/lxml-mis.lua -% c:/data/develop/context/sources/lxml-sor.lua -% c:/data/develop/context/sources/lxml-tab.lua -% c:/data/develop/context/sources/lxml-tex.lua -% c:/data/develop/context/sources/lxml-xml.lua +% c:/data/develop/context/sources/lxml-aux.lua % the xml interfcace is rather stable +% c:/data/develop/context/sources/lxml-css.lua % and is also provided/used in lua so +% c:/data/develop/context/sources/lxml-dir.lua % might as well share these because they +% c:/data/develop/context/sources/lxml-ent.lua % are unlikely to change +% c:/data/develop/context/sources/lxml-ini.lua % +% c:/data/develop/context/sources/lxml-lpt.lua % +% c:/data/develop/context/sources/lxml-mis.lua % +% c:/data/develop/context/sources/lxml-sor.lua % +% c:/data/develop/context/sources/lxml-tab.lua % +% c:/data/develop/context/sources/lxml-tex.lua % +% c:/data/develop/context/sources/lxml-xml.lua % % c:/data/develop/context/sources/meta-blb.lua % c:/data/develop/context/sources/meta-fun.lua @@ -788,16 +787,16 @@ % 
c:/data/develop/context/sources/page-pst.lua % c:/data/develop/context/sources/publ-aut.lua % shared -% c:/data/develop/context/sources/publ-dat.lua -% c:/data/develop/context/sources/publ-fnd.lua -% c:/data/develop/context/sources/publ-inc.lua -% c:/data/develop/context/sources/publ-ini.lua -% c:/data/develop/context/sources/publ-jrn.lua -% c:/data/develop/context/sources/publ-oth.lua -% c:/data/develop/context/sources/publ-reg.lua -% c:/data/develop/context/sources/publ-sor.lua -% c:/data/develop/context/sources/publ-tra.lua -% c:/data/develop/context/sources/publ-usr.lua +% c:/data/develop/context/sources/publ-dat.lua % shared +% c:/data/develop/context/sources/publ-fnd.lua % shared +% c:/data/develop/context/sources/publ-inc.lua % shared +% c:/data/develop/context/sources/publ-ini.lua % shared +% c:/data/develop/context/sources/publ-jrn.lua % shared +% c:/data/develop/context/sources/publ-oth.lua % shared +% c:/data/develop/context/sources/publ-reg.lua % shared +% c:/data/develop/context/sources/publ-sor.lua % shared +% c:/data/develop/context/sources/publ-tra.lua % shared +% c:/data/develop/context/sources/publ-usr.lua % shared % c:/data/develop/context/sources/scrn-but.lua % c:/data/develop/context/sources/scrn-fld.lua @@ -828,6 +827,3 @@ % c:/data/develop/context/sources/trac-lmx.lua % c:/data/develop/context/sources/trac-par.lua % c:/data/develop/context/sources/trac-tex.lua - -% c:/data/develop/context/sources/typo-cln.lua -- wrong name for what it does -% c:/data/develop/context/sources/typo-dha.lua diff --git a/tex/context/base/mkxl/core-dat.lmt b/tex/context/base/mkxl/core-dat.lmt new file mode 100644 index 000000000..fd8aa0fb6 --- /dev/null +++ b/tex/context/base/mkxl/core-dat.lmt @@ -0,0 +1,225 @@ +if not modules then modules = { } end modules ['core-dat'] = { + version = 1.001, + comment = "companion to core-dat.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context 
related readme files" +} + +-- This module provides a (multipass) container for arbitrary data. It replaces the +-- twopass data mechanism. + +local tonumber, tostring, type = tonumber, tostring, type + +local context = context + +local trace_datasets = false trackers.register("job.datasets" , function(v) trace_datasets = v end) + +local report_dataset = logs.reporter("dataset") + +local allocate = utilities.storage.allocate +local settings_to_hash = utilities.parsers.settings_to_hash + +local texgetcount = tex.getcount +local texsetcount = tex.setcount + +local v_yes = interfaces.variables.yes + +local new_latelua = nodes.pool.latelua + +local implement = interfaces.implement + +local c_realpageno = tex.iscount("realpageno") + +local collected = allocate() +local tobesaved = allocate() + +local datasets = { + collected = collected, + tobesaved = tobesaved, +} + +job.datasets = datasets + +local function initializer() + collected = datasets.collected + tobesaved = datasets.tobesaved +end + +job.register('job.datasets.collected', tobesaved, initializer, nil) + +local sets = { } + +table.setmetatableindex(tobesaved, function(t,k) + local v = { } + t[k] = v + return v +end) + +table.setmetatableindex(sets, function(t,k) + local v = { + index = 0, + order = 0, + } + t[k] = v + return v +end) + +local function setdata(settings) + local name = settings.name + local tag = settings.tag + local data = settings.data + local list = tobesaved[name] + if settings.convert and type(data) == "string" then + data = settings_to_hash(data) + end + if type(data) ~= "table" then + data = { data = data } + end + if not tag then + tag = #list + 1 + else + tag = tonumber(tag) or tag -- autonumber saves keys + end + list[tag] = data + if settings.delay == v_yes then + local set = sets[name] + local index = set.index + 1 + set.index = index + data.index = index + data.order = index + data.realpage = texgetcount(c_realpageno) + if trace_datasets then + report_dataset("action %a, name %a, tag 
%a, index %a","assign delayed",name,tag,index) + end + elseif trace_datasets then + report_dataset("action %a, name %a, tag %a","assign immediate",name,tag) + end + return name, tag, data +end + +datasets.setdata = setdata + +function datasets.extend(name,tag) + if type(name) == "table" then + name, tag = name.name, name.tag + end + local set = sets[name] + local order = set.order + 1 + local realpage = texgetcount(c_realpageno) + set.order = order + local t = tobesaved[name][tag] + t.realpage = realpage + t.order = order + if trace_datasets then + report_dataset("action %a, name %a, tag %a, page %a, index %a","flush by order",name,tag,t.index or 0,order,realpage) + end +end + +function datasets.getdata(name,tag,key,default) + local t = collected[name] + if t == nil then + if trace_datasets then + report_dataset("error: unknown dataset, name %a",name) + end + elseif type(t) ~= "table" then + return t + else + t = t[tag] or t[tonumber(tag)] + if not t then + if trace_datasets then + report_dataset("error: unknown dataset, name %a, tag %a",name,tag) + end + elseif key then + return t[key] or default + else + return t + end + end + return default +end + +local function setdataset(settings) + settings.convert = true + local name, tag = setdata(settings) + if settings.delay ~= v_yes then + -- + else + context(new_latelua { action = job.datasets.extend, name = name, tag = tag }) + end +end + +local cache = table.setmetatableindex(function(t,k) + local v = table.load(k..".tuc") + if v then + v = v.job + if v then + v = v.datasets + if v then + v = v.collected + end + end + end + if not v then + v = { } + if trace_datasets then + report_dataset("error: unknown dataset job %a",k) + end + end + t[k] = v + return v +end) + +local function datasetvariable(name,tag,key,cache) + local t = (cache or collected)[name] + if t == nil then + if trace_datasets then + report_dataset("error: unknown dataset, name %a, tag %a, not passed to tex",name) -- no tag + end + elseif type(t) ~= 
"table" then + context(tostring(t)) + else + t = t and (t[tag] or t[tonumber(tag)]) + if not t then + if trace_datasets then + report_dataset("error: unknown dataset, name %a, tag %a, not passed to tex",name,tag) + end + elseif type(t) == "table" then + local s = t[key] + if type(s) ~= "table" then + context(tostring(s)) + elseif trace_datasets then + report_dataset("error: unknown dataset, name %a, tag %a, not passed to tex",name,tag) + end + end + end +end + +local function datasetvariablefromjob(jobnname,name,tag,key) + datasetvariable(name,tag,key,cache[jobnname]) +end + +implement { + name = "setdataset", + actions = setdataset, + arguments = { + { + { "name" }, + { "tag" }, + { "delay" }, + { "data" }, + } + } +} + +implement { + name = "datasetvariable", + actions = datasetvariable, + arguments = "3 strings", +} + +implement { + name = "datasetvariablefromjob", + arguments = { "string", "string", "string", "string" }, + actions = datasetvariablefromjob +} diff --git a/tex/context/base/mkxl/core-dat.mkxl b/tex/context/base/mkxl/core-dat.mkxl index ab40d874c..6d7d1bd14 100644 --- a/tex/context/base/mkxl/core-dat.mkxl +++ b/tex/context/base/mkxl/core-dat.mkxl @@ -1,6 +1,6 @@ %D \module %D [ file=core-dat, -%D version=20122.04.17, % replaces core-two from 1997.03.31, +%D version=2021.04.17, % replaces core-two from 1997.03.31, %D title=\CONTEXT\ Core Macros, %D subtitle=Multipass Datasets, %D author=Hans Hagen, @@ -42,7 +42,7 @@ \unprotect -\registerctxluafile{core-dat}{} +\registerctxluafile{core-dat}{autosuffix} \installcorenamespace{dataset} @@ -78,50 +78,4 @@ \expandafter\clf_datasetvariable \fi} -\installcorenamespace{pagestate} -\installcorenamespace{pagestatecounter} - -\installcommandhandler \??pagestate {pagestate} \??pagestate - -\def\syst_pagestates_allocate - {\expandafter\newinteger\csname\??pagestatecounter\currentpagestate\endcsname} - -\appendtoks - \syst_pagestates_allocate -\to \everydefinepagestate - -\setuppagestate - [\c!delay=\v!yes] - 
-\permanent\tolerant\protected\def\setpagestate[#1]#*[#2]% - {\begingroup - \edef\currentpagestate{#1}% - \ifcsname\??pagestatecounter\currentpagestate\endcsname - \scratchcounter\lastnamedcs - \advanceby\scratchcounter\plusone - \else - \scratchcounter\plusone - \syst_pagestates_allocate - \fi - \global\csname\??pagestatecounter\currentpagestate\endcsname\scratchcounter - \clf_setpagestate - name {\currentpagestate}% - tag {\ifparameter#2\or#2\else\number\scratchcounter\fi}% - delay {\pagestateparameter\c!delay}% - \relax - \endgroup} - -\permanent\protected\def\autosetpagestate#1% - {\setpagestate[#1]\relax} - -\permanent\def\autopagestatenumber#1{\begincsname\??pagestatecounter#1\endcsname} - -\permanent\def\pagestaterealpage #1#2{\clf_pagestaterealpage {#1}{#2}} -\permanent\def\setpagestaterealpageno#1#2{\clf_setpagestaterealpageno{#1}{#2}} -\permanent\def\pagestaterealpageorder#1#2{\clf_pagestaterealpageorder{#1}#2\relax} - -\permanent\def\autopagestaterealpage #1{\clf_pagestaterealpage {#1}{\number\autopagestatenumber{#1}}} -\permanent\def\setautopagestaterealpageno#1{\clf_setpagestaterealpageno{#1}{\number\autopagestatenumber{#1}}} -\permanent\def\autopagestaterealpageorder#1{\clf_pagestaterealpageorder{#1}\numexpr\autopagestatenumber{#1}\relax} - \protect diff --git a/tex/context/base/mkxl/core-pag.lmt b/tex/context/base/mkxl/core-pag.lmt new file mode 100644 index 000000000..219171d42 --- /dev/null +++ b/tex/context/base/mkxl/core-pag.lmt @@ -0,0 +1,160 @@ +if not modules then modules = { } end modules ['core-dat'] = { + version = 1.001, + comment = "companion to core-dat.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This module provides a (multipass) container for arbitrary data. It replaces the +-- twopass data mechanism. 
+ +local tonumber = tonumber + +local context = context +local ctx_latelua = context.latelua + +local trace_pagestates = false trackers.register("job.pagestates", function(v) trace_pagestates = v end) + +local report_pagestate = logs.reporter("pagestate") + +local allocate = utilities.storage.allocate + +local texgetcount = tex.getcount +local texsetcount = tex.setcount + +local new_latelua = nodes.pool.latelua + +local implement = interfaces.implement +local getnamespace = interfaces.getnamespace + +local c_realpageno = tex.iscount("realpageno") +local c_realpagestateno = tex.iscount("realpagestateno") + +local collected = allocate() +local tobesaved = allocate() + +local pagestates = { + collected = collected, + tobesaved = tobesaved, +} + +job.pagestates = pagestates + +local function initializer() + collected = pagestates.collected + tobesaved = pagestates.tobesaved +end + +job.register("job.pagestates.collected", tobesaved, initializer, nil) + +table.setmetatableindex(tobesaved, "table") + +local function setstate(settings) + local name = settings.name + local tag = settings.tag + local list = tobesaved[name] + if not tag then + tag = #list + 1 + else + tag = tonumber(tag) or tag -- autonumber saves keys + end + local realpage = texgetcount(c_realpageno) + local data = realpage + list[tag] = data + if trace_pagestates then + report_pagestate("action %a, name %a, tag %a, preset %a","set",name,tag,realpage) + end + return name, tag, data +end + +local function extend(name,tag) + local realpage = texgetcount(c_realpageno) + if trace_pagestates then + report_pagestate("action %a, name %a, tag %a, preset %a","synchronize",name,tag,realpage) + end + tobesaved[name][tag] = realpage +end + +local function realpage(name,tag,default) + local t = collected[name] + if t then + t = t[tag] or t[tonumber(tag)] + if t then + return tonumber(t or default) + elseif trace_pagestates then + report_pagestate("error: unknown dataset, name %a, tag %a",name,tag) + end + elseif 
trace_pagestates then + report_pagestate("error: unknown dataset, name %a, tag %a",name) -- nil + end + return default +end + +local function realpageorder(name,tag) + local t = collected[name] + if t then + local p = t[tag] + if p then + local n = 1 + for i=tag-1,1,-1 do + if t[i] == p then + n = n +1 + end + end + return n + end + end + return 0 +end + +pagestates.setstate = setstate +pagestates.extend = extend +pagestates.realpage = realpage +pagestates.realpageorder = realpageorder + +function pagestates.countervalue(name) + return name and texgetcount(getnamespace("pagestatecounter") .. name) or 0 +end + +local function setpagestate(settings) + local name, tag = setstate(settings) + -- context(new_latelua(function() extend(name,tag) end)) + ctx_latelua(function() extend(name,tag) end) +end + +local function setpagestaterealpageno(name,tag) + local t = collected[name] + t = t and (t[tag] or t[tonumber(tag)]) + texsetcount("realpagestateno",t or texgetcount(c_realpageno)) +end + +implement { + name = "setpagestate", + actions = setpagestate, + arguments = { + { + { "name" }, + { "tag" }, + { "delay" }, + } + } +} + +implement { + name = "pagestaterealpage", + actions = { realpage, context }, + arguments = "2 strings", +} + +implement { + name = "setpagestaterealpageno", + actions = setpagestaterealpageno, + arguments = "2 strings", +} + +implement { + name = "pagestaterealpageorder", + actions = { realpageorder, context }, + arguments = { "string", "integer" } +} diff --git a/tex/context/base/mkxl/core-pag.mkxl b/tex/context/base/mkxl/core-pag.mkxl new file mode 100644 index 000000000..43b398b16 --- /dev/null +++ b/tex/context/base/mkxl/core-pag.mkxl @@ -0,0 +1,68 @@ +%D \module +%D [ file=core-pag, +%D version=2023.03.23, % moved from core-dat +%D title=\CONTEXT\ Core Macros, +%D subtitle=Multipass Pagestate, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] +%C +%C This module is part of the \CONTEXT\ 
macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. + +\writestatus{loading}{ConTeXt Core Macros / Multipass Pagestate} + +\unprotect + +\newinteger\realpagestateno + +\registerctxluafile{core-pag}{autosuffix} + +\installcorenamespace{pagestate} +\installcorenamespace{pagestatecounter} + +\installcommandhandler \??pagestate {pagestate} \??pagestate + +\def\syst_pagestates_allocate + {\expandafter\newinteger\csname\??pagestatecounter\currentpagestate\endcsname} + +\appendtoks + \syst_pagestates_allocate +\to \everydefinepagestate + +\setuppagestate + [\c!delay=\v!yes] + +\permanent\tolerant\protected\def\setpagestate[#1]#*[#2]% + {\begingroup + \edef\currentpagestate{#1}% + \ifcsname\??pagestatecounter\currentpagestate\endcsname + \scratchcounter\lastnamedcs + \advanceby\scratchcounter\plusone + \else + \scratchcounter\plusone + \syst_pagestates_allocate + \fi + \global\csname\??pagestatecounter\currentpagestate\endcsname\scratchcounter + \clf_setpagestate + name {\currentpagestate}% + tag {\ifparameter#2\or#2\else\number\scratchcounter\fi}% + delay {\pagestateparameter\c!delay}% + \relax + \endgroup} + +\permanent\protected\def\autosetpagestate#1% + {\setpagestate[#1]\relax} + +\permanent\def\autopagestatenumber#1{\begincsname\??pagestatecounter#1\endcsname} + +\permanent\def\pagestaterealpage #1#2{\clf_pagestaterealpage {#1}{#2}} +\permanent\def\setpagestaterealpageno#1#2{\clf_setpagestaterealpageno{#1}{#2}} +\permanent\def\pagestaterealpageorder#1#2{\clf_pagestaterealpageorder{#1}#2\relax} + +\permanent\def\autopagestaterealpage #1{\clf_pagestaterealpage {#1}{\number\autopagestatenumber{#1}}} +\permanent\def\setautopagestaterealpageno#1{\clf_setpagestaterealpageno{#1}{\number\autopagestatenumber{#1}}} +\permanent\def\autopagestaterealpageorder#1{\clf_pagestaterealpageorder{#1}\numexpr\autopagestatenumber{#1}\relax} + +\protect diff --git a/tex/context/base/mkxl/core-two.lmt b/tex/context/base/mkxl/core-two.lmt new file 
mode 100644 index 000000000..7ea42374e --- /dev/null +++ b/tex/context/base/mkxl/core-two.lmt @@ -0,0 +1,210 @@ +if not modules then modules = { } end modules ['core-two'] = { + version = 1.001, + comment = "companion to core-two.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This is actually one of the oldest MkIV files and basically a port of MkII but +-- the old usage has long be phased out. Also, the public part is now handled by +-- datasets which makes this a more private store. + +-- local next = next +-- local remove, concat = table.remove, table.concat + +local allocate = utilities.storage.allocate + +local collected = allocate() +local tobesaved = allocate() + +local jobpasses = { + collected = collected, + tobesaved = tobesaved, +} + +job.passes = jobpasses + +local function initializer() + collected = jobpasses.collected + tobesaved = jobpasses.tobesaved +end + +job.register('job.passes.collected', tobesaved, initializer, nil) + +function jobpasses.getcollected(id) + return collected[id] or { } +end + +function jobpasses.gettobesaved(id) + local t = tobesaved[id] + if not t then + t = { } + tobesaved[id] = t + end + return t +end + +-- local function define(id) +-- local p = tobesaved[id] +-- if not p then +-- p = { } +-- tobesaved[id] = p +-- end +-- return p +-- end +-- +-- local function save(id,str,index) +-- local jti = define(id) +-- if index then +-- jti[index] = str +-- else +-- jti[#jti+1] = str +-- end +-- end +-- +-- local function savetagged(id,tag,str) +-- local jti = define(id) +-- jti[tag] = str +-- end +-- +-- local function getdata(id,index,default) +-- local jti = collected[id] +-- local value = jti and jti[index] +-- return value ~= "" and value or default or "" +-- end +-- +-- local function getfield(id,index,tag,default) +-- local jti = collected[id] +-- jti = jti and jti[index] +-- local value = jti and jti[tag] 
+-- return value ~= "" and value or default or "" +-- end +-- +-- local function getcollected(id) +-- return collected[id] or { } +-- end +-- +-- local function gettobesaved(id) +-- return define(id) +-- end +-- +-- local function get(id) +-- local jti = collected[id] +-- if jti and #jti > 0 then +-- return remove(jti,1) +-- end +-- end +-- +-- local function first(id) +-- local jti = collected[id] +-- return jti and jti[1] +-- end +-- +-- local function last(id) +-- local jti = collected[id] +-- return jti and jti[#jti] +-- end +-- +-- local function find(id,n) +-- local jti = collected[id] +-- return jti and jti[n] or nil +-- end +-- +-- local function count(id) +-- local jti = collected[id] +-- return jti and #jti or 0 +-- end +-- +-- local function list(id) +-- local jti = collected[id] +-- if jti then +-- return concat(jti,',') +-- end +-- end +-- +-- local function inlist(id,str) +-- local jti = collected[id] +-- if jti then +-- for _, v in next, jti do +-- if v == str then +-- return true +-- end +-- end +-- end +-- return false +-- end +-- +-- local check = first +-- +-- jobpasses.define = define +-- jobpasses.save = save +-- jobpasses.savetagged = savetagged +-- jobpasses.getdata = getdata +-- jobpasses.getfield = getfield +-- jobpasses.getcollected = getcollected +-- jobpasses.gettobesaved = gettobesaved +-- jobpasses.get = get +-- jobpasses.first = first +-- jobpasses.last = last +-- jobpasses.find = find +-- jobpasses.list = list +-- jobpasses.count = count +-- jobpasses.check = check +-- jobpasses.inlist = inlist +-- +-- -- interface +-- +-- local implement = interfaces.implement +-- +-- implement { name = "gettwopassdata", actions = { get, context }, arguments = "string" } +-- implement { name = "getfirsttwopassdata",actions = { first, context }, arguments = "string" } +-- implement { name = "getlasttwopassdata", actions = { last, context }, arguments = "string" } +-- implement { name = "findtwopassdata", actions = { find, context }, arguments = "2 
strings" } +-- implement { name = "gettwopassdatalist", actions = { list, context }, arguments = "string" } +-- implement { name = "counttwopassdata", actions = { count, context }, arguments = "string" } +-- implement { name = "checktwopassdata", actions = { check, context }, arguments = "string" } +-- +-- implement { +-- name = "definetwopasslist", +-- actions = define, +-- arguments = "string" +-- } +-- +-- implement { +-- name = "savetwopassdata", +-- actions = save, +-- arguments = "2 strings", +-- } +-- +-- implement { +-- name = "savetaggedtwopassdata", +-- actions = savetagged, +-- arguments = "3 strings", +-- } +-- +-- implement { +-- name = "doifelseintwopassdata", +-- actions = { inlist, commands.doifelse }, +-- arguments = "2 strings", +-- } +-- +-- -- local ctx_latelua = context.latelua +-- +-- -- implement { +-- -- name = "lazysavetwopassdata", +-- -- arguments = "3 strings", +-- -- public = true, +-- -- actions = function(a,b,c) +-- -- ctx_latelua(function() save(a,c) end) +-- -- end, +-- -- } +-- +-- -- implement { +-- -- name = "lazysavetaggedtwopassdata", +-- -- arguments = "3 strings", +-- -- public = true, +-- -- actions = function(a,b,c) +-- -- ctx_latelua(function() savetagged(a,b,c) end) +-- -- end, +-- -- } diff --git a/tex/context/base/mkxl/core-two.mkxl b/tex/context/base/mkxl/core-two.mkxl index 38f03c7c4..10a7eec9e 100644 --- a/tex/context/base/mkxl/core-two.mkxl +++ b/tex/context/base/mkxl/core-two.mkxl @@ -1,6 +1,6 @@ %D \module %D [ file=core-two, % moved from core-uti -%D version=1997.03.31, +%D version=1997.03.31, % stripped down 2023-03-21 %D title=\CONTEXT\ Core Macros, %D subtitle=Two Pass Data, %D author=Hans Hagen, @@ -11,102 +11,110 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. -\writestatus{loading}{ConTeXt Core Macros / Two Pass Data} +%D The public interface is replaced by datasets and two pass data is now private +%D to the engine. For the moment we keep some commands commented. 
The unused +%D (second) argument is an inheritance from \MKII. If needed we can bring back +%D a compatible interface. -%D This is a rather old mechanism which has not changed much over time, apart from -%D adding a few more selectors. This code used to be part of \type {core-uti}. The -%D following examples demonstrate the interface. -%D -%D \startbuffer -%D \definetwopasslist{test-1} -%D -%D \gettwopassdatalist{test-1} [\twopassdatalist=] -%D \checktwopassdata {test-1} [\twopassdata=] -%D \checktwopassdata {test-1} [\twopassdata=] -%D \gettwopassdata {test-1} [\twopassdata=] -%D \gettwopassdata {test-1} [\twopassdata=] -%D -%D \definetwopasslist{test-2} -%D -%D \lazysavetwopassdata{test-2}{1}{x} -%D \lazysavetwopassdata{test-2}{2}{y} -%D \lazysavetwopassdata{test-2}{3}{z} -%D -%D \gettwopassdatalist{test-2} [\twopassdatalist=x,y,z] -%D \checktwopassdata {test-2} [\twopassdata=x] -%D \checktwopassdata {test-2} [\twopassdata=x] -%D \gettwopassdata {test-2} [\twopassdata=x] -%D \gettwopassdata {test-2} [\twopassdata=y] -%D \gettwopassdata {test-2} [\twopassdata=z] -%D \gettwopassdata {test-2} [\twopassdata=] -%D -%D \definetwopasslist{test-3} -%D -%D \lazysavetaggedtwopassdata{test-3}{1}{x}{a} -%D \lazysavetaggedtwopassdata{test-3}{2}{y}{b} -%D \lazysavetaggedtwopassdata{test-3}{3}{z}{c} -%D -%D \findtwopassdata{test-3}{x} [\twopassdata=a] -%D \findtwopassdata{test-3}{y} [\twopassdata=b] -%D \findtwopassdata{test-3}{z} [\twopassdata=c] -%D \findtwopassdata{test-3}{w} [\twopassdata=] -%D -%D \definetwopasslist{test-4} -%D -%D \lazysavetwopassdata{test-4}{1}{A} -%D \lazysavetwopassdata{test-4}{2}{B} -%D \lazysavetwopassdata{test-4}{3}{C} -%D -%D \getfirsttwopassdata{test-4} [\twopassdata=A] -%D \getlasttwopassdata {test-4} [\twopassdata=C] -%D \getfirsttwopassdata{test-4} [\twopassdata=A] -%D \getlasttwopassdata {test-4} [\twopassdata=C] -%D \getfromtwopassdata {test-4}{1} [\twopassdata=A] -%D \getfromtwopassdata {test-4}{3} [\twopassdata=C] -%D \getfromtwopassdata 
{test-4}{2} [\twopassdata=B] -%D \stopbuffer -%D -%D \getbuffer \typebuffer +\writestatus{loading}{ConTeXt Core Macros / Two Pass Data} \unprotect -\registerctxluafile{core-two}{} - -\permanent\def\immediatesavetwopassdata #1#2#3{\normalexpanded{\noexpand\clf_savetwopassdata{#1}{#3}}} -\permanent\def \lazysavetwopassdata #1#2#3{\normalexpanded{\noexpand\ctxlatecommand{savetwopassdata("#1","#3")}}} -\permanent\let \savetwopassdata \lazysavetwopassdata -\permanent\def \savetaggedtwopassdata#1#2#3#4{\normalexpanded{\noexpand\clf_savetaggedtwopassdata{#1}{#3}{#4}}} -\permanent\def\lazysavetaggedtwopassdata#1#2#3#4{\normalexpanded{\noexpand\ctxlatecommand{savetaggedtwopassdata("#1",'#3',"#4")}}} - -% temp hack: needs a proper \starteverytimeluacode - -\setfalse\twopassdatafound - -\mutable\lettonothing\twopassdata -\mutable\lettonothing\twopassdatalist - -\mutable\let\noftwopassitems\!!zeropoint - -\def\syst_twopass_check % can be delegated to lua once obsolete is gone - {\ifempty\twopassdata - \setfalse\twopassdatafound - \else - \settrue\twopassdatafound - \fi} - -\permanent\protected\def\definetwopasslist #1{\clf_definetwopasslist{#1}} -\permanent\protected\def\gettwopassdata #1{\edef\twopassdata {\clf_gettwopassdata {#1}}\syst_twopass_check} -\permanent\protected\def\checktwopassdata #1{\edef\twopassdata {\clf_checktwopassdata {#1}}\syst_twopass_check} -\permanent\protected\def\findtwopassdata #1#2{\edef\twopassdata {\clf_findtwopassdata {#1}{#2}}\syst_twopass_check} -\permanent\protected\def\getfirsttwopassdata #1{\edef\twopassdata {\clf_getfirsttwopassdata {#1}}\syst_twopass_check} -\permanent\protected\def\getlasttwopassdata #1{\edef\twopassdata {\clf_getlasttwopassdata {#1}}% - \edef\noftwopassitems{\clf_counttwopassdata {#1}}\syst_twopass_check} -\permanent\protected\def\getnamedtwopassdatalist#1#2{\edef #1{\clf_gettwopassdatalist {#2}}} -\permanent\protected\def\gettwopassdatalist #1{\edef\twopassdatalist{\clf_gettwopassdatalist {#1}}} - 
-\permanent\protected\def\doifelseintwopassdata #1#2{\clf_doifelseintwopassdata{#1}{#2}} +\registerctxluafile{core-two}{autosuffix} -\aliased\let\doifintwopassdataelse\doifelseintwopassdata -\aliased\let\getfromtwopassdata \findtwopassdata +% %D This is a rather old mechanism which has not changed much over time, apart from +% %D adding a few more selectors. This code used to be part of \type {core-uti}. The +% %D following examples demonstrate the interface. +% %D +% %D \startbuffer +% %D \definetwopasslist{test-1} +% %D +% %D \gettwopassdatalist{test-1} [\twopassdatalist=] +% %D \checktwopassdata {test-1} [\twopassdata=] +% %D \checktwopassdata {test-1} [\twopassdata=] +% %D \gettwopassdata {test-1} [\twopassdata=] +% %D \gettwopassdata {test-1} [\twopassdata=] +% %D +% %D \definetwopasslist{test-2} +% %D +% %D \lazysavetwopassdata{test-2}{1}{x} +% %D \lazysavetwopassdata{test-2}{2}{y} +% %D \lazysavetwopassdata{test-2}{3}{z} +% %D +% %D \gettwopassdatalist{test-2} [\twopassdatalist=x,y,z] +% %D \checktwopassdata {test-2} [\twopassdata=x] +% %D \checktwopassdata {test-2} [\twopassdata=x] +% %D \gettwopassdata {test-2} [\twopassdata=x] +% %D \gettwopassdata {test-2} [\twopassdata=y] +% %D \gettwopassdata {test-2} [\twopassdata=z] +% %D \gettwopassdata {test-2} [\twopassdata=] +% %D +% %D \definetwopasslist{test-3} +% %D +% %D \lazysavetaggedtwopassdata{test-3}{1}{x}{a} +% %D \lazysavetaggedtwopassdata{test-3}{2}{y}{b} +% %D \lazysavetaggedtwopassdata{test-3}{3}{z}{c} +% %D +% %D \findtwopassdata{test-3}{x} [\twopassdata=a] +% %D \findtwopassdata{test-3}{y} [\twopassdata=b] +% %D \findtwopassdata{test-3}{z} [\twopassdata=c] +% %D \findtwopassdata{test-3}{w} [\twopassdata=] +% %D +% %D \definetwopasslist{test-4} +% %D +% %D \lazysavetwopassdata{test-4}{1}{A} +% %D \lazysavetwopassdata{test-4}{2}{B} +% %D \lazysavetwopassdata{test-4}{3}{C} +% %D +% %D \getfirsttwopassdata{test-4} [\twopassdata=A] +% %D \getlasttwopassdata {test-4} [\twopassdata=C] +% %D 
\getfirsttwopassdata{test-4} [\twopassdata=A] +% %D \getlasttwopassdata {test-4} [\twopassdata=C] +% %D \getfromtwopassdata {test-4}{1} [\twopassdata=A] +% %D \getfromtwopassdata {test-4}{3} [\twopassdata=C] +% %D \getfromtwopassdata {test-4}{2} [\twopassdata=B] +% %D \stopbuffer +% %D +% %D \getbuffer \typebuffer +% +% %D The next code can be simplified (read: defined at the \LUA\ end) but we never use this +% %D mechanism which has been replaced by datasets so it's not worth the effort. +% +% \permanent\def\immediatesavetwopassdata #1#2#3{\normalexpanded{\noexpand\clf_savetwopassdata{#1}{#3}}} +% \permanent\def \lazysavetwopassdata #1#2#3{\normalexpanded{\noexpand\ctxlatecommand{savetwopassdata("#1","#3")}}} +% \permanent\let \savetwopassdata \lazysavetwopassdata +% \permanent\def \savetaggedtwopassdata#1#2#3#4{\normalexpanded{\noexpand\clf_savetaggedtwopassdata{#1}{#3}{#4}}} +% \permanent\def\lazysavetaggedtwopassdata#1#2#3#4{\normalexpanded{\noexpand\ctxlatecommand{savetaggedtwopassdata("#1","#3","#4")}}} +% +% % temp hack: needs a proper \starteverytimeluacode +% +% \setfalse\twopassdatafound +% +% \mutable\lettonothing\twopassdata +% \mutable\lettonothing\twopassdatalist +% +% \mutable\let\noftwopassitems\!!zeropoint +% +% \def\syst_twopass_check % can be delegated to lua once obsolete is gone +% {\ifempty\twopassdata +% \setfalse\twopassdatafound +% \else +% \settrue\twopassdatafound +% \fi} +% +% \permanent\protected\def\definetwopasslist #1{\clf_definetwopasslist{#1}} +% \permanent\protected\def\gettwopassdata #1{\edef\twopassdata {\clf_gettwopassdata {#1}}\syst_twopass_check} +% \permanent\protected\def\checktwopassdata #1{\edef\twopassdata {\clf_checktwopassdata {#1}}\syst_twopass_check} +% \permanent\protected\def\findtwopassdata #1#2{\edef\twopassdata {\clf_findtwopassdata {#1}{#2}}\syst_twopass_check} +% \permanent\protected\def\getfirsttwopassdata #1{\edef\twopassdata {\clf_getfirsttwopassdata {#1}}\syst_twopass_check} +% 
\permanent\protected\def\getlasttwopassdata #1{\edef\twopassdata {\clf_getlasttwopassdata {#1}}% +% \edef\noftwopassitems{\clf_counttwopassdata {#1}}\syst_twopass_check} +% \permanent\protected\def\getnamedtwopassdatalist#1#2{\edef #1{\clf_gettwopassdatalist {#2}}} +% \permanent\protected\def\gettwopassdatalist #1{\edef\twopassdatalist{\clf_gettwopassdatalist {#1}}} +% +% \permanent\protected\def\doifelseintwopassdata #1#2{\clf_doifelseintwopassdata{#1}{#2}} +% +% \aliased\let\doifintwopassdataelse\doifelseintwopassdata +% \aliased\let\getfromtwopassdata \findtwopassdata \protect \endinput diff --git a/tex/context/base/mkxl/core-uti.lmt b/tex/context/base/mkxl/core-uti.lmt index 966428b36..e4b6606e3 100644 --- a/tex/context/base/mkxl/core-uti.lmt +++ b/tex/context/base/mkxl/core-uti.lmt @@ -6,16 +6,13 @@ if not modules then modules = { } end modules ['core-uti'] = { license = "see context related readme files" } --- todo: keep track of changes here (hm, track access, and only true when --- accessed and changed) - ---[[ldx-- -<p>A utility file has always been part of <l n='context'/> and with -the move to <l n='luatex'/> we also moved a lot of multi-pass info -to a <l n='lua'/> table. Instead of loading a <l n='tex'/> based -utility file under different setups, we now load a table once. This -saves much runtime but at the cost of more memory usage.</p> ---ldx]]-- +-- A utility file has always been part of ConTeXt and with the move to LuaTeX we +-- also moved a lot of multi-pass info to a Lua table. Instead of loading a TeX +-- based utility file under different setups, we now load a table once. This saves +-- much runtime but at the cost of more memory usage. +-- +-- In the meantime the overhead is a bit more due to the amount of data being saved +-- and more agressive compacting. 
local math = math local next, type, tostring, tonumber, setmetatable, load = next, type, tostring, tonumber, setmetatable, load @@ -46,14 +43,9 @@ local job = job job.version = 1.33 job.packversion = 1.02 --- some day we will implement loading of other jobs and then we need --- job.jobs - ---[[ldx-- -<p>Variables are saved using in the previously defined table and passed -onto <l n='tex'/> using the following method. Of course one can also -directly access the variable using a <l n='lua'/> call.</p> ---ldx]]-- +-- Variables are saved using in the previously defined table and passed onto TeX +-- using the following method. Of course one can also directly access the variable +-- using a Lua call. local savelist, comment = { }, { } @@ -382,6 +374,12 @@ function job.load(filename) end function job.loadother(filename) + local jobname = environment.jobname + if filename == jobname then + return + else + report_passes("integrating list %a into %a",filename,jobname) + end statistics.starttiming(loadedfiles) filename = file.addsuffix(filename,"tuc") local unpacked = othercache[filename] diff --git a/tex/context/base/mkxl/file-mod.lmt b/tex/context/base/mkxl/file-mod.lmt index d10abf533..567387a3a 100644 --- a/tex/context/base/mkxl/file-mod.lmt +++ b/tex/context/base/mkxl/file-mod.lmt @@ -6,17 +6,11 @@ if not modules then modules = { } end modules ['file-mod'] = { license = "see context related readme files" } --- This module will be redone! For instance, the prefixes will move to data-* --- as they arr sort of generic along with home:// etc/. - --- context is not defined yet! todo! (we need to load tupp-fil after cld) --- todo: move startreadingfile to lua and push regime there - ---[[ldx-- -<p>It's more convenient to manipulate filenames (paths) in -<l n='lua'/> than in <l n='tex'/>. These methods have counterparts -at the <l n='tex'/> side.</p> ---ldx]]-- +-- This module will be redone! 
For instance, the prefixes will move to data-* as +-- they are sort of generic along with home:// etc/. +-- +-- It is more convenient to manipulate filenames (paths) in Lua than in TeX. The +-- methods below have counterparts at the TeX end. local format, find, concat, tonumber = string.format, string.find, table.concat, tonumber local sortedhash = table.sortedhash diff --git a/tex/context/base/mkxl/font-con.lmt b/tex/context/base/mkxl/font-con.lmt index 073af7d2e..5a887d61d 100644 --- a/tex/context/base/mkxl/font-con.lmt +++ b/tex/context/base/mkxl/font-con.lmt @@ -22,11 +22,9 @@ local trace_scaling = false trackers.register("fonts.scaling", function(v) local report_defining = logs.reporter("fonts","defining") --- watch out: no negative depths and negative eights permitted in regular fonts - ---[[ldx-- -<p>Here we only implement a few helper functions.</p> ---ldx]]-- +-- Watch out: no negative depths and negative heights are permitted in regular +-- fonts. Also, the code in LMTX is a bit different. Here we only implement a +-- few helper functions. local fonts = fonts local constructors = fonts.constructors or { } @@ -53,11 +51,9 @@ constructors.loadedfonts = loadedfonts ----- scalecommands = fonts.helpers.scalecommands ---[[ldx-- -<p>We need to normalize the scale factor (in scaled points). This has to -do with the fact that <l n='tex'/> uses a negative multiple of 1000 as -a signal for a font scaled based on the design size.</p> ---ldx]]-- +-- We need to normalize the scale factor (in scaled points). This has to do with the +-- fact that TeX uses a negative multiple of 1000 as a signal for a font scaled +-- based on the design size. local factors = { pt = 65536.0, @@ -112,33 +108,29 @@ function constructors.getmathparameter(tfmdata,name) end end ---[[ldx-- -<p>Beware, the boundingbox is passed as reference so we may not overwrite it -in the process; numbers are of course copies. Here 65536 equals 1pt. 
(Due to -excessive memory usage in CJK fonts, we no longer pass the boundingbox.)</p> ---ldx]]-- - --- The scaler is only used for otf and afm and virtual fonts. If a virtual font has italic --- correction make sure to set the hasitalics flag. Some more flags will be added in the --- future. - ---[[ldx-- -<p>The reason why the scaler was originally split, is that for a while we experimented -with a helper function. However, in practice the <l n='api'/> calls are too slow to -make this profitable and the <l n='lua'/> based variant was just faster. A days -wasted day but an experience richer.</p> ---ldx]]-- +-- Beware, the boundingbox is passed as reference so we may not overwrite it in the +-- process; numbers are of course copies. Here 65536 equals 1pt. (Due to excessive +-- memory usage in CJK fonts, we no longer pass the boundingbox.) +-- +-- The scaler is only used for OTF and AFM and virtual fonts. If a virtual font has +-- italic correction make sure to set the hasitalics flag. Some more flags will be +-- added in the future. +-- +-- The reason why the scaler was originally split, is that for a while we +-- experimented with a helper function. However, in practice the API calls are too +-- slow to make this profitable and the Lua based variant was just faster. A days +-- wasted day but an experience richer. 
-- experimental, sharing kerns (unscaled and scaled) saves memory -- local sharedkerns, basekerns = constructors.check_base_kerns(tfmdata) -- loop over descriptions (afm and otf have descriptions, tfm not) -- there is no need (yet) to assign a value to chr.tonunicode - +-- -- constructors.prepare_base_kerns(tfmdata) -- optimalization - --- we have target.name=metricfile and target.fullname=RealName and target.filename=diskfilename --- when collapsing fonts, luatex looks as both target.name and target.fullname as ttc files --- can have multiple subfonts +-- +-- We have target.name = metricfile and target.fullname = RealName and +-- target.filename = diskfilename when collapsing fonts. LuaTeX looks at both +-- target.name and target.fullname because TTC files can have multiple subfonts. function constructors.calculatescale(tfmdata,scaledpoints) -- implemented in font-ctx.lmt @@ -1008,9 +1000,7 @@ function constructors.finalize(tfmdata) return tfmdata end ---[[ldx-- -<p>A unique hash value is generated by:</p> ---ldx]]-- +-- A unique hash value is generated by: local hashmethods = { } constructors.hashmethods = hashmethods @@ -1069,13 +1059,11 @@ hashmethods.normal = function(list) end end ---[[ldx-- -<p>In principle we can share tfm tables when we are in need for a font, but then -we need to define a font switch as an id/attr switch which is no fun, so in that -case users can best use dynamic features ... so, we will not use that speedup. Okay, -when we get rid of base mode we can optimize even further by sharing, but then we -loose our testcases for <l n='luatex'/>.</p> ---ldx]]-- +-- In principle we can share tfm tables when we are in need for a font, but then we +-- need to define a font switch as an id/attr switch which is no fun, so in that +-- case users can best use dynamic features ... so, we will not use that speedup. +-- Okay, when we get rid of base mode we can optimize even further by sharing, but +-- then we loose our testcases for LuaTeX. 
function constructors.hashinstance(specification,force) -- implemented in font-ctx.lmt @@ -1407,10 +1395,7 @@ do end ---[[ldx-- -<p>We need to check for default features. For this we provide -a helper function.</p> ---ldx]]-- +-- We need to check for default features. For this we provide a helper function. function constructors.checkedfeatures(what,features) local defaults = handlers[what].features.defaults diff --git a/tex/context/base/mkxl/font-ctx.lmt b/tex/context/base/mkxl/font-ctx.lmt index 77953d64a..1d59ad728 100644 --- a/tex/context/base/mkxl/font-ctx.lmt +++ b/tex/context/base/mkxl/font-ctx.lmt @@ -529,19 +529,13 @@ do end ---[[ldx-- -<p>So far we haven't really dealt with features (or whatever we want -to pass along with the font definition. We distinguish the following -situations:</p> -situations:</p> - -<code> -name:xetex like specs -name@virtual font spec -name*context specification -</code> ---ldx]]-- - +-- So far we haven't really dealt with features (or whatever we want to pass along +-- with the font definition. We distinguish the following situations: +-- +-- name:xetex like specs +-- name@virtual font spec +-- name*context specification +-- -- Currently fonts are scaled while constructing the font, so we have to do scaling -- of commands in the vf at that point using e.g. "local scale = g.parameters.factor -- or 1" after all, we need to work with copies anyway and scaling needs to be done @@ -2269,10 +2263,8 @@ dimenfactors.em = nil dimenfactors["%"] = nil dimenfactors.pct = nil ---[[ldx-- -<p>Before a font is passed to <l n='tex'/> we scale it. Here we also need -to scale virtual characters.</p> ---ldx]]-- +-- Before a font is passed to TeX we scale it. Here we also need to scale virtual +-- characters. 
do diff --git a/tex/context/base/mkxl/font-def.lmt b/tex/context/base/mkxl/font-def.lmt index 6afeeb474..ea6b2d0c0 100644 --- a/tex/context/base/mkxl/font-def.lmt +++ b/tex/context/base/mkxl/font-def.lmt @@ -24,10 +24,9 @@ trackers.register("fonts.loading", "fonts.defining", "otf.loading", "afm.loading local report_defining = logs.reporter("fonts","defining") ---[[ldx-- -<p>Here we deal with defining fonts. We do so by intercepting the -default loader that only handles <l n='tfm'/>.</p> ---ldx]]-- +-- Here we deal with defining fonts. We do so by intercepting the default loader +-- that only handles TFM files. Although, we started out that way but in the +-- meantime we can hardly speak of TFM any more. local nextfont = font.nextid @@ -55,25 +54,18 @@ local designsizes = constructors.designsizes local resolvefile = fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end ---[[ldx-- -<p>We hardly gain anything when we cache the final (pre scaled) -<l n='tfm'/> table. But it can be handy for debugging, so we no -longer carry this code along. Also, we now have quite some reference -to other tables so we would end up with lots of catches.</p> ---ldx]]-- - ---[[ldx-- -<p>We can prefix a font specification by <type>name:</type> or -<type>file:</type>. The first case will result in a lookup in the -synonym table.</p> - -<typing> -[ name: | file: ] identifier [ separator [ specification ] ] -</typing> - -<p>The following function split the font specification into components -and prepares a table that will move along as we proceed.</p> ---ldx]]-- +-- We hardly gain anything when we cache the final (pre scaled) TFM table. But it +-- can be handy for debugging, so we no longer carry this code along. Also, we now +-- have quite some reference to other tables so we would end up with lots of +-- catches. +-- +-- We can prefix a font specification by "name:" or "file:". The first case will +-- result in a lookup in the synonym table. 
+-- +-- [ name: | file: ] identifier [ separator [ specification ] ] +-- +-- The following function split the font specification into components and prepares +-- a table that will move along as we proceed. -- beware, we discard additional specs -- @@ -166,9 +158,7 @@ do end ---[[ldx-- -<p>We can resolve the filename using the next function:</p> ---ldx]]-- +-- We can resolve the filename using the next function: definers.resolvers = definers.resolvers or { } local resolvers = definers.resolvers @@ -261,23 +251,17 @@ function definers.resolve(specification) return specification end ---[[ldx-- -<p>The main read function either uses a forced reader (as determined by -a lookup) or tries to resolve the name using the list of readers.</p> - -<p>We need to cache when possible. We do cache raw tfm data (from <l -n='tfm'/>, <l n='afm'/> or <l n='otf'/>). After that we can cache based -on specificstion (name) and size, that is, <l n='tex'/> only needs a number -for an already loaded fonts. However, it may make sense to cache fonts -before they're scaled as well (store <l n='tfm'/>'s with applied methods -and features). However, there may be a relation between the size and -features (esp in virtual fonts) so let's not do that now.</p> - -<p>Watch out, here we do load a font, but we don't prepare the -specification yet.</p> ---ldx]]-- - --- very experimental: +-- The main read function either uses a forced reader (as determined by a lookup) or +-- tries to resolve the name using the list of readers. +-- +-- We need to cache when possible. We do cache raw tfm data (from TFM, AFM or OTF). +-- After that we can cache based on specificstion (name) and size, that is, TeX only +-- needs a number for an already loaded fonts. However, it may make sense to cache +-- fonts before they're scaled as well (store TFM's with applied methods and +-- features). However, there may be a relation between the size and features (esp in +-- virtual fonts) so let's not do that now. 
+-- +-- Watch out, here we do load a font, but we don't prepare the specification yet. function definers.applypostprocessors(tfmdata) local postprocessors = tfmdata.postprocessors @@ -431,17 +415,13 @@ function constructors.readanddefine(name,size) -- no id -- maybe a dummy first return fontdata[id], id end ---[[ldx-- -<p>So far the specifiers. Now comes the real definer. Here we cache -based on id's. Here we also intercept the virtual font handler. Since -it evolved stepwise I may rewrite this bit (combine code).</p> - -In the previously defined reader (the one resulting in a <l n='tfm'/> -table) we cached the (scaled) instances. Here we cache them again, but -this time based on id. We could combine this in one cache but this does -not gain much. By the way, passing id's back to in the callback was -introduced later in the development.</p> ---ldx]]-- +-- So far the specifiers. Now comes the real definer. Here we cache based on id's. +-- Here we also intercept the virtual font handler. +-- +-- In the previously defined reader (the one resulting in a TFM table) we cached the +-- (scaled) instances. Here we cache them again, but this time based on id. We could +-- combine this in one cache but this does not gain much. By the way, passing id's +-- back to in the callback was introduced later in the development. 
function definers.registered(hash) local id = internalized[hash] diff --git a/tex/context/base/mkxl/font-fbk.lmt b/tex/context/base/mkxl/font-fbk.lmt index bdc5265ae..09f20b42c 100644 --- a/tex/context/base/mkxl/font-fbk.lmt +++ b/tex/context/base/mkxl/font-fbk.lmt @@ -10,10 +10,6 @@ local cos, tan, rad, format = math.cos, math.tan, math.rad, string.format local utfbyte, utfchar = utf.byte, utf.char local next = next ---[[ldx-- -<p>This is very experimental code!</p> ---ldx]]-- - local trace_visualize = false trackers.register("fonts.composing.visualize", function(v) trace_visualize = v end) local trace_define = false trackers.register("fonts.composing.define", function(v) trace_define = v end) diff --git a/tex/context/base/mkxl/font-fil.mklx b/tex/context/base/mkxl/font-fil.mklx index 79535ea11..73348645d 100644 --- a/tex/context/base/mkxl/font-fil.mklx +++ b/tex/context/base/mkxl/font-fil.mklx @@ -294,7 +294,7 @@ % pre-expansion. \def\font_helpers_update_font_class_parameters - {\edef\m_font_class_direction {\begincsname\??fontclass\fontclass\fontstyle\s!direction \endcsname}% + {%edef\m_font_class_direction {\begincsname\??fontclass\fontclass\fontstyle\s!direction \endcsname}% \edef\m_font_class_features {\begincsname\??fontclass\fontclass\fontstyle\s!features \endcsname}% \edef\m_font_class_fallbacks {\begincsname\??fontclass\fontclass\fontstyle\s!fallbacks \endcsname}% \edef\m_font_class_goodies {\begincsname\??fontclass\fontclass\fontstyle\s!goodies \endcsname}% diff --git a/tex/context/base/mkxl/font-ini.lmt b/tex/context/base/mkxl/font-ini.lmt index bc68fa83d..dcec8594e 100644 --- a/tex/context/base/mkxl/font-ini.lmt +++ b/tex/context/base/mkxl/font-ini.lmt @@ -6,10 +6,6 @@ if not modules then modules = { } end modules ['font-ini'] = { license = "see context related readme files" } ---[[ldx-- -<p>Not much is happening here.</p> ---ldx]]-- - local sortedhash, setmetatableindex = table.sortedhash, table.setmetatableindex local allocate = 
utilities.storage.allocate diff --git a/tex/context/base/mkxl/font-ini.mklx b/tex/context/base/mkxl/font-ini.mklx index 6efae2ae1..ea727bde4 100644 --- a/tex/context/base/mkxl/font-ini.mklx +++ b/tex/context/base/mkxl/font-ini.mklx @@ -755,6 +755,16 @@ \immutable\dimensiondef\d_font_default_size 10pt +%lettonothing\m_font_class_direction % no longer used +\lettonothing\m_font_class_features +\lettonothing\m_font_class_fallbacks +\lettonothing\m_font_class_goodies + +\lettonothing\m_font_direction +\lettonothing\m_font_features +\lettonothing\m_font_fallbacks +\lettonothing\m_font_goodies + \protected\def\font_helpers_low_level_define {\ifconditional\c_font_compact \expandafter\font_helpers_low_level_define_compact diff --git a/tex/context/base/mkxl/font-mat.mklx b/tex/context/base/mkxl/font-mat.mklx index 76f6f87b9..54473a347 100644 --- a/tex/context/base/mkxl/font-mat.mklx +++ b/tex/context/base/mkxl/font-mat.mklx @@ -337,15 +337,17 @@ %D 0 while in rl mode 0 is a copy of 1. There is no real overhead involved in this. %D This also permits different font definitions for normal and mixed. 
-\lettonothing\m_font_class_direction -\lettonothing\m_font_class_features -\lettonothing\m_font_class_fallbacks -\lettonothing\m_font_class_goodies - -\lettonothing\m_font_direction -\lettonothing\m_font_features -\lettonothing\m_font_fallbacks -\lettonothing\m_font_goodies +% moved to ini +% +% \lettonothing\m_font_class_direction +% \lettonothing\m_font_class_features +% \lettonothing\m_font_class_fallbacks +% \lettonothing\m_font_class_goodies +% +% \lettonothing\m_font_direction +% \lettonothing\m_font_features +% \lettonothing\m_font_fallbacks +% \lettonothing\m_font_goodies \appendtoks \font_helpers_set_math_family\c_font_fam_mr\s!mr diff --git a/tex/context/base/mkxl/font-one.lmt b/tex/context/base/mkxl/font-one.lmt index 453f61192..71694dcca 100644 --- a/tex/context/base/mkxl/font-one.lmt +++ b/tex/context/base/mkxl/font-one.lmt @@ -7,18 +7,16 @@ if not modules then modules = { } end modules ['font-one'] = { license = "see context related readme files" } ---[[ldx-- -<p>Some code may look a bit obscure but this has to do with the fact that we also use -this code for testing and much code evolved in the transition from <l n='tfm'/> to -<l n='afm'/> to <l n='otf'/>.</p> - -<p>The following code still has traces of intermediate font support where we handles -font encodings. Eventually font encoding went away but we kept some code around in -other modules.</p> - -<p>This version implements a node mode approach so that users can also more easily -add features.</p> ---ldx]]-- +-- Some code may look a bit obscure but this has to do with the fact that we also +-- use this code for testing and much code evolved in the transition from TFM to AFM +-- to OTF. +-- +-- The following code still has traces of intermediate font support where we handles +-- font encodings. Eventually font encoding went away but we kept some code around +-- in other modules. +-- +-- This version implements a node mode approach so that users can also more easily +-- add features. 
local fonts, logs, trackers, containers, resolvers = fonts, logs, trackers, containers, resolvers @@ -71,15 +69,13 @@ local overloads = fonts.mappings.overloads local applyruntimefixes = fonts.treatments and fonts.treatments.applyfixes ---[[ldx-- -<p>We cache files. Caching is taken care of in the loader. We cheat a bit by adding -ligatures and kern information to the afm derived data. That way we can set them faster -when defining a font.</p> - -<p>We still keep the loading two phased: first we load the data in a traditional -fashion and later we transform it to sequences. Then we apply some methods also -used in opentype fonts (like <t>tlig</t>).</p> ---ldx]]-- +-- We cache files. Caching is taken care of in the loader. We cheat a bit by adding +-- ligatures and kern information to the afm derived data. That way we can set them +-- faster when defining a font. +-- +-- We still keep the loading two phased: first we load the data in a traditional +-- fashion and later we transform it to sequences. Then we apply some methods also +-- used in opentype fonts (like tlig). function afm.load(filename) filename = resolvers.findfile(filename,'afm') or "" @@ -312,10 +308,8 @@ local function enhance_fix_names(data) end end ---[[ldx-- -<p>These helpers extend the basic table with extra ligatures, texligatures -and extra kerns. This saves quite some lookups later.</p> ---ldx]]-- +-- These helpers extend the basic table with extra ligatures, texligatures and extra +-- kerns. This saves quite some lookups later. local addthem = function(rawdata,ligatures) if ligatures then @@ -349,17 +343,14 @@ local function enhance_add_ligatures(rawdata) addthem(rawdata,afm.helpdata.ligatures) end ---[[ldx-- -<p>We keep the extra kerns in separate kerning tables so that we can use -them selectively.</p> ---ldx]]-- - --- This is rather old code (from the beginning when we had only tfm). 
If --- we unify the afm data (now we have names all over the place) then --- we can use shcodes but there will be many more looping then. But we --- could get rid of the tables in char-cmp then. Als, in the generic version --- we don't use the character database. (Ok, we can have a context specific --- variant). +-- We keep the extra kerns in separate kerning tables so that we can use them +-- selectively. +-- +-- This is rather old code (from the beginning when we had only tfm). If we unify +-- the afm data (now we have names all over the place) then we can use shcodes but +-- there will be many more looping then. But we could get rid of the tables in +-- char-cmp then. Als, in the generic version we don't use the character database. +-- (Ok, we can have a context specific variant). local function enhance_add_extra_kerns(rawdata) -- using shcodes is not robust here local descriptions = rawdata.descriptions @@ -440,9 +431,7 @@ local function enhance_add_extra_kerns(rawdata) -- using shcodes is not robust h do_it_copy(afm.helpdata.rightkerned) end ---[[ldx-- -<p>The copying routine looks messy (and is indeed a bit messy).</p> ---ldx]]-- +-- The copying routine looks messy (and is indeed a bit messy). local function adddimensions(data) -- we need to normalize afm to otf i.e. indexed table instead of name if data then @@ -619,11 +608,9 @@ end return nil end ---[[ldx-- -<p>Originally we had features kind of hard coded for <l n='afm'/> files but since I -expect to support more font formats, I decided to treat this fontformat like any -other and handle features in a more configurable way.</p> ---ldx]]-- +-- Originally we had features kind of hard coded for AFM files but since I expect to +-- support more font formats, I decided to treat this fontformat like any other and +-- handle features in a more configurable way. 
function afm.setfeatures(tfmdata,features)
    local okay = constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm)
@@ -715,13 +702,10 @@ local function afmtotfm(specification)
         end
     end

---[[ldx--
-<p>As soon as we could intercept the <l n='tfm'/> reader, I implemented an
-<l n='afm'/> reader. Since traditional <l n='pdftex'/> could use <l n='opentype'/>
-fonts with <l n='afm'/> companions, the following method also could handle
-those cases, but now that we can handle <l n='opentype'/> directly we no longer
-need this features.</p>
---ldx]]--
+-- As soon as we could intercept the TFM reader, I implemented an AFM reader. Since
+-- traditional pdfTeX could use OpenType fonts with AFM companions, the following
+-- method also could handle those cases, but now that we can handle OpenType
+-- directly we no longer need this feature.

 local function read_from_afm(specification)
     local tfmdata = afmtotfm(specification)
@@ -736,9 +720,7 @@ local function read_from_afm(specification)
     return tfmdata
 end

---[[ldx--
-<p>We have the usual two modes and related features initializers and processors.</p>
---ldx]]--
+-- We have the usual two modes and related features initializers and processors.

 registerafmfeature {
     name = "mode",
diff --git a/tex/context/base/mkxl/font-onr.lmt b/tex/context/base/mkxl/font-onr.lmt
index d28c247df..04f9d3bb2 100644
--- a/tex/context/base/mkxl/font-onr.lmt
+++ b/tex/context/base/mkxl/font-onr.lmt
@@ -7,18 +7,16 @@ if not modules then modules = { } end modules ['font-onr'] = {
     license = "see context related readme files"
 }

---[[ldx--
-<p>Some code may look a bit obscure but this has to do with the fact that we also use
-this code for testing and much code evolved in the transition from <l n='tfm'/> to
-<l n='afm'/> to <l n='otf'/>.</p>
-
-<p>The following code still has traces of intermediate font support where we handles
-font encodings. 
Eventually font encoding went away but we kept some code around in
-other modules.</p>
-
-<p>This version implements a node mode approach so that users can also more easily
-add features.</p>
---ldx]]--
+-- Some code may look a bit obscure but this has to do with the fact that we also
+-- use this code for testing and much code evolved in the transition from TFM to AFM
+-- to OTF.
+--
+-- The following code still has traces of intermediate font support where we handled
+-- font encodings. Eventually font encoding went away but we kept some code around
+-- in other modules.
+--
+-- This version implements a node mode approach so that users can also more easily
+-- add features.

 local fonts, logs, trackers, resolvers = fonts, logs, trackers, resolvers

@@ -49,12 +47,9 @@ pfb.version = 1.002
 local readers = afm.readers or { }
 afm.readers = readers

---[[ldx--
-<p>We start with the basic reader which we give a name similar to the built in <l n='tfm'/>
-and <l n='otf'/> reader.</p>
-<p>We use a new (unfinished) pfb loader but I see no differences between the old
-and new vectors (we actually had one bad vector with the old loader).</p>
---ldx]]--
+-- We start with the basic reader which we give a name similar to the built in TFM
+-- and OTF reader. We use a PFB loader but I see no differences between the old and
+-- new vectors (we actually had one bad vector with the old loader).

 local get_indexes, get_shapes

@@ -71,7 +66,7 @@ do
             -- local plain = bxor(cipher,rshift(r,8))
             local plain = (cipher ~ ((r >> 8) & 0xFFFFFFFF))
             -- r = ((cipher + r) * c1 + c2) % 65536
-            r = ((cipher + r) * c1 + c2) % 0x10000
+            r = ((cipher + r) * c1 + c2) % 0x10000
             return char(plain)
         end

@@ -366,11 +361,10 @@ do

 end

---[[ldx--
-<p>We start with the basic reader which we give a name similar to the built in <l n='tfm'/>
-and <l n='otf'/> reader. We only need data that is relevant for our use. 
We don't support -more complex arrangements like multiple master (obsolete), direction specific kerning, etc.</p> ---ldx]]-- +-- We start with the basic reader which we give a name similar to the built in TFM +-- and OTF reader. We only need data that is relevant for our use. We don't support +-- more complex arrangements like multiple master (obsolete), direction specific +-- kerning, etc. local spacer = patterns.spacer local whitespace = patterns.whitespace diff --git a/tex/context/base/mkxl/font-ota.lmt b/tex/context/base/mkxl/font-ota.lmt index 157270ef1..6e8130741 100644 --- a/tex/context/base/mkxl/font-ota.lmt +++ b/tex/context/base/mkxl/font-ota.lmt @@ -56,10 +56,8 @@ local chardata = characters and characters.data local otffeatures = fonts.constructors.features.otf local registerotffeature = otffeatures.register ---[[ldx-- -<p>Analyzers run per script and/or language and are needed in order to -process features right.</p> ---ldx]]-- +-- Analyzers run per script and/or language and are needed in order to process +-- features right. local setstate = nuts.setstate local getstate = nuts.getstate diff --git a/tex/context/base/mkxl/font-ots.lmt b/tex/context/base/mkxl/font-ots.lmt index e7fcfc576..0e99de6d1 100644 --- a/tex/context/base/mkxl/font-ots.lmt +++ b/tex/context/base/mkxl/font-ots.lmt @@ -7,92 +7,90 @@ if not modules then modules = { } end modules ['font-ots'] = { -- sequences license = "see context related readme files", } ---[[ldx-- -<p>I need to check the description at the microsoft site ... it has been improved -so maybe there are some interesting details there. Most below is based on old and -incomplete documentation and involved quite a bit of guesswork (checking with the -abstract uniscribe of those days. But changing things is tricky!</p> - -<p>This module is a bit more split up that I'd like but since we also want to test -with plain <l n='tex'/> it has to be so. 
This module is part of <l n='context'/> -and discussion about improvements and functionality mostly happens on the -<l n='context'/> mailing list.</p> - -<p>The specification of OpenType is (or at least decades ago was) kind of vague. -Apart from a lack of a proper free specifications there's also the problem that -Microsoft and Adobe may have their own interpretation of how and in what order to -apply features. In general the Microsoft website has more detailed specifications -and is a better reference. There is also some information in the FontForge help -files. In the end we rely most on the Microsoft specification.</p> - -<p>Because there is so much possible, fonts might contain bugs and/or be made to -work with certain rederers. These may evolve over time which may have the side -effect that suddenly fonts behave differently. We don't want to catch all font -issues.</p> - -<p>After a lot of experiments (mostly by Taco, me and Idris) the first implementation -was already quite useful. When it did most of what we wanted, a more optimized version -evolved. Of course all errors are mine and of course the code can be improved. There -are quite some optimizations going on here and processing speed is currently quite -acceptable and has been improved over time. Many complex scripts are not yet supported -yet, but I will look into them as soon as <l n='context'/> users ask for it.</p> - -<p>The specification leaves room for interpretation. In case of doubt the Microsoft -implementation is the reference as it is the most complete one. As they deal with -lots of scripts and fonts, Kai and Ivo did a lot of testing of the generic code and -their suggestions help improve the code. 
I'm aware that not all border cases can be -taken care of, unless we accept excessive runtime, and even then the interference -with other mechanisms (like hyphenation) are not trivial.</p> - -<p>Especially discretionary handling has been improved much by Kai Eigner who uses complex -(latin) fonts. The current implementation is a compromis between his patches and my code -and in the meantime performance is quite ok. We cannot check all border cases without -compromising speed but so far we're okay. Given good test cases we can probably improve -it here and there. Especially chain lookups are non trivial with discretionaries but -things got much better over time thanks to Kai.</p> - -<p>Glyphs are indexed not by unicode but in their own way. This is because there is no -relationship with unicode at all, apart from the fact that a font might cover certain -ranges of characters. One character can have multiple shapes. However, at the -<l n='tex'/> end we use unicode so and all extra glyphs are mapped into a private -space. This is needed because we need to access them and <l n='tex'/> has to include -then in the output eventually.</p> - -<p>The initial data table is rather close to the open type specification and also not -that different from the one produced by <l n='fontforge'/> but we uses hashes instead. -In <l n='context'/> that table is packed (similar tables are shared) and cached on disk -so that successive runs can use the optimized table (after loading the table is -unpacked).</p> - -<p>This module is sparsely documented because it is has been a moving target. The -table format of the reader changed a bit over time and we experiment a lot with -different methods for supporting features. By now the structures are quite stable</p> - -<p>Incrementing the version number will force a re-cache. 
We jump the number by one
-when there's a fix in the reader or processing code that can result in different
-results.</p>
-
-<p>This code is also used outside context but in context it has to work with other
-mechanisms. Both put some constraints on the code here.</p>
-
---ldx]]--
-
--- Remark: We assume that cursives don't cross discretionaries which is okay because it
--- is only used in semitic scripts.
+-- I need to check the description at the microsoft site ... it has been improved so
+-- maybe there are some interesting details there. Most below is based on old and
+-- incomplete documentation and involved quite a bit of guesswork (checking with the
+-- abstract uniscribe of those days). But changing things is tricky!
+--
+-- This module is a bit more split up than I'd like but since we also want to test
+-- with plain TeX it has to be so. This module is part of ConTeXt and discussion
+-- about improvements and functionality mostly happens on the ConTeXt mailing list.
+--
+-- The specification of OpenType is (or at least decades ago was) kind of vague.
+-- Apart from a lack of a proper free specification there's also the problem that
+-- Microsoft and Adobe may have their own interpretation of how and in what order to
+-- apply features. In general the Microsoft website has more detailed specifications
+-- and is a better reference. There is also some information in the FontForge help
+-- files. In the end we rely most on the Microsoft specification.
+--
+-- Because there is so much possible, fonts might contain bugs and/or be made to
+-- work with certain renderers. These may evolve over time which may have the side
+-- effect that suddenly fonts behave differently. We don't want to catch all font
+-- issues.
+--
+-- After a lot of experiments (mostly by Taco, me and Idris) the first
+-- implementation was already quite useful. When it did most of what we wanted, a
+-- more optimized version evolved. 
Of course all errors are mine and of course the
+-- code can be improved. There are quite some optimizations going on here and
+-- processing speed is currently quite acceptable and has been improved over time.
+-- Many complex scripts are not supported yet, but I will look into them as soon
+-- as ConTeXt users ask for it.
+--
+-- The specification leaves room for interpretation. In case of doubt the Microsoft
+-- implementation is the reference as it is the most complete one. As they deal with
+-- lots of scripts and fonts, Kai and Ivo did a lot of testing of the generic code
+-- and their suggestions help improve the code. I'm aware that not all border cases
+-- can be taken care of, unless we accept excessive runtime, and even then the
+-- interference with other mechanisms (like hyphenation) is not trivial.
+--
+-- Especially discretionary handling has been improved much by Kai Eigner who uses
+-- complex (latin) fonts. The current implementation is a compromise between his
+-- patches and my code and in the meantime performance is quite ok. We cannot check
+-- all border cases without compromising speed but so far we're okay. Given good
+-- test cases we can probably improve it here and there. Especially chain lookups
+-- are non trivial with discretionaries but things got much better over time thanks
+-- to Kai.
+--
+-- Glyphs are indexed not by unicode but in their own way. This is because there is
+-- no relationship with unicode at all, apart from the fact that a font might cover
+-- certain ranges of characters. One character can have multiple shapes. However, at
+-- the TeX end we use unicode and all extra glyphs are mapped into a private
+-- space. This is needed because we need to access them and TeX has to include them
+-- in the output eventually.
+--
+-- The initial data table is rather close to the open type specification and also
+-- not that different from the one produced by Fontforge but we use hashes instead. 
+
-- In ConTeXt that table is packed (similar tables are shared) and cached on disk so
+-- that successive runs can use the optimized table (after loading the table is
+-- unpacked).
+--
+-- This module is sparsely documented because it has been a moving target. The
+-- table format of the reader changed a bit over time and we experiment a lot with
+-- different methods for supporting features. By now the structures are quite stable.
+--
+-- Incrementing the version number will force a re-cache. We jump the number by one
+-- when there's a fix in the reader or processing code that can result in different
+-- results.
+--
+-- This code is also used outside ConTeXt but in ConTeXt it has to work with other
+-- mechanisms. Both put some constraints on the code here.
+--
+-- Remark: We assume that cursives don't cross discretionaries which is okay because
+-- it is only used in semitic scripts.
 --
 -- Remark: We assume that marks precede base characters.
 --
--- Remark: When complex ligatures extend into discs nodes we can get side effects. Normally
--- this doesn't happen; ff\d{l}{l}{l} in lm works but ff\d{f}{f}{f}.
+-- Remark: When complex ligatures extend into discs nodes we can get side effects.
+-- Normally this doesn't happen; ff\d{l}{l}{l} in lm works but ff\d{f}{f}{f}.
 --
 -- Todo: check if we copy attributes to disc nodes if needed.
 --
--- Todo: it would be nice if we could get rid of components. In other places we can use
--- the unicode properties. We can just keep a lua table.
+-- Todo: it would be nice if we could get rid of components. In other places we can
+-- use the unicode properties. We can just keep a lua table.
 --
--- Remark: We do some disc juggling where we need to keep in mind that the pre, post and
--- replace fields can have prev pointers to a nesting node ... I wonder if that is still
--- needed. 
+-- Remark: We do some disc juggling where we need to keep in mind that the pre, post
+-- and replace fields can have prev pointers to a nesting node ... I wonder if that
+-- is still needed.
 --
 -- Remark: This is not possible:
 --
@@ -1092,10 +1090,8 @@ function handlers.gpos_pair(head,start,dataset,sequence,kerns,rlmode,skiphash,st
         end
     end

---[[ldx--
-<p>We get hits on a mark, but we're not sure if the it has to be applied so
-we need to explicitly test for basechar, baselig and basemark entries.</p>
---ldx]]--
+-- We get hits on a mark, but we're not sure if it has to be applied so we need
+-- to explicitly test for basechar, baselig and basemark entries.

 function handlers.gpos_mark2base(head,start,dataset,sequence,markanchors,rlmode,skiphash)
     local markchar = getchar(start)
@@ -1292,10 +1288,8 @@ function handlers.gpos_cursive(head,start,dataset,sequence,exitanchors,rlmode,sk
     return head, start, false
 end

---[[ldx--
-<p>I will implement multiple chain replacements once I run into a font that uses
-it. It's not that complex to handle.</p>
---ldx]]--
+-- I will implement multiple chain replacements once I run into a font that uses it.
+-- It's not that complex to handle.

 local chainprocs = { }

@@ -1348,29 +1342,22 @@ end

 chainprocs.reversesub = reversesub

---[[ldx--
-<p>This chain stuff is somewhat tricky since we can have a sequence of actions to be
-applied: single, alternate, multiple or ligature where ligature can be an invalid
-one in the sense that it will replace multiple by one but not neccessary one that
-looks like the combination (i.e. it is the counterpart of multiple then). For
-example, the following is valid:</p>
-
-<typing>
-<line>xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx</line>
-</typing>
-
-<p>Therefore we we don't really do the replacement here already unless we have the
-single lookup case. 
The efficiency of the replacements can be improved by deleting
-as less as needed but that would also make the code even more messy.</p>
---ldx]]--
-
---[[ldx--
-<p>Here we replace start by a single variant.</p>
---ldx]]--
-
--- To be done (example needed): what if > 1 steps
-
--- this is messy: do we need this disc checking also in alternates?
+-- This chain stuff is somewhat tricky since we can have a sequence of actions to be
+-- applied: single, alternate, multiple or ligature where ligature can be an invalid
+-- one in the sense that it will replace multiple by one but not necessarily one that
+-- looks like the combination (i.e. it is the counterpart of multiple then). For
+-- example, the following is valid:
+--
+-- xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx
+--
+-- Therefore we don't really do the replacement here already unless we have the
+-- single lookup case. The efficiency of the replacements can be improved by
+-- deleting as little as needed but that would also make the code even more messy.
+--
+-- Here we replace start by a single variant.
+--
+-- To be done: what if > 1 steps (example needed)
+-- This is messy: do we need this disc checking also in alternates?

 local function reportzerosteps(dataset,sequence)
     logwarning("%s: no steps",cref(dataset,sequence))
@@ -1446,9 +1433,7 @@ function chainprocs.gsub_single(head,start,stop,dataset,sequence,currentlookup,r
     return head, start, false
 end

---[[ldx--
-<p>Here we replace start by new glyph. First we delete the rest of the match.</p>
---ldx]]--
+-- Here we replace start by new glyph. First we delete the rest of the match.

 -- char_1 mark_1 -> char_x mark_1 (ignore marks)
 -- char_1 mark_1 -> char_x
@@ -1500,9 +1485,7 @@ function chainprocs.gsub_alternate(head,start,stop,dataset,sequence,currentlooku
     return head, start, false
 end

---[[ldx--
-<p>Here we replace start by a sequence of new glyphs.</p>
---ldx]]--
+-- Here we replace start by a sequence of new glyphs. 
function chainprocs.gsub_multiple(head,start,stop,dataset,sequence,currentlookup,rlmode,skiphash,chainindex) local mapping = currentlookup.mapping @@ -1526,11 +1509,9 @@ function chainprocs.gsub_multiple(head,start,stop,dataset,sequence,currentlookup return head, start, false end ---[[ldx-- -<p>When we replace ligatures we use a helper that handles the marks. I might change -this function (move code inline and handle the marks by a separate function). We -assume rather stupid ligatures (no complex disc nodes).</p> ---ldx]]-- +-- When we replace ligatures we use a helper that handles the marks. I might change +-- this function (move code inline and handle the marks by a separate function). We +-- assume rather stupid ligatures (no complex disc nodes). -- compare to handlers.gsub_ligature which is more complex ... why diff --git a/tex/context/base/mkxl/font-tfm.lmt b/tex/context/base/mkxl/font-tfm.lmt index 9fce8fc5f..d6857b39e 100644 --- a/tex/context/base/mkxl/font-tfm.lmt +++ b/tex/context/base/mkxl/font-tfm.lmt @@ -50,21 +50,18 @@ constructors.resolvevirtualtoo = false -- wil be set in font-ctx.lua fonts.formats.tfm = "type1" -- we need to have at least a value here fonts.formats.ofm = "type1" -- we need to have at least a value here ---[[ldx-- -<p>The next function encapsulates the standard <l n='tfm'/> loader as -supplied by <l n='luatex'/>.</p> ---ldx]]-- - --- this might change: not scaling and then apply features and do scaling in the --- usual way with dummy descriptions but on the other hand .. we no longer use --- tfm so why bother - --- ofm directive blocks local path search unless set; btw, in context we --- don't support ofm files anyway as this format is obsolete - --- we need to deal with nested virtual fonts, but because we load in the --- frontend we also need to make sure we don't nest too deep (esp when sizes --- get large) +-- The next function encapsulates the standard TFM loader as supplied by LuaTeX. 
+-- +-- This might change: not scaling and then apply features and do scaling in the +-- usual way with dummy descriptions but on the other hand. However, we no longer +-- use TFM (except for the JMN math fonts) so why bother. +-- +-- The ofm directive blocks a local path search unless set. Actually, in ConTeXt we +-- never had to deal with OFM files anyway as this format is obsolete (there are +-- hardly any fonts in that format that are of use). +-- +-- We need to deal with nested virtual fonts, but because we load in the frontend we +-- also need to make sure we don't nest too deep (esp when sizes get large) -- -- (VTITLE Example of a recursion) -- (MAPFONT D 0 (FONTNAME recurse)(FONTAT D 2)) @@ -72,7 +69,8 @@ supplied by <l n='luatex'/>.</p> -- (CHARACTER C B (CHARWD D 2)(CHARHT D 2)(MAP (SETCHAR C A))) -- (CHARACTER C C (CHARWD D 4)(CHARHT D 4)(MAP (SETCHAR C B))) -- --- we added the same checks as below to the luatex engine +-- The virtual fonts are handled in the backend and therefore LMTX provides more +-- features than in the original specification. LuaTeX already had a few more. function tfm.setfeatures(tfmdata,features) local okay = constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm) diff --git a/tex/context/base/mkxl/lang-url.lmt b/tex/context/base/mkxl/lang-url.lmt index b918464d0..7607d7d84 100644 --- a/tex/context/base/mkxl/lang-url.lmt +++ b/tex/context/base/mkxl/lang-url.lmt @@ -23,12 +23,10 @@ local v_after = variables.after local is_letter = characters.is_letter ---[[ -<p>Hyphenating <l n='url'/>'s is somewhat tricky and a matter of taste. I did -consider using a dedicated hyphenation pattern or dealing with it by node -parsing, but the following solution suits as well. After all, we're mostly -dealing with <l n='ascii'/> characters.</p> -]]-- +-- Hyphenating URL's is somewhat tricky and a matter of taste. 
I did consider using +-- a dedicated hyphenation pattern or dealing with it by node parsing, but the +-- following solution suits as well. After all, we're mostly dealing with ASCII +-- characters. local urls = { } languages.urls = urls diff --git a/tex/context/base/mkxl/lpdf-ano.lmt b/tex/context/base/mkxl/lpdf-ano.lmt index 55b145730..2e19ffd5e 100644 --- a/tex/context/base/mkxl/lpdf-ano.lmt +++ b/tex/context/base/mkxl/lpdf-ano.lmt @@ -725,6 +725,7 @@ lpdf.action = pdfaction function codeinjections.prerollreference(actions) -- share can become option if actions then +-- inspect(actions) local main, n = pdfaction(actions) if main then local bs, bc = pdfborder() diff --git a/tex/context/base/mkxl/lpdf-pde.lmt b/tex/context/base/mkxl/lpdf-pde.lmt index 68712d58d..4e5d73e04 100644 --- a/tex/context/base/mkxl/lpdf-pde.lmt +++ b/tex/context/base/mkxl/lpdf-pde.lmt @@ -67,7 +67,6 @@ local lpdf = lpdf local lpdf_epdf = { } lpdf.epdf = lpdf_epdf -local pdfopen = pdfe.open local pdfopenfile = pdfe.openfile local pdfnew = pdfe.new local pdfclose = pdfe.close @@ -540,10 +539,9 @@ function lpdf_epdf.load(filename,userpassword,ownerpassword,fromstring) local __file__ if fromstring then __data__ = pdfnew(filename,#filename) - elseif pdfopenfile then - __data__ = pdfopenfile(ioopen(filename,"rb")) else - __data__ = pdfopen(filename) + local f = ioopen(filename,"rb") + __data__ = f and pdfopenfile(f) end if __data__ then if userpassword and getstatus(__data__) < 0 then diff --git a/tex/context/base/mkxl/luat-cbk.lmt b/tex/context/base/mkxl/luat-cbk.lmt index 744d12e27..2a3a58b04 100644 --- a/tex/context/base/mkxl/luat-cbk.lmt +++ b/tex/context/base/mkxl/luat-cbk.lmt @@ -12,20 +12,16 @@ local collectgarbage, type, next = collectgarbage, type, next local round = math.round local sortedhash, sortedkeys, tohash = table.sortedhash, table.sortedkeys, table.tohash ---[[ldx-- -<p>Callbacks are the real asset of <l n='luatex'/>. 
They permit you to hook
-your own code into the <l n='tex'/> engine. Here we implement a few handy
-auxiliary functions.</p>
---ldx]]--
+-- Callbacks are the real asset of LuaTeX. They permit you to hook your own code
+-- into the TeX engine. Here we implement a few handy auxiliary functions. Watch
+-- out, there are differences between LuaTeX and LuaMetaTeX.

 callbacks = callbacks or { }
 local callbacks = callbacks

---[[ldx--
-<p>When you (temporarily) want to install a callback function, and after a
-while wants to revert to the original one, you can use the following two
-functions. This only works for non-frozen ones.</p>
---ldx]]--
+-- When you (temporarily) want to install a callback function, and after a while
+-- want to revert to the original one, you can use the following two functions.
+-- This only works for non-frozen ones.

 local trace_callbacks = false trackers.register("system.callbacks", function(v) trace_callbacks = v end)
 local trace_calls = false -- only used when analyzing performance and initializations
@@ -47,13 +43,12 @@ local list = callbacks.list
 local permit_overloads = false
 local block_overloads = false

---[[ldx--
-<p>By now most callbacks are frozen and most provide a way to plug in your own code. For instance
-all node list handlers provide before/after namespaces and the file handling code can be extended
-by adding schemes and if needed I can add more hooks. So there is no real need to overload a core
-callback function. It might be ok for quick and dirty testing but anyway you're on your own if
-you permanently overload callback functions.</p>
---ldx]]--
+-- By now most callbacks are frozen and most provide a way to plug in your own code.
+-- For instance all node list handlers provide before/after namespaces and the file
+-- handling code can be extended by adding schemes and if needed I can add more
+-- hooks. So there is no real need to overload a core callback function. 
It might be
+-- ok for quick and dirty testing but anyway you're on your own if you permanently
+-- overload callback functions.

 -- This might become a configuration file only option when it gets abused too much.
diff --git a/tex/context/base/mkxl/luat-cod.mkxl b/tex/context/base/mkxl/luat-cod.mkxl
index ed4a13981..322076aa1 100644
--- a/tex/context/base/mkxl/luat-cod.mkxl
+++ b/tex/context/base/mkxl/luat-cod.mkxl
@@ -42,7 +42,7 @@
 \toksapp \everydump {%
    \permanent\let\ctxlatelua \latelua
    \permanent\def\ctxlatecommand#1{\latelua{commands.#1}}%
-   \aliased\let\lateluacode \ctxlatelua
+   \aliased\let\lateluacode \ctxlatelua
 } % no \appendtoks yet

 \protect \endinput
diff --git a/tex/context/base/mkxl/luat-ini.lmt b/tex/context/base/mkxl/luat-ini.lmt
index 3202ea42b..56e3bd1c1 100644
--- a/tex/context/base/mkxl/luat-ini.lmt
+++ b/tex/context/base/mkxl/luat-ini.lmt
@@ -6,11 +6,9 @@ if not modules then modules = { } end modules ['luat-ini'] = {
     license = "see context related readme files"
 }

---[[ldx--
-<p>We cannot load anything yet. However what we will do us reserve a few tables.
-These can be used for runtime user data or third party modules and will not be
-cluttered by macro package code.</p>
---ldx]]--
+-- We cannot load anything yet. However what we will do is reserve a few tables.
+-- These can be used for runtime user data or third party modules and will not be
+-- cluttered by macro package code.

 userdata = userdata or { } -- for users (e.g. 
functions etc) thirddata = thirddata or { } -- only for third party modules diff --git a/tex/context/base/mkxl/math-act.lmt b/tex/context/base/mkxl/math-act.lmt index 0c75147f6..4a46baff9 100644 --- a/tex/context/base/mkxl/math-act.lmt +++ b/tex/context/base/mkxl/math-act.lmt @@ -533,7 +533,7 @@ do k = mathgaps[k] or k local character = targetcharacters[k] if character then --- if not character.tweaked then -- todo: add a force + -- if not character.tweaked then -- todo: add a force local t = type(v) if t == "number" then v = list[v] @@ -666,7 +666,7 @@ do else report_mathtweak("invalid dimension entry %U",k) end --- character.tweaked = true + -- character.tweaked = true if v.all then local nxt = character.next if nxt then @@ -680,7 +680,7 @@ do end end end --- end + -- end else report_tweak("no character %U",target,original,k) end @@ -1938,63 +1938,178 @@ do -- vfmath.builders.extension(target) local rbe = newprivateslot("radical bar extender") + local fbe = newprivateslot("fraction bar extender") + + local frp = { + newprivateslot("flat rule left piece"), + newprivateslot("flat rule middle piece"), + newprivateslot("flat rule right piece"), + } + + local rrp = { + newprivateslot("radical rule middle piece"), + newprivateslot("radical rule right piece"), + } + + local mrp = { + newprivateslot("minus rule left piece"), + newprivateslot("minus rule middle piece"), + newprivateslot("minus rule right piece"), + } - local function useminus(unicode,characters,parameters) + local function useminus(target,unicode,characters,parameters,skipfirst,what) local minus = characters[0x2212] - local xoffset = parameters.xoffset or .075 - local yoffset = parameters.yoffset or .9 - local xscale = parameters.xscale or 1 - local yscale = parameters.yscale or 1 - local xwidth = parameters.width or (1 - 2*xoffset) - local xheight = parameters.height or (1 - yoffset) - local mheight = minus.height - local mwidth = minus.width - local height = xheight*mheight - local xshift = xoffset * 
mwidth - local yshift = yoffset * mheight - local advance = xwidth * mwidth - local step = mwidth / 2 - characters[unicode] = { - height = height, - depth = height, - width = advance, - commands = { - push, - leftcommand[xshift], - downcommand[yshift], - -- slotcommand[0][0x2212], - { "slot", 0, 0x2212, xscale, yscale }, - pop, - }, - unicode = unicode, - -- parts = { - -- { extender = 0, glyph = first, ["end"] = fw/2, start = 0, advance = fw }, - -- { extender = 1, glyph = middle, ["end"] = mw/2, start = mw/2, advance = mw }, - -- { extender = 0, glyph = last, ["end"] = 0, start = lw/2, advance = lw }, - -- }, - parts = { - { extender = 0, glyph = unicode, ["end"] = step, start = 0, advance = advance }, - { extender = 1, glyph = unicode, ["end"] = step, start = step, advance = advance }, - }, - partsorientation = "horizontal", - } + local parts = minus.parts + if parameters == true then + parameters = { } + end + if parts then + parts = copytable(parts) + local xscale = parameters.xscale or 1 + local yscale = parameters.yscale or 1 + local mheight = minus.height + local height = (parameters.height or 1) * mheight + local yshift = (parameters.yoffset or 0) * mheight + if skipfirst then + table.remove(parts,1) + end + height = height / 2 + yshift = yshift + height + for i=1,#parts do + local part = parts[i] + local glyph = part.glyph + local gdata = characters[glyph] + local width = gdata.width + local xshift = 0 + if i == 1 and parameters.leftoffset then + xshift = (parameters.leftoffset) * width + width = width - xshift + elseif i == #parts and parameters.rightoffset then + width = (1 + parameters.rightoffset) * width + end + characters[what[i]] = { + height = height, + depth = height, + width = width, + commands = { + leftcommand[xshift], + downcommand[yshift], +-- slotcommand[0][glyph], + { "slot", 0, glyph, xscale, yscale }, + }, + } + part.glyph = what[i] + part.advance = width + end + characters[unicode] = { + height = height, + depth = height, + width = 
advance, + commands = { + downcommand[yshift], +-- slotcommand[0][0x2212], + { "slot", 0, 0x2212, xscale, yscale }, + }, + unicode = unicode, + parts = parts, + partsorientation = "horizontal", + } + end + end + + -- add minus parts of not there and create clipped clone + + local function checkminus(target,unicode,characters,parameters,skipfirst,what) + local minus = characters[unicode] + local parts = minus.parts + if parameters == true then + parameters = { } + end + local p_normal = 0 + local p_flat = 0 + local mwidth = minus.width + local height = minus.height + local depth = minus.depth + local loffset = parameters.leftoffset or 0 + local roffset = parameters.rightoffset or 0 + local lshift = mwidth * loffset + local rshift = mwidth * roffset + local width = mwidth - lshift - rshift + if parts then + -- print("minus has parts") + if lshift ~= 0 or width ~= mwidth then + parts = copytable(parts) + for i=1,#parts do + local part = parts[i] + local glyph = part.glyph + local gdata = characters[glyph] + local width = gdata.width + local advance = part.advance + local lshift = 0 + if i == 1 and left ~= 0 then + lshift = loffset * width + width = width - lshift + advance = advance - lshift + elseif i == #parts and roffset ~= 0 then + width = width - rshift + advance = advance - rshift + end + characters[what[i]] = { + height = height, + depth = depth, + width = width, + commands = { + leftcommand[lshift], + slotcommand[0][glyph], + }, + } + part.glyph = what[i] + part.advance = advance + end + minus.parts = parts + minus.partsorientation = "horizontal" + + end + else + local f_normal = formatters["M-NORMAL-%H"](unicode) + -- local p_normal = hasprivate(main,f_normal) + p_normal = addprivate(target,f_normal,{ + height = height, + width = width, + commands = { + push, + leftcommand[lshift], + slotcommand[0][unicode], + pop, + }, + }) + local step = width/2 + minus.parts = { + { extender = 0, glyph = p_normal, ["end"] = step, start = 0, advance = width }, + { extender 
= 1, glyph = p_normal, ["end"] = step, start = step, advance = width }, + { extender = 0, glyph = p_normal, ["end"] = 0, start = step, advance = width }, + } + minus.partsorientation = "horizontal" + end end function mathtweaks.replacerules(target,original,parameters) local characters = target.characters + local minus = parameters.minus local fraction = parameters.fraction local radical = parameters.radical + local stacker = parameters.stacker + if minus then + checkminus(target,0x2212,characters,minus,false,mrp) + end if fraction then - local template = fraction.template - if template == 0x2212 or template == "minus" then - useminus(0x203E,characters,fraction) - end + useminus(target,fbe,characters,fraction,false,frp) end if radical then - local template = radical.template - if template == 0x2212 or template == "minus" then - useminus(rbe,characters,radical) - end + useminus(target,rbe,characters,radical,true,rrp) + end + if stacker then + useminus(target,0x203E,characters,stacker,false,frp) end end @@ -2110,6 +2225,7 @@ do return { -- [0x002D] = { { left = slack, right = slack, glyph = 0x2212 }, single }, -- rel +-- [0x2212] = { { left = slack, right = slack, glyph = 0x2212 }, single }, -- rel -- [0x2190] = leftsingle, -- leftarrow [0x219E] = leftsingle, -- twoheadleftarrow @@ -3091,59 +3207,6 @@ do local double <const> = 0x2016 local triple <const> = 0x2980 - -- local nps = fonts.helpers.newprivateslot - -- - -- local function variantlist(characters,unicode,chardata,what,total,used) - -- local parenthesis = characters[0x28].next - -- local width = chardata.width - -- local height = chardata.height - -- local depth = chardata.depth - -- local total = height + depth - -- local count = 1 - -- while parenthesis do - -- local private = nps(what .. " size " .. 
count) - -- local pardata = characters[parenthesis] - -- local parheight = pardata.height - -- local pardepth = pardata.depth - -- local scale = (parheight+pardepth)/total - -- local offset = - pardepth + scale * depth - -- chardata.next = private - -- chardata = { - -- unicode = unicode, - -- width = width, - -- height = parheight, - -- depth = pardepth, - -- commands = { - -- { "offset", 0, offset, unicode, 1, scale } - -- }, - -- } - -- characters[private] = chardata - -- parenthesis = pardata.next - -- if paranthesis then - -- pardata = characters[parenthesis] - -- end - -- count = count + 1 - -- end - -- chardata.parts = { - -- { - -- advance = total, - -- ["end"] = used, - -- glyph = unicode, - -- start = 0, - -- -- start = used/5, - -- }, - -- { - -- advance = total, - -- -- ["end"] = 0, - -- ["end"] = used/5, -- prevents small gap with inward curved endpoints - -- extender = 1, - -- glyph = unicode, - -- start = used, - -- }, - -- } - -- chardata.partsorientation = "vertical" - -- end - local function variantlist(unicode,chardata,total,used) chardata.varianttemplate = 0x0028 chardata.parts = { diff --git a/tex/context/base/mkxl/math-ali.mkxl b/tex/context/base/mkxl/math-ali.mkxl index b37887332..b90bad174 100644 --- a/tex/context/base/mkxl/math-ali.mkxl +++ b/tex/context/base/mkxl/math-ali.mkxl @@ -1403,9 +1403,41 @@ \c!toffset=.25\exheight, \c!boffset=\mathmatrixparameter\c!toffset] -\noaligned\permanent\tolerant\protected\def\math_matrix_HL[#1]#*% +% \noaligned\permanent\tolerant\protected\def\math_matrix_HL[#1]#*% +% {\noalign\bgroup +% \math_matrix_check_rule[#1]% +% \divideby\scratchdimen\plustwo +% \ifdim\scratchdimen>\zeropoint +% % \autorule\s!height\scratchdimen\s!depth\scratchdimen\relax +% \scratchdistance\mathmatrixparameter\c!toffset\relax +% \ifdim\scratchdistance>\zeropoint +% \nohrule +% \s!attr \mathalignmentvruleattribute\plustwo +% \s!height\scratchdistance +% \s!depth \zeropoint +% \relax +% \fi +% \hrule +% \s!attr 
\mathalignmentvruleattribute\plusthree +% \s!height\scratchdimen +% \s!depth \scratchdimen +% \relax +% \scratchdistance\mathmatrixparameter\c!boffset\relax +% \ifdim\scratchdistance>\zeropoint +% \nohrule +% \s!attr \mathalignmentvruleattribute\plusfour +% \s!height\zeropoint +% \s!depth \scratchdistance +% \relax +% \fi +% \else +% % zero dimensions disable the rule +% \fi +% \egroup} + +\def\math_matrix_HL_indeed#1#2% {\noalign\bgroup - \math_matrix_check_rule[#1]% + \math_matrix_check_rule[#2]% \divideby\scratchdimen\plustwo \ifdim\scratchdimen>\zeropoint % \autorule\s!height\scratchdimen\s!depth\scratchdimen\relax @@ -1422,6 +1454,17 @@ \s!height\scratchdimen \s!depth \scratchdimen \relax + \ifnum#1>\plusone + \localcontrolledloop\plustwo#1\plusone + {\kern.125\d_math_eqalign_distance % hskip + \hrule + \s!attr \mathalignmentvruleattribute\plusthree + \s!height\scratchdimen + \s!depth \scratchdimen + \relax}% + \kern-2\scratchdimen + \kern-.125\d_math_eqalign_distance % hskip + \fi \scratchdistance\mathmatrixparameter\c!boffset\relax \ifdim\scratchdistance>\zeropoint \nohrule @@ -1435,6 +1478,9 @@ \fi \egroup} +\permanent\tolerant\noaligned\protected\def\math_matrix_HL [#1]#*{\math_matrix_HL_indeed\plusone{#1}} +\permanent\tolerant\noaligned\protected\def\math_matrix_HLHL[#1]#*{\math_matrix_HL_indeed\plustwo{#1}} + \protected\def\math_matrix_vertical_rule_indeed#1#2% {\math_matrix_check_rule[#2]% \enablematrixrules @@ -1514,19 +1560,38 @@ %boundary\c_math_matrix_sl_boundary \enforced\let\NR\math_matrix_NL_NR} -\permanent\tolerant\protected\def\math_matrix_VL[#1]#*% +% \permanent\tolerant\protected\def\math_matrix_VL[#1]#*% +% {\span\omit +% \ifconditional\c_math_matrix_first\else +% \kern.5\d_math_eqalign_distance % hskip +% \fi +% \math_matrix_vertical_rule_yes{#1}% +% \kern.5\d_math_eqalign_distance % hskip +% \global\setfalse\c_math_matrix_first +% \aligntab +% \boundary\c_math_matrix_vl_boundary +% \enforced\let\NR\math_matrix_NL_NR +% } + 
+\def\math_matrix_VL_indeed#1#2%% {\span\omit \ifconditional\c_math_matrix_first\else \kern.5\d_math_eqalign_distance % hskip \fi - \math_matrix_vertical_rule_yes{#1}% - \kern.5\d_math_eqalign_distance % hskip + \math_matrix_vertical_rule_yes{#2}% + \localcontrolledloop\plustwo#1\plusone + {\kern.125\d_math_eqalign_distance % hskip + \math_matrix_vertical_rule_yes{#2}}% + \kern.5\d_math_eqalign_distance \global\setfalse\c_math_matrix_first \aligntab \boundary\c_math_matrix_vl_boundary \enforced\let\NR\math_matrix_NL_NR } +\permanent\tolerant\protected\def\math_matrix_VL [#1]#*{\math_matrix_VL_indeed\plusone{#1}} +\permanent\tolerant\protected\def\math_matrix_VLVL[#1]#*{\math_matrix_VL_indeed\plustwo{#1}} + \permanent\tolerant\protected\def\math_matrix_NL[#1]#*% {\span\omit \ifconditional\c_math_matrix_first\else @@ -1585,6 +1650,9 @@ \enforced\let\VC\math_matrix_VC % bonus, extra column \enforced\let\VT\math_matrix_VT % bonus, idem but tight \enforced\let\TB\math_common_TB + % just because it's easy: + \enforced\let\VLVL\math_matrix_VLVL + \enforced\let\HLHL\math_matrix_HLHL \to \everymathmatrix \definesystemattribute[mathmatrixornament][public] diff --git a/tex/context/base/mkxl/math-fnt.lmt b/tex/context/base/mkxl/math-fnt.lmt index 911e0adb5..7e2c0c75c 100644 --- a/tex/context/base/mkxl/math-fnt.lmt +++ b/tex/context/base/mkxl/math-fnt.lmt @@ -63,9 +63,11 @@ local function register_extensible(font,char,style,box) return nil else local bx = tonut(box) - updaters.apply("tagging.state.disable") -- fast enough - nodes.handlers.finalizelist(bx) - updaters.apply("tagging.state.enable") + -- actually we don't want colors and such so if we do finalize we + -- should be more selctive: +-- updaters.apply("tagging.state.disable") +-- nodes.handlers.finalizelist(bx) +-- updaters.apply("tagging.state.enable") local id = getid(bx) local al = getattrlst(bx) local wd, ht, dp = getwhd(bx) diff --git a/tex/context/base/mkxl/math-frc.mkxl b/tex/context/base/mkxl/math-frc.mkxl 
index 47edc52c4..5c1eab8dd 100644 --- a/tex/context/base/mkxl/math-frc.mkxl +++ b/tex/context/base/mkxl/math-frc.mkxl @@ -104,6 +104,14 @@ \c!vfactor=\plusthousand, \c!rule=\v!auto] +%D We now default to nice bars: + +\integerdef\fractionbarextenderuc \privatecharactercode{fraction bar extender} + +\setupmathfractions + [\c!rule=\v!symbol, + \c!middle=\fractionbarextenderuc] + \appendtoks \instance\frozen\protected\edefcsname\currentmathfraction\endcsname{\math_frac{\currentmathfraction}}% \to \everydefinemathfraction diff --git a/tex/context/base/mkxl/math-ini.mkxl b/tex/context/base/mkxl/math-ini.mkxl index 8c0615eb6..6f2dfc1c2 100644 --- a/tex/context/base/mkxl/math-ini.mkxl +++ b/tex/context/base/mkxl/math-ini.mkxl @@ -1399,6 +1399,10 @@ % \im{1\unit{hour} 20 \unit{minute} 56 \unit{second}} % \inherited\setmathspacing \mathdimensioncode \mathdigitcode \allmathstyles \thickmuskip + \inherited\setmathspacing \mathdimensioncode \mathbinarycode \allsplitstyles \medmuskip + \inherited\setmathspacing \mathdimensioncode \mathbinarycode \allunsplitstyles \pettymuskip + \inherited\setmathspacing \mathdimensioncode \mathrelationcode \allsplitstyles \thickmuskip + \inherited\setmathspacing \mathdimensioncode \mathrelationcode \allunsplitstyles \pettymuskip % \inherited\setmathspacing \mathfakecode \mathallcode \allmathstyles \tinymuskip \inherited\setmathspacing \mathallcode \mathfakecode \allmathstyles \tinymuskip @@ -2814,50 +2818,83 @@ \installcorenamespace {mathautopunctuation} -\bgroup - - % This can and will be replaced by classes: - - \catcode\commaasciicode \activecatcode - \catcode\periodasciicode \activecatcode - \catcode\semicolonasciicode\activecatcode - - \gdefcsname\??mathautopunctuation\v!no\endcsname - {\let,\math_punctuation_nop_comma - \let.\math_punctuation_nop_period - \let;\math_punctuation_nop_semicolon} - - \gdefcsname\??mathautopunctuation\v!yes\endcsname - {\let,\math_punctuation_yes_comma - \let.\math_punctuation_yes_period - 
\let;\math_punctuation_nop_semicolon} - - \gdefcsname\??mathautopunctuation\v!all\endcsname - {\let,\math_punctuation_all_comma - \let.\math_punctuation_all_period - \let;\math_punctuation_nop_semicolon} - - \gdefcsname\??mathautopunctuation comma\endcsname - {\let,\math_punctuation_yes_comma - \let.\math_punctuation_yes_period - \let;\math_punctuation_nop_semicolon} - - \gdefcsname\??mathautopunctuation\v!yes\string,semicolon\endcsname - {\let,\math_punctuation_yes_comma - \let.\math_punctuation_yes_period - \let;\math_punctuation_yes_semicolon} - - \gdefcsname\??mathautopunctuation comma\string,semicolon\endcsname - {\let,\math_punctuation_yes_comma - \let.\math_punctuation_yes_period - \let;\math_punctuation_yes_semicolon} - - \gdefcsname\??mathautopunctuation\v!all\string,semicolon\endcsname - {\let,\math_punctuation_all_comma - \let.\math_punctuation_all_period - \let;\math_punctuation_all_semicolon} +% \bgroup +% +% \catcode\commaasciicode \activecatcode +% \catcode\periodasciicode \activecatcode +% \catcode\semicolonasciicode\activecatcode +% +% \gdefcsname\??mathautopunctuation\v!no\endcsname +% {\let,\math_punctuation_nop_comma +% \let.\math_punctuation_nop_period +% \let;\math_punctuation_nop_semicolon} +% +% \gdefcsname\??mathautopunctuation\v!yes\endcsname +% {\let,\math_punctuation_yes_comma +% \let.\math_punctuation_yes_period +% \let;\math_punctuation_nop_semicolon} +% +% \gdefcsname\??mathautopunctuation\v!all\endcsname +% {\let,\math_punctuation_all_comma +% \let.\math_punctuation_all_period +% \let;\math_punctuation_nop_semicolon} +% +% \gdefcsname\??mathautopunctuation comma\endcsname +% {\let,\math_punctuation_yes_comma +% \let.\math_punctuation_yes_period +% \let;\math_punctuation_nop_semicolon} +% +% \gdefcsname\??mathautopunctuation\v!yes\string,semicolon\endcsname +% {\let,\math_punctuation_yes_comma +% \let.\math_punctuation_yes_period +% \let;\math_punctuation_yes_semicolon} +% +% \gdefcsname\??mathautopunctuation 
comma\string,semicolon\endcsname +% {\let,\math_punctuation_yes_comma +% \let.\math_punctuation_yes_period +% \let;\math_punctuation_yes_semicolon} +% +% \gdefcsname\??mathautopunctuation\v!all\string,semicolon\endcsname +% {\let,\math_punctuation_all_comma +% \let.\math_punctuation_all_period +% \let;\math_punctuation_all_semicolon} +% +% \egroup -\egroup +\defcsname\??mathautopunctuation\v!no\endcsname + {\letcharcode\commaasciicode \math_punctuation_nop_comma + \letcharcode\periodasciicode \math_punctuation_nop_period + \letcharcode\semicolonasciicode\math_punctuation_nop_semicolon} + +\defcsname\??mathautopunctuation\v!yes\endcsname + {\letcharcode\commaasciicode \math_punctuation_yes_comma + \letcharcode\periodasciicode \math_punctuation_yes_period + \letcharcode\semicolonasciicode\math_punctuation_nop_semicolon} + +\defcsname\??mathautopunctuation\v!all\endcsname + {\letcharcode\commaasciicode \math_punctuation_all_comma + \letcharcode\periodasciicode \math_punctuation_all_period + \letcharcode\semicolonasciicode\math_punctuation_nop_semicolon} + +\defcsname\??mathautopunctuation comma\endcsname + {\letcharcode\commaasciicode \math_punctuation_yes_comma + \letcharcode\periodasciicode \math_punctuation_yes_period + \letcharcode\semicolonasciicode\math_punctuation_nop_semicolon} + +\defcsname\??mathautopunctuation\v!yes\string,semicolon\endcsname + {\letcharcode\commaasciicode \math_punctuation_yes_comma + \letcharcode\periodasciicode \math_punctuation_yes_period + \letcharcode\semicolonasciicode\math_punctuation_yes_semicolon} + +\defcsname\??mathautopunctuation comma\string,semicolon\endcsname + {\letcharcode\commaasciicode \math_punctuation_yes_comma + \letcharcode\periodasciicode \math_punctuation_yes_period + \letcharcode\semicolonasciicode\math_punctuation_yes_semicolon} + +\defcsname\??mathautopunctuation\v!all\string,semicolon\endcsname + {\letcharcode\commaasciicode \math_punctuation_all_comma + \letcharcode\periodasciicode \math_punctuation_all_period 
+ \letcharcode\semicolonasciicode\math_punctuation_all_semicolon} % \appendtoks % \global\mathcode\commaasciicode \c_math_special diff --git a/tex/context/base/mkxl/math-map.lmt b/tex/context/base/mkxl/math-map.lmt index 98cc59c89..0bd75d748 100644 --- a/tex/context/base/mkxl/math-map.lmt +++ b/tex/context/base/mkxl/math-map.lmt @@ -7,31 +7,13 @@ if not modules then modules = { } end modules ['math-map'] = { license = "see context related readme files" } --- todo: make sparse .. if self - ---[[ldx-- -<p>Remapping mathematics alphabets.</p> ---ldx]]-- - --- oldstyle: not really mathematics but happened to be part of --- the mathematics fonts in cmr --- --- persian: we will also provide mappers for other --- scripts - --- todo: alphabets namespace --- maybe: script/scriptscript dynamic, - --- superscripped primes get unscripted ! - --- to be looked into once the fonts are ready (will become font --- goodie): --- --- (U+2202,U+1D715) : upright --- (U+2202,U+1D715) : italic --- (U+2202,U+1D715) : upright --- --- plus add them to the regular vectors below so that they honor \it etc +-- persian: we will also provide mappers for other scripts +-- todo : alphabets namespace +-- maybe : script/scriptscript dynamic, +-- check : (U+2202,U+1D715) : upright +-- (U+2202,U+1D715) : italic +-- (U+2202,U+1D715) : upright +-- add them to the regular vectors below so that they honor \it etc local type, next = type, next local merged, sortedhash = table.merged, table.sortedhash diff --git a/tex/context/base/mkxl/math-noa.lmt b/tex/context/base/mkxl/math-noa.lmt index 4a0cb5744..f64783ed9 100644 --- a/tex/context/base/mkxl/math-noa.lmt +++ b/tex/context/base/mkxl/math-noa.lmt @@ -890,39 +890,43 @@ do local data = fontdata[font] local characters = data.characters local olddata = characters[oldchar] --- local oldheight = olddata.height or 0 --- local olddepth = olddata.depth or 0 - local template = olddata.varianttemplate - local newchar = mathematics.big(data,template or 
oldchar,size,method) - local newdata = characters[newchar] - local newheight = newdata.height or 0 - local newdepth = newdata.depth or 0 - if template then --- local ratio = (newheight + newdepth) / (oldheight + olddepth) --- setheight(pointer,ratio * oldheight) --- setdepth(pointer,ratio * olddepth) - setheight(pointer,newheight) - setdepth(pointer,newdepth) - if not olddata.extensible then - -- check this on bonum and antykwa - setoptions(pointer,0) - end - if trace_fences then --- report_fences("replacing %C using method %a, size %a, template %C and ratio %.3f",newchar,method,size,template,ratio) - report_fences("replacing %C using method %a, size %a and template %C",newchar,method,size,template) - end - else - -- 1 scaled point is a signal, for now - if ht == 1 then + if olddata then +-- local oldheight = olddata.height or 0 +-- local olddepth = olddata.depth or 0 + local template = olddata.varianttemplate + local newchar = mathematics.big(data,template or oldchar,size,method) + local newdata = characters[newchar] + local newheight = newdata.height or 0 + local newdepth = newdata.depth or 0 + if template then +-- local ratio = (newheight + newdepth) / (oldheight + olddepth) +-- setheight(pointer,ratio * oldheight) +-- setdepth(pointer,ratio * olddepth) setheight(pointer,newheight) - end - if dp == 1 then setdepth(pointer,newdepth) + if not olddata.extensible then + -- check this on bonum and antykwa + setoptions(pointer,0) + end + if trace_fences then +-- report_fences("replacing %C using method %a, size %a, template %C and ratio %.3f",newchar,method,size,template,ratio) + report_fences("replacing %C using method %a, size %a and template %C",newchar,method,size,template) + end + else + -- 1 scaled point is a signal, for now + if ht == 1 then + setheight(pointer,newheight) + end + if dp == 1 then + setdepth(pointer,newdepth) + end + setchar(delimiter,newchar) + if trace_fences then + report_fences("replacing %C by %C using method %a and size 
%a",oldchar,char,method,size) + end end - setchar(delimiter,newchar) - if trace_fences then - report_fences("replacing %C by %C using method %a and size %a",oldchar,char,method,size) - end + elseif trace_fences then + report_fences("not replacing %C using method %a and size %a",oldchar,method,size) end end end diff --git a/tex/context/base/mkxl/math-rad.mklx b/tex/context/base/mkxl/math-rad.mklx index 863bb2128..ee91243e0 100644 --- a/tex/context/base/mkxl/math-rad.mklx +++ b/tex/context/base/mkxl/math-rad.mklx @@ -378,6 +378,12 @@ \integerdef\delimitedrightanutityuc \privatecharactercode{delimited right annuity} \integerdef\radicalbarextenderuc \privatecharactercode{radical bar extender} +%D We now default to nice bars: + +\setupmathradical + [\c!rule=\v!symbol, + \c!top=\radicalbarextenderuc] + \definemathradical [rannuity] [\c!left=\zerocount, diff --git a/tex/context/base/mkxl/math-spa.lmt b/tex/context/base/mkxl/math-spa.lmt index d2927ff58..a575b1714 100644 --- a/tex/context/base/mkxl/math-spa.lmt +++ b/tex/context/base/mkxl/math-spa.lmt @@ -41,6 +41,7 @@ local getnormalizedline = node.direct.getnormalizedline local getbox = nuts.getbox local setoffsets = nuts.setoffsets local addxoffset = nuts.addxoffset +local setattrlist = nuts.setattrlist local nextglue = nuts.traversers.glue local nextlist = nuts.traversers.list @@ -48,7 +49,9 @@ local nextboundary = nuts.traversers.boundary local nextnode = nuts.traversers.node local insertafter = nuts.insertafter +local insertbefore = nuts.insertbefore local newkern = nuts.pool.kern +local newstrutrule = nuts.pool.strutrule local texsetdimen = tex.setdimen local texgetdimen = tex.getdimen @@ -68,6 +71,10 @@ local d_strc_math_first_height = texisdimen("d_strc_math_first_height") local d_strc_math_last_depth = texisdimen("d_strc_math_last_depth") local d_strc_math_indent = texisdimen("d_strc_math_indent") +local report = logs.reporter("mathalign") + +local trace = false trackers.register("mathalign",function(v) trace = v 
end ) + local function moveon(s) for n, id, subtype in nextnode, getnext(s) do s = n @@ -138,15 +145,20 @@ stages[1] = function(specification,stage) p = getprev(p) end end - -- we use a hangindent so we need to treat the first one - local f = found[1] - local delta = f[2] - max - if delta ~= 0 then - insertafter(head,moveon(head),newkern(-delta)) - end - for i=2,#found do + for i=1,#found do local f = found[i] - insertafter(head,moveon(f[3]),newkern(-f[2])) -- check head + local w = f[2] + local d = i == 1 and (max-w) or -w + local k = newkern(d) + local r = newstrutrule(0,2*65536,2*65536) + local s = moveon(f[3]) + if trace then + report("row %i, width %p, delta %p",i,w,d) + end + setattrlist(r,head) + setattrlist(k,head) + insertbefore(head,s,r) + insertafter(head,r,k) end end texsetdimen("global",d_strc_math_indent,max) diff --git a/tex/context/base/mkxl/math-stc.mklx b/tex/context/base/mkxl/math-stc.mklx index fdad71978..5a701426a 100644 --- a/tex/context/base/mkxl/math-stc.mklx +++ b/tex/context/base/mkxl/math-stc.mklx @@ -1043,7 +1043,7 @@ \definemathstackers [\v!medium] [\v!mathematics] [\c!hoffset=1.5\mathemwidth] \definemathstackers [\v!big] [\v!mathematics] [\c!hoffset=2\mathemwidth] -\definemathextensible [\v!reverse] [xrel] ["002D] +\definemathextensible [\v!reverse] [xrel] ["2212] % ["002D] \definemathextensible [\v!reverse] [xequal] ["003D] \definemathextensible [\v!reverse] [xleftarrow] ["2190] % ["27F5] \definemathextensible [\v!reverse] [xrightarrow] ["2192] % ["27F6] @@ -1066,7 +1066,7 @@ \definemathextensible [\v!reverse] [xrightleftharpoons] ["21CC] \definemathextensible [\v!reverse] [xtriplerel] ["2261] -\definemathextensible [\v!mathematics] [mrel] ["002D] +\definemathextensible [\v!mathematics] [mrel] ["2212] % ["002D] \definemathextensible [\v!mathematics] [mequal] ["003D] \definemathextensible [\v!mathematics] [mleftarrow] ["2190] % ["27F5] \definemathextensible [\v!mathematics] [mrightarrow] ["2192] % ["27F6] @@ -1089,7 +1089,7 @@ 
\definemathextensible [\v!mathematics] [mrightleftharpoons] ["21CC] \definemathextensible [\v!mathematics] [mtriplerel] ["2261] -\definemathextensible [\v!text] [trel] ["002D] +\definemathextensible [\v!text] [trel] ["2212] % ["002D] \definemathextensible [\v!text] [tequal] ["003D] \definemathextensible [\v!text] [tmapsto] ["21A6] \definemathextensible [\v!text] [tleftarrow] ["2190] % ["27F5] @@ -1168,9 +1168,9 @@ %D in the backend (okay, we still need to deal with some cut and paste issues but at %D least we now know what we deal with. -\definemathoverextensible [\v!vfenced] [overbar] ["203E] -\definemathunderextensible [\v!vfenced] [underbar] ["203E] % ["0332] -\definemathdoubleextensible [\v!vfenced] [doublebar] ["203E] ["203E] % ["0332] +\definemathoverextensible [\v!vfenced] [overbar] ["203E] % todo: private +\definemathunderextensible [\v!vfenced] [underbar] ["203E] % todo: private +\definemathdoubleextensible [\v!vfenced] [doublebar] ["203E] ["203E] % todo: private \definemathoverextensible [\v!vfenced] [overbrace] ["23DE] \definemathunderextensible [\v!vfenced] [underbrace] ["23DF] @@ -1186,13 +1186,13 @@ %D For mathml: -\definemathdoubleextensible [\v!both] [overbarunderbar] ["203E] ["203E] +\definemathdoubleextensible [\v!both] [overbarunderbar] ["203E] ["203E] % todo: private \definemathdoubleextensible [\v!both] [overbraceunderbrace] ["23DE] ["23DF] \definemathdoubleextensible [\v!both] [overparentunderparent] ["23DC] ["23DD] \definemathdoubleextensible [\v!both] [overbracketunderbracket] ["23B4] ["23B5] -\definemathovertextextensible [\v!bothtext] [overbartext] ["203E] -\definemathundertextextensible [\v!bothtext] [underbartext] ["203E] +\definemathovertextextensible [\v!bothtext] [overbartext] ["203E] % todo: private +\definemathundertextextensible [\v!bothtext] [underbartext] ["203E] % todo: private \definemathovertextextensible [\v!bothtext] [overbracetext] ["23DE] \definemathundertextextensible [\v!bothtext] [underbracetext] ["23DF] 
\definemathovertextextensible [\v!bothtext] [overparenttext] ["23DC] @@ -1285,8 +1285,8 @@ \permanent\tolerant\protected\def\defineextensiblefiller[#1]#*[#2]% {\frozen\instance\edefcsname#1\endcsname{\mathfiller{\number#2}}} -%defineextensiblefiller [barfill] ["203E] % yet undefined -\defineextensiblefiller [relfill] ["002D] +%defineextensiblefiller [barfill] ["203E] % % todo: private +\defineextensiblefiller [relfill] ["2212] % ["002D] \defineextensiblefiller [equalfill] ["003D] \defineextensiblefiller [leftarrowfill] ["2190] \defineextensiblefiller [rightarrowfill] ["2192] diff --git a/tex/context/base/mkxl/math-twk.mkxl b/tex/context/base/mkxl/math-twk.mkxl index 6ffb36818..6e015d3de 100644 --- a/tex/context/base/mkxl/math-twk.mkxl +++ b/tex/context/base/mkxl/math-twk.mkxl @@ -95,5 +95,12 @@ \permanent\protected\def\minute{\iffontchar\font\textminute\textminute\else\mathminute\fi} \permanent\protected\def\second{\iffontchar\font\textsecond\textsecond\else\mathsecond\fi} +% \startsetups[math:rules] +% \letmathfractionparameter\c!rule\v!symbol +% \setmathfractionparameter\c!middle{"203E}% +% \letmathradicalparameter \c!rule\v!symbol +% \setmathradicalparameter \c!top{\radicalbarextenderuc}% +% \setmathfenceparameter \c!alternative{1}% +% \stopsetups \protect diff --git a/tex/context/base/mkxl/math-vfu.lmt b/tex/context/base/mkxl/math-vfu.lmt index 0a2b440a1..1639517b5 100644 --- a/tex/context/base/mkxl/math-vfu.lmt +++ b/tex/context/base/mkxl/math-vfu.lmt @@ -83,27 +83,37 @@ nps("flat double rule left piece") nps("flat double rule middle piece") nps("flat double rule right piece") +nps("minus rule left piece") +nps("minus rule middle piece") +nps("minus rule right piece") + do - local function horibar(main,unicode,rule,left,right,normal) + -- this overlaps with math-act + + local function horibar(main,unicode,rule,left,right,normal,force,m,l,r) local characters = main.characters - if not characters[unicode] then + local data = characters[unicode] + if force or not 
data then local height = main.mathparameters.defaultrulethickness or 4*65536/10 - local f_rule = rule and formatters["M-HORIBAR-RULE-%H"](rule) - local p_rule = rule and hasprivate(main,f_rule) + local f_rule = rule and formatters["M-HORIBAR-M-%H"](rule) + local p_rule = rule and hasprivate(main,f_rule) + local ndata = normal and characters[normal] if rule and left and right and normal then - local ldata = characters[left] - local mdata = characters[rule] - local rdata = characters[right] - local ndata = characters[normal] + local ldata = characters[l or left] + local mdata = characters[m or rule] + local rdata = characters[r or right] local lwidth = ldata.width or 0 local mwidth = mdata.width or 0 local rwidth = rdata.width or 0 local nwidth = ndata.width or 0 local down = (mdata.height / 2) - height - -- - local f_left = right and formatters["M-HORIBAR-LEFT-%H"](right) - local f_right = right and formatters["M-HORIBAR-RIGHT-%H"](right) +if unicode == normal then + height = ndata.height + down = 0 +end -- + local f_left = left and formatters["M-HORIBAR-L-%H"](left) + local f_right = right and formatters["M-HORIBAR-R-%H"](right) local p_left = left and hasprivate(main,f_left) local p_right = right and hasprivate(main,f_right) -- @@ -116,7 +126,7 @@ do push, leftcommand[.025*mwidth], downcommand[down], - slotcommand[0][rule], + slotcommand[0][m or rule], pop, }, }) @@ -130,7 +140,7 @@ do push, leftcommand[.025*lwidth], downcommand[down], - slotcommand[0][left], + slotcommand[0][l or left], pop, }, }) @@ -144,48 +154,72 @@ do push, leftcommand[.025*rwidth], downcommand[down], - slotcommand[0][right], + slotcommand[0][r or right], pop, }, }) end - characters[unicode] = { - keepvirtual = true, - partsorientation = "horizontal", - height = height, - width = nwidth, --- keepvirtual = true, - commands = { +if unicode ~= normal then + data = { + unicode = unicode, + height = height, + width = nwidth, + commands = { downcommand[down], slotcommand[0][normal] }, - parts = { - 
{ glyph = p_left, ["end"] = 0.4*lwidth }, - { glyph = p_rule, extender = 1, ["start"] = mwidth, ["end"] = mwidth }, - { glyph = p_right, ["start"] = 0.6*rwidth }, - } + } + characters[unicode] = data +end + data.parts = { + { glyph = p_left, ["end"] = 0.4*lwidth }, + { glyph = p_rule, extender = 1, ["start"] = mwidth, ["end"] = mwidth }, + { glyph = p_right, ["start"] = 0.6*rwidth }, } else - local width = main.parameters.quad/4 or 4*65536 + local width = main.parameters.quad/2 or 4*65536 -- 3 if not characters[p_rule] then - p_rule = addprivate(main,f_rule,{ - height = height, - width = width, --- keepvirtual = true, - commands = { push, { "rule", height, width }, pop }, - }) + if unicode == normal then + p_rule = addprivate(main,f_rule,{ + height = ndata.height, + width = width, + commands = { + push, + upcommand[(ndata.height - height)/2], + { "rule", height, width }, + pop + }, + }) + else + p_rule = addprivate(main,f_rule,{ + height = height, + width = width, + commands = { + push, + { "rule", height, width }, + pop + }, + }) + end end - characters[unicode] = { - height = height, - width = nwidth, --- keepvirtual = true, - partsorientation = "horizontal", - parts = { - { glyph = p_rule }, - { glyph = p_rule, extender = 1, ["start"] = width/2, ["end"] = width/2 }, +if unicode ~= normal then + data = { + unicode = unicode, + height = height, + width = width, + commands = { + slotcommand[0][p_rule] } } + characters[unicode] = data +end + data.parts = { + { glyph = p_rule, ["start"] = width/2, ["end"] = width/2 }, + { glyph = p_rule, extender = 1, ["start"] = width/2, ["end"] = width/2 }, + } end + data.keepvirtual = true -- i need to figure this out + data.partsorientation = "horizontal" end end @@ -205,8 +239,8 @@ do local nwidth = ndata.width or 0 local down = (mdata.height / 2) - height -- - local f_rule = rule and formatters["M-ROOTBAR-RULE-%H"](rule) - local f_right = right and formatters["M-ROOTBAR-RIGHT-%H"](right) + local f_rule = rule and 
formatters["M-ROOTBAR-M-%H"](rule) + local f_right = right and formatters["M-ROOTBAR-R-%H"](right) local p_rule = rule and hasprivate(main,f_rule) local p_right = right and hasprivate(main,f_right) -- diff --git a/tex/context/base/mkxl/meta-imp-newmath.mkxl b/tex/context/base/mkxl/meta-imp-newmath.mkxl new file mode 100644 index 000000000..af49f82ac --- /dev/null +++ b/tex/context/base/mkxl/meta-imp-newmath.mkxl @@ -0,0 +1,76 @@ +%D \module +%D [ file=meta-imp-newmath, +%D version=2023.04.01, +%D title=\METAPOST\ Graphics, +%D subtitle=New Math Symbols, +%D author=Mikael Sundqvist & Hans Hagen, +%D date=\currentdate, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. + +%D In this file we will collect solutions for special math symbols. When such symbols +%D are used in publications the CMS will contact the Unicode Consortium to suggest that +%D they get a slot, because then we have proof of usage. We also consider old obsolete +%D symbols because they can be treated like some ancient out|-|of|-|use script and fit +%D into the \type {ancient math script}. 
+ +\startMPextensions + vardef math_ornament_hat(expr w,h,d,o,l) = + image ( path p ; p := + (w/2,h + 10l) -- + (o + w,h + o) -- + (w/2,h + 7l) -- + (-o,h + o) -- + cycle ; + fill p randomized o ; + setbounds currentpicture to (-o,0) -- (w+o,0) -- (w+o,h+2o) -- (-o,h+2o) -- cycle ; + ) + enddef ; +\stopMPextensions + +\startuniqueMPgraphic{math:ornament:hat} + draw + math_ornament_hat( + OverlayWidth, + OverlayHeight, + OverlayDepth, + OverlayOffset, + OverlayLineWidth + ) + withpen + pencircle + xscaled (2OverlayLineWidth) + yscaled (3OverlayLineWidth/4) + rotated 30 + withcolor + OverlayLineColor ; +% draw boundingbox currentpicture; +\stopuniqueMPgraphic + +\definemathornament [widerandomhat] [mp=math:ornament:hat] + +\continueifinputfile{meta-imp-newnmath.mkxl} + +\starttext + +This symbol was designed for one of Mikaels students working on a thesis on +probability. This student needed to typeset the characteristic function of a +random variable \im {X} with density function \im {f_{X}}, and it was insisted to +use another notation than the (wide) hat, that was already used for something +else. 
For this reason the \tex {widerandomhat} was introduced, + +\startformula + E[\ee^{\ii tX}] = \widerandomhat{f_{X}}(t)\mtp{,} + E[\ee^{\ii t(X_1+X_2)}] = \widerandomhat{f_{X_1} \ast f_{X_2}}(t)\mtp{.} +\stopformula + +Naturally, it is automatically scaled, just like the ordinary wide hat + +\startformula + \widehat{a+b+c+d+e+f} \neq \widerandomhat{a+b+c+d+e+f} +\stopformula + +\stoptext diff --git a/tex/context/base/mkxl/mlib-run.lmt b/tex/context/base/mkxl/mlib-run.lmt index 0e955818e..de5ceb1db 100644 --- a/tex/context/base/mkxl/mlib-run.lmt +++ b/tex/context/base/mkxl/mlib-run.lmt @@ -6,28 +6,16 @@ if not modules then modules = { } end modules ['mlib-run'] = { license = "see context related readme files", } --- cmyk -> done, native --- spot -> done, but needs reworking (simpler) --- multitone -> --- shade -> partly done, todo: cm --- figure -> done --- hyperlink -> low priority, easy - --- new * run --- or --- new * execute^1 * finish - --- a*[b,c] == b + a * (c-b) - ---[[ldx-- -<p>The directional helpers and pen analysis are more or less translated from the -<l n='c'/> code. It really helps that Taco know that source so well. Taco and I spent -quite some time on speeding up the <l n='lua'/> and <l n='c'/> code. There is not -much to gain, especially if one keeps in mind that when integrated in <l n='tex'/> -only a part of the time is spent in <l n='metapost'/>. Of course an integrated -approach is way faster than an external <l n='metapost'/> and processing time -nears zero.</p> ---ldx]]-- +-- The directional helpers and pen analysis are more or less translated from the C +-- code. In LuaTeX we spent quite some time on speeding up the Lua interface as well +-- as the C code. There is not much to gain, especially if one keeps in mind that +-- when integrated in TeX only a part of the time is spent in MetaPost. Of course an +-- integrated approach is way faster than an external MetaPost and processing time +-- nears zero. 
+-- +-- In LuaMetaTeX the MetaPost core has been cleaned up a it and as a result +-- processing in double mode is now faster than in scaled mode. There are also extra +-- features and interfaces, so the MkIV and MkXL (LMTX) implementation differ! local type, tostring, tonumber, next = type, tostring, tonumber, next local find, striplines = string.find, utilities.strings.striplines diff --git a/tex/context/base/mkxl/node-ini.lmt b/tex/context/base/mkxl/node-ini.lmt index f1b9bb452..38f55c160 100644 --- a/tex/context/base/mkxl/node-ini.lmt +++ b/tex/context/base/mkxl/node-ini.lmt @@ -6,19 +6,13 @@ if not modules then modules = { } end modules ['node-ini'] = { license = "see context related readme files" } ---[[ldx-- -<p>Most of the code that had accumulated here is now separated in modules.</p> ---ldx]]-- - local next, type, tostring = next, type, tostring local gsub = string.gsub local concat, remove = table.concat, table.remove local sortedhash, sortedkeys, swapped = table.sortedhash, table.sortedkeys, table.swapped ---[[ldx-- -<p>Access to nodes is what gives <l n='luatex'/> its power. Here we implement a -few helper functions. These functions are rather optimized.</p> ---ldx]]-- +-- Access to nodes is what gives LuaTeX its power. Here we implement a few helper +-- functions. These functions are rather optimized. 
nodes = nodes or { } local nodes = nodes diff --git a/tex/context/base/mkxl/node-res.lmt b/tex/context/base/mkxl/node-res.lmt index 6fed08b63..2d2c31965 100644 --- a/tex/context/base/mkxl/node-res.lmt +++ b/tex/context/base/mkxl/node-res.lmt @@ -10,11 +10,6 @@ local type, next, rawset = type, next, rawset local gmatch, format = string.gmatch, string.format local round = math.round ---[[ldx-- -<p>The next function is not that much needed but in <l n='context'/> we use -for debugging <l n='luatex'/> node management.</p> ---ldx]]-- - local nodes, node = nodes, node local report_nodes = logs.reporter("nodes","housekeeping") diff --git a/tex/context/base/mkxl/node-tra.lmt b/tex/context/base/mkxl/node-tra.lmt index 1ef1bb8ad..fe212f787 100644 --- a/tex/context/base/mkxl/node-tra.lmt +++ b/tex/context/base/mkxl/node-tra.lmt @@ -6,10 +6,8 @@ if not modules then modules = { } end modules ['node-tra'] = { license = "see context related readme files" } ---[[ldx-- -<p>This is rather experimental. We need more control and some of this -might become a runtime module instead. This module will be cleaned up!</p> ---ldx]]-- +-- Some of the code here might become a runtime module instead. This old module will +-- be cleaned up anyway! local next = next local utfchar = utf.char diff --git a/tex/context/base/mkxl/pack-obj.lmt b/tex/context/base/mkxl/pack-obj.lmt index 1e22515b9..a18f5e7e7 100644 --- a/tex/context/base/mkxl/pack-obj.lmt +++ b/tex/context/base/mkxl/pack-obj.lmt @@ -6,10 +6,8 @@ if not modules then modules = { } end modules ['pack-obj'] = { license = "see context related readme files" } ---[[ldx-- -<p>We save object references in the main utility table. jobobjects are -reusable components.</p> ---ldx]]-- +-- We save object references in the main utility table; job objects are reusable +-- components. 
local context = context local codeinjections = backends.codeinjections diff --git a/tex/context/base/mkxl/pack-rul.lmt b/tex/context/base/mkxl/pack-rul.lmt index 12d131c88..62a904901 100644 --- a/tex/context/base/mkxl/pack-rul.lmt +++ b/tex/context/base/mkxl/pack-rul.lmt @@ -7,10 +7,6 @@ if not modules then modules = { } end modules ['pack-rul'] = { license = "see context related readme files" } ---[[ldx-- -<p>An explanation is given in the history document <t>mk</t>.</p> ---ldx]]-- - -- we need to be careful with display math as it uses shifts -- \framed[align={lohi,middle}]{$x$} diff --git a/tex/context/base/mkxl/publ-ini.mkxl b/tex/context/base/mkxl/publ-ini.mkxl index b75a933ad..802768a8c 100644 --- a/tex/context/base/mkxl/publ-ini.mkxl +++ b/tex/context/base/mkxl/publ-ini.mkxl @@ -342,7 +342,7 @@ \newtoks\t_btx_cmd \newbox \b_btx_cmd -\t_btx_cmd{\global\setbox\b_btx_cmd\hpack{\clf_btxcmdstring}} +\t_btx_cmd{\global\setbox\b_btx_cmd\hbox{\clf_btxcmdstring}} % no \hpack, otherwise prerolling --- doesn't work \aliased\let\btxcmd\btxcommand diff --git a/tex/context/base/mkxl/regi-ini.lmt b/tex/context/base/mkxl/regi-ini.lmt index c0cd4f1c8..efacd5128 100644 --- a/tex/context/base/mkxl/regi-ini.lmt +++ b/tex/context/base/mkxl/regi-ini.lmt @@ -6,11 +6,8 @@ if not modules then modules = { } end modules ['regi-ini'] = { license = "see context related readme files" } ---[[ldx-- -<p>Regimes take care of converting the input characters into -<l n='utf'/> sequences. The conversion tables are loaded at -runtime.</p> ---ldx]]-- +-- Regimes take care of converting the input characters into UTF sequences. The +-- conversion tables are loaded at runtime. 
local tostring = tostring local utfchar = utf.char diff --git a/tex/context/base/mkxl/scrn-wid.lmt b/tex/context/base/mkxl/scrn-wid.lmt index caa09adbd..f2112aa11 100644 --- a/tex/context/base/mkxl/scrn-wid.lmt +++ b/tex/context/base/mkxl/scrn-wid.lmt @@ -42,8 +42,6 @@ interactions.linkedlists = linkedlists local texsetbox = tex.setbox -local jobpasses = job.passes - local texgetcount = tex.getcount local codeinjections = backends.codeinjections @@ -277,7 +275,24 @@ implement { } } --- Linkedlists (only a context interface) +-- Linkedlists (only a context interface) .. untested, just adapted from old code. + +local collected = allocate() +local tobesaved = allocate() + +local linkedlists = { + collected = collected, + tobesaved = tobesaved, +} + +job.linkedlists = linkedlists + +local function initializer() + collected = linkedlists.collected + tobesaved = linkedlists.tobesaved +end + +job.register("job.linkedlists.collected", tobesaved, initializer, nil) implement { name = "definelinkedlist", @@ -291,10 +306,12 @@ implement { name = "enhancelinkedlist", arguments = { "string", "integer" }, actions = function(tag,n) - local ll = jobpasses.gettobesaved(tag) - if ll then - ll[n] = texgetcount("realpageno") + local linkedlist = tobesaved[tag] + if not linkedlist then + linkedlist = { } + tobesaved[tag] = linkedlist end + linkedlist[n] = texgetcount("realpageno") end } @@ -302,15 +319,18 @@ implement { name = "addlinklistelement", arguments = "string", actions = function(tag) - local tobesaved = jobpasses.gettobesaved(tag) - local collected = jobpasses.getcollected(tag) or { } + local tobesaved = tobesaved[tag] or { } + local collected = collected[tag] or { } local currentlink = #tobesaved + 1 local noflinks = #collected - tobesaved[currentlink] = 0 + -- + tobesaved[currentlink] = 0 -- needs checking + -- local f = collected[1] or 0 local l = collected[noflinks] or 0 local p = collected[currentlink-1] or f local n = collected[currentlink+1] or l + -- 
context.setlinkedlistproperties(currentlink,noflinks,f,p,n,l) -- context.ctxlatelua(function() commands.enhancelinkedlist(tag,currentlink) end) end diff --git a/tex/context/base/mkxl/spac-pag.mkxl b/tex/context/base/mkxl/spac-pag.mkxl index d61ddcbe6..2e3e1bc00 100644 --- a/tex/context/base/mkxl/spac-pag.mkxl +++ b/tex/context/base/mkxl/spac-pag.mkxl @@ -16,7 +16,6 @@ \unprotect \newif \ifpagestatemismatch -\newinteger \realpagestateno \newconstant\frozenpagestate \permanent\protected\def\dotrackpagestate#1#2% diff --git a/tex/context/base/mkxl/strc-itm.lmt b/tex/context/base/mkxl/strc-itm.lmt index f9153c98e..4ee084ca3 100644 --- a/tex/context/base/mkxl/strc-itm.lmt +++ b/tex/context/base/mkxl/strc-itm.lmt @@ -6,20 +6,28 @@ if not modules then modules = { } end modules ['strc-itm'] = { license = "see context related readme files" } -local structures = structures -local itemgroups = structures.itemgroups -local jobpasses = job.passes - +local allocate = utilities.storage.allocate local implement = interfaces.implement -local setvariable = jobpasses.save -local getvariable = jobpasses.getfield - local texsetcount = tex.setcount local texsetdimen = tex.setdimen -local f_stamp = string.formatters["itemgroup:%s:%s"] -local counts = table.setmetatableindex("number") +local itemgroups = structures.itemgroups + +local collected = allocate() +local tobesaved = allocate() + +itemgroups.collected = collected +itemgroups.tobesaved = tobesaved + +local function initializer() + collected = itemgroups.collected + tobesaved = itemgroups.tobesaved +end + +if job then + job.register("structures.itemgroups.collected", tobesaved, initializer) +end local c_strc_itemgroups_max_items = tex.iscount("c_strc_itemgroups_max_items") local d_strc_itemgroups_max_width = tex.isdimen("d_strc_itemgroups_max_width") @@ -28,6 +36,8 @@ local d_strc_itemgroups_max_width = tex.isdimen("d_strc_itemgroups_max_width") -- an itemgroup which in turn makes for less passes when one itemgroup -- entry is 
added or removed. +local counts = table.setmetatableindex("number") + local trialtypesetting = context.trialtypesetting local function analyzeitemgroup(name,level) @@ -36,16 +46,37 @@ local function analyzeitemgroup(name,level) n = n + 1 counts[name] = n end - local stamp = f_stamp(name,n) - texsetcount(c_strc_itemgroups_max_items,getvariable(stamp,level,1,0)) - texsetdimen(d_strc_itemgroups_max_width,getvariable(stamp,level,2,0)) + local items = 0 + local width = 0 + local itemgroup = collected[name] + if itemgroup then + local entry = itemgroup[n] + if entry then + local l = entry[level] + if l then + items = l[1] or 0 + width = l[2] or 0 + end + end + end + texsetcount(c_strc_itemgroups_max_items,items) + texsetdimen(d_strc_itemgroups_max_width,width) end local function registeritemgroup(name,level,nofitems,maxwidth) local n = counts[name] if not trialtypesetting() then - -- no trialtypsetting - setvariable(f_stamp(name,n), { nofitems, maxwidth }, level) + local itemgroup = tobesaved[name] + if not itemgroup then + itemgroup = { } + tobesaved[name] = itemgroup + end + local entry = itemgroup[n] + if not entry then + entry = { } + itemgroup[n] = entry + end + entry[level] = { nofitems, maxwidth } elseif level == 1 then counts[name] = n - 1 end diff --git a/tex/context/base/mkxl/strc-lst.lmt b/tex/context/base/mkxl/strc-lst.lmt index b60b75208..d54129f29 100644 --- a/tex/context/base/mkxl/strc-lst.lmt +++ b/tex/context/base/mkxl/strc-lst.lmt @@ -1571,7 +1571,7 @@ end function lists.integrate(utilitydata) local filename = utilitydata.comment.file - if filename then + if filename and filename ~= environment.jobname then local structures = utilitydata.structures if structures then local lists = structures.lists.collected or { } diff --git a/tex/context/base/mkxl/strc-ref.lmt b/tex/context/base/mkxl/strc-ref.lmt index 26b189475..945364b18 100644 --- a/tex/context/base/mkxl/strc-ref.lmt +++ b/tex/context/base/mkxl/strc-ref.lmt @@ -561,7 +561,7 @@ end function 
references.integrate(utilitydata) local filename = utilitydata.comment.file - if filename then + if filename and filename ~= environment.jobname then -- lists are already internalized local structures = utilitydata.structures if structures then diff --git a/tex/context/base/mkxl/strc-reg.lmt b/tex/context/base/mkxl/strc-reg.lmt index b66b22921..27d7e2586 100644 --- a/tex/context/base/mkxl/strc-reg.lmt +++ b/tex/context/base/mkxl/strc-reg.lmt @@ -1045,6 +1045,7 @@ function registers.use(tag,filename,class,prefix) filename = filename, data = job.loadother(filename), prefix = prefix or class, + label = prefix or class, } end @@ -1054,13 +1055,43 @@ implement { actions = registers.use, } +-- function registers.use(tag,specification) +-- local class = specification.class +-- local filename = specification.filename +-- local prefix = specification.prefix or class +-- local label = specification.label or prefix +-- if class and filename then +-- used[tag] = { +-- class = class, +-- filename = filename, +-- data = job.loadother(filename), +-- prefix = prefix, +-- label = label, +-- } +-- end +-- end + +-- implement { +-- name = "useregister", +-- actions = registers.use, +-- arguments = { +-- "string", +-- { +-- { "filename" }, +-- { "class" }, +-- { "prefix" }, +-- { "label" }, +-- }, +-- } +-- } + implement { - name = "registerprefix", + name = "registerlabel", arguments = "string", actions = function(tag) local u = used[tag] if u then - context(u.prefix) + context(u.label) end end } @@ -1075,7 +1106,13 @@ local function analyzeregister(class,options) local list = utilities.parsers.settings_to_array(class) local entries = { } local nofentries = 0 - local metadata = false + local multiple = false + for i=1,#list do + if used[list[i]] then + multiple = true + break + end + end for i=1,#list do local l = list[i] local u = used[l] @@ -1089,9 +1126,14 @@ local function analyzeregister(class,options) end if d then local e = d.entries - local u = u and { u.prefix } or nil +-- 
local u = u and { u.prefix } or nil +local u = multiple and { string.formatters["%03i"](i) } or nil -- maybe prefix but then how about main for i=1,#e do local ei = e[i] +if multiple and ei.metadata.kind == "see" then + -- skip see, can become an option +else + nofentries = nofentries + 1 entries[nofentries] = ei if u then @@ -1099,6 +1141,7 @@ local function analyzeregister(class,options) eil[#eil+1] = u ei.external = l -- this is the (current) abstract tag, used for prefix end +end end if not metadata then metadata = d.metadata @@ -1107,9 +1150,11 @@ local function analyzeregister(class,options) end data = { metadata = metadata or { }, + multiple = multiple, entries = entries, } collected[class] = data + options.multiple = multiple end if data and data.entries then options = options or { } @@ -1322,7 +1367,9 @@ function registers.flush(data,options,prefixspec,pagespec) -- report_registers("invalid see entry in register %a, reference %a",entry.metadata.name,list[1][1]) end end - if entry.external then +-- move up ? 
+-- if entry.external then + if options.multiple or entry.external then local list = entry.list list[#list] = nil end @@ -1741,7 +1788,7 @@ interfaces.implement { function registers.integrate(utilitydata) local filename = utilitydata.comment.file - if filename then + if filename and filename ~= environment.jobname then local structures = utilitydata.structures if structures then local registers = structures.registers.collected or { } diff --git a/tex/context/base/mkxl/strc-reg.mkxl b/tex/context/base/mkxl/strc-reg.mkxl index afe3d27a0..464ac4eb1 100644 --- a/tex/context/base/mkxl/strc-reg.mkxl +++ b/tex/context/base/mkxl/strc-reg.mkxl @@ -792,6 +792,25 @@ {\doifelsefiledefined{#1}{}{\usefile[#1][#2]}% \clf_useregister{#1}{#2}{#3}{#4}} +% \permanent\protected\tolerant\def\useregister[#1]#*[#2]#*[#3]#*[#4]% tag file class prefix +% {\begingroup +% \doifelsefiledefined{#1}{}{\usefile[#1][#2]}% +% \ifhastok={#4}% +% \getdummyparameters[\c!prefix=#1,\c!label=#1,#4]% +% \else +% \getdummyparameters[\c!prefix=#4,\c!label=#4]% +% \fi +% \clf_useregister +% {#1} +% { +% filename {#2} +% class {#3} +% prefix {\dummyparameter\c!prefix} +% label {\dummyparameter\c!label} +% } +% \relax +% \endgroup} + %D Character rendering (sections): \installcorenamespace{registerindicator} @@ -1123,15 +1142,16 @@ % todo: adapt \strc_references_goto_internal to take an extra argument, the ref \permanent\protected\def\withregisterpagecommand#1#2#3#4% #1:processor #2:internal #3:realpage #4:page - {\ifcase#3\relax - {\tt [entry\space not\space flushed]}% + {\begingroup + \ifcase#3\relax + \tt [entry\space not\space flushed]% \else \def\currentregisterpageindex{#2}% \def\currentregisterrealpage{#3}% \ifchknum\currentregisterpageindex\or \lettonothing\currentregisterpageprefix \else - \def\currentregisterpageprefix{\clf_registerprefix{\currentregisterpageindex}}% + \def\currentregisterpageprefix{\clf_registerlabel{\currentregisterpageindex}}% \fi \iflocation \ifempty\currentregisterpageprefix @@ 
-1144,7 +1164,8 @@ \setlocationattributes \fi \applyprocessor{#1}{\currentregisterpageprefix\registerparameter\c!pagecommand{#4}}% - \fi} + \fi + \endgroup} \lettonothing\m_current_register @@ -1281,7 +1302,7 @@ \ifchknum\currentregisterseeindex\or \lettonothing\currentregisterpageprefix \else - \def\currentregisterpageprefix{\clf_registerprefix{\currentregisterseeindex}}% + \def\currentregisterpageprefix{\clf_registerlabel{\currentregisterseeindex}}% \fi \iflocation \ifempty\currentregisterpageprefix diff --git a/tex/context/base/mkxl/tabl-ntb.mkxl b/tex/context/base/mkxl/tabl-ntb.mkxl index 6e95512cd..b82dcb585 100644 --- a/tex/context/base/mkxl/tabl-ntb.mkxl +++ b/tex/context/base/mkxl/tabl-ntb.mkxl @@ -1634,13 +1634,22 @@ % enabled per 2018-02-22 -\def\tabl_ntb_table_get_max_width_step - {\advanceby\scratchdimen\tabl_ntb_get_wid\fastloopindex - \advanceby\scratchdimen\tabl_ntb_get_dis\fastloopindex} +% \def\tabl_ntb_table_get_max_width_step +% {\advanceby\scratchdimen\tabl_ntb_get_wid\fastloopindex +% \advanceby\scratchdimen\tabl_ntb_get_dis\fastloopindex} +% +% \def\tabl_ntb_table_get_max_width +% {\scratchdimen\zeropoint +% \dofastloopcs\c_tabl_ntb_maximum_col\tabl_ntb_table_get_max_width_step +% \ifdim\scratchdimen<\wd\scratchbox\relax +% \scratchdimen\wd\scratchbox\relax +% \fi} \def\tabl_ntb_table_get_max_width {\scratchdimen\zeropoint - \dofastloopcs\c_tabl_ntb_maximum_col\tabl_ntb_table_get_max_width_step + \localcontrolledloop\zerocount\c_tabl_ntb_maximum_col\plusone + {\advanceby\scratchdimen\tabl_ntb_get_wid\currentloopiterator + \advanceby\scratchdimen\tabl_ntb_get_dis\currentloopiterator}% \ifdim\scratchdimen<\wd\scratchbox\relax \scratchdimen\wd\scratchbox\relax \fi} diff --git a/tex/context/base/mkxl/tabl-tbl.mkxl b/tex/context/base/mkxl/tabl-tbl.mkxl index d353074d5..6b5e38f3a 100644 --- a/tex/context/base/mkxl/tabl-tbl.mkxl +++ b/tex/context/base/mkxl/tabl-tbl.mkxl @@ -1608,13 +1608,19 @@ \tabl_tabulate_vrule_reset_indeed \fi} +% 
\def\tabl_tabulate_vrule_reset_indeed +% {\gletcsname\??tabulatevrule0\endcsname\undefined +% \dofastloopcs\c_tabl_tabulate_max_vrulecolumn\tabl_tabulate_vrule_reset_step +% \global\c_tabl_tabulate_max_vrulecolumn\zerocount} +% +% \def\tabl_tabulate_vrule_reset_step % undefined or relax +% {\gletcsname\??tabulatevrule\the\fastloopindex\endcsname\undefined} + \def\tabl_tabulate_vrule_reset_indeed - {\dofastloopcs\c_tabl_tabulate_max_vrulecolumn\tabl_tabulate_vrule_reset_step + {\localcontrolledloop\zerocount\c_tabl_tabulate_max_vrulecolumn\plusone % start at 0 + {\gletcsname\??tabulatevrule\the\currentloopiterator\endcsname\undefined}% \global\c_tabl_tabulate_max_vrulecolumn\zerocount} -\def\tabl_tabulate_vrule_reset_step % undefined or relax - {\gletcsname\??tabulatevrule\the\fastloopindex\endcsname\undefined} - \appendtoks \tabl_tabulate_vrule_reset \to \t_tabl_tabulate_every_after_row @@ -1798,11 +1804,16 @@ \tabl_tabulate_color_reset_indeed \fi} -\def\tabl_tabulate_color_reset_indeed - {\dofastloopcs\c_tabl_tabulate_max_colorcolumn\tabl_tabulate_color_reset_step} +% \def\tabl_tabulate_color_reset_indeed +% {\dofastloopcs\c_tabl_tabulate_max_colorcolumn\tabl_tabulate_color_reset_step} +% +% \def\tabl_tabulate_color_reset_step % undefined or empty? +% {\gletcsname\??tabulatecolor\number\fastloopindex\endcsname\undefined} -\def\tabl_tabulate_color_reset_step % undefined or empty? - {\gletcsname\??tabulatecolor\number\fastloopindex\endcsname\undefined} +\def\tabl_tabulate_color_reset_indeed + {\localcontrolledloop\zerocount\c_tabl_tabulate_max_colorcolumn\plusone % start at 1 + {\gletcsname\??tabulatecolor\the\currentloopiterator\endcsname\undefined}% + \global\c_tabl_tabulate_max_colorcolumn\zerocount} % why not like vrule? 
\appendtoks \tabl_tabulate_color_reset @@ -2201,34 +2212,38 @@ % {\glettonothing\tabl_tabulate_flush_collected_indeed % \global\c_tabl_tabulate_column\zerocount % \tabl_tabulate_pbreak_check +% \global\setfalse\c_tabl_tabulate_split_done % new 27/12/2022 % \dofastloopcs\c_tabl_tabulate_columns\tabl_tabulate_flush_second_step +% \ifconditional\c_tabl_tabulate_split_done\else +% \glet\tabl_tabulate_tm\s!reset % new 27/12/2022 +% \fi % \global\settrue\c_tabl_tabulate_firstflushed} -% + % \protected\def\tabl_tabulate_flush_second_step -% {\ifvoid\b_tabl_tabulate_current\fastloopindex\else +% {\ifvoid\b_tabl_tabulate_current\fastloopindex +% \else % \gdef\tabl_tabulate_flush_collected_indeed{\the\t_tabl_tabulate_dummy}% +% \ifvoid\b_tabl_tabulate_current\fastloopindex \else +% \global\settrue\c_tabl_tabulate_split_done % new 27/12/2022 +% \fi % \fi} -% -% \def\tabl_tabulate_flush_second -% {\noalign{\tabl_tabulate_flush_second_indeed}% -% \tabl_tabulate_flush_collected_indeed} \protected\def\tabl_tabulate_flush_second_indeed {\glettonothing\tabl_tabulate_flush_collected_indeed \global\c_tabl_tabulate_column\zerocount \tabl_tabulate_pbreak_check \global\setfalse\c_tabl_tabulate_split_done % new 27/12/2022 - \dofastloopcs\c_tabl_tabulate_columns\tabl_tabulate_flush_second_step + \localcontrolledloop\plusone\c_tabl_tabulate_columns\plusone{\tabl_tabulate_flush_second_step}% \ifconditional\c_tabl_tabulate_split_done\else \glet\tabl_tabulate_tm\s!reset % new 27/12/2022 \fi \global\settrue\c_tabl_tabulate_firstflushed} \protected\def\tabl_tabulate_flush_second_step - {\ifvoid\b_tabl_tabulate_current\fastloopindex + {\ifvoid\b_tabl_tabulate_current\currentloopiterator \else \gdef\tabl_tabulate_flush_collected_indeed{\the\t_tabl_tabulate_dummy}% - \ifvoid\b_tabl_tabulate_current\fastloopindex \else + \ifvoid\b_tabl_tabulate_current\currentloopiterator \else \global\settrue\c_tabl_tabulate_split_done % new 27/12/2022 \fi \fi} @@ -3262,7 +3277,7 @@ %\letcsname\??tabulatespana 
r\endcsname\relax \noaligned\tolerant\def\tabl_tabulate_NS[#1]#*[#2]% - {\NC\loopcs{#1}\tabl_tabulate_span + {\NC\loopcs{#1}\tabl_tabulate_span % use localloop and quit \gdef\tabl_tabulate_kooh {\begincsname\??tabulatespana#2\endcsname \glet\tabl_tabulate_kooh\relax}% diff --git a/tex/context/base/mkxl/trac-vis.lmt b/tex/context/base/mkxl/trac-vis.lmt index dddb4799d..c9b68b407 100644 --- a/tex/context/base/mkxl/trac-vis.lmt +++ b/tex/context/base/mkxl/trac-vis.lmt @@ -1946,7 +1946,7 @@ do head, current = ruledkern(head,current,vertical) end end - goto next; + goto next ::list:: if id == hlist_code then local content = getlist(current) diff --git a/tex/context/base/mkxl/typo-cln.lmt b/tex/context/base/mkxl/typo-cln.lmt new file mode 100644 index 000000000..469859162 --- /dev/null +++ b/tex/context/base/mkxl/typo-cln.lmt @@ -0,0 +1,109 @@ +if not modules then modules = { } end modules ['typo-cln'] = { + version = 1.001, + comment = "companion to typo-cln.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- This quick and dirty hack took less time than listening to a CD (In +-- this case Dream Theaters' Octavium). Of course extensions will take +-- more time. + +-- This feature is probably never used so we can get rid of it. 
+ +local tonumber = tonumber +local utfbyte = utf.byte + +local trace_cleaners = false trackers.register("typesetters.cleaners", function(v) trace_cleaners = v end) +local trace_autocase = false trackers.register("typesetters.cleaners.autocase",function(v) trace_autocase = v end) + +local report_cleaners = logs.reporter("nodes","cleaners") +local report_autocase = logs.reporter("nodes","autocase") + +typesetters.cleaners = typesetters.cleaners or { } +local cleaners = typesetters.cleaners + +local variables = interfaces.variables + +local nodecodes = nodes.nodecodes + +local enableaction = nodes.tasks.enableaction + +local texsetattribute = tex.setattribute + +local nuts = nodes.nuts + +local getattr = nuts.getattr +local setattr = nuts.setattr + +local setchar = nuts.setchar + +local nextglyph = nuts.traversers.glyph + +local unsetvalue = attributes.unsetvalue + +local glyph_code = nodecodes.glyph +local uccodes = characters.uccodes + +local a_cleaner = attributes.private("cleaner") + +local resetter = { -- this will become an entry in char-def + [utfbyte(".")] = true +} + +-- Contrary to the casing code we need to keep track of a state. +-- We could extend the casing code with a status tracker but on +-- the other hand we might want to apply casing afterwards. So, +-- cleaning comes first. + +function cleaners.handler(head) + local inline = false + for n, char, font in nextglyph, head do + if resetter[char] then + inline = false + elseif not inline then + local a = getattr(n,a_cleaner) + if a == 1 then -- currently only one cleaner so no need to be fancy + local upper = uccodes[char] + if type(upper) == "table" then + -- some day, not much change that \SS ends up here + else + setchar(n,upper) + if trace_autocase then + report_autocase("") + end + end + end + inline = true + end + end + return head +end + +-- see typo-cap for a more advanced settings handler .. 
not needed now + +local enabled = false + +function cleaners.set(n) + if n == variables.reset or not tonumber(n) or n == 0 then + texsetattribute(a_cleaner,unsetvalue) + else + if not enabled then + enableaction("processors","typesetters.cleaners.handler") + if trace_cleaners then + report_cleaners("enabling cleaners") + end + enabled = true + end + texsetattribute(a_cleaner,tonumber(n)) + end +end + +-- interface + +interfaces.implement { + name = "setcharactercleaning", + actions = cleaners.set, + arguments = "string" +} diff --git a/tex/context/base/mkxl/typo-cln.mkxl b/tex/context/base/mkxl/typo-cln.mkxl index 84fc1d235..fba9d4ab8 100644 --- a/tex/context/base/mkxl/typo-cln.mkxl +++ b/tex/context/base/mkxl/typo-cln.mkxl @@ -15,7 +15,7 @@ \unprotect -\registerctxluafile{typo-cln}{} +\registerctxluafile{typo-cln}{autosuffix} \definesystemattribute[cleaner][public] diff --git a/tex/context/base/mkxl/typo-dha.lmt b/tex/context/base/mkxl/typo-dha.lmt new file mode 100644 index 000000000..e1a6662c4 --- /dev/null +++ b/tex/context/base/mkxl/typo-dha.lmt @@ -0,0 +1,481 @@ +if not modules then modules = { } end modules ['typo-dha'] = { + version = 1.001, + comment = "companion to typo-dir.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +-- Some analysis by Idris: +-- +-- 1. Assuming the reading- vs word-order distinction (bidi-char types) is governing; +-- 2. Assuming that 'ARAB' represents an actual arabic string in raw input order, not word-order; +-- 3. 
Assuming that 'BARA' represent the correct RL word order; +-- +-- Then we have, with input: LATIN ARAB +-- +-- \textdirection 1 LATIN ARAB => LATIN BARA +-- \textdirection 1 LATIN ARAB => LATIN BARA +-- \textdirection 1 LRO LATIN ARAB => LATIN ARAB +-- \textdirection 1 LRO LATIN ARAB => LATIN ARAB +-- \textdirection 1 RLO LATIN ARAB => NITAL ARAB +-- \textdirection 1 RLO LATIN ARAB => NITAL ARAB + +-- elseif d == "es" then -- European Number Separator +-- elseif d == "et" then -- European Number Terminator +-- elseif d == "cs" then -- Common Number Separator +-- elseif d == "nsm" then -- Non-Spacing Mark +-- elseif d == "bn" then -- Boundary Neutral +-- elseif d == "b" then -- Paragraph Separator +-- elseif d == "s" then -- Segment Separator +-- elseif d == "ws" then -- Whitespace +-- elseif d == "on" then -- Other Neutrals + +-- todo : use new dir functions +-- todo : make faster +-- todo : move dir info into nodes +-- todo : swappable tables and floats i.e. start-end overloads (probably loop in builders) + +-- I removed the original tracing code and now use the colorful one. If I ever want to change +-- something I will just inject prints for tracing. 
+ +local nodes, node = nodes, node + +local trace_directions = false trackers.register("typesetters.directions", function(v) trace_directions = v end) + +local report_directions = logs.reporter("typesetting","text directions") + +local nuts = nodes.nuts + +local getnext = nuts.getnext +local getprev = nuts.getprev +local getchar = nuts.getchar +local getid = nuts.getid +local getsubtype = nuts.getsubtype +local getlist = nuts.getlist +local getattr = nuts.getattr +local getprop = nuts.getprop +local getdirection = nuts.getdirection +local isglyph = nuts.isglyph -- or ischar + +local setprop = nuts.setprop +local setstate = nuts.setstate +local setchar = nuts.setchar + +local insertnodebefore = nuts.insertbefore +local insertnodeafter = nuts.insertafter +local remove_node = nuts.remove +local endofmath = nuts.endofmath + +local startofpar = nuts.startofpar + +local nodepool = nuts.pool + +local nodecodes = nodes.nodecodes +local gluecodes = nodes.gluecodes + +local glyph_code = nodecodes.glyph +local math_code = nodecodes.math +local kern_code = nodecodes.kern +local glue_code = nodecodes.glue +local dir_code = nodecodes.dir +local par_code = nodecodes.par + +local dirvalues = nodes.dirvalues +local lefttoright_code = dirvalues.lefttoright +local righttoleft_code = dirvalues.righttoleft + +local parfillskip_code = gluecodes.parfillskip + +local new_direction = nodepool.direction + +local insert = table.insert + +local fonthashes = fonts.hashes +local fontchar = fonthashes.characters + +local chardirections = characters.directions +local charmirrors = characters.mirrors +local charclasses = characters.textclasses + +local directions = typesetters.directions +local setcolor = directions.setcolor +local getglobal = directions.getglobal + +local a_directions = attributes.private('directions') + +local strip = false + +local s_isol = fonts.analyzers.states.isol + +local function stopdir(finish) -- we could use finish directly + local n = new_direction(finish == 
righttoleft_code and righttoleft_code or lefttoright_code,true) + setprop(n,"direction",true) + return n +end + +local function startdir(finish) -- we could use finish directly + local n = new_direction(finish == righttoleft_code and righttoleft_code or lefttoright_code) + setprop(n,"direction",true) + return n +end + +local function nextisright(current) + current = getnext(current) + local character, id = isglyph(current) + if character then + local direction = chardirections[character] + return direction == "r" or direction == "al" or direction == "an" + end +end + +local function previsright(current) + current = getprev(current) + local character, id = isglyph(current) + if character then + local direction = chardirections[character] + return direction == "r" or direction == "al" or direction == "an" + end +end + +local function process(start) + + local head = start + local current = head + local autodir = 0 + local embedded = 0 + local override = 0 + local pardir = 0 + local textdir = 0 + local done = false + local stack = { } + local top = 0 + local obsolete = { } + local rlo = false + local lro = false + local prevattr = false + local fences = { } + + while current do + -- no isglyph here as we test for skips first + local id = getid(current) + local next = getnext(current) + if id == math_code then + current = getnext(endofmath(next)) + elseif getprop(current,"direction") then + -- this handles unhbox etc + current = next + else + local attr = getattr(current,a_directions) + if attr and attr > 0 then + if attr ~= prevattr then + if not getglobal(a) then + lro = false + rlo = false + end + prevattr = attr + end + end + local prop = true + if id == glyph_code then + if attr and attr > 0 then + local character, font = isglyph(current) + if character == 0 then + -- skip signals + -- setprop(current,"direction",true) + else + local direction = chardirections[character] + local reversed = false + if rlo or override > 0 then + if direction == "l" then + direction = 
"r" + reversed = true + end + elseif lro or override < 0 then + if direction == "r" or direction == "al" then + setstate(current,s_isol) -- hm + direction = "l" + reversed = true + end + end + if direction == "on" then + local mirror = charmirrors[character] + if mirror and fontchar[font][mirror] then + local class = charclasses[character] + if class == "open" then + if nextisright(current) then + setchar(current,mirror) + -- setprop(current,"direction","r") + prop = "r" + elseif autodir < 0 then + setchar(current,mirror) + -- setprop(current,"direction","r") + prop = "r" + else + mirror = false + -- setprop(current,"direction","l") + prop = "l" + end + local fencedir = autodir == 0 and textdir or autodir + fences[#fences+1] = fencedir + elseif class == "close" and #fences > 0 then + local fencedir = fences[#fences] + fences[#fences] = nil + if fencedir < 0 then + setchar(current,mirror) + -- setprop(current,"direction","r") + prop = "r" + else + -- setprop(current,"direction","l") + prop = "l" + mirror = false + end + elseif autodir < 0 then + setchar(current,mirror) + -- setprop(current,"direction","r") + prop = "r" + else + -- setprop(current,"direction","l") + prop = "l" + mirror = false + end + else + -- setprop(current,"direction",true) + end + if trace_directions then + setcolor(current,direction,false,mirror) + end + elseif direction == "l" then + if trace_directions then + setcolor(current,"l",reversed) + end + -- setprop(current,"direction","l") + prop = "l" + elseif direction == "r" then + if trace_directions then + setcolor(current,"r",reversed) + end + -- setprop(current,"direction","r") + prop = "r" + elseif direction == "en" then -- european number + if trace_directions then + setcolor(current,"l") + end + -- setprop(current,"direction","l") + prop = "l" + elseif direction == "al" then -- arabic letter + if trace_directions then + setcolor(current,"r") + end + -- setprop(current,"direction","r") + prop = "r" + elseif direction == "an" then -- arabic 
number + -- needs a better scanner as it can be a float + if trace_directions then + setcolor(current,"l") -- was r + end + -- setprop(current,"direction","n") -- was r + prop = "n" + elseif direction == "lro" then -- Left-to-Right Override -> right becomes left + top = top + 1 + stack[top] = { override, embedded } + override = -1 + obsolete[#obsolete+1] = current + goto obsolete + elseif direction == "rlo" then -- Right-to-Left Override -> left becomes right + top = top + 1 + stack[top] = { override, embedded } + override = 1 + obsolete[#obsolete+1] = current + goto obsolete + elseif direction == "lre" then -- Left-to-Right Embedding -> lefttoright_code + top = top + 1 + stack[top] = { override, embedded } + embedded = 1 + obsolete[#obsolete+1] = current + goto obsolete + elseif direction == "rle" then -- Right-to-Left Embedding -> righttoleft_code + top = top + 1 + stack[top] = { override, embedded } + embedded = -1 + obsolete[#obsolete+1] = current + goto obsolete + elseif direction == "pdf" then -- Pop Directional Format + if top > 0 then + local s = stack[top] + override = s[1] + embedded = s[2] + top = top - 1 + else + override = 0 + embedded = 0 + end + obsolete[#obsolete+1] = current + goto obsolete + elseif trace_directions then + setcolor(current) + -- setprop(current,"direction",true) + else + -- setprop(current,"direction",true) + end + end + else + -- setprop(current,"direction",true) + end + elseif id == glue_code then + if getsubtype(current) == parfillskip_code then + -- setprop(current,"direction","!") + prop = "!" 
+ else + -- setprop(current,"direction","g") + prop = "g" + end + elseif id == kern_code then + -- setprop(current,"direction","k") + prop = "k" + elseif id == dir_code then + local direction, pop = getdirection(current) + if direction == righttoleft_code then + if not pop then + autodir = -1 + elseif embedded and embedded~= 0 then + autodir = embedded + else + autodir = 0 + end + elseif direction == lefttoright_code then + if not pop then + autodir = 1 + elseif embedded and embedded~= 0 then + autodir = embedded + else + autodir = 0 + end + end + textdir = autodir + -- setprop(current,"direction",true) + elseif id == par_code and startofpar(current) then + local direction = getdirection(current) + if direction == righttoleft_code then + autodir = -1 + elseif direction == lefttoright_code then + autodir = 1 + end + pardir = autodir + textdir = pardir + -- setprop(current,"direction",true) + else + -- setprop(current,"direction",true) + end + setprop(current,"direction",prop) + ::obsolete:: + current = next + end + end + + -- todo: track if really needed + -- todo: maybe we need to set the property (as it can be a copied list) + + if done and strip then + local n = #obsolete + if n > 0 then + for i=1,n do + remove_node(head,obsolete[i],true) + end + if trace_directions then + report_directions("%s character nodes removed",n) + end + end + end + + local state = false + local last = false + local collapse = true + current = head + + -- todo: textdir + -- todo: inject before parfillskip + + while current do + local id = getid(current) + if id == math_code then + -- todo: this might be tricky nesting + current = getnext(endofmath(getnext(current))) + else + local cp = getprop(current,"direction") + if cp == "n" then + local swap = state == "r" + if swap then + head = insertnodebefore(head,current,startdir(lefttoright_code)) + end + setprop(current,"direction",true) + while true do + local n = getnext(current) + if n and getprop(n,"direction") == "n" then + current = n + 
setprop(current,"direction",true) + else + break + end + end + if swap then + head, current = insertnodeafter(head,current,stopdir(lefttoright_code)) + end + elseif cp == "l" then + if state ~= "l" then + if state == "r" then + head = insertnodebefore(head,last or current,stopdir(righttoleft_code)) + end + head = insertnodebefore(head,current,startdir(lefttoright_code)) + state = "l" + done = true + end + last = false + elseif cp == "r" then + if state ~= "r" then + if state == "l" then + head = insertnodebefore(head,last or current,stopdir(lefttoright_code)) + end + head = insertnodebefore(head,current,startdir(righttoleft_code)) + state = "r" + done = true + end + last = false + elseif collapse then + if cp == "k" or cp == "g" then + last = last or current + else + last = false + end + else + if state == "r" then + head = insertnodebefore(head,current,stopdir(righttoleft_code)) + elseif state == "l" then + head = insertnodebefore(head,current,stopdir(lefttoright_code)) + end + state = false + last = false + end + setprop(current,"direction",true) + end + local next = getnext(current) + if next then + current = next + else + local sd = (state == "r" and stopdir(righttoleft_code)) or (state == "l" and stopdir(lefttoright_code)) + if sd then + if id == glue_code and getsubtype(current) == parfillskip_code then + head = insertnodebefore(head,current,sd) + else + head = insertnodeafter(head,current,sd) + end + end + break + end + end + + return head + +end + +directions.installhandler(interfaces.variables.default,process) diff --git a/tex/context/base/mkxl/typo-dir.mkxl b/tex/context/base/mkxl/typo-dir.mkxl index a5a4bc568..d9937ce73 100644 --- a/tex/context/base/mkxl/typo-dir.mkxl +++ b/tex/context/base/mkxl/typo-dir.mkxl @@ -19,9 +19,7 @@ \unprotect \registerctxluafile{typo-dir}{autosuffix} -\registerctxluafile{typo-dha}{} -%registerctxluafile{typo-dua}{} -%registerctxluafile{typo-dub}{} +\registerctxluafile{typo-dha}{autosuffix} 
\registerctxluafile{typo-duc}{autosuffix} \definesystemattribute[directions][public,pickup] diff --git a/tex/context/base/mkxl/typo-prc.mklx b/tex/context/base/mkxl/typo-prc.mklx index f2df32986..f9a8f8e5e 100644 --- a/tex/context/base/mkxl/typo-prc.mklx +++ b/tex/context/base/mkxl/typo-prc.mklx @@ -54,6 +54,8 @@ \installcommandhandler \??processor {processor} \??processor +\mutable\let\currentprocessor\empty % weird that this is needed + \appendtoks \letcsname\??processorcheck\currentprocessor\endcsname\relax \clf_registerstructureprocessor{\currentprocessor}% global, but it permits using processor that are yet undefined |