@@ -639,8 +614,8 @@
\fi\fi\fi
\next}
-%D For very obscure applications (see for an application \type
-%D {lang-sla.tex}) we provide:
+%D For very obscure applications (see \type {lang-sla.tex} for an application) we
+%D provide:
\def\simplifiedcompoundcharacter#1#2%
{\ifcsname\??compoundsingle\string#1\string#2\endcsname
@@ -652,8 +627,8 @@
%D \macros
%D {disablediscretionaries,disablecompoundcharacter}
%D
-%D Occasionally we need to disable this mechanism. For the
-%D moment we assume that \type {|} is used.
+%D Occasionally we need to disable this mechanism. For the moment we assume that
+%D \type {|} is used.
\let\disablediscretionaries \ignorediscretionaries
\let\disablecompoundcharacters\ignorecompoundcharacter
@@ -668,9 +643,8 @@
%D \macros
%D {compound}
%D
-%D We will overload the already active \type {|} so we have
-%D to save its meaning in order to be able to use this handy
-%D macro.
+%D We will overload the already active \type {|} so we have to save its meaning in
+%D order to be able to use this handy macro.
%D
%D \starttyping
%D so test\compound{}test can be used instead of test||test
@@ -688,8 +662,7 @@
\egroup
-%D Here we hook some code into the clean up mechanism needed
-%D for verbatim data.
+%D Here we hook some code into the clean up mechanism needed for verbatim data.
\appendtoks
\disablecompoundcharacters
diff --git a/tex/context/base/lang-rep.lua b/tex/context/base/lang-rep.lua
index 31ae36e6d..95a5e545a 100644
--- a/tex/context/base/lang-rep.lua
+++ b/tex/context/base/lang-rep.lua
@@ -7,9 +7,21 @@ if not modules then modules = { } end modules ['lang-rep'] = {
}
-- A BachoTeX 2013 experiment, probably not that useful. Eventually I used a simpler
--- more generic example.
+-- more generic example. I'm sure no one ever notices or even needs this code.
+--
+-- As a follow-up on a question by Alan about special treatment of dropped caps I wonder
+-- if I can make this one more clever (probably in a few more dev steps), for instance by
+-- injecting or replacing nodes. It's a prelude to a kind of lpeg for nodes,
+-- although (given experiences so far) we don't really need that. After all, each problem
+-- is somewhat unique.
+local type, tonumber = type, tonumber
local utfbyte, utfsplit = utf.byte, utf.split
+local P, C, U, Cc, Ct, lpegmatch = lpeg.P, lpeg.C, lpeg.patterns.utf8character, lpeg.Cc, lpeg.Ct, lpeg.match
+local find = string.find
+
+local grouped = P("{") * ( Ct((U/utfbyte-P("}"))^1) + Cc(false) ) * P("}") -- {...} -> list of byte codes, {} -> false
+local splitter = Ct((Ct(Cc("discretionary") * grouped * grouped * grouped) + U/utfbyte)^1)
local trace_replacements = false trackers.register("languages.replacements", function(v) trace_replacements = v end)
local trace_detail = false trackers.register("languages.replacements.detail", function(v) trace_detail = v end)
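A hedged illustration of the two lpeg patterns above (an editorial sketch, not part of the patch; the input string is made up): plain characters are captured as byte codes, while three consecutive brace groups form a discretionary specification whose parts are byte lists, with false standing for an empty group.

local codes = lpegmatch(splitter,"ab{cd}{e}{}f")
-- codes == {
--   97, 98,                                            -- "a", "b"
--   { "discretionary", { 99, 100 }, { 101 }, false },  -- {cd}{e}{}
--   102,                                               -- "f"
-- }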
@@ -18,15 +30,34 @@ local report_replacement = logs.reporter("languages","replacements")
local glyph_code = nodes.nodecodes.glyph
-local insert_node_before = nodes.insert_before
-local remove_node = nodes.remove
-local copy_node = nodes.copy
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getattr = nuts.getattr
+local getid = nuts.getid
+local getchar = nuts.getchar
+
+local insert_node_before = nuts.insert_before
+local remove_node = nuts.remove
+local copy_node = nuts.copy
+local flush_list = nuts.flush_list
+local insert_after = nuts.insert_after
+
+local nodepool = nuts.pool
+local new_glyph = nodepool.glyph
+local new_disc = nodepool.disc
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
local v_reset = interfaces.variables.reset
+local implement = interfaces.implement
+
local replacements = languages.replacements or { }
languages.replacements = replacements
@@ -46,23 +77,32 @@ table.setmetatableindex(lists,function(lists,name)
return data
end)
+lists[v_reset].attribute = unsetvalue -- so we discard 0
+
+-- todo: glue kern
+
local function add(root,word,replacement)
local list = utfsplit(word,true)
- for i=1,#list do
+ local size = #list
+ for i=1,size do
local l = utfbyte(list[i])
if not root[l] then
root[l] = { }
end
- if i == #list then
- local newlist = utfsplit(replacement,true)
- for i=1,#newlist do
- newlist[i] = utfbyte(newlist[i])
- end
+ if i == size then
+ -- local newlist = utfsplit(replacement,true)
+ -- for i=1,#newlist do
+ -- newlist[i] = utfbyte(newlist[i])
+ -- end
+ local special = find(replacement,"{",1,true)
+ local newlist = lpegmatch(splitter,replacement)
+ --
root[l].final = {
word = word,
replacement = replacement,
- oldlength = #list,
+ oldlength = size,
newcodes = newlist,
+ special = special,
}
end
root = root[l]
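For orientation, a hedged sketch (hypothetical data, not part of the patch) of the per-list trie that add builds: each character code indexes one level deeper and the node of the last character carries the final record that the handler consumes.

-- roughly what root looks like after add(root,"new","cool"):
local root_sketch = {
  [110] = {                -- "n"
    [101] = {              -- "e"
      [119] = {            -- "w"
        final = {
          word        = "new",
          replacement = "cool",
          oldlength   = 3,
          newcodes    = { 99, 111, 111, 108 }, -- "c" "o" "o" "l"
          special     = nil,                   -- no "{" in the replacement
        },
      },
    },
  },
}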
@@ -83,13 +123,13 @@ end
local function hit(a,head)
local tree = trees[a]
if tree then
- local root = tree[head.char]
+ local root = tree[getchar(head)]
if root then
- local current = head.next
+ local current = getnext(head)
local lastrun = false
local lastfinal = false
- while current and current.id == glyph_code do
- local newroot = root[current.char]
+ while current and getid(current) == glyph_code do
+ local newroot = root[getchar(current)]
if not newroot then
return lastrun, lastfinal
else
@@ -104,7 +144,7 @@ local function hit(a,head)
root = newroot
end
end
- current = current.next
+ current = getnext(current)
end
if lastrun then
return lastrun, lastfinal
@@ -113,11 +153,27 @@ local function hit(a,head)
end
end
+local function tonodes(list,template)
+ local head, current
+ for i=1,#list do
+ local new = copy_node(template)
+ setfield(new,"char",list[i])
+ if head then
+ head, current = insert_after(head,current,new)
+ else
+ head, current = new, new
+ end
+ end
+ return head
+end
+
+
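A hedged usage note for the helper above (setdiscpre is a hypothetical wrapper, not in the patch): the template glyph is copied so font and attributes are inherited, which is how the discretionary branch further down fills the pre, post and replace fields.

-- hypothetical wrapper: fill the pre field of a discretionary from byte codes,
-- cloning an existing glyph nut so its font and attributes carry over
local function setdiscpre(disc,template,codes)
    setfield(disc,"pre",tonodes(codes,template))
end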
function replacements.handler(head)
+ head = tonut(head)
local current = head
local done = false
while current do
- if current.id == glyph_code then
+ if getid(current) == glyph_code then
local a = getattr(current,a_replacements)
if a then
local last, final = hit(a,current)
@@ -125,46 +181,90 @@ function replacements.handler(head)
local oldlength = final.oldlength
local newcodes = final.newcodes
local newlength = #newcodes
- if report_replacement then
+ if trace_replacements then
report_replacement("replacing word %a by %a",final.word,final.replacement)
end
- if oldlength == newlength then -- #old == #new
+ if final.special then
+ -- it's easier to delete and insert (a simple callout to tex would be more efficient);
+ -- maybe just walk over a replacement string instead
+ local prev = getprev(current)
+ local next = getnext(last)
+ local list = current
+ setfield(last,"next",nil)
+ setfield(prev,"next",next)
+ if next then
+ setfield(next,"prev",prev)
+ end
+ current = prev
+ if not current then
+ head = nil
+ end
+ for i=1,newlength do
+ local codes = newcodes[i]
+ local new = nil
+ if type(codes) == "table" then
+ local method = codes[1]
+ if method == "discretionary" then
+ local pre, post, replace = codes[2], codes[3], codes[4]
+ new = new_disc()
+ if pre then
+ setfield(new,"pre",tonodes(pre,last))
+ end
+ if post then
+ setfield(new,"post",tonodes(post,last))
+ end
+ if replace then
+ setfield(new,"replace",tonodes(replace,last))
+ end
+ else
+ -- todo
+ end
+ else
+ new = copy_node(last)
+ setfield(new,"char",codes)
+ end
+ if new then
+ head, current = insert_after(head,current,new)
+ end
+ end
+ flush_list(list)
+ elseif oldlength == newlength then -- #old == #new
for i=1,newlength do
- current.char = newcodes[i]
- current = current.next
+ setfield(current,"char",newcodes[i])
+ current = getnext(current)
end
elseif oldlength < newlength then -- #old < #new
for i=1,newlength-oldlength do
local n = copy_node(current)
- n.char = newcodes[i]
+ setfield(n,"char",newcodes[i])
head, current = insert_node_before(head,current,n)
- current = current.next
+ current = getnext(current)
end
for i=newlength-oldlength+1,newlength do
- current.char = newcodes[i]
- current = current.next
+ setfield(current,"char",newcodes[i])
+ current = getnext(current)
end
else -- #old > #new
for i=1,oldlength-newlength do
head, current = remove_node(head,current,true)
end
for i=1,newlength do
- current.char = newcodes[i]
- current = current.next
+ setfield(current,"char",newcodes[i])
+ current = getnext(current)
end
end
done = true
end
end
end
- current = current.next
+ current = getnext(current)
end
- return head, done
+ return tonode(head), done
end
local enabled = false
-function replacements.set(n) -- number or 'reset'
+function replacements.set(n)
if n == v_reset then
n = unsetvalue
else
@@ -182,8 +282,14 @@ end
-- interface
-commands.setreplacements = replacements.set
-commands.addreplacements = replacements.add
+implement {
+ name = "setreplacements",
+ actions = replacements.set,
+ arguments = "string"
+}
-nodes.tasks.prependaction("processors","words","languages.replacements.handler")
-nodes.tasks.disableaction("processors","languages.replacements.handler")
+implement {
+ name = "addreplacements",
+ actions = replacements.add,
+ arguments = { "string", "string", "string" }
+}
diff --git a/tex/context/base/lang-rep.mkiv b/tex/context/base/lang-rep.mkiv
new file mode 100644
index 000000000..a98d51f6c
--- /dev/null
+++ b/tex/context/base/lang-rep.mkiv
@@ -0,0 +1,75 @@
+%D \module
+%D [ file=lang-rep,
+%D version=2013.04.28,
+%D title=\CONTEXT\ Language Macros,
+%D subtitle=Substitution,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D As I needed an example of messing with nodes for the Bacho\TEX\ tutorial
+%D I cooked this up. In the end I decided to stick to a simpler example and
+%D just finished this off in case someone really needs it.
+
+\writestatus{loading}{ConTeXt Language Macros / Replacements}
+
+\unprotect
+
+\registerctxluafile{lang-rep}{1.001}
+
+\definesystemattribute[replacements][public]
+
+%D \startluacode
+%D
+%D -- todo: other nodes (prelude to more experiments with auto dropped caps)
+%D
+%D languages.replacements.add("basics", {
+%D ["aap"] = "monkey",
+%D ["noot"] = "nut",
+%D ["never"] = "forever",
+%D ["newer"] = "cooler",
+%D ["new"] = "cool",
+%D -- ["special"] = "veryspe{>>>}{<<<}{=}cial",
+%D })
+%D
+%D \stopluacode
+%D
+%D \replaceword[more][this][that]
+%D \replaceword[more][crap][support]
+%D \replaceword[more][---][—]
+%D \replaceword[basics][special][veryspe{>>>}{<<<}{=}cial]
+%D
+%D \starttyping
+%D \start \setreplacements[basics] What the heck, it's now or never, isn't it new? \par \stop
+%D \start \setreplacements[more] Do we --- {\it really} --- need this kind of crap? \par \stop
+%D \start \setreplacements[basics] All kinds of special thingies! \par \stop
+%D \start \setreplacements[basics] \hsize1mm special \par \stop
+%D \stoptyping
+
+\unexpanded\def\setreplacements[#1]%
+ {\clf_setreplacements{#1}}
+
+\unexpanded\def\resetreplacements
+ {\attribute\replacementsattribute\attributeunsetvalue}
+
+\unexpanded\def\replaceword
+ {\dotripleargument\languages_replacements_replace}
+
+\unexpanded\def\languages_replacements_replace[#1][#2][#3]%
+ {\ifthirdargument
+ \clf_addreplacements{#1}{#2}{#3}%
+ \fi}
+
+\appendtoks
+ \resetreplacements
+\to \everyresettypesetting
+
+\appendtoks
+ \resetreplacements
+\to \everyinitializeverbatim
+
+\protect \endinput
diff --git a/tex/context/base/lang-txt.lua b/tex/context/base/lang-txt.lua
index 4c3a3a985..2938550ee 100644
--- a/tex/context/base/lang-txt.lua
+++ b/tex/context/base/lang-txt.lua
@@ -181,6 +181,11 @@ data.labels={
sk="cotg",
},
},
+ diff={
+ labels={
+ en="d",
+ },
+ },
deg={
labels={
cz="deg",
@@ -415,6 +420,27 @@ data.labels={
},
},
texts={
+ ["year"]={
+ labels={
+ en="year",
+ nl="jaar",
+ kr="년",
+ },
+ },
+ ["month"]={
+ labels={
+ en="month",
+ nl="maand",
+ kr="월",
+ },
+ },
+ ["day"]={
+ labels={
+ en="day",
+ nl="dag",
+ kr="일",
+ },
+ },
["and"]={
labels={
af="",
@@ -506,7 +532,7 @@ data.labels={
hu="április",
it="aprile",
ja="4",
- kr="4월",
+ kr="4",
la="Aprilis",
lt="balandžio",
nb="april",
@@ -613,7 +639,7 @@ data.labels={
hu="augusztus",
it="agosto",
ja="8",
- kr="8월",
+ kr="8",
la="Augustus",
lt="rugpjūčio",
nb="august",
@@ -681,7 +707,7 @@ data.labels={
fr="Chapitre ",
gr="Κεφάλαιο",
hr="Poglavlje ",
- hu=",. fejezet:",
+ hu={""," fejezet"},
it="",
ja={"第","章"},
kr={"제","장"},
@@ -756,7 +782,7 @@ data.labels={
hu="december",
it="dicembre",
ja="12",
- kr="12월",
+ kr="12",
la="December",
lt="gruodžio",
nb="desember",
@@ -828,7 +854,7 @@ data.labels={
hu="február",
it="febbraio",
ja="2",
- kr="2월",
+ kr="2",
la="Februarius",
lt="vasario",
nb="februar",
@@ -895,12 +921,12 @@ data.labels={
fr="Figure ",
gr="Σχήμα",
hr="Slika ",
- hu=",. ábra:",
+ hu={""," ábra"},
it="Fig. ",
ja="図",
- kr="그림",
+ kr="그림 ",
la="Imago ",
- lt=", pav.",
+ lt={""," pav."},
nb="Figur ",
nl="Figuur ",
nn="Figur ",
@@ -969,7 +995,7 @@ data.labels={
fr="Illustration ",
gr="Γραφικό",
hr="Slika ",
- hu=",. kép:",
+ hu={""," kép"},
it="Grafico ",
ja="イラスト",
la="Typus ",
@@ -990,6 +1016,18 @@ data.labels={
vi="Đồ thị",
},
},
+ precedingpage={
+ labels={
+ en="on a preceding page",
+ nl="op een voorgaande bladzijde",
+ },
+ },
+ followingpage={
+ labels={
+ en="on a following page",
+ nl="op een volgende bladzijde",
+ },
+ },
hencefore={
labels={
af="",
@@ -1041,6 +1079,7 @@ data.labels={
hr="vidi ispod",
hu="lásd lejjebb",
it="come mostrato sotto",
+ kr="이후로",
la="",
lt="kaip parodyta žemiau",
nb="som vist under",
@@ -1074,7 +1113,7 @@ data.labels={
fr="Intermède ",
gr="Παύση",
hr="Intermeco ",
- hu=",. intermezzo:",
+ hu={""," intermezzo"},
it="Intermezzo ",
ja="間奏曲",
kr="간주곡",
@@ -1116,7 +1155,7 @@ data.labels={
hu="január",
it="gennaio",
ja="1",
- kr="1월",
+ kr="1",
la="Ianuarius",
lt="sausio",
nb="januar",
@@ -1189,7 +1228,7 @@ data.labels={
hu="július",
it="luglio",
ja="7",
- kr="7월",
+ kr="7",
la="Iulius",
lt="liepos",
nb="juli",
@@ -1261,7 +1300,7 @@ data.labels={
hu="június",
it="giugno",
ja="6",
- kr="6월",
+ kr="6",
la="Iunius",
lt="birželio",
nb="juni",
@@ -1328,7 +1367,7 @@ data.labels={
fr="ligne ",
gr="Γραμμή",
hr="redak ",
- hu=",. sor:",
+ hu={""," sor"},
it="riga ",
ja="線",
kr="행",
@@ -1406,7 +1445,7 @@ data.labels={
hu="március",
it="marzo",
ja="3",
- kr="3월",
+ kr="3",
la="Martius",
lt="kovo",
nb="mars",
@@ -1479,7 +1518,7 @@ data.labels={
hu="május",
it="maggio",
ja="5",
- kr="5월",
+ kr="5",
la="Maius",
lt="gegužės",
nb="mai",
@@ -1588,7 +1627,7 @@ data.labels={
hu="november",
it="novembre",
ja="11",
- kr="11월",
+ kr="11",
la="November",
lt="lapkričio",
nb="november",
@@ -1659,7 +1698,7 @@ data.labels={
hu="október",
it="ottobre",
ja="10",
- kr="10월",
+ kr="10",
la="October",
lt="spalio",
nb="oktober",
@@ -1761,12 +1800,12 @@ data.labels={
fr="Partie ",
gr="Μέρος",
hr="Dio ",
- hu=",. rész:",
+ hu={""," rész"},
it="Parte ",
ja={"第","パート"},
kr={"제","부"},
la="Pars ",
- lt=", dalis",
+ lt={""," dalis"},
nb="Del",
nl="Deel ",
nn="Del",
@@ -1913,7 +1952,7 @@ data.labels={
hu="szeptember",
it="settembre",
ja="9",
- kr="9월",
+ kr="9",
la="September",
lt="rugsėjo",
nb="september",
@@ -2128,12 +2167,12 @@ data.labels={
fr="Tableau ",
gr="Πίνακας",
hr="Tablica ",
- hu=",. táblázat:",
+ hu={""," táblázat"},
it="Tabella ",
ja="表",
- kr="표",
+ kr="표 ",
la="Tabula ",
- lt=", lentelė.",
+ lt={""," lentelė."},
nb="Tabell ",
nl="Tabel ",
nn="Tabell ",
@@ -2355,7 +2394,7 @@ data.labels={
hu="Ábrák",
it="Figure",
ja="図",
- kr="그림",
+ kr="그림 ",
la="Imagines",
lt="Iliustracijos",
nb="Figurer",
@@ -2392,7 +2431,7 @@ data.labels={
hu="Grafikák",
it="Grafici",
ja="グラフ",
- kr="그래픽",
+ kr="그래픽 ",
la="Typi",
lt="Graphics",
nb="Bilde",
@@ -2429,7 +2468,7 @@ data.labels={
hu="Index",
it="Indice",
ja="目次",
- kr="색인",
+ kr="찾아보기",
la="Indices",
lt="Rodyklė",
nb="Register",
@@ -2537,6 +2576,7 @@ data.labels={
hr="Literatura",
hu="Bibliográfia",
it="Bibliografia",
+ kr="참고문헌",
la="",
lt="Literatūra",
nb="",
@@ -2573,7 +2613,7 @@ data.labels={
hu="Táblázatok",
it="Tabelle",
ja="机",
- kr="표",
+ kr="표 ",
la="Tabulae",
lt="Lentelės",
nb="Tabeller",
@@ -2630,4 +2670,135 @@ data.labels={
},
},
},
+ btx = {
+ ["mastersthesis"] = {
+ labels = {
+ en = "Master's thesis",
+ fr = "Thèse de master (DEA, DESS, master)",
+ de = "Masterarbeit",
+ },
+ },
+ ["phdthesis"] = {
+ labels = {
+ en = "PhD thesis",
+ fr = "Thèse de doctorat",
+ de = "Dissertation",
+ },
+ },
+ ["technicalreport"] = {
+ labels = {
+ en = "Technical report",
+ fr = "Rapport technique",
+ de = "Technischer Bericht",
+ },
+ },
+ --
+ ["editor"] = {
+ labels = {
+ en = "editor",
+ fr = "éditeur",
+ de = "Herausgeber",
+ },
+ },
+ ["editors"] = {
+ labels = {
+ en = "editors",
+ fr = "éditeurs",
+ de = "Herausgeber",
+ },
+ },
+ ["edition"] = {
+ labels = {
+ en = "edition",
+ fr = "édition",
+ de = "Auflage",
+ },
+ },
+ --
+ ["volume"] = {
+ labels = {
+ en = "volume",
+ de = "Band",
+ },
+ },
+ ["Volume"] = {
+ labels = {
+ en = "Volume",
+ de = "Band",
+ },
+ },
+ ["number"] = {
+ labels = {
+ en = "number",
+ fr = "numéro",
+ de = "Numer",
+ },
+ },
+ ["Number"] = {
+ labels = {
+ en = "Number",
+ fr = "Numéro",
+ de = "Numer",
+ },
+ },
+ ["in"] = {
+ labels = {
+ en = "in",
+ fr = "dans",
+ de = "in",
+ },
+ },
+ ["of"] = {
+ labels = {
+ en = "of",
+ fr = "de",
+ de = "von",
+ },
+ },
+ --
+ ["In"] = {
+ labels = {
+ en = "In",
+ fr = "Dans",
+ de = "In",
+ },
+ },
+ --
+ ["p"] = {
+ labels = {
+ en = "p",
+ de = "S",
+ },
+ },
+ ["pp"] = {
+ labels = {
+ en = "pp",
+ de = "S",
+ },
+ },
+ ["pages"] = {
+ labels = {
+ en = "pages",
+ de = "Seiten",
+ },
+ },
+ --
+ ["and"] = {
+ labels = {
+ en = "and",
+ de = "und",
+ },
+ },
+ ["others"] = {
+ labels = {
+ en = "et al.",
+ },
+ },
+ }
}
+
+local functions = data.labels.functions
+
+functions.asin = functions.arcsin
+functions.acos = functions.arccos
+functions.atan = functions.arctan
diff --git a/tex/context/base/lang-url.lua b/tex/context/base/lang-url.lua
index 4ed5cdea1..39418beef 100644
--- a/tex/context/base/lang-url.lua
+++ b/tex/context/base/lang-url.lua
@@ -8,10 +8,16 @@ if not modules then modules = { } end modules ['lang-url'] = {
local utfcharacters, utfvalues, utfbyte, utfchar = utf.characters, utf.values, utf.byte, utf.char
-commands = commands or { }
-local commands = commands
+local commands = commands
+local context = context
-context = context
+local implement = interfaces.implement
+
+local variables = interfaces.variables
+local v_before = variables.before
+local v_after = variables.after
+
+local is_letter = characters.is_letter
--[[
Hyphenating URL's is somewhat tricky and a matter of taste. I did
@@ -24,39 +30,39 @@ commands.hyphenatedurl = commands.hyphenatedurl or { }
local hyphenatedurl = commands.hyphenatedurl
local characters = utilities.storage.allocate {
- ["!"] = 1,
- ["\""] = 1,
- ["#"] = 1,
- ["$"] = 1,
- ["%"] = 1,
- ["&"] = 1,
- ["("] = 1,
- ["*"] = 1,
- ["+"] = 1,
- [","] = 1,
- ["-"] = 1,
- ["."] = 1,
- ["/"] = 1,
- [":"] = 1,
- [";"] = 1,
- ["<"] = 1,
- ["="] = 1,
- [">"] = 1,
- ["?"] = 1,
- ["@"] = 1,
- ["["] = 1,
- ["\\"] = 1,
- ["^"] = 1,
- ["_"] = 1,
- ["`"] = 1,
- ["{"] = 1,
- ["|"] = 1,
- ["~"] = 1,
-
- ["'"] = 2,
- [")"] = 2,
- ["]"] = 2,
- ["}"] = 2,
+ ["!"] = "before",
+ ["\""] = "before",
+ ["#"] = "before",
+ ["$"] = "before",
+ ["%"] = "before",
+ ["&"] = "before",
+ ["("] = "before",
+ ["*"] = "before",
+ ["+"] = "before",
+ [","] = "before",
+ ["-"] = "before",
+ ["."] = "before",
+ ["/"] = "before",
+ [":"] = "before",
+ [";"] = "before",
+ ["<"] = "before",
+ ["="] = "before",
+ [">"] = "before",
+ ["?"] = "before",
+ ["@"] = "before",
+ ["["] = "before",
+ ["\\"] = "before",
+ ["^"] = "before",
+ ["_"] = "before",
+ ["`"] = "before",
+ ["{"] = "before",
+ ["|"] = "before",
+ ["~"] = "before",
+
+ ["'"] = "after",
+ [")"] = "after",
+ ["]"] = "after",
+ ["}"] = "after",
}
local mapping = utilities.storage.allocate {
@@ -72,27 +78,81 @@ hyphenatedurl.discretionary = nil
-- more fun is to write nodes .. maybe it's nicer to do this
-- in an attribute handler anyway
+-- local ctx_a = context.a
+-- local ctx_b = context.b
+-- local ctx_d = context.d
+-- local ctx_n = context.n
+-- local ctx_s = context.s
+
+-- local function action(hyphenatedurl,str,left,right,disc)
+-- local n = 0
+-- local b = math.max( left or hyphenatedurl.lefthyphenmin, 2)
+-- local e = math.min(#str-(right or hyphenatedurl.righthyphenmin)+2,#str)
+-- local d = disc or hyphenatedurl.discretionary
+-- local p = nil
+-- for s in utfcharacters(str) do
+-- n = n + 1
+-- s = mapping[s] or s
+-- if n > 1 then
+-- ctx_s() -- can be option
+-- end
+-- if s == d then
+-- ctx_d(utfbyte(s))
+-- else
+-- local c = characters[s]
+-- if not c or n <= b or n >= e then
+-- ctx_n(utfbyte(s))
+-- elseif c == 1 then
+-- ctx_b(utfbyte(s))
+-- elseif c == 2 then
+-- ctx_a(utfbyte(s))
+-- end
+-- end
+-- p = s
+-- end
+-- end
+
+local ctx_a = context.a
+local ctx_b = context.b
+local ctx_d = context.d
+local ctx_c = context.c
+local ctx_l = context.l
+local ctx_C = context.C
+local ctx_L = context.L
+
local function action(hyphenatedurl,str,left,right,disc)
local n = 0
local b = math.max( left or hyphenatedurl.lefthyphenmin, 2)
local e = math.min(#str-(right or hyphenatedurl.righthyphenmin)+2,#str)
local d = disc or hyphenatedurl.discretionary
+ local p = nil
for s in utfcharacters(str) do
n = n + 1
s = mapping[s] or s
- if n > 1 then
- context.s() -- can be option
- end
if s == d then
- context.d(utfbyte(s))
+ ctx_d(utfbyte(s))
else
local c = characters[s]
- if not c or n<=b or n>=e then
- context.n(utfbyte(s))
- elseif c == 1 then
- context.b(utfbyte(s))
- elseif c == 2 then
- context.a(utfbyte(s))
+ if c == v_before then
+ p = false
+ ctx_b(utfbyte(s))
+ elseif c == v_after then
+ p = false
+ ctx_a(utfbyte(s))
+ else
+ local l = is_letter[s]
+ if n <= b or n >= e then
+ if p and l then
+ ctx_L(utfbyte(s))
+ else
+ ctx_C(utfbyte(s))
+ end
+ elseif p and l then
+ ctx_l(utfbyte(s))
+ else
+ ctx_c(utfbyte(s))
+ end
+ p = l
end
end
end
@@ -106,8 +166,21 @@ table.setmetatablecall(hyphenatedurl,action) -- watch out: a caller
function hyphenatedurl.setcharacters(str,value) -- 1, 2 == before, after
for s in utfcharacters(str) do
- characters[s] = value or 1
+ characters[s] = value or v_before
end
end
-- .hyphenatedurl.setcharacters("')]}",2)
+
+implement {
+ name = "sethyphenatedurlcharacters",
+ actions = hyphenatedurl.setcharacters,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "hyphenatedurl",
+ scope = "private",
+ actions = function(...) action(hyphenatedurl,...) end,
+ arguments = { "string", "integer", "integer", "string" }
+}
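To make the classification concrete, a hedged trace (made-up input, default character table, lefthyphenmin = righthyphenmin = 3) of which callbacks the action above emits: b and a mark permitted breakpoints before and after punctuation, l and c handle letters and other characters in between, and the capital variants are used inside the no-break zones at both ends.

hyphenatedurl("www.pragma",3,3) -- via the setmetatablecall above; roughly emits:
--   "w" -> ctx_C  -- left no-break zone, previous char not a letter
--   "w" -> ctx_L  -- left no-break zone, previous char was a letter
--   "w" -> ctx_L
--   "." -> ctx_b  -- "." is in the "before" class: a break before it is allowed
--   "p" -> ctx_c  -- previous char was punctuation, not a letter
--   "r" -> ctx_l  -- letter following a letter
--   "a" -> ctx_l
--   "g" -> ctx_l
--   "m" -> ctx_L  -- right no-break zone
--   "a" -> ctx_L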
diff --git a/tex/context/base/lang-url.mkiv b/tex/context/base/lang-url.mkiv
index 1bbe16838..e7d62ba01 100644
--- a/tex/context/base/lang-url.mkiv
+++ b/tex/context/base/lang-url.mkiv
@@ -44,18 +44,21 @@
\newtoks\everyhyphenatedurl
\appendtoks
- \let\&\letterampersand
- \let\#\letterhash
- \let\~\lettertilde
- \let\\\letterbackslash
- \let\$\letterdollar
- \let\^\letterhat
- \let\_\letterunderscore
- \let\{\letterleftbrace
- \let\}\letterrightbrace
- \let\|\letterbar
- \let~=\lettertilde
- \let|=\letterbar
+ \resetfontfeature
+ \resetcharacterspacing
+ %
+ \let\&\letterampersand
+ \let\#\letterhash
+ \let\~\lettertilde
+ \let\\\letterbackslash
+ \let\$\letterdollar
+ \let\^\letterhat
+ \let\_\letterunderscore
+ \let\{\letterleftbrace
+ \let\}\letterrightbrace
+ \let\|\letterbar
+ \let~=\lettertilde
+ \let|=\letterbar
\to \everyhyphenatedurl
\let\hyphenatedurlseparator \empty % \periodcentered
@@ -64,47 +67,54 @@
\setnewconstant\hyphenatedurllefthyphenmin \plusthree
\setnewconstant\hyphenatedurlrighthyphenmin\plusthree
-\def\lang_url_space {\nobreak\hskip\zeropoint plus\onepoint\nobreak}
-\def\lang_url_after #1{\char#1\discretionary{}{\hyphenatedurlseparator}{}}
-\def\lang_url_before #1{\discretionary{\hyphenatedurlseparator}{}{}\char#1\relax}
-\def\lang_url_normal #1{\char#1\relax}
-\def\lang_url_disc #1{\discretionary{}{}{}}
-
-\def\lang_url_space_trace {\nobreak\begingroup\darkyellow\ruledhskip\zeropoint plus\onepoint\endgroup\nobreak}
-\def\lang_url_after_trace #1{\char#1\hsmash{\darkblue\vl}\discretionary{}{\hyphenatedurlseparator}{}}
-\def\lang_url_before_trace#1{\discretionary{\hyphenatedurlseparator}{}{}\hsmash{\darkred\vl}\char#1\relax}
-\def\lang_url_normal_trace#1{\char#1\relax}
-\def\lang_url_disc_trace #1{\discretionary{\hsmash{\darkgreen\vl}}{\hsmash{\darkgreen\vl}}{\hsmash{\darkgreen\vl}}}
-
-\def\showhyphenatedurlbreaks
- {\let\lang_url_space \lang_url_space_trace
- \let\lang_url_after \lang_url_after_trace
- \let\lang_url_before\lang_url_before_trace
- \let\lang_url_normal\lang_url_normal_trace
- \let\lang_url_disc \lang_url_disc_trace}
-
-\def\sethyphenatedurlnormal#1{\ctxcommand{hyphenatedurl.setcharacters(\!!bs#1\!!es,0)}} % Lua names will change
-\def\sethyphenatedurlbefore#1{\ctxcommand{hyphenatedurl.setcharacters(\!!bs#1\!!es,1)}} % Lua names will change
-\def\sethyphenatedurlafter #1{\ctxcommand{hyphenatedurl.setcharacters(\!!bs#1\!!es,2)}} % Lua names will change
+\def\lang_url_more{\penalty\plustenthousand\hskip\scratchskipone\penalty\plustenthousand}
+\def\lang_url_less{\penalty\plustenthousand\hskip\scratchskiptwo\penalty\plustenthousand}
+\def\lang_url_trac{\penalty\plustenthousand\hsmash{\darkred\vl}\penalty\plustenthousand}
+
+\def\lang_url_a#1{\lang_url_more\char#1\lang_url_show\discretionary{}{\hyphenatedurlseparator}{}}
+\def\lang_url_b#1{\lang_url_more\discretionary{\hyphenatedurlseparator}{}{}\lang_url_show\char#1\relax}
+\def\lang_url_d#1{\lang_url_more\discretionary{\lang_url_show}{\lang_url_show}{\lang_url_show}}
+\def\lang_url_l#1{\lang_url_less\char#1\relax}
+\def\lang_url_c#1{\lang_url_more\char#1\relax}
+\def\lang_url_L#1{\lang_url_less\char#1\relax}
+\def\lang_url_C#1{\lang_url_more\char#1\relax}
+
+\unexpanded\def\showhyphenatedurlbreaks % for old times sake
+ {\let\lang_url_show\lang_url_trac}
+
+\installtextracker
+ {languages.url}
+ {\let\lang_url_show\lang_url_trac}
+ {\let\lang_url_show\relax}
+
+\let\lang_url_show\relax
+
+\unexpanded\def\sethyphenatedurlnormal#1{\clf_sethyphenatedurlcharacters{#1}{\v!normal}}
+\unexpanded\def\sethyphenatedurlbefore#1{\clf_sethyphenatedurlcharacters{#1}{\v!before}}
+\unexpanded\def\sethyphenatedurlafter #1{\clf_sethyphenatedurlcharacters{#1}{\v!after }}
% \sethyphenatedurlafter{ABCDEF}
-\unexpanded \def\hyphenatedurl#1%
+\unexpanded\def\hyphenatedurl#1%
{\dontleavehmode
\begingroup
\the\everyhyphenatedurl
\normallanguage\zerocount
- \let\s\lang_url_space
- \let\a\lang_url_after
- \let\b\lang_url_before
- \let\n\lang_url_normal
- \let\d\lang_url_disc
- \normalexpanded{\noexpand\ctxcommand{hyphenatedurl(
- \!!bs\noexpand\detokenize{#1}\!!es,
- \number\hyphenatedurllefthyphenmin,
- \number\hyphenatedurlrighthyphenmin,
- "\hyphenatedurldiscretionary"
- )}}%
+ \let\a\lang_url_a % after
+ \let\b\lang_url_b % before
+ \let\d\lang_url_d % disc
+ \let\l\lang_url_l % letter
+ \let\c\lang_url_c % character
+ \let\L\lang_url_L % letter_nobreak
+ \let\C\lang_url_C % character_nobreak
+ \scratchskipone\zeropoint\s!plus\dimexpr\emwidth/12\s!minus\dimexpr\emwidth/24\relax % 1.00pt @ 12pt
+ \scratchskiptwo\zeropoint\s!plus\dimexpr\emwidth/48\s!minus\dimexpr\emwidth/96\relax % 0.25pt @ 12pt
+ \normalexpanded{\noexpand\clf_hyphenatedurl
+ {\noexpand\detokenize{#1}}%
+ \hyphenatedurllefthyphenmin
+ \hyphenatedurlrighthyphenmin
+ {\hyphenatedurldiscretionary}%
+ }%
\endgroup}
%D \macros
@@ -131,3 +141,31 @@
% \dorecurse{100}{\test{a} \test{ab} \test{abc} \test{abcd} \test{abcde} \test{abcdef}}
\protect \endinput
+
+% \setuppapersize[A7]
+%
+% \unexpanded\def\WhateverA#1%
+% {\dontleavehmode
+% \begingroup
+% \prehyphenchar"B7\relax
+% \setbox\scratchbox\hbox{\tttf#1}%
+% \prehyphenchar`-\relax
+% \unhbox\scratchbox
+% \endgroup}
+%
+% \unexpanded\def\WhateverB#1%
+% {\dontleavehmode
+% \begingroup
+% \tttf
+% \prehyphenchar\minusone
+% % \localrightbox{\llap{_}}%
+% \localrightbox{\llap{\smash{\lower1.5ex\hbox{\char"2192}}}}%
+% \setbox\scratchbox\hbox{#1}%
+% \prehyphenchar`-\relax
+% \unhbox\scratchbox
+% \endgroup}
+%
+% \begingroup \hsize1cm
+% \WhateverA{thisisaboringandverylongcommand}\par
+% \WhateverB{thisisaboringandverylongcommand}\par
+% \endgroup
diff --git a/tex/context/base/lang-wrd.lua b/tex/context/base/lang-wrd.lua
index bf066fc09..b564a02ae 100644
--- a/tex/context/base/lang-wrd.lua
+++ b/tex/context/base/lang-wrd.lua
@@ -14,7 +14,10 @@ local P, S, Cs = lpeg.P, lpeg.S, lpeg.Cs
local report_words = logs.reporter("languages","words")
-local nodes, node, languages = nodes, node, languages
+local nodes = nodes
+local languages = languages
+
+local implement = interfaces.implement
languages.words = languages.words or { }
local words = languages.words
@@ -26,7 +29,18 @@ words.threshold = 4
local numbers = languages.numbers
local registered = languages.registered
-local traverse_nodes = node.traverse
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+local setattr = nuts.setattr
+
+local traverse_nodes = nuts.traverse
+
local wordsdata = words.data
local chardata = characters.data
local tasks = nodes.tasks
@@ -96,7 +110,7 @@ end
-- there is an n=1 problem somewhere in nested boxes
local function mark_words(head,whenfound) -- can be optimized and shared
- local current, language, done = head, nil, nil, 0, false
+ local current, language, done = tonut(head), nil, nil, 0, false
local str, s, nds, n = { }, 0, { }, 0 -- n could also be a table, saves calls
local function action()
if s > 0 then
@@ -112,9 +126,9 @@ local function mark_words(head,whenfound) -- can be optimized and shared
n, s = 0, 0
end
while current do
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- local a = current.lang
+ local a = getfield(current,"lang")
if a then
if a ~= language then
if s > 0 then
@@ -126,16 +140,16 @@ local function mark_words(head,whenfound) -- can be optimized and shared
action()
language = a
end
- local components = current.components
+ local components = getfield(current,"components")
if components then
n = n + 1
nds[n] = current
for g in traverse_nodes(components) do
s = s + 1
- str[s] = utfchar(g.char)
+ str[s] = utfchar(getchar(g))
end
else
- local code = current.char
+ local code = getchar(current)
local data = chardata[code]
if is_letter[data.category] then
n = n + 1
@@ -151,12 +165,12 @@ local function mark_words(head,whenfound) -- can be optimized and shared
n = n + 1
nds[n] = current
end
- elseif id == kern_code and current.subtype == kerning_code and s > 0 then
+ elseif id == kern_code and getsubtype(current) == kerning_code and s > 0 then
-- ok
elseif s > 0 then
action()
end
- current = current.next
+ current = getnext(current)
end
if s > 0 then
action()
@@ -176,6 +190,8 @@ local enabled = false
function words.check(head)
if enabled then
return methods[wordmethod](head)
+ elseif not head then
+ return head, false
else
return head, false
end
@@ -207,7 +223,7 @@ table.setmetatableindex(cache, function(t,k) -- k == language, numbers[k] == tag
else
c = colist["word:" .. (numbers[k] or "unset")] or colist["word:unknown"]
end
- local v = c and function(n) n[a_color] = c end or false
+ local v = c and function(n) setattr(n,a_color,c) end or false
t[k] = v
return v
end)
@@ -226,7 +242,7 @@ end
methods[1] = function(head)
for n in traverse_nodes(head) do
- n[a_color] = unsetvalue -- hm, not that selective (reset color)
+ setattr(n,a_color,unsetvalue) -- hm, not that selective (reset color)
end
return mark_words(head,sweep)
end
@@ -327,7 +343,7 @@ end
methods[3] = function(head)
for n in traverse_nodes(head) do
- n[a_color] = unsetvalue
+ setattr(n,a_color,unsetvalue)
end
return mark_words(head,sweep)
end
@@ -348,6 +364,24 @@ end
-- interface
-commands.enablespellchecking = words.enable
-commands.disablespellchecking = words.disable
-commands.loadspellchecklist = words.load
+implement {
+ name = "enablespellchecking",
+ actions = words.enable,
+ arguments = {
+ {
+ { "method" },
+ { "list" }
+ }
+ }
+}
+
+implement {
+ name = "disablespellchecking",
+ actions = words.disable
+}
+
+implement {
+ name = "loadspellchecklist",
+ arguments = { "string", "string" },
+ actions = words.load
+}
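A hedged sketch of the call shape the first specification above produces (hypothetical values, not part of the patch): the nested argument table makes the scanner collect keyword style arguments into a single Lua table, matching the \clf_enablespellchecking call in lang-wrd.mkiv below.

words.enable { method = "1", list = "mylist" } -- roughly what the TeX end triggers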
diff --git a/tex/context/base/lang-wrd.mkiv b/tex/context/base/lang-wrd.mkiv
index 682489912..8c544773b 100644
--- a/tex/context/base/lang-wrd.mkiv
+++ b/tex/context/base/lang-wrd.mkiv
@@ -40,15 +40,18 @@
\appendtoks
\doifelse{\directspellcheckingparameter\c!state}\v!start
- {\ctxcommand{enablespellchecking { method = "\directspellcheckingparameter\c!method", list = "\directspellcheckingparameter\c!list" }}}
- {\ctxcommand{disablespellchecking()}}%
+ {\clf_enablespellchecking
+ method {\directspellcheckingparameter\c!method}%
+ list {\directspellcheckingparameter\c!list}%
+ \relax}
+ {\clf_disablespellchecking}%
\to \everysetupspellchecking
\unexpanded\def\loadspellchecklist
{\dodoubleempty\lang_spellchecking_load_list}
\def\lang_spellchecking_load_list[#1][#2]%
- {\ctxcommand{loadspellchecklist("#1","#2")}}
+ {\clf_loadspellchecklist{#1}{#2}}
\setupspellchecking
[\c!state=\v!stop,
diff --git a/tex/context/base/lpdf-ano.lua b/tex/context/base/lpdf-ano.lua
index 3f0e718b3..f5e320a00 100644
--- a/tex/context/base/lpdf-ano.lua
+++ b/tex/context/base/lpdf-ano.lua
@@ -10,46 +10,66 @@ if not modules then modules = { } end modules ['lpdf-ano'] = {
-- todo: /AA << WC << ... >> >> : WillClose actions etc
-local next, tostring = next, tostring
-local rep, format = string.rep, string.format
+-- internal references are indicated by a number (and turned into a name with the autoprefix)
+-- we only flush internal destinations that are referred
+
+local next, tostring, tonumber, rawget = next, tostring, tonumber, rawget
+local rep, format, find = string.rep, string.format, string.find
+local min = math.min
local lpegmatch = lpeg.match
local formatters = string.formatters
local backends, lpdf = backends, lpdf
-local trace_references = false trackers.register("references.references", function(v) trace_references = v end)
-local trace_destinations = false trackers.register("references.destinations", function(v) trace_destinations = v end)
-local trace_bookmarks = false trackers.register("references.bookmarks", function(v) trace_bookmarks = v end)
+local trace_references = false trackers.register("references.references", function(v) trace_references = v end)
+local trace_destinations = false trackers.register("references.destinations", function(v) trace_destinations = v end)
+local trace_bookmarks = false trackers.register("references.bookmarks", function(v) trace_bookmarks = v end)
+
+local log_destinations = false directives.register("destinations.log", function(v) log_destinations = v end)
-local report_reference = logs.reporter("backend","references")
-local report_destination = logs.reporter("backend","destinations")
-local report_bookmark = logs.reporter("backend","bookmarks")
+local report_reference = logs.reporter("backend","references")
+local report_destination = logs.reporter("backend","destinations")
+local report_bookmark = logs.reporter("backend","bookmarks")
local variables = interfaces.variables
-local constants = interfaces.constants
+local v_auto = variables.auto
+local v_page = variables.page
+
+local factor = number.dimenfactors.bp
local settings_to_array = utilities.parsers.settings_to_array
+local allocate = utilities.storage.allocate
+local setmetatableindex = table.setmetatableindex
+
local nodeinjections = backends.pdf.nodeinjections
local codeinjections = backends.pdf.codeinjections
local registrations = backends.pdf.registrations
+local getpos = codeinjections.getpos
+local gethpos = codeinjections.gethpos
+local getvpos = codeinjections.getvpos
+
local javascriptcode = interactions.javascripts.code
local references = structures.references
local bookmarks = structures.bookmarks
+local flaginternals = references.flaginternals
+local usedinternals = references.usedinternals
+local usedviews = references.usedviews
+
local runners = references.runners
local specials = references.specials
local handlers = references.handlers
local executers = references.executers
-local getinnermethod = references.getinnermethod
local nodepool = nodes.pool
-local pdfannotation_node = nodepool.pdfannotation
-local pdfdestination_node = nodepool.pdfdestination
-local latelua_node = nodepool.latelua
+----- pdfannotation_node = nodepool.pdfannotation
+----- pdfdestination_node = nodepool.pdfdestination
+----- latelua_node = nodepool.latelua
+local latelua_function_node = nodepool.lateluafunction -- still node ... todo
local texgetcount = tex.getcount
@@ -63,7 +83,12 @@ local pdfshareobjectreference = lpdf.shareobjectreference
local pdfreserveobject = lpdf.reserveobject
local pdfpagereference = lpdf.pagereference
local pdfdelayedobject = lpdf.delayedobject
-local pdfregisterannotation = lpdf.registerannotation
+local pdfregisterannotation = lpdf.registerannotation -- forward definition (for the moment)
+local pdfnull = lpdf.null
+local pdfaddtocatalog = lpdf.addtocatalog
+local pdfaddtonames = lpdf.addtonames
+local pdfaddtopageattributes = lpdf.addtopageattributes
+local pdfrectangle = lpdf.rectangle
-- todo: 3dview
@@ -79,102 +104,439 @@ local pdf_t = pdfconstant("T")
local pdf_fit = pdfconstant("Fit")
local pdf_named = pdfconstant("Named")
-local pdf_border = pdfarray { 0, 0, 0 }
+local autoprefix = "#"
-local cache = { }
+-- Bah, I hate this kind of features .. anyway, as we have delayed resolving we
+-- only support a document-wide setup and it has to be set before the first one
+-- is used. Also, we default to a non-intrusive gray and the outline is kept
+-- thin without dashing lines. This is as far as I'm prepared to go. This way
+-- it can also be used as a debug feature.
-local function pagedestination(n) -- only cache fit
- if n > 0 then
- local pd = cache[n]
- if not pd then
- local a = pdfarray {
- pdfreference(pdfpagereference(n)),
- pdf_fit,
- }
- pd = pdfshareobjectreference(a)
- cache[n] = pd
+local pdf_border_style = pdfarray { 0, 0, 0 } -- radius radius linewidth
+local pdf_border_color = nil
+local set_border = false
+
+local function pdfborder()
+ set_border = true
+ return pdf_border_style, pdf_border_color
+end
+
+lpdf.border = pdfborder
+
+directives.register("references.border",function(v)
+ if v and not set_border then
+ if type(v) == "string" then
+ local m = attributes.list[attributes.private('color')] or { }
+ local c = m and m[v]
+ local v = c and attributes.colors.value(c)
+ if v then
+ local r, g, b = v[3], v[4], v[5]
+ -- if r == g and g == b then
+ -- pdf_border_color = pdfarray { r } -- reduced, not not ... bugged viewers
+ -- else
+ pdf_border_color = pdfarray { r, g, b } -- always rgb
+ -- end
+ end
+ end
+ if not pdf_border_color then
+ pdf_border_color = pdfarray { .6, .6, .6 } -- don't reduce to { 0.6 } as there are buggy viewers out there
end
- return pd
+ pdf_border_style = pdfarray { 0, 0, .5 } -- < 0.5 is not shown by acrobat (at least not in my version)
end
-end
+end)
+
+-- the used and flag code here is somewhat messy in the sense that it
+-- belongs in strc-ref but at the same time depends on the backend, so
+-- we keep it here
+
+-- the caching is somewhat memory intensive on the one hand but
+-- it saves many small temporary tables so it might pay off
+
+local pagedestinations = allocate()
+local pagereferences = allocate() -- annots are cached themselves
+
+setmetatableindex(pagedestinations, function(t,k)
+ k = tonumber(k)
+ if not k or k <= 0 then
+ return pdfnull()
+ end
+ local v = rawget(t,k)
+ if v then
+ -- report_reference("page number expected, got %s: %a",type(k),k)
+ return v
+ end
+ local v = k > 0 and pdfarray {
+ pdfreference(pdfpagereference(k)),
+ pdf_fit,
+ } or pdfnull()
+ t[k] = v
+ return v
+end)
+
+setmetatableindex(pagereferences,function(t,k)
+ k = tonumber(k)
+ if not k or k <= 0 then
+ return nil
+ end
+ local v = rawget(t,k)
+ if v then
+ return v
+ end
+ local v = pdfdictionary { -- can be cached
+ S = pdf_goto,
+ D = pagedestinations[k],
+ }
+ t[k] = v
+ return v
+end)
-lpdf.pagedestination = pagedestination
+lpdf.pagereferences = pagereferences -- table
+lpdf.pagedestinations = pagedestinations -- table
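A hedged usage sketch (hypothetical page number, not part of the patch): both tables are filled lazily through their index metamethods, so the first lookup creates and caches the objects and later lookups reuse them, while invalid page numbers fall back to a PDF null.

local jump = pagereferences[5]    -- builds and caches << /S /GoTo /D [ ... /Fit ] >>
local same = pagereferences[5]    -- the cached dictionary, no new objects
local none = pagedestinations[0]  -- out of range, so pdfnull() and nothing is cached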
local defaultdestination = pdfarray { 0, pdf_fit }
-local function link(url,filename,destination,page,actions)
- if filename and filename ~= "" then
- if file.basename(filename) == tex.jobname then
- return false
- else
- filename = file.addsuffix(filename,"pdf")
+-- fit is default (see lpdf-nod)
+
+local destinations = { } -- to be used soon
+
+local function pdfregisterdestination(name,reference)
+ local d = destinations[name]
+ if d then
+ report_destination("ignoring duplicate destination %a with reference %a",name,reference)
+ else
+ destinations[name] = reference
+ end
+end
+
+lpdf.registerdestination = pdfregisterdestination
+
+local maxslice = 32 -- could be made configurable ... 64 is also ok
+
+luatex.registerstopactions(function()
+ if log_destinations and next(destinations) then
+ local logsnewline = logs.newline
+ local log_destinations = logs.reporter("system","references")
+ local log_destination = logs.reporter("destination")
+ logs.pushtarget("logfile")
+ logsnewline()
+ log_destinations("start used destinations")
+ logsnewline()
+ local n = 0
+ for destination, pagenumber in table.sortedhash(destinations) do
+ log_destination("% 4i : %-5s : %s",pagenumber,usedviews[destination] or defaultview,destination)
+ n = n + 1
+ end
+ logsnewline()
+ log_destinations("stop used destinations")
+ logsnewline()
+ logs.poptarget()
+ report_destination("%s destinations saved in log file",n)
+ end
+end)
+
+
+local function pdfnametree(destinations)
+ local slices = { }
+ local sorted = table.sortedkeys(destinations)
+ local size = #sorted
+
+ if size <= 1.5*maxslice then
+ maxslice = size
+ end
+
+ for i=1,size,maxslice do
+ local amount = min(i+maxslice-1,size)
+ local names = pdfarray { }
+ for j=i,amount do
+ local destination = sorted[j]
+ local pagenumber = destinations[destination]
+ names[#names+1] = tostring(destination) -- tostring is a safeguard
+ names[#names+1] = pdfreference(pagenumber)
+ end
+ local first = sorted[i]
+ local last = sorted[amount]
+ local limits = pdfarray {
+ first,
+ last,
+ }
+ local d = pdfdictionary {
+ Names = names,
+ Limits = limits,
+ }
+ slices[#slices+1] = {
+ reference = pdfreference(pdfflushobject(d)),
+ limits = limits,
+ }
+ end
+ local function collectkids(slices,first,last)
+ local k = pdfarray()
+ local d = pdfdictionary {
+ Kids = k,
+ Limits = pdfarray {
+ slices[first].limits[1],
+ slices[last ].limits[2],
+ },
+ }
+ for i=first,last do
+ k[#k+1] = slices[i].reference
+ end
+ return d
+ end
+ if #slices == 1 then
+ return slices[1].reference
+ else
+ while true do
+ if #slices > maxslice then
+ local temp = { }
+ local size = #slices
+ for i=1,size,maxslice do
+ local kids = collectkids(slices,i,min(i+maxslice-1,size))
+ temp[#temp+1] = {
+ reference = pdfreference(pdfflushobject(kids)),
+ limits = kids.Limits,
+ }
+ end
+ slices = temp
+ else
+ return pdfreference(pdfflushobject(collectkids(slices,1,#slices)))
+ end
+ end
+ end
+end
+
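For orientation, a hedged sketch (hypothetical names and object numbers) of one slice record as collected in the loop above: a slice covers at most maxslice sorted names, stores a reference to its flushed /Names dictionary, and keeps the first and last name as its /Limits pair for the kid-collecting pass that follows.

-- assuming destinations = { ["#12"] = 34, ["#15"] = 35, ["intro"] = 36 } and a single slice:
local slice_sketch = {
  reference = pdfreference(40),            -- the flushed << /Names [ (#12) 34 0 R ... ] /Limits [ ... ] >> object
  limits    = pdfarray { "#12", "intro" }, -- first and last name covered by this slice
}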
+local function pdfdestinationspecification()
+ if next(destinations) then -- safeguard
+ local r = pdfnametree(destinations)
+ -- pdfaddtocatalog("Dests",r)
+ pdfaddtonames("Dests",r)
+ if not log_destinations then
+ destinations = nil
+ end
+ end
+end
+
+lpdf.nametree = pdfnametree
+lpdf.destinationspecification = pdfdestinationspecification
+
+lpdf.registerdocumentfinalizer(pdfdestinationspecification,"collect destinations")
+
+-- todo
+
+local destinations = { }
+
+local f_xyz = formatters["<< /D [ %i 0 R /XYZ %0.3F %0.3F null ] >>"]
+local f_fit = formatters["<< /D [ %i 0 R /Fit ] >>"]
+local f_fitb = formatters["<< /D [ %i 0 R /FitB ] >>"]
+local f_fith = formatters["<< /D [ %i 0 R /FitH %0.3F ] >>"]
+local f_fitv = formatters["<< /D [ %i 0 R /FitV %0.3F ] >>"]
+local f_fitbh = formatters["<< /D [ %i 0 R /FitBH %0.3F ] >>"]
+local f_fitbv = formatters["<< /D [ %i 0 R /FitBV %0.3F ] >>"]
+local f_fitr = formatters["<< /D [ %i 0 R /FitR %0.3F %0.3F %0.3F %0.3F ] >>"]
+
+local v_standard = variables.standard
+local v_frame = variables.frame
+local v_width = variables.width
+local v_minwidth = variables.minwidth
+local v_height = variables.height
+local v_minheight = variables.minheight
+local v_fit = variables.fit
+local v_tight = variables.tight
+
+-- nicer is to create dictionaries and set properties but it's a bit overkill
+
+-- The problem with the following settings is that they are guesses: we never know
+-- if a box is part of something larger that needs to be in view, or whether we are
+-- dealing with a vbox or vtop, so the used h/d values cannot be trusted in a tight
+-- view. Of course some decent additional offset would be nice so maybe I'll add
+-- that some day. I never use anything other than 'fit' anyway as I think that the
+-- document should fit the device (and vice versa). In fact, with today's swipe
+-- and finger zooming this whole view is rather useless and, as with any zooming,
+-- one loses the overview and keeps zooming.
+
+local destinationactions = {
+ -- [v_standard] = function(r,w,h,d) return f_xyz (r,pdfrectangle(w,h,d)) end, -- local left,top with zoom (0 in our case)
+ [v_standard] = function(r,w,h,d) return f_xyz (r,gethpos()*factor,(getvpos()+h)*factor) end, -- local left,top with no zoom
+ [v_frame] = function(r,w,h,d) return f_fitr (r,pdfrectangle(w,h,d)) end, -- fit rectangle in window
+ -- [v_width] = function(r,w,h,d) return f_fith (r,gethpos()*factor) end, -- top coordinate, fit width of page in window
+ [v_width] = function(r,w,h,d) return f_fith (r,(getvpos()+h)*factor) end, -- top coordinate, fit width of page in window
+ -- [v_minwidth] = function(r,w,h,d) return f_fitbh(r,gethpos()*factor) end, -- top coordinate, fit width of content in window
+ [v_minwidth] = function(r,w,h,d) return f_fitbh(r,(getvpos()+h)*factor) end, -- top coordinate, fit width of content in window
+ -- [v_height] = function(r,w,h,d) return f_fitv (r,(getvpos()+h)*factor) end, -- left coordinate, fit height of page in window
+ [v_height] = function(r,w,h,d) return f_fitv (r,gethpos()*factor) end, -- left coordinate, fit height of page in window
+ -- [v_minheight] = function(r,w,h,d) return f_fitbv(r,(getvpos()+h)*factor) end, -- left coordinate, fit height of content in window
+ [v_minheight] = function(r,w,h,d) return f_fitbv(r,gethpos()*factor) end, -- left coordinate, fit height of content in window
+ [v_fit] = f_fit, -- fit page in window
+ [v_tight] = f_fitb, -- fit content in window
+}
+
+local mapping = {
+ [v_standard] = v_standard, xyz = v_standard,
+ [v_frame] = v_frame, fitr = v_frame,
+ [v_width] = v_width, fith = v_width,
+ [v_minwidth] = v_minwidth, fitbh = v_minwidth,
+ [v_height] = v_height, fitv = v_height,
+ [v_minheight] = v_minheight, fitbv = v_minheight,
+ [v_fit] = v_fit, fit = v_fit,
+ [v_tight] = v_tight, fitb = v_tight,
+}
+
+local defaultview = v_fit
+local defaultaction = destinationactions[defaultview]
+
+-- A complication is that we need to use named destinations when we have views, so we
+-- end up with a mix. A previous version just output multiple destinations, but now
+-- that we moved it all here we can be more sparse.
+
+local pagedestinations = { }
+
+table.setmetatableindex(pagedestinations,function(t,k)
+ local v = pdfdelayedobject(f_fit(k))
+ t[k] = v
+ return v
+end)
+
+local function flushdestination(width,height,depth,names,view)
+ local r = pdfpagereference(texgetcount("realpageno"))
+ if view == defaultview or not view or view == "" then
+ r = pagedestinations[r]
+ else
+ local action = view and destinationactions[view] or defaultaction
+ r = pdfdelayedobject(action(r,width,height,depth))
+ end
+ for n=1,#names do
+ local name = names[n]
+ if name then
+ pdfregisterdestination(name,r)
end
end
- if url and url ~= "" then
- if filename and filename ~= "" then
- if destination and destination ~= "" then
- url = file.join(url,filename).."#"..destination
+end
+
+function nodeinjections.destination(width,height,depth,names,view)
+ -- todo check if begin end node / was comment
+ view = view and mapping[view] or defaultview
+ if trace_destinations then
+ report_destination("width %p, height %p, depth %p, names %|t, view %a",width,height,depth,names,view)
+ end
+ local method = references.innermethod
+ local noview = view == defaultview
+ local doview = false
+ -- we could save some aut's by using a name when given but it doesn't pay off apart
+ -- from making the code messy and tracing hard .. we only save some destinations
+ -- which we already share anyway
+ for n=1,#names do
+ local name = names[n]
+ if usedviews[name] then
+ -- already done, maybe a warning
+ elseif type(name) == "number" then
+ if noview then
+ usedviews[name] = view
+ names[n] = false
+ elseif method == v_page then
+ usedviews[name] = view
+ names[n] = false
else
- url = file.join(url,filename)
+ local used = usedinternals[name]
+ if used and used ~= defaultview then
+ usedviews[name] = view
+ names[n] = autoprefix .. name
+ doview = true
+ else
+ -- names[n] = autoprefix .. name
+ names[n] = false
+ end
end
+ elseif method == v_page then
+ usedviews[name] = view
+ else
+ usedviews[name] = view
+ doview = true
end
- return pdfdictionary {
- S = pdf_uri,
- URI = url,
- }
- elseif filename and filename ~= "" then
- -- no page ?
- if destination == "" then
+ end
+ if doview then
+ return latelua_function_node(function() flushdestination(width,height,depth,names,view) end)
+ end
+end
+
+-- we could share dictionaries ... todo
+
+local function somedestination(destination,internal,page) -- no view anyway
+ if references.innermethod ~= v_page then
+ if type(destination) == "number" then
+ if not internal then
+ internal = destination
+ end
destination = nil
end
- if not destination and page then
- destination = pdfarray { page - 1, pdf_fit }
+ if internal then
+ flaginternals[internal] = true -- for bookmarks and so
+ local used = usedinternals[internal]
+ if used == defaultview or used == true then
+ return pagereferences[page]
+ end
+ if type(destination) ~= "string" then
+ destination = autoprefix .. internal
+ end
+ return pdfdictionary {
+ S = pdf_goto,
+ D = destination,
+ }
end
- return pdfdictionary {
- S = pdf_gotor, -- can also be pdf_launch
- F = filename,
- D = destination or defaultdestination, -- D is mandate
- NewWindow = (actions.newwindow and true) or nil,
- }
- elseif destination and destination ~= "" then
- return pdfdictionary { -- can be cached
- S = pdf_goto,
- D = destination,
- }
- else
- local p = tonumber(page)
- if p and p > 0 then
- return pdfdictionary { -- can be cached
+ if destination then
+ -- hopefully this one is flushed
+ return pdfdictionary {
S = pdf_goto,
- D = pdfarray {
- pdfreference(pdfpagereference(p)),
- pdf_fit,
- }
+ D = destination,
}
- elseif trace_references then
- report_reference("invalid page reference %a",page)
end
end
- return false
+ return pagereferences[page]
end
-lpdf.link = link
+-- annotations
-function lpdf.launch(program,parameters)
- if program and program ~= "" then
- local d = pdfdictionary {
- S = pdf_launch,
- F = program,
- D = ".",
- }
- if parameters and parameters ~= "" then
- d.P = parameters
- end
- return d
+local pdflink = somedestination
+
+local function pdffilelink(filename,destination,page,actions)
+ if not filename or filename == "" or file.basename(filename) == tex.jobname then
+ return false
+ end
+ filename = file.addsuffix(filename,"pdf")
+ if not destination or destination == "" then
+ destination = pdfarray { (page or 0) - 1, pdf_fit }
+ end
+ return pdfdictionary {
+ S = pdf_gotor, -- can also be pdf_launch
+ F = filename,
+ D = destination or defaultdestination, -- D is mandatory
+ NewWindow = actions.newwindow and true or nil,
+ }
+end
+
+local function pdfurllink(url,destination,page)
+ if not url or url == "" then
+ return false
+ end
+ if destination and destination ~= "" then
+ url = url .. "#" .. destination
end
+ return pdfdictionary {
+ S = pdf_uri,
+ URI = url,
+ }
+end
+
+local function pdflaunch(program,parameters)
+ if not program or program == "" then
+ return false
+ end
+ return pdfdictionary {
+ S = pdf_launch,
+ F = program,
+ D = ".",
+ P = parameters ~= "" and parameters or nil
+ }
end
-function lpdf.javascript(name,arguments)
+local function pdfjavascript(name,arguments)
local script = javascriptcode(name,arguments) -- make into object (hash)
if script then
return pdfdictionary {
@@ -200,7 +562,9 @@ local function pdfaction(actions)
if what then
what = what(a,actions)
end
- if what then
+ if action == what then
+ -- ignore this one, else we get a loop
+ elseif what then
action.Next = what
action = what
else
@@ -219,9 +583,11 @@ function codeinjections.prerollreference(actions) -- share can become option
if actions then
local main, n = pdfaction(actions)
if main then
- main = pdfdictionary {
+ local bs, bc = pdfborder()
+ main = pdfdictionary {
Subtype = pdf_link,
- Border = pdf_border,
+ Border = bs,
+ C = bc,
H = (not actions.highlight and pdf_n) or nil,
A = pdfshareobjectreference(main),
F = 4, -- print (mandate in pdf/a)
@@ -231,157 +597,146 @@ function codeinjections.prerollreference(actions) -- share can become option
end
end
-local function use_normal_annotations()
-
- local function reference(width,height,depth,prerolled) -- keep this one
- if prerolled then
- if trace_references then
- report_reference("width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
- end
- return pdfannotation_node(width,height,depth,prerolled)
- end
- end
-
- local function finishreference()
- end
-
- return reference, finishreference
-
-end
+-- local function use_normal_annotations()
+--
+-- local function reference(width,height,depth,prerolled) -- keep this one
+-- if prerolled then
+-- if trace_references then
+-- report_reference("width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
+-- end
+-- return pdfannotation_node(width,height,depth,prerolled)
+-- end
+-- end
+--
+-- local function finishreference()
+-- end
+--
+-- return reference, finishreference
+--
+-- end
-- eventually we can do this for special refs only
-local hashed, nofunique, nofused = { }, 0, 0
-
-local f_annot = formatters["<< /Type /Annot %s /Rect [%0.3f %0.3f %0.3f %0.3f] >>"]
-local f_bpnf = formatters["_bpnf_(%s,%s,%s,'%s')"]
+local hashed = { }
+local nofunique = 0
+local nofused = 0
+local nofspecial = 0
+local share = true
-local function use_shared_annotations()
+local f_annot = formatters["<< /Type /Annot %s /Rect [ %0.3F %0.3F %0.3F %0.3F ] >>"]
- local factor = number.dimenfactors.bp
+directives.register("refences.sharelinks", function(v) share = v end)
- local function finishreference(width,height,depth,prerolled) -- %0.2f looks okay enough (no scaling anyway)
- local h, v = pdf.h, pdf.v
- local llx, lly = h*factor, (v - depth)*factor
- local urx, ury = (h + width)*factor, (v + height)*factor
- local annot = f_annot(prerolled,llx,lly,urx,ury)
- local n = hashed[annot]
- if not n then
- n = pdfdelayedobject(annot)
- hashed[annot] = n
- nofunique = nofunique + 1
- end
- nofused = nofused + 1
- pdfregisterannotation(n)
+table.setmetatableindex(hashed,function(t,k)
+ local v = pdfdelayedobject(k)
+ if share then
+ t[k] = v
end
+ nofunique = nofunique + 1
+ return v
+end)
+
+local function finishreference(width,height,depth,prerolled) -- %0.2f looks okay enough (no scaling anyway)
+ local annot = hashed[f_annot(prerolled,pdfrectangle(width,height,depth))]
+ nofused = nofused + 1
+ return pdfregisterannotation(annot)
+end
- _bpnf_ = finishreference
-
- local function reference(width,height,depth,prerolled)
- if prerolled then
- if trace_references then
- report_reference("width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
- end
- local luacode = f_bpnf(width,height,depth,prerolled)
- return latelua_node(luacode)
- end
+local function finishannotation(width,height,depth,prerolled,r)
+ local annot = f_annot(prerolled,pdfrectangle(width,height,depth))
+ if r then
+ pdfdelayedobject(annot,r)
+ else
+ r = pdfdelayedobject(annot)
end
+ nofspecial = nofspecial + 1
+ return pdfregisterannotation(r)
+end
- statistics.register("pdf annotations", function()
- if nofused > 0 then
- return format("%s embedded, %s unique",nofused,nofunique)
- else
- return nil
+function nodeinjections.reference(width,height,depth,prerolled)
+ if prerolled then
+ if trace_references then
+ report_reference("link: width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
end
- end)
-
-
- return reference, finishreference
-
+ return latelua_function_node(function() finishreference(width,height,depth,prerolled) end)
+ end
end
-local lln = latelua_node() if node.has_field(lln,'string') then
-
- directives.register("refences.sharelinks", function(v)
- if v then
- nodeinjections.reference, codeinjections.finishreference = use_shared_annotations()
- else
- nodeinjections.reference, codeinjections.finishreference = use_normal_annotations()
+function nodeinjections.annotation(width,height,depth,prerolled,r)
+ if prerolled then
+ if trace_references then
+ report_reference("special: width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
end
- end)
+ return latelua_function_node(function() finishannotation(width,height,depth,prerolled,r or false) end)
+ end
+end
- nodeinjections.reference, codeinjections.finishreference = use_shared_annotations()
+-- beware, we register during a latelua sweep so we have to make sure that
+-- we finalize after that (also in a latelua for the moment as we have no
+-- callback yet)
-else
+local annotations = nil
- nodeinjections.reference, codeinjections.finishreference = use_normal_annotations()
+function lpdf.registerannotation(n)
+ if annotations then
+ annotations[#annotations+1] = pdfreference(n)
+ else
+ annotations = pdfarray { pdfreference(n) } -- no need to use lpdf.array cum suis
+ end
+end
-end node.free(lln)
+pdfregisterannotation = lpdf.registerannotation
--- -- -- --
--- -- -- --
+function lpdf.annotationspecification()
+ if annotations then
+ local r = pdfdelayedobject(tostring(annotations)) -- delayed so okay in latelua
+ pdfaddtopageattributes("Annots",pdfreference(r))
+ annotations = nil
+ end
+end
-local done = { } -- prevent messages
+lpdf.registerpagefinalizer(lpdf.annotationspecification,"finalize annotations")
-function nodeinjections.destination(width,height,depth,name,view)
- if not done[name] then
- done[name] = true
- if trace_destinations then
- report_destination("width %p, height %p, depth %p, name %a, view %a",width,height,depth,name,view)
- end
- return pdfdestination_node(width,height,depth,name,view) -- can be begin/end node
+statistics.register("pdf annotations", function()
+ if nofused > 0 or nofspecial > 0 then
+ return format("%s links (%s unique), %s special",nofused,nofunique,nofspecial)
+ else
+ return nil
end
-end
+end)
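+
+-- Rough sketch of the new flow: identical annotation dictionaries share one delayed object
+-- via the metatable index on "hashed", every registered object number ends up in the
+-- "annotations" array, and the page finalizer flushes that array as the /Annots entry. So
+-- two links with the same action and rectangle cost only one object (values illustrative):
+--
+--   local a = hashed["<< /Type /Annot /Subtype /Link /Rect [ 0 0 10 10 ] >>"]
+--   local b = hashed["<< /Type /Annot /Subtype /Link /Rect [ 0 0 10 10 ] >>"]
+--   -- a == b as long as sharing is enabled (the default)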
-- runners and specials
--- runners["inner"] = function(var,actions)
--- if getinnermethod() == "names" then
--- local vi = var.i
--- if vi then
--- local vir = vi.references
--- if vir then
--- local internal = vir.internal
--- if internal then
--- var.inner = "aut:" .. internal
--- end
--- end
--- end
--- else
--- var.inner = nil
--- end
--- local prefix = var.p
--- local inner = var.inner
--- if inner and prefix and prefix ~= "" then
--- inner = prefix .. ":" .. inner -- might not always be ok
--- end
--- return link(nil,nil,inner,var.r,actions)
--- end
-
runners["inner"] = function(var,actions)
local internal = false
- if getinnermethod() == "names" then
+ local inner = nil
+ if references.innermethod == v_auto then
local vi = var.i
if vi then
local vir = vi.references
if vir then
-- todo: no need for it when we have a real reference
+ local reference = vir.reference
+ if reference and reference ~= "" then
+ var.inner = reference
+ local prefix = var.p
+ if prefix and prefix ~= "" then
+ var.prefix = prefix
+ inner = prefix .. ":" .. reference
+ else
+ inner = reference
+ end
+ end
internal = vir.internal
if internal then
- var.inner = "aut:" .. internal
+ flaginternals[internal] = true
end
end
end
else
var.inner = nil
end
- local prefix = var.p
- local inner = var.inner
- if not internal and inner and prefix and prefix ~= "" then
- -- no prefix with e.g. components
- inner = prefix .. ":" .. inner
- end
- return link(nil,nil,inner,var.r,actions)
+ return pdflink(inner,internal,var.r)
end
runners["inner with arguments"] = function(var,actions)
@@ -391,12 +746,15 @@ end
runners["outer"] = function(var,actions)
local file, url = references.checkedfileorurl(var.outer,var.outer)
- return link(url,file,var.arguments,nil,actions)
+ if file then
+ return pdffilelink(file,var.arguments,nil,actions)
+ elseif url then
+ return pdfurllink(url,var.arguments,nil,actions)
+ end
end
runners["outer with inner"] = function(var,actions)
- local file = references.checkedfile(var.outer) -- was var.f but fails ... why
- return link(nil,file,var.inner,var.r,actions)
+ return pdffilelink(references.checkedfile(var.outer),var.inner,var.r,actions)
end
runners["special outer with operation"] = function(var,actions)
@@ -442,13 +800,10 @@ function specials.internal(var,actions) -- better resolve in strc-ref
local v = i and references.internals[i]
if not v then
-- error
- report_reference("no internal reference %a",i)
- elseif getinnermethod() == "names" then
- -- named
- return link(nil,nil,"aut:"..i,v.references.realpage,actions)
+ report_reference("no internal reference %a",i or "")
else
- -- page
- return link(nil,nil,nil,v.references.realpage,actions)
+ flaginternals[i] = true
+ return pdflink(nil,i,v.references.realpage)
end
end
@@ -461,8 +816,7 @@ local pages = references.pages
function specials.page(var,actions)
local file = var.f
if file then
- file = references.checkedfile(file)
- return link(nil,file,nil,var.operation,actions)
+ return pdffilelink(references.checkedfile(file),nil,var.operation,actions)
else
local p = var.r
if not p then -- todo: call special from reference code
@@ -472,29 +826,24 @@ function specials.page(var,actions)
else
p = references.realpageofpage(tonumber(p))
end
- -- if p then
- -- var.r = p
- -- end
end
- return link(nil,nil,nil,p or var.operation,actions)
+ return pdflink(nil,nil,p or var.operation)
end
end
function specials.realpage(var,actions)
local file = var.f
if file then
- file = references.checkedfile(file)
- return link(nil,file,nil,var.operation,actions)
+ return pdffilelink(references.checkedfile(file),nil,var.operation,actions)
else
- return link(nil,nil,nil,var.operation,actions)
+ return pdflink(nil,nil,var.operation)
end
end
function specials.userpage(var,actions)
local file = var.f
if file then
- file = references.checkedfile(file)
- return link(nil,file,nil,var.operation,actions)
+ return pdffilelink(references.checkedfile(file),nil,var.operation,actions)
else
local p = var.r
if not p then -- todo: call special from reference code
@@ -506,7 +855,7 @@ function specials.userpage(var,actions)
-- var.r = p
-- end
end
- return link(nil,nil,nil,p or var.operation,actions)
+ return pdflink(nil,nil,p or var.operation)
end
end
@@ -514,7 +863,7 @@ function specials.deltapage(var,actions)
local p = tonumber(var.operation)
if p then
p = references.checkedrealpage(p + texgetcount("realpageno"))
- return link(nil,nil,nil,p,actions)
+ return pdflink(nil,nil,p)
end
end
@@ -554,27 +903,29 @@ function specials.order(var,actions) -- references.specials !
end
function specials.url(var,actions)
- local url = references.checkedurl(var.operation)
- return link(url,nil,var.arguments,nil,actions)
+ return pdfurllink(references.checkedurl(var.operation),var.arguments,nil,actions)
end
function specials.file(var,actions)
- local file = references.checkedfile(var.operation)
- return link(nil,file,var.arguments,nil,actions)
+ return pdffilelink(references.checkedfile(var.operation),var.arguments,nil,actions)
end
function specials.fileorurl(var,actions)
local file, url = references.checkedfileorurl(var.operation,var.operation)
- return link(url,file,var.arguments,nil,actions)
+ if file then
+ return pdffilelink(file,var.arguments,nil,actions)
+ elseif url then
+ return pdfurllink(url,var.arguments,nil,actions)
+ end
end
function specials.program(var,content)
local program = references.checkedprogram(var.operation)
- return lpdf.launch(program,var.arguments)
+ return pdflaunch(program,var.arguments)
end
function specials.javascript(var)
- return lpdf.javascript(var.operation,var.arguments)
+ return pdfjavascript(var.operation,var.arguments)
end
specials.JS = specials.javascript
@@ -698,61 +1049,109 @@ function specials.action(var)
end
end
---~ entry.A = pdfdictionary {
---~ S = pdf_goto,
---~ D = ....
---~ }
-
-local function build(levels,start,parent,method)
- local startlevel = levels[start][1]
+local function build(levels,start,parent,method,nested)
+ local startlevel = levels[start].level
local i, n = start, 0
local child, entry, m, prev, first, last, f, l
while i and i <= #levels do
- local li = levels[i]
- local level, title, reference, open = li[1], li[2], li[3], li[4]
- if level < startlevel then
- pdfflushobject(child,entry)
- return i, n, first, last
- elseif level == startlevel then
- if trace_bookmarks then
- report_bookmark("%3i %w%s %s",reference.realpage,(level-1)*2,(open and "+") or "-",title)
- end
- local prev = child
- child = pdfreserveobject()
- if entry then
- entry.Next = child and pdfreference(child)
- pdfflushobject(prev,entry)
- end
- entry = pdfdictionary {
- Title = pdfunicode(title),
- Parent = parent,
- Prev = prev and pdfreference(prev),
- }
- if method == "internal" then
- entry.Dest = "aut:" .. reference.internal
- else -- if method == "page" then
- entry.Dest = pagedestination(reference.realpage)
- end
- if not first then first, last = child, child end
- prev = child
- last = prev
- n = n + 1
+ local current = levels[i]
+ if current.usedpage == false then
+ -- safeguard
i = i + 1
- elseif i < #levels and level > startlevel then
- i, m, f, l = build(levels,i,pdfreference(child),method)
- entry.Count = (open and m) or -m
- if m > 0 then
- entry.First, entry.Last = pdfreference(f), pdfreference(l)
- end
else
- -- missing intermediate level but ok
- i, m, f, l = build(levels,i,pdfreference(child),method)
- entry.Count = (open and m) or -m
- if m > 0 then
- entry.First, entry.Last = pdfreference(f), pdfreference(l)
+ local level = current.level
+ local title = current.title
+ local reference = current.reference
+ local opened = current.opened
+ local reftype = type(reference)
+            local variant  = "unknown"
+            local realpage = nil
+ if reftype == "table" then
+ -- we're okay
+ variant = "list"
+ elseif reftype == "string" then
+ local resolved = references.identify("",reference)
+                local found = resolved and structures.references.setreferencerealpage(resolved) or 0
+                if found > 0 then
+                    variant  = "realpage"
+                    realpage = found
+ end
+ elseif reftype == "number" then
+ if reference > 0 then
+ variant = "realpage"
+ realpage = reference
+ end
+ else
+ -- error
+ end
+ if variant == "unknown" then
+ -- error, ignore
+ i = i + 1
+ elseif level <= startlevel then
+ if level < startlevel then
+ if nested then -- could be an option but otherwise we quit too soon
+ if entry then
+ pdfflushobject(child,entry)
+ else
+ report_bookmark("error 1")
+ end
+ return i, n, first, last
+ else
+ report_bookmark("confusing level change at level %a around %a",level,title)
+ end
+ end
+ if trace_bookmarks then
+ report_bookmark("%3i %w%s %s",reference.realpage,(level-1)*2,(opened and "+") or "-",title)
+ end
+ local prev = child
+ child = pdfreserveobject()
+ if entry then
+ entry.Next = child and pdfreference(child)
+ pdfflushobject(prev,entry)
+ end
+ local action = nil
+ if variant == "list" then
+ action = somedestination(reference.internal,reference.internal,reference.realpage)
+ elseif variant == "realpage" then
+ action = pagereferences[realpage]
+ end
+ entry = pdfdictionary {
+ Title = pdfunicode(title),
+ Parent = parent,
+ Prev = prev and pdfreference(prev),
+ A = action,
+ }
+ -- entry.Dest = somedestination(reference.internal,reference.internal,reference.realpage)
+ if not first then first, last = child, child end
+ prev = child
+ last = prev
+ n = n + 1
+ i = i + 1
+ elseif i < #levels and level > startlevel then
+ i, m, f, l = build(levels,i,pdfreference(child),method,true)
+ if entry then
+ entry.Count = (opened and m) or -m
+ if m > 0 then
+ entry.First = pdfreference(f)
+ entry.Last = pdfreference(l)
+ end
+ else
+ report_bookmark("error 2")
+ end
+ else
+ -- missing intermediate level but ok
+ i, m, f, l = build(levels,i,pdfreference(child),method,true)
+ if entry then
+ entry.Count = (opened and m) or -m
+ if m > 0 then
+ entry.First = pdfreference(f)
+ entry.Last = pdfreference(l)
+ end
+ pdfflushobject(child,entry)
+ else
+ report_bookmark("error 3")
+ end
+ return i, n, first, last
end
- pdfflushobject(child,entry)
- return i, n, first, last
end
end
pdfflushobject(child,entry)
@@ -760,10 +1159,9 @@ local function build(levels,start,parent,method)
end
function codeinjections.addbookmarks(levels,method)
- if #levels > 0 then
- structures.bookmarks.flatten(levels) -- dirty trick for lack of structure
+ if levels and #levels > 0 then
local parent = pdfreserveobject()
- local _, m, first, last = build(levels,1,pdfreference(parent),method or "internal")
+ local _, m, first, last = build(levels,1,pdfreference(parent),method or "internal",false)
local dict = pdfdictionary {
Type = pdfconstant("Outlines"),
First = pdfreference(first),
@@ -771,10 +1169,10 @@ function codeinjections.addbookmarks(levels,method)
Count = m,
}
pdfflushobject(parent,dict)
- lpdf.addtocatalog("Outlines",lpdf.reference(parent))
+ pdfaddtocatalog("Outlines",lpdf.reference(parent))
end
end
-- this could also be hooked into the frontend finalizer
-lpdf.registerdocumentfinalizer(function() bookmarks.place() end,1,"bookmarks")
+lpdf.registerdocumentfinalizer(function() bookmarks.place() end,1,"bookmarks") -- hm, why indirect call
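+
+-- For reference, a sketch of the "levels" table that build() consumes (the actual producer
+-- lives in the bookmark structuring code, so the field values here are only illustrative):
+--
+--   local levels = {
+--       { level = 1, title = "Introduction", reference = { internal = 12, realpage = 1 }, opened = true  },
+--       { level = 2, title = "History",      reference = "sec:history",                  opened = false },
+--       { level = 2, title = "Archive",      reference = 3,                              opened = false }, -- a realpage
+--   }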
diff --git a/tex/context/base/lpdf-col.lua b/tex/context/base/lpdf-col.lua
index b358d0820..877c01a1c 100644
--- a/tex/context/base/lpdf-col.lua
+++ b/tex/context/base/lpdf-col.lua
@@ -14,42 +14,50 @@ local formatters = string.formatters
local backends, lpdf, nodes = backends, lpdf, nodes
-local allocate = utilities.storage.allocate
-local formatters = string.formatters
-
-local nodeinjections = backends.pdf.nodeinjections
-local codeinjections = backends.pdf.codeinjections
-local registrations = backends.pdf.registrations
-
-local nodepool = nodes.pool
-local register = nodepool.register
-local pdfliteral = nodepool.pdfliteral
-
-local pdfconstant = lpdf.constant
-local pdfstring = lpdf.string
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local pdfreference = lpdf.reference
-local pdfverbose = lpdf.verbose
-local pdfflushobject = lpdf.flushobject
-local pdfflushstreamobject = lpdf.flushstreamobject
-
-local colors = attributes.colors
-local transparencies = attributes.transparencies
-local registertransparancy = transparencies.register
-local registercolor = colors.register
-local colorsvalue = colors.value
-local transparenciesvalue = transparencies.value
-local forcedmodel = colors.forcedmodel
-
-local c_transparency = pdfconstant("Transparency")
-
-local f_gray = formatters["%.3f g %.3f G"]
-local f_rgb = formatters["%.3f %.3f %.3f rg %.3f %.3f %.3f RG"]
-local f_cmyk = formatters["%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K"]
+local allocate = utilities.storage.allocate
+local formatters = string.formatters
+
+local nodeinjections = backends.pdf.nodeinjections
+local codeinjections = backends.pdf.codeinjections
+local registrations = backends.pdf.registrations
+
+local nodepool = nodes.pool
+local register = nodepool.register
+local pdfliteral = nodepool.pdfliteral
+
+local pdfconstant = lpdf.constant
+local pdfstring = lpdf.string
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local pdfreference = lpdf.reference
+local pdfverbose = lpdf.verbose
+local pdfflushobject = lpdf.flushobject
+local pdfdelayedobject = lpdf.delayedobject
+local pdfflushstreamobject = lpdf.flushstreamobject
+
+local pdfshareobjectreference = lpdf.shareobjectreference
+
+local addtopageattributes = lpdf.addtopageattributes
+local adddocumentcolorspace = lpdf.adddocumentcolorspace
+local adddocumentextgstate = lpdf.adddocumentextgstate
+
+local colors = attributes.colors
+local transparencies = attributes.transparencies
+local registertransparancy = transparencies.register
+local registercolor = colors.register
+local colorsvalue = colors.value
+local transparenciesvalue = transparencies.value
+local forcedmodel = colors.forcedmodel
+local getpagecolormodel = colors.getpagecolormodel
+
+local c_transparency = pdfconstant("Transparency")
+
+local f_gray = formatters["%.3F g %.3F G"]
+local f_rgb = formatters["%.3F %.3F %.3F rg %.3F %.3F %.3F RG"]
+local f_cmyk = formatters["%.3F %.3F %.3F %.3F k %.3F %.3F %.3F %.3F K"]
local f_spot = formatters["/%s cs /%s CS %s SCN %s scn"]
local f_tr = formatters["Tr%s"]
-local f_cm = formatters["q %f %f %f %f %f %f cm"]
+local f_cm = formatters["q %F %F %F %F %F %F cm"]
local f_effect = formatters["%s Tc %s w %s Tr"]
local f_tr_gs = formatters["/Tr%s gs"]
local f_num_1 = tostring
@@ -76,11 +84,13 @@ lpdf.transparencygroups = transparencygroups
table.setmetatableindex(transparencygroups, function(transparencygroups,colormodel)
local cs = colorspaceconstants[colormodel]
if cs then
- local g = pdfreference(pdfflushobject(pdfdictionary {
+ local d = pdfdictionary {
S = c_transparency,
CS = cs,
I = true,
- }))
+ }
+ -- local g = pdfreference(pdfflushobject(tostring(d)))
+ local g = pdfreference(pdfdelayedobject(tostring(d)))
transparencygroups[colormodel] = g
return g
else
@@ -89,26 +99,18 @@ table.setmetatableindex(transparencygroups, function(transparencygroups,colormod
end
end)
-local currentgroupcolormodel
-
local function addpagegroup()
- if currentgroupcolormodel then
- local g = transparencygroups[currentgroupcolormodel]
+ local model = getpagecolormodel()
+ if model then
+ local g = transparencygroups[model]
if g then
- lpdf.addtopageattributes("Group",g)
+ addtopageattributes("Group",g)
end
end
end
lpdf.registerpagefinalizer(addpagegroup,3,"pagegroup")
-local function synchronizecolormodel(model)
- currentgroupcolormodel = model
-end
-
-backends.codeinjections.synchronizecolormodel = synchronizecolormodel
-commands.synchronizecolormodel = synchronizecolormodel
-
-- injection code (needs a bit reordering)
-- color injection
@@ -175,7 +177,7 @@ local f_gray_function = formatters["%s mul"]
local documentcolorspaces = pdfdictionary()
-local spotcolorhash = { } -- not needed
+local spotcolorhash = { } -- not needed
local spotcolornames = { }
local indexcolorhash = { }
local delayedindexcolors = { }
@@ -193,7 +195,7 @@ end
-- This should become delayed i.e. only flush when used; in that case we need
-- need to store the specification and then flush them when accesssomespotcolor
-- is called. At this moment we assume that splotcolors that get defined are
--- also used which keeps the overhad small anyway.
+-- also used which keeps the overhead small anyway. Tricky for mp ...
local processcolors
@@ -224,13 +226,14 @@ local function registersomespotcolor(name,noffractions,names,p,colorspace,range,
local mr = pdfreference(m)
spotcolorhash[name] = m
documentcolorspaces[name] = mr
- lpdf.adddocumentcolorspace(name,mr)
+ adddocumentcolorspace(name,mr)
else
local cnames = pdfarray()
local domain = pdfarray()
local colorants = pdfdictionary()
for n in gmatch(names,"[^,]+") do
local name = spotcolornames[n] or n
+ -- the cmyk names assume that they are indeed these colors
if n == "cyan" then
name = "Cyan"
elseif n == "magenta" then
@@ -241,9 +244,15 @@ local function registersomespotcolor(name,noffractions,names,p,colorspace,range,
name = "Black"
else
local sn = spotcolorhash[name] or spotcolorhash[n]
+ if not sn then
+ report_color("defining %a as colorant",name)
+ colors.definespotcolor("",name,"p=1",true)
+ sn = spotcolorhash[name] or spotcolorhash[n]
+ end
if sn then
colorants[name] = pdfreference(sn)
else
+ -- maybe some day generate colorants (spot colors for multi) automatically
report_color("unknown colorant %a, using black instead",name or n)
name = "Black"
end
@@ -280,13 +289,13 @@ local function registersomespotcolor(name,noffractions,names,p,colorspace,range,
cnames,
colorspace,
pdfreference(calculation),
- lpdf.shareobjectreference(tostring(channels)), -- optional but needed for shades
+ pdfshareobjectreference(tostring(channels)), -- optional but needed for shades
}
local m = pdfflushobject(array)
local mr = pdfreference(m)
spotcolorhash[name] = m
documentcolorspaces[name] = mr
- lpdf.adddocumentcolorspace(name,mr)
+ adddocumentcolorspace(name,mr)
end
end
@@ -336,7 +345,7 @@ local function registersomeindexcolor(name,noffractions,names,p,colorspace,range
end
vector = pdfverbose { "<", concat(vector, " "), ">" }
local n = pdfflushobject(pdfarray{ pdf_indexed, a, 255, vector })
- lpdf.adddocumentcolorspace(format("%s_indexed",name),pdfreference(n))
+ adddocumentcolorspace(format("%s_indexed",name),pdfreference(n))
return n
end
@@ -455,7 +464,7 @@ function registrations.transparency(n,a,t)
local mr = pdfreference(m)
transparencyhash[0] = m
documenttransparencies[0] = mr
- lpdf.adddocumentextgstate("Tr0",mr)
+ adddocumentextgstate("Tr0",mr)
done = true
end
if n > 0 and not transparencyhash[n] then
@@ -470,13 +479,16 @@ function registrations.transparency(n,a,t)
local mr = pdfreference(m)
transparencyhash[n] = m
documenttransparencies[n] = mr
- lpdf.adddocumentextgstate(f_tr(n),mr)
+ adddocumentextgstate(f_tr(n),mr)
end
end
statistics.register("page group warning", function()
- if done and not transparencygroups[currentgroupcolormodel] then
- return "transparencies are used but no pagecolormodel is set"
+ if done then
+ local model = getpagecolormodel()
+ if model and not transparencygroups[model] then
+ return "transparencies are used but no pagecolormodel is set"
+ end
end
end)
@@ -520,6 +532,12 @@ end
lpdf.color = lpdfcolor
+interfaces.implement {
+ name = "lpdf_color",
+ actions = { lpdfcolor, context },
+ arguments = "integer"
+}
+
function lpdf.colorspec(model,ca,default)
if ca and ca > 0 then
local cv = colors.value(ca)
@@ -544,7 +562,7 @@ function lpdf.colorspec(model,ca,default)
end
function lpdf.pdfcolor(attribute) -- bonus, for pgf and friends
- context(lpdfcolor(1,attribute))
+ return lpdfcolor(1,attribute)
end
function lpdf.transparency(ct,default) -- kind of overlaps with transparencycode
@@ -689,7 +707,7 @@ end
-- this will move to lpdf-spe.lua
-local f_slant = formatters["pdf: q 1 0 %f 1 0 0 cm"]
+local f_slant = formatters["pdf: q 1 0 %F 1 0 0 cm"]
backends.pdf.tables.vfspecials = allocate { -- todo: distinguish between glyph and rule color
diff --git a/tex/context/base/lpdf-epa.lua b/tex/context/base/lpdf-epa.lua
index 61d57b8d3..dd5ecc609 100644
--- a/tex/context/base/lpdf-epa.lua
+++ b/tex/context/base/lpdf-epa.lua
@@ -10,30 +10,43 @@ if not modules then modules = { } end modules ['lpdf-epa'] = {
-- change.
local type, tonumber = type, tonumber
-local format, gsub = string.format, string.gsub
+local format, gsub, lower = string.format, string.gsub, string.lower
local formatters = string.formatters
----- lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-local trace_links = false trackers.register("figures.links", function(v) trace_links = v end)
-local report_link = logs.reporter("backend","merging")
+local trace_links = false trackers.register("figures.links", function(v) trace_links = v end)
+local trace_outlines = false trackers.register("figures.outlines", function(v) trace_outlines = v end)
+local report_link = logs.reporter("backend","link")
+local report_outline = logs.reporter("backend","outline")
+
+local epdf = epdf
local backends = backends
local lpdf = lpdf
local context = context
+local loadpdffile = lpdf.epdf.load
+
+local nameonly = file.nameonly
+
local variables = interfaces.variables
local codeinjections = backends.pdf.codeinjections
----- urlescaper = lpegpatterns.urlescaper
----- utftohigh = lpegpatterns.utftohigh
local escapetex = characters.filters.utf.private.escape
+local bookmarks = structures.bookmarks
+
local layerspec = { -- predefining saves time
"epdflinks"
}
+local f_namespace = formatters["lpdf-epa-%s-"]
+
local function makenamespace(filename)
- return format("lpdf-epa-%s-",file.removesuffix(file.basename(filename)))
+ filename = gsub(lower(nameonly(filename)),"[^%a%d]+","-")
+ return f_namespace(filename)
end
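+
+-- So a graphic like "My Figure 2.pdf" gets the namespace "lpdf-epa-my-figure-2-": lowercased,
+-- with runs of non-alphanumeric characters collapsed into dashes.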
local function add_link(x,y,w,h,destination,what)
@@ -71,7 +84,7 @@ local function link_goto(x,y,w,h,document,annotation,pagedata,namespace)
if type(destination) == "string" then
local destinations = document.destinations
local wanted = destinations[destination]
- destination = wanted and wanted.D
+        destination = wanted and wanted.D -- is this ok? isn't destination already a string?
if destination then what = "named" end
end
local pagedata = destination and destination[1]
@@ -94,10 +107,17 @@ local function link_uri(x,y,w,h,document,annotation)
end
end
+-- The rules in PDF for what counts as a 'file specification' are in fact quite elaborate
+-- (see section 3.10 in the 1.7 reference), so we need to test for a string as well
+-- as a table. TH/20140916
+
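+-- (A plain specification is just /F (somefile.pdf), while the dictionary variant looks
+-- something like /F << /Type /Filespec /F (somefile.pdf) /UF (somefile.pdf) >>, hence
+-- the extra table check below.)
+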
local function link_file(x,y,w,h,document,annotation)
local a = annotation.A
if a then
local filename = a.F
+ if type(filename) == "table" then
+ filename = filename.F
+ end
if filename then
filename = escapetex(filename)
local destination = a.D
@@ -124,7 +144,7 @@ function codeinjections.mergereferences(specification)
end
if specification then
local fullname = specification.fullname
- local document = lpdf.epdf.load(fullname)
+ local document = loadpdffile(fullname) -- costs time
if document then
local pagenumber = specification.page or 1
local xscale = specification.yscale or 1
@@ -132,22 +152,31 @@ function codeinjections.mergereferences(specification)
local size = specification.size or "crop" -- todo
local pagedata = document.pages[pagenumber]
local annotations = pagedata and pagedata.Annots
+ local namespace = makenamespace(fullname)
+ local reference = namespace .. pagenumber
if annotations and annotations.n > 0 then
- local namespace = format("lpdf-epa-%s-",file.removesuffix(file.basename(fullname)))
- local reference = namespace .. pagenumber
- local mediabox = pagedata.MediaBox
- local llx, lly, urx, ury = mediabox[1], mediabox[2], mediabox[3], mediabox[4]
- local width, height = xscale * (urx - llx), yscale * (ury - lly) -- \\overlaywidth, \\overlayheight
+ local mediabox = pagedata.MediaBox
+ local llx = mediabox[1]
+ local lly = mediabox[2]
+ local urx = mediabox[3]
+ local ury = mediabox[4]
+ local width = xscale * (urx - llx) -- \\overlaywidth, \\overlayheight
+ local height = yscale * (ury - lly) -- \\overlaywidth, \\overlayheight
context.definelayer( { "epdflinks" }, { height = height.."bp" , width = width.."bp" })
for i=1,annotations.n do
local annotation = annotations[i]
if annotation then
- local subtype = annotation.Subtype
+ local subtype = annotation.Subtype
local rectangle = annotation.Rect
- local a_llx, a_lly, a_urx, a_ury = rectangle[1], rectangle[2], rectangle[3], rectangle[4]
- local x, y = xscale * (a_llx - llx), yscale * (a_lly - lly)
- local w, h = xscale * (a_urx - a_llx), yscale * (a_ury - a_lly)
- if subtype == "Link" then
+ local a_llx = rectangle[1]
+ local a_lly = rectangle[2]
+ local a_urx = rectangle[3]
+ local a_ury = rectangle[4]
+ local x = xscale * (a_llx - llx)
+ local y = yscale * (a_lly - lly)
+ local w = xscale * (a_urx - a_llx)
+ local h = yscale * (a_ury - a_lly)
+ if subtype == "Link" then
local a = annotation.A
if a then
local linktype = a.S
@@ -161,7 +190,7 @@ function codeinjections.mergereferences(specification)
report_link("unsupported link annotation %a",linktype)
end
else
- report_link("mising link annotation")
+ report_link("missing link annotation")
end
elseif trace_links then
report_link("unsupported annotation %a",subtype)
@@ -171,21 +200,21 @@ function codeinjections.mergereferences(specification)
end
end
context.flushlayer { "epdflinks" }
- -- context("\\gdef\\figurereference{%s}",reference) -- global
- context.setgvalue("figurereference",reference) -- global
- if trace_links then
- report_link("setting figure reference to %a",reference)
- end
- specification.reference = reference
- return namespace
end
+ -- moved outside previous test
+ context.setgvalue("figurereference",reference) -- global
+ if trace_links then
+ report_link("setting figure reference to %a",reference)
+ end
+ specification.reference = reference
+ return namespace
end
end
return ""-- no namespace, empty, not nil
end
function codeinjections.mergeviewerlayers(specification)
- -- todo: parse included page for layers
+ -- todo: parse included page for layers .. or only for whole document inclusion
if true then
return
end
@@ -195,9 +224,9 @@ function codeinjections.mergeviewerlayers(specification)
end
if specification then
local fullname = specification.fullname
- local document = lpdf.epdf.load(fullname)
+ local document = loadpdffile(fullname)
if document then
- local namespace = format("lpdf:epa:%s:",file.removesuffix(file.basename(fullname)))
+ local namespace = makenamespace(fullname)
local layers = document.layers
if layers then
for i=1,layers.n do
@@ -225,3 +254,160 @@ function codeinjections.mergeviewerlayers(specification)
end
end
+-- new: for taco
+
+-- Beware, bookmarks can be in pdfdoc encoding or in unicode. However, in mkiv we
+-- write out the strings in unicode (hex). When we read them in, we check for a bom
+-- and convert to utf.
+
+function codeinjections.getbookmarks(filename)
+
+ -- The first version built a nested tree and flattened that afterwards ... but I decided
+ -- to keep it simple and flat.
+
+ local list = bookmarks.extras.get(filename)
+
+ if list then
+ return list
+ else
+ list = { }
+ end
+
+ local document = nil
+
+ if lfs.isfile(filename) then
+ document = loadpdffile(filename)
+ else
+ report_outline("unknown file %a",filename)
+ bookmarks.extras.register(filename,list)
+ return list
+ end
+
+ local outlines = document.Catalog.Outlines
+ local pages = document.pages
+ local nofpages = pages.n -- we need to access once in order to initialize
+ local destinations = document.destinations
+
+    -- I need to check this destination analyzer against the one in annotations .. best to
+    -- share code (right now it's inconsistent). On the todo list ...
+
+ local function setdestination(current,entry)
+ local destination = nil
+ local action = current.A
+ if action then
+ local subtype = action.S
+ if subtype == "GoTo" then
+ destination = action.D
+ if type(destination) == "string" then
+ entry.destination = destination
+ destination = destinations[destination]
+ local pagedata = destination and destination[1]
+ if pagedata then
+ entry.realpage = pagedata.number
+ end
+ else
+ -- maybe
+ end
+ else
+ -- maybe
+ end
+ else
+ local destination = current.Dest
+ if destination then
+ if type(destination) == "string" then
+ local wanted = destinations[destination]
+ destination = wanted and wanted.D
+ if destination then
+ entry.destination = destination
+ end
+ else
+ local pagedata = destination and destination[1]
+ if pagedata and pagedata.Type == "Page" then
+ entry.realpage = pagedata.number
+ end
+ end
+ end
+ end
+ end
+
+ local function traverse(current,depth)
+ while current do
+ -- local title = current.Title
+ local title = current("Title") -- can be pdfdoc or unicode
+ if title then
+ local entry = {
+ level = depth,
+ title = title,
+ }
+ list[#list+1] = entry
+ setdestination(current,entry)
+ if trace_outlines then
+ report_outline("%w%s",2*depth,title)
+ end
+ end
+ local first = current.First
+ if first then
+ local current = first
+ while current do
+ local title = current.Title
+ if title and trace_outlines then
+ report_outline("%w%s",2*depth,title)
+ end
+ local entry = {
+ level = depth,
+ title = title,
+ }
+ setdestination(current,entry)
+ list[#list+1] = entry
+ traverse(current.First,depth+1)
+ current = current.Next
+ end
+ end
+ current = current.Next
+ end
+ end
+
+ if outlines then
+ if trace_outlines then
+ report_outline("outline of %a:",document.filename)
+ report_outline()
+ end
+ traverse(outlines,0)
+ if trace_outlines then
+ report_outline()
+ end
+ elseif trace_outlines then
+ report_outline("no outline in %a",document.filename)
+ end
+
+ bookmarks.extras.register(filename,list)
+
+ return list
+
+end
+
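+-- A minimal usage sketch (assuming the file exists and actually has an outline):
+--
+--   local list = codeinjections.getbookmarks("somefile.pdf")
+--   for i=1,#list do
+--       local b = list[i]
+--       -- b.level and b.title are always set, b.realpage and/or b.destination when resolvable
+--   end
+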
+function codeinjections.mergebookmarks(specification)
+ -- codeinjections.getbookmarks(document)
+ if not specification then
+ specification = figures and figures.current()
+ specification = specification and specification.status
+ end
+ if specification then
+ local fullname = specification.fullname
+ local bookmarks = backends.codeinjections.getbookmarks(fullname)
+ local realpage = tonumber(specification.page) or 1
+ for i=1,#bookmarks do
+ local b = bookmarks[i]
+ if not b.usedpage then
+ if b.realpage == realpage then
+                    if trace_outlines then
+ report_outline("using %a at page %a of file %a",b.title,realpage,fullname)
+ end
+ b.usedpage = true
+ b.section = structures.sections.currentsectionindex()
+ b.pageindex = specification.pageindex
+ end
+ end
+ end
+ end
+end
diff --git a/tex/context/base/lpdf-epd.lua b/tex/context/base/lpdf-epd.lua
index a7399f6b4..1dc20bc26 100644
--- a/tex/context/base/lpdf-epd.lua
+++ b/tex/context/base/lpdf-epd.lua
@@ -6,124 +6,287 @@ if not modules then modules = { } end modules ['lpdf-epd'] = {
license = "see context related readme files"
}
--- This is an experimental layer around the epdf library. The reason for
--- this layer is that I want to be independent of the library (which
--- implements a selection of what a file provides) and also because I
--- want an interface closer to Lua's table model while the API stays
--- close to the original xpdf library. Of course, after prototyping a
--- solution, we can optimize it using the low level epdf accessors.
-
--- It will be handy when we have a __length and __next that can trigger
--- the resolve till then we will provide .n as #.
-
--- As there can be references to the parent we cannot expand a tree. I
--- played with some expansion variants but it does to pay off.
-
--- Maybe we need a close().
--- We cannot access all destinations in one run.
-
-local setmetatable, rawset, rawget, tostring, tonumber = setmetatable, rawset, rawget, tostring, tonumber
-local lower, match, char, find, sub = string.lower, string.match, string.char, string.find, string.sub
+-- This is an experimental layer around the epdf library. The reason for this layer is that
+-- I want to be independent of the library (which implements a selection of what a file
+-- provides) and also because I want an interface closer to Lua's table model while the API
+-- stays close to the original xpdf library. Of course, after prototyping a solution, we can
+-- optimize it using the low level epdf accessors. However, not all are accessible (this will
+-- be fixed).
+--
+-- It will be handy when we have a __length and __next that can trigger the resolve; till
+-- then we will provide .n as #; maybe in Lua 5.3 or later.
+--
+-- As there can be references to the parent we cannot expand a tree. I played with some
+-- expansion variants but it does not pay off; adding extra checks is not worth the trouble.
+--
+-- A loaded document stays cached. In order to free memory one has to explicitly unload the
+-- loaded document.
+--
+-- We have much more checking than needed in the prepare functions because occasionally
+-- we run into bugs in poppler or the epdf interface. It took us a while to realize that
+-- there was a long standing gc issue that, on long runs with many included pages, could
+-- crash the analyzer.
+--
+-- Normally a value is fetched by key, as in foo.Title, but as it can be in pdfdoc encoding
+-- a safer bet is foo("Title"), which will return a decoded string (or the original if it
+-- already was unicode).
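+--
+-- A small illustrative example, assuming a file "test.pdf" is present:
+--
+--   local document = lpdf.epdf.load("test.pdf")
+--   if document then
+--       print(document.Info("Title"))  -- decoded, also when the title is in pdfdoc encoding
+--       print(document.pages.n)        -- number of pages, resolved lazily
+--       print(document.Catalog.Type)   -- plain key access
+--   end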
+
+local setmetatable, rawset, rawget, type = setmetatable, rawset, rawget, type
+local tostring, tonumber = tostring, tonumber
+local lower, match, char, byte, find = string.lower, string.match, string.char, string.byte, string.find
+local abs = math.abs
local concat = table.concat
-local toutf = string.toutf
+local toutf, toeight, utfchar = string.toutf, utf.toeight, utf.char
+
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local P, C, S, R, Ct, Cc, V, Carg, Cs, Cf, Cg = lpeg.P, lpeg.C, lpeg.S, lpeg.R, lpeg.Ct, lpeg.Cc, lpeg.V, lpeg.Carg, lpeg.Cs, lpeg.Cf, lpeg.Cg
+
+local epdf = epdf
+ lpdf = lpdf or { }
+local lpdf = lpdf
+local lpdf_epdf = { }
+lpdf.epdf = lpdf_epdf
+
+local pdf_open = epdf.open
+
+local report_epdf = logs.reporter("epdf")
+
+local getDict, getArray, getReal, getNum, getString, getBool, getName, getRef, getRefNum
+local getType, getTypeName
+local dictGetLength, dictGetVal, dictGetValNF, dictGetKey
+local arrayGetLength, arrayGetNF, arrayGet
+local streamReset, streamGetDict, streamGetChar
+
+do
+ local object = epdf.Object()
+ --
+ getDict = object.getDict
+ getArray = object.getArray
+ getReal = object.getReal
+ getNum = object.getNum
+ getString = object.getString
+ getBool = object.getBool
+ getName = object.getName
+ getRef = object.getRef
+ getRefNum = object.getRefNum
+ --
+ getType = object.getType
+ getTypeName = object.getTypeName
+ --
+ streamReset = object.streamReset
+ streamGetDict = object.streamGetDict
+ streamGetChar = object.streamGetChar
+ --
+end
-local report_epdf = logs.reporter("epdf")
+local function initialize_methods(xref)
+ local dictionary = epdf.Dict(xref)
+ local array = epdf.Array(xref)
+ --
+ dictGetLength = dictionary.getLength
+ dictGetVal = dictionary.getVal
+ dictGetValNF = dictionary.getValNF
+ dictGetKey = dictionary.getKey
+ --
+ arrayGetLength = array.getLength
+ arrayGetNF = array.getNF
+ arrayGet = array.get
+ --
+ initialize_methods = function()
+ -- already done
+ end
+end
--- a bit of protection
+local typenames = { [0] =
+ "boolean",
+ "integer",
+ "real",
+ "string",
+ "name",
+ "null",
+ "array",
+ "dictionary",
+ "stream",
+ "ref",
+ "cmd",
+ "error",
+ "eof",
+ "none",
+ "integer64",
+}
-local limited = false
+local typenumbers = table.swapped(typenames)
-directives.register("system.inputmode", function(v)
- if not limited then
- local i_limiter = io.i_limiter(v)
- if i_limiter then
- epdf.open = i_limiter.protect(epdf.open)
- limited = true
- end
- end
-end)
+local null_code = typenumbers.null
+local ref_code = typenumbers.ref
---
+local function fatal_error(...)
+ report_epdf(...)
+ report_epdf("aborting job in order to avoid crash")
+ os.exit()
+end
+
+-- epdf is the built-in library
function epdf.type(o)
local t = lower(match(tostring(o),"[^ :]+"))
return t or "?"
end
-lpdf = lpdf or { }
-local lpdf = lpdf
+local checked_access
+
+-- dictionaries (can be optimized: ... resolve and redefine when all locals set)
-lpdf.epdf = { }
+local frompdfdoc = lpdf.frompdfdoc
-local checked_access
+local function get_flagged(t,f,k)
+ local fk = f[k]
+ if not fk then
+ return t[k]
+ elseif fk == "rawtext" then
+ return frompdfdoc(t[k])
+ else -- no other flags yet
+ return t[k]
+ end
+end
-local function prepare(document,d,t,n,k,mt)
+local function prepare(document,d,t,n,k,mt,flags)
for i=1,n do
- local v = d:getVal(i)
- local r = d:getValNF(i)
- if r:getTypeName() == "ref" then
- r = r:getRef().num
- local c = document.cache[r]
- if c then
- --
+ local v = dictGetVal(d,i)
+ if v then
+ local r = dictGetValNF(d,i)
+ local kind = getType(v)
+ if kind == null_code then
+ -- ignore
else
- c = checked_access[v:getTypeName()](v,document,r)
- if c then
- document.cache[r] = c
- document.xrefs[c] = r
+ local key = dictGetKey(d,i)
+ if kind then
+ if r and getType(r) == ref_code then
+ local objnum = getRefNum(r)
+ local cached = document.__cache__[objnum]
+ if not cached then
+ cached = checked_access[kind](v,document,objnum,mt)
+                            if cached then
+ document.__cache__[objnum] = cached
+ document.__xrefs__[cached] = objnum
+ end
+ end
+ t[key] = cached
+ else
+ local v, flag = checked_access[kind](v,document)
+ t[key] = v
+ if flag and flags then
+ flags[key] = flag -- flags
+ end
+ end
+ else
+ report_epdf("warning: nil value for key %a in dictionary",key)
end
end
- t[d:getKey(i)] = c
else
- t[d:getKey(i)] = checked_access[v:getTypeName()](v,document)
+ fatal_error("error: invalid value at index %a in dictionary of %a",i,document.filename)
end
end
- getmetatable(t).__index = nil -- ?? weird
-setmetatable(t,mt)
+ if mt then
+ setmetatable(t,mt)
+ else
+ getmetatable(t).__index = nil
+ end
return t[k]
end
-local function some_dictionary(d,document,r,mt)
- local n = d and d:getLength() or 0
+local function some_dictionary(d,document)
+ local n = d and dictGetLength(d) or 0
+ if n > 0 then
+ local t = { }
+ local f = { }
+ setmetatable(t, {
+ __index = function(t,k)
+                return prepare(document,d,t,n,k,_,f)
+ end,
+ __call = function(t,k)
+ return get_flagged(t,f,k)
+ end,
+ } )
+ return t
+ end
+end
+
+local function get_dictionary(object,document,r,mt)
+ local d = getDict(object)
+ local n = d and dictGetLength(d) or 0
if n > 0 then
local t = { }
- setmetatable(t, { __index = function(t,k) return prepare(document,d,t,n,k,mt) end } )
+ local f = { }
+ setmetatable(t, {
+ __index = function(t,k)
+ return prepare(document,d,t,n,k,mt,f)
+ end,
+ __call = function(t,k)
+ return get_flagged(t,f,k)
+ end,
+ } )
return t
end
end
-local done = { }
+-- arrays (can be optimized: ... resolve and redefine when all locals set)
local function prepare(document,a,t,n,k)
for i=1,n do
- local v = a:get(i)
- local r = a:getNF(i)
- if v:getTypeName() == "null" then
- -- TH: weird, but appears possible
- elseif r:getTypeName() == "ref" then
- r = r:getRef().num
- local c = document.cache[r]
- if c then
- --
+ local v = arrayGet(a,i)
+ if v then
+ local kind = getType(v)
+ if kind == null_code then
+ -- ignore
+ elseif kind then
+ local r = arrayGetNF(a,i)
+ if r and getType(r) == ref_code then
+ local objnum = getRefNum(r)
+ local cached = document.__cache__[objnum]
+ if not cached then
+ cached = checked_access[kind](v,document,objnum)
+ document.__cache__[objnum] = cached
+ document.__xrefs__[cached] = objnum
+ end
+ t[i] = cached
+ else
+ t[i] = checked_access[kind](v,document)
+ end
else
- c = checked_access[v:getTypeName()](v,document,r)
- document.cache[r] = c
- document.xrefs[c] = r
+ report_epdf("warning: nil value for index %a in array",i)
end
- t[i] = c
else
- t[i] = checked_access[v:getTypeName()](v,document)
+ fatal_error("error: invalid value at index %a in array of %a",i,document.filename)
end
end
getmetatable(t).__index = nil
return t[k]
end
-local function some_array(a,document,r)
- local n = a and a:getLength() or 0
+local function some_array(a,document)
+ local n = a and arrayGetLength(a) or 0
if n > 0 then
local t = { n = n }
- setmetatable(t, { __index = function(t,k) return prepare(document,a,t,n,k) end } )
+ setmetatable(t, {
+ __index = function(t,k)
+ return prepare(document,a,t,n,k)
+ end
+ } )
+ return t
+ end
+end
+
+local function get_array(object,document)
+ local a = getArray(object)
+ local n = a and arrayGetLength(a) or 0
+ if n > 0 then
+ local t = { n = n }
+ setmetatable(t, {
+ __index = function(t,k)
+ return prepare(document,a,t,n,k)
+ end
+ } )
return t
end
end
@@ -131,9 +294,9 @@ end
local function streamaccess(s,_,what)
if not what or what == "all" or what == "*all" then
local t, n = { }, 0
- s:streamReset()
+ streamReset(s)
while true do
- local c = s:streamGetChar()
+ local c = streamGetChar(s)
if c < 0 then
break
else
@@ -145,56 +308,96 @@ local function streamaccess(s,_,what)
end
end
-local function some_stream(d,document,r)
+local function get_stream(d,document)
if d then
- d:streamReset()
- local s = some_dictionary(d:streamGetDict(),document,r)
+ streamReset(d)
+ local s = some_dictionary(streamGetDict(d),document)
getmetatable(s).__call = function(...) return streamaccess(d,...) end
return s
end
end
--- we need epdf.boolean(v) in addition to v:getBool() [dictionary, array, stream, real, integer, string, boolean, name, ref, null]
-
-checked_access = {
- dictionary = function(d,document,r)
- return some_dictionary(d:getDict(),document,r)
- end,
- array = function(a,document,r)
- return some_array(a:getArray(),document,r)
- end,
- stream = function(v,document,r)
- return some_stream(v,document,r)
- end,
- real = function(v)
- return v:getReal()
- end,
- integer = function(v)
- return v:getNum()
- end,
- string = function(v)
- return toutf(v:getString())
- end,
- boolean = function(v)
- return v:getBool()
- end,
- name = function(v)
- return v:getName()
- end,
- ref = function(v)
- return v:getRef()
- end,
- null = function()
- return nil
- end,
-}
+-- We need to convert the string from utf16 although there is no way to
+-- check if we have a regular string starting with a bom. So, we have
+-- a dilemma here: a pdfdoc encoded string can be invalid utf.
--- checked_access.real = epdf.real
--- checked_access.integer = epdf.integer
--- checked_access.string = epdf.string
--- checked_access.boolean = epdf.boolean
--- checked_access.name = epdf.name
--- checked_access.ref = epdf.ref
+-- <hex encoded>        : implicit 0 appended if odd number of digits
+-- (byte encoded)       : \( \) \\ escaped
+--
+-- <feff hex encoded>   : utf16be
+--
+-- \n \r \t \b \f \( \) \\ \NNN and \<newline> : append next line
+--
+-- the getString function gives back bytes so we don't need to worry about
+-- the hex aspect.
+
+local u_pattern = lpeg.patterns.utfbom_16_be * lpeg.patterns.utf16_to_utf8_be
+----- b_pattern = lpeg.patterns.hextobytes
+
+local function get_string(v)
+    -- the toutf function only converts a utf16 string and leaves the original
+ -- untouched otherwise; one might want to apply lpdf.frompdfdoc to a
+ -- non-unicode string
+ local s = getString(v)
+ if not s or s == "" then
+ return ""
+ end
+ local u = lpegmatch(u_pattern,s)
+ if u then
+ return u -- , "unicode"
+ end
+    -- this is too tricky and fails on e.g. a reload of url www.pragma-ade.com
+ -- local b = lpegmatch(b_pattern,s)
+ -- if b then
+ -- return b, "rawtext"
+ -- end
+ return s, "rawtext"
+end
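+
+-- So a string object <FEFF00480069> comes back as the utf8 string "Hi", while a plain (Hi)
+-- literal is returned as-is and flagged "rawtext", so that a later foo("Key") access can
+-- still run it through lpdf.frompdfdoc.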
+
+local function get_null()
+ return nil
+end
+
+-- we have dual access: by typenumber and by typename
+
+local function invalidaccess(k,document)
+ local fullname = type(document) == "table" and document.fullname
+ if fullname then
+ fatal_error("error, asking for key %a in checker of %a",k,fullname)
+ else
+ fatal_error("error, asking for key %a in checker",k)
+ end
+end
+
+checked_access = table.setmetatableindex(function(t,k)
+ return function(v,document)
+ invalidaccess(k,document)
+ end
+end)
+
+checked_access[typenumbers.boolean] = getBool
+checked_access[typenumbers.integer] = getNum
+checked_access[typenumbers.real] = getReal
+checked_access[typenumbers.string] = get_string -- getString
+checked_access[typenumbers.name] = getName
+checked_access[typenumbers.null] = get_null
+checked_access[typenumbers.array] = get_array -- d,document,r
+checked_access[typenumbers.dictionary] = get_dictionary -- d,document,r
+checked_access[typenumbers.stream] = get_stream
+checked_access[typenumbers.ref] = getRef
+
+for i=0,#typenames do
+ local checker = checked_access[i]
+ if not checker then
+ checker = function()
+ return function(v,document)
+ invalidaccess(i,document)
+ end
+ end
+ checked_access[i] = checker
+ end
+ checked_access[typenames[i]] = checker
+end
local function getnames(document,n,target) -- direct
if n then
@@ -252,7 +455,6 @@ local function getlayers(document)
local n = layers.n
for i=1,n do
local layer = layers[i]
---~ print(document.xrefs[layer])
t[i] = layer.Name
end
t.n = n
@@ -261,52 +463,39 @@ local function getlayers(document)
end
end
+local function getstructure(document)
+ -- this might become a tree
+ return document.Catalog.StructTreeRoot
+end
local function getpages(document,Catalog)
- local data = document.data
- local xrefs = document.xrefs
- local cache = document.cache
- local cata = data:getCatalog()
- local xref = data:getXRef()
- local pages = { }
- local nofpages = cata:getNumPages()
--- local function getpagestuff(pagenumber,k)
--- if k == "MediaBox" then
--- local pageobj = cata:getPage(pagenumber)
--- local pagebox = pageobj:getMediaBox()
--- return { pagebox.x1, pagebox.y1, pagebox.x2, pagebox.y2 }
--- elseif k == "CropBox" then
--- local pageobj = cata:getPage(pagenumber)
--- local pagebox = pageobj:getMediaBox()
--- return { pagebox.x1, pagebox.y1, pagebox.x2, pagebox.y2 }
--- elseif k == "Resources" then
--- print("todo page resources from parent")
--- -- local pageobj = cata:getPage(pagenumber)
--- -- local resources = pageobj:getResources()
--- end
--- end
--- for pagenumber=1,nofpages do
--- local mt = { __index = function(t,k)
--- local v = getpagestuff(pagenumber,k)
--- if v then
--- t[k] = v
--- end
--- return v
--- end }
- local mt = { __index = Catalog.Pages }
+ local __data__ = document.__data__
+ local __xrefs__ = document.__xrefs__
+ local __cache__ = document.__cache__
+ local __xref__ = document.__xref__
+ --
+ local catalog = __data__:getCatalog()
+ local pages = { }
+ local nofpages = catalog:getNumPages()
+ local metatable = { __index = Catalog.Pages }
+ --
for pagenumber=1,nofpages do
- local pagereference = cata:getPageRef(pagenumber).num
- local pagedata = some_dictionary(xref:fetch(pagereference,0):getDict(),document,pagereference,mt)
+ local pagereference = catalog:getPageRef(pagenumber).num
+ local pageobject = __xref__:fetch(pagereference,0)
+ local pagedata = get_dictionary(pageobject,document,pagereference,metatable)
if pagedata then
- pagedata.number = pagenumber
- pages[pagenumber] = pagedata
- xrefs[pagedata] = pagereference
- cache[pagereference] = pagedata
+ -- rawset(pagedata,"number",pagenumber)
+ pagedata.number = pagenumber
+ pages[pagenumber] = pagedata
+ __xrefs__[pagedata] = pagereference
+ __cache__[pagereference] = pagedata
else
report_epdf("missing pagedata at slot %i",i)
end
end
+ --
pages.n = nofpages
+ --
return pages
end
@@ -329,23 +518,29 @@ end
local loaded = { }
-function lpdf.epdf.load(filename)
+function lpdf_epdf.load(filename)
local document = loaded[filename]
if not document then
- statistics.starttiming(lpdf.epdf)
- local data = epdf.open(filename) -- maybe resolvers.find_file
- if data then
+ statistics.starttiming(lpdf_epdf)
+ local __data__ = pdf_open(filename) -- maybe resolvers.find_file
+ if __data__ then
+ local __xref__ = __data__:getXRef()
document = {
- filename = filename,
- cache = { },
- xrefs = { },
- data = data,
+ filename = filename,
+ __cache__ = { },
+ __xrefs__ = { },
+ __fonts__ = { },
+ __data__ = __data__,
+ __xref__ = __xref__,
}
- local Catalog = some_dictionary(data:getXRef():getCatalog():getDict(),document)
- local Info = some_dictionary(data:getXRef():getDocInfo():getDict(),document)
- document.Catalog = Catalog
- document.Info = Info
- -- document.catalog = Catalog
+ --
+ initialize_methods(__xref__)
+ --
+ local Catalog = some_dictionary(__xref__:getCatalog():getDict(),document)
+ local Info = some_dictionary(__xref__:getDocInfo():getDict(),document)
+ --
+ document.Catalog = Catalog
+ document.Info = Info
-- a few handy helper tables
document.pages = delayed(document,"pages", function() return getpages(document,Catalog) end)
document.destinations = delayed(document,"destinations", function() return getnames(document,Catalog.Names and Catalog.Names.Dests) end)
@@ -353,28 +548,292 @@ function lpdf.epdf.load(filename)
document.widgets = delayed(document,"widgets", function() return getnames(document,Catalog.Names and Catalog.Names.AcroForm) end)
document.embeddedfiles = delayed(document,"embeddedfiles",function() return getnames(document,Catalog.Names and Catalog.Names.EmbeddedFiles) end)
document.layers = delayed(document,"layers", function() return getlayers(document) end)
+ document.structure = delayed(document,"structure", function() return getstructure(document) end)
else
document = false
end
loaded[filename] = document
- statistics.stoptiming(lpdf.epdf)
- -- print(statistics.elapsedtime(lpdf.epdf))
+ loaded[document] = document
+ statistics.stoptiming(lpdf_epdf)
+ -- print(statistics.elapsedtime(lpdf_epdf))
+ end
+ return document or nil
+end
+
+function lpdf_epdf.unload(filename)
+ local document = loaded[filename]
+ if document then
+ loaded[document] = nil
+ loaded[filename] = nil
end
- return document
end
-- for k, v in next, expand(t) do
-function lpdf.epdf.expand(t)
+local function expand(t)
if type(t) == "table" then
local dummy = t.dummy
end
return t
end
+-- for k, v in expanded(t) do
+
+local function expanded(t)
+ if type(t) == "table" then
+ local dummy = t.dummy
+ end
+ return next, t
+end
+
+lpdf_epdf.expand = expand
+lpdf_epdf.expanded = expanded
+
+-- we could resolve the text stream in one pass if we directly handle the
+-- font but why should we complicate things
+
+local hexdigit = R("09","AF")
+local numchar = ( P("\\") * ( (R("09")^3/tonumber) + C(1) ) ) + C(1)
+local number = lpegpatterns.number / tonumber
+local spaces = lpegpatterns.whitespace^1
+local optspaces = lpegpatterns.whitespace^0
+local keyword = P("/") * C(R("AZ","az","09")^1)
+local operator = C((R("AZ","az")+P("'")+P('"'))^1)
+
+local grammar = P { "start",
+ start = (keyword + number + V("dictionary") + V("unicode") + V("string") + V("unicode")+ V("array") + spaces)^1,
+ -- keyvalue = (keyword * spaces * V("start") + spaces)^1,
+ keyvalue = optspaces * Cf(Ct("") * Cg(keyword * optspaces * V("start") * optspaces)^1,rawset),
+ array = P("[") * Ct(V("start")^1) * P("]"),
+ dictionary = P("<<") * V("keyvalue") * P(">>"),
+ unicode = P("<") * Ct(Cc("hex") * C((1-P(">"))^1)) * P(">"),
+ string = P("(") * Ct(Cc("dec") * C((V("string")+numchar)^1)) * P(")"), -- untested
+}
+
+local operation = Ct(grammar^1 * operator)
+local parser = Ct((operation + P(1))^1)
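+
+-- Roughly: every drawing operation becomes a table with its operands followed by the operator
+-- name, so a snippet like "/F1 12 Tf" yields { "F1", 12, "Tf" }, while strings show up as
+-- subtables tagged "hex" or "dec" for later decoding.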
+
+-- beginbfrange : <start> <end> <first destination>
+--                <start> <end> [ <destination> <destination> ... ]
+-- beginbfchar  : <code> <destination>
+
+local fromsixteen = lpdf.fromsixteen -- maybe inline the lpeg ... but not worth it
+
+local function f_bfchar(t,a,b)
+ t[tonumber(a,16)] = fromsixteen(b)
+end
+
+local function f_bfrange_1(t,a,b,c)
+ print("todo 1",a,b,c)
+ -- c is string
+ -- todo t[tonumber(a,16)] = fromsixteen(b)
+end
+
+local function f_bfrange_2(t,a,b,c)
+ print("todo 2",a,b,c)
+ -- c is table
+ -- todo t[tonumber(a,16)] = fromsixteen(b)
+end
+
+local optionals = spaces^0
+local hexstring = optionals * P("<") * C((1-P(">"))^1) * P(">")
+local bfchar = Carg(1) * hexstring * hexstring / f_bfchar
+local bfrange = Carg(1) * hexstring * hexstring * hexstring / f_bfrange_1
+ + Carg(1) * hexstring * hexstring * optionals * P("[") * Ct(hexstring^1) * optionals * P("]") / f_bfrange_2
+local fromunicode = (
+ P("beginbfchar" ) * bfchar ^1 * optionals * P("endbfchar" ) +
+ P("beginbfrange") * bfrange^1 * optionals * P("endbfrange") +
+ spaces +
+ P(1)
+)^1 * Carg(1)
+
+local function analyzefonts(document,resources) -- unfinished
+ local fonts = document.__fonts__
+ if resources then
+ local fontlist = resources.Font
+ if fontlist then
+ for id, data in expanded(fontlist) do
+ if not fonts[id] then
+                    -- a quick hack ... I will look into it in more detail if I find a real
+                    -- application for it
+ local tounicode = data.ToUnicode()
+ if tounicode then
+ tounicode = lpegmatch(fromunicode,tounicode,1,{})
+ end
+ fonts[id] = {
+ tounicode = type(tounicode) == "table" and tounicode or { }
+ }
+ table.setmetatableindex(fonts[id],"self")
+ end
+ end
+ end
+ end
+ return fonts
+end
+
+local more = 0
+local unic = nil -- cheaper than passing each time as Carg(1)
+
+local p_hex_to_utf = C(4) / function(s) -- needs checking !
+ local now = tonumber(s,16)
+ if more > 0 then
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
+ more = 0
+ return unic[now] or utfchar(now)
+ elseif now >= 0xD800 and now <= 0xDBFF then
+ more = now
+ -- return ""
+ else
+ return unic[now] or utfchar(now)
+ end
+end
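+
+-- The surrogate arithmetic above is just standard utf16 decoding: for the pair D834 DD1E we
+-- get (0xD834-0xD800)*0x400 + (0xDD1E-0xDC00) + 0x10000 = 0x1D11E (MUSICAL SYMBOL G CLEF),
+-- so the 0x10000 offset is indeed needed.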
+
+local p_dec_to_utf = C(1) / function(s) -- needs checking !
+ local now = byte(s)
+ return unic[now] or utfchar(now)
+end
+
+local p_hex_to_utf = P(true) / function() more = 0 end * Cs(p_hex_to_utf^1)
+local p_dec_to_utf = P(true) / function() more = 0 end * Cs(p_dec_to_utf^1)
+
+function lpdf_epdf.getpagecontent(document,pagenumber)
+
+ local page = document.pages[pagenumber]
+
+ if not page then
+ return
+ end
+
+ local fonts = analyzefonts(document,page.Resources)
+
+ local content = page.Contents() or ""
+ local list = lpegmatch(parser,content)
+ local font = nil
+ -- local unic = nil
+
+ for i=1,#list do
+ local entry = list[i]
+ local size = #entry
+ local operator = entry[size]
+ if operator == "Tf" then
+ font = fonts[entry[1]]
+ unic = font.tounicode
+ elseif operator == "TJ" then -- { array, TJ }
+ local list = entry[1]
+ for i=1,#list do
+ local li = list[i]
+ if type(li) == "table" then
+ if li[1] == "hex" then
+ list[i] = lpegmatch(p_hex_to_utf,li[2])
+ else
+ list[i] = lpegmatch(p_dec_to_utf,li[2])
+ end
+ else
+ -- kern
+ end
+ end
+ elseif operator == "Tj" or operator == "'" or operator == '"' then -- { string, Tj } { string, ' } { n, m, string, " }
+ local list = entry[size-1]
+ if list[1] == "hex" then
+                list[2] = lpegmatch(p_hex_to_utf,list[2])
+ else
+                list[2] = lpegmatch(p_dec_to_utf,list[2])
+ end
+ end
+ end
+
+ unic = nil -- can be collected
+
+ return list
+
+end
+
+-- This is also an experiment. When I really need it I can improve it, for instance
+-- with proper position calculation. It might be useful for some search or so.
+
+local softhyphen = utfchar(0xAD) .. "$"
+local linefactor = 1.3
+
+function lpdf_epdf.contenttotext(document,list) -- maybe signal fonts
+ local last_y = 0
+ local last_f = 0
+ local text = { }
+ local last = 0
+
+ for i=1,#list do
+ local entry = list[i]
+ local size = #entry
+ local operator = entry[size]
+ if operator == "Tf" then
+ last_f = entry[2]
+ elseif operator == "TJ" then
+ local list = entry[1]
+ for i=1,#list do
+ local li = list[i]
+ if type(li) == "string" then
+ last = last + 1
+ text[last] = li
+ elseif li < -50 then
+ last = last + 1
+ text[last] = " "
+ end
+ end
+            -- line = concat(list) -- not used anywhere
+ elseif operator == "Tj" then
+ last = last + 1
+            text[last] = entry[size-1][2] -- the string part of the { kind, str } pair
+ elseif operator == "cm" or operator == "Tm" then
+ local ty = entry[6]
+ local dy = abs(last_y - ty)
+ if dy > linefactor*last_f then
+ if last > 0 then
+ if find(text[last],softhyphen) then
+ -- ignore
+ else
+ last = last + 1
+ text[last] = "\n"
+ end
+ end
+ end
+ last_y = ty
+ end
+ end
+
+ return concat(text)
+end
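
-- A usage sketch: the filename is a made-up example and load is assumed to be the
-- loader that this module defines elsewhere.

local function firstpagetext(filename)
    local document = lpdf_epdf.load(filename) -- assumption: the module's own loader
    if document then
        local list = lpdf_epdf.getpagecontent(document,1)
        if list then
            return lpdf_epdf.contenttotext(document,list)
        end
    end
end

-- print(firstpagetext("somefile.pdf"))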
+
+function lpdf_epdf.getstructure(document,list) -- just a test
+ local depth = 0
+ for i=1,#list do
+ local entry = list[i]
+ local size = #entry
+ local operator = entry[size]
+ if operator == "BDC" then
+ report_epdf("%w%s : %s",depth,entry[1] or "?",entry[2].MCID or "?")
+ depth = depth + 1
+ elseif operator == "EMC" then
+ depth = depth - 1
+ elseif operator == "TJ" then
+ local list = entry[1]
+ for i=1,#list do
+ local li = list[i]
+ if type(li) == "string" then
+ report_epdf("%w > %s",depth,li)
+ elseif li < -50 then
+ report_epdf("%w >",depth,li)
+ end
+ end
+ elseif operator == "Tj" then
+            report_epdf("%w > %s",depth,entry[size-1][2])
+ end
+ end
+end
+
+-- document.Catalog.StructTreeRoot.ParentTree.Nums[2][1].A.P[1])
+
-- helpers
--- function lpdf.epdf.getdestinationpage(document,name)
--- local destination = document.data:findDest(name)
+-- function lpdf_epdf.getdestinationpage(document,name)
+-- local destination = document.__data__:findDest(name)
-- return destination and destination.number
-- end
diff --git a/tex/context/base/lpdf-fld.lua b/tex/context/base/lpdf-fld.lua
index a9b9fd72d..f0aad3623 100644
--- a/tex/context/base/lpdf-fld.lua
+++ b/tex/context/base/lpdf-fld.lua
@@ -55,7 +55,8 @@ if not modules then modules = { } end modules ['lpdf-fld'] = {
-- for printing especially when highlighting (those colorfull foregrounds) is
-- on.
-local gmatch, lower, format = string.gmatch, string.lower, string.format
+local tostring, next = tostring, next
+local gmatch, lower, format, formatters = string.gmatch, string.lower, string.format, string.formatters
local lpegmatch = lpeg.match
local utfchar = utf.char
local bpfactor, todimen = number.dimenfactors.bp, string.todimen
@@ -92,14 +93,13 @@ local pdfflushobject = lpdf.flushobject
local pdfshareobjectreference = lpdf.shareobjectreference
local pdfshareobject = lpdf.shareobject
local pdfreserveobject = lpdf.reserveobject
-local pdfreserveannotation = lpdf.reserveannotation
local pdfaction = lpdf.action
-local hpack_node = node.hpack
-
-local nodepool = nodes.pool
+local pdfcolor = lpdf.color
+local pdfcolorvalues = lpdf.colorvalues
+local pdflayerreference = lpdf.layerreference
-local pdfannotation_node = nodepool.pdfannotation
+local hpack_node = node.hpack
local submitoutputformat = 0 -- 0=unknown 1=HTML 2=FDF 3=XML => not yet used, needs to be checked
@@ -125,39 +125,39 @@ function codeinjections.setformsmethod(name)
end
local flag = { -- /Ff
- ReadOnly = 1, -- 1
- Required = 2, -- 2
- NoExport = 4, -- 3
- MultiLine = 4096, -- 13
- Password = 8192, -- 14
- NoToggleToOff = 16384, -- 15
- Radio = 32768, -- 16
- PushButton = 65536, -- 17
- PopUp = 131072, -- 18
- Edit = 262144, -- 19
- Sort = 524288, -- 20
- FileSelect = 1048576, -- 21
- DoNotSpellCheck = 4194304, -- 23
- DoNotScroll = 8388608, -- 24
- Comb = 16777216, -- 25
- RichText = 33554432, -- 26
- RadiosInUnison = 33554432, -- 26
- CommitOnSelChange = 67108864, -- 27
+ ReadOnly = 2^ 0, -- 1
+ Required = 2^ 1, -- 2
+ NoExport = 2^ 2, -- 3
+ MultiLine = 2^12, -- 13
+ Password = 2^13, -- 14
+ NoToggleToOff = 2^14, -- 15
+ Radio = 2^15, -- 16
+ PushButton = 2^16, -- 17
+ PopUp = 2^17, -- 18
+ Edit = 2^18, -- 19
+ Sort = 2^19, -- 20
+ FileSelect = 2^20, -- 21
+ DoNotSpellCheck = 2^22, -- 23
+ DoNotScroll = 2^23, -- 24
+ Comb = 2^24, -- 25
+ RichText = 2^25, -- 26
+ RadiosInUnison = 2^25, -- 26
+ CommitOnSelChange = 2^26, -- 27
}
local plus = { -- /F
- Invisible = 1, -- 1
- Hidden = 2, -- 2
- Printable = 4, -- 3
- Print = 4, -- 3
- NoZoom = 8, -- 4
- NoRotate = 16, -- 5
- NoView = 32, -- 6
- ReadOnly = 64, -- 7
- Locked = 128, -- 8
- ToggleNoView = 256, -- 9
- LockedContents = 512, -- 10,
- AutoView = 256, -- 288 (6+9)
+ Invisible = 2^0, -- 1
+ Hidden = 2^1, -- 2
+ Printable = 2^2, -- 3
+ Print = 2^2, -- 3
+ NoZoom = 2^3, -- 4
+ NoRotate = 2^4, -- 5
+ NoView = 2^5, -- 6
+ ReadOnly = 2^6, -- 7
+ Locked = 2^7, -- 8
+ ToggleNoView = 2^8, -- 9
+ LockedContents = 2^9, -- 10,
+ AutoView = 2^8, -- 6 + 9 ?
}
-- todo: check what is interfaced
@@ -198,43 +198,90 @@ local function fieldplus(specification) -- /F
return n
end
-local function checked(what)
- local set, bug = references.identify("",what)
- if not bug and #set > 0 then
- local r, n = pdfaction(set)
- return pdfshareobjectreference(r)
- end
-end
+-- keep:
+--
+-- local function checked(what)
+-- local set, bug = references.identify("",what)
+-- if not bug and #set > 0 then
+-- local r, n = pdfaction(set)
+-- return pdfshareobjectreference(r)
+-- end
+-- end
+--
+-- local function fieldactions(specification) -- share actions
+-- local d, a = { }, nil
+-- a = specification.mousedown
+-- or specification.clickin if a and a ~= "" then d.D = checked(a) end
+-- a = specification.mouseup
+-- or specification.clickout if a and a ~= "" then d.U = checked(a) end
+-- a = specification.regionin if a and a ~= "" then d.E = checked(a) end -- Enter
+-- a = specification.regionout if a and a ~= "" then d.X = checked(a) end -- eXit
+-- a = specification.afterkey if a and a ~= "" then d.K = checked(a) end
+-- a = specification.format if a and a ~= "" then d.F = checked(a) end
+-- a = specification.validate if a and a ~= "" then d.V = checked(a) end
+-- a = specification.calculate if a and a ~= "" then d.C = checked(a) end
+-- a = specification.focusin if a and a ~= "" then d.Fo = checked(a) end
+-- a = specification.focusout if a and a ~= "" then d.Bl = checked(a) end
+-- a = specification.openpage if a and a ~= "" then d.PO = checked(a) end
+-- a = specification.closepage if a and a ~= "" then d.PC = checked(a) end
+-- -- a = specification.visiblepage if a and a ~= "" then d.PV = checked(a) end
+-- -- a = specification.invisiblepage if a and a ~= "" then d.PI = checked(a) end
+-- return next(d) and pdfdictionary(d)
+-- end
+
+local mapping = {
+ mousedown = "D", clickin = "D",
+ mouseup = "U", clickout = "U",
+ regionin = "E",
+ regionout = "X",
+ afterkey = "K",
+ format = "F",
+ validate = "V",
+ calculate = "C",
+ focusin = "Fo",
+ focusout = "Bl",
+ openpage = "PO",
+ closepage = "PC",
+ -- visiblepage = "PV",
+ -- invisiblepage = "PI",
+}
local function fieldactions(specification) -- share actions
- local d, a = { }, nil
- a = specification.mousedown
- or specification.clickin if a and a ~= "" then d.D = checked(a) end
- a = specification.mouseup
- or specification.clickout if a and a ~= "" then d.U = checked(a) end
- a = specification.regionin if a and a ~= "" then d.E = checked(a) end -- Enter
- a = specification.regionout if a and a ~= "" then d.X = checked(a) end -- eXit
- a = specification.afterkey if a and a ~= "" then d.K = checked(a) end
- a = specification.format if a and a ~= "" then d.F = checked(a) end
- a = specification.validate if a and a ~= "" then d.V = checked(a) end
- a = specification.calculate if a and a ~= "" then d.C = checked(a) end
- a = specification.focusin if a and a ~= "" then d.Fo = checked(a) end
- a = specification.focusout if a and a ~= "" then d.Bl = checked(a) end
- a = specification.openpage if a and a ~= "" then d.PO = checked(a) end
- a = specification.closepage if a and a ~= "" then d.PC = checked(a) end
- -- a = specification.visiblepage if a and a ~= "" then d.PV = checked(a) end
- -- a = specification.invisiblepage if a and a ~= "" then d.PI = checked(a) end
- return next(d) and pdfdictionary(d)
+ local d = nil
+ for key, target in next, mapping do
+ local code = specification[key]
+ if code and code ~= "" then
+ -- local a = checked(code)
+ local set, bug = references.identify("",code)
+ if not bug and #set > 0 then
+ local a = pdfaction(set) -- r, n
+ if a then
+ local r = pdfshareobjectreference(a)
+ if d then
+ d[target] = r
+ else
+ d = pdfdictionary { [target] = r }
+ end
+ else
+ report_fields("invalid field action %a, case %s",code,2)
+ end
+ else
+ report_fields("invalid field action %a, case %s",code,1)
+ end
+ end
+ end
+ -- if d then
+ -- d = pdfshareobjectreference(d) -- not much overlap or maybe only some patterns
+ -- end
+ return d
end
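
-- A sketch of how the mapping above is used; the reference names are hypothetical and
-- only resolve inside a real document:
--
--   fieldactions {
--       clickin  = "StartSound", -- ends up as the /D (mouse down) entry
--       focusout = "StopSound",  -- ends up as the /Bl (focus out) entry
--   }
--
-- which returns a dictionary along the lines of << /D n 0 R /Bl m 0 R >> or nil when
-- nothing resolves.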
-- fonts and color
local pdfdocencodingvector, pdfdocencodingcapsule
--- The pdf doc encoding vector is needed in order to
--- trigger propper unicode. Interesting is that when
--- a glyph is not in the vector, it is still visible
--- as it is taken from some other font. Messy.
+-- The pdf doc encoding vector is needed in order to trigger proper unicode handling.
+-- Interestingly, when a glyph is not in the vector, it is still visible as it is taken
+-- from some other font. Messy.
-- To be checked: only when text/line fields.
@@ -285,7 +332,7 @@ local function fieldsurrounding(specification)
local fontsize = specification.fontsize or "12pt"
local fontstyle = specification.fontstyle or "rm"
local fontalternative = specification.fontalternative or "tf"
- local colorvalue = specification.colorvalue
+ local colorvalue = tonumber(specification.colorvalue)
local s = fontnames[fontstyle]
if not s then
fontstyle, s = "rm", fontnames.rm
@@ -298,16 +345,16 @@ local function fieldsurrounding(specification)
fontsize = todimen(fontsize)
fontsize = fontsize and (bpfactor * fontsize) or 12
fontraise = 0.1 * fontsize -- todo: figure out what the natural one is and compensate for strutdp
- local fontcode = format("%0.4f Tf %0.4f Ts",fontsize,fontraise)
+ local fontcode = formatters["%0.4f Tf %0.4f Ts"](fontsize,fontraise)
-- we could test for colorvalue being 1 (black) and omit it then
- local colorcode = lpdf.color(3,colorvalue) -- we force an rgb color space
+ local colorcode = pdfcolor(3,colorvalue) -- we force an rgb color space
if trace_fields then
report_fields("using font, style %a, alternative %a, size %p, tag %a, code %a",fontstyle,fontalternative,fontsize,tag,fontcode)
report_fields("using color, value %a, code %a",colorvalue,colorcode)
end
local stream = pdfstream {
pdfconstant(tag),
- format("%s %s",fontcode,colorcode)
+ formatters["%s %s"](fontcode,colorcode)
}
usedfonts[tag] = a -- the name
-- move up with "x.y Ts"
@@ -570,17 +617,14 @@ local function todingbat(n)
end
end
--- local zero_bc = pdfarray { 0, 0, 0 }
--- local zero_bg = pdfarray { 1, 1, 1 }
-
local function fieldrendering(specification)
local bvalue = tonumber(specification.backgroundcolorvalue)
local fvalue = tonumber(specification.framecolorvalue)
local svalue = specification.fontsymbol
if bvalue or fvalue or (svalue and svalue ~= "") then
return pdfdictionary {
- BG = bvalue and pdfarray { lpdf.colorvalues(3,bvalue) } or nil, -- or zero_bg,
- BC = fvalue and pdfarray { lpdf.colorvalues(3,fvalue) } or nil, -- or zero_bc,
+ BG = bvalue and pdfarray { pdfcolorvalues(3,bvalue) } or nil, -- or zero_bg,
+ BC = fvalue and pdfarray { pdfcolorvalues(3,fvalue) } or nil, -- or zero_bc,
CA = svalue and pdfstring (svalue) or nil,
}
end
@@ -590,7 +634,7 @@ end
local function fieldlayer(specification) -- we can move this in line
local layer = specification.layer
- return (layer and lpdf.layerreference(layer)) or nil
+ return (layer and pdflayerreference(layer)) or nil
end
-- defining
@@ -611,7 +655,7 @@ local xfdftemplate = [[
function codeinjections.exportformdata(name)
local result = { }
for k, v in table.sortedhash(fields) do
-        result[#result+1] = format("    <field name='%s'><value>%s</value></field>",v.name or k,v.default or "")
+        result[#result+1] = formatters["    <field name='%s'><value>%s</value></field>"](v.name or k,v.default or "")
end
local base = file.basename(tex.jobname)
local xfdf = format(xfdftemplate,base,table.concat(result,"\n"))
@@ -912,7 +956,7 @@ local function save_parent(field,specification,d,hasopt)
end
local function save_kid(field,specification,d,optname)
- local kn = pdfreserveannotation()
+ local kn = pdfreserveobject()
field.kids[#field.kids+1] = pdfreference(kn)
if optname then
local opt = field.opt
@@ -921,7 +965,7 @@ local function save_kid(field,specification,d,optname)
end
end
local width, height, depth = specification.width or 0, specification.height or 0, specification.depth
- local box = hpack_node(pdfannotation_node(width,height,depth,d(),kn))
+ local box = hpack_node(nodeinjections.annotation(width,height,depth,d(),kn))
box.width, box.height, box.depth = width, height, depth -- redundant
return box
end
@@ -969,6 +1013,8 @@ local function makelinechild(name,specification)
if trace_fields then
report_fields("using child text %a",name)
end
+ -- we could save a little by not setting some key/value when it's the
+ -- same as parent but it would cost more memory to keep track of it
local d = pdfdictionary {
Subtype = pdf_widget,
Parent = pdfreference(parent.pobj),
diff --git a/tex/context/base/lpdf-fmt.lua b/tex/context/base/lpdf-fmt.lua
index b444f03c3..862c011b8 100644
--- a/tex/context/base/lpdf-fmt.lua
+++ b/tex/context/base/lpdf-fmt.lua
@@ -349,7 +349,7 @@ local filenames = {
}
local function locatefile(filename)
- local fullname = resolvers.findfile(filename,"icc")
+ local fullname = resolvers.findfile(filename,"icc",1,true)
if not fullname or fullname == "" then
fullname = resolvers.finders.byscheme("loc",filename) -- could be specific to the project
end
@@ -710,7 +710,9 @@ function codeinjections.setformat(s)
end
end
function codeinjections.setformat(noname)
- report_backend("error, format is already set to %a, ignoring %a",formatname,noname.format)
+ if trace_format then
+ report_backend("error, format is already set to %a, ignoring %a",formatname,noname.format)
+ end
end
else
report_backend("error, format %a is not supported",format)
@@ -732,9 +734,11 @@ directives.register("backend.format", function(v) -- table !
end
end)
-function commands.setformat(s)
- codeinjections.setformat(s)
-end
+interfaces.implement {
+ name = "setformat",
+ actions = codeinjections.setformat,
+ arguments = { { "*" } }
+}
function codeinjections.getformatoption(key)
return formatspecification and formatspecification[key]
@@ -743,7 +747,7 @@ end
function codeinjections.supportedformats()
local t = { }
for k, v in table.sortedhash(formats) do
- if find(k,"pdf") then
+ if find(k,"pdf",1,true) then
t[#t+1] = k
end
end
diff --git a/tex/context/base/lpdf-grp.lua b/tex/context/base/lpdf-grp.lua
index fed5e6a46..36c3507be 100644
--- a/tex/context/base/lpdf-grp.lua
+++ b/tex/context/base/lpdf-grp.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['lpdf-grp'] = {
license = "see context related readme files"
}
-local format, gsub = string.format, string.gsub
+local formatters, gsub = string.formatters, string.gsub
local concat = table.concat
local round = math.round
@@ -118,7 +118,7 @@ function nodeinjections.injectbitmap(t)
height = width * yresolution / xresolution
end
local image = img.new {
- stream = format(template,d(),t.data),
+ stream = formatters[template](d(),t.data),
width = width,
height = height,
bbox = { 0, 0, urx, ury },
@@ -236,7 +236,7 @@ function img.package(image) -- see lpdf-u3d **
local height = boundingbox[4]
local xform = img.scan {
attr = resources(),
- stream = format("%f 0 0 %f 0 0 cm /%s Do",width,height,imagetag),
+ stream = formatters["%F 0 0 %F 0 0 cm /%s Do"](width,height,imagetag),
bbox = { 0, 0, width/factor, height/factor },
}
img.immediatewrite(xform)
diff --git a/tex/context/base/lpdf-ini.lua b/tex/context/base/lpdf-ini.lua
index 23fe6c177..834f845c5 100644
--- a/tex/context/base/lpdf-ini.lua
+++ b/tex/context/base/lpdf-ini.lua
@@ -6,98 +6,308 @@ if not modules then modules = { } end modules ['lpdf-ini'] = {
license = "see context related readme files"
}
+-- beware of "too many locals" here
+
local setmetatable, getmetatable, type, next, tostring, tonumber, rawset = setmetatable, getmetatable, type, next, tostring, tonumber, rawset
local char, byte, format, gsub, concat, match, sub, gmatch = string.char, string.byte, string.format, string.gsub, table.concat, string.match, string.sub, string.gmatch
-local utfchar, utfvalues = utf.char, utf.values
-local sind, cosd, floor = math.sind, math.cosd, math.floor
+local utfchar, utfbyte, utfvalues = utf.char, utf.byte, utf.values
+local sind, cosd, floor, max, min = math.sind, math.cosd, math.floor, math.max, math.min
local lpegmatch, P, C, R, S, Cc, Cs = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc, lpeg.Cs
local formatters = string.formatters
+local isboolean = string.is_boolean
+
+local report_objects = logs.reporter("backend","objects")
+local report_finalizing = logs.reporter("backend","finalizing")
+local report_blocked = logs.reporter("backend","blocked")
+
+local implement = interfaces.implement
+local two_strings = interfaces.strings[2]
+
+-- In ConTeXt MkIV we use utf8 exclusively so all strings get mapped onto a hex
+-- encoded utf16 string type between <>. We could probably save some bytes by using
+-- strings between () but then we end up with escaped ()\ too.
+
+-- gethpos : used
+-- getpos : used
+-- getvpos : used
+--
+-- getmatrix : used
+-- hasmatrix : used
+--
+-- mapfile : used in font-ctx.lua
+-- mapline : used in font-ctx.lua
+--
+-- maxobjnum : not used
+-- obj : used
+-- immediateobj : used
+-- objtype : not used
+-- pageref : used
+-- print : can be used
+-- refobj : used
+-- registerannot : not to be used
+-- reserveobj : used
+
+-- pdf.catalog : used
+-- pdf.info : used
+-- pdf.trailer : used
+-- pdf.names : not to be used
+
+-- pdf.setinfo : used
+-- pdf.setcatalog : used
+-- pdf.setnames : not to be used
+-- pdf.settrailer : used
+
+-- pdf.getinfo : used
+-- pdf.getcatalog : used
+-- pdf.getnames : not to be used
+-- pdf.gettrailer : used
+
+local pdf = pdf
+local factor = number.dimenfactors.bp
+
+if pdf.setinfo then
+ -- table.setmetatablenewindex(pdf,function(t,k,v)
+ -- report_blocked("'pdf.%s' is not supported",k)
+ -- end)
+ -- the getters are harmless
+end
+
+if not pdf.setinfo then
+ function pdf.setinfo (s) pdf.info = s end
+ function pdf.setcatalog(s) pdf.catalog = s end
+ function pdf.setnames (s) pdf.names = s end
+ function pdf.settrailer(s) pdf.trailer = s end
+end
-local pdfreserveobject = pdf.reserveobj
-local pdfimmediateobject = pdf.immediateobj
-local pdfdeferredobject = pdf.obj
-local pdfreferenceobject = pdf.refobj
+if not pdf.getpos then
+ function pdf.getpos () return pdf.h, pdf.v end
+ function pdf.gethpos () return pdf.h end
+ function pdf.getvpos () return pdf.v end
+ function pdf.hasmatrix() return false end
+ function pdf.getmatrix() return 1, 0, 0, 1, 0, 0 end
+end
+
+if not pdf.setpageresources then
+ function pdf.setpageresources (s) pdf.pageresources = s end
+ function pdf.setpageattributes (s) pdf.pageattributes = s end
+ function pdf.setpagesattributes(s) pdf.pagesattributes = s end
+end
+
+local pdfsetinfo = pdf.setinfo
+local pdfsetcatalog = pdf.setcatalog
+local pdfsetnames = pdf.setnames
+local pdfsettrailer = pdf.settrailer
+
+local pdfsetpageresources = pdf.setpageresources
+local pdfsetpageattributes = pdf.setpageattributes
+local pdfsetpagesattributes = pdf.setpagesattributes
+
+local pdfgetpos = pdf.getpos
+local pdfgethpos = pdf.gethpos
+local pdfgetvpos = pdf.getvpos
+local pdfgetmatrix = pdf.getmatrix
+local pdfhasmatrix = pdf.hasmatrix
+
+local pdfreserveobject = pdf.reserveobj
+local pdfimmediateobject = pdf.immediateobj
+local pdfdeferredobject = pdf.obj
+local pdfreferenceobject = pdf.refobj
+
+-- function pdf.setinfo () report_blocked("'pdf.%s' is not supported","setinfo") end -- use lpdf.addtoinfo etc
+-- function pdf.setcatalog () report_blocked("'pdf.%s' is not supported","setcatalog") end
+-- function pdf.setnames () report_blocked("'pdf.%s' is not supported","setnames") end
+-- function pdf.settrailer () report_blocked("'pdf.%s' is not supported","settrailer") end
+-- function pdf.setpageresources () report_blocked("'pdf.%s' is not supported","setpageresources") end
+-- function pdf.setpageattributes () report_blocked("'pdf.%s' is not supported","setpageattributes") end
+-- function pdf.setpagesattributes() report_blocked("'pdf.%s' is not supported","setpagesattributes") end
+-- function pdf.registerannot () report_blocked("'pdf.%s' is not supported","registerannot") end
+
+local function pdfdisablecommand(command)
+ pdf[command] = function() report_blocked("'pdf.%s' is not supported",command) end
+end
+
+pdfdisablecommand("setinfo")
+pdfdisablecommand("setcatalog")
+pdfdisablecommand("setnames")
+pdfdisablecommand("settrailer")
+pdfdisablecommand("setpageresources")
+pdfdisablecommand("setpageattributes")
+pdfdisablecommand("setpagesattributes")
+pdfdisablecommand("registerannot")
local trace_finalizers = false trackers.register("backend.finalizers", function(v) trace_finalizers = v end)
local trace_resources = false trackers.register("backend.resources", function(v) trace_resources = v end)
local trace_objects = false trackers.register("backend.objects", function(v) trace_objects = v end)
local trace_detail = false trackers.register("backend.detail", function(v) trace_detail = v end)
-local report_objects = logs.reporter("backend","objects")
-local report_finalizing = logs.reporter("backend","finalizing")
-
-local backends = backends
-
-backends.pdf = backends.pdf or {
+local backends = backends
+local pdfbackend = {
comment = "backend for directly generating pdf output",
nodeinjections = { },
codeinjections = { },
registrations = { },
tables = { },
}
+backends.pdf = pdfbackend
+lpdf = lpdf or { }
+local lpdf = lpdf
+
+local codeinjections = pdfbackend.codeinjections
+local nodeinjections = pdfbackend.nodeinjections
+
+codeinjections.getpos = pdfgetpos lpdf.getpos = pdfgetpos
+codeinjections.gethpos = pdfgethpos lpdf.gethpos = pdfgethpos
+codeinjections.getvpos = pdfgetvpos lpdf.getvpos = pdfgetvpos
+codeinjections.hasmatrix = pdfhasmatrix lpdf.hasmatrix = pdfhasmatrix
+codeinjections.getmatrix = pdfgetmatrix lpdf.getmatrix = pdfgetmatrix
+
+function lpdf.transform(llx,lly,urx,ury)
+ if pdfhasmatrix() then
+ local sx, rx, ry, sy = pdfgetmatrix()
+ local w, h = urx - llx, ury - lly
+ return llx, lly, llx + sy*w - ry*h, lly + sx*h - rx*w
+ else
+ return llx, lly, urx, ury
+ end
+end
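
-- A quick check of the arithmetic above with a made-up 90 degree rotation matrix
-- (sx,rx,ry,sy = 0,1,-1,0): a 100 x 50 box anchored at the origin gets its far corner
-- mapped onto (50,-100):

do
    local sx, rx, ry, sy     = 0, 1, -1, 0
    local llx, lly, urx, ury = 0, 0, 100, 50
    local w, h = urx - llx, ury - lly
    assert(llx + sy*w - ry*h ==   50)
    assert(lly + sx*h - rx*w == -100)
end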
-lpdf = lpdf or { }
-local lpdf = lpdf
+-- function lpdf.rectangle(width,height,depth)
+-- local h, v = pdfgetpos()
+-- local llx, lly, urx, ury
+-- if pdfhasmatrix() then
+-- local sx, rx, ry, sy = pdfgetmatrix()
+-- llx = 0
+-- lly = -depth
+-- -- llx = ry * depth
+-- -- lly = -sx * depth
+-- urx = sy * width - ry * height
+-- ury = sx * height - rx * width
+-- else
+-- llx = 0
+-- lly = -depth
+-- urx = width
+-- ury = height
+-- return (h+llx)*factor, (v+lly)*factor, (h+urx)*factor, (v+ury)*factor
+-- end
+-- end
-local function tosixteen(str) -- an lpeg might be faster (no table)
- if not str or str == "" then
- return "" -- not () as we want an indication that it's unicode
+function lpdf.rectangle(width,height,depth)
+ local h, v = pdfgetpos()
+ if pdfhasmatrix() then
+ local sx, rx, ry, sy = pdfgetmatrix()
+ -- return (h+ry*depth)*factor, (v-sx*depth)*factor, (h+sy*width-ry*height)*factor, (v+sx*height-rx*width)*factor
+ return h *factor, (v- depth)*factor, (h+sy*width-ry*height)*factor, (v+sx*height-rx*width)*factor
else
- local r, n = { ""
- return concat(r)
+ return h *factor, (v- depth)*factor, (h+ width )*factor, (v+ height )*factor
end
end
-lpdf.tosixteen = tosixteen
-
--- lpeg is some 5 times faster than gsub (in test) on escaping
+-- we could use a hash of predefined unicodes
--- local escapes = {
--- ["\\"] = "\\\\",
--- ["/"] = "\\/", ["#"] = "\\#",
--- ["<"] = "\\<", [">"] = "\\>",
--- ["["] = "\\[", ["]"] = "\\]",
--- ["("] = "\\(", [")"] = "\\)",
--- }
---
--- local escaped = Cs(Cc("(") * (S("\\/#<>[]()")/escapes + P(1))^0 * Cc(")"))
---
--- local function toeight(str)
+-- local function tosixteen(str) -- an lpeg might be faster (no table)
-- if not str or str == "" then
--- return "()"
+-- return "" -- not () as we want an indication that it's unicode
-- else
--- return lpegmatch(escaped,str)
+-- local r, n = { ""
+-- return concat(r)
-- end
-- end
---
--- -- no need for escaping .. just use unicode instead
--- \0 \t \n \r \f ( ) [ ] { } / %
+local cache = table.setmetatableindex(function(t,k) -- can be made weak
+ local v = utfbyte(k)
+ if v < 0x10000 then
+ v = format("%04x",v)
+ else
+ -- v = format("%04x%04x",v/1024+0xD800,v%1024+0xDC00)
+        v = format("%04x%04x",floor(v/1024)+0xD7C0,v%1024+0xDC00) -- 0xD7C0 == 0xD800 - 0x40 absorbs the 0x10000 offset
+ end
+ t[k] = v
+ return v
+end)
+
+local escaped = Cs(Cc("(") * (S("\\()")/"\\%0" + P(1))^0 * Cc(")"))
+local unified = Cs(Cc("<feff") * (lpeg.patterns.utf8character/cache)^1 * Cc(">"))
-local function toeight(str)
- return "(" .. str .. ")"
+local function tosixteen(str) -- an lpeg might be faster (no table)
+ if not str or str == "" then
+        return "<feff>" -- not () as we want an indication that it's unicode
+ else
+ return lpegmatch(unified,str)
+ end
+end
+
+local more = 0
+
+local pattern = C(4) / function(s) -- needs checking !
+ local now = tonumber(s,16)
+ if more > 0 then
+        now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- 0x10000 restores the supplementary plane offset
+ more = 0
+ return utfchar(now)
+ elseif now >= 0xD800 and now <= 0xDBFF then
+ more = now
+ return "" -- else the c's end up in the stream
+ else
+ return utfchar(now)
+ end
end
-lpdf.toeight = toeight
+local pattern = P(true) / function() more = 0 end * Cs(pattern^0)
+
+local function fromsixteen(str)
+ if not str or str == "" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+end
+
+local toregime = regimes.toregime
+local fromregime = regimes.fromregime
+
+local function topdfdoc(str,default)
+ if not str or str == "" then
+ return ""
+ else
+ return lpegmatch(escaped,toregime("pdfdoc",str,default)) -- could be combined if needed
+ end
+end
---~ local escaped = lpeg.Cs((lpeg.S("\0\t\n\r\f ()[]{}/%")/function(s) return format("#%02X",byte(s)) end + lpeg.P(1))^0)
+local function frompdfdoc(str)
+ if not str or str == "" then
+ return ""
+ else
+ return fromregime("pdfdoc",str)
+ end
+end
---~ local function cleaned(str)
---~ return (str and str ~= "" and lpegmatch(escaped,str)) or ""
---~ end
+if not toregime then topdfdoc = function(s) return s end end
+if not fromregime then frompdfdoc = function(s) return s end end
---~ lpdf.cleaned = cleaned -- not public yet
+local function toeight(str)
+ if not str or str == "" then
+ return "()"
+ else
+ return lpegmatch(escaped,str)
+ end
+end
+
+lpdf.tosixteen = tosixteen
+lpdf.toeight = toeight
+lpdf.topdfdoc = topdfdoc
+lpdf.fromsixteen = fromsixteen
+lpdf.frompdfdoc = frompdfdoc
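
-- Some quick checks of the helpers above (plain ascii samples):

do
    assert(toeight("abc")          == "(abc)")
    assert(toeight("a(b)c")        == "(a\\(b\\)c)")
    assert(fromsixteen("00410042") == "AB")
    -- tosixteen("AB") gives the big endian utf16 form "<feff00410042>"
end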
local function merge_t(a,b)
local t = { }
@@ -106,34 +316,44 @@ local function merge_t(a,b)
return setmetatable(t,getmetatable(a))
end
+local f_key_null = formatters["/%s null"]
local f_key_value = formatters["/%s %s"]
local f_key_dictionary = formatters["/%s << % t >>"]
local f_dictionary = formatters["<< % t >>"]
local f_key_array = formatters["/%s [ % t ]"]
local f_array = formatters["[ % t ]"]
+local f_key_number = formatters["/%s %F"]
+local f_tonumber = formatters["%F"]
+
+-- local f_key_value = formatters["/%s %s"]
+-- local f_key_dictionary = formatters["/%s <<% t>>"]
+-- local f_dictionary = formatters["<<% t>>"]
+-- local f_key_array = formatters["/%s [% t]"]
+-- local f_array = formatters["[% t]"]
local tostring_a, tostring_d
tostring_d = function(t,contentonly,key)
- if not next(t) then
- if contentonly then
- return ""
- else
- return "<< >>"
- end
- else
+ if next(t) then
local r, rn = { }, 0
for k, v in next, t do
rn = rn + 1
local tv = type(v)
if tv == "string" then
r[rn] = f_key_value(k,toeight(v))
- elseif tv == "unicode" then
- r[rn] = f_key_value(k,tosixteen(v))
+ elseif tv == "number" then
+ r[rn] = f_key_number(k,v)
+ -- elseif tv == "unicode" then -- can't happen
+ -- r[rn] = f_key_value(k,tosixteen(v))
elseif tv == "table" then
local mv = getmetatable(v)
if mv and mv.__lpdftype then
- r[rn] = f_key_value(k,tostring(v))
+ -- if v == t then
+                        --     report_objects("ignoring circular reference in dictionary")
+ -- r[rn] = f_key_null(k)
+ -- else
+ r[rn] = f_key_value(k,tostring(v))
+ -- end
elseif v[1] then
r[rn] = f_key_value(k,tostring_a(v))
else
@@ -150,31 +370,36 @@ tostring_d = function(t,contentonly,key)
else
return f_dictionary(r)
end
+ elseif contentonly then
+ return ""
+ else
+ return "<< >>"
end
end
tostring_a = function(t,contentonly,key)
local tn = #t
- if tn == 0 then
- if contentonly then
- return ""
- else
- return "[ ]"
- end
- else
+ if tn ~= 0 then
local r = { }
for k=1,tn do
local v = t[k]
local tv = type(v)
if tv == "string" then
r[k] = toeight(v)
- elseif tv == "unicode" then
- r[k] = tosixteen(v)
+ elseif tv == "number" then
+ r[k] = f_tonumber(v)
+ -- elseif tv == "unicode" then
+ -- r[k] = tosixteen(v)
elseif tv == "table" then
local mv = getmetatable(v)
local mt = mv and mv.__lpdftype
if mt then
- r[k] = tostring(v)
+ -- if v == t then
+ -- report_objects("ignoring circular reference in array")
+ -- r[k] = "null"
+ -- else
+ r[k] = tostring(v)
+ -- end
elseif v[1] then
r[k] = tostring_a(v)
else
@@ -191,40 +416,47 @@ tostring_a = function(t,contentonly,key)
else
return f_array(r)
end
+ elseif contentonly then
+ return ""
+ else
+ return "[ ]"
end
end
-local tostring_x = function(t) return concat(t, " ") end
-local tostring_s = function(t) return toeight(t[1]) end
-local tostring_u = function(t) return tosixteen(t[1]) end
-local tostring_n = function(t) return tostring(t[1]) end -- tostring not needed
-local tostring_c = function(t) return t[1] end -- already prefixed (hashed)
-local tostring_z = function() return "null" end
-local tostring_t = function() return "true" end
-local tostring_f = function() return "false" end
-local tostring_r = function(t) local n = t[1] return n and n > 0 and (n .. " 0 R") or "NULL" end
+local tostring_x = function(t) return concat(t," ") end
+local tostring_s = function(t) return toeight(t[1]) end
+local tostring_p = function(t) return topdfdoc(t[1],t[2]) end
+local tostring_u = function(t) return tosixteen(t[1]) end
+----- tostring_n = function(t) return tostring(t[1]) end -- tostring not needed
+local tostring_n = function(t) return f_tonumber(t[1]) end -- tostring not needed
+local tostring_c = function(t) return t[1] end -- already prefixed (hashed)
+local tostring_z = function() return "null" end
+local tostring_t = function() return "true" end
+local tostring_f = function() return "false" end
+local tostring_r = function(t) local n = t[1] return n and n > 0 and (n .. " 0 R") or "null" end
local tostring_v = function(t)
local s = t[1]
if type(s) == "table" then
- return concat(s,"")
+ return concat(s)
else
return s
end
end
-local function value_x(t) return t end -- the call is experimental
-local function value_s(t,key) return t[1] end -- the call is experimental
-local function value_u(t,key) return t[1] end -- the call is experimental
-local function value_n(t,key) return t[1] end -- the call is experimental
-local function value_c(t) return sub(t[1],2) end -- the call is experimental
-local function value_d(t) return tostring_d(t,true) end -- the call is experimental
-local function value_a(t) return tostring_a(t,true) end -- the call is experimental
-local function value_z() return nil end -- the call is experimental
-local function value_t(t) return t.value or true end -- the call is experimental
-local function value_f(t) return t.value or false end -- the call is experimental
-local function value_r() return t[1] or 0 end -- the call is experimental -- NULL
-local function value_v() return t[1] end -- the call is experimental
+local function value_x(t) return t end
+local function value_s(t) return t[1] end
+local function value_p(t) return t[1] end
+local function value_u(t) return t[1] end
+local function value_n(t) return t[1] end
+local function value_c(t) return sub(t[1],2) end
+local function value_d(t) return tostring_d(t,true) end
+local function value_a(t) return tostring_a(t,true) end
+local function value_z() return nil end
+local function value_t(t) return t.value or true end
+local function value_f(t) return t.value or false end
+local function value_r(t) return t[1] or 0 end -- null
+local function value_v(t) return t[1] end
local function add_x(t,k,v) rawset(t,k,tostring(v)) end
@@ -233,6 +465,7 @@ local mt_d = { __lpdftype = "dictionary", __tostring = tostring_d, __call = valu
local mt_a = { __lpdftype = "array", __tostring = tostring_a, __call = value_a }
local mt_u = { __lpdftype = "unicode", __tostring = tostring_u, __call = value_u }
local mt_s = { __lpdftype = "string", __tostring = tostring_s, __call = value_s }
+local mt_p = { __lpdftype = "docstring", __tostring = tostring_p, __call = value_p }
local mt_n = { __lpdftype = "number", __tostring = tostring_n, __call = value_n }
local mt_c = { __lpdftype = "constant", __tostring = tostring_c, __call = value_c }
local mt_z = { __lpdftype = "null", __tostring = tostring_z, __call = value_z }
@@ -266,8 +499,12 @@ local function pdfstring(str,default)
return setmetatable({ str or default or "" },mt_s)
end
+local function pdfdocstring(str,default,defaultchar)
+ return setmetatable({ str or default or "", defaultchar or " " },mt_p)
+end
+
local function pdfunicode(str,default)
- return setmetatable({ str or default or "" },mt_u)
+ return setmetatable({ str or default or "" },mt_u) -- could be a string
end
local cache = { } -- can be weak
@@ -325,17 +562,33 @@ local function pdfboolean(b,default)
end
end
-local function pdfreference(r)
- return setmetatable({ r or 0 },mt_r)
+local r_zero = setmetatable({ 0 },mt_r)
+
+local function pdfreference(r) -- maybe make a weak table
+ if r and r ~= 0 then
+ return setmetatable({ r },mt_r)
+ else
+ return r_zero
+ end
end
+local v_zero = setmetatable({ 0 },mt_v)
+local v_empty = setmetatable({ "" },mt_v)
+
local function pdfverbose(t) -- maybe check for type
- return setmetatable({ t or "" },mt_v)
+ if t == 0 then
+ return v_zero
+ elseif t == "" then
+ return v_empty
+ else
+ return setmetatable({ t },mt_v)
+ end
end
lpdf.stream = pdfstream -- THIS WILL PROBABLY CHANGE
lpdf.dictionary = pdfdictionary
lpdf.array = pdfarray
+lpdf.docstring = pdfdocstring
lpdf.string = pdfstring
lpdf.unicode = pdfunicode
lpdf.number = pdfnumber
@@ -345,37 +598,19 @@ lpdf.boolean = pdfboolean
lpdf.reference = pdfreference
lpdf.verbose = pdfverbose
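
-- A small usage sketch with made-up values (dictionary key order is not defined):

do
    local d = pdfdictionary {
        Type  = pdfconstant("Example"),
        Count = 3,
        Title = pdfunicode("an example"),
        Kids  = pdfarray { pdfreference(12), pdfreference(13) },
    }
    -- tostring(d) serializes to something like:
    -- << /Type /Example /Count 3 /Title <feff...> /Kids [ 12 0 R 13 0 R ] >>
end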
--- n = pdf.obj(n, str)
--- n = pdf.obj(n, "file", filename)
--- n = pdf.obj(n, "stream", streamtext, attrtext)
--- n = pdf.obj(n, "streamfile", filename, attrtext)
-
--- we only use immediate objects
-
--- todo: tracing
-
local names, cache = { }, { }
function lpdf.reserveobject(name)
- if name == "annot" then
- -- catch misuse
- return pdfreserveobject("annot")
- else
- local r = pdfreserveobject()
- if name then
- names[name] = r
- if trace_objects then
- report_objects("reserving number %a under name %a",r,name)
- end
- elseif trace_objects then
- report_objects("reserving number %a",r)
+ local r = pdfreserveobject() -- we don't support "annot"
+ if name then
+ names[name] = r
+ if trace_objects then
+ report_objects("reserving number %a under name %a",r,name)
end
- return r
+ elseif trace_objects then
+ report_objects("reserving number %a",r)
end
-end
-
-function lpdf.reserveannotation()
- return pdfreserveobject("annot")
+ return r
end
-- lpdf.immediateobject = pdfimmediateobject
@@ -383,11 +618,29 @@ end
-- lpdf.object = pdfdeferredobject
-- lpdf.referenceobject = pdfreferenceobject
-lpdf.pagereference = pdf.pageref or tex.pdfpageref
-lpdf.registerannotation = pdf.registerannot
+local pagereference = pdf.pageref -- tex.pdfpageref is obsolete
+local nofpages = 0
+
+function lpdf.pagereference(n)
+ if nofpages == 0 then
+ nofpages = structures.pages.nofpages
+ if nofpages == 0 then
+ nofpages = 1
+ end
+ end
+ if n > nofpages then
+        return pagereference(nofpages) -- or 1, could be configurable
+ else
+ return pagereference(n)
+ end
+end
-function lpdf.delayedobject(data) -- we will get rid of this one
- local n = pdfdeferredobject(data)
+function lpdf.delayedobject(data,n)
+ if n then
+ pdfdeferredobject(n,data)
+ else
+ n = pdfdeferredobject(data)
+ end
pdfreferenceobject(n)
return n
end
@@ -484,60 +737,10 @@ function lpdf.shareobjectreference(content)
end
end
---~ local d = lpdf.dictionary()
---~ local e = lpdf.dictionary { ["e"] = "abc", x = lpdf.dictionary { ["f"] = "ABC" } }
---~ local f = lpdf.dictionary { ["f"] = "ABC" }
---~ local a = lpdf.array { lpdf.array { lpdf.string("xxx") } }
-
---~ print(a)
---~ os.exit()
-
---~ d["test"] = lpdf.string ("test")
---~ d["more"] = "more"
---~ d["bool"] = true
---~ d["numb"] = 1234
---~ d["oeps"] = lpdf.dictionary { ["hans"] = "ton" }
---~ d["whow"] = lpdf.array { lpdf.string("ton") }
-
---~ a[#a+1] = lpdf.string("xxx")
---~ a[#a+1] = lpdf.string("yyy")
-
---~ d.what = a
-
---~ print(e)
-
---~ local d = lpdf.dictionary()
---~ d["abcd"] = { 1, 2, 3, "test" }
---~ print(d)
---~ print(d())
-
---~ local d = lpdf.array()
---~ d[#d+1] = 1
---~ d[#d+1] = 2
---~ d[#d+1] = 3
---~ d[#d+1] = "test"
---~ print(d)
-
---~ local d = lpdf.array()
---~ d[#d+1] = { 1, 2, 3, "test" }
---~ print(d)
-
---~ local d = lpdf.array()
---~ d[#d+1] = { a=1, b=2, c=3, d="test" }
---~ print(d)
-
---~ local s = lpdf.constant("xx")
---~ print(s) -- fails somehow
---~ print(s()) -- fails somehow
-
---~ local s = lpdf.boolean(false)
---~ s.value = true
---~ print(s)
---~ print(s())
-
-- three priority levels, default=2
-local pagefinalizers, documentfinalizers = { { }, { }, { } }, { { }, { }, { } }
+local pagefinalizers = { { }, { }, { } }
+local documentfinalizers = { { }, { }, { } }
local pageresources, pageattributes, pagesattributes
@@ -550,9 +753,9 @@ end
resetpageproperties()
local function setpageproperties()
- pdf.pageresources = pageresources ()
- pdf.pageattributes = pageattributes ()
- pdf.pagesattributes = pagesattributes()
+ pdfsetpageresources (pageresources ())
+ pdfsetpageattributes (pageattributes ())
+ pdfsetpagesattributes(pagesattributes())
end
local function addtopageresources (k,v) pageresources [k] = v end
@@ -606,8 +809,8 @@ end
lpdf.registerpagefinalizer = registerpagefinalizer
lpdf.registerdocumentfinalizer = registerdocumentfinalizer
-function lpdf.finalizepage()
- if not environment.initex then
+function lpdf.finalizepage(shipout)
+ if shipout and not environment.initex then
-- resetpageproperties() -- maybe better before
run(pagefinalizers,"page")
setpageproperties()
@@ -625,152 +828,252 @@ function lpdf.finalizedocument()
end
end
-backends.pdf.codeinjections.finalizepage = lpdf.finalizepage -- will go when we have hook
+-- codeinjections.finalizepage = lpdf.finalizepage -- no longer triggered at the tex end
---~ callbacks.register("finish_pdfpage", lpdf.finalizepage)
-callbacks.register("finish_pdffile", lpdf.finalizedocument)
+if not callbacks.register("finish_pdfpage", lpdf.finalizepage) then
--- some minimal tracing, handy for checking the order
+ local find_tail = nodes.tail
+ local latelua_node = nodes.pool.latelua
-local function trace_set(what,key)
- if trace_resources then
- report_finalizing("setting key %a in %a",key,what)
+ function nodeinjections.finalizepage(head)
+ local t = find_tail(head.list)
+ if t then
+ local n = latelua_node("lpdf.finalizepage(true)") -- last in the shipout
+ t.next = n
+ n.prev = t
+ end
+ return head, true
end
+
+ nodes.tasks.appendaction("shipouts","normalizers","backends.pdf.nodeinjections.finalizepage")
+
end
-local function trace_flush(what)
- if trace_resources then
- report_finalizing("flushing %a",what)
+
+callbacks.register("finish_pdffile", lpdf.finalizedocument)
+
+
+do
+
+ -- some minimal tracing, handy for checking the order
+
+ local function trace_set(what,key)
+ if trace_resources then
+ report_finalizing("setting key %a in %a",key,what)
+ end
end
-end
-lpdf.protectresources = true
+ local function trace_flush(what)
+ if trace_resources then
+ report_finalizing("flushing %a",what)
+ end
+ end
-local catalog = pdfdictionary { Type = pdfconstant("Catalog") } -- nicer, but when we assign we nil the Type
-local info = pdfdictionary { Type = pdfconstant("Info") } -- nicer, but when we assign we nil the Type
-local names = pdfdictionary { Type = pdfconstant("Names") } -- nicer, but when we assign we nil the Type
+ lpdf.protectresources = true
-local function flushcatalog() if not environment.initex then trace_flush("catalog") catalog.Type = nil pdf.catalog = catalog() end end
-local function flushinfo () if not environment.initex then trace_flush("info") info .Type = nil pdf.info = info () end end
-local function flushnames () if not environment.initex then trace_flush("names") names .Type = nil pdf.names = names () end end
+ local catalog = pdfdictionary { Type = pdfconstant("Catalog") } -- nicer, but when we assign we nil the Type
+ local info = pdfdictionary { Type = pdfconstant("Info") } -- nicer, but when we assign we nil the Type
+ ----- names = pdfdictionary { Type = pdfconstant("Names") } -- nicer, but when we assign we nil the Type
-function lpdf.addtocatalog(k,v) if not (lpdf.protectresources and catalog[k]) then trace_set("catalog",k) catalog[k] = v end end
-function lpdf.addtoinfo (k,v) if not (lpdf.protectresources and info [k]) then trace_set("info", k) info [k] = v end end
-function lpdf.addtonames (k,v) if not (lpdf.protectresources and names [k]) then trace_set("names", k) names [k] = v end end
+ local function flushcatalog()
+ if not environment.initex then
+ trace_flush("catalog")
+ catalog.Type = nil
+ pdfsetcatalog(catalog())
+ end
+ end
-local dummy = pdfreserveobject() -- else bug in hvmd due so some internal luatex conflict
+ local function flushinfo()
+ if not environment.initex then
+ trace_flush("info")
+ info.Type = nil
+ pdfsetinfo(info())
+ end
+ end
--- Some day I will implement a proper minimalized resource management.
+ -- local function flushnames()
+ -- if not environment.initex then
+ -- trace_flush("names")
+ -- names.Type = nil
+ -- pdfsetnames(names())
+ -- end
+ -- end
+
+ function lpdf.addtocatalog(k,v)
+ if not (lpdf.protectresources and catalog[k]) then
+ trace_set("catalog",k)
+ catalog[k] = v
+ end
+ end
-local r_extgstates, d_extgstates = pdfreserveobject(), pdfdictionary() local p_extgstates = pdfreference(r_extgstates)
-local r_colorspaces, d_colorspaces = pdfreserveobject(), pdfdictionary() local p_colorspaces = pdfreference(r_colorspaces)
-local r_patterns, d_patterns = pdfreserveobject(), pdfdictionary() local p_patterns = pdfreference(r_patterns)
-local r_shades, d_shades = pdfreserveobject(), pdfdictionary() local p_shades = pdfreference(r_shades)
+ function lpdf.addtoinfo(k,v)
+ if not (lpdf.protectresources and info[k]) then
+ trace_set("info",k)
+ info[k] = v
+ end
+ end
-local function checkextgstates () if next(d_extgstates ) then addtopageresources("ExtGState", p_extgstates ) end end
-local function checkcolorspaces() if next(d_colorspaces) then addtopageresources("ColorSpace",p_colorspaces) end end
-local function checkpatterns () if next(d_patterns ) then addtopageresources("Pattern", p_patterns ) end end
-local function checkshades () if next(d_shades ) then addtopageresources("Shading", p_shades ) end end
+ -- local function lpdf.addtonames(k,v)
+ -- if not (lpdf.protectresources and names[k]) then
+ -- trace_set("names",k)
+ -- names[k] = v
+ -- end
+ -- end
-local function flushextgstates () if next(d_extgstates ) then trace_flush("extgstates") pdfimmediateobject(r_extgstates, tostring(d_extgstates )) end end
-local function flushcolorspaces() if next(d_colorspaces) then trace_flush("colorspaces") pdfimmediateobject(r_colorspaces,tostring(d_colorspaces)) end end
-local function flushpatterns () if next(d_patterns ) then trace_flush("patterns") pdfimmediateobject(r_patterns, tostring(d_patterns )) end end
-local function flushshades () if next(d_shades ) then trace_flush("shades") pdfimmediateobject(r_shades, tostring(d_shades )) end end
+ local names = pdfdictionary {
+ -- Type = pdfconstant("Names")
+ }
-function lpdf.collectedresources()
- local ExtGState = next(d_extgstates ) and p_extgstates
- local ColorSpace = next(d_colorspaces) and p_colorspaces
- local Pattern = next(d_patterns ) and p_patterns
- local Shading = next(d_shades ) and p_shades
- if ExtGState or ColorSpace or Pattern or Shading then
- local collected = pdfdictionary {
- ExtGState = ExtGState,
- ColorSpace = ColorSpace,
- Pattern = Pattern,
- Shading = Shading,
- -- ProcSet = pdfarray { pdfconstant("PDF") },
- }
- return collected()
- else
- return ""
+ local function flushnames()
+ if next(names) and not environment.initex then
+ names.Type = pdfconstant("Names")
+ trace_flush("names")
+ lpdf.addtocatalog("Names",pdfreference(pdfimmediateobject(tostring(names))))
+ end
+ end
+
+ function lpdf.addtonames(k,v)
+ if not (lpdf.protectresources and names[k]) then
+ trace_set("names", k)
+ names [k] = v
+ end
+ end
+
+ local r_extgstates, d_extgstates = pdfreserveobject(), pdfdictionary() local p_extgstates = pdfreference(r_extgstates)
+ local r_colorspaces, d_colorspaces = pdfreserveobject(), pdfdictionary() local p_colorspaces = pdfreference(r_colorspaces)
+ local r_patterns, d_patterns = pdfreserveobject(), pdfdictionary() local p_patterns = pdfreference(r_patterns)
+ local r_shades, d_shades = pdfreserveobject(), pdfdictionary() local p_shades = pdfreference(r_shades)
+
+ local function checkextgstates () if next(d_extgstates ) then addtopageresources("ExtGState", p_extgstates ) end end
+ local function checkcolorspaces() if next(d_colorspaces) then addtopageresources("ColorSpace",p_colorspaces) end end
+ local function checkpatterns () if next(d_patterns ) then addtopageresources("Pattern", p_patterns ) end end
+ local function checkshades () if next(d_shades ) then addtopageresources("Shading", p_shades ) end end
+
+ local function flushextgstates () if next(d_extgstates ) then trace_flush("extgstates") pdfimmediateobject(r_extgstates, tostring(d_extgstates )) end end
+ local function flushcolorspaces() if next(d_colorspaces) then trace_flush("colorspaces") pdfimmediateobject(r_colorspaces,tostring(d_colorspaces)) end end
+ local function flushpatterns () if next(d_patterns ) then trace_flush("patterns") pdfimmediateobject(r_patterns, tostring(d_patterns )) end end
+ local function flushshades () if next(d_shades ) then trace_flush("shades") pdfimmediateobject(r_shades, tostring(d_shades )) end end
+
+ function lpdf.collectedresources()
+ local ExtGState = next(d_extgstates ) and p_extgstates
+ local ColorSpace = next(d_colorspaces) and p_colorspaces
+ local Pattern = next(d_patterns ) and p_patterns
+ local Shading = next(d_shades ) and p_shades
+ if ExtGState or ColorSpace or Pattern or Shading then
+ local collected = pdfdictionary {
+ ExtGState = ExtGState,
+ ColorSpace = ColorSpace,
+ Pattern = Pattern,
+ Shading = Shading,
+ -- ProcSet = pdfarray { pdfconstant("PDF") },
+ }
+ return collected()
+ else
+ return ""
+ end
end
-end
-function lpdf.adddocumentextgstate (k,v) d_extgstates [k] = v end
-function lpdf.adddocumentcolorspace(k,v) d_colorspaces[k] = v end
-function lpdf.adddocumentpattern (k,v) d_patterns [k] = v end
-function lpdf.adddocumentshade (k,v) d_shades [k] = v end
+ function lpdf.adddocumentextgstate (k,v) d_extgstates [k] = v end
+ function lpdf.adddocumentcolorspace(k,v) d_colorspaces[k] = v end
+ function lpdf.adddocumentpattern (k,v) d_patterns [k] = v end
+ function lpdf.adddocumentshade (k,v) d_shades [k] = v end
-registerdocumentfinalizer(flushextgstates,3,"extended graphic states")
-registerdocumentfinalizer(flushcolorspaces,3,"color spaces")
-registerdocumentfinalizer(flushpatterns,3,"patterns")
-registerdocumentfinalizer(flushshades,3,"shades")
+ registerdocumentfinalizer(flushextgstates,3,"extended graphic states")
+ registerdocumentfinalizer(flushcolorspaces,3,"color spaces")
+ registerdocumentfinalizer(flushpatterns,3,"patterns")
+ registerdocumentfinalizer(flushshades,3,"shades")
-registerdocumentfinalizer(flushcatalog,3,"catalog")
-registerdocumentfinalizer(flushinfo,3,"info")
-registerdocumentfinalizer(flushnames,3,"names") -- before catalog
+ registerdocumentfinalizer(flushnames,3,"names") -- before catalog
+ registerdocumentfinalizer(flushcatalog,3,"catalog")
+ registerdocumentfinalizer(flushinfo,3,"info")
-registerpagefinalizer(checkextgstates,3,"extended graphic states")
-registerpagefinalizer(checkcolorspaces,3,"color spaces")
-registerpagefinalizer(checkpatterns,3,"patterns")
-registerpagefinalizer(checkshades,3,"shades")
+ registerpagefinalizer(checkextgstates,3,"extended graphic states")
+ registerpagefinalizer(checkcolorspaces,3,"color spaces")
+ registerpagefinalizer(checkpatterns,3,"patterns")
+ registerpagefinalizer(checkshades,3,"shades")
+
+end
-- in strc-bkm: lpdf.registerdocumentfinalizer(function() structures.bookmarks.place() end,1)
function lpdf.rotationcm(a)
local s, c = sind(a), cosd(a)
- return format("%0.6f %0.6f %0.6f %0.6f 0 0 cm",c,s,-s,c)
+ return format("%0.6F %0.6F %0.6F %0.6F 0 0 cm",c,s,-s,c)
end
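
-- For example, a 90 degree rotation (sind and cosd work in degrees):
--
--   lpdf.rotationcm(90) -- "0.000000 1.000000 -1.000000 0.000000 0 0 cm"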
-- ! -> universaltime
-local timestamp = os.date("%Y-%m-%dT%X") .. os.timezone(true)
+do
-function lpdf.timestamp()
- return timestamp
-end
+ local timestamp = os.date("%Y-%m-%dT%X") .. os.timezone(true)
-function lpdf.pdftimestamp(str)
- local Y, M, D, h, m, s, Zs, Zh, Zm = match(str,"^(%d%d%d%d)%-(%d%d)%-(%d%d)T(%d%d):(%d%d):(%d%d)([%+%-])(%d%d):(%d%d)$")
- return Y and format("D:%s%s%s%s%s%s%s%s'%s'",Y,M,D,h,m,s,Zs,Zh,Zm)
-end
+ function lpdf.timestamp()
+ return timestamp
+ end
+
+ function lpdf.pdftimestamp(str)
+ local Y, M, D, h, m, s, Zs, Zh, Zm = match(str,"^(%d%d%d%d)%-(%d%d)%-(%d%d)T(%d%d):(%d%d):(%d%d)([%+%-])(%d%d):(%d%d)$")
+ return Y and format("D:%s%s%s%s%s%s%s%s'%s'",Y,M,D,h,m,s,Zs,Zh,Zm)
+ end
+
+ function lpdf.id()
+ return format("%s.%s",tex.jobname,timestamp)
+ end
-function lpdf.id()
- return format("%s.%s",tex.jobname,timestamp)
end
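
-- A quick check with a made-up timestamp:

do
    assert(lpdf.pdftimestamp("2014-03-04T14:42:17+01:00") == "D:20140304144217+01'00'")
end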
+-- return nil is nicer in test prints
+
function lpdf.checkedkey(t,key,variant)
local pn = t and t[key]
- if pn then
+ if pn ~= nil then
local tn = type(pn)
if tn == variant then
if variant == "string" then
- return pn ~= "" and pn or nil
+ if pn ~= "" then
+ return pn
+ end
elseif variant == "table" then
- return next(pn) and pn or nil
+ if next(pn) then
+ return pn
+ end
else
return pn
end
- elseif tn == "string" and variant == "number" then
- return tonumber(pn)
+ elseif tn == "string" then
+ if variant == "number" then
+ return tonumber(pn)
+ elseif variant == "boolean" then
+ return isboolean(pn,nil,true)
+ end
end
end
+ -- return nil
end
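
-- Typical use; the specification table is a made-up sample:

do
    local spec = { width = "120", open = "true", title = "" }
    assert(lpdf.checkedkey(spec,"width","number") == 120)
    assert(lpdf.checkedkey(spec,"open","boolean") == true)
    assert(lpdf.checkedkey(spec,"title","string") == nil)
end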
function lpdf.checkedvalue(value,variant) -- code not shared
- if value then
+ if value ~= nil then
local tv = type(value)
if tv == variant then
if variant == "string" then
- return value ~= "" and value
+ if value ~= "" then
+ return value
+ end
elseif variant == "table" then
- return next(value) and value
+ if next(value) then
+ return value
+ end
else
return value
end
- elseif tv == "string" and variant == "number" then
- return tonumber(value)
+ elseif tv == "string" then
+ if variant == "number" then
+ return tonumber(value)
+ elseif variant == "boolean" then
+ return isboolean(value,nil,true)
+ end
end
end
+ -- return nil
end
function lpdf.limited(n,min,max,default)
@@ -790,34 +1093,121 @@ function lpdf.limited(n,min,max,default)
end
end
--- lpdf.addtoinfo("ConTeXt.Version", tex.contextversiontoks)
+-- lpdf.addtoinfo("ConTeXt.Version", environment.version)
-- lpdf.addtoinfo("ConTeXt.Time", os.date("%Y.%m.%d %H:%M")) -- :%S
-- lpdf.addtoinfo("ConTeXt.Jobname", environment.jobname)
-- lpdf.addtoinfo("ConTeXt.Url", "www.pragma-ade.com")
+-- lpdf.addtoinfo("ConTeXt.Support", "contextgarden.net")
-if not pdfreferenceobject then
-
- local delayed = { }
+-- if not pdfreferenceobject then
+--
+-- local delayed = { }
+--
+-- local function flush()
+-- local n = 0
+-- for k,v in next, delayed do
+-- pdfimmediateobject(k,v)
+-- n = n + 1
+-- end
+-- if trace_objects then
+-- report_objects("%s objects flushed",n)
+-- end
+-- delayed = { }
+-- end
+--
+-- lpdf.registerdocumentfinalizer(flush,3,"objects") -- so we need a final flush too
+-- lpdf.registerpagefinalizer (flush,3,"objects") -- somehow this lags behind .. I need to look into that some day
+--
+-- function lpdf.delayedobject(data)
+-- local n = pdfreserveobject()
+-- delayed[n] = data
+-- return n
+-- end
+--
+-- end
- local function flush()
- local n = 0
- for k,v in next, delayed do
- pdfimmediateobject(k,v)
- n = n + 1
- end
- if trace_objects then
- report_objects("%s objects flushed",n)
+-- setmetatable(pdf, {
+-- __index = function(t,k)
+-- if k == "info" then return pdf.getinfo()
+-- elseif k == "catalog" then return pdf.getcatalog()
+-- elseif k == "names" then return pdf.getnames()
+-- elseif k == "trailer" then return pdf.gettrailer()
+-- elseif k == "pageattribute" then return pdf.getpageattribute()
+-- elseif k == "pageattributes" then return pdf.getpageattributes()
+-- elseif k == "pageresources" then return pdf.getpageresources()
+--         else
+-- return nil
+-- end
+-- end,
+-- __newindex = function(t,k,v)
+-- if k == "info" then return pdf.setinfo(v)
+-- elseif k == "catalog" then return pdf.setcatalog(v)
+-- elseif k == "names" then return pdf.setnames(v)
+-- elseif k == "trailer" then return pdf.settrailer(v)
+-- elseif k == "pageattribute" then return pdf.setpageattribute(v)
+-- elseif k == "pageattributes" then return pdf.setpageattributes(v)
+-- elseif k == "pageresources" then return pdf.setpageresources(v)
+-- else
+-- rawset(t,k,v)
+-- end
+-- end,
+-- })
+
+
+-- The next variant of ActualText is what Taco and I could come up with
+-- eventually. As of September 2013 Acrobat copies okay, Sumatra copies a
+-- question mark, pdftotext injects an extra space and Okular adds a
+-- newline plus space.
+
+-- return formatters["BT /Span << /ActualText (CONTEXT) >> BDC [] TJ % t EMC ET"](code)
+
+do
+
+    local f_actual_text_one = formatters["BT /Span << /ActualText <feff%04x> >> BDC [] TJ %s EMC ET"]
+    local f_actual_text_two = formatters["BT /Span << /ActualText <feff%04x%04x> >> BDC [] TJ %s EMC ET"]
+    local f_actual_text     = formatters["/Span <</ActualText %s>> BDC"]
+
+ local context = context
+ local pdfdirect = nodes.pool.pdfdirect
+
+ function codeinjections.unicodetoactualtext(unicode,pdfcode)
+ if unicode < 0x10000 then
+ return f_actual_text_one(unicode,pdfcode)
+ else
+ return f_actual_text_two(unicode/1024+0xD800,unicode%1024+0xDC00,pdfcode)
end
- delayed = { }
end
- lpdf.registerdocumentfinalizer(flush,3,"objects") -- so we need a final flush too
- lpdf.registerpagefinalizer (flush,3,"objects") -- somehow this lags behind .. I need to look into that some day
+ implement {
+ name = "startactualtext",
+ arguments = "string",
+ actions = function(str)
+ context(pdfdirect(f_actual_text(tosixteen(str))))
+ end
+ }
- function lpdf.delayedobject(data)
- local n = pdfreserveobject()
- delayed[n] = data
- return n
- end
+ implement {
+ name = "stopactualtext",
+ actions = function()
+ context(pdfdirect("EMC"))
+ end
+ }
end
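
-- For instance, for U+0041 the injected wrapper reads (the glyph stream itself is
-- abbreviated here):
--
--   BT /Span << /ActualText <feff0041> >> BDC [] TJ ... EMC ET
--
-- while a character outside the basic multilingual plane ends up as a surrogate pair
-- after /ActualText.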
+
+-- interface
+
+local lpdfverbose = lpdf.verbose
+
+implement { name = "lpdf_collectedresources", actions = { lpdf.collectedresources, context } }
+implement { name = "lpdf_addtocatalog", arguments = two_strings, actions = lpdf.addtocatalog }
+implement { name = "lpdf_addtoinfo", arguments = two_strings, actions = lpdf.addtoinfo }
+implement { name = "lpdf_addtonames", arguments = two_strings, actions = lpdf.addtonames }
+implement { name = "lpdf_addpageattributes", arguments = two_strings, actions = lpdf.addtopageattributes }
+implement { name = "lpdf_addpagesattributes", arguments = two_strings, actions = lpdf.addtopagesattributes }
+implement { name = "lpdf_addpageresources", arguments = two_strings, actions = lpdf.addtopageresources }
+implement { name = "lpdf_adddocumentextgstate", arguments = two_strings, actions = function(a,b) lpdf.adddocumentextgstate (a,lpdfverbose(b)) end }
+implement { name = "lpdf_adddocumentcolorspace", arguments = two_strings, actions = function(a,b) lpdf.adddocumentcolorspace(a,lpdfverbose(b)) end }
+implement { name = "lpdf_adddocumentpattern", arguments = two_strings, actions = function(a,b) lpdf.adddocumentpattern (a,lpdfverbose(b)) end }
+implement { name = "lpdf_adddocumentshade", arguments = two_strings, actions = function(a,b) lpdf.adddocumentshade (a,lpdfverbose(b)) end }
+
diff --git a/tex/context/base/lpdf-mis.lua b/tex/context/base/lpdf-mis.lua
index 174d17427..a1b12d8c0 100644
--- a/tex/context/base/lpdf-mis.lua
+++ b/tex/context/base/lpdf-mis.lua
@@ -16,7 +16,7 @@ if not modules then modules = { } end modules ['lpdf-mis'] = {
-- course there are a couple of more changes.
local next, tostring = next, tostring
-local format, gsub = string.format, string.gsub
+local format, gsub, formatters = string.format, string.gsub, string.formatters
local texset = tex.set
local backends, lpdf, nodes = backends, lpdf, nodes
@@ -41,8 +41,17 @@ local pdfverbose = lpdf.verbose
local pdfstring = lpdf.string
local pdfflushobject = lpdf.flushobject
local pdfflushstreamobject = lpdf.flushstreamobject
+local pdfaction = lpdf.action
+
+local formattedtimestamp = lpdf.pdftimestamp
+local adddocumentextgstate = lpdf.adddocumentextgstate
+local addtocatalog = lpdf.addtocatalog
+local addtoinfo = lpdf.addtoinfo
+local addtopageattributes = lpdf.addtopageattributes
+local addtonames = lpdf.addtonames
local variables = interfaces.variables
+local v_stop = variables.stop
local positive = register(pdfliteral("/GSpositive gs"))
local negative = register(pdfliteral("/GSnegative gs"))
@@ -59,8 +68,8 @@ local function initializenegative()
}
local negative = pdfdictionary { Type = g, TR = pdfreference(pdfflushstreamobject("{ 1 exch sub }",d)) }
local positive = pdfdictionary { Type = g, TR = pdfconstant("Identity") }
- lpdf.adddocumentextgstate("GSnegative", pdfreference(pdfflushobject(negative)))
- lpdf.adddocumentextgstate("GSpositive", pdfreference(pdfflushobject(positive)))
+ adddocumentextgstate("GSnegative", pdfreference(pdfflushobject(negative)))
+ adddocumentextgstate("GSpositive", pdfreference(pdfflushobject(positive)))
initializenegative = nil
end
@@ -68,8 +77,8 @@ local function initializeoverprint()
local g = pdfconstant("ExtGState")
local knockout = pdfdictionary { Type = g, OP = false, OPM = 0 }
local overprint = pdfdictionary { Type = g, OP = true, OPM = 1 }
- lpdf.adddocumentextgstate("GSknockout", pdfreference(pdfflushobject(knockout)))
- lpdf.adddocumentextgstate("GSoverprint", pdfreference(pdfflushobject(overprint)))
+ adddocumentextgstate("GSknockout", pdfreference(pdfflushobject(knockout)))
+ adddocumentextgstate("GSoverprint", pdfreference(pdfflushobject(overprint)))
initializeoverprint = nil
end
@@ -91,8 +100,6 @@ function nodeinjections.negative()
return copy_node(negative)
end
---
-
-- function codeinjections.addtransparencygroup()
-- -- png: /CS /DeviceRGB /I true
-- local d = pdfdictionary {
@@ -100,7 +107,7 @@ end
-- I = true,
-- K = true,
-- }
--- lpdf.registerpagefinalizer(function() lpdf.addtopageattributes("Group",d) end) -- hm
+-- lpdf.registerpagefinalizer(function() addtopageattributes("Group",d) end) -- hm
-- end
-- actions (todo: store and update when changed)
@@ -125,10 +132,10 @@ end
local function flushdocumentactions()
if opendocument then
- lpdf.addtocatalog("OpenAction",lpdf.action(opendocument))
+ addtocatalog("OpenAction",pdfaction(opendocument))
end
if closedocument then
- lpdf.addtocatalog("CloseAction",lpdf.action(closedocument))
+ addtocatalog("CloseAction",pdfaction(closedocument))
end
end
@@ -136,12 +143,12 @@ local function flushpageactions()
if openpage or closepage then
local d = pdfdictionary()
if openpage then
- d.O = lpdf.action(openpage)
+ d.O = pdfaction(openpage)
end
if closepage then
- d.C = lpdf.action(closepage)
+ d.C = pdfaction(closepage)
end
- lpdf.addtopageattributes("AA",d)
+ addtopageattributes("AA",d)
end
end
@@ -168,37 +175,37 @@ local function setupidentity()
if not title or title == "" then
title = tex.jobname
end
- lpdf.addtoinfo("Title", pdfunicode(title), title)
+ addtoinfo("Title", pdfunicode(title), title)
local subtitle = identity.subtitle or ""
if subtitle ~= "" then
- lpdf.addtoinfo("Subject", pdfunicode(subtitle), subtitle)
+ addtoinfo("Subject", pdfunicode(subtitle), subtitle)
end
local author = identity.author or ""
if author ~= "" then
- lpdf.addtoinfo("Author", pdfunicode(author), author) -- '/Author' in /Info, 'Creator' in XMP
+ addtoinfo("Author", pdfunicode(author), author) -- '/Author' in /Info, 'Creator' in XMP
end
local creator = identity.creator or ""
if creator ~= "" then
- lpdf.addtoinfo("Creator", pdfunicode(creator), creator) -- '/Creator' in /Info, 'CreatorTool' in XMP
+ addtoinfo("Creator", pdfunicode(creator), creator) -- '/Creator' in /Info, 'CreatorTool' in XMP
end
- lpdf.addtoinfo("CreationDate", pdfstring(lpdf.pdftimestamp(lpdf.timestamp())))
+ local currenttimestamp = lpdf.timestamp()
+ addtoinfo("CreationDate", pdfstring(formattedtimestamp(currenttimestamp)))
local date = identity.date or ""
- local pdfdate = lpdf.pdftimestamp(date)
+ local pdfdate = formattedtimestamp(date)
if pdfdate then
- lpdf.addtoinfo("ModDate", pdfstring(pdfdate), date)
+ addtoinfo("ModDate", pdfstring(pdfdate), date)
else
-- users should enter the date in 2010-01-19T23:27:50+01:00 format
-- and if not provided that way we use the creation time instead
- date = lpdf.timestamp()
- lpdf.addtoinfo("ModDate", pdfstring(lpdf.pdftimestamp(date)), date)
+ addtoinfo("ModDate", pdfstring(formattedtimestamp(currenttimestamp)), currenttimestamp)
end
local keywords = identity.keywords or ""
if keywords ~= "" then
keywords = gsub(keywords, "[%s,]+", " ")
- lpdf.addtoinfo("Keywords",pdfunicode(keywords), keywords)
+ addtoinfo("Keywords",pdfunicode(keywords), keywords)
end
local id = lpdf.id()
- lpdf.addtoinfo("ID", pdfstring(id), id) -- needed for pdf/x
+ addtoinfo("ID", pdfstring(id), id) -- needed for pdf/x
done = true
else
-- no need for a message
@@ -225,7 +232,7 @@ local function flushjavascripts()
a[#a+1] = pdfstring(name)
a[#a+1] = pdfreference(pdfflushobject(j))
end
- lpdf.addtonames("JavaScript",pdfreference(pdfflushobject(pdfdictionary{ Names = a })))
+ addtonames("JavaScript",pdfreference(pdfflushobject(pdfdictionary{ Names = a })))
end
end
@@ -234,67 +241,93 @@ lpdf.registerdocumentfinalizer(flushjavascripts,"javascripts")
-- -- --
local pagespecs = {
- [variables.max] = { "FullScreen", false, false },
- [variables.bookmark] = { "UseOutlines", false, false },
- [variables.fit] = { "UseNone", false, true },
- [variables.doublesided] = { "UseNone", "TwoColumnRight", true },
- [variables.singlesided] = { "UseNone", false, false },
- [variables.default] = { "UseNone", "auto", false },
- [variables.auto] = { "UseNone", "auto", false },
- [variables.none] = { false, false, false },
+ [variables.max] = { mode = "FullScreen", layout = false, fit = false, fixed = false, duplex = false },
+ [variables.bookmark] = { mode = "UseOutlines", layout = false, fit = false, fixed = false, duplex = false },
+ [variables.fit] = { mode = "UseNone", layout = false, fit = true, fixed = false, duplex = false },
+ [variables.doublesided] = { mode = "UseNone", layout = "TwoColumnRight", fit = true, fixed = false, duplex = false },
+ [variables.singlesided] = { mode = "UseNone", layout = false, fit = false, fixed = false, duplex = false },
+ [variables.default] = { mode = "UseNone", layout = "auto", fit = false, fixed = false, duplex = false },
+ [variables.auto] = { mode = "UseNone", layout = "auto", fit = false, fixed = false, duplex = false },
+ [variables.none] = { mode = false, layout = false, fit = false, fixed = false, duplex = false },
+ -- new
+ [variables.fixed] = { mode = "UseNone", layout = "auto", fit = false, fixed = true, duplex = false }, -- noscale
+ [variables.landscape] = { mode = "UseNone", layout = "auto", fit = false, fixed = true, duplex = "DuplexFlipShortEdge" },
+ [variables.portrait] = { mode = "UseNone", layout = "auto", fit = false, fixed = true, duplex = "DuplexFlipLongEdge" },
+
}
local pagespec, topoffset, leftoffset, height, width, doublesided = "default", 0, 0, 0, 0, false
+local pdfpaperheight = tex.pdfpageheight
+local pdfpaperwidth = tex.pdfpagewidth
+
function codeinjections.setupcanvas(specification)
local paperheight = specification.paperheight
local paperwidth = specification.paperwidth
local paperdouble = specification.doublesided
if paperheight then
texset('global','pdfpageheight',paperheight)
+ pdfpaperheight = paperheight
end
if paperwidth then
texset('global','pdfpagewidth',paperwidth)
+ pdfpaperwidth = paperwidth
end
pagespec = specification.mode or pagespec
topoffset = specification.topoffset or 0
leftoffset = specification.leftoffset or 0
- height = specification.height or tex.pdfpageheight
- width = specification.width or tex.pdfpagewidth
+ height = specification.height or pdfpaperheight
+ width = specification.width or pdfpaperwidth
if paperdouble ~= nil then
doublesided = paperdouble
end
end
local function documentspecification()
+ if not pagespec or pagespec == "" then
+ pagespec = variables.default
+ end
+ -- local settings = utilities.parsers.settings_to_array(pagespec)
+ -- local spec = pagespecs[variables.default]
+ -- for i=1,#settings do
+ -- local s = pagespecs[settings[i]]
+ -- if s then
+ -- for k, v in next, s do
+ -- spec[k] = v
+ -- end
+ -- end
+ -- end
local spec = pagespecs[pagespec] or pagespecs[variables.default]
- if spec then
- local mode, layout, fit = spec[1], spec[2], spec[3]
- if layout == variables.auto then
- if doublesided then
- spec = pagespecs[variables.doublesided] -- to be checked voor interfaces
- if spec then
- mode, layout, fit = spec[1], spec[2], spec[3]
- end
- else
- layout = false
+ if spec.layout == "auto" then
+ if doublesided then
+            local s = pagespecs[variables.doublesided] -- to be checked for interfaces
+ for k, v in next, s do
+ spec[k] = v
end
+ else
+ spec.layout = false
end
- mode = mode and pdfconstant(mode)
- layout = layout and pdfconstant(layout)
- fit = fit and pdfdictionary { FitWindow = true }
- if layout then
- lpdf.addtocatalog("PageLayout",layout)
- end
- if mode then
- lpdf.addtocatalog("PageMode",mode)
- end
- if fit then
- lpdf.addtocatalog("ViewerPreferences",fit)
- end
- lpdf.addtoinfo ("Trapped", pdfconstant("False")) -- '/Trapped' in /Info, 'Trapped' in XMP
- lpdf.addtocatalog("Version", pdfconstant(format("1.%s",tex.pdfminorversion)))
end
+ local layout = spec.layout
+ local mode = spec.mode
+ local fit = spec.fit
+ local fixed = spec.fixed
+ local duplex = spec.duplex
+ if layout then
+ addtocatalog("PageLayout",pdfconstant(layout))
+ end
+ if mode then
+ addtocatalog("PageMode",pdfconstant(mode))
+ end
+ if fit or fixed or duplex then
+ addtocatalog("ViewerPreferences",pdfdictionary {
+ FitWindow = fit and true or nil,
+ PrintScaling = fixed and pdfconstant("None") or nil,
+ Duplex = duplex and pdfconstant(duplex) or nil,
+ })
+ end
+ addtoinfo ("Trapped", pdfconstant("False")) -- '/Trapped' in /Info, 'Trapped' in XMP
+ addtocatalog("Version", pdfconstant(format("1.%s",tex.pdfminorversion)))
end
-- temp hack: the mediabox is not under our control and has a precision of 4 digits
@@ -302,21 +335,21 @@ end
local factor = number.dimenfactors.bp
local function boxvalue(n) -- we could share them
- return pdfverbose(format("%0.4f",factor * n))
+ return pdfverbose(formatters["%0.4F"](factor * n))
end
local function pagespecification()
- local pageheight = tex.pdfpageheight
+ local pageheight = pdfpaperheight
local box = pdfarray { -- can be cached
boxvalue(leftoffset),
boxvalue(pageheight+topoffset-height),
boxvalue(width-leftoffset),
boxvalue(pageheight-topoffset),
}
- lpdf.addtopageattributes("CropBox",box) -- mandate for rendering
- lpdf.addtopageattributes("TrimBox",box) -- mandate for pdf/x
- -- lpdf.addtopageattributes("BleedBox",box)
- -- lpdf.addtopageattributes("ArtBox",box)
+    addtopageattributes("CropBox",box) -- mandated for rendering
+    addtopageattributes("TrimBox",box) -- mandated for pdf/x
+ -- addtopageattributes("BleedBox",box)
+ -- addtopageattributes("ArtBox",box)
end
lpdf.registerpagefinalizer(pagespecification,"page specification")
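With the default canvas setup `width` and `height` fall back to the full paper size, so `pagespecification` produces a symmetric crop: the offsets are taken off the edges and the resulting rectangle is written (in big points) as both /CropBox and /TrimBox. A worked example with invented dimensions (plain Lua, illustration only, not part of the patch):

    -- Illustration only: an A4 sheet (595.28bp x 841.89bp) with a 1cm (28.35bp)
    -- top and left offset; width and height keep their defaults, the paper size.
    local paperwidth, paperheight = 595.28, 841.89
    local topoffset, leftoffset   = 28.35, 28.35
    local box = {
        leftoffset,                            --  28.35  llx
        paperheight + topoffset - paperheight, --  28.35  lly (height == paperheight)
        paperwidth - leftoffset,               -- 566.93  urx
        paperheight - topoffset,               -- 813.54  ury
    }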
@@ -337,34 +370,85 @@ local map = {
characters = "a",
}
+-- local function featurecreep()
+-- local pages, lastconversion, list = structures.pages.tobesaved, nil, pdfarray()
+-- local getstructureset = structures.sets.get
+-- for i=1,#pages do
+-- local p = pages[i]
+-- if not p then
+-- return -- fatal error
+-- else
+-- local numberdata = p.numberdata
+-- if numberdata then
+-- local conversionset = numberdata.conversionset
+-- if conversionset then
+-- local conversion = getstructureset("structure:conversions",p.block,conversionset,1,"numbers")
+-- if conversion ~= lastconversion then
+-- lastconversion = conversion
+-- list[#list+1] = i - 1 -- pdf starts numbering at 0
+-- list[#list+1] = pdfdictionary { S = pdfconstant(map[conversion] or map.numbers) }
+-- end
+-- end
+-- end
+-- if not lastconversion then
+-- lastconversion = "numbers"
+-- list[#list+1] = i - 1 -- pdf starts numbering at 0
+-- list[#list+1] = pdfdictionary { S = pdfconstant(map.numbers) }
+-- end
+-- end
+-- end
+-- addtocatalog("PageLabels", pdfdictionary { Nums = list })
+-- end
+
local function featurecreep()
- local pages, lastconversion, list = structures.pages.tobesaved, nil, pdfarray()
- local getstructureset = structures.sets.get
+ local pages = structures.pages.tobesaved
+ local list = pdfarray()
+ local getset = structures.sets.get
+ local stopped = false
+ local oldlabel = nil
+    local oldconversion = nil
for i=1,#pages do
local p = pages[i]
if not p then
return -- fatal error
+ end
+ local label = p.viewerprefix or ""
+ if p.status == v_stop then
+ if not stopped then
+ list[#list+1] = i - 1 -- pdf starts numbering at 0
+ list[#list+1] = pdfdictionary {
+ P = pdfunicode(label),
+ }
+ stopped = true
+ end
+ oldlabel = nil
+ oldconversion = nil
+ stopped = false
else
local numberdata = p.numberdata
+ local conversion = nil
+ local number = p.number
if numberdata then
local conversionset = numberdata.conversionset
if conversionset then
- local conversion = getstructureset("structure:conversions",p.block,conversionset,1,"numbers")
- if conversion ~= lastconversion then
- lastconversion = conversion
- list[#list+1] = i - 1 -- pdf starts numbering at 0
- list[#list+1] = pdfdictionary { S = pdfconstant(map[conversion] or map.numbers) }
- end
+ conversion = getset("structure:conversions",p.block,conversionset,1,"numbers")
end
end
- if not lastconversion then
- lastconversion = "numbers"
+ conversion = conversion and map[conversion] or map.numbers
+ if number == 1 or oldlabel ~= label or oldconversion ~= conversion then
list[#list+1] = i - 1 -- pdf starts numbering at 0
- list[#list+1] = pdfdictionary { S = pdfconstant(map.numbers) }
+ list[#list+1] = pdfdictionary {
+ S = pdfconstant(conversion),
+ St = number,
+ P = label ~= "" and pdfunicode(label) or nil,
+ }
end
+ oldlabel = label
+ oldconversion = conversion
+ stopped = false
end
end
- lpdf.addtocatalog("PageLabels", pdfdictionary { Nums = list })
+ addtocatalog("PageLabels", pdfdictionary { Nums = list })
end
lpdf.registerdocumentfinalizer(featurecreep,"featurecreep")
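The rewritten `featurecreep` emits a /PageLabels entry whenever the page numbering restarts, the conversion changes, or the viewer prefix changes: each entry pairs a zero-based page index with a dictionary holding the label style (/S), the start value (/St) and an optional prefix (/P). A sketch of the catalog entry it ends up flushing, written with the same lpdf constructors (the indices and the prefix are invented, not part of the patch):

    -- Illustration only: a possible /PageLabels number tree.
    local pagelabels = pdfdictionary {
        Nums = pdfarray {
             0, pdfdictionary { S = pdfconstant("r"), St = 1 },                       -- i, ii, iii, ...
             4, pdfdictionary { S = pdfconstant("D"), St = 1 },                       -- 1, 2, 3, ...
            98, pdfdictionary { S = pdfconstant("D"), St = 1, P = pdfunicode("A-") }, -- A-1, A-2, ...
        },
    }
    -- addtocatalog("PageLabels", pagelabels)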
diff --git a/tex/context/base/lpdf-mov.lua b/tex/context/base/lpdf-mov.lua
index 41db97e0c..87375e4ce 100644
--- a/tex/context/base/lpdf-mov.lua
+++ b/tex/context/base/lpdf-mov.lua
@@ -11,10 +11,10 @@ local format = string.format
local lpdf = lpdf
local nodeinjections = backends.pdf.nodeinjections
-local pdfannotation_node = nodes.pool.pdfannotation
local pdfconstant = lpdf.constant
local pdfdictionary = lpdf.dictionary
local pdfarray = lpdf.array
+local pdfborder = lpdf.border
local write_node = node.write
function nodeinjections.insertmovie(specification)
@@ -31,14 +31,16 @@ function nodeinjections.insertmovie(specification)
ShowControls = (specification.controls and true) or false,
Mode = (specification["repeat"] and pdfconstant("Repeat")) or nil,
}
+ local bs, bc = pdfborder()
local action = pdfdictionary {
Subtype = pdfconstant("Movie"),
- Border = pdfarray { 0, 0, 0 },
+ Border = bs,
+ C = bc,
T = format("movie %s",specification.label),
Movie = moviedict,
A = controldict,
}
- write_node(pdfannotation_node(width,height,0,action())) -- test: context(...)
+ write_node(nodeinjections.annotation(width,height,0,action())) -- test: context(...)
end
function nodeinjections.insertsound(specification)
@@ -51,13 +53,15 @@ function nodeinjections.insertsound(specification)
local sounddict = pdfdictionary {
F = soundclip.filename
}
+ local bs, bc = pdfborder()
local action = pdfdictionary {
Subtype = pdfconstant("Movie"),
- Border = pdfarray { 0, 0, 0 },
+ Border = bs,
+ C = bc,
T = format("sound %s",specification.label),
Movie = sounddict,
A = controldict,
}
- write_node(pdfannotation_node(0,0,0,action())) -- test: context(...)
+ write_node(nodeinjections.annotation(0,0,0,action())) -- test: context(...)
end
end
diff --git a/tex/context/base/lpdf-nod.lua b/tex/context/base/lpdf-nod.lua
index 6b104d2fa..6295947d0 100644
--- a/tex/context/base/lpdf-nod.lua
+++ b/tex/context/base/lpdf-nod.lua
@@ -6,21 +6,29 @@ if not modules then modules = { } end modules ['lpdf-nod'] = {
license = "see context related readme files"
}
-local formatters = string.formatters
+local type = type
-local copy_node = node.copy
-local new_node = node.new
+local formatters = string.formatters
-local nodepool = nodes.pool
-local register = nodepool.register
local whatsitcodes = nodes.whatsitcodes
local nodeinjections = backends.nodeinjections
-local pdfliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) pdfliteral.mode = 1
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local setfield = nuts.setfield
+
+local copy_node = nuts.copy
+local new_node = nuts.new
+
+local nodepool = nuts.pool
+local register = nodepool.register
+
+local pdfliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) setfield(pdfliteral,"mode",1)
local pdfsave = register(new_node("whatsit", whatsitcodes.pdfsave))
local pdfrestore = register(new_node("whatsit", whatsitcodes.pdfrestore))
local pdfsetmatrix = register(new_node("whatsit", whatsitcodes.pdfsetmatrix))
-local pdfdest = register(new_node("whatsit", whatsitcodes.pdfdest)) pdfdest.named_id = 1 -- xyz_zoom untouched
+local pdfdest = register(new_node("whatsit", whatsitcodes.pdfdest)) setfield(pdfdest,"named_id",1) -- xyz_zoom untouched
local pdfannot = register(new_node("whatsit", whatsitcodes.pdfannot))
local variables = interfaces.variables
@@ -38,14 +46,14 @@ local views = { -- beware, we do support the pdf keys but this is *not* official
function nodepool.pdfliteral(str)
local t = copy_node(pdfliteral)
- t.data = str
+ setfield(t,"data",str)
return t
end
function nodepool.pdfdirect(str)
local t = copy_node(pdfliteral)
- t.data = str
- t.mode = 1
+ setfield(t,"data",str)
+ setfield(t,"mode",1)
return t
end
@@ -57,16 +65,10 @@ function nodepool.pdfrestore()
return copy_node(pdfrestore)
end
-function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty)
- local t = copy_node(pdfsetmatrix)
- t.data = formatters["%s %s %s %s"](rx or 0,sx or 0,sy or 0,ry or 0) -- todo: tx ty
- return t
-end
-
-function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty)
+function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty) -- todo: tx ty
local t = copy_node(pdfsetmatrix)
if type(rx) == "string" then
- t.data = rx
+ setfield(t,"data",rx)
else
if not rx then
rx = 1
@@ -86,12 +88,12 @@ function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty)
end
if sx == 0 and sy == 0 then
if rx == 1 and ry == 1 then
- t.data = "1 0 0 1"
+ setfield(t,"data","1 0 0 1")
else
- t.data = formatters["%0.6f 0 0 %0.6f"](rx,ry)
+ setfield(t,"data",formatters["%0.6F 0 0 %0.6F"](rx,ry))
end
else
- t.data = formatters["%0.6f %0.6f %0.6f %0.6f"](rx,sx,sy,ry)
+ setfield(t,"data",formatters["%0.6F %0.6F %0.6F %0.6F"](rx,sx,sy,ry))
end
end
return t
@@ -101,24 +103,28 @@ nodeinjections.save = nodepool.pdfsave
nodeinjections.restore = nodepool.pdfrestore
nodeinjections.transform = nodepool.pdfsetmatrix
+-- the next one is implemented differently, using latelua
+
function nodepool.pdfannotation(w,h,d,data,n)
- local t = copy_node(pdfannot)
- if w and w ~= 0 then
- t.width = w
- end
- if h and h ~= 0 then
- t.height = h
- end
- if d and d ~= 0 then
- t.depth = d
- end
- if n then
- t.objnum = n
- end
- if data and data ~= "" then
- t.data = data
- end
- return t
+ report("don't use node based annotations!")
+ os.exit()
+-- local t = copy_node(pdfannot)
+-- if w and w ~= 0 then
+-- setfield(t,"width",w)
+-- end
+-- if h and h ~= 0 then
+-- setfield(t,"height",h)
+-- end
+-- if d and d ~= 0 then
+-- setfield(t,"depth",d)
+-- end
+-- if n then
+-- setfield(t,"objnum",n)
+-- end
+-- if data and data ~= "" then
+-- setfield(t,"data",data)
+-- end
+-- return t
end
-- (!) The next code in pdfdest.w is wrong:
@@ -135,40 +141,43 @@ end
-- so we need to force a matrix.
function nodepool.pdfdestination(w,h,d,name,view,n)
- local t = copy_node(pdfdest)
- local hasdimensions = false
- if w and w ~= 0 then
- t.width = w
- hasdimensions = true
- end
- if h and h ~= 0 then
- t.height = h
- hasdimensions = true
- end
- if d and d ~= 0 then
- t.depth = d
- hasdimensions = true
- end
- if n then
- t.objnum = n
- end
- view = views[view] or view or 1 -- fit is default
- t.dest_id = name
- t.dest_type = view
- if hasdimensions and view == 0 then -- xyz
- -- see (!) s -> m -> t -> r
- local s = copy_node(pdfsave)
- local m = copy_node(pdfsetmatrix)
- local r = copy_node(pdfrestore)
- m.data = "1 0 0 1"
- s.next = m
- m.next = t
- t.next = r
- m.prev = s
- t.prev = m
- r.prev = t
- return s -- a list
- else
- return t
- end
+ report("don't use node based destinations!")
+ os.exit()
+-- local t = copy_node(pdfdest)
+-- local hasdimensions = false
+-- if w and w ~= 0 then
+-- setfield(t,"width",w)
+-- hasdimensions = true
+-- end
+-- if h and h ~= 0 then
+-- setfield(t,"height",h)
+-- hasdimensions = true
+-- end
+-- if d and d ~= 0 then
+-- setfield(t,"depth",d)
+-- hasdimensions = true
+-- end
+-- if n then
+-- setfield(t,"objnum",n)
+-- end
+-- view = views[view] or view or 1 -- fit is default
+-- setfield(t,"dest_id",name)
+-- setfield(t,"dest_type",view)
+-- if hasdimensions and view == 0 then -- xyz
+-- -- see (!) s -> m -> t -> r
+-- -- linked
+-- local s = copy_node(pdfsave)
+-- local m = copy_node(pdfsetmatrix)
+-- local r = copy_node(pdfrestore)
+-- setfield(m,"data","1 0 0 1")
+-- setfield(s,"next",m)
+-- setfield(m,"next",t)
+-- setfield(t,"next",r)
+-- setfield(m,"prev",s)
+-- setfield(t,"prev",m)
+-- setfield(r,"prev",t)
+-- return s -- a list
+-- else
+-- return t
+-- end
end
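`nodepool.pdfsetmatrix` above only fills the rotation/scale part of the transform (the `tx ty` translation is still a todo), formatting the four values with `%0.6F`. A small usage sketch, assuming the `nodepool` table from the file above is in scope (illustration only, not part of the patch):

    -- Illustration only: a 45 degree rotation whatsit; the data string carries the
    -- first four matrix entries, the backend supplies the translation at shipout.
    local c, s     = math.cos(math.pi/4), math.sin(math.pi/4)
    local rotation = nodepool.pdfsetmatrix(c, s, -s, c)
    -- the whatsit's data field now reads "0.707107 0.707107 -0.707107 0.707107"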
diff --git a/tex/context/base/lpdf-pda.xml b/tex/context/base/lpdf-pda.xml
index 2d8e7b6f5..3f6b969c0 100644
--- a/tex/context/base/lpdf-pda.xml
+++ b/tex/context/base/lpdf-pda.xml
@@ -3,15 +3,20 @@
+
application/pdf
-
+
-
+
+
+
+
+
diff --git a/tex/context/base/lpdf-pdx.xml b/tex/context/base/lpdf-pdx.xml
index 42e11650e..d55e1fdf3 100644
--- a/tex/context/base/lpdf-pdx.xml
+++ b/tex/context/base/lpdf-pdx.xml
@@ -8,10 +8,14 @@
application/pdf
-
+
-
+
+
+
+
+
diff --git a/tex/context/base/lpdf-ren.lua b/tex/context/base/lpdf-ren.lua
index 6af65f9de..61676d5a8 100644
--- a/tex/context/base/lpdf-ren.lua
+++ b/tex/context/base/lpdf-ren.lua
@@ -15,47 +15,66 @@ local settings_to_array = utilities.parsers.settings_to_array
local backends, lpdf, nodes, node = backends, lpdf, nodes, node
-local nodeinjections = backends.pdf.nodeinjections
-local codeinjections = backends.pdf.codeinjections
-local registrations = backends.pdf.registrations
-local viewerlayers = attributes.viewerlayers
-
-local references = structures.references
-
-references.executers = references.executers or { }
-local executers = references.executers
-
-local variables = interfaces.variables
-
-local v_no = variables.no
-local v_yes = variables.yes
-local v_start = variables.start
-local v_stop = variables.stop
-local v_reset = variables.reset
-local v_auto = variables.auto
-local v_random = variables.random
-
-local pdfconstant = lpdf.constant
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local pdfreference = lpdf.reference
-local pdfflushobject = lpdf.flushobject
-local pdfreserveobject = lpdf.reserveobject
-
-local nodepool = nodes.pool
-local register = nodepool.register
-local pdfliteral = nodepool.pdfliteral
-
-local pdf_ocg = pdfconstant("OCG")
-local pdf_ocmd = pdfconstant("OCMD")
-local pdf_off = pdfconstant("OFF")
-local pdf_on = pdfconstant("ON")
-local pdf_toggle = pdfconstant("Toggle")
-local pdf_setocgstate = pdfconstant("SetOCGState")
+local nodeinjections = backends.pdf.nodeinjections
+local codeinjections = backends.pdf.codeinjections
+local registrations = backends.pdf.registrations
+local viewerlayers = attributes.viewerlayers
+
+local references = structures.references
+
+references.executers = references.executers or { }
+local executers = references.executers
+
+local variables = interfaces.variables
+
+local v_no = variables.no
+local v_yes = variables.yes
+local v_start = variables.start
+local v_stop = variables.stop
+local v_reset = variables.reset
+local v_auto = variables.auto
+local v_random = variables.random
+
+local pdfconstant = lpdf.constant
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local pdfreference = lpdf.reference
+local pdfflushobject = lpdf.flushobject
+local pdfreserveobject = lpdf.reserveobject
+
+local addtopageattributes = lpdf.addtopageattributes
+local addtopageresources = lpdf.addtopageresources
+local addtocatalog = lpdf.addtocatalog
+
+local nodepool = nodes.pool
+local register = nodepool.register
+local pdfliteral = nodepool.pdfliteral
+
+local pdf_ocg = pdfconstant("OCG")
+local pdf_ocmd = pdfconstant("OCMD")
+local pdf_off = pdfconstant("OFF")
+local pdf_on = pdfconstant("ON")
+local pdf_view = pdfconstant("View")
+local pdf_design = pdfconstant("Design")
+local pdf_toggle = pdfconstant("Toggle")
+local pdf_setocgstate = pdfconstant("SetOCGState")
+
+local copy_node = node.copy
+
+local pdf_print = {
+ [v_yes] = pdfdictionary { PrintState = pdf_on },
+ [v_no ] = pdfdictionary { PrintState = pdf_off },
+}
-local copy_node = node.copy
+local pdf_intent = {
+ [v_yes] = pdf_view,
+ [v_no] = pdf_design,
+}
-local lpdf_usage = pdfdictionary { Print = pdfdictionary { PrintState = pdf_off } }
+local pdf_export = {
+ [v_yes] = pdf_on,
+ [v_no] = pdf_off,
+}
-- We can have references to layers before they are places, for instance from
-- hide and vide actions. This is why we need to be able to force usage of layers
@@ -95,10 +114,13 @@ local function useviewerlayer(name) -- move up so that we can use it as local
local nn = pdfreserveobject()
local nr = pdfreference(nn)
local nd = pdfdictionary {
- Type = pdf_ocg,
- Name = specification.title or "unknown",
- Intent = ((specification.editable ~= v_no) and pdf_design) or nil, -- disable layer hiding by user
- Usage = ((specification.printable == v_no) and lpdf_usage) or nil, -- printable or not
+ Type = pdf_ocg,
+ Name = specification.title or "unknown",
+ Usage = {
+ Intent = pdf_intent[specification.editable or v_yes], -- disable layer hiding by user (useless)
+ Print = pdf_print [specification.printable or v_yes], -- printable or not
+ Export = pdf_export[specification.export or v_yes], -- export or not
+ },
}
cache[#cache+1] = { nn, nd }
pdfln[tag] = nr -- was n
@@ -161,9 +183,17 @@ local function flushtextlayers()
ON = videlayers,
OFF = hidelayers,
BaseState = pdf_on,
+
+                AS = pdfarray {
+                    pdfdictionary {
+                        Category = pdfarray { pdfconstant("Print") },
+                        Event    = pdfconstant("Print"),
+                        OCGs     = (viewerlayers.hasorder and sortedlayers) or nil,
+                    }
+                },
},
}
- lpdf.addtocatalog("OCProperties",d)
+ addtocatalog("OCProperties",d)
textlayers = nil
end
end
@@ -171,7 +201,7 @@ end
local function flushpagelayers() -- we can share these
if pagelayers then
- lpdf.addtopageresources("Properties",pdfreference(pagelayersreference)) -- we could cache this
+ addtopageresources("Properties",pdfreference(pagelayersreference)) -- we could cache this
end
end
@@ -342,8 +372,8 @@ function codeinjections.setpagetransition(specification)
end
delay = tonumber(delay)
if delay and delay > 0 then
- lpdf.addtopageattributes("Dur",delay)
+ addtopageattributes("Dur",delay)
end
- lpdf.addtopageattributes("Trans",d)
+ addtopageattributes("Trans",d)
end
end
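The three lookup tables added at the top of this file translate the user-level layer options into the /Usage entries of the OCG dictionary, with `yes` as the default for every key. A short sketch with an invented layer specification (illustration only; the locals come from the file above):

    -- Illustration only: how an invented specification feeds the lookup tables.
    local specification = { title = "grid", printable = "no", export = "no" } -- editable not given
    local usage = {
        Intent = pdf_intent[specification.editable  or v_yes], -- -> /View
        Print  = pdf_print [specification.printable or v_yes], -- -> << /PrintState /OFF >>
        Export = pdf_export[specification.export    or v_yes], -- -> /OFF
    }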
diff --git a/tex/context/base/lpdf-swf.lua b/tex/context/base/lpdf-swf.lua
index 12c80036f..88cdcc4ec 100644
--- a/tex/context/base/lpdf-swf.lua
+++ b/tex/context/base/lpdf-swf.lua
@@ -28,8 +28,6 @@ local checkedkey = lpdf.checkedkey
local codeinjections = backends.pdf.codeinjections
local nodeinjections = backends.pdf.nodeinjections
-local pdfannotation_node = nodes.pool.pdfannotation
-
local trace_swf = false trackers.register("backend.swf", function(v) trace_swf = v end)
local report_swf = logs.reporter("backend","swf")
@@ -302,5 +300,5 @@ function backends.pdf.nodeinjections.insertswf(spec)
-- factor = spec.factor,
-- label = spec.label,
}
- context(pdfannotation_node(spec.width,spec.height,0,annotation())) -- the context wrap is probably also needed elsewhere
+ context(nodeinjections.annotation(spec.width,spec.height,0,annotation())) -- the context wrap is probably also needed elsewhere
end
diff --git a/tex/context/base/lpdf-tag.lua b/tex/context/base/lpdf-tag.lua
index 29ffcd207..79ccfe075 100644
--- a/tex/context/base/lpdf-tag.lua
+++ b/tex/context/base/lpdf-tag.lua
@@ -6,70 +6,107 @@ if not modules then modules = { } end modules ['lpdf-tag'] = {
license = "see context related readme files"
}
+local next = next
local format, match, concat = string.format, string.match, table.concat
-local lpegmatch = lpeg.match
+local lpegmatch, P, S, C = lpeg.match, lpeg.P, lpeg.S, lpeg.C
local utfchar = utf.char
+local settings_to_hash = utilities.parsers.settings_to_hash
+local formatters = string.formatters
local trace_tags = false trackers.register("structures.tags", function(v) trace_tags = v end)
local report_tags = logs.reporter("backend","tags")
-local backends, lpdf, nodes = backends, lpdf, nodes
-
-local nodeinjections = backends.pdf.nodeinjections
-local codeinjections = backends.pdf.codeinjections
-
-local tasks = nodes.tasks
-
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local pdfboolean = lpdf.boolean
-local pdfconstant = lpdf.constant
-local pdfreference = lpdf.reference
-local pdfunicode = lpdf.unicode
-local pdfstring = lpdf.string
-local pdfflushobject = lpdf.flushobject
-local pdfreserveobject = lpdf.reserveobject
-local pdfpagereference = lpdf.pagereference
-
-local texgetcount = tex.getcount
-
-local nodepool = nodes.pool
-
-local pdfliteral = nodepool.pdfliteral
-
-local nodecodes = nodes.nodecodes
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local glyph_code = nodecodes.glyph
-
-local a_tagged = attributes.private('tagged')
-local a_image = attributes.private('image')
-
-local traverse_nodes = node.traverse
-local traverse_id = node.traverse_id
-local tosequence = nodes.tosequence
-local copy_node = node.copy
-local slide_nodelist = node.slide
-
-local structure_stack = { }
-local structure_kids = pdfarray()
-local structure_ref = pdfreserveobject()
-local parent_ref = pdfreserveobject()
-local root = { pref = pdfreference(structure_ref), kids = structure_kids }
-local tree = { }
-local elements = { }
-local names = pdfarray()
-local taglist = structures.tags.taglist
-local usedlabels = structures.tags.labels
-local properties = structures.tags.properties
-local usedmapping = { }
-
-local colonsplitter = lpeg.splitat(":")
-local dashsplitter = lpeg.splitat("-")
-
-local add_ids = false -- true
+local backends = backends
+local lpdf = lpdf
+local nodes = nodes
+
+local nodeinjections = backends.pdf.nodeinjections
+local codeinjections = backends.pdf.codeinjections
+
+local tasks = nodes.tasks
+
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local pdfboolean = lpdf.boolean
+local pdfconstant = lpdf.constant
+local pdfreference = lpdf.reference
+local pdfunicode = lpdf.unicode
+local pdfstring = lpdf.string
+local pdfflushobject = lpdf.flushobject
+local pdfreserveobject = lpdf.reserveobject
+local pdfpagereference = lpdf.pagereference
+
+local addtocatalog = lpdf.addtocatalog
+local addtopageattributes = lpdf.addtopageattributes
+
+local texgetcount = tex.getcount
+
+local nodecodes = nodes.nodecodes
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local glyph_code = nodecodes.glyph
+
+local a_tagged = attributes.private('tagged')
+local a_image = attributes.private('image')
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local nodepool = nuts.pool
+local pdfliteral = nodepool.pdfliteral
+
+local getid = nuts.getid
+local getattr = nuts.getattr
+local getprev = nuts.getprev
+local getnext = nuts.getnext
+local getlist = nuts.getlist
+local setfield = nuts.setfield
+
+local traverse_nodes = nuts.traverse
+local tosequence = nuts.tosequence
+local copy_node = nuts.copy
+local slide_nodelist = nuts.slide
+local insert_before = nuts.insert_before
+local insert_after = nuts.insert_after
+
+local structure_stack = { }
+local structure_kids = pdfarray()
+local structure_ref = pdfreserveobject()
+local parent_ref = pdfreserveobject()
+local root = { pref = pdfreference(structure_ref), kids = structure_kids }
+local tree = { }
+local elements = { }
+local names = pdfarray()
+
+local structurestags = structures.tags
+local taglist = structurestags.taglist
+local specifications = structurestags.specifications
+local usedlabels = structurestags.labels
+local properties = structurestags.properties
+local lasttaginchain = structurestags.lastinchain
+
+local usedmapping = { }
+
+----- tagsplitter = structurestags.patterns.splitter
+
+-- local embeddedtags = false -- true will id all, for tracing
+-- local f_tagid = formatters["%s-%04i"]
+-- local embeddedfilelist = pdfarray() -- /AF crap
+--
+-- directives.register("structures.tags.embedmath",function(v)
+-- if not v then
+-- -- only enable
+-- elseif embeddedtags == true then
+-- -- already all tagged
+-- elseif embeddedtags then
+-- embeddedtags.math = true
+-- else
+-- embeddedtags = { math = true }
+-- end
+-- end)
-- function codeinjections.maptag(original,target,kind)
-- mapping[original] = { target, kind or "inline" }
@@ -79,14 +116,15 @@ local function finishstructure()
if #structure_kids > 0 then
local nums, n = pdfarray(), 0
for i=1,#tree do
- n = n + 1 ; nums[n] = i-1
+ n = n + 1 ; nums[n] = i - 1
n = n + 1 ; nums[n] = pdfreference(pdfflushobject(tree[i]))
end
local parenttree = pdfdictionary {
Nums = nums
}
-- we need to split names into smaller parts (e.g. alphabetic or so)
- if add_ids then
+ -- we already have code for that somewhere
+ if #names > 0 then
local kids = pdfdictionary {
Limits = pdfarray { names[1], names[#names-1] },
Names = names,
@@ -106,18 +144,19 @@ local function finishstructure()
Type = pdfconstant("StructTreeRoot"),
K = pdfreference(pdfflushobject(structure_kids)),
ParentTree = pdfreference(pdfflushobject(parent_ref,parenttree)),
- IDTree = (add_ids and pdfreference(pdfflushobject(idtree))) or nil,
+ IDTree = #names > 0 and pdfreference(pdfflushobject(idtree)) or nil,
RoleMap = rolemap,
}
pdfflushobject(structure_ref,structuretree)
- lpdf.addtocatalog("StructTreeRoot",pdfreference(structure_ref))
+ addtocatalog("StructTreeRoot",pdfreference(structure_ref))
--
local markinfo = pdfdictionary {
Marked = pdfboolean(true),
-- UserProperties = pdfboolean(true),
-- Suspects = pdfboolean(true),
+ -- AF = #embeddedfilelist > 0 and pdfreference(pdfflushobject(embeddedfilelist)) or nil,
}
- lpdf.addtocatalog("MarkInfo",pdfreference(pdfflushobject(markinfo)))
+ addtocatalog("MarkInfo",pdfreference(pdfflushobject(markinfo)))
--
for fulltag, element in next, elements do
pdfflushobject(element.knum,element.kids)
@@ -133,49 +172,110 @@ local pdf_mcr = pdfconstant("MCR")
local pdf_struct_element = pdfconstant("StructElem")
local function initializepage()
- index = 0
+ index = 0
pagenum = texgetcount("realpageno")
pageref = pdfreference(pdfpagereference(pagenum))
- list = pdfarray()
+ list = pdfarray()
tree[pagenum] = list -- we can flush after done, todo
end
local function finishpage()
-- flush what can be flushed
- lpdf.addtopageattributes("StructParents",pagenum-1)
+ addtopageattributes("StructParents",pagenum-1)
end
-- here we can flush and free elements that are finished
+local pdf_userproperties = pdfconstant("UserProperties")
+
+local function makeattribute(t)
+ if t and next(t) then
+ local properties = pdfarray()
+ for k, v in next, t do
+ properties[#properties+1] = pdfdictionary {
+ N = pdfunicode(k),
+ V = pdfunicode(v),
+ }
+ end
+ return pdfdictionary {
+ O = pdf_userproperties,
+ P = properties,
+ }
+ end
+end
+
local function makeelement(fulltag,parent)
- local tag, n = lpegmatch(dashsplitter,fulltag)
- local tg, detail = lpegmatch(colonsplitter,tag)
- local k, r = pdfarray(), pdfreserveobject()
- usedmapping[tg] = true
- tg = usedlabels[tg] or tg
+ local specification = specifications[fulltag]
+ local tag = specification.tagname
+ if tag == "ignore" then
+ return false
+    elseif tag == "mstackertop" or tag == "mstackerbot" or tag == "mstackermid" then
+ -- TODO
+ return true
+ end
+ --
+ local detail = specification.detail
+ local userdata = specification.userdata
+ --
+ usedmapping[tag] = true
+ --
+ -- specification.attribute is unique
+ --
+ local id = nil
+ -- local af = nil
+ -- if embeddedtags then
+ -- local tagname = specification.tagname
+ -- local tagindex = specification.tagindex
+ -- if embeddedtags == true or embeddedtags[tagname] then
+ -- id = f_tagid(tagname,tagindex)
+ -- af = job.fileobjreferences.collected[id]
+ -- if af then
+ -- local r = pdfreference(af)
+ -- af = pdfarray { r }
+ -- -- embeddedfilelist[#embeddedfilelist+1] = r
+ -- end
+ -- end
+ -- end
+ --
+ local k = pdfarray()
+ local r = pdfreserveobject()
+ local t = usedlabels[tag] or tag
local d = pdfdictionary {
Type = pdf_struct_element,
- S = pdfconstant(tg),
- ID = (add_ids and fulltag) or nil,
+ S = pdfconstant(t),
+ ID = id,
T = detail and detail or nil,
P = parent.pref,
Pg = pageref,
K = pdfreference(r),
+        A = userdata and makeattribute(userdata) or nil,
-- Alt = " Who cares ",
-- ActualText = " Hi Hans ",
+ AF = af,
}
local s = pdfreference(pdfflushobject(d))
- if add_ids then
- names[#names+1] = fulltag
+ if id then
+ names[#names+1] = id
names[#names+1] = s
end
local kids = parent.kids
kids[#kids+1] = s
- elements[fulltag] = { tag = tag, pref = s, kids = k, knum = r, pnum = pagenum }
+ local e = {
+ tag = t,
+ pref = s,
+ kids = k,
+ knum = r,
+ pnum = pagenum
+ }
+ elements[fulltag] = e
+ return e
end
-local function makecontent(parent,start,stop,slist,id)
- local tag, kids = parent.tag, parent.kids
+local f_BDC = formatters["/%s <</MCID %s>> BDC"]
+
+local function makecontent(parent,id)
+ local tag = parent.tag
+ local kids = parent.kids
local last = index
if id == "image" then
local d = pdfdictionary {
@@ -197,109 +297,304 @@ local function makecontent(parent,start,stop,slist,id)
kids[#kids+1] = d
end
--
-    local bliteral = pdfliteral(format("/%s <</MCID %s>>BDC",tag,last))
- local prev = start.prev
- if prev then
- prev.next, bliteral.prev = bliteral, prev
- end
- start.prev, bliteral.next = bliteral, start
- if slist and slist.list == start then
- slist.list = bliteral
- elseif not prev then
- report_tags("this can't happen: injection in front of nothing")
- end
- --
- local eliteral = pdfliteral("EMC")
- local next = stop.next
- if next then
- next.prev, eliteral.next = eliteral, next
- end
- stop.next, eliteral.prev = eliteral, stop
- --
index = index + 1
- list[index] = parent.pref
- return bliteral, eliteral
+ list[index] = parent.pref -- page related list
+ --
+ return f_BDC(tag,last)
end
--- -- --
-
-local level, last, ranges, range = 0, nil, { }, nil
-
-local function collectranges(head,list)
- for n in traverse_nodes(head) do
- local id = n.id -- 14: image, 8: literal (mp)
- if id == glyph_code then
- local at = n[a_tagged]
- if not at then
- range = nil
- elseif last ~= at then
- range = { at, "glyph", n, n, list } -- attr id start stop list
- ranges[#ranges+1] = range
- last = at
- elseif range then
- range[4] = n -- stop
- end
- elseif id == hlist_code or id == vlist_code then
- local at = n[a_image]
- if at then
- local at = n[a_tagged]
+-- no need to adapt head, as we always operate on lists
+
+function nodeinjections.addtags(head)
+
+ local last = nil
+ local ranges = { }
+ local range = nil
+ local head = tonut(head)
+
+ local function collectranges(head,list)
+ for n in traverse_nodes(head) do
+ local id = getid(n) -- 14: image, 8: literal (mp)
+ if id == glyph_code then
+ local at = getattr(n,a_tagged)
if not at then
range = nil
+ elseif last ~= at then
+ range = { at, "glyph", n, n, list } -- attr id start stop list
+ ranges[#ranges+1] = range
+ last = at
+ elseif range then
+ range[4] = n -- stop
+ end
+ elseif id == hlist_code or id == vlist_code then
+ local at = getattr(n,a_image)
+ if at then
+ local at = getattr(n,a_tagged)
+ if not at then
+ range = nil
+ else
+ ranges[#ranges+1] = { at, "image", n, n, list } -- attr id start stop list
+ end
+ last = nil
else
- ranges[#ranges+1] = { at, "image", n, n, list } -- attr id start stop list
+ local nl = getlist(n)
+ -- slide_nodelist(nl) -- temporary hack till math gets slided (tracker item)
+ collectranges(nl,n)
end
- last = nil
- else
- local nl = n.list
- slide_nodelist(nl) -- temporary hack till math gets slided (tracker item)
- collectranges(nl,n)
end
end
end
-end
-function nodeinjections.addtags(head)
- -- no need to adapt head, as we always operate on lists
- level, last, ranges, range = 0, nil, { }, nil
initializepage()
+
collectranges(head)
+
if trace_tags then
for i=1,#ranges do
local range = ranges[i]
- local attr, id, start, stop = range[1], range[2], range[3], range[4]
- local tags = taglist[attr]
+ local attr = range[1]
+ local id = range[2]
+ local start = range[3]
+ local stop = range[4]
+ local tags = taglist[attr]
if tags then -- not ok ... only first lines
- report_tags("%s => %s : %05i % t",tosequence(start,start),tosequence(stop,stop),attr,tags)
+ report_tags("%s => %s : %05i % t",tosequence(start,start),tosequence(stop,stop),attr,tags.taglist)
end
end
end
+
+ local top = nil
+ local noftop = 0
+
for i=1,#ranges do
- local range = ranges[i]
- local attr, id, start, stop, list = range[1], range[2], range[3], range[4], range[5]
- local tags = taglist[attr]
- local prev = root
- local noftags, tag = #tags, nil
- for j=1,noftags do
- local tag = tags[j]
- if not elements[tag] then
- makeelement(tag,prev)
+ local range = ranges[i]
+ local attr = range[1]
+ local id = range[2]
+ local start = range[3]
+ local stop = range[4]
+ local list = range[5]
+ local specification = taglist[attr]
+ local taglist = specification.taglist
+ local noftags = #taglist
+ local common = 0
+
+ if top then
+ for i=1,noftags >= noftop and noftop or noftags do
+ if top[i] == taglist[i] then
+ common = i
+ else
+ break
+ end
+ end
+ end
+
+ local prev = common > 0 and elements[taglist[common]] or root
+
+ for j=common+1,noftags do
+ local tag = taglist[j]
+ local prv = elements[tag] or makeelement(tag,prev)
+ if prv == false then
+ -- ignore this one
+ prev = false
+ break
+ elseif prv == true then
+ -- skip this one
+ else
+ prev = prv
end
- prev = elements[tag]
end
- local b, e = makecontent(prev,start,stop,list,id)
- if start == head then
- report_tags("this can't happen: parent list gets tagged")
- head = b
+
+ if prev then
+ -- use insert instead:
+ local literal = pdfliteral(makecontent(prev,id))
+ local prev = getprev(start)
+ if prev then
+ setfield(prev,"next",literal)
+ setfield(literal,"prev",prev)
+ end
+ setfield(start,"prev",literal)
+ setfield(literal,"next",start)
+ if list and getlist(list) == start then
+ setfield(list,"list",literal)
+ end
+ -- use insert instead:
+ local literal = pdfliteral("EMC")
+ local next = getnext(stop)
+ if next then
+ setfield(next,"prev",literal)
+ setfield(literal,"next",next)
+ end
+ setfield(stop,"next",literal)
+ setfield(literal,"prev",stop)
end
+ top = taglist
+ noftop = noftags
end
+
finishpage()
- -- can be separate feature
- --
- -- injectspans(head) -- does to work yet
- --
+
+ head = tonode(head)
return head, true
+
end
+-- variant: more structure but funny collapsing in viewer
+
+-- function nodeinjections.addtags(head)
+--
+-- local last, ranges, range = nil, { }, nil
+--
+-- local function collectranges(head,list)
+-- for n in traverse_nodes(head) do
+-- local id = getid(n) -- 14: image, 8: literal (mp)
+-- if id == glyph_code then
+-- local at = getattr(n,a_tagged)
+-- if not at then
+-- range = nil
+-- elseif last ~= at then
+-- range = { at, "glyph", n, n, list } -- attr id start stop list
+-- ranges[#ranges+1] = range
+-- last = at
+-- elseif range then
+-- range[4] = n -- stop
+-- end
+-- elseif id == hlist_code or id == vlist_code then
+-- local at = getattr(n,a_image)
+-- if at then
+-- local at = getattr(n,a_tagged)
+-- if not at then
+-- range = nil
+-- else
+-- ranges[#ranges+1] = { at, "image", n, n, list } -- attr id start stop list
+-- end
+-- last = nil
+-- else
+-- local nl = getlist(n)
+-- -- slide_nodelist(nl) -- temporary hack till math gets slided (tracker item)
+-- collectranges(nl,n)
+-- end
+-- end
+-- end
+-- end
+--
+-- initializepage()
+--
+-- head = tonut(head)
+-- collectranges(head)
+--
+-- if trace_tags then
+-- for i=1,#ranges do
+-- local range = ranges[i]
+-- local attr = range[1]
+-- local id = range[2]
+-- local start = range[3]
+-- local stop = range[4]
+-- local tags = taglist[attr]
+-- if tags then -- not ok ... only first lines
+-- report_tags("%s => %s : %05i % t",tosequence(start,start),tosequence(stop,stop),attr,tags.taglist)
+-- end
+-- end
+-- end
+--
+-- local top = nil
+-- local noftop = 0
+-- local last = nil
+--
+-- for i=1,#ranges do
+-- local range = ranges[i]
+-- local attr = range[1]
+-- local id = range[2]
+-- local start = range[3]
+-- local stop = range[4]
+-- local list = range[5]
+-- local specification = taglist[attr]
+-- local taglist = specification.taglist
+-- local noftags = #taglist
+-- local tag = nil
+-- local common = 0
+-- -- local prev = root
+--
+-- if top then
+-- for i=1,noftags >= noftop and noftop or noftags do
+-- if top[i] == taglist[i] then
+-- common = i
+-- else
+-- break
+-- end
+-- end
+-- end
+--
+-- local result = { }
+-- local r = noftop - common
+-- if r > 0 then
+-- for i=1,r do
+-- result[i] = "EMC"
+-- end
+-- end
+--
+-- local prev = common > 0 and elements[taglist[common]] or root
+--
+-- for j=common+1,noftags do
+-- local tag = taglist[j]
+-- local prv = elements[tag] or makeelement(tag,prev)
+-- -- if prv == false then
+-- -- -- ignore this one
+-- -- prev = false
+-- -- break
+-- -- elseif prv == true then
+-- -- -- skip this one
+-- -- else
+-- prev = prv
+-- r = r + 1
+-- result[r] = makecontent(prev,id)
+-- -- end
+-- end
+--
+-- if r > 0 then
+-- local literal = pdfliteral(concat(result,"\n"))
+-- -- use insert instead:
+-- local literal = pdfliteral(result)
+-- local prev = getprev(start)
+-- if prev then
+-- setfield(prev,"next",literal)
+-- setfield(literal,"prev",prev)
+-- end
+-- setfield(start,"prev",literal)
+-- setfield(literal,"next",start)
+-- if list and getlist(list) == start then
+-- setfield(list,"list",literal)
+-- end
+-- end
+--
+-- top = taglist
+-- noftop = noftags
+-- last = stop
+--
+-- end
+--
+-- if last and noftop > 0 then
+-- local result = { }
+-- for i=1,noftop do
+-- result[i] = "EMC"
+-- end
+-- local literal = pdfliteral(concat(result,"\n"))
+-- -- use insert instead:
+-- local next = getnext(last)
+-- if next then
+-- setfield(next,"prev",literal)
+-- setfield(literal,"next",next)
+-- end
+-- setfield(last,"next",literal)
+-- setfield(literal,"prev",last)
+-- end
+--
+-- finishpage()
+--
+-- head = tonode(head)
+-- return head, true
+--
+-- end
+
-- this belongs elsewhere (export is not pdf related)
function codeinjections.enabletags(tg,lb)
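In the rewritten `addtags` every tagged range is bracketed by a pair of literals: `makecontent` returns the opening `/tag <</MCID n>> BDC` operator and records the owning structure element under the same index in the page's parent tree, while a plain `EMC` closes the range. A short sketch of that pairing (the tag name and the MCID are invented, not part of the patch):

    -- Illustration only: the two literals injected around one tagged range.
    local open  = pdfliteral(f_BDC("P", 3)) -- "/P <</MCID 3>> BDC"
    local close = pdfliteral("EMC")
    -- "open" is linked in before the first node of the range and "close" after the
    -- last one; the parent-tree entry for MCID 3 on this page then points back at
    -- the P structure element, keeping content stream and structure tree in sync.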
diff --git a/tex/context/base/lpdf-u3d.lua b/tex/context/base/lpdf-u3d.lua
index 33269486c..c9f4a0369 100644
--- a/tex/context/base/lpdf-u3d.lua
+++ b/tex/context/base/lpdf-u3d.lua
@@ -17,7 +17,8 @@ if not modules then modules = { } end modules ['lpdf-u3d'] = {
-- point we will end up with a reimplementation. For instance
-- it makes sense to add the same activation code as with swf.
-local format, find = string.format, string.find
+local tonumber = tonumber
+local formatters, find = string.formatters, string.find
local cos, sin, sqrt, pi, atan2, abs = math.cos, math.sin, math.sqrt, math.pi, math.atan2, math.abs
local backends, lpdf = backends, lpdf
@@ -38,8 +39,6 @@ local pdfflushstreamfileobject = lpdf.flushstreamfileobject
local checkedkey = lpdf.checkedkey
local limited = lpdf.limited
-local pdfannotation_node = nodes.pool.pdfannotation
-
local schemes = table.tohash {
"Artwork", "None", "White", "Day", "Night", "Hard",
"Primary", "Blue", "Red", "Cube", "CAD", "Headlamp",
@@ -429,13 +428,13 @@ local function insert3d(spec) -- width, height, factor, display, controls, label
local preview = checkedkey(param,"preview","string")
if preview then
activationdict.A = pdfconstant("XA")
- local tag = format("%s:%s:%s",label,stream,preview)
+ local tag = formatters["%s:%s:%s"](label,stream,preview)
local ref = stored_pr[tag]
if not ref then
local figure = img.immediatewrite {
filename = preview,
- width = width,
- height = height
+ width = width,
+ height = height
}
ref = figure.objnum
stored_pr[tag] = ref
@@ -462,7 +461,7 @@ local function insert3d(spec) -- width, height, factor, display, controls, label
},
ProcSet = pdfarray { pdfconstant("PDF"), pdfconstant("ImageC") },
}
- local pwd = pdfflushstreamobject(format("q /GS gs %f 0 0 %f 0 0 cm /IM Do Q",factor*width,factor*height),pw)
+ local pwd = pdfflushstreamobject(formatters["q /GS gs %F 0 0 %F 0 0 cm /IM Do Q"](factor*width,factor*height),pw)
annot.AP = pdfdictionary {
N = pdfreference(pwd)
}
@@ -484,5 +483,5 @@ function nodeinjections.insertu3d(spec)
controls = spec.controls,
label = spec.label,
}
- node.write(pdfannotation_node(spec.width,spec.height,0,annotation()))
+ node.write(nodeinjections.annotation(spec.width,spec.height,0,annotation()))
end
diff --git a/tex/context/base/lpdf-wid.lua b/tex/context/base/lpdf-wid.lua
index 11ac82a08..22971c2b7 100644
--- a/tex/context/base/lpdf-wid.lua
+++ b/tex/context/base/lpdf-wid.lua
@@ -46,20 +46,18 @@ local pdfcolorspec = lpdf.colorspec
local pdfflushobject = lpdf.flushobject
local pdfflushstreamobject = lpdf.flushstreamobject
local pdfflushstreamfileobject = lpdf.flushstreamfileobject
-local pdfreserveannotation = lpdf.reserveannotation
local pdfreserveobject = lpdf.reserveobject
local pdfpagereference = lpdf.pagereference
local pdfshareobjectreference = lpdf.shareobjectreference
+local pdfaction = lpdf.action
+local pdfborder = lpdf.border
-local nodepool = nodes.pool
-
-local pdfannotation_node = nodepool.pdfannotation
+local pdftransparencyvalue = lpdf.transparencyvalue
+local pdfcolorvalues = lpdf.colorvalues
local hpack_node = node.hpack
local write_node = node.write -- test context(...) instead
-local pdf_border = pdfarray { 0, 0, 0 } -- can be shared
-
-- symbols
local presets = { } -- xforms
@@ -117,8 +115,8 @@ codeinjections.presetsymbollist = presetsymbollist
-- }
local attachment_symbols = {
- Graph = pdfconstant("GraphPushPin"),
- Paperclip = pdfconstant("PaperclipTag"),
+ Graph = pdfconstant("Graph"),
+ Paperclip = pdfconstant("Paperclip"),
Pushpin = pdfconstant("PushPin"),
}
@@ -170,19 +168,36 @@ end
local function analyzecolor(colorvalue,colormodel)
local cvalue = colorvalue and tonumber(colorvalue)
local cmodel = colormodel and tonumber(colormodel) or 3
- return cvalue and pdfarray { lpdf.colorvalues(cmodel,cvalue) } or nil
+ return cvalue and pdfarray { pdfcolorvalues(cmodel,cvalue) } or nil
end
local function analyzetransparency(transparencyvalue)
local tvalue = transparencyvalue and tonumber(transparencyvalue)
- return tvalue and lpdf.transparencyvalue(tvalue) or nil
+ return tvalue and pdftransparencyvalue(tvalue) or nil
end
-- Attachments
+local nofattachments = 0
+local attachments = { }
+local filestreams = { }
+local referenced = { }
+local ignorereferenced = true -- fuzzy pdf spec .. twice in attachment list, can become an option
+local tobesavedobjrefs = utilities.storage.allocate()
+local collectedobjrefs = utilities.storage.allocate()
+
+local fileobjreferences = {
+ collected = collectedobjrefs,
+ tobesaved = tobesavedobjrefs,
+}
-local nofattachments, attachments, filestreams, referenced = 0, { }, { }, { }
+job.fileobjreferences = fileobjreferences
+
+local function initializer()
+ collectedobjrefs = job.fileobjreferences.collected or { }
+ tobesavedobjrefs = job.fileobjreferences.tobesaved or { }
+end
-local ignorereferenced = true -- fuzzy pdf spec .. twice in attachment list, can become an option
+job.register('job.fileobjreferences.collected', tobesavedobjrefs, initializer)
local function flushembeddedfiles()
if next(filestreams) then
@@ -211,6 +226,7 @@ function codeinjections.embedfile(specification)
local hash = specification.hash or filename
local keepdir = specification.keepdir -- can change
local usedname = specification.usedname
+ local filetype = specification.filetype
if filename == "" then
filename = nil
end
@@ -248,11 +264,20 @@ function codeinjections.embedfile(specification)
end
end
end
- usedname = usedname ~= "" and usedname or filename
+ -- needs to cleaned up:
+ usedname = usedname ~= "" and usedname or filename or name
local basename = keepdir == true and usedname or file.basename(usedname)
-local basename = gsub(basename,"%./","")
- local savename = file.addsuffix(name ~= "" and name or basename,"txt") -- else no valid file
- local a = pdfdictionary { Type = pdfconstant("EmbeddedFile") }
+ local basename = gsub(basename,"%./","")
+ local savename = name ~= "" and name or basename
+ if not filetype or filetype == "" then
+ filetype = name and (filename and file.suffix(filename)) or "txt"
+ end
+    savename = file.addsuffix(savename,filetype) -- a suffix is mandatory for proper working in the viewer
+ local mimetype = specification.mimetype
+ local a = pdfdictionary {
+ Type = pdfconstant("EmbeddedFile"),
+ Subtype = mimetype and mimetype ~= "" and pdfconstant(mimetype) or nil,
+ }
local f
if data then
f = pdfflushstreamobject(data,a)
@@ -267,6 +292,7 @@ local basename = gsub(basename,"%./","")
UF = pdfstring(savename),
EF = pdfdictionary { F = pdfreference(f) },
Desc = title ~= "" and pdfunicode(title) or nil,
+        -- AFRelationship = pdfconstant("Source"), -- some day maybe, not mandatory
}
local r = pdfreference(pdfflushobject(d))
filestreams[hash] = r
@@ -320,6 +346,10 @@ function nodeinjections.attachfile(specification)
aref = codeinjections.embedfile(specification)
attachments[registered] = aref
end
+ local reference = specification.reference
+ if reference and aref then
+ tobesavedobjrefs[reference] = aref[1]
+ end
if not aref then
report_attachment("skipping attachment, registered %a",registered)
-- already reported
@@ -342,7 +372,7 @@ function nodeinjections.attachfile(specification)
OC = analyzelayer(specification.layer),
}
local width, height, depth = specification.width or 0, specification.height or 0, specification.depth
- local box = hpack_node(pdfannotation_node(width,height,depth,d()))
+ local box = hpack_node(nodeinjections.annotation(width,height,depth,d()))
box.width, box.height, box.depth = width, height, depth
return box
end
@@ -427,19 +457,19 @@ function nodeinjections.comment(specification) -- brrr: seems to be done twice
local box
if usepopupcomments then
-- rather useless as we can hide/vide
- local nd = pdfreserveannotation()
- local nc = pdfreserveannotation()
+ local nd = pdfreserveobject()
+ local nc = pdfreserveobject()
local c = pdfdictionary {
Subtype = pdfconstant("Popup"),
Parent = pdfreference(nd),
}
d.Popup = pdfreference(nc)
box = hpack_node(
- pdfannotation_node(0,0,0,d(),nd),
- pdfannotation_node(width,height,depth,c(),nc)
+ nodeinjections.annotation(0,0,0,d(),nd),
+ nodeinjections.annotation(width,height,depth,c(),nc)
)
else
- box = hpack_node(pdfannotation_node(width,height,depth,d()))
+ box = hpack_node(nodeinjections.annotation(width,height,depth,d()))
end
box.width, box.height, box.depth = width, height, depth -- redundant
return box
@@ -484,7 +514,7 @@ end
local ms, mu, mf = { }, { }, { }
local function delayed(label)
- local a = pdfreserveannotation()
+ local a = pdfreserveobject()
mu[label] = a
return pdfreference(a)
end
@@ -504,23 +534,25 @@ local function insertrenderingwindow(specification)
local actions = nil
if openpage or closepage then
actions = pdfdictionary {
- PO = (openpage and lpdf.action(openpage )) or nil,
- PC = (closepage and lpdf.action(closepage)) or nil,
+            PO = (openpage and pdfaction(openpage )) or nil,
+            PC = (closepage and pdfaction(closepage)) or nil,
}
end
local page = tonumber(specification.page) or texgetcount("realpageno") -- todo
- local r = mu[label] or pdfreserveannotation() -- why the reserve here?
+ local r = mu[label] or pdfreserveobject() -- why the reserve here?
local a = pdfdictionary {
S = pdfconstant("Rendition"),
R = mf[label],
OP = 0,
AN = pdfreference(r),
}
+ local bs, bc = pdfborder()
local d = pdfdictionary {
Subtype = pdfconstant("Screen"),
P = pdfreference(pdfpagereference(page)),
A = a, -- needed in order to make the annotation clickable (i.e. don't bark)
- Border = pdf_border,
+ Border = bs,
+ C = bc,
AA = actions,
}
local width = specification.width or 0
@@ -528,7 +560,7 @@ local function insertrenderingwindow(specification)
if height == 0 or width == 0 then
-- todo: sound needs no window
end
- write_node(pdfannotation_node(width,height,0,d(),r)) -- save ref
+ write_node(nodeinjections.annotation(width,height,0,d(),r)) -- save ref
return pdfreference(r)
end
@@ -539,7 +571,7 @@ local function insertrendering(specification)
local option = settings_to_hash(specification.option)
if not mf[label] then
local filename = specification.filename
- local isurl = find(filename,"://")
+ local isurl = find(filename,"://",1,true)
-- local start = pdfdictionary {
-- Type = pdfconstant("MediaOffset"),
-- S = pdfconstant("T"), -- time
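
Aside (not part of the patch): the suffix fallback in the embedded-file change above can be summarized in plain Lua. The helpers below (suffix, addsuffix, pick_savename) are simplified, hypothetical stand-ins for ConTeXt's file.suffix and file.addsuffix, so this is only a sketch of the intended behaviour.

-- editor's sketch, not the actual lpdf code
local function suffix(filename)
    return filename and filename:match("%.([%a%d]+)$") or ""
end

local function addsuffix(name, filetype)
    if filetype ~= "" and not name:find("%." .. filetype .. "$") then
        return name .. "." .. filetype
    end
    return name
end

local function pick_savename(name, filename, basename, filetype)
    local savename = (name ~= "" and name) or basename
    if not filetype or filetype == "" then
        -- fall back to the suffix of the source file, or "txt" as a last resort
        filetype = (filename and suffix(filename) ~= "" and suffix(filename)) or "txt"
    end
    return addsuffix(savename, filetype)
end

print(pick_savename("",      "demo.csv", "demo.csv", nil)) -- demo.csv
print(pick_savename("notes", nil,        "notes",    ""))  -- notes.txt
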
diff --git a/tex/context/base/lpdf-xmp.lua b/tex/context/base/lpdf-xmp.lua
index 061ed0757..b1a795c4b 100644
--- a/tex/context/base/lpdf-xmp.lua
+++ b/tex/context/base/lpdf-xmp.lua
@@ -7,6 +7,7 @@ if not modules then modules = { } end modules ['lpdf-xmp'] = {
comment = "with help from Peter Rolf",
}
+local tostring = tostring
local format, random, char, gsub, concat = string.format, math.random, string.char, string.gsub, table.concat
local xmlfillin = xml.fillin
@@ -25,7 +26,7 @@ local pdfconstant = lpdf.constant
local pdfreference = lpdf.reference
local pdfflushstreamobject = lpdf.flushstreamobject
--- I wonder why this begin end is empty / w (no time now to look into it)
+-- I wonder why this begin end is empty / w (no time now to look into it) / begin can also be "?"
local xpacket = [[
@@ -49,7 +50,7 @@ local mapping = {
-- Dublin Core schema
["Author"] = "rdf:Description/dc:creator/rdf:Seq/rdf:li",
["Format"] = "rdf:Description/dc:format", -- optional, but nice to have
- ["Subject"] = "rdf:Description/dc:description",
+ ["Subject"] = "rdf:Description/dc:description/rdf:Alt/rdf:li",
["Title"] = "rdf:Description/dc:title/rdf:Alt/rdf:li",
-- XMP Basic schema
["CreateDate"] = "rdf:Description/xmp:CreateDate",
@@ -90,7 +91,12 @@ local function setxmpfile(name)
end
codeinjections.setxmpfile = setxmpfile
-commands.setxmpfile = setxmpfile
+
+interfaces.implement {
+ name = "setxmpfile",
+ arguments = "string",
+ actions = setxmpfile
+}
local function valid_xmp()
if not xmp then
@@ -104,7 +110,7 @@ local function valid_xmp()
if xmpfile ~= "" then
report_xmp("using file %a",xmpfile)
end
- local xmpdata = (xmpfile ~= "" and io.loaddata(xmpfile)) or ""
+ local xmpdata = xmpfile ~= "" and io.loaddata(xmpfile) or ""
xmp = xml.convert(xmpdata)
end
return xmp
@@ -119,16 +125,16 @@ end
-- redefined
-local addtoinfo = lpdf.addtoinfo
-local addxmpinfo = lpdf.addxmpinfo
+local pdfaddtoinfo = lpdf.addtoinfo
+local pdfaddxmpinfo = lpdf.addxmpinfo
function lpdf.addtoinfo(tag,pdfvalue,strvalue)
- addtoinfo(tag,pdfvalue)
+ pdfaddtoinfo(tag,pdfvalue)
local value = strvalue or gsub(tostring(pdfvalue),"^%((.*)%)$","%1") -- hack
if trace_info then
report_info("set %a to %a",tag,value)
end
- addxmpinfo(tag,value)
+ pdfaddxmpinfo(tag,value)
end
-- for the do-it-yourselvers
@@ -146,7 +152,8 @@ end
local t = { } for i=1,24 do t[i] = random() end
local function flushxmpinfo()
- commands.freezerandomseed(os.clock()) -- hack
+ commands.pushrandomseed()
+ commands.setrandomseed(os.time())
local t = { } for i=1,24 do t[i] = char(96 + random(26)) end
local packetid = concat(t)
@@ -156,23 +163,22 @@ local function flushxmpinfo()
local producer = format("LuaTeX-%0.2f.%s",tex.luatexversion/100,tex.luatexrevision)
local creator = "LuaTeX + ConTeXt MkIV"
local time = lpdf.timestamp()
- local fullbanner = tex.pdftexbanner
- -- local fullbanner = gsub(tex.pdftexbanner,"kpse.*","")
-
- addxmpinfo("DocumentID", documentid)
- addxmpinfo("InstanceID", instanceid)
- addxmpinfo("Producer", producer)
- addxmpinfo("CreatorTool", creator)
- addxmpinfo("CreateDate", time)
- addxmpinfo("ModifyDate", time)
- addxmpinfo("MetadataDate", time)
- addxmpinfo("PTEX.Fullbanner", fullbanner)
-
- addtoinfo("Producer", producer)
- addtoinfo("Creator", creator)
- addtoinfo("CreationDate", time)
- addtoinfo("ModDate", time)
--- addtoinfo("PTEX.Fullbanner", fullbanner) -- no checking done on existence
+ local fullbanner = status.banner
+
+ pdfaddxmpinfo("DocumentID", documentid)
+ pdfaddxmpinfo("InstanceID", instanceid)
+ pdfaddxmpinfo("Producer", producer)
+ pdfaddxmpinfo("CreatorTool", creator)
+ pdfaddxmpinfo("CreateDate", time)
+ pdfaddxmpinfo("ModifyDate", time)
+ pdfaddxmpinfo("MetadataDate", time)
+ pdfaddxmpinfo("PTEX.Fullbanner", fullbanner)
+
+ pdfaddtoinfo("Producer", producer)
+ pdfaddtoinfo("Creator", creator)
+ pdfaddtoinfo("CreationDate", time)
+ pdfaddtoinfo("ModDate", time)
+ -- pdfaddtoinfo("PTEX.Fullbanner", fullbanner) -- no checking done on existence
local blob = xml.tostring(xml.first(xmp or valid_xmp(),"/x:xmpmeta"))
local md = pdfdictionary {
@@ -196,7 +202,7 @@ local function flushxmpinfo()
local r = pdfflushstreamobject(blob,md,false) -- uncompressed
lpdf.addtocatalog("Metadata",pdfreference(r))
- commands.defrostrandomseed() -- hack
+ commands.poprandomseed() -- hack
end
-- This will be enabled when we can inhibit compression for a stream at the lua end
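
Aside (not part of the patch): the packet id that flushxmpinfo builds is simply 24 random lowercase letters generated after reseeding the random generator. A minimal standalone equivalent:

-- editor's sketch of the packet id construction shown above
math.randomseed(os.time())

local t = { }
for i = 1, 24 do
    t[i] = string.char(96 + math.random(26)) -- 97..122 is 'a'..'z'
end
local packetid = table.concat(t)

print(#packetid, packetid)
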
diff --git a/tex/context/base/luat-bas.mkiv b/tex/context/base/luat-bas.mkiv
index a38912716..cb00d8f55 100644
--- a/tex/context/base/luat-bas.mkiv
+++ b/tex/context/base/luat-bas.mkiv
@@ -13,7 +13,8 @@
\writestatus{loading}{ConTeXt Lua Macros / Basic Lua Libraries}
-\registerctxluafile{l-lua} {1.001}
+\registerctxluafile{l-lua} {1.001} % before sandbox
+\registerctxluafile{l-sandbox} {1.001}
\registerctxluafile{l-package} {1.001}
\registerctxluafile{l-lpeg} {1.001}
\registerctxluafile{l-function}{1.001}
diff --git a/tex/context/base/luat-cbk.lua b/tex/context/base/luat-cbk.lua
index 4f044f9ac..8c224ad2c 100644
--- a/tex/context/base/luat-cbk.lua
+++ b/tex/context/base/luat-cbk.lua
@@ -118,7 +118,7 @@ end
function callbacks.freeze(name,freeze)
freeze = type(freeze) == "string" and freeze
- if find(name,"%*") then
+ if find(name,"*",1,true) then
local pattern = name
for name, _ in next, list do
if find(name,pattern) then
diff --git a/tex/context/base/luat-cnf.lua b/tex/context/base/luat-cnf.lua
index 3672c603e..0f6b8598f 100644
--- a/tex/context/base/luat-cnf.lua
+++ b/tex/context/base/luat-cnf.lua
@@ -7,7 +7,9 @@ if not modules then modules = { } end modules ['luat-cnf'] = {
}
local type, next, tostring, tonumber = type, next, tostring, tonumber
-local format, concat, find = string.format, table.concat, string.find
+local format, concat, find, lower, gsub = string.format, table.concat, string.find, string.lower, string.gsub
+
+local report = logs.reporter("system")
local allocate = utilities.storage.allocate
@@ -17,36 +19,22 @@ texconfig.shell_escape = 't'
luatex = luatex or { }
local luatex = luatex
-texconfig.error_line = 79 -- 79 -- obsolete
-texconfig.half_error_line = 50 -- 50 -- obsolete
+texconfig.max_print_line = 100000 -- frozen
+texconfig.max_in_open = 127 -- frozen
+texconfig.error_line = 79 -- frozen
+texconfig.half_error_line = 50 -- frozen
texconfig.expand_depth = 10000 -- 10000
texconfig.hash_extra = 100000 -- 0
texconfig.nest_size = 1000 -- 50
-texconfig.max_in_open = 500 -- 15
+texconfig.max_in_open = 500 -- 15 -- in fact it's limited to 127
texconfig.max_print_line = 10000 -- 79
texconfig.max_strings = 500000 -- 15000
texconfig.param_size = 25000 -- 60
texconfig.save_size = 50000 -- 4000
+texconfig.save_size = 100000 -- 4000
texconfig.stack_size = 10000 -- 300
--- local function initialize()
--- local t, variable = allocate(), resolvers.variable
--- for name, default in next, variablenames do
--- local name = variablenames[i]
--- local value = variable(name)
--- value = tonumber(value)
--- if not value or value == "" or value == 0 then
--- value = default
--- end
--- texconfig[name], t[name] = value, value
--- end
--- initialize = nil
--- return t
--- end
---
--- luatex.variables = initialize()
-
local stub = [[
-- checking
@@ -76,7 +64,7 @@ function texconfig.init()
"string", "table", "coroutine", "debug", "file", "io", "lpeg", "math", "os", "package", "bit32",
},
basictex = { -- noad
- "callback", "font", "img", "lang", "lua", "node", "pdf", "status", "tex", "texconfig", "texio", "token",
+ "callback", "font", "img", "lang", "lua", "node", "pdf", "status", "tex", "texconfig", "texio", "token", "newtoken"
},
extralua = {
"gzip", "zip", "zlib", "lfs", "ltn12", "mime", "socket", "md5", "profiler", "unicode", "utf",
@@ -87,6 +75,7 @@ function texconfig.init()
obsolete = {
"fontforge", -- can be filled by luat-log
"kpse",
+ "token",
},
functions = {
"assert", "pcall", "xpcall", "error", "collectgarbage",
@@ -134,13 +123,14 @@ function texconfig.init()
-- shortcut and helper
+ local bytecode = lua.bytecode
+
local function init(start)
- local b = lua.bytecode
local i = start
local t = os.clock()
- while b[i] do
- b[i]() ;
- b[i] = nil ;
+ while bytecode[i] do
+ bytecode[i]() ;
+ bytecode[i] = nil ;
i = i + 1
-- collectgarbage('step')
end
@@ -159,6 +149,8 @@ function texconfig.init()
end
end
+ texconfig.init = function() end
+
end
-- we provide a qualified path
@@ -172,26 +164,55 @@ end)
]]
local variablenames = {
- "error_line", "half_error_line",
- "expand_depth", "hash_extra", "nest_size",
- "max_in_open", "max_print_line", "max_strings",
- "param_size", "save_size", "stack_size",
+ error_line = false,
+ half_error_line = false,
+ max_print_line = false,
+ max_in_open = false,
+ expand_depth = true,
+ hash_extra = true,
+ nest_size = true,
+ max_strings = true,
+ param_size = true,
+ save_size = true,
+ stack_size = true,
}
local function makestub()
name = name or (environment.jobname .. ".lui")
+ report("creating stub file %a using directives:",name)
+ report()
firsttable = firsttable or lua.firstbytecode
local t = {
"-- this file is generated, don't change it\n",
"-- configuration (can be overloaded later)\n"
}
- for _,v in next, variablenames do
+ for v, permitted in table.sortedhash(variablenames) do
+ local d = "luatex." .. gsub(lower(v),"[^%a]","")
+ local dv = directives.value(d)
local tv = texconfig[v]
- if tv and tv ~= "" then
+ if dv then
+ if not tv then
+ report(" %s = %s (%s)",d,dv,"configured")
+ tv = dv
+ elseif not permitted then
+ report(" %s = %s (%s)",d,tv,"frozen")
+ elseif tonumber(dv) >= tonumber(tv) then
+ report(" %s = %s (%s)",d,dv,"overloaded")
+ tv = dv
+ else
+ report(" %s = %s (%s)",d,tv,"preset kept")
+ end
+ elseif tv then
+ report(" %s = %s (%s)",d,tv,permitted and "preset" or "frozen")
+ else
+ report(" %s = ",d)
+ end
+ if tv then
t[#t+1] = format("texconfig.%s=%s",v,tv)
end
end
io.savedata(name,format("%s\n\n%s",concat(t,"\n"),format(stub,firsttable)))
+ logs.newline()
end
lua.registerfinalizer(makestub,"create stub file")
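
Aside (not part of the patch): the reporting loop in makestub boils down to one small decision per variable. The sketch below isolates that decision; resolve_value and the sample values are hypothetical, only the frozen / overloaded / preset-kept rules come from the code above.

-- editor's sketch of the directive versus preset decision
local variablenames = {
    error_line = false, -- false: frozen, a directive cannot override it
    save_size  = true,  -- true: a directive may raise the preset
}

local function resolve_value(name, preset, directive)
    local permitted = variablenames[name]
    if directive then
        if not preset then
            return directive, "configured"
        elseif not permitted then
            return preset, "frozen"
        elseif tonumber(directive) >= tonumber(preset) then
            return directive, "overloaded"
        else
            return preset, "preset kept"
        end
    end
    return preset, permitted and "preset" or "frozen"
end

print(resolve_value("save_size",  50000, 100000)) -- 100000  overloaded
print(resolve_value("error_line", 79,    200))    -- 79      frozen
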
diff --git a/tex/context/base/luat-cod.lua b/tex/context/base/luat-cod.lua
index 8b015477f..c436ee6d7 100644
--- a/tex/context/base/luat-cod.lua
+++ b/tex/context/base/luat-cod.lua
@@ -51,6 +51,9 @@ function lua.registercode(filename,version)
bytecode[n] = code
lua.lastbytecode = n
end
+ elseif environment.initex then
+ texio.write_nl("\nerror loading file: " .. filename .. " (aborting)")
+ os.exit()
end
end
end
@@ -85,7 +88,7 @@ local environment = environment
-- no string.unquoted yet
local sourcefile = gsub(arg and arg[1] or "","^\"(.*)\"$","%1")
-local sourcepath = find(sourcefile,"/") and gsub(sourcefile,"/[^/]+$","") or ""
+local sourcepath = find(sourcefile,"/",1,true) and gsub(sourcefile,"/[^/]+$","") or ""
local targetpath = "."
-- delayed (via metatable):
@@ -159,7 +162,7 @@ local function target_file(name)
return targetpath .. "/" .. name
end
-local function find_read_file (id,name)
+local function find_read_file(id,name)
return source_file(name)
end
diff --git a/tex/context/base/luat-env.lua b/tex/context/base/luat-env.lua
index 5558e0303..5f2a0d281 100644
--- a/tex/context/base/luat-env.lua
+++ b/tex/context/base/luat-env.lua
@@ -102,14 +102,20 @@ function environment.luafilechunk(filename,silent) -- used for loading lua bytec
local fullname = environment.luafile(filename)
if fullname and fullname ~= "" then
local data = luautilities.loadedluacode(fullname,strippable,filename) -- can be overloaded
- if trace_locating then
+-- if trace_locating then
+-- report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
+-- elseif not silent then
+-- texio.write("<",data and "+ " or "- ",fullname,">")
+-- end
+ if not silent then
report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
- elseif not silent then
- texio.write("<",data and "+ " or "- ",fullname,">")
end
return data
else
- if trace_locating then
+-- if trace_locating then
+-- report_lua("unknown file %a",filename)
+-- end
+ if not silent then
report_lua("unknown file %a",filename)
end
return nil
diff --git a/tex/context/base/luat-exe.lua b/tex/context/base/luat-exe.lua
index a57a5a006..d8d954a30 100644
--- a/tex/context/base/luat-exe.lua
+++ b/tex/context/base/luat-exe.lua
@@ -6,121 +6,68 @@ if not modules then modules = { } end modules ['luat-exe'] = {
license = "see context related readme files"
}
--- this module needs checking (very old and never really used, not even enabled)
+if not sandbox then require("l-sandbox") require("util-sbx") end -- for testing
-local match, find, gmatch = string.match, string.find, string.gmatch
-local concat = table.concat
-local select = select
+local type = type
-local report_executers = logs.reporter("system","executers")
+local executers = resolvers.executers or { }
+resolvers.executers = executers
-resolvers.executers = resolvers.executers or { }
-local executers = resolvers.executers
+local disablerunners = sandbox.disablerunners
+local registerbinary = sandbox.registerbinary
+local registerroot = sandbox.registerroot
-local permitted = { }
+local lpegmatch = lpeg.match
-local osexecute = os.execute
-local osexec = os.exec
-local osspawn = os.spawn
-local iopopen = io.popen
+local sc_splitter = lpeg.tsplitat(";")
+local cm_splitter = lpeg.tsplitat(",")
-local execute = osexecute
-local exec = osexec
-local spawn = osspawn
-local popen = iopopen
-
-local function register(...)
- for k=1,select("#",...) do
- local v = select(k,...)
- permitted[#permitted+1] = v == "*" and ".*" or v
- end
-end
+local execution_mode directives.register("system.executionmode", function(v) execution_mode = v end)
+local execution_list directives.register("system.executionlist", function(v) execution_list = v end)
+local root_list directives.register("system.rootlist", function(v) root_list = v end)
-local function prepare(...)
- -- todo: make more clever first split
- local t = { ... }
- local n = #n
- local one = t[1]
- if n == 1 then
- if type(one) == 'table' then
- return one, concat(t," ",2,n)
- else
- local name, arguments = match(one,"^(.-)%s+(.+)$")
- if name and arguments then
- return name, arguments
- else
- return one, ""
+sandbox.initializer(function()
+ if execution_mode == "none" then
+ -- will be done later
+ elseif execution_mode == "list" then
+ if type(execution_list) == "string" then
+ execution_list = lpegmatch(cm_splitter,execution_list)
+ end
+ if type(execution_list) == "table" then
+ for i=1,#execution_list do
+ registerbinary(execution_list[i])
end
end
else
- return one, concat(t," ",2,n)
+ -- whatever else we have configured
end
-end
+end)
-local function executer(action)
- return function(...)
- local name, arguments = prepare(...)
- for k=1,#permitted do
- local v = permitted[k]
- if find(name,v) then
- return action(name .. " " .. arguments)
- else
- report_executers("not permitted: %s %s",name,arguments)
+sandbox.initializer(function()
+ if type(root_list) == "string" then
+ root_list = lpegmatch(sc_splitter,root_list)
+ end
+ if type(root_list) == "table" then
+ for i=1,#root_list do
+ local entry = root_list[i]
+ if entry ~= "" then
+ registerroot(entry)
end
end
- return action("")
end
-end
+end)
-local function finalize() -- todo: os.exec, todo: report ipv print
- execute = executer(osexecute)
- exec = executer(osexec)
- spawn = executer(osspawn)
- popen = executer(iopopen)
- finalize = function()
- report_executers("already finalized")
- end
- register = function()
- report_executers("already finalized, no registration permitted")
- end
- os.execute = execute
- os.exec = exec
- os.spawn = spawn
- io.popen = popen
-end
-
-executers.finalize = function(...) return finalize(...) end
-executers.register = function(...) return register(...) end
-executers.execute = function(...) return execute (...) end
-executers.exec = function(...) return exec (...) end
-executers.spawn = function(...) return spawn (...) end
-executers.popen = function(...) return popen (...) end
-
-local execution_mode directives.register("system.executionmode", function(v) execution_mode = v end)
-local execution_list directives.register("system.executionlist", function(v) execution_list = v end)
-
-function executers.check()
+sandbox.finalizer(function()
if execution_mode == "none" then
- finalize()
- elseif execution_mode == "list" and execution_list ~= "" then
- for s in gmatch("[^%s,]",execution_list) do
- register(s)
- end
- finalize()
- else
- -- all
+ disablerunners()
end
-end
-
---~ resolvers.executers.register('.*')
---~ resolvers.executers.register('*')
---~ resolvers.executers.register('dir','ls')
---~ resolvers.executers.register('dir')
+end)
---~ resolvers.executers.finalize()
---~ resolvers.executers.execute('dir',"*.tex")
---~ resolvers.executers.execute("dir *.tex")
---~ resolvers.executers.execute("ls *.tex")
---~ os.execute('ls')
+-- Let's prevent abuse of these libraries (built-in support still works).
---~ resolvers.executers.check()
+sandbox.finalizer(function()
+ mplib = nil
+ epdf = nil
+ zip = nil
+ fontloader = nil
+end)
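
Aside (not part of the patch): a rough illustration of how a comma separated system.executionlist directive could be expanded before each entry is handed to sandbox.registerbinary. Here split_list is a plain-string stand-in for the lpeg.tsplitat splitter used in the module.

-- editor's sketch, approximating the lpeg splitter with gmatch
local function split_list(str, separator)
    local result = { }
    for entry in (str .. separator):gmatch("([^" .. separator .. "]*)" .. separator) do
        if entry ~= "" then
            result[#result+1] = entry
        end
    end
    return result
end

local execution_list = split_list("curl,gs,pdftoppm", ",")
for i = 1, #execution_list do
    -- in the real code: registerbinary(execution_list[i])
    print("registering runner:", execution_list[i])
end
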
diff --git a/tex/context/base/luat-fmt.lua b/tex/context/base/luat-fmt.lua
index 20a4a8fcd..92c1dd6c4 100644
--- a/tex/context/base/luat-fmt.lua
+++ b/tex/context/base/luat-fmt.lua
@@ -95,7 +95,7 @@ function environment.make_format(name)
-- generate format
local command = format("%s --ini %s --lua=%s %s %sdump",engine,primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform == "unix" and "\\\\" or "\\")
report_format("running command: %s\n",command)
- os.spawn(command)
+ os.execute(command)
-- remove related mem files
local pattern = file.removesuffix(file.basename(usedluastub)).."-*.mem"
-- report_format("removing related mplib format with pattern %a", pattern)
@@ -133,7 +133,7 @@ function environment.run_format(name,data,more)
else
local command = format("%s %s --fmt=%s --lua=%s %s %s",engine,primaryflags(),quoted(barename),quoted(luaname),quoted(data),more ~= "" and quoted(more) or "")
report_format("running command: %s",command)
- os.spawn(command)
+ os.execute(command)
end
end
end
diff --git a/tex/context/base/luat-ini.lua b/tex/context/base/luat-ini.lua
index 587214b93..34e83e7bb 100644
--- a/tex/context/base/luat-ini.lua
+++ b/tex/context/base/luat-ini.lua
@@ -6,14 +6,6 @@ if not modules then modules = { } end modules ['luat-ini'] = {
license = "see context related readme files"
}
--- rather experimental down here ... adapted to lua 5.2 ... but still
--- experimental
-
-local debug = require("debug")
-
-local string, table, lpeg, math, io, system = string, table, lpeg, math, io, system
-local rawset, rawget, next, setmetatable = rawset, rawget, next, setmetatable
-
--[[ldx--
We cannot load anything yet. However what we will do us reserve a few tables.
These can be used for runtime user data or third party modules and will not be
@@ -29,178 +21,6 @@ parametersets = parametersets or { } -- experimental for team
table.setmetatableindex(moduledata,table.autokey)
table.setmetatableindex(thirddata, table.autokey)
---[[ldx--
-
Please create a namespace within these tables before using them!
-
-
-userdata ['my.name'] = { }
-thirddata['tricks' ] = { }
-
---ldx]]--
-
---[[ldx--
-We could cook up a readonly model for global tables but it makes more sense
-to invite users to use one of the predefined namespaces. One can redefine the
-protector. After all, it's just a lightweight suggestive system, not a
-watertight one.
---ldx]]--
-
-local global = _G
-global.global = global
-
-local dummy = function() end
-
---[[ldx--
-Another approach is to freeze tables by using a metatable, this will be
-implemented stepwise.
---ldx]]--
-
--- moduledata : no need for protection (only for developers)
--- isolatedata : full protection
--- userdata : protected
--- thirddata : protected
-
---[[ldx--
-We could have a metatable that automaticaly creates a top level namespace.
---ldx]]--
-
-local luanames = lua.name -- luatex itself
-
-lua.numbers = lua.numbers or { } local numbers = lua.numbers
-lua.messages = lua.messages or { } local messages = lua.messages
-
-storage.register("lua/numbers", numbers, "lua.numbers" )
-storage.register("lua/messages", messages, "lua.messages")
-
-local setfenv = setfenv or debug.setfenv -- < 5.2
-
-if setfenv then
-
- local protected = {
- -- global table
- global = global,
- -- user tables
- -- moduledata = moduledata,
- userdata = userdata,
- thirddata = thirddata,
- documentdata = documentdata,
- -- reserved
- protect = dummy,
- unprotect = dummy,
- -- luatex
- tex = tex,
- -- lua
- string = string,
- table = table,
- lpeg = lpeg,
- math = math,
- io = io,
- file = file,
- bit32 = bit32,
- --
- context = context,
- }
-
- local protect_full = function(name)
- local t = { }
- for k, v in next, protected do
- t[k] = v
- end
- return t
- end
-
- local protect_part = function(name) -- adds
- local t = rawget(global,name)
- if not t then
- t = { }
- for k, v in next, protected do
- t[k] = v
- end
- rawset(global,name,t)
- end
- return t
- end
-
- protect = function(name)
- if name == "isolateddata" then
- setfenv(2,protect_full(name))
- else
- setfenv(2,protect_part(name or "shareddata"))
- end
- end
-
- function lua.registername(name,message)
- local lnn = lua.numbers[name]
- if not lnn then
- lnn = #messages + 1
- messages[lnn] = message
- numbers[name] = lnn
- end
- luanames[lnn] = message
- context(lnn)
- -- initialize once
- if name ~= "isolateddata" then
- protect_full(name or "shareddata")
- end
- end
-
-elseif libraries then -- assume >= 5.2
-
- local shared
-
- protect = function(name)
- if not shared then
- -- e.g. context is not yet known
- local public = {
- global = global,
- -- moduledata = moduledata,
- userdata = userdata,
- thirddata = thirddata,
- documentdata = documentdata,
- protect = dummy,
- unprotect = dummy,
- context = context,
- }
- --
- for k, v in next, libraries.builtin do public[k] = v end
- for k, v in next, libraries.functions do public[k] = v end
- for k, v in next, libraries.obsolete do public[k] = nil end
- --
- shared = { __index = public }
- protect = function(name)
- local t = global[name] or { }
- setmetatable(t,shared) -- set each time
- return t
- end
- end
- return protect(name)
- end
-
- function lua.registername(name,message)
- local lnn = lua.numbers[name]
- if not lnn then
- lnn = #messages + 1
- messages[lnn] = message
- numbers[name] = lnn
- end
- luanames[lnn] = message
- context(lnn)
- end
-
-else
-
- protect = dummy
-
- function lua.registername(name,message)
- local lnn = lua.numbers[name]
- if not lnn then
- lnn = #messages + 1
- messages[lnn] = message
- numbers[name] = lnn
- end
- luanames[lnn] = message
- context(lnn)
- end
-
+if not global then
+ global = _G
end
-
diff --git a/tex/context/base/luat-ini.mkiv b/tex/context/base/luat-ini.mkiv
index a3a590311..b455a4158 100644
--- a/tex/context/base/luat-ini.mkiv
+++ b/tex/context/base/luat-ini.mkiv
@@ -34,7 +34,7 @@
% the \type {\normalexpanded} around \type {\directlua}. Something to discuss
% in the team.
-\unexpanded\def\startlua % \stoplua
+\normalprotected\def\startlua % \stoplua
{\begingroup
\obeylualines
\luat_start_lua_indeed}
@@ -42,7 +42,7 @@
\def\luat_start_lua_indeed#1\stoplua
{\normalexpanded{\endgroup\noexpand\directlua{#1}}} % \zerocount is default
-\unexpanded\def\startluacode % \stopluacode
+\normalprotected\def\startluacode % \stopluacode
{\begingroup
\obeylualines
\obeyluatokens
@@ -77,7 +77,7 @@
\edef\lua_letter_seven {\string\7} \edef\lua_letter_eight {\string\8}
\edef\lua_letter_nine {\string\9} \edef\lua_letter_zero {\string\0}
-\appendtoks
+\everyluacode {% \appendtoks
\let\\\lua_letter_backslash
\let\|\lua_letter_bar \let\-\lua_letter_dash
\let\(\lua_letter_lparent \let\)\lua_letter_rparent
@@ -92,85 +92,15 @@
\let\5\lua_letter_five \let\6\lua_letter_six
\let\7\lua_letter_seven \let\8\lua_letter_eight
\let\9\lua_letter_nine \let\0\lua_letter_zero
-\to \everyluacode
+} % \to \everyluacode
-\unexpanded\def\obeyluatokens
+\normalprotected\def\obeyluatokens
{\setcatcodetable\luacatcodes
\the\everyluacode}
-
\edef\luamajorversion{\ctxwrite{_MAJORVERSION}}
\edef\luaminorversion{\ctxwrite{_MINORVERSION}}
-%D \macros
-%D {definenamedlua}
-%D
-%D We provide an interface for defining instances:
-
-\def\s!lua{lua} \def\v!code{code} \def\!!name{name} \def\s!data{data}
-
-%D Beware: because \type {\expanded} is een convert command, the error
-%D message will show \type{} as part of the message.
-
-\installcorenamespace{luacode}
-
-\unexpanded\def\luat_start_named_lua_code#1%
- {\begingroup
- \obeylualines
- \obeyluatokens
- \csname\??luacode#1\endcsname}
-
-\unexpanded\def\definenamedlua[#1]#2[#3]% no optional arg handling here yet / we could use numbers instead (more efficient)
- {\ifcsname\??luacode#1\endcsname \else
- \scratchcounter\ctxlua{lua.registername("#1","#3")}%
- \normalexpanded{\xdef\csname\??luacode#1\endcsname##1\csname\e!stop#1\v!code\endcsname}%
- %{\endgroup\noexpand\directlua\the\scratchcounter{local _ENV=protect("#1\s!data")##1}}%
- {\noexpand\normalexpanded{\endgroup\noexpand\directlua\the\scratchcounter{local _ENV=protect("#1\s!data")##1}}}%
- \expandafter\edef\csname\e!start#1\v!code\endcsname {\luat_start_named_lua_code{#1}}%
- \expandafter\edef\csname #1\v!code\endcsname##1{\noexpand\directlua\the\scratchcounter{local _ENV=protect("#1\s!data")##1}}%
- \fi}
-
-%D We predefine a few.
-
-% \definenamedlua[module][module instance] % not needed
-
-\definenamedlua[user] [private user instance]
-\definenamedlua[third] [third party module instance]
-\definenamedlua[isolated][isolated instance]
-
-%D In practice this works out as follows:
-%D
-%D \startbuffer
-%D \startluacode
-%D context("LUA")
-%D \stopluacode
-%D
-%D \startusercode
-%D global.context("USER 1")
-%D context.par()
-%D context("USER 2")
-%D context.par()
-%D if characters then
-%D context("ACCESS directly")
-%D elseif global.characters then
-%D context("ACCESS via global")
-%D else
-%D context("NO ACCESS at all")
-%D end
-%D context.par()
-%D if bogus then
-%D context("ACCESS directly")
-%D elseif global.bogus then
-%D context("ACCESS via global")
-%D else
-%D context("NO ACCESS at all")
-%D end
-%D context.par()
-%D \stopusercode
-%D \stopbuffer
-%D
-%D \typebuffer
-
%D We need a way to pass strings safely to \LUA\ without the
%D need for tricky escaping. Compare:
%D
@@ -188,23 +118,28 @@
%D for it:
%D
%D \starttyping
-%D \long\edef\luaescapestring#1{\!!bs#1\!!es}
+%D \edef\luaescapestring#1{\!!bs#1\!!es}
%D \stoptyping
-\def\setdocumentfilename #1#2{\ctxlua{document.setfilename(#1,"#2")}}
-\def\setdocumentargument #1#2{\ctxlua{document.setargument("#1","#2")}}
-\def\setdocumentargumentdefault#1#2{\ctxlua{document.setdefaultargument("#1","#2")}}
-\def\getdocumentfilename #1{\ctxlua{document.getfilename("#1")}}
-\def\getdocumentargument #1{\ctxlua{document.getargument("#1")}}
-\def\getdocumentargumentdefault#1#2{\ctxlua{document.getargument("#1","#2")}}
-\def\doifdocumentargumentelse #1{\doifsomethingelse{\getdocumentargument{#1}}}
-\def\doifdocumentargument #1{\doifsomething {\getdocumentargument{#1}}}
-\def\doifnotdocumentargument #1{\doifnothing {\getdocumentargument{#1}}}
-\def\doifdocumentfilenameelse #1{\doifsomethingelse{\getdocumentfilename{#1}}}
-\def\doifdocumentfilename #1{\doifsomething {\getdocumentfilename{#1}}}
-\def\doifnotdocumentfilename #1{\doifnothing {\getdocumentfilename{#1}}}
-
-\let\doifelsedocumentargument\doifdocumentargumentelse
+\def\setdocumentfilename #1#2{\clf_setdocumentfilename\numexpr#1\relax{#2}}
+\def\setdocumentargument #1#2{\clf_setdocumentargument{#1}{#2}}
+\def\setdocumentargumentdefault#1#2{\clf_setdocumentdefaultargument{#1}{#2}}
+\def\getdocumentfilename #1{\clf_getdocumentfilename\numexpr#1\relax}
+\def\getdocumentargument #1{\clf_getdocumentargument{#1}{}}
+\def\setdocumentargument #1#2{\clf_setdocumentargument{#1}{#2}}
+\def\getdocumentargumentdefault#1#2{\clf_getdocumentargument{#1}{#2}}
+
+% seldom used so no need for speedy variants:
+
+\def\doifelsedocumentargument #1{\doifelsesomething{\clf_getdocumentargument{#1}}}
+\def\doifdocumentargument #1{\doifsomething {\clf_getdocumentargument{#1}}}
+\def\doifnotdocumentargument #1{\doifnothing {\clf_getdocumentargument{#1}}}
+\def\doifelsedocumentfilename #1{\doifelsesomething{\clf_getdocumentfilename\numexpr#1\relax}}
+\def\doifdocumentfilename #1{\doifsomething {\clf_getdocumentfilename\numexpr#1\relax}}
+\def\doifnotdocumentfilename #1{\doifnothing {\clf_getdocumentfilename\numexpr#1\relax}}
+
+\let\doifdocumentargumentelse\doifelsedocumentargument
+\let\doifdocumentfilenameelse\doifelsedocumentfilename
%D A handy helper:
@@ -212,7 +147,7 @@
%D Experimental:
-\unexpanded\def\startluaparameterset[#1]%
+\normalprotected\def\startluaparameterset[#1]%
{\begingroup
\obeylualines
\obeyluatokens
@@ -251,7 +186,7 @@
%D \ctxluacode{context("%0.5f",1/3)}
%D \stoptyping
-\unexpanded\def\ctxluacode
+\normalprotected\def\ctxluacode
{\begingroup
\obeylualines
\obeyluatokens
@@ -266,4 +201,83 @@
\def\luat_lua_code
{\normalexpanded{\endgroup\noexpand\directlua\expandafter{\the\scratchtoks}}} % \zerocount is default
+% \startctxfunction MyFunctionA
+% context(" A1 ")
+% \stopctxfunction
+%
+% \startctxfunctiondefinition MyFunctionB
+% context(" B2 ")
+% \stopctxfunctiondefinition
+%
+% \starttext
+% \dorecurse{10000}{\ctxfunction{MyFunctionA}} \page
+% \dorecurse{10000}{\MyFunctionB} \page
+% \dorecurse{10000}{\ctxlua{context(" C3 ")}} \page
+% \stoptext
+
+\installsystemnamespace{ctxfunction}
+
+\normalprotected\def\startctxfunctiondefinition #1 %
+ {\begingroup \obeylualines \obeyluatokens \luat_start_lua_function_definition_indeed{#1}}
+
+\installsystemnamespace{luafunction}
+
+\def\luat_start_lua_function_definition_indeed#1#2\stopctxfunctiondefinition
+ {\endgroup
+ \expandafter\chardef\csname\??luafunction#1\endcsname\ctxcommand{ctxfunction(\!!bs#2\!!es)}\relax
+ \expandafter\edef\csname#1\endcsname{\noexpand\luafunction\csname\??luafunction#1\endcsname}}
+
+\normalprotected\def\setctxluafunction#1#2% experiment
+ {\expandafter\chardef\csname\??luafunction#1\endcsname#2\relax
+ \expandafter\edef\csname#1\endcsname{\noexpand\luafunction\csname\??luafunction#1\endcsname}}
+
+\let\stopctxfunctiondefinition\relax
+
+\normalprotected\def\startctxfunction #1 %
+ {\begingroup \obeylualines \obeyluatokens \luat_start_lua_function_indeed{#1}}
+
+\def\luat_start_lua_function_indeed#1#2\stopctxfunction
+ {\endgroup\expandafter\edef\csname\??ctxfunction#1\endcsname{\noexpand\luafunction\ctxcommand{ctxfunction(\!!bs#2\!!es)}\relax}}
+
+\let\stopctxfunction\relax
+
+\def\ctxfunction#1%
+ {\csname\??ctxfunction#1\endcsname}
+
+% In theory this is faster due to the call not being wrapped in a function but in
+% practice the speedup can't be noticed. The actions called for often have lots of
+% lookups so an extra one doesn't matter much. The kind of calls differs a lot per
+% document and often there are other ways to optimize a style. For instance we can
+% gain a lot when defining a font, but when a frozen definition is used that gain
+% gets completely lost. For some calls (take list writers) it can get worse if only
+% because readability gets worse and passing is already efficient due to selective
+% flushing, while with the token scanners one has to scan all of them.
+
+% \startctxfunctiondefinition foo commands.foo() \stopctxfunctiondefinition
+%
+% \installctxfunction\foo{commands.foo}
+
+% This is a forward definition:
+
+\def\checkedstrippedcsname#1% this permits \strippedcsname{\xxx} and \strippedcsname{xxx}
+ {\expandafter\syst_helpers_checked_stripped_csname\string#1}
+
+\def\syst_helpers_checked_stripped_csname#1%
+ {\if\noexpand#1\letterbackslash\else#1\fi}
+
+\normalprotected\def\installctxfunction#1#2%
+ {\edef\m_syst_name{\checkedstrippedcsname#1}%
+ \global\expandafter\chardef\csname\??luafunction\m_syst_name\endcsname\ctxcommand{ctxfunction("#2",true)}\relax
+ \expandafter\xdef\csname\m_syst_name\endcsname{\noexpand\luafunction\csname\??luafunction\m_syst_name\endcsname}}
+
+\normalprotected\def\installctxscanner#1#2%
+ {\edef\m_syst_name{\checkedstrippedcsname#1}%
+ \global\expandafter\chardef\csname\??luafunction\m_syst_name\endcsname\ctxcommand{ctxscanner("\m_syst_name","#2",true)}\relax
+ \expandafter\xdef\csname\m_syst_name\endcsname{\noexpand\luafunction\csname\??luafunction\m_syst_name\endcsname}}
+
+\normalprotected\def\resetctxscanner#1%
+ {\edef\m_syst_name{\checkedstrippedcsname#1}%
+ \global\expandafter\chardef\csname\??luafunction\m_syst_name\endcsname\zerocount
+ \global\expandafter\let\csname\m_syst_name\endcsname\relax}
+
\protect \endinput
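
Aside (not part of the patch): the \installctxfunction / \luafunction pair relies on compiling a chunk once, storing it under a number, and calling it later by that number. The registry below is a stripped-down illustration of that idea in plain Lua, not the actual ctxcommand/ctxfunction implementation.

-- editor's sketch of a numbered function registry
local registry = { }

local function registerfunction(code)
    local chunk = assert(load(code))
    registry[#registry+1] = chunk
    return #registry -- the index that TeX would keep in a \chardef
end

local function callfunction(index)
    local chunk = registry[index]
    if chunk then
        chunk()
    end
end

local n = registerfunction([[print(" A1 ")]])
for i = 1, 3 do
    callfunction(n) -- compiled once, called many times
end
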
diff --git a/tex/context/base/luat-iop.lua b/tex/context/base/luat-iop.lua
index 52f14683e..e1772af4e 100644
--- a/tex/context/base/luat-iop.lua
+++ b/tex/context/base/luat-iop.lua
@@ -6,190 +6,19 @@ if not modules then modules = { } end modules ['luat-iop'] = {
license = "see context related readme files"
}
--- this paranoid stuff in web2c ... we cannot hook checks into the
--- input functions because one can always change the callback but
--- we can feed back specific patterns and paths into the next
--- mechanism
-
--- os.execute os.exec os.spawn io.fopen
--- os.remove lfs.chdir lfs.mkdir
--- io.open zip.open epdf.open mlib.new
-
--- cache
-
-local topattern, find = string.topattern, string.find
-
-local report_limiter = logs.reporter("system","limiter")
-
--- the basic methods
-
-local function match(ruleset,name)
- local n = #ruleset
- if n > 0 then
- for i=1,n do
- local r = ruleset[i]
- if find(name,r[1]) then
- return r[2]
- end
- end
- return false
- else
- -- nothing defined (or any)
- return true
- end
-end
-
-local function protect(ruleset,proc)
- return function(name,...)
- if name == "" then
- -- report_limiter("no access permitted: ") -- can happen in mplib code
- return nil, "no name given"
- elseif match(ruleset,name) then
- return proc(name,...)
- else
- report_limiter("no access permitted for %a",name)
- return nil, name .. ": no access permitted"
- end
- end
-end
-
-function io.limiter(preset)
- preset = preset or { }
- local ruleset = { }
- for i=1,#preset do
- local p = preset[i]
- local what, spec = p[1] or "", p[2] or ""
- if spec == "" then
- -- skip 'm
- elseif what == "tree" then
- resolvers.dowithpath(spec, function(r)
- local spec = resolvers.resolve(r) or ""
- if spec ~= "" then
- ruleset[#ruleset+1] = { topattern(spec,true), true }
- end
- end)
- elseif what == "permit" then
- ruleset[#ruleset+1] = { topattern(spec,true), true }
- elseif what == "forbid" then
- ruleset[#ruleset+1] = { topattern(spec,true), false }
- end
- end
- if #ruleset > 0 then
- return {
- match = function(name) return match (ruleset,name) end,
- protect = function(proc) return protect(ruleset,proc) end,
- }
- else
- return {
- match = function(name) return true end,
- protect = proc,
- }
- end
-end
-
--- a few handlers
-
-io.i_limiters = { }
-io.o_limiters = { }
-
-function io.i_limiter(v)
- local i = io.i_limiters[v]
- if i then
- local i_limiter = io.limiter(i)
- function io.i_limiter()
- return i_limiter
- end
- return i_limiter
- end
-end
-
-function io.o_limiter(v)
- local o = io.o_limiters[v]
- if o then
- local o_limiter = io.limiter(o)
- function io.o_limiter()
- return o_limiter
- end
- return o_limiter
- end
-end
-
--- the real thing (somewhat fuzzy as we need to know what gets done)
-
-local i_opener, i_limited = io.open, false
-local o_opener, o_limited = io.open, false
-
-local function i_register(v)
- if not i_limited then
- local i_limiter = io.i_limiter(v)
- if i_limiter then
- local protect = i_limiter.protect
- i_opener = protect(i_opener)
- i_limited = true
- report_limiter("input mode set to %a",v)
- end
- end
-end
-
-local function o_register(v)
- if not o_limited then
- local o_limiter = io.o_limiter(v)
- if o_limiter then
- local protect = o_limiter.protect
- o_opener = protect(o_opener)
- o_limited = true
- report_limiter("output mode set to %a",v)
- end
- end
-end
-
-function io.open(name,method)
- if method and find(method,"[wa]") then
- return o_opener(name,method)
- else
- return i_opener(name,method)
- end
-end
-
-directives.register("system.inputmode", i_register)
-directives.register("system.outputmode", o_register)
-
-local i_limited = false
-local o_limited = false
-
-local function i_register(v)
- if not i_limited then
- local i_limiter = io.i_limiter(v)
- if i_limiter then
- local protect = i_limiter.protect
- lfs.chdir = protect(lfs.chdir) -- needs checking
- i_limited = true
- end
- end
-end
-
-local function o_register(v)
- if not o_limited then
- local o_limiter = io.o_limiter(v)
- if o_limiter then
- local protect = o_limiter.protect
- os.remove = protect(os.remove) -- rather okay
- lfs.chdir = protect(lfs.chdir) -- needs checking
- lfs.mkdir = protect(lfs.mkdir) -- needs checking
- o_limited = true
- end
- end
-end
-
-directives.register("system.inputmode", i_register)
-directives.register("system.outputmode", o_register)
-
--- the definitions
-
-local limiters = resolvers.variable("limiters")
-
-if limiters then
- io.i_limiters = limiters.input or { }
- io.o_limiters = limiters.output or { }
-end
-
+local cleanedpathlist = resolvers.cleanedpathlist
+local registerroot = sandbox.registerroot
+
+sandbox.initializer(function()
+ local function register(str,mode)
+ local trees = cleanedpathlist(str)
+ for i=1,#trees do
+ registerroot(trees[i],mode)
+ end
+ end
+ register("TEXMF","read")
+ register("TEXINPUTS","read")
+ register("MPINPUTS","read")
+ -- register("TEXMFCACHE","write")
+ registerroot(".","write")
+end)
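
Aside (not part of the patch): a much simplified impression of what registering roots buys, namely a prefix check that decides whether a file may be read or written. The real sandbox code in util-sbx normalizes paths and does considerably more; the table and function names here are made up for the example.

-- editor's sketch of a root based access check
local roots = {
    { path = "/opt/texmf", mode = "read"  },
    { path = ".",          mode = "write" },
}

local function permitted(filename, wanted)
    for i = 1, #roots do
        local root = roots[i]
        if (wanted ~= "write" or root.mode == "write")
            and filename:sub(1, #root.path) == root.path then
            return true
        end
    end
    return false
end

print(permitted("/opt/texmf/fonts/foo.otf", "read"))  -- true
print(permitted("/etc/passwd", "write"))               -- false
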
diff --git a/tex/context/base/luat-lib.mkiv b/tex/context/base/luat-lib.mkiv
index 3f72e780e..24f9da415 100644
--- a/tex/context/base/luat-lib.mkiv
+++ b/tex/context/base/luat-lib.mkiv
@@ -36,6 +36,8 @@
\registerctxluafile{util-sta}{1.001}
+\registerctxluafile{util-sbx}{1.001} % needs tracker and templates
+
\registerctxluafile{data-ini}{1.001}
\registerctxluafile{data-exp}{1.001}
\registerctxluafile{data-env}{1.001}
@@ -71,8 +73,8 @@
\registerctxluafile{luat-ini}{1.001}
\registerctxluafile{util-env}{1.001}
\registerctxluafile{luat-env}{1.001}
-\registerctxluafile{luat-exe}{1.001}
-\registerctxluafile{luat-iop}{1.001}
+\registerctxluafile{luat-exe}{1.001} % simplified
+\registerctxluafile{luat-iop}{1.001} % simplified
\registerctxluafile{luat-bwc}{1.001}
\registerctxluafile{trac-lmx}{1.001} % might become l-lmx or luat-lmx
\registerctxluafile{luat-mac}{1.001}
diff --git a/tex/context/base/luat-mac.lua b/tex/context/base/luat-mac.lua
index 282dc8ce3..6214e591e 100644
--- a/tex/context/base/luat-mac.lua
+++ b/tex/context/base/luat-mac.lua
@@ -92,7 +92,7 @@ local nolong = 1 - longleft - longright
local utf8character = P(1) * R("\128\191")^1 -- unchecked but fast
local name = (R("AZ","az") + utf8character)^1
-local csname = (R("AZ","az") + S("@?!_") + utf8character)^1
+local csname = (R("AZ","az") + S("@?!_:-*") + utf8character)^1
local longname = (longleft/"") * (nolong^1) * (longright/"")
local variable = P("#") * Cs(name + longname)
local escapedname = escape * csname
@@ -144,7 +144,10 @@ local grammar = { "converter",
* V("texbody")
* stopcode
* poplocal,
- texbody = ( V("definition")
+ texbody = (
+leadingcomment -- new per 2015-03-03 (ugly)
++
+ V("definition")
+ identifier
+ V("braced")
+ (1 - stopcode)
@@ -201,7 +204,7 @@ end
function macros.convertfile(oldname,newname) -- beware, no testing on oldname == newname
local data = resolvers.loadtexfile(oldname)
- data = interfaces.preprocessed(data) or ""
+ data = interfaces.preprocessed(data) or "" -- interfaces not yet defined
io.savedata(newname,data)
end
@@ -385,7 +388,7 @@ end
-- \normalexpanded
-- {\def\yes[#one]#two\csname\e!stop#stoptag\endcsname{\command_yes[#one]{#two}}%
-- \def\nop #one\csname\e!stop#stoptag\endcsname{\command_nop {#one}}}%
--- \doifnextoptionalelse\yes\nop}
+-- \doifelsenextoptional\yes\nop}
-- ]]))
--
-- print(macros.preprocessed([[
diff --git a/tex/context/base/luat-run.lua b/tex/context/base/luat-run.lua
index 719a6f7c9..65cf0f338 100644
--- a/tex/context/base/luat-run.lua
+++ b/tex/context/base/luat-run.lua
@@ -6,8 +6,8 @@ if not modules then modules = { } end modules ['luat-run'] = {
license = "see context related readme files"
}
-local format = string.format
-local insert = table.insert
+local format, find = string.format, string.find
+local insert, remove = table.insert, table.remove
-- trace_job_status is also controlled by statistics.enable that is set via the directive system.nostatistics
@@ -130,31 +130,102 @@ luatex.registerstopactions(luatex.cleanuptempfiles)
-- for the moment here
-local synctex = false
-
local report_system = logs.reporter("system")
+local synctex = 0
directives.register("system.synctex", function(v)
- synctex = v
- if v then
- report_system("synctex functionality is enabled!")
+ synctex = tonumber(v) or (toboolean(v,true) and 1) or (v == "zipped" and 1) or (v == "unzipped" and -1) or 0
+ if synctex ~= 0 then
+ report_system("synctex functionality is enabled (%s)!",tostring(synctex))
else
report_system("synctex functionality is disabled!")
end
- synctex = tonumber(synctex) or (toboolean(synctex,true) and 1) or (synctex == "zipped" and 1) or (synctex == "unzipped" and -1) or false
- -- currently this is bugged:
- tex.synctex = synctex
- -- so for the moment we need:
- context.normalsynctex()
- if synctex then
- context.plusone()
- else
- context.zerocount()
- end
+ tex.normalsynctex = synctex
end)
statistics.register("synctex tracing",function()
- if synctex or tex.synctex ~= 0 then
+ if synctex ~= 0 then
return "synctex has been enabled (extra log file generated)"
end
end)
+
+-- filenames
+
+local types = {
+ "data",
+ "font map",
+ "image",
+ "font subset",
+ "full font",
+}
+
+local report_open = logs.reporter("open source")
+local report_close = logs.reporter("close source")
+local report_load = logs.reporter("load resource")
+
+local register = callbacks.register
+
+local level = 0
+local total = 0
+local stack = { }
+local all = false
+
+local function report_start(left,name)
+ if not left then
+ -- skip
+ elseif left ~= 1 then
+ if all then
+ report_load("%s > %s",types[left],name or "?")
+ end
+ elseif find(name,"virtual://") then
+ insert(stack,false)
+ else
+ insert(stack,name)
+ total = total + 1
+ level = level + 1
+ report_open("%i > %i > %s",level,total,name or "?")
+ end
+end
+
+local function report_stop(right)
+ if level == 1 or not right or right == 1 then
+ local name = remove(stack)
+ if name then
+ report_close("%i > %i > %s",level,total,name or "?")
+ level = level - 1
+ end
+ end
+end
+
+local function report_none()
+end
+
+register("start_file",report_start)
+register("stop_file", report_stop)
+
+directives.register("system.reportfiles", function(v)
+ if v == "noresources" then
+ all = false
+ register("start_file",report_start)
+ register("stop_file", report_stop)
+ elseif toboolean(v) or v == "all" then
+ all = true
+ register("start_file",report_start)
+ register("stop_file", report_stop)
+ elseif v == "traditional" then
+ register("start_file",nil)
+ register("stop_file", nil)
+ else
+ register("start_file",report_none)
+ register("stop_file", report_none)
+ end
+end)
+
+-- start_run doesn't work
+
+-- luatex.registerstartactions(function()
+-- if environment.arguments.sandbox then
+-- sandbox.enable()
+-- end
+-- end)
+
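
Aside (not part of the patch): the normalization of the system.synctex directive value, isolated from the registration code above. The toboolean helper is stubbed here because the real one lives elsewhere in ConTeXt.

-- editor's sketch of the synctex value normalization
local function toboolean(v)
    return v == true or v == "true" or v == "yes" or v == "on"
end

local function synctexvalue(v)
    return tonumber(v)
        or (toboolean(v) and 1)
        or (v == "zipped" and 1)
        or (v == "unzipped" and -1)
        or 0
end

print(synctexvalue("yes"))      --  1
print(synctexvalue("unzipped")) -- -1
print(synctexvalue("nonsense")) --  0
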
diff --git a/tex/context/base/luat-sto.lua b/tex/context/base/luat-sto.lua
index 7a11b7f5e..b04d655c2 100644
--- a/tex/context/base/luat-sto.lua
+++ b/tex/context/base/luat-sto.lua
@@ -13,6 +13,7 @@ local gmatch, format = string.gmatch, string.format
local serialize, concat, sortedhash = table.serialize, table.concat, table.sortedhash
local bytecode = lua.bytecode
local strippedloadstring = utilities.lua.strippedloadstring
+local formatters = string.formatters
local trace_storage = false
local report_storage = logs.reporter("system","storage")
@@ -48,38 +49,71 @@ function storage.register(...)
return t
end
-local n = 0
-local function dump()
- local max = storage.max
- for i=1,#data do
- local d = data[i]
- local message, original, target = d[1], d[2] ,d[3]
- local c, code, name = 0, { }, nil
- -- we have a nice definer for this
- for str in gmatch(target,"([^%.]+)") do
- if name then
- name = name .. "." .. str
+local n = 0 -- is that one used ?
+
+if environment.initex then
+
+ -- local function dump()
+ -- local max = storage.max
+ -- for i=1,#data do
+ -- local d = data[i]
+ -- local message, original, target = d[1], d[2] ,d[3]
+ -- local c, code, name = 0, { }, nil
+ -- -- we have a nice definer for this
+ -- for str in gmatch(target,"([^%.]+)") do
+ -- if name then
+ -- name = name .. "." .. str
+ -- else
+ -- name = str
+ -- end
+ -- c = c + 1 ; code[c] = formatters["%s = %s or { }"](name,name)
+ -- end
+ -- max = max + 1
+ -- if trace_storage then
+ -- c = c + 1 ; code[c] = formatters["print('restoring %s from slot %s')"](message,max)
+ -- end
+ -- c = c + 1 ; code[c] = serialize(original,name)
+ -- if trace_storage then
+ -- report_storage('saving %a in slot %a, size %s',message,max,#code[c])
+ -- end
+ -- -- we don't need tracing in such tables
+ -- bytecode[max] = strippedloadstring(concat(code,"\n"),storage.strip,format("slot %s (%s)",max,name))
+ -- collectgarbage("step")
+ -- end
+ -- storage.max = max
+ -- end
+
+ local function dump()
+ local max = storage.max
+ local strip = storage.strip
+ for i=1,#data do
+ max = max + 1
+ local tabledata = data[i]
+ local message = tabledata[1]
+ local original = tabledata[2]
+ local target = tabledata[3]
+ local definition = utilities.tables.definetable(target,false,true)
+ local comment = formatters["restoring %s from slot %s"](message,max)
+ if trace_storage then
+ comment = formatters["print('%s')"](comment)
else
- name = str
+ comment = formatters["-- %s"](comment)
end
- c = c + 1 ; code[c] = format("%s = %s or { }",name,name)
- end
- max = max + 1
- if trace_storage then
- c = c + 1 ; code[c] = format("print('restoring %s from slot %s')",message,max)
- end
- c = c + 1 ; code[c] = serialize(original,name)
- if trace_storage then
- report_storage('saving %a in slot %a, size %s',message,max,#code[c])
+ local dumped = serialize(original,target)
+ if trace_storage then
+ report_storage('saving %a in slot %a, size %s',message,max,#dumped)
+ end
+ -- we don't need tracing in such tables
+ dumped = concat({ definition, comment, dumped },"\n")
+ bytecode[max] = strippedloadstring(dumped,strip,formatters["slot %s (%s)"](max,name))
+ collectgarbage("step")
end
- -- we don't need tracing in such tables
- bytecode[max] = strippedloadstring(concat(code,"\n"),storage.strip,format("slot %s (%s)",max,name))
- collectgarbage("step")
+ storage.max = max
end
- storage.max = max
-end
-lua.registerfinalizer(dump,"dump storage")
+ lua.registerfinalizer(dump,"dump storage")
+
+end
-- to be tested with otf caching:
@@ -115,31 +149,14 @@ statistics.register("stored bytecode data", function()
local tofmodules = storage.tofmodules or 0
local tofdumps = storage.toftables or 0
if environment.initex then
- local luautilities = utilities.lua
- local nofstrippedbytes = luautilities.nofstrippedbytes
- local nofstrippedchunks = luautilities.nofstrippedchunks
- if nofstrippedbytes > 0 then
- return format("%s modules, %s tables, %s chunks, %s chunks stripped (%s bytes)",
- nofmodules,
- nofdumps,
- nofmodules + nofdumps,
- nofstrippedchunks,
- nofstrippedbytes
- )
- elseif nofstrippedchunks > 0 then
- return format("%s modules, %s tables, %s chunks, %s chunks stripped",
- nofmodules,
- nofdumps,
- nofmodules + nofdumps,
- nofstrippedchunks
- )
- else
- return format("%s modules, %s tables, %s chunks",
- nofmodules,
- nofdumps,
- nofmodules + nofdumps
- )
- end
+ local luautilities = utilities.lua
+ return format("%s modules, %s tables, %s chunks, %s chunks stripped (%s bytes)",
+ nofmodules,
+ nofdumps,
+ nofmodules + nofdumps,
+ luautilities.nofstrippedchunks or 0,
+ luautilities.nofstrippedbytes or 0
+ )
else
return format("%s modules (%0.3f sec), %s tables (%0.3f sec), %s chunks (%0.3f sec)",
nofmodules, tofmodules,
@@ -163,6 +180,7 @@ storage.register("storage/shared", storage.shared, "storage.shared")
local mark = storage.mark
if string.patterns then mark(string.patterns) end
+if string.formatters then mark(string.formatters) end
if lpeg.patterns then mark(lpeg.patterns) end
if os.env then mark(os.env) end
if number.dimenfactors then mark(number.dimenfactors) end
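
Aside (not part of the patch): the old commented-out loop and the new utilities.tables.definetable call generate the same kind of restore preamble. The definetable below is a simplified stand-in that only produces the nested "x = x or { }" chain.

-- editor's sketch of the generated table definitions
local function definetable(target)
    local lines, name = { }, nil
    for part in target:gmatch("[^%.]+") do
        name = name and (name .. "." .. part) or part
        lines[#lines+1] = string.format("%s = %s or { }", name, name)
    end
    return table.concat(lines, "\n")
end

print(definetable("job.variables.collected"))
-- job = job or { }
-- job.variables = job.variables or { }
-- job.variables.collected = job.variables.collected or { }
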
diff --git a/tex/context/base/luat-usr.lua b/tex/context/base/luat-usr.lua
new file mode 100644
index 000000000..071e3bf5b
--- /dev/null
+++ b/tex/context/base/luat-usr.lua
@@ -0,0 +1,192 @@
+if not modules then modules = { } end modules ['luat-usr'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local global = global
+
+local moduledata = moduledata
+local thirddata = thirddata
+local userdata = userdata
+local documentdata = documentdata
+
+local context = context
+local tostring = tostring
+local tonumber = tonumber
+local print = print
+
+local string = string
+local table = table
+local lpeg = lpeg
+local math = math
+local io = io
+local os = os
+local lpeg = lpeg
+
+local luanames = lua.name -- luatex itself
+
+local setmetatableindex = table.setmetatableindex
+local load = load
+local xpcall = xpcall
+local instance_banner = string.formatters["=[instance: %s]"] -- the = controls the lua error / see: lobject.c
+local tex_errormessage = context.errmessage
+
+local implement = interfaces.implement
+local reporter = logs.reporter
+
+local report_instance = reporter("lua instance")
+local report_script = reporter("lua script")
+local report_thread = reporter("lua thread")
+local newline = logs.newline
+
+lua.numbers = lua.numbers or { }
+lua.messages = lua.messages or { }
+
+local numbers = lua.numbers
+local messages = lua.messages
+
+storage.register("lua/numbers", numbers, "lua.numbers" )
+storage.register("lua/messages", messages, "lua.messages")
+
+-- First we implement a pure lua version of directlua and a persistent
+-- variant of it:
+
+local function runscript(code)
+ local done, message = loadstring(code)
+ if done then
+ done()
+ else
+ newline()
+ report_script("error : %s",message or "unknown")
+ report_script()
+ report_script("code : %s",code)
+ newline()
+ end
+end
+
+local threads = setmetatableindex(function(t,k)
+ local v = setmetatableindex({},global)
+ t[k] = v
+ return v
+end)
+
+local function runthread(name,code)
+ if not code or code == "" then
+ threads[name] = nil
+ else
+ local thread = threads[name]
+ local done, message = loadstring(code,nil,nil,thread)
+ if done then
+ done()
+ else
+ newline()
+ report_thread("thread: %s",name)
+ report_thread("error : %s",message or "unknown")
+ report_thread()
+ report_thread("code : %s",code)
+ newline()
+ end
+ end
+end
+
+interfaces.implement {
+ name = "luascript",
+ actions = runscript,
+ arguments = "string"
+}
+
+interfaces.implement {
+ name = "luathread",
+ actions = runthread,
+ arguments = { "string", "string" }
+}
+
+-- local scanners = interfaces.scanners
+--
+-- local function ctxscanner(name)
+-- local scanner = scanners[name]
+-- if scanner then
+-- scanner()
+-- else
+-- report("unknown scanner: %s",name)
+-- end
+-- end
+--
+-- interfaces.implement {
+-- name = "clfscanner",
+-- actions = ctxscanner,
+-- arguments = "string",
+-- }
+
+local function registername(name,message)
+ if not name or name == "" then
+ report_instance("no valid name given")
+ return
+ end
+ if not message or message == "" then
+ message = name
+ end
+ local lnn = numbers[name]
+ if not lnn then
+ lnn = #messages + 1
+ messages[lnn] = message
+ numbers[name] = lnn
+ end
+ luanames[lnn] = instance_banner(message)
+ local report = reporter("lua instance",message)
+ local proxy = {
+ -- we can access all via:
+ global = global, -- or maybe just a metatable
+ -- some protected data
+ moduledata = setmetatableindex(moduledata),
+ thirddata = setmetatableindex(thirddata),
+ -- less protected data
+ userdata = userdata,
+ documentdata = documentdata,
+ -- always there fast
+ context = context,
+ tostring = tostring,
+ tonumber = tonumber,
+ -- standard lua modules
+ string = string,
+ table = table,
+ lpeg = lpeg,
+ math = math,
+ io = io,
+ os = os,
+ lpeg = lpeg,
+ --
+ print = print,
+ report = report,
+ }
+ return function(code)
+ local code, message = load(code,nil,nil,proxy)
+ if not code then
+ report_instance("error: %s",message or code)
+ elseif not xpcall(code,report) then
+ tex_errormessage("hit return to continue or quit this run")
+ end
+ end
+end
+
+lua.registername = registername
+
+implement {
+ name = "registernamedlua",
+ arguments = { "string", "string", "string" },
+ actions = function(name,message,csname)
+ if csname and csname ~= "" then
+ implement {
+ name = csname,
+ arguments = "string",
+ actions = registername(name,message) or report,
+ scope = "private",
+ }
+ else
+ report_instance("unvalid csname for %a",message or name or "?")
+ end
+ end
+}
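
Aside (not part of the patch): the proxy environment used by registername reduces to the fourth argument of load in Lua 5.2 and later; code loaded that way only sees what the environment table exposes.

-- editor's sketch of running code in a restricted environment
local proxy = {
    print = print,
    math  = math,
    -- anything not listed here (io, os, ...) is simply invisible
}

local function run(code)
    local chunk, message = load(code, "=[sketch]", "t", proxy)
    if not chunk then
        print("error: " .. message)
    else
        chunk()
    end
end

run([[print(math.pi)]])   -- works: print and math are exposed
run([[print(os ~= nil)]]) -- prints false: os is not part of the proxy
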
diff --git a/tex/context/base/luat-usr.mkiv b/tex/context/base/luat-usr.mkiv
new file mode 100644
index 000000000..92d40010c
--- /dev/null
+++ b/tex/context/base/luat-usr.mkiv
@@ -0,0 +1,126 @@
+%D \module
+%D [ file=luat-usr,
+%D version=2005.08.11,% moved from luat-ini
+%D title=\CONTEXT\ Lua Macros,
+%D subtitle=Initialization,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Lua Macros / Userspace}
+
+\unprotect
+
+\registerctxluafile{luat-usr}{1.001}
+
+%D A few goodies:
+%D
+%D \startbuffer
+%D \luascript { context("foo 1:") context(i) }
+%D \luathread {test} { i = 10 context("bar 1:") context(i) }
+%D \luathread {test} { context("bar 2:") context(i) }
+%D \luathread {test} {}
+%D \luathread {test} { context("bar 3:") context(i) }
+%D \luascript { context("foo 2:") context(i) }
+%D \stopbuffer
+%D
+%D \typebuffer \startlines \getbuffer \stoplines
+
+\let\luascript \clf_luascript
+\let\luathread \clf_luathread
+%let\clfscanner\clf_clfscanner
+%def\clfscanner#1{\csname clf_#1\endcsname}
+
+%D \macros
+%D {definenamedlua}
+%D
+%D We provide an interface for defining instances:
+
+\def\s!lua {lua}
+\def\s!code {code}
+\def\s!data {data}
+%def\s!start{start}
+%def\s!stop {stop}
+
+%D Beware: because \type {\expanded} is a convert command, the error
+%D message will show \type{} as part of the message.
+
+\installsystemnamespace{luacode}
+
+\unexpanded\def\definenamedlua
+ {\bgroup
+ \dodoubleempty\syst_definenamedlua}
+
+\unexpanded\def\syst_definenamedlua[#1][#2]% no optional arg handling here yet / we could use numbers instead (more efficient)
+ {\iffirstargument
+ \ifcsname\??luacode#1\endcsname \else
+ %
+ \edef\fullname{lua_code_#1}%
+ %
+ \clf_registernamedlua{#1}{#2}{\fullname}%
+ %
+ \unexpanded\expandafter\xdef\csname\s!start#1\s!code\endcsname
+ {\begingroup
+ \obeylualines
+ \obeyluatokens
+ \csname\??luacode#1\endcsname}%
+ %
+ \global\expandafter\let\csname\s!stop#1\s!code\endcsname\relax
+ %
+ \normalexpanded{\xdef\csname\??luacode#1\endcsname##1\csname\s!stop#1\s!code\endcsname}%
+ {\noexpand\expandafter\endgroup
+ \noexpand\expandafter
+ \expandafter\noexpand\csname clf_\fullname\endcsname
+ \noexpand\expandafter{\noexpand\normalexpanded{##1}}}%
+ %
+ \global\expandafter\let\csname#1\s!code\expandafter\endcsname\csname clf_\fullname\endcsname
+ \fi
+ \fi
+ \egroup}
+
+%D We predefine a few.
+
+% \definenamedlua[module][module instance] % not needed
+
+\definenamedlua[user] [private user]
+\definenamedlua[third] [third party module]
+\definenamedlua[isolated][isolated]
+
+%D In practice this works out as follows:
+%D
+%D \startbuffer
+%D \startluacode
+%D context("LUA")
+%D \stopluacode
+%D
+%D \startusercode
+%D global.context("USER 1")
+%D context.par()
+%D context("USER 2")
+%D context.par()
+%D if characters then
+%D context("ACCESS directly")
+%D elseif global.characters then
+%D context("ACCESS via global")
+%D else
+%D context("NO ACCESS at all")
+%D end
+%D context.par()
+%D if bogus then
+%D context("ACCESS directly")
+%D elseif global.bogus then
+%D context("ACCESS via global")
+%D else
+%D context("NO ACCESS at all")
+%D end
+%D context.par()
+%D \stopusercode
+%D \stopbuffer
+%D
+%D \typebuffer
+
+\protect \endinput
diff --git a/tex/context/base/lxml-aux.lua b/tex/context/base/lxml-aux.lua
index 0fffe261a..8eedade39 100644
--- a/tex/context/base/lxml-aux.lua
+++ b/tex/context/base/lxml-aux.lua
@@ -10,20 +10,23 @@ if not modules then modules = { } end modules ['lxml-aux'] = {
-- compatibility reasons
local trace_manipulations = false trackers.register("lxml.manipulations", function(v) trace_manipulations = v end)
+local trace_inclusions = false trackers.register("lxml.inclusions", function(v) trace_inclusions = v end)
local report_xml = logs.reporter("xml")
local xml = xml
-local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
+local xmlcopy, xmlname = xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
local xmlfilter = xml.filter
-local type, setmetatable, getmetatable = type, setmetatable, getmetatable
+local type, next, setmetatable, getmetatable = type, next, setmetatable, getmetatable
local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
local gmatch, gsub, format, find, strip = string.gmatch, string.gsub, string.format, string.find, string.strip
local utfbyte = utf.byte
+local lpegmatch = lpeg.match
+local striplinepatterns = utilities.strings.striplinepatterns
local function report(what,pattern,c,e)
report_xml("%s element %a, root %a, position %a, index %a, pattern %a",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -83,13 +86,15 @@ end
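+-- xml.each can also be used without a handler, in which case it simply returns
+-- the collected set, e.g. (a sketch): local entries = xml.each(root,"b")
+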
function xml.each(root,pattern,handle,reverse)
local collected = xmlapplylpath(root,pattern)
if collected then
- if reverse then
- for c=#collected,1,-1 do
- handle(collected[c])
- end
- else
- for c=1,#collected do
- handle(collected[c])
+ if handle then
+ if reverse then
+ for c=#collected,1,-1 do
+ handle(collected[c])
+ end
+ else
+ for c=1,#collected do
+ handle(collected[c])
+ end
end
end
return collected
@@ -162,6 +167,8 @@ local function redo_ni(d)
end
end
+xml.reindex = redo_ni
+
local function xmltoelement(whatever,root)
if not whatever then
return nil
@@ -221,8 +228,18 @@ function xml.delete(root,pattern)
report('deleting',pattern,c,e)
end
local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
+ local ni = e.ni
+ if ni <= #d then
+ if false then
+ p.dt[ni] = ""
+ else
+ -- what if multiple deleted in one set
+ remove(d,ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ -- disturbing
+ end
end
end
end
@@ -353,46 +370,71 @@ xml.insertbefore = function(r,p,e) insert_element(r,p,e,true) end
xml.injectafter = inject_element
xml.injectbefore = function(r,p,e) inject_element(r,p,e,true) end
-local function include(xmldata,pattern,attribute,recursive,loaddata)
- -- parse="text" (default: xml), encoding="" (todo)
- -- attribute = attribute or 'href'
- pattern = pattern or 'include'
- loaddata = loaddata or io.loaddata
+local function include(xmldata,pattern,attribute,recursive,loaddata,level)
+ -- attribute = attribute or 'href'
+ pattern = pattern or 'include'
+ loaddata = loaddata or io.loaddata
local collected = xmlapplylpath(xmldata,pattern)
if collected then
+ if not level then
+ level = 1
+ end
for c=1,#collected do
local ek = collected[c]
local name = nil
local ekdt = ek.dt
local ekat = ek.at
- local epdt = ek.__p__.dt
+ local ekrt = ek.__p__
+ local epdt = ekrt.dt
if not attribute or attribute == "" then
name = (type(ekdt) == "table" and ekdt[1]) or ekdt -- check, probably always tab or str
end
if not name then
for a in gmatch(attribute or "href","([^|]+)") do
name = ekat[a]
- if name then break end
+ if name then
+ break
+ end
+ end
+ end
+ local data = nil
+ if name and name ~= "" then
+ data = loaddata(name) or ""
+ if trace_inclusions then
+ report_xml("including %s bytes from %a at level %s by pattern %a and attribute %a (%srecursing)",#data,name,level,pattern,attribute or "",recursive and "" or "not ")
end
end
- local data = (name and name ~= "" and loaddata(name)) or ""
- if data == "" then
+ if not data or data == "" then
epdt[ek.ni] = "" -- xml.empty(d,k)
elseif ekat["parse"] == "text" then
-- for the moment hard coded
epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data)
else
---~ local settings = xmldata.settings
---~ settings.parent_root = xmldata -- to be tested
---~ local xi = xmlconvert(data,settings)
local xi = xmlinheritedconvert(data,xmldata)
if not xi then
epdt[ek.ni] = "" -- xml.empty(d,k)
else
if recursive then
- include(xi,pattern,attribute,recursive,loaddata)
+ include(xi,pattern,attribute,recursive,loaddata,level+1)
+ end
+ local child = xml.body(xi) -- xml.assign(d,k,xi)
+ child.__p__ = ekrt
+ child.__f__ = name -- handy for tracing
+ epdt[ek.ni] = child
+ local inclusions = xmldata.settings.inclusions
+ if inclusions then
+ inclusions[#inclusions+1] = name
+ else
+ xmldata.settings.inclusions = { name }
+ end
+ if child.er then
+ local badinclusions = xmldata.settings.badinclusions
+ if badinclusions then
+ badinclusions[#badinclusions+1] = name
+ else
+ xmldata.settings.badinclusions = { name }
+ end
end
- epdt[ek.ni] = xml.body(xi) -- xml.assign(d,k,xi)
end
end
end
@@ -401,68 +443,108 @@ end
xml.include = include
+function xml.inclusion(e,default)
+ while e do
+ local f = e.__f__
+ if f then
+ return f
+ else
+ e = e.__p__
+ end
+ end
+ return default
+end
+
+local function getinclusions(key,e,sorted)
+ while e do
+ local settings = e.settings
+ if settings then
+ local inclusions = settings[key]
+ if inclusions then
+ inclusions = table.unique(inclusions) -- a copy
+ if sorted then
+ table.sort(inclusions) -- so we sort the copy
+ end
+ return inclusions -- and return the copy
+ else
+ e = e.__p__
+ end
+ else
+ e = e.__p__
+ end
+ end
+end
+
+function xml.inclusions(e,sorted)
+ return getinclusions("inclusions",e,sorted)
+end
+
+function xml.badinclusions(e,sorted)
+ return getinclusions("badinclusions",e,sorted)
+end
+
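+-- A usage sketch (purely illustrative): once xml.include has expanded the include
+-- elements of a tree, the registered filenames can be queried afterwards:
+--
+--   local found = xml.inclusions(root,true) -- unique, sorted copy, or nil
+--   if found then
+--       for i=1,#found do
+--           print("included:",found[i])
+--       end
+--   end
+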
+local b_collapser = lpeg.patterns.b_collapser
+local m_collapser = lpeg.patterns.m_collapser
+local e_collapser = lpeg.patterns.e_collapser
+
+local b_stripper = lpeg.patterns.b_stripper
+local m_stripper = lpeg.patterns.m_stripper
+local e_stripper = lpeg.patterns.e_stripper
+
+local lpegmatch = lpeg.match
+
local function stripelement(e,nolines,anywhere)
local edt = e.dt
if edt then
- if anywhere then
- local t, n = { }, 0
- for e=1,#edt do
+ local n = #edt
+ if n == 0 then
+ return e -- convenient
+ elseif anywhere then
+ local t = { }
+ local m = 0
+ for e=1,n do
local str = edt[e]
if type(str) ~= "string" then
- n = n + 1
- t[n] = str
+ m = m + 1
+ t[m] = str
elseif str ~= "" then
- -- todo: lpeg for each case
if nolines then
- str = gsub(str,"%s+"," ")
+ str = lpegmatch((n == 1 and b_collapser) or (n == m and e_collapser) or m_collapser,str)
+ else
+ str = lpegmatch((n == 1 and b_stripper) or (n == m and e_stripper) or m_stripper,str)
end
- str = gsub(str,"^%s*(.-)%s*$","%1")
if str ~= "" then
- n = n + 1
- t[n] = str
+ m = m + 1
+ t[m] = str
end
end
end
e.dt = t
else
- -- we can assume a regular sparse xml table with no successive strings
- -- otherwise we should use a while loop
- if #edt > 0 then
- -- strip front
- local str = edt[1]
- if type(str) ~= "string" then
- -- nothing
- elseif str == "" then
+ local str = edt[1]
+ if type(str) == "string" then
+ if str ~= "" then
+ str = lpegmatch(nolines and b_collapser or b_stripper,str)
+ end
+ if str == "" then
remove(edt,1)
+ n = n - 1
else
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"^%s+","")
- if str == "" then
- remove(edt,1)
- else
- edt[1] = str
- end
+ edt[1] = str
end
end
- local nedt = #edt
- if nedt > 0 then
- -- strip end
- local str = edt[nedt]
- if type(str) ~= "string" then
- -- nothing
- elseif str == "" then
- remove(edt)
- else
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"%s+$","")
+ if n > 0 then
+ str = edt[n]
+ if type(str) == "string" then
if str == "" then
remove(edt)
else
- edt[nedt] = str
+ str = lpegmatch(nolines and e_collapser or e_stripper,str)
+ if str == "" then
+ remove(edt)
+ else
+ edt[n] = str
+ end
end
end
end
@@ -702,8 +784,8 @@ function xml.finalizers.xml.cdata(collected)
return ""
end
-function xml.insertcomment(e,str,n) -- also insertcdata
- table.insert(e.dt,n or 1,{
+function xml.insertcomment(e,str,n)
+ insert(e.dt,n or 1,{
tg = "@cm@",
ns = "",
special = true,
@@ -712,7 +794,27 @@ function xml.insertcomment(e,str,n) -- also insertcdata
})
end
-function xml.setcdata(e,str) -- also setcomment
+function xml.insertcdata(e,str,n)
+ insert(e.dt,n or 1,{
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcomment(e,str,n)
+ e.dt = { {
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
+
+function xml.setcdata(e,str)
e.dt = { {
tg = "@cd@",
ns = "",
@@ -790,7 +892,7 @@ local function recurse(e,action)
for i=1,#edt do
local str = edt[i]
if type(str) ~= "string" then
- recurse(str,action,recursive)
+ recurse(str,action) -- ,recursive
elseif str ~= "" then
edt[i] = action(str)
end
@@ -809,3 +911,91 @@ function helpers.recursetext(collected,action,recursive)
end
end
end
+
+-- on request ... undocumented ...
+--
+-- _tag : element name
+-- _type : node type (_element can be an option)
+-- _namespace : only if given
+--
+-- [1..n] : text or table
+-- key : value or attribute 'key'
+--
+-- local str = [[
+-- <?xml version="1.0" ?>
+-- <a one="1">
+--     <b two="2"/>
+--     <b>
+--         c > d
+--     </b>
+-- </a>
+-- ]]
+--
+-- inspect(xml.totable(xml.convert(str)))
+-- inspect(xml.totable(xml.convert(str),true))
+-- inspect(xml.totable(xml.convert(str),true,true))
+
+local specials = {
+ ["@rt@"] = "root",
+ ["@pi@"] = "instruction",
+ ["@cm@"] = "comment",
+ ["@dt@"] = "declaration",
+ ["@cd@"] = "cdata",
+}
+
+local function convert(x,strip,flat)
+ local ns = x.ns
+ local tg = x.tg
+ local at = x.at
+ local dt = x.dt
+ local node = flat and {
+ [0] = (not x.special and (ns ~= "" and ns .. ":" .. tg or tg)) or nil,
+ } or {
+ _namespace = ns ~= "" and ns or nil,
+ _tag = not x.special and tg or nil,
+ _type = specials[tg] or "_element",
+ }
+ if at then
+ for k, v in next, at do
+ node[k] = v
+ end
+ end
+ local n = 0
+ for i=1,#dt do
+ local di = dt[i]
+ if type(di) == "table" then
+ if flat and di.special then
+ -- ignore
+ else
+ di = convert(di,strip,flat)
+ if di then
+ n = n + 1
+ node[n] = di
+ end
+ end
+ elseif strip then
+ di = lpegmatch(strip,di)
+ if di ~= "" then
+ n = n + 1
+ node[n] = di
+ end
+ else
+ n = n + 1
+ node[n] = di
+ end
+ end
+ if next(node) then
+ return node
+ end
+end
+
+function xml.totable(x,strip,flat)
+ if type(x) == "table" then
+ if strip then
+ strip = striplinepatterns[strip]
+ end
+ return convert(x,strip,flat)
+ end
+end
+
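+-- A sketch of the mapping (derived from the code above, not from real output):
+-- an element like <a b="c">x</a> ends up as
+--
+--   { _type = "_element", _tag = "a", b = "c", "x" }
+--
+-- while the flat variant stores the (possibly prefixed) tag in slot zero instead
+-- of the _tag/_type/_namespace keys.
+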
diff --git a/tex/context/base/lxml-css.lua b/tex/context/base/lxml-css.lua
index 0deaea4d3..fa921b24f 100644
--- a/tex/context/base/lxml-css.lua
+++ b/tex/context/base/lxml-css.lua
@@ -146,7 +146,19 @@ local pattern = Cf( Ct("") * (
+ (C("sansserif") + C("sans")) / "sans-serif" -- match before serif
+ C("serif")
)
- ) + P(1)
+ )
+--+ P("\\") * (
+-- P("bf") * ( Cg ( Cc("weight") * Cc("bold") ) )
+-- + P("bi") * ( Cg ( Cc("weight") * Cc("bold") )
+-- * Cg ( Cc("style") * Cc("italic") ) )
+-- + P("bs") * ( Cg ( Cc("weight") * Cc("bold") )
+-- * Cg ( Cc("style") * Cc("oblique") ) )
+-- + P("it") * ( Cg ( Cc("style") * Cc("italic") ) )
+-- + P("sl") * ( Cg ( Cc("style") * Cc("oblique") ) )
+-- + P("sc") * ( Cg ( Cc("variant") * Cc("small-caps") ) )
+-- + P("tt") * ( Cg ( Cc("family") * Cc("monospace") ) )
+--)
+ + P(1)
)^0 , rawset)
function css.fontspecification(str)
diff --git a/tex/context/base/lxml-ini.lua b/tex/context/base/lxml-ini.lua
new file mode 100644
index 000000000..115403395
--- /dev/null
+++ b/tex/context/base/lxml-ini.lua
@@ -0,0 +1,142 @@
+if not modules then modules = { } end modules ['lxml-ini'] = {
+ version = 1.001,
+ comment = "this module is the basis for the lxml-* ones",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local xml = xml
+local lxml = lxml
+
+-- this defines an extra scanner lxmlid:
+
+local scanners = tokens.scanners
+local scanstring = scanners.string
+local getid = lxml.id
+
+scanners.lxmlid = function() return getid(scanstring()) end
+
+local implement = interfaces.implement
+
+-- lxml.id
+
+implement { name = "lxmlid", actions = lxml.getid, arguments = "string" }
+
+implement { name = "xmldoif", actions = lxml.doif, arguments = { "string", "string" } }
+implement { name = "xmldoifnot", actions = lxml.doifnot, arguments = { "string", "string" } }
+implement { name = "xmldoifelse", actions = lxml.doifelse, arguments = { "string", "string" } }
+implement { name = "xmldoiftext", actions = lxml.doiftext, arguments = { "string", "string" } }
+implement { name = "xmldoifnottext", actions = lxml.doifnottext, arguments = { "string", "string" } }
+implement { name = "xmldoifelsetext", actions = lxml.doifelsetext, arguments = { "string", "string" } }
+
+implement { name = "xmldoifempty", actions = lxml.doifempty, arguments = { "string", "string" } }
+implement { name = "xmldoifnotempty", actions = lxml.doifnotempty, arguments = { "string", "string" } }
+implement { name = "xmldoifelseempty", actions = lxml.doifelseempty, arguments = { "string", "string" } }
+implement { name = "xmldoifselfempty", actions = lxml.doifempty, arguments = "string" }
+implement { name = "xmldoifnotselfempty", actions = lxml.doifnotempty, arguments = "string" }
+implement { name = "xmldoifelseselfempty", actions = lxml.doifelseempty, arguments = "string" }
+
+--------- { name = "xmlcontent", actions = lxml.content, arguments = "string" }
+--------- { name = "xmlflushstripped", actions = lxml.strip, arguments = { "string", true } }
+implement { name = "xmlall", actions = lxml.all, arguments = { "string", "string" } }
+implement { name = "xmlatt", actions = lxml.att, arguments = { "string", "string" } }
+implement { name = "xmlattdef", actions = lxml.att, arguments = { "string", "string", "string" } }
+implement { name = "xmlattribute", actions = lxml.attribute, arguments = { "string", "string", "string" } }
+implement { name = "xmlattributedef", actions = lxml.attribute, arguments = { "string", "string", "string", "string" } }
+implement { name = "xmlchainatt", actions = lxml.chainattribute, arguments = { "string", "'/'", "string" } }
+implement { name = "xmlchainattdef", actions = lxml.chainattribute, arguments = { "string", "'/'", "string", "string" } }
+implement { name = "xmlrefatt", actions = lxml.refatt, arguments = { "string", "string" } }
+implement { name = "xmlchecknamespace", actions = xml.checknamespace, arguments = { "lxmlid", "string", "string" } }
+implement { name = "xmlcommand", actions = lxml.command, arguments = { "string", "string", "string" } }
+implement { name = "xmlconcat", actions = lxml.concat, arguments = { "string", "string", "string" } } -- \detokenize{#3}
+implement { name = "xmlconcatrange", actions = lxml.concatrange, arguments = { "string", "string", "string", "string", "string" } } -- \detokenize{#5}
+implement { name = "xmlcontext", actions = lxml.context, arguments = { "string", "string" } }
+implement { name = "xmlcount", actions = lxml.count, arguments = { "string", "string" } }
+implement { name = "xmldelete", actions = lxml.delete, arguments = { "string", "string" } }
+implement { name = "xmldirect", actions = lxml.direct, arguments = "string" }
+implement { name = "xmldirectives", actions = lxml.directives.setup, arguments = "string" }
+implement { name = "xmldirectivesafter", actions = lxml.directives.after, arguments = "string" }
+implement { name = "xmldirectivesbefore", actions = lxml.directives.before, arguments = "string" }
+implement { name = "xmldisplayverbatim", actions = lxml.displayverbatim, arguments = "string" }
+implement { name = "xmlelement", actions = lxml.element, arguments = { "string", "string" } } -- could be integer but now we can alias
+implement { name = "xmlfilter", actions = lxml.filter, arguments = { "string", "string" } }
+implement { name = "xmlfilterlist", actions = lxml.filterlist, arguments = { "string", "string" } }
+implement { name = "xmlfirst", actions = lxml.first, arguments = { "string", "string" } }
+implement { name = "xmlflush", actions = lxml.flush, arguments = "string" }
+implement { name = "xmlflushcontext", actions = lxml.context, arguments = "string" }
+implement { name = "xmlflushlinewise", actions = lxml.flushlinewise, arguments = "string" }
+implement { name = "xmlflushspacewise", actions = lxml.flushspacewise, arguments = "string" }
+implement { name = "xmlfunction", actions = lxml.applyfunction, arguments = { "string", "string" } }
+implement { name = "xmlinclude", actions = lxml.include, arguments = { "string", "string", "string", true } }
+implement { name = "xmlincludeoptions", actions = lxml.include, arguments = { "string", "string", "string", "string" } }
+implement { name = "xmlinclusion", actions = lxml.inclusion, arguments = "string" }
+implement { name = "xmlinclusions", actions = lxml.inclusions, arguments = "string" }
+implement { name = "xmlbadinclusions", actions = lxml.badinclusions, arguments = "string" }
+implement { name = "xmlindex", actions = lxml.index, arguments = { "string", "string", "string" } } -- can be integer but now we can alias
+implement { name = "xmlinfo", actions = lxml.info, arguments = "string" }
+implement { name = "xmlinlineverbatim", actions = lxml.inlineverbatim, arguments = "string" }
+implement { name = "xmllast", actions = lxml.last, arguments = "string" }
+implement { name = "xmlload", actions = lxml.load, arguments = { "string", "string", "string", "string" } }
+implement { name = "xmlloadbuffer", actions = lxml.loadbuffer, arguments = { "string", "string", "string", "string" } }
+implement { name = "xmlloaddata", actions = lxml.loaddata, arguments = { "string", "string", "string", "string" } }
+implement { name = "xmlloaddirectives", actions = lxml.directives.load, arguments = "string" }
+implement { name = "xmlloadregistered", actions = lxml.loadregistered, arguments = { "string", "string", "string" } }
+implement { name = "xmlmain", actions = lxml.main, arguments = "string" }
+implement { name = "xmlmatch", actions = lxml.match, arguments = "string" }
+implement { name = "xmlname", actions = lxml.name, arguments = "string" }
+implement { name = "xmlnamespace", actions = lxml.namespace, arguments = "string" }
+implement { name = "xmlnonspace", actions = lxml.nonspace, arguments = { "string", "string" } }
+implement { name = "xmlpos", actions = lxml.pos, arguments = "string" }
+implement { name = "xmlraw", actions = lxml.raw, arguments = { "string", "string" } }
+implement { name = "xmlregisterns", actions = xml.registerns, arguments = { "string", "string" } }
+implement { name = "xmlremapname", actions = xml.remapname, arguments = { "lxmlid", "string","string","string" } }
+implement { name = "xmlremapnamespace", actions = xml.renamespace, arguments = { "lxmlid", "string", "string" } }
+implement { name = "xmlsave", actions = lxml.save, arguments = { "string", "string" } }
+--------- { name = "xmlsetfunction", actions = lxml.setaction, arguments = { "string", "string", "string" } }
+implement { name = "xmlsetsetup", actions = lxml.setsetup, arguments = { "string", "string", "string" } }
+implement { name = "xmlsnippet", actions = lxml.snippet, arguments = { "string", "string" } }
+implement { name = "xmlstrip", actions = lxml.strip, arguments = { "string", "string" } }
+implement { name = "xmlstripanywhere", actions = lxml.strip, arguments = { "string", "string", true, true } }
+implement { name = "xmlstripnolines", actions = lxml.strip, arguments = { "string", "string", true } }
+implement { name = "xmlstripped", actions = lxml.stripped, arguments = { "string", "string" } }
+implement { name = "xmlstrippednolines", actions = lxml.stripped, arguments = { "string", "string", true } }
+implement { name = "xmltag", actions = lxml.tag, arguments = "string" }
+implement { name = "xmltext", actions = lxml.text, arguments = { "string", "string" } }
+implement { name = "xmltobuffer", actions = lxml.tobuffer, arguments = { "string", "string", "string" } }
+implement { name = "xmltobufferverbose", actions = lxml.tobuffer, arguments = { "string", "string", "string", true } }
+implement { name = "xmltofile", actions = lxml.tofile, arguments = { "string", "string", "string" } }
+implement { name = "xmltoparameters", actions = lxml.toparameters, arguments = "string" }
+implement { name = "xmlverbatim", actions = lxml.verbatim, arguments = "string" }
+
+implement { name = "xmlstartraw", actions = lxml.startraw }
+implement { name = "xmlstopraw", actions = lxml.stopraw }
+
+implement { name = "xmlprependsetup", actions = lxml.installsetup, arguments = { 1, "string", "string" } } -- 2:*
+implement { name = "xmlappendsetup", actions = lxml.installsetup, arguments = { 2, "string", "string" } } -- 2:*
+implement { name = "xmlbeforesetup", actions = lxml.installsetup, arguments = { 3, "string", "string", "string" } } -- 2:*
+implement { name = "xmlaftersetup", actions = lxml.installsetup, arguments = { 4, "string", "string", "string" } } -- 2:*
+implement { name = "xmlprependdocumentsetup", actions = lxml.installsetup, arguments = { 1, "string", "string" } }
+implement { name = "xmlappenddocumentsetup", actions = lxml.installsetup, arguments = { 2, "string", "string" } }
+implement { name = "xmlbeforedocumentsetup", actions = lxml.installsetup, arguments = { 3, "string", "string", "string" } }
+implement { name = "xmlafterdocumentsetup", actions = lxml.installsetup, arguments = { 4, "string", "string" } }
+implement { name = "xmlremovesetup", actions = lxml.removesetup, arguments = { "string", "string" } } -- 1:*
+implement { name = "xmlremovedocumentsetup", actions = lxml.removesetup, arguments = { "string", "string" } }
+implement { name = "xmlflushdocumentsetups", actions = lxml.flushsetups, arguments = { "string", "string", "string" } } -- 2:*
+implement { name = "xmlresetdocumentsetups", actions = lxml.resetsetups, arguments = "string" }
+
+implement { name = "xmlgetindex", actions = lxml.getindex, arguments = { "string", "string" } }
+implement { name = "xmlwithindex", actions = lxml.withindex, arguments = { "string", "string", "string" } }
+
+implement { name = "xmlsetentity", actions = xml.registerentity, arguments = { "string", "string" } }
+implement { name = "xmltexentity", actions = lxml.registerentity, arguments = { "string", "string" } }
+
+implement { name = "xmlsetcommandtotext", actions = lxml.setcommandtotext, arguments = "string" }
+implement { name = "xmlsetcommandtonone", actions = lxml.setcommandtonone, arguments = "string" }
+
+implement { name = "xmlstarttiming", actions = function() statistics.starttiming(lxml) end }
+implement { name = "xmlstoptiming", actions = function() statistics.stoptiming (lxml) end }
+
+-- kind of special (3rd argument is a function)
+
+commands.xmlsetfunction = lxml.setaction
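+
+-- To illustrate (a sketch, not an extra interface): a registration like
+--
+--   implement { name = "xmlfirst", actions = lxml.first, arguments = { "string", "string" } }
+--
+-- gives the TeX end a \clf_xmlfirst that scans two (braced) string arguments and
+-- passes them on to lxml.first; the mkiv file then simply does \let\xmlfirst\clf_xmlfirst.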
diff --git a/tex/context/base/lxml-ini.mkiv b/tex/context/base/lxml-ini.mkiv
index cfa0114d0..fab644fdb 100644
--- a/tex/context/base/lxml-ini.mkiv
+++ b/tex/context/base/lxml-ini.mkiv
@@ -12,6 +12,7 @@
%C details.
%D Todo: auto apply setups (manage at lua end)
+%D Todo: manual: \xmlinclusion \xmlinclusions
\writestatus{loading}{ConTeXt XML Support / Initialization}
@@ -20,100 +21,203 @@
%registerctxluafile{lxml-xml}{1.001} % xml finalizers
%registerctxluafile{lxml-aux}{1.001} % extras using parser
%registerctxluafile{lxml-mis}{1.001} % extras independent of parser
-\registerctxluafile{char-ent}{1.001}
\registerctxluafile{lxml-ent}{1.001} % entity hacks
\registerctxluafile{lxml-tex}{1.001} % tex finalizers
\registerctxluafile{lxml-dir}{1.001} % ctx hacks
+\registerctxluafile{lxml-ini}{1.001} % interface
\unprotect % todo \!!bs \!!es where handy (slower)
-\def\ctxlxml #1{\ctxlua{lxml.#1}}
-
-\def\xmlmain #1{\ctxlxml{main("#1")}}
-\def\xmlmatch #1{\ctxlxml{match("#1")}}
-\def\xmlall #1#2{\ctxlxml{all("#1","#2")}}
-\def\xmlatt #1#2{\ctxlxml{att("#1","#2")}}
-\def\xmlattdef #1#2#3{\ctxlxml{att("#1","#2","#3")}}
-\def\xmlchainatt #1#2{\ctxlxml{chainattribute("#1","/","#2")}}
-\def\xmlchainattdef #1#2#3{\ctxlxml{chainattribute("#1","/","#2","#3")}}
-\def\xmlattribute #1#2#3{\ctxlxml{attribute("#1","#2","#3")}}
-\def\xmlattributedef #1#2#3#4{\ctxlxml{attribute("#1","#2","#3","#4")}}
-\def\xmlcommand #1#2#3{\ctxlxml{command("#1","#2","#3")}}
-\def\xmlconcat #1#2#3{\ctxlxml{concat("#1","#2",[[\detokenize{#3}]])}}
-\def\xmlconcatrange#1#2#3#4#5{\ctxlxml{concatrange("#1","#2","#3","#4",[[\detokenize{#5}]])}}
-\def\xmlcount #1#2{\ctxlxml{count("#1","#2")}}
-\def\xmldelete #1#2{\ctxlxml{delete("#1","#2")}}
-\def\xmldirectives #1{\ctxlxml{directives.setup("#1")}}
-\def\xmldirectivesbefore #1{\ctxlxml{directives.before("#1")}}
-\def\xmldirectivesafter #1{\ctxlxml{directives.after("#1")}}
-\def\xmlfilter #1#2{\ctxlxml{filter("#1",\!!bs#2\!!es)}}
-\def\xmlfilterlist #1#2{\ctxlxml{filterlist("#1",\!!bs#2\!!es)}}
-\def\xmlfunction #1#2{\ctxlxml{applyfunction("#1",\!!bs#2\!!es)}}
-\def\xmlfirst #1#2{\ctxlxml{first("#1","#2")}}
-\def\xmlflush #1{\ctxlxml{flush("#1")}}
-\def\xmlflushlinewise #1{\ctxlxml{flushlinewise("#1")}}
-\def\xmlflushspacewise #1{\ctxlxml{flushspacewise("#1")}}
-%def\xmlcontent #1{\ctxlxml{content("#1")}}
-%def\xmlflushstripped #1{\ctxlxml{strip("#1",true)}}
-\def\xmldirect #1{\ctxlxml{direct("#1")}} % in loops, not dt but root
-\def\xmlidx #1#2#3{\ctxlxml{idx("#1","#2",\number#3)}}
-\def\xmlinclude #1#2#3{\ctxlxml{include("#1","#2","#3",true)}}
-\def\xmlindex #1#2#3{\ctxlxml{index("#1","#2",\number#3)}}
-\def\xmlinfo #1{\hbox{\ttxx[\ctxlxml{info("#1")}]}}
-\def\xmlshow #1{\startpacked\ttx\xmlverbatim{#1}\stoppacked}
-\def\xmllast #1#2{\ctxlxml{last("#1","#2")}}
-\def\xmlname #1{\ctxlxml{name("#1")}}
-\def\xmlnamespace #1{\ctxlxml{namespace("#1")}}
-\def\xmlnonspace #1#2{\ctxlxml{nonspace("#1","#2")}}
-\def\xmlraw #1#2{\ctxlxml{raw("#1","#2")}}
-\def\xmlcontext #1#2{\ctxlxml{context("#1","#2")}}
-\def\xmlflushcontext #1{\ctxlxml{context("#1")}}
-\def\xmlsnippet #1#2{\ctxlxml{snippet("#1",#2)}}
-\def\xmlelement #1#2{\ctxlxml{element("#1",#2)}}
-\def\xmlregisterns #1#2{\ctxlua{xml.registerns("#1","#2")}} % document
-\def\xmlremapname #1#2#3#4{\ctxlua{xml.remapname(lxml.id("#1"),"#2","#3","#4")}} % element
-\def\xmlremapnamespace #1#2#3{\ctxlua{xml.renamespace(lxml.id("#1"),"#2","#3")}} % document
-\def\xmlchecknamespace #1#2#3{\ctxlua{xml.checknamespace(lxml.id("#1"),"#2","#3")}} % element
-\def\xmlsetfunction #1#2#3{\ctxlxml{setaction("#1",\!!bs#2\!!es,#3)}}
-\def\xmlsetsetup #1#2#3{\ctxlxml{setsetup("#1",\!!bs#2\!!es,"#3")}}
-\def\xmlstrip #1#2{\ctxlxml{strip("#1","#2")}}
-\def\xmlstripnolines #1#2{\ctxlxml{strip("#1","#2",true)}}
-\def\xmlstripanywhere #1#2{\ctxlxml{strip("#1","#2",true,true)}}
-\def\xmlstripped #1#2{\ctxlxml{stripped("#1","#2")}}
-\def\xmlstrippednolines #1#2{\ctxlxml{stripped("#1","#2",true)}}
-\def\xmltag #1{\ctxlxml{tag("#1")}}
-\def\xmltext #1#2{\ctxlxml{text("#1","#2")}}
-\def\xmlverbatim #1{\ctxlxml{verbatim("#1")}}
-\def\xmldisplayverbatim #1{\ctxlxml{displayverbatim("#1")}}
-\def\xmlinlineverbatim #1{\ctxlxml{inlineverbatim("#1")}}
-
-\def\xmlload #1#2{\ctxlxml{load("#1","#2","\directxmlparameter\c!entities","\directxmlparameter\c!compress")}}
-\def\xmlloadbuffer #1#2{\ctxlxml{loadbuffer("#1","#2","\directxmlparameter\c!entities","\directxmlparameter\c!compress")}}
-\def\xmlloaddata #1#2{\ctxlxml{loaddata("#1",\!!bs#2\!!es,"\directxmlparameter\c!entities","\directxmlparameter\c!compress")}}
-\def\xmlloadregistered #1#2{\ctxlxml{loadregistered("#1","\directxmlparameter\c!entities","\directxmlparameter\c!compress")}}
-\def\xmlloaddirectives #1{\ctxlxml{directives.load("any:///#1")}}
-\def\xmlpos #1{\ctxlxml{pos("#1")}}
-
-\def\xmltoparameters #1{\ctxlxml{toparameters("#1")}}
-
-\def\xmltofile #1#2#3{\ctxlxml{tofile("#1","#2","#3")}} % id pattern filename
+% todo: { } mandate
+% avoid #
+
+\def\ctxlxml #1{\ctxlua{lxml.#1}}
+
+%def\xmlall #1#2{\clf_xmlall {#1}{#2}}
+%def\xmlatt #1#2{\clf_xmlatt {#1}{#2}}
+%def\xmlattdef #1#2#3{\clf_xmlattdef {#1}{#2}{#3}}
+%def\xmlattribute #1#2#3{\clf_xmlattribute {#1}{#2}{#3}}
+%def\xmlattributedef #1#2#3#4{\clf_xmlattributedef {#1}{#2}{#3}{#4}}
+%def\xmlchainatt #1#2{\clf_xmlchainatt {#1}{#2}}
+%def\xmlchainattdef #1#2#3{\clf_xmlchainattdef {#1}{#2}{#3}}
+%def\xmlrefatt #1#2{\clf_xmlrefatt {#1}{#2}}
+%def\xmlchecknamespace #1#2#3{\clf_xmlchecknamespace {#1}{#2}{#3}} % element
+%def\xmlcommand #1#2#3{\clf_xmlcommand {#1}{#2}{#3}}
+\def\xmlconcat #1#2#3{\clf_xmlconcat {#1}{#2}{\detokenize{#3}}}
+\def\xmlconcatrange #1#2#3#4#5{\clf_xmlconcatrange {#1}{#2}{#3}{#4}{\detokenize{#5}}}
+%def\xmlcontext #1#2{\clf_xmlcontext {#1}{#2}}
+%def\xmlcount #1#2{\clf_xmlcount {#1}{#2}}
+%def\xmldelete #1#2{\clf_xmldelete {#1}{#2}}
+%def\xmldirect #1{\clf_xmldirect {#1}} % in loops, not dt but root
+%def\xmldirectives #1{\clf_xmldirectives {#1}}
+%def\xmldirectivesafter #1{\clf_xmldirectivesafter {#1}}
+%def\xmldirectivesbefore #1{\clf_xmldirectivesbefore {#1}}
+%def\xmldisplayverbatim #1{\clf_xmldisplayverbatim {#1}}
+%def\xmlelement #1#2{\clf_xmlelement {#1}{#2}}
+%def\xmlfilter #1#2{\clf_xmlfilter {#1}{#2}}
+%def\xmlfilterlist #1#2{\clf_xmlfilterlist {#1}{#2}}
+%def\xmlfirst #1#2{\clf_xmlfirst {#1}{#2}}
+%def\xmlflush #1{\clf_xmlflush {#1}}
+%def\xmlflushcontext #1{\clf_xmlflushcontext {#1}}
+%def\xmlflushlinewise #1{\clf_xmlflushlinewise {#1}}
+%def\xmlflushspacewise #1{\clf_xmlflushspacewise {#1}}
+%def\xmlfunction #1#2{\clf_xmlfunction {#1}{#2}}
+%def\xmlinclude #1#2#3{\clf_xmlinclude {#1}{#2}{#3}}
+%def\xmlincludeoptions#1#2#3#4{\clf_xmlincludeoptions {#1}{#2}{#3}{#4}}
+%def\xmlinclusion #1{\clf_xmlinclusion {#1}}
+%def\xmlinclusions #1{\clf_xmlinclusions {#1}}
+%def\xmlbadinclusions #1{\clf_xmlbadinclusions {#1}}
+%def\xmlindex #1#2#3{\clf_xmlindex {#1}{#2}{#3}}
+%let\xmlposition \xmlindex
+%def\xmlinlineverbatim #1{\clf_xmlinlineverbatim {#1}}
+%def\xmllast #1#2{\clf_xmllast {#1}{#2}}
+\def\xmlload #1#2{\clf_xmlload {#1}{#2}{\directxmlparameter\c!entities}{\directxmlparameter\c!compress}}
+\def\xmlloadbuffer #1#2{\clf_xmlloadbuffer {#1}{#2}{\directxmlparameter\c!entities}{\directxmlparameter\c!compress}}
+\def\xmlloaddata #1#2{\clf_xmlloaddata {#1}{#2}{\directxmlparameter\c!entities}{\directxmlparameter\c!compress}}
+%def\xmlloaddirectives #1{\clf_xmlloaddirectives {#1}}
+\def\xmlloadregistered #1#2{\clf_xmlloadregistered {#1}{\directxmlparameter\c!entities}{\directxmlparameter\c!compress}}
+%def\xmlmain #1{\clf_xmlmain {#1}}
+%def\xmlmatch #1{\clf_xmlmatch {#1}}
+%def\xmlname #1{\clf_xmlname {#1}}
+%def\xmlnamespace #1{\clf_xmlnamespace {#1}}
+%def\xmlnonspace #1#2{\clf_xmlnonspace {#1}{#2}}
+%def\xmlpos #1{\clf_xmlpos {#1}}
+%def\xmlraw #1#2{\clf_xmlraw {#1}{#2}}
+%def\xmlregisterns #1#2{\clf_xmlregisterns {#1}{#2}} % document
+%def\xmlremapname #1#2#3#4{\clf_xmlremapname {#1}{#2}{#3}{#4}} % element
+%def\xmlremapnamespace #1#2#3{\clf_xmlremapnamespace {#1}{#2}{#3}} % document
+%def\xmlsave #1#2{\clf_xmlsave {#1}{#2}}
+%def\xmlsetfunction #1#2#3{\clf_xmlsetfunction {#1}{#2}{#3}}
+%def\xmlsetsetup #1#2#3{\clf_xmlsetsetup {#1}{#2}{#3}}
+%def\xmlsnippet #1#2{\clf_xmlsnippet {#1}{#2}}
+%def\xmlstrip #1#2{\clf_xmlstrip {#1}{#2}}
+%def\xmlstripanywhere #1#2{\clf_xmlstripanywhere {#1}{#2}}
+%def\xmlstripnolines #1#2{\clf_xmlstripnolines {#1}{#2}}
+%def\xmlstripped #1#2{\clf_xmlstripped {#1}{#2}}
+%def\xmlstrippednolines #1#2{\clf_xmlstrippednolines {#1}{#2}}
+%def\xmltag #1{\clf_xmltag {#1}}
+%def\xmltext #1#2{\clf_xmltext {#1}{#2}}
+%def\xmltobuffer #1#2#3{\clf_xmltobuffer {#1}{#2}{#3}} % id pattern name
+%def\xmltobufferverbose #1#2#3{\clf_xmltobufferverbose {#1}{#2}{#3}} % id pattern name
+%def\xmltofile #1#2#3{\clf_xmltofile {#1}{#2}{#3}} % id pattern filename
+%def\xmltoparameters #1{\clf_xmltoparameters {#1}}
+%def\xmlverbatim #1{\clf_xmlverbatim {#1}}
+
+% experiment:
+\let\xmlall \clf_xmlall
+\let\xmlatt \clf_xmlatt
+\let\xmlattdef \clf_xmlattdef
+\let\xmlattribute \clf_xmlattribute
+\let\xmlattributedef \clf_xmlattributedef
+\let\xmlchainatt \clf_xmlchainatt
+\let\xmlchainattdef \clf_xmlchainattdef
+\let\xmlrefatt \clf_xmlrefatt
+\let\xmlchecknamespace \clf_xmlchecknamespace
+\let\xmlcommand \clf_xmlcommand
+% \xmlconcat
+% \xmlconcatrange
+\let\xmlcontext \clf_xmlcontext
+\let\xmlcount \clf_xmlcount
+\let\xmldelete \clf_xmldelete
+\let\xmldirect \clf_xmldirect % in loops, not dt but root
+\let\xmldirectives \clf_xmldirectives
+\let\xmldirectivesafter \clf_xmldirectivesafter
+\let\xmldirectivesbefore \clf_xmldirectivesbefore
+\let\xmldisplayverbatim \clf_xmldisplayverbatim
+\let\xmlelement \clf_xmlelement
+\let\xmlfilter \clf_xmlfilter
+\let\xmlfilterlist \clf_xmlfilterlist
+\let\xmlfirst \clf_xmlfirst
+\let\xmlflush \clf_xmlflush
+\let\xmlflushcontext \clf_xmlflushcontext
+\let\xmlflushlinewise \clf_xmlflushlinewise
+\let\xmlflushspacewise \clf_xmlflushspacewise
+\let\xmlfunction \clf_xmlfunction
+\let\xmlinclude \clf_xmlinclude
+\let\xmlincludeoptions \clf_xmlincludeoptions
+\let\xmlinclusion \clf_xmlinclusion
+\let\xmlinclusions \clf_xmlinclusions
+\let\xmlbadinclusions \clf_xmlbadinclusions
+\let\xmlindex \clf_xmlindex
+\let\xmlposition \clf_xmlindex
+\let\xmlinlineverbatim \clf_xmlinlineverbatim
+\let\xmllast \clf_xmllast
+% \xmlload
+% \xmlloadbuffer
+% \xmlloaddata
+\let\xmlloaddirectives \clf_xmlloaddirectives
+% \xmlloadregistered
+\let\xmlmain \clf_xmlmain
+\let\xmlmatch \clf_xmlmatch
+\let\xmlname \clf_xmlname
+\let\xmlnamespace \clf_xmlnamespace
+\let\xmlnonspace \clf_xmlnonspace
+\let\xmlpos \clf_xmlpos
+\let\xmlraw \clf_xmlraw
+\let\xmlregisterns \clf_xmlregisterns % document
+\let\xmlremapname \clf_xmlremapname % element
+\let\xmlremapnamespace \clf_xmlremapnamespace % document
+\let\xmlsave \clf_xmlsave
+%let\xmlsetfunction \clf_xmlsetfunction
+\let\xmlsetsetup \clf_xmlsetsetup
+\let\xmlsnippet \clf_xmlsnippet
+\let\xmlstrip \clf_xmlstrip
+\let\xmlstripanywhere \clf_xmlstripanywhere
+\let\xmlstripnolines \clf_xmlstripnolines
+\let\xmlstripped \clf_xmlstripped
+\let\xmlstrippednolines \clf_xmlstrippednolines
+\let\xmltag \clf_xmltag
+\let\xmltext \clf_xmltext
+\let\xmltobuffer \clf_xmltobuffer % id pattern name
+\let\xmltobufferverbose \clf_xmltobufferverbose % id pattern name
+\let\xmltofile \clf_xmltofile % id pattern filename
+\let\xmltoparameters \clf_xmltoparameters
+\let\xmlverbatim \clf_xmlverbatim
+
+\def\xmlinfo #1{\hbox{\ttxx[\clf_xmlinfo{#1}]}}
+\def\xmlshow #1{\startpacked\ttx\xmlverbatim{#1}\stoppacked}
+
+% we need to pass the last argument as a function, so
+
+\def\xmlsetfunction#1#2#3{\ctxcommand{xmlsetfunction("#1",\!!bs#2\!!es,#3)}}
+
+% goodie:
+
+\unexpanded\def\xmlprettyprint#1#2%
+ {\xmltobufferverbose{#1}{.}{xml-temp}%
+ \ifdefined\scitebuffer
+ \scitebuffer[#2][xml-temp]%
+ \else
+ \typebuffer[xml-temp][\c!option=#2]%
+ \fi}
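+
+% usage sketch (the option name is just an example):
+%
+% \xmlprettyprint{main}{xml}
+%
+% serializes the tree of xml id main into a temporary buffer and types it, using
+% a SciTE lexer when \scitebuffer is available.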
% kind of special:
-\def\xmlstartraw{\ctxlxml{startraw()}}
-\def\xmlstopraw {\ctxlxml{stopraw()}}
+%def\xmlstartraw{\clf_xmlstartraw}
+%def\xmlstopraw {\clf_xmlstopraw}
+
+\let\xmlstartraw\clf_xmlstartraw
+\let\xmlstopraw \clf_xmlstopraw
-% todo: \xmldoifelseattribute
+% these are expandable! todo: \xmldoifelseattribute
-\def\xmldoif #1#2{\ctxlxml{doif (\!!bs#1\!!es,\!!bs#2\!!es)}} % expandable
-\def\xmldoifnot #1#2{\ctxlxml{doifnot (\!!bs#1\!!es,\!!bs#2\!!es)}} % expandable
-\def\xmldoifelse #1#2{\ctxlxml{doifelse (\!!bs#1\!!es,\!!bs#2\!!es)}} % expandable
-\def\xmldoiftext #1#2{\ctxlxml{doiftext (\!!bs#1\!!es,\!!bs#2\!!es)}} % expandable
-\def\xmldoifnottext #1#2{\ctxlxml{doifnottext (\!!bs#1\!!es,\!!bs#2\!!es)}} % expandable
-\def\xmldoifelsetext #1#2{\ctxlxml{doifelsetext(\!!bs#1\!!es,\!!bs#2\!!es)}} % expandable
+\let\xmldoif \clf_xmldoif
+\let\xmldoifnot \clf_xmldoifnot
+\let\xmldoifelse \clf_xmldoifelse
+\let\xmldoiftext \clf_xmldoiftext
+\let\xmldoifnottext \clf_xmldoifnottext
+\let\xmldoifelsetext \clf_xmldoifelsetext
-%def\xmldoifelseempty #1#2{\ctxlxml{doifelseempty("#1","#2")}} % #2, "*" or "" == self not yet implemented
-%def\xmldoifelseselfempty #1{\ctxlxml{doifelseempty("#1")}}
+\let\xmldoifempty \clf_xmldoifempty
+\let\xmldoifnotempty \clf_xmldoifnotempty
+\let\xmldoifelseempty \clf_xmldoifelseempty
+\let\xmldoifselfempty \clf_xmldoifselfempty
+\let\xmldoifnotselfempty \clf_xmldoifnotselfempty
+\let\xmldoifelseselfempty \clf_xmldoifelseselfempty
+
+\let\xmldoiftextelse \xmldoifelsetext
+\let\xmldoifemptyelse \xmldoifelseempty
+\let\xmldoifselfemptyelse \xmldoifelseselfempty
% \startxmlsetups xml:include
% \xmlinclude{main}{include}{filename|href}
@@ -127,28 +231,28 @@
%\ef\xmlsetup#1#2{\setupwithargument{#2}{#1}}
\let\xmlsetup\setupwithargumentswapped
-\let\xmls\setupwithargumentswapped
-\let\xmlw\setupwithargument
+\let\xmls\setupwithargumentswapped % hardly any faster
+\let\xmlw\setupwithargument % hardly any faster
\newtoks \registeredxmlsetups
% todo: 1:xml:whatever always before 3:xml:something
-\unexpanded\def\xmlprependsetup #1{\ctxlxml{installsetup(1,"*","#1")}}
-\unexpanded\def\xmlappendsetup #1{\ctxlxml{installsetup(2,"*","#1")}}
-\unexpanded\def\xmlbeforesetup #1#2{\ctxlxml{installsetup(3,"*","#1","#2")}}
-\unexpanded\def\xmlaftersetup #1#2{\ctxlxml{installsetup(4,"*","#1","#2")}}
+\unexpanded\def\xmlprependsetup #1{\clf_xmlprependsetup {*}{#1}}
+\unexpanded\def\xmlappendsetup #1{\clf_xmlappendsetup {*}{#1}}
+\unexpanded\def\xmlbeforesetup #1#2{\clf_xmlbeforesetup {*}{#1}{#2}}
+\unexpanded\def\xmlaftersetup #1#2{\clf_xmlaftersetup {*}{#1}{#2}}
-\unexpanded\def\xmlprependdocumentsetup #1#2{\ctxlxml{installsetup(1,"#1","#2")}}
-\unexpanded\def\xmlappenddocumentsetup #1#2{\ctxlxml{installsetup(2,"#1","#2")}}
-\unexpanded\def\xmlbeforedocumentsetup#1#2#3{\ctxlxml{installsetup(3,"#1","#2","#3")}}
-\unexpanded\def\xmlafterdocumentsetup #1#2#3{\ctxlxml{installsetup(4,"#1","#2","#3")}}
+\unexpanded\def\xmlprependdocumentsetup #1#2{\clf_xmlprependdocumentsetup{#1}{#2}}
+\unexpanded\def\xmlappenddocumentsetup #1#2{\clf_xmlappenddocumentsetup {#1}{#2}}
+\unexpanded\def\xmlbeforedocumentsetup #1#2#3{\clf_xmlbeforedocumentsetup {#1}{#2}{#3}}
+\unexpanded\def\xmlafterdocumentsetup #1#2#3{\clf_xmlafterdocumentsetup {#1}{#2}{#3}}
-\unexpanded\def\xmlremovesetup #1{\ctxlxml{removesetup("*","#1")}}
-\unexpanded\def\xmlremovedocumentsetup #1#2{\ctxlxml{removesetup("#1","#2")}}
+\unexpanded\def\xmlremovesetup #1{\clf_xmlremovesetup {*}{#1}}
+\unexpanded\def\xmlremovedocumentsetup #1#2{\clf_xmlremovedocumentsetup {#1}{#2}}
-\unexpanded\def\xmlflushdocumentsetups #1#2{\ctxlxml{flushsetups("#1","*","#2")}} % #1 == id where to apply * and #2
-\unexpanded\def\xmlresetdocumentsetups #1{\ctxlxml{resetsetups("#1")}}
+\unexpanded\def\xmlflushdocumentsetups #1#2{\clf_xmlflushdocumentsetups {#1}{*}{#2}} % #1 == id where to apply * and #2
+\unexpanded\def\xmlresetdocumentsetups #1{\clf_xmlresetdocumentsetups {#1}}
\let\xmlregistersetup \xmlappendsetup
\let\xmlregisterdocumentsetup\xmlappenddocumentsetup
@@ -168,8 +272,8 @@
\xmldefaulttotext{#1}% after include
\xmlstoptiming}
-\unexpanded\def\xmlstarttiming{\ctxlua{statistics.starttiming(lxml)}}
-\unexpanded\def\xmlstoptiming {\ctxlua{statistics.stoptiming (lxml)}}
+\unexpanded\def\xmlstarttiming{\clf_xmlstarttiming}
+\unexpanded\def\xmlstoptiming {\clf_xmlstoptiming}
\def\lxml_process#1#2#3#4#5% flag \loader id name what initializersetup
{\begingroup
@@ -227,11 +331,6 @@
% \xmlsetfunction{main}{verbatim}{lxml.displayverbatim}
% \xmlsetfunction{main}{verb} {lxml.inlineverbatim}
-% \unexpanded\def\startxmldisplayverbatim[#1]{}
-% \unexpanded\def\stopxmldisplayverbatim {}
-% \unexpanded\def\startxmlinlineverbatim [#1]{}
-% \unexpanded\def\stopxmlinlineverbatim {}
-
% we use an xml: namespace so one has to define a suitable verbatim, say
%
% \definetyping[xml:verbatim][typing]
@@ -250,7 +349,7 @@
\unexpanded\def\startxmlinlineverbatim[#1]%
{\begingroup
\edef\currenttype{xml:#1}%
- \let\stopxmldisplayverbatim\endgroup
+ \let\stopxmlinlineverbatim\endgroup
\doinitializeverbatim}
% will move but is developed for xml
@@ -269,14 +368,13 @@
\def\inlinemessage #1{\dontleavehmode{\tttf#1}}
\def\displaymessage#1{\blank\inlinemessage{#1}\blank}
-% \def\xmltraceentities % settextcleanup is not defined
-% {\ctxlua{xml.settextcleanup(lxml.trace_text_entities)}%
-% \appendtoks\ctxlxml{showtextentities()}\to\everygoodbye}
-
% processing instructions
-\def\xmlcontextdirective#1% kind class key value
- {\executeifdefined{xml#1directive}\gobblethreearguments}
+\unexpanded\def\xmlinstalldirective#1#2%
+ {\clf_xmlinstalldirective{#1}{\checkedstrippedcsname#2}}
+
+% \def\xmlcontextdirective#1% kind class key value
+% {\executeifdefined{xml#1directive}\gobblethreearguments}
% setting up xml:
%
@@ -299,15 +397,15 @@
\letvalue{\??xmldefaults\v!text }\plusone
\letvalue{\??xmldefaults\v!hidden}\plustwo
-\unexpanded\def\xmldefaulttotext#1%
+\unexpanded\def\xmldefaulttotext
{\ifcase\xmlprocessingmode
- % unset
+ \expandafter\gobbleoneargument % unset
\or
- \ctxlxml{setcommandtotext("#1")}% 1
+ \expandafter\clf_xmlsetcommandtotext % 1
\or
- \ctxlxml{setcommandtonone("#1")}% 2
+ \expandafter\clf_xmlsetcommandtonone % 2
\else
- % unset
+ \expandafter\gobbleoneargument % unset
\fi}
\appendtoks
@@ -324,22 +422,23 @@
%def\xmlvalue #1#2{\ifcsname\??xmlmapvalue#1:#2\endcsname\csname\??xmlmapvalue#1:#2\expandafter\expandafter\gobbleoneargument\expandafter\endcsname\else\expandafter\firstofoneargument\fi}
\def\xmldoifelsevalue #1#2{\ifcsname\??xmlmapvalue#1:#2\endcsname\expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments\fi}
+\let\xmldoifvalueelse\xmldoifelsevalue
+
\let\xmlmapval\xmlmapvalue
\let\xmlval \xmlvalue
%D Experimental:
-\def\xmlgetindex #1{\ctxlxml{getindex("\xmldocument","#1")}}
-\def\xmlrawindex #1{\ctxlxml{rawindex("#1")}}
-\def\xmlwithindex #1#2{\ctxlxml{withindex("\xmldocument","#1","#2")}}
+\def\xmlgetindex #1{\clf_xmlgetindex {\xmldocument}{#1}}
+\def\xmlwithindex #1#2{\clf_xmlwithindex{\xmldocument}{#1}{#2}}
\def\xmlreference #1#2{\string\xmlwithindex{#1}{#2}}
%D Entities (might change):
\setnewconstant\xmlautoentities\plusone % 0=off, 1=upper, 2=upper,lower
-\def\xmlsetentity#1#2{\ctxlua{xml.registerentity('#1',\!!bs\detokenize{#2}\!!es)}}
-\def\xmltexentity#1#2{\ctxlua{lxml.registerentity('#1',\!!bs\detokenize{#2}\!!es)}}
+\unexpanded\def\xmlsetentity#1#2{\clf_xmlsetentity{#1}{\detokenize{#2}}}
+\unexpanded\def\xmltexentity#1#2{\clf_xmltexentity{#1}{\detokenize{#2}}}
% \xmlsetentity{tex}{\TEX{}} % {} needed
@@ -391,6 +490,16 @@
\let\processxmlfile \processXMLfile
\let\xmldata \XMLdata
+\unexpanded\def\xmlsetinjectors[#1]%
+ {\clf_xmlsetinjectors{#1}}
+
+\unexpanded\def\xmlresetinjectors
+ {\clf_xmlresetinjectors{}}
+
+\def\xmlinjector#1{\executeifdefined{#1}\donothing}
+
+\let\xmlapplyselectors\clf_xmlapplyselectors
+
\protect \endinput
% \newcount\charactersactiveoffset \charactersactiveoffset="10000
diff --git a/tex/context/base/lxml-lpt.lua b/tex/context/base/lxml-lpt.lua
index 51ab321b9..decb6567b 100644
--- a/tex/context/base/lxml-lpt.lua
+++ b/tex/context/base/lxml-lpt.lua
@@ -505,6 +505,9 @@ local function apply_expression(list,expression,order)
return collected
end
+-- this one can be made faster but there are not that many conversions so it doesn't
+-- really pay off
+
local P, V, C, Cs, Cc, Ct, R, S, Cg, Cb = lpeg.P, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.R, lpeg.S, lpeg.Cg, lpeg.Cb
local spaces = S(" \n\r\t\f")^0
@@ -541,12 +544,11 @@ local lp_builtin = P (
local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * ((R("az","AZ") + S("-_:"))^1) * Cc("'])")
--- lp_fastpos_p = (P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end
--- lp_fastpos_n = (P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end
-
-lp_fastpos_p = P("+")^0 * R("09")^1 * P(-1) / "l==%0"
-lp_fastpos_n = P("-") * R("09")^1 * P(-1) / "(%0<0 and (#list+%0==l))"
+----- lp_fastpos_p = (P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end
+----- lp_fastpos_n = (P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end
+local lp_fastpos_p = P("+")^0 * R("09")^1 * P(-1) / "l==%0"
+local lp_fastpos_n = P("-") * R("09")^1 * P(-1) / "(%0<0 and (#list+%0==l))"
local lp_fastpos = lp_fastpos_n + lp_fastpos_p
local lp_reserved = C("and") + C("or") + C("not") + C("div") + C("mod") + C("true") + C("false")
@@ -806,7 +808,7 @@ local function nodesettostring(set,nodetest)
if not ns or ns == "" then ns = "*" end
if not tg or tg == "" then tg = "*" end
tg = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg)
- t[i] = (directive and tg) or format("not(%s)",tg)
+ t[#t+1] = (directive and tg) or format("not(%s)",tg)
end
if nodetest == false then
return format("not(%s)",concat(t,"|"))
@@ -1039,37 +1041,6 @@ local function normal_apply(list,parsed,nofparsed,order)
return collected
end
---~ local function applylpath(list,pattern)
---~ -- we avoid an extra call
---~ local parsed = cache[pattern]
---~ if parsed then
---~ lpathcalls = lpathcalls + 1
---~ lpathcached = lpathcached + 1
---~ elseif type(pattern) == "table" then
---~ lpathcalls = lpathcalls + 1
---~ parsed = pattern
---~ else
---~ parsed = lpath(pattern) or pattern
---~ end
---~ if not parsed then
---~ return
---~ end
---~ local nofparsed = #parsed
---~ if nofparsed == 0 then
---~ return -- something is wrong
---~ end
---~ local one = list[1] -- we could have a third argument: isroot and list or list[1] or whatever we like ... todo
---~ if not one then
---~ return -- something is wrong
---~ elseif not trace_lpath then
---~ return normal_apply(list,parsed,nofparsed,one.mi)
---~ elseif trace_lprofile then
---~ return profiled_apply(list,parsed,nofparsed,one.mi)
---~ else
---~ return traced_apply(list,parsed,nofparsed,one.mi)
---~ end
---~ end
-
local function applylpath(list,pattern)
if not list then
return
@@ -1163,7 +1134,6 @@ expressions.print = function(...)
return true
end
-expressions.contains = find
expressions.find = find
expressions.upper = upper
expressions.lower = lower
@@ -1187,6 +1157,10 @@ function expressions.contains(str,pattern)
return false
end
+function xml.expressions.idstring(str)
+ return type(str) == "string" and gsub(str,"^#","") or ""
+end
+
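+-- For example (a sketch): idstring("#fig-1") gives "fig-1", presumably handy when
+-- comparing href like attribute values against plain ids in an lpath expression.
+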
-- user interface
local function traverse(root,pattern,handle)
@@ -1384,8 +1358,13 @@ function xml.elements(root,pattern,reverse) -- r, d, k
local collected = applylpath(root,pattern)
if not collected then
return dummy
- elseif reverse then
- local c = #collected + 1
+ end
+ local n = #collected
+ if n == 0 then
+ return dummy
+ end
+ if reverse then
+ local c = n + 1
return function()
if c > 1 then
c = c - 1
@@ -1395,7 +1374,7 @@ function xml.elements(root,pattern,reverse) -- r, d, k
end
end
else
- local n, c = #collected, 0
+ local c = 0
return function()
if c < n then
c = c + 1
@@ -1411,8 +1390,13 @@ function xml.collected(root,pattern,reverse) -- e
local collected = applylpath(root,pattern)
if not collected then
return dummy
- elseif reverse then
- local c = #collected + 1
+ end
+ local n = #collected
+ if n == 0 then
+ return dummy
+ end
+ if reverse then
+ local c = n + 1
return function()
if c > 1 then
c = c - 1
@@ -1420,7 +1404,7 @@ function xml.collected(root,pattern,reverse) -- e
end
end
else
- local n, c = #collected, 0
+ local c = 0
return function()
if c < n then
c = c + 1
@@ -1441,7 +1425,7 @@ end
-- texy (see xfdf):
-local function split(e)
+local function split(e) -- todo: use helpers / lpeg
local dt = e.dt
if dt then
for i=1,#dt do
diff --git a/tex/context/base/lxml-sor.mkiv b/tex/context/base/lxml-sor.mkiv
index 0ee1f16f3..0d8eb6ba1 100644
--- a/tex/context/base/lxml-sor.mkiv
+++ b/tex/context/base/lxml-sor.mkiv
@@ -19,10 +19,13 @@
\unprotect
+% the flusher is expandable so that it can be used in tables (noalign
+% interferences)
+
\unexpanded\def\xmlresetsorter #1{\ctxlxml{sorters.reset("#1")}}
\unexpanded\def\xmladdsortentry#1#2#3{\ctxlxml{sorters.add("#1","#2",\!!bs#3\!!es)}}
\unexpanded\def\xmlshowsorter #1{\ctxlxml{sorters.show("#1")}}
-\unexpanded\def\xmlflushsorter #1#2{\ctxlxml{sorters.flush("#1","#2")}}
+ \def\xmlflushsorter #1#2{\ctxlxml{sorters.flush("#1","#2")}}
\unexpanded\def\xmlsortentries #1{\ctxlxml{sorters.sort("#1")}}
\protect \endinput
diff --git a/tex/context/base/lxml-tab.lua b/tex/context/base/lxml-tab.lua
index 3e10eb96d..47e2cac61 100644
--- a/tex/context/base/lxml-tab.lua
+++ b/tex/context/base/lxml-tab.lua
@@ -42,10 +42,10 @@ local xml = xml
--~ local xml = xml
local concat, remove, insert = table.concat, table.remove, table.insert
-local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
+local type, next, setmetatable, getmetatable, tonumber, rawset = type, next, setmetatable, getmetatable, tonumber, rawset
local lower, find, match, gsub = string.lower, string.find, string.match, string.gsub
local utfchar = utf.char
-local lpegmatch = lpeg.match
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
local formatters = string.formatters
@@ -243,8 +243,10 @@ local function add_end(spacing, namespace, tag)
top = stack[#stack]
if #stack < 1 then
errorstr = formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "")
+ report_xml(errorstr)
elseif toclose.tg ~= tag then -- no namespace check
errorstr = formatters["unable to close %s with %s %s"](toclose.tg,tag,xml.checkerror(top,toclose) or "")
+ report_xml(errorstr)
end
dt = top.dt
dt[#dt+1] = toclose
@@ -254,11 +256,38 @@ local function add_end(spacing, namespace, tag)
end
end
+-- local function add_text(text)
+-- if cleanup and #text > 0 then
+-- dt[#dt+1] = cleanup(text)
+-- else
+-- dt[#dt+1] = text
+-- end
+-- end
+
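+-- The variant below appends to a preceding string entry in dt when there is one,
+-- so text that arrives in several pieces (for instance via the error recovery
+-- further down) ends up as one string instead of a row of fragments.
+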
local function add_text(text)
+ local n = #dt
if cleanup and #text > 0 then
- dt[#dt+1] = cleanup(text)
+ if n > 0 then
+ local s = dt[n]
+ if type(s) == "string" then
+ dt[n] = s .. cleanup(text)
+ else
+ dt[n+1] = cleanup(text)
+ end
+ else
+ dt[1] = cleanup(text)
+ end
else
- dt[#dt+1] = text
+ if n > 0 then
+ local s = dt[n]
+ if type(s) == "string" then
+ dt[n] = s .. text
+ else
+ dt[n+1] = text
+ end
+ else
+ dt[1] = text
+ end
end
end
@@ -297,8 +326,11 @@ local function attribute_specification_error(str)
return str
end
+local badentity = "&error;"
+local badentity = "&"
+
xml.placeholders = {
- unknown_dec_entity = function(str) return str == "" and "&error;" or formatters["&%s;"](str) end,
+ unknown_dec_entity = function(str) return str == "" and badentity or formatters["&%s;"](str) end,
unknown_hex_entity = function(str) return formatters["%s;"](str) end,
unknown_any_entity = function(str) return formatters["%s;"](str) end,
}
@@ -325,12 +357,13 @@ end
-- one level expansion (simple case), no checking done
-local rest = (1-P(";"))^0
-local many = P(1)^0
+local p_rest = (1-P(";"))^0
+local p_many = P(1)^0
+local p_char = lpegpatterns.utf8character
local parsedentity =
- P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) +
- (P("#x")*(many/fromhex) + P("#")*(many/fromdec))
+ P("&") * (P("#x")*(p_rest/fromhex) + P("#")*(p_rest/fromdec)) * P(";") * P(-1) +
+ (P("#x")*(p_many/fromhex) + P("#")*(p_many/fromdec))
-- parsing in the xml file
@@ -367,7 +400,41 @@ local privates_n = {
-- keeps track of defined ones
}
-local escaped = utf.remapper(privates_u)
+-- -- local escaped = utf.remapper(privates_u) -- can't be used as it freezes
+-- -- local unprivatized = utf.remapper(privates_p) -- can't be used as it freezes
+--
+-- local p_privates_u = false
+-- local p_privates_p = false
+--
+-- table.setmetatablenewindex(privates_u,function(t,k,v) rawset(t,k,v) p_privates_u = false end)
+-- table.setmetatablenewindex(privates_p,function(t,k,v) rawset(t,k,v) p_privates_p = false end)
+--
+-- local function escaped(str)
+-- if not str or str == "" then
+-- return ""
+-- else
+-- if not p_privates_u then
+-- p_privates_u = Cs((lpeg.utfchartabletopattern(privates_u)/privates_u + p_char)^0)
+-- end
+-- return lpegmatch(p_privates_u,str)
+-- end
+-- end
+--
+-- local function unprivatized(str)
+-- if not str or str == "" then
+-- return ""
+-- else
+-- if not p_privates_p then
+-- p_privates_p = Cs((lpeg.utfchartabletopattern(privates_p)/privates_p + p_char)^0)
+-- end
+-- return lpegmatch(p_privates_p,str)
+-- end
+-- end
+
+local escaped = utf.remapper(privates_u,"dynamic")
+local unprivatized = utf.remapper(privates_p,"dynamic")
+
+xml.unprivatized = unprivatized
local function unescaped(s)
local p = privates_n[s]
@@ -382,10 +449,7 @@ local function unescaped(s)
return p
end
-local unprivatized = utf.remapper(privates_p)
-
xml.privatetoken = unescaped
-xml.unprivatized = unprivatized
xml.privatecodes = privates_n
local function handle_hex_entity(str)
@@ -484,7 +548,7 @@ local function handle_any_entity(str)
report_xml("keeping entity &%s;",str)
end
if str == "" then
- a = "&error;"
+ a = badentity
else
a = "&" .. str .. ";"
end
@@ -513,7 +577,7 @@ local function handle_any_entity(str)
if trace_entities then
report_xml("invalid entity &%s;",str)
end
- a = "&error;"
+ a = badentity
acache[str] = a
else
if trace_entities then
@@ -528,8 +592,19 @@ local function handle_any_entity(str)
end
end
-local function handle_end_entity(chr)
- report_xml("error in entity, %a found instead of %a",chr,";")
+-- local function handle_end_entity(chr)
+-- report_xml("error in entity, %a found instead of %a",chr,";")
+-- end
+
+local function handle_end_entity(str)
+ report_xml("error in entity, %a found without ending %a",str,";")
+ return str
+end
+
+local function handle_crap_error(chr)
+ report_xml("error in parsing, unexpected %a found ",chr)
+ add_text(chr)
+ return chr
end
local space = S(' \r\n\t')
@@ -546,18 +621,20 @@ local valid = R('az', 'AZ', '09') + S('_-.')
local name_yes = C(valid^1) * colon * C(valid^1)
local name_nop = C(P(true)) * C(valid^1)
local name = name_yes + name_nop
-local utfbom = lpeg.patterns.utfbom -- no capture
+local utfbom = lpegpatterns.utfbom -- no capture
local spacing = C(space^0)
----- entitycontent = (1-open-semicolon)^0
-local anyentitycontent = (1-open-semicolon-space-close)^0
+local anyentitycontent = (1-open-semicolon-space-close-ampersand)^0
local hexentitycontent = R("AF","af","09")^0
local decentitycontent = R("09")^0
local parsedentity = P("#")/"" * (
P("x")/"" * (hexentitycontent/handle_hex_entity) +
(decentitycontent/handle_dec_entity)
) + (anyentitycontent/handle_any_entity)
-local entity = ampersand/"" * parsedentity * ( (semicolon/"") + #(P(1)/handle_end_entity))
+----- entity = ampersand/"" * parsedentity * ( (semicolon/"") + #(P(1)/handle_end_entity))
+local entity = (ampersand/"") * parsedentity * (semicolon/"")
+ + ampersand * (anyentitycontent / handle_end_entity)
local text_unparsed = C((1-open)^1)
local text_parsed = Cs(((1-open-ampersand)^1 + entity)^1)
@@ -590,6 +667,8 @@ local emptyelement = (spacing * open * name * attributes * optionals
local beginelement = (spacing * open * name * attributes * optionalspace * close) / add_begin
local endelement = (spacing * open * slash * name * optionalspace * close) / add_end
+-- todo: combine the opens in:
+
local begincomment = open * P("!--")
local endcomment = P("--") * close
local begininstruction = open * P("?")
@@ -635,6 +714,14 @@ local comment = (spacing * begincomment * somecomment * endcomm
local cdata = (spacing * begincdata * somecdata * endcdata ) / function(...) add_special("@cd@",...) end
local doctype = (spacing * begindoctype * somedoctype * enddoctype ) / function(...) add_special("@dt@",...) end
+-- local text_unparsed = C((1-open)^1)
+-- local text_parsed = Cs(((1-open-ampersand)^1 + entity)^1)
+
+local crap_parsed = 1 - beginelement - endelement - emptyelement - begininstruction - begincomment - begincdata - ampersand
+local crap_unparsed = 1 - beginelement - endelement - emptyelement - begininstruction - begincomment - begincdata
+local parsedcrap = Cs((crap_parsed^1 + entity)^1) / handle_crap_error
+local unparsedcrap = Cs((crap_unparsed )^1) / handle_crap_error
+
-- nicer but slower:
--
-- local instruction = (Cc("@pi@") * spacing * begininstruction * someinstruction * endinstruction) / add_special
@@ -651,13 +738,13 @@ local trailer = space^0 * (text_unparsed/set_message)^0
local grammar_parsed_text = P { "preamble",
preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * V("parent") * trailer,
parent = beginelement * V("children")^0 * endelement,
- children = parsedtext + V("parent") + emptyelement + comment + cdata + instruction,
+ children = parsedtext + V("parent") + emptyelement + comment + cdata + instruction + parsedcrap,
}
local grammar_unparsed_text = P { "preamble",
preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * V("parent") * trailer,
parent = beginelement * V("children")^0 * endelement,
- children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
+ children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction + unparsedcrap,
}
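-- So a fragment like "<doc>a < b</doc>" no longer kills the parse: the stray
-- "< b" is caught by parsedcrap, reported via handle_crap_error and added back
-- as text (the sample input is made up).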
-- maybe we will add settings to result as well
@@ -697,7 +784,7 @@ local function _xmlconvert_(data, settings)
errorstr = "empty xml file"
elseif utfize or resolve then
if lpegmatch(grammar_parsed_text,data) then
- errorstr = ""
+ -- errorstr = "" can be set!
else
errorstr = "invalid xml file - parsed text"
end
@@ -713,6 +800,8 @@ local function _xmlconvert_(data, settings)
local result
if errorstr and errorstr ~= "" then
result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } }
+setmetatable(result, mt)
+setmetatable(result.dt[1], mt)
setmetatable(stack, mt)
local errorhandler = settings.error_handler
if errorhandler == false then
@@ -746,8 +835,11 @@ local function _xmlconvert_(data, settings)
end
if errorstr and errorstr ~= "" then
result.error = true
+ else
+ errorstr = nil
end
result.statistics = {
+ errormessage = errorstr,
entities = {
decimals = dcache,
hexadecimals = hcache,
@@ -765,7 +857,7 @@ end
-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
-- in a protector:
-function xmlconvert(data,settings)
+local function xmlconvert(data,settings)
local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
if ok then
return result
@@ -916,14 +1008,18 @@ and then handle the lot.
-- new experimental reorganized serialize
-local function verbose_element(e,handlers) -- options
+local f_attribute = formatters['%s=%q']
+
+local function verbose_element(e,handlers,escape) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
+ local n = 0
for k,v in next, eat do
- ats[#ats+1] = formatters['%s=%q'](k,escaped(v))
+ n = n + 1
+ ats[n] = f_attribute(k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -1016,25 +1112,27 @@ local function verbose_document(e,handlers)
end
local function serialize(e,handlers,...)
- local initialize = handlers.initialize
- local finalize = handlers.finalize
- local functions = handlers.functions
- if initialize then
- local state = initialize(...)
- if not state == true then
- return state
+ if e then
+ local initialize = handlers.initialize
+ local finalize = handlers.finalize
+ local functions = handlers.functions
+ if initialize then
+ local state = initialize(...)
+ if not state == true then
+ return state
+ end
+ end
+ local etg = e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ -- elseif type(e) == "string" then
+ -- functions["@tx@"](e,handlers)
+ else
+ functions["@dc@"](e,handlers) -- dc ?
+ end
+ if finalize then
+ return finalize()
end
- end
- local etg = e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- -- elseif type(e) == "string" then
- -- functions["@tx@"](e,handlers)
- else
- functions["@dc@"](e,handlers) -- dc ?
- end
- if finalize then
- return finalize()
end
end
diff --git a/tex/context/base/lxml-tex.lua b/tex/context/base/lxml-tex.lua
index 2cbdfc886..550a06a18 100644
--- a/tex/context/base/lxml-tex.lua
+++ b/tex/context/base/lxml-tex.lua
@@ -11,7 +11,7 @@ if not modules then modules = { } end modules ['lxml-tex'] = {
-- be an cldf-xml helper library.
local utfchar = utf.char
-local concat, insert, remove = table.concat, table.insert, table.remove
+local concat, insert, remove, sortedkeys = table.concat, table.insert, table.remove, table.sortedkeys
local format, sub, gsub, find, gmatch, match = string.format, string.sub, string.gsub, string.find, string.gmatch, string.match
local type, next, tonumber, tostring, select = type, next, tonumber, tostring, select
local lpegmatch = lpeg.match
@@ -19,40 +19,66 @@ local P, S, C, Cc = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc
local tex, xml = tex, xml
local lowerchars, upperchars, lettered = characters.lower, characters.upper, characters.lettered
+local basename, dirname, joinfile = file.basename, file.dirname, file.join
lxml = lxml or { }
local lxml = lxml
-local catcodenumbers = catcodes.numbers
-local ctxcatcodes = catcodenumbers.ctxcatcodes -- todo: use different method
-local notcatcodes = catcodenumbers.notcatcodes -- todo: use different method
-
-local commands = commands
-local context = context
-local contextsprint = context.sprint -- with catcodes (here we use fast variants, but with option for tracing)
-
-local xmlelements, xmlcollected, xmlsetproperty = xml.elements, xml.collected, xml.setproperty
-local xmlwithelements = xml.withelements
-local xmlserialize, xmlcollect, xmltext, xmltostring = xml.serialize, xml.collect, xml.text, xml.tostring
-local xmlapplylpath = xml.applylpath
-local xmlunprivatized, xmlprivatetoken, xmlprivatecodes = xml.unprivatized, xml.privatetoken, xml.privatecodes
-
-local variables = (interfaces and interfaces.variables) or { }
-
-local insertbeforevalue, insertaftervalue = utilities.tables.insertbeforevalue, utilities.tables.insertaftervalue
-
-local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
-
-local trace_setups = false trackers.register("lxml.setups", function(v) trace_setups = v end)
-local trace_loading = false trackers.register("lxml.loading", function(v) trace_loading = v end)
-local trace_access = false trackers.register("lxml.access", function(v) trace_access = v end)
-local trace_comments = false trackers.register("lxml.comments", function(v) trace_comments = v end)
-local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
-
-local report_lxml = logs.reporter("xml","tex")
-local report_xml = logs.reporter("xml","tex")
-
-local forceraw, rawroot = false, nil
+local catcodenumbers = catcodes.numbers
+local ctxcatcodes = catcodenumbers.ctxcatcodes -- todo: use different method
+local notcatcodes = catcodenumbers.notcatcodes -- todo: use different method
+
+local commands = commands
+local context = context
+local contextsprint = context.sprint -- with catcodes (here we use fast variants, but with option for tracing)
+
+local implement = interfaces.implement
+
+local xmlelements = xml.elements
+local xmlcollected = xml.collected
+local xmlsetproperty = xml.setproperty
+local xmlwithelements = xml.withelements
+local xmlserialize = xml.serialize
+local xmlcollect = xml.collect
+local xmltext = xml.text
+local xmltostring = xml.tostring
+local xmlapplylpath = xml.applylpath
+local xmlunprivatized = xml.unprivatized
+local xmlprivatetoken = xml.privatetoken
+local xmlprivatecodes = xml.privatecodes
+local xmlstripelement = xml.stripelement
+local xmlinclusion = xml.inclusion
+local xmlinclusions = xml.inclusions
+local xmlbadinclusions = xml.badinclusions
+local xmlcontent = xml.content
+
+local variables = interfaces and interfaces.variables or { }
+
+local settings_to_hash = utilities.parsers.settings_to_hash
+local settings_to_set = utilities.parsers.settings_to_set
+local options_to_hash = utilities.parsers.options_to_hash
+local options_to_array = utilities.parsers.options_to_array
+
+local insertbeforevalue = utilities.tables.insertbeforevalue
+local insertaftervalue = utilities.tables.insertaftervalue
+
+local resolveprefix = resolvers.resolve
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+
+local trace_setups = false trackers.register("lxml.setups", function(v) trace_setups = v end)
+local trace_loading = false trackers.register("lxml.loading", function(v) trace_loading = v end)
+local trace_access = false trackers.register("lxml.access", function(v) trace_access = v end)
+local trace_comments = false trackers.register("lxml.comments", function(v) trace_comments = v end)
+local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
+local trace_selectors = false trackers.register("lxml.selectors",function(v) trace_selectors = v end)
+
+local report_lxml = logs.reporter("lxml","tex")
+local report_xml = logs.reporter("xml","tex")
+
+local forceraw = false
+local rawroot = nil
-- tex entities
--
@@ -62,7 +88,7 @@ lxml.entities = lxml.entities or { }
storage.register("lxml/entities",lxml.entities,"lxml.entities")
---~ xml.placeholders.unknown_any_entity = nil -- has to be per xml
+-- xml.placeholders.unknown_any_entity = nil -- has to be per xml
local xmlentities = xml.entities
local texentities = lxml.entities
@@ -351,7 +377,7 @@ end
function lxml.checkindex(name)
local root = getid(name)
- return (root and root.index) or 0
+ return root and root.index or 0
end
function lxml.withindex(name,n,command) -- will change as name is always there now
@@ -414,7 +440,7 @@ function lxml.convert(id,data,entities,compress,currentresource)
end
function lxml.load(id,filename,compress,entities)
- filename = commands.preparedfile(filename) -- not commands!
+ filename = ctxrunner.preparedfile(filename)
if trace_loading then
report_lxml("loading file %a as %a",filename,id)
end
@@ -433,16 +459,43 @@ function lxml.register(id,xmltable,filename)
return xmltable
end
-function lxml.include(id,pattern,attribute,recurse)
+-- recurse prepare rootpath resolve basename
+
+local options_true = { "recurse", "prepare", "rootpath" }
+local options_nil = { "prepare", "rootpath" }
+
+function lxml.include(id,pattern,attribute,options)
starttiming(xml)
local root = getid(id)
- xml.include(root,pattern,attribute,recurse,function(filename)
+ if options == true then
+ -- downward compatible
+ options = options_true
+ elseif not options then
+ -- downward compatible
+ options = options_nil
+ else
+ options = settings_to_hash(options) or { }
+ end
+ xml.include(root,pattern,attribute,options.recurse,function(filename)
if filename then
- filename = commands.preparedfile(filename)
- if file.dirname(filename) == "" and root.filename then
- local dn = file.dirname(root.filename)
- if dn ~= "" then
- filename = file.join(dn,filename)
+ -- preprocessing
+ if options.prepare then
+ filename = commands.preparedfile(filename)
+ end
+ -- handy if we have a flattened structure
+ if options.basename then
+ filename = basename(filename)
+ end
+ if options.resolve then
+ filename = resolveprefix(filename) or filename
+ end
+ -- some protection
+ if options.rootpath then
+ if dirname(filename) == "" and root.filename then
+ local dn = dirname(root.filename)
+ if dn ~= "" then
+ filename = joinfile(dn,filename)
+ end
end
end
if trace_loading then
@@ -457,6 +510,31 @@ function lxml.include(id,pattern,attribute,recurse)
stoptiming(xml)
end
+function lxml.inclusion(id,default)
+ local inclusion = xmlinclusion(getid(id),default)
+ if inclusion then
+ context(inclusion)
+ end
+end
+
+function lxml.inclusions(id,sorted)
+ local inclusions = xmlinclusions(getid(id),sorted)
+ if inclusions then
+ context(concat(inclusions,","))
+ end
+end
+
+function lxml.badinclusions(id,sorted)
+ local badinclusions = xmlbadinclusions(getid(id),sorted)
+ if badinclusions then
+ context(concat(badinclusions,","))
+ end
+end
+
+function lxml.save(id,name)
+ xml.save(getid(id),name)
+end
+
function xml.getbuffer(name,compress,entities) -- we need to make sure that commands are processed
if not name or name == "" then
name = tex.jobname
@@ -538,30 +616,49 @@ local function tex_element(e,handlers)
end
end
+--
+--
+
local pihandlers = { } xml.pihandlers = pihandlers
-local category = P("context-") * C((1-P("-"))^1) * P("-directive")
local space = S(" \n\r")
local spaces = space^0
local class = C((1-space)^0)
local key = class
+local rest = C(P(1)^0)
local value = C(P(1-(space * -1))^0)
-
-local parser = category * spaces * class * spaces * key * spaces * value
-
-pihandlers[#pihandlers+1] = function(str)
- if str then
- local a, b, c, d = lpegmatch(parser,str)
- if d then
- contextsprint(ctxcatcodes,"\\xmlcontextdirective{",a,"}{",b,"}{",c,"}{",d,"}")
+local category = P("context-") * (
+ C((1-P("-"))^1) * P("-directive")
+ + P("directive") * spaces * key
+ )
+
+local c_parser = category * spaces * value -- rest
+local k_parser = class * spaces * key * spaces * rest --value
+
+implement {
+ name = "xmlinstalldirective",
+ arguments = { "string", "string" },
+ actions = function(name,csname)
+ if csname then
+ local keyvalueparser = k_parser / context[csname]
+ local keyvaluechecker = function(category,rest,e)
+ lpegmatch(keyvalueparser,rest)
+ end
+ pihandlers[name] = keyvaluechecker
end
end
-end
+}
local function tex_pi(e,handlers)
local str = e.dt[1]
- for i=1,#pihandlers do
- pihandlers[i](str)
+ if str and str ~= "" then
+ local category, rest = lpegmatch(c_parser,str)
+ if category and rest and #rest > 0 then
+ local handler = pihandlers[category]
+ if handler then
+ handler(category,rest,e)
+ end
+ end
end
end
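-- A slightly simplified standalone replica of c_parser (rest is used here
-- where the real parser uses value, which additionally drops one trailing
-- space); the sample strings are made up:

local lpeg = require("lpeg")
local P, C, S = lpeg.P, lpeg.C, lpeg.S

local space    = S(" \n\r")
local spaces   = space^0
local key      = C((1-space)^0)
local rest     = C(P(1)^0)
local category = P("context-") * (
                     C((1-P("-"))^1) * P("-directive")
                   + P("directive") * spaces * key
                 )
local parser   = category * spaces * rest

print(lpeg.match(parser,"context-directive injector break web"))
-- injector    break web
print(lpeg.match(parser,"context-whatever-directive some key values"))
-- whatever    some key values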
@@ -915,16 +1012,18 @@ function lxml.setsetup(id,pattern,setup)
end
end
end
+ elseif setup == "-" then
+ for c=1,nc do
+ collected[c].command = false
+ end
+ elseif setup == "+" then
+ for c=1,nc do
+ collected[c].command = true
+ end
else
for c=1,nc do
local e = collected[c]
- if setup == "-" then
- e.command = false
- elseif setup == "+" then
- e.command = true
- else
- e.command = e.tg
- end
+ e.command = e.tg
end
end
elseif trace_setups then
@@ -967,16 +1066,18 @@ function lxml.setsetup(id,pattern,setup)
end
end
end
+ elseif b == "-" then
+ for c=1,nc do
+ collected[c].command = false
+ end
+ elseif b == "+" then
+ for c=1,nc do
+ collected[c].command = true
+ end
else
for c=1,nc do
local e = collected[c]
- if b == "-" then
- e.command = false
- elseif b == "+" then
- e.command = true
- else
- e.command = a .. e.tg
- end
+ e.command = a .. e.tg
end
end
elseif trace_setups then
@@ -1112,11 +1213,13 @@ local function command(collected,cmd,otherwise)
local e = collected[c]
local ix = e.ix
local name = e.name
- if not ix then
+ if name and not ix then
lxml.addindex(name,false,true)
ix = e.ix
end
- if wildcard then
+ if not ix or not name then
+ report_lxml("no valid node index for element %a using command %s",name or "?",cmd)
+ elseif wildcard then
contextsprint(ctxcatcodes,"\\xmlw{",(gsub(cmd,"%*",e.tg)),"}{",name,"::",ix,"}")
else
contextsprint(ctxcatcodes,"\\xmlw{",cmd,"}{",name,"::",ix,"}")
@@ -1186,7 +1289,7 @@ local function stripped(collected) -- tricky as we strip in place
local nc = #collected
if nc > 0 then
for c=1,nc do
- cprint(xml.stripelement(collected[c]))
+ cprint(xmlstripelement(collected[c]))
end
end
end
@@ -1311,10 +1414,11 @@ function texfinalizers.name(collected,n)
c = collected[nc-n+1]
end
if c then
- if c.ns == "" then
+ local ns = c.ns
+ if not ns or ns == "" then
contextsprint(ctxcatcodes,c.tg)
else
- contextsprint(ctxcatcodes,c.ns,":",c.tg)
+ contextsprint(ctxcatcodes,ns,":",c.tg)
end
end
end
@@ -1327,11 +1431,11 @@ function texfinalizers.tags(collected,nonamespace)
if nc > 0 then
for c=1,nc do
local e = collected[c]
- local ns, tg = e.ns, e.tg
- if nonamespace or ns == "" then
- contextsprint(ctxcatcodes,tg)
+ local ns = e.ns
+ if nonamespace or (not ns or ns == "") then
+ contextsprint(ctxcatcodes,e.tg)
else
- contextsprint(ctxcatcodes,ns,":",tg)
+ contextsprint(ctxcatcodes,ns,":",e.tg)
end
end
end
@@ -1341,11 +1445,10 @@ end
--
local function verbatim(id,before,after)
- local root = getid(id)
- if root then
- if before then contextsprint(ctxcatcodes,before,"[",root.tg or "?","]") end
- lxml.toverbatim(xmltostring(root.dt))
---~ lxml.toverbatim(xml.totext(root.dt))
+ local e = getid(id)
+ if e then
+ if before then contextsprint(ctxcatcodes,before,"[",e.tg or "?","]") end
+ lxml.toverbatim(xmltostring(e.dt)) -- lxml.toverbatim(xml.totext(e.dt))
if after then contextsprint(ctxcatcodes,after) end
end
end
@@ -1429,7 +1532,7 @@ end
lxml.content = text
function lxml.position(id,pattern,n)
- position(xmlapplylpath(getid(id),pattern),n)
+ position(xmlapplylpath(getid(id),pattern),tonumber(n))
end
function lxml.chainattribute(id,pattern,a,default)
@@ -1445,72 +1548,136 @@ function lxml.concat(id,pattern,separator,lastseparator,textonly)
end
function lxml.element(id,n)
- position(xmlapplylpath(getid(id),"/*"),n)
+ position(xmlapplylpath(getid(id),"/*"),tonumber(n)) -- tonumber handy
end
lxml.index = lxml.position
function lxml.pos(id)
- local root = getid(id)
- contextsprint(ctxcatcodes,(root and root.ni) or 0)
-end
+ local e = getid(id)
+ contextsprint(ctxcatcodes,e and e.ni or 0)
+end
+
+-- function lxml.att(id,a,default)
+-- local root = getid(id)
+-- if root then
+-- local at = root.at
+-- local str = (at and at[a]) or default
+-- if str and str ~= "" then
+-- contextsprint(notcatcodes,str)
+-- end
+-- elseif default then
+-- contextsprint(notcatcodes,default)
+-- end
+-- end
+--
+-- no need for an assignment so:
function lxml.att(id,a,default)
- local root = getid(id)
- if root then
- local at = root.at
- local str = (at and at[a]) or default
- if str and str ~= "" then
- contextsprint(notcatcodes,str)
+ local e = getid(id)
+ if e then
+ local at = e.at
+ if at then
+ -- normally always true
+ local str = at[a]
+ if not str then
+ if default and default ~= "" then
+ contextsprint(notcatcodes,default)
+ end
+ elseif str ~= "" then
+ contextsprint(notcatcodes,str)
+ else
+ -- explicit empty is valid
+ end
+ elseif default and default ~= "" then
+ contextsprint(notcatcodes,default)
end
- elseif default then
+ elseif default and default ~= "" then
contextsprint(notcatcodes,default)
end
end
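
-- A minimal pure lua model of the lookup order above (it only returns the
-- string that would be flushed): a present non-empty value wins, an explicitly
-- empty value suppresses the default, and only a missing attribute falls back:

local function attvalue(at,a,default)
    local str = at and at[a]
    if str == nil then
        return default ~= "" and default or nil
    elseif str ~= "" then
        return str
    else
        return nil -- explicit empty is valid
    end
end

print(attvalue({ bar = "x" },"bar","unknown")) -- x
print(attvalue({ bar = ""  },"bar","unknown")) -- nil
print(attvalue({          },"bar","unknown")) -- unknown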
+function lxml.refatt(id,a)
+ local e = getid(id)
+ if e then
+ local at = e.at
+ if at then
+ local str = at[a]
+ if str and str ~= "" then
+ str = gsub(str,"^#+","")
+ if str ~= "" then
+ contextsprint(notcatcodes,str)
+ end
+ end
+ end
+ end
+end
+
function lxml.name(id) -- or remapped name? -> lxml.info, combine
- local r = getid(id)
- local ns = r.rn or r.ns or ""
- if ns ~= "" then
- contextsprint(ctxcatcodes,ns,":",r.tg)
- else
- contextsprint(ctxcatcodes,r.tg)
+ local e = getid(id)
+ if e then
+ local ns = e.rn or e.ns
+ if ns and ns ~= "" then
+ contextsprint(ctxcatcodes,ns,":",e.tg)
+ else
+ contextsprint(ctxcatcodes,e.tg)
+ end
end
end
function lxml.match(id) -- or remapped name? -> lxml.info, combine
- contextsprint(ctxcatcodes,getid(id).mi or 0)
+ local e = getid(id)
+ contextsprint(ctxcatcodes,e and e.mi or 0)
end
function lxml.tag(id) -- tag vs name -> also in l-xml tag->name
- contextsprint(ctxcatcodes,getid(id).tg or "")
+ local e = getid(id)
+ if e then
+ local tg = e.tg
+ if tg and tg ~= "" then
+ contextsprint(ctxcatcodes,tg)
+ end
+ end
end
function lxml.namespace(id) -- or remapped name?
- local root = getid(id)
- contextsprint(ctxcatcodes,root.rn or root.ns or "")
+ local e = getid(id)
+ if e then
+ local ns = e.rn or e.ns
+ if ns and ns ~= "" then
+ contextsprint(ctxcatcodes,ns)
+ end
+ end
end
function lxml.flush(id)
- id = getid(id)
- local dt = id and id.dt
- if dt then
- xmlsprint(dt)
+ local e = getid(id)
+ if e then
+ local dt = e.dt
+ if dt then
+ xmlsprint(dt)
+ end
end
end
function lxml.snippet(id,i)
local e = getid(id)
if e then
- local edt = e.dt
- if edt then
- xmlsprint(edt[i])
+ local dt = e.dt
+ if dt then
+ local dti = dt[i]
+ if dti then
+ xmlsprint(dti)
+ end
end
end
end
function lxml.direct(id)
- xmlsprint(getid(id))
+ local e = getid(id)
+ if e then
+ xmlsprint(e)
+ end
end
function lxml.command(id,pattern,cmd)
@@ -1562,7 +1729,20 @@ function lxml.doifelsetext (id,pattern) doifelse(not empty(getid(id),pattern)) e
-- special case: "*" and "" -> self else lpath lookup
---~ function lxml.doifelseempty(id,pattern) doifelse(isempty(getid(id),pattern ~= "" and pattern ~= nil)) end -- not yet done, pattern
+local function checkedempty(id,pattern)
+ local e = getid(id)
+ if not pattern or pattern == "" then
+ local dt = e.dt
+ local nt = #dt
+ return (nt == 0) or (nt == 1 and dt[1] == "")
+ else
+ return isempty(getid(id),pattern)
+ end
+end
+
+function lxml.doifempty (id,pattern) doif (checkedempty(id,pattern)) end
+function lxml.doifnotempty (id,pattern) doifnot (checkedempty(id,pattern)) end
+function lxml.doifelseempty(id,pattern) doifelse(checkedempty(id,pattern)) end
-- status info
@@ -1690,3 +1870,213 @@ end
texfinalizers.upperall = xmlfinalizers.upperall
texfinalizers.lowerall = xmlfinalizers.lowerall
+
+function lxml.tobuffer(id,pattern,name,unescaped)
+ local collected = xmlapplylpath(getid(id),pattern)
+ if collected then
+ if unescaped then
+ collected = xmlcontent(collected[1]) -- expanded entities !
+ else
+ collected = tostring(collected[1])
+ end
+ buffers.assign(name,collected)
+ else
+ buffers.erase(name)
+ end
+end
+
+-- relatively new:
+
+local permitted = nil
+local ctx_xmlinjector = context.xmlinjector
+
+xml.pihandlers["injector"] = function(category,rest,e)
+ local options = options_to_array(rest)
+ local action = options[1]
+ if not action then
+ return
+ end
+ local n = #options
+ if n > 1 then
+ local category = options[2]
+ if category == "*" then
+ ctx_xmlinjector(action)
+ elseif permitted then
+ if n == 2 then
+ if permitted[category] then
+ ctx_xmlinjector(action)
+ end
+ else
+ for i=2,n do
+ local category = options[i]
+ if category == "*" or permitted[category] then
+ ctx_xmlinjector(action)
+ return
+ end
+ end
+ end
+ end
+ else
+ ctx_xmlinjector(action)
+ end
+end
+
+local pattern = P("context-") * C((1-lpeg.patterns.whitespace)^1) * C(P(1)^1)
+
+function lxml.applyselectors(id)
+ local root = getid(id)
+ local function filter(e)
+ local dt = e.dt
+ local ndt = #dt
+ local done = false
+ local i = 1
+ while i <= ndt do
+ local dti = dt[i]
+ if type(dti) == "table" then
+ if dti.tg == "@pi@" then
+ local text = dti.dt[1]
+ local what, rest = lpegmatch(pattern,text)
+ if what == "select" then
+ local categories = options_to_hash(rest)
+ if categories["begin"] then
+ local okay = false
+ for k, v in next, permitted do
+ if categories[k] then
+ okay = k
+ break
+ end
+ end
+ if not trace_selectors then
+ -- skip
+ elseif okay then
+ report_lxml("accepting selector: %s",okay)
+ else
+ categories.begin = false
+ report_lxml("rejecting selector: % t",sortedkeys(categories))
+ end
+ for j=i,ndt do
+ local dtj = dt[j]
+ if type(dtj) == "table" then
+ local tg = dtj.tg
+ if tg == "@pi@" then
+ local text = dtj.dt[1]
+ local what, rest = lpegmatch(pattern,text)
+ if what == "select" then
+ local categories = options_to_hash(rest)
+ if categories["end"] then
+ i = j
+ break
+ else
+ -- error
+ end
+ end
+ elseif not okay then
+ dtj.tg = "@cm@"
+ end
+ else
+-- dt[j] = "" -- okay ?
+ end
+ end
+ end
+ elseif what == "include" then
+ local categories = options_to_hash(rest)
+ if categories["begin"] then
+ local okay = false
+ for k, v in next, permitted do
+ if categories[k] then
+ okay = k
+ break
+ end
+ end
+ if not trace_selectors then
+ -- skip
+ elseif okay then
+ report_lxml("accepting include: %s",okay)
+ else
+ categories.begin = false
+ report_lxml("rejecting include: % t",sortedkeys(categories))
+ end
+ if okay then
+ for j=i,ndt do
+ local dtj = dt[j]
+ if type(dtj) == "table" then
+ local tg = dtj.tg
+ if tg == "@cm@" then
+ local content = dtj.dt[1]
+ local element = root and xml.toelement(content,root)
+ dt[j] = element
+ element.__p__ = dt -- needs checking
+ done = true
+ elseif tg == "@pi@" then
+ local text = dtj.dt[1]
+ local what, rest = lpegmatch(pattern,text)
+ if what == "include" then
+ local categories = options_to_hash(rest)
+ if categories["end"] then
+ i = j
+ break
+ else
+ -- error
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ else
+ filter(dti)
+ end
+ end
+ if done then
+ -- probably not needed
+ xml.reindex(dt)
+ end
+ end
+ i = i + 1
+ end
+ end
+ xmlwithelements(root,filter)
+end
+
+function xml.setinjectors(set)
+ local s = settings_to_set(set)
+ if permitted then
+ for k, v in next, s do
+ permitted[k] = true
+ end
+ else
+ permitted = s
+ end
+end
+
+function xml.resetinjectors(set)
+ if permitted and set and set ~= "" then
+ local s = settings_to_set(set)
+ for k, v in next, s do
+ if v then
+ permitted[k] = nil
+ end
+ end
+ else
+ permitted = nil
+ end
+end
+
+implement {
+ name = "xmlsetinjectors",
+ actions = xml.setinjectors,
+ arguments = "string"
+}
+
+implement {
+ name = "xmlresetinjectors",
+ actions = xml.resetinjectors,
+ arguments = "string"
+}
+
+implement {
+ name = "xmlapplyselectors",
+ actions = lxml.applyselectors,
+ arguments = "string"
+}
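-- A hedged usage sketch of the injector/selector machinery above; the set name
-- "web" and the action name "pagebreak" are just examples:
--
--   xml.setinjectors("web")
--
--   <?context-directive injector pagebreak web?>
--       calls \xmlinjector{pagebreak} because "web" is permitted
--   <?context-select begin web?> ... <?context-select end?>
--       content is kept for a permitted set, otherwise turned into comments
--   <?context-include begin web?> <!-- <extra/> --> <?context-include end?>
--       embedded comments become elements again for a permitted set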
diff --git a/tex/context/base/m-chart.lua b/tex/context/base/m-chart.lua
index 2b9869379..f1e7f4cb9 100644
--- a/tex/context/base/m-chart.lua
+++ b/tex/context/base/m-chart.lua
@@ -19,8 +19,6 @@ local P, S, C, Cc, lpegmatch = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.match
local report_chart = logs.reporter("chart")
-local points = number.points -- we can use %p instead
-
local variables = interfaces.variables
local v_yes = variables.yes
@@ -229,6 +227,8 @@ function commands.flow_start_cell(settings)
settings = settings,
x = 1,
y = 1,
+ realx = 1,
+ realy = 1,
name = "",
}
end
@@ -325,9 +325,13 @@ local function inject(includedata,data,hash)
if si.include then
inject(si,data,hash)
else
+ local x = si.x + xoffset
+ local y = si.y + yoffset
local t = {
- x = si.x + xoffset,
- y = si.y + yoffset,
+ x = x,
+ y = y,
+ realx = x,
+ realy = y,
settings = settings,
}
setmetatableindex(t,si)
@@ -451,10 +455,12 @@ function commands.flow_set_location(x,y)
else
y = tonumber(y)
end
- temp.x = x or 1
- temp.y = y or 1
- last_x = x or last_x
- last_y = y or last_y
+ temp.x = x or 1
+ temp.y = y or 1
+ temp.realx = x or 1
+ temp.realy = y or 1
+ last_x = x or last_x
+ last_y = y or last_y
end
function commands.flow_set_connection(location,displacement,name)
@@ -499,17 +505,17 @@ local function process_cells(chart,xoffset,yoffset)
local linesettings = settings.line
context("flow_shape_line_color := \\MPcolor{%s} ;", linesettings.color)
context("flow_shape_fill_color := \\MPcolor{%s} ;", linesettings.backgroundcolor)
- context("flow_shape_line_width := %s ; ", points(linesettings.rulethickness))
+ context("flow_shape_line_width := %p ; ", linesettings.rulethickness)
elseif focus[cell.focus] or focus[cell.name] then
local focussettings = settings.focus
context("flow_shape_line_color := \\MPcolor{%s} ;", focussettings.framecolor)
context("flow_shape_fill_color := \\MPcolor{%s} ;", focussettings.backgroundcolor)
- context("flow_shape_line_width := %s ; ", points(focussettings.rulethickness))
+ context("flow_shape_line_width := %p ; ", focussettings.rulethickness)
else
local shapesettings = settings.shape
context("flow_shape_line_color := \\MPcolor{%s} ;", shapesettings.framecolor)
context("flow_shape_fill_color := \\MPcolor{%s} ;", shapesettings.backgroundcolor)
- context("flow_shape_line_width := %s ; " , points(shapesettings.rulethickness))
+ context("flow_shape_line_width := %p ; " , shapesettings.rulethickness)
end
context("flow_peepshape := false ;") -- todo
context("flow_new_shape(%s,%s,%s) ;",cell.x+xoffset,cell.y+yoffset,shapedata.number)
@@ -580,7 +586,7 @@ local function process_connections(chart,xoffset,yoffset)
context("flow_touchshape := %s ;", linesettings.offset == v_none and "true" or "false")
context("flow_dsp_x := %s ; flow_dsp_y := %s ;",connection.dx or 0, connection.dy or 0)
context("flow_connection_line_color := \\MPcolor{%s} ;",linesettings.color)
- context("flow_connection_line_width := %s ;",points(linesettings.rulethickness))
+ context("flow_connection_line_width := %p ;",linesettings.rulethickness)
context("flow_connect_%s_%s (%s) (%s,%s,%s) (%s,%s,%s) ;",where_cell,where_other,j,cellx,celly,what_cell,otherx,othery,what_other)
context("flow_dsp_x := 0 ; flow_dsp_y := 0 ;")
end
@@ -686,6 +692,7 @@ local function getchart(settings,forced_x,forced_y,forced_nx,forced_ny)
print("no such chart",chartname)
return
end
+-- chart = table.copy(chart)
chart = expanded(chart,settings)
local chartsettings = chart.settings.chart
local autofocus = chart.settings.chart.autofocus
@@ -746,8 +753,8 @@ local function getchart(settings,forced_x,forced_y,forced_nx,forced_ny)
-- relocate cells
for i=1,#data do
local cell = data[i]
- cell.x = cell.x - minx + 1
- cell.y = cell.y - miny + 1
+ cell.x = cell.realx - minx + 1
+ cell.y = cell.realy - miny + 1
end
chart.from_x = 1
chart.from_y = 1
@@ -756,7 +763,9 @@ local function getchart(settings,forced_x,forced_y,forced_nx,forced_ny)
chart.nx = nx
chart.ny = ny
--
- -- inspect(chart)
+ chart.shift_x = minx + 1
+ chart.shift_y = miny + 1
+ --
return chart
end
@@ -792,14 +801,14 @@ local function makechart(chart)
local labeloffset = chartsettings.labeloffset
local exitoffset = chartsettings.exitoffset
local commentoffset = chartsettings.commentoffset
- context("flow_grid_width := %s ;", points(gridwidth))
- context("flow_grid_height := %s ;", points(gridheight))
- context("flow_shape_width := %s ;", points(shapewidth))
- context("flow_shape_height := %s ;", points(shapeheight))
- context("flow_chart_offset := %s ;", points(chartoffset))
- context("flow_label_offset := %s ;", points(labeloffset))
- context("flow_exit_offset := %s ;", points(exitoffset))
- context("flow_comment_offset := %s ;", points(commentoffset))
+ context("flow_grid_width := %p ;", gridwidth)
+ context("flow_grid_height := %p ;", gridheight)
+ context("flow_shape_width := %p ;", shapewidth)
+ context("flow_shape_height := %p ;", shapeheight)
+ context("flow_chart_offset := %p ;", chartoffset)
+ context("flow_label_offset := %p ;", labeloffset)
+ context("flow_exit_offset := %p ;", exitoffset)
+ context("flow_comment_offset := %p ;", commentoffset)
--
local radius = settings.line.radius
local rulethickness = settings.line.rulethickness
@@ -814,10 +823,10 @@ local function makechart(chart)
radius = dy
end
end
- context("flow_connection_line_width := %s ;", points(rulethickness))
- context("flow_connection_smooth_size := %s ;", points(radius))
- context("flow_connection_arrow_size := %s ;", points(radius))
- context("flow_connection_dash_size := %s ;", points(radius))
+ context("flow_connection_line_width := %p ;", rulethickness)
+ context("flow_connection_smooth_size := %p ;", radius)
+ context("flow_connection_arrow_size := %p ;", radius)
+ context("flow_connection_dash_size := %p ;", radius)
--
local offset = chartsettings.offset -- todo: pass string
if offset == v_none or offset == v_overlay or offset == "" then
@@ -825,7 +834,7 @@ local function makechart(chart)
elseif offset == v_standard then
offset = radius -- or rulethickness?
end
- context("flow_chart_offset := %s ;",points(offset))
+ context("flow_chart_offset := %p ;",offset)
--
context("flow_reverse_y := true ;")
process_cells(chart,0,0)
@@ -854,7 +863,7 @@ local function splitchart(chart)
local delta_x = splitsettings.dx or 0
local delta_y = splitsettings.dy or 0
--
- report_chart("spliting %a from (%s,%s) upto (%s,%s) into (%s,%s) with overlap (%s,%s)",
+ report_chart("splitting %a from (%s,%s) upto (%s,%s) with steps (%s,%s) and overlap (%s,%s)",
name,from_x,from_y,to_x,to_y,step_x,step_y,delta_x,delta_y)
--
local part_x = 0
@@ -866,6 +875,9 @@ local function splitchart(chart)
if done then
last_x = to_x
end
+-- if first_x >= to_x then
+-- break
+-- end
local part_y = 0
local first_y = from_y
while true do
@@ -875,14 +887,31 @@ local function splitchart(chart)
if done then
last_y = to_y
end
+-- if first_y >= to_y then
+-- break
+-- end
--
+local data = chart.data
+for i=1,#data do
+ local cell = data[i]
+-- inspect(cell)
+ local cx, cy = cell.x, cell.y
+ if cx >= first_x and cx <= last_x then
+ if cy >= first_y and cy <= last_y then
report_chart("part (%s,%s) of %a is split from (%s,%s) -> (%s,%s)",part_x,part_y,name,first_x,first_y,last_x,last_y)
- local x, y, nx, ny = first_x, first_y, last_x - first_x + 1,last_y - first_y + 1
+ local x = first_x
+ local y = first_y
+ local nx = last_x - first_x + 1
+ local ny = last_y - first_y + 1
context.beforeFLOWsplit()
context.handleFLOWsplit(function()
makechart(getchart(settings,x,y,nx,ny)) -- we need to pass frozen settings !
end)
context.afterFLOWsplit()
+ break
+ end
+ end
+end
--
if done then
break
diff --git a/tex/context/base/m-chart.mkvi b/tex/context/base/m-chart.mkvi
index 2b1a7447c..a0c8b2244 100644
--- a/tex/context/base/m-chart.mkvi
+++ b/tex/context/base/m-chart.mkvi
@@ -97,7 +97,6 @@
[\c!framecolor=FLOWfocuscolor,
\c!background=\FLOWshapeparameter\c!background,
\c!backgroundcolor=\FLOWshapeparameter\c!backgroundcolor,
- \c!backgroundscreen=\FLOWshapeparameter\c!backgroundscreen,
\c!rulethickness=\FLOWshapeparameter\c!rulethickness,
\c!offset=\FLOWshapeparameter\c!offset]
@@ -177,7 +176,7 @@
corner = "\FLOWlineparameter\c!corner",
dash = "\FLOWlineparameter\c!dash",
arrow = "\FLOWlineparameter\c!arrow",
- offset = "\FLOWlineparameter\c!offset",
+ offset = \number\dimexpr\FLOWlineparameter\c!offset,
},
} }%
\endgroup}
@@ -193,7 +192,7 @@
\insidefloattrue
\dontcomplain
\setupFLOWchart[#settings]%
- \setupbodyfont[\FLOWchartparameter\c!bodyfont]%
+ \usebodyfontparameter\FLOWchartparameter
\ctxcommand{flow_make_chart {
chart = {
name = "#name",
@@ -259,8 +258,9 @@
\def\FLOW_charts[#name][#settings]
{\begingroup
- \setupFLOWsplit[\c!state=\v!start,#settings]%
- \FLOW_chart[#name][]%
+ \setupFLOWchart[\c!split=\v!yes]%
+ \setupFLOWsplit[#settings]%
+ \module_charts_process[#name][]% \FLOWchart...
\endgroup}
\appendtoks
diff --git a/tex/context/base/m-hemistich.mkiv b/tex/context/base/m-hemistich.mkiv
index 55fde7b92..7a849d415 100644
--- a/tex/context/base/m-hemistich.mkiv
+++ b/tex/context/base/m-hemistich.mkiv
@@ -33,7 +33,7 @@
\unexpanded\def\dodohemistiches#1[#2]#3#4%
{\dontleavehmode
\begingroup
- \doifassignmentelse{#2}
+ \doifelseassignment{#2}
{\edef\currenthemistich{#1}%
\setupcurrenthemistich[#2]}
{\def\currenthemistich{#2}}%
@@ -41,21 +41,29 @@
{\scratchwidth\availablehsize}
{\scratchwidth\hemistichparameter\c!width\relax}%
\spaceskip\zeropoint\s!plus\plusone\s!fill\relax
+ \dostarttagged\t!division\currenthemistich
\hbox to \scratchwidth\bgroup
\scratchwidth.5\dimexpr\scratchwidth-\hemistichparameter\c!distance\relax
\hbox to \scratchwidth\bgroup
+ \dostarttagged\t!construct\c!lefttext
\usehemistichstyleandcolor\c!leftstyle\c!leftcolor#3%
+ \dostoptagged
\egroup
\hss
\begingroup
+ \dostarttagged\t!construct\c!separator
\usehemistichstyleandcolor\c!separatorstyle\c!separatorcolor
\hemistichparameter\c!separator
+ \dostoptagged
\endgroup
\hss
\hbox to \scratchwidth\bgroup
+ \dostarttagged\t!construct\c!righttext
\usehemistichstyleandcolor\c!rightstyle\c!rightcolor#4%
+ \dostoptagged
\egroup
\egroup
+ \dostoptagged
\endgroup}
\unexpanded\def\hemistichescaesura#1#2#3%
diff --git a/tex/context/base/m-matrix.mkiv b/tex/context/base/m-matrix.mkiv
new file mode 100644
index 000000000..ccb376e39
--- /dev/null
+++ b/tex/context/base/m-matrix.mkiv
@@ -0,0 +1,495 @@
+%D \module
+%D [ file=m-matrix,
+%D version=2014.11.04, % already a year older
+%D title=\CONTEXT\ Extra Modules,
+%D subtitle=Matrices,
+%D author={Jeong Dalyoung \& Hans Hagen},
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This code is based on a post by Dalyoung on the context list. After that
+%D we turned it into a module and improved the code a bit. Feel free to ask
+%D us for more. Once we're satisfied, a more general helper l-matrix could
+%D be made. Dalyoung does the clever bits, and Hans only cleanes up and
+%D optimizes a bit.
+
+% \registerctxluafile{l-matrix}{1.001} % not yet
+
+\startmodule[matrix]
+
+\startluacode
+
+local settings_to_hash = utilities.parsers.settings_to_hash
+local formatters = string.formatters
+local copy = table.copy
+local insert = table.insert
+local remove = table.remove
+
+local matrix = { }
+moduledata.matrix = matrix
+
+local f_matrix_slot = formatters["%s_{%s%s}"]
+
+function matrix.symbolic(sym, x, y, nx ,ny) -- symMatrix("a", "m", "n")
+ local nx = nx or 2
+ local ny = ny or nx
+ local function filled(i,y)
+ local mrow = { }
+ for j=1,nx do
+ mrow[#mrow+1] = f_matrix_slot(sym,i,j)
+ end
+ mrow[#mrow+1] = "\\cdots"
+ mrow[#mrow+1] = f_matrix_slot(sym,i,y)
+ return mrow
+ end
+ local function dummy()
+ local mrow = { }
+ for j=1,nx do
+ mrow[#mrow+1] = "\\vdots"
+ end
+ mrow[#mrow+1] = "\\ddots"
+ mrow[#mrow+1] = "\\vdots"
+ return mrow
+ end
+ --
+ local mm = { }
+ for i=1,ny do
+ mm[i] = filled(i,y)
+ end
+ mm[#mm+1] = dummy()
+ mm[#mm+1] = filled(x,y)
+ return mm
+end
+
+-- todo: define a matrix at the tex end so that we have more control
+
+local fences_p = {
+ left = "\\left(\\,",
+ right = "\\,\\right)",
+}
+
+local fences_b = {
+ left = "\\left[\\,",
+ right = "\\,\\right]",
+}
+
+function matrix.typeset(m,options)
+ local options = settings_to_hash(options or "")
+ context.startmatrix(options.determinant and fences_b or fences_p)
+ for i=1, #m do
+ local mi = m[i]
+ for j=1,#mi do
+ context.NC(mi[j])
+ end
+ context.NR()
+ end
+ context.stopmatrix()
+end
+
+-- interchange two rows (i-th, j-th)
+
+function matrix.swap(t,i,j)
+ t[i], t[j] = t[j], t[i]
+end
+
+-- replace i-th row with factor * (i-th row)
+
+function matrix.multiply(m,i,factor)
+ local mi = m[i]
+ for k=1,#mi do
+ mi[k] = factor * mi[k]
+ end
+ return m
+end
+
+-- scalar product "factor * m"
+
+function matrix.scalar(m, factor)
+ for i=1,#m do
+ local mi = m[i]
+ for j=1,#mi do
+ mi[j] = factor * mi[j]
+ end
+ end
+ return m
+end
+
+-- replace i-th row with i-th row + factor * (j-th row)
+
+function matrix.sumrow(m,i,j,factor)
+ local mi = m[i]
+ local mj = m[j]
+ for k=1,#mi do
+ mi[k] = mi[k] + factor * mj[k]
+ end
+end
+
+-- transpose of a matrix
+
+function matrix.transpose(m)
+ local t = { }
+ for j=1,#m[1] do
+ local r = { }
+ for i=1,#m do
+ r[i] = m[i][j]
+ end
+ t[j] = r
+ end
+ return t
+end
+
+-- inner product of two vectors
+
+function matrix.inner(u,v)
+ local nu = #u
+ if nu == 0 then
+ return 0
+ end
+ local nv = #v
+ if nv ~= nu then
+ return 0
+ end
+ local result = 0
+ for i=1,nu do
+ result = result + u[i] * v[i]
+ end
+ return result
+end
+
+-- product of two matrices
+
+function matrix.product(m1,m2)
+ local product = { }
+ if #m1[1] == #m2 then
+ for i=1,#m1 do
+ local m1i = m1[i]
+ local mrow = { }
+ for j=1,#m2[1] do
+ local temp = 0
+ for k=1,#m1[1] do
+ temp = temp + m1i[k] * m2[k][j]
+ end
+ mrow[j] = temp
+ end
+ product[i] = mrow
+ end
+ end
+ return product
+end
+
+local function uppertri(m,sign)
+ local temp = copy(m)
+ for i=1,#temp-1 do
+ local pivot = temp[i][i]
+ if pivot == 0 then
+ local pRow = i +1
+ while temp[pRow][i] == 0 do
+ pRow = pRow + 1
+ if pRow > #temp then -- if there is no nonzero number
+ return temp
+ end
+ end
+ temp[i], temp[pRow] = temp[pRow], temp[i]
+ if sign then
+ sign = -sign
+ end
+ end
+ local mi = temp[i]
+ for k=i+1, #temp do
+ local factor = -temp[k][i]/mi[i]
+ local mk = temp[k]
+ for l=i,#mk do
+ mk[l] = mk[l] + factor * mi[l]
+ end
+ end
+ end
+ if sign then
+ return temp, sign
+ else
+ return temp
+ end
+end
+
+matrix.uppertri = uppertri
+
+function matrix.determinant(m)
+ if #m == #m[1] then
+ local d = 1
+ local t, s = uppertri(m,1)
+ for i=1,#t do
+ d = d * t[i][i]
+ end
+ return s*d
+ else
+ return 0
+ end
+end
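-- A hand check of the sign handling: reducing { {1,2,4}, {0,0,2}, {2,2,-6} }
-- takes one row swap, which leaves the diagonal 1, -2, 2 and flips the sign,
-- so the determinant is (-1) * 1 * (-2) * 2 = 4 (the same matrix shows up in
-- the demo section below).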
+
+local function rowechelon(m,r)
+ local temp = copy(m)
+ local pRow = 1
+ local pCol = 1
+ while pRow <= #temp do
+ local pivot = temp[pRow][pCol]
+ if pivot == 0 then
+ local i = pRow
+ local n = #temp
+ while temp[i][pCol] == 0 do
+ i = i + 1
+ if i > n then
+ -- no nonzero number in a column
+ pCol = pCol + 1
+ if pCol > #temp[pRow] then
+ -- there is no nonzero number in a row
+ return temp
+ end
+ i = pRow
+ end
+ end
+ temp[pRow], temp[i] = temp[i], temp[pRow]
+ end
+ local row = temp[pRow]
+ pivot = row[pCol]
+ for l=pCol,#row do
+ row[l] = row[l]/pivot
+ end
+
+ if r == 1 then
+ -- make the "reduced row echelon form"
+ local row = temp[pRow]
+ for k=1,pRow-1 do
+ local current = temp[k]
+ local factor = -current[pCol]
+ local mk = current
+ for l=pCol,#mk do
+ mk[l] = mk[l] + factor * row[l]
+ end
+ end
+ end
+ -- just make the row echelon form
+ local row = temp[pRow]
+ for k=pRow+1, #temp do
+ local current = temp[k]
+ local factor = -current[pCol]
+ local mk = current
+ for l=pCol,#mk do
+ mk[l] = mk[l] + factor * row[l]
+ end
+ end
+ pRow = pRow + 1
+ pCol = pCol + 1
+
+ if pRow > #temp or pCol > #temp[1] then
+ pRow = #temp + 1
+ end
+ end
+ return temp
+end
+
+matrix.rowechelon = rowechelon
+matrix.rowEchelon = rowechelon
+
+-- solve the linear equation m X = c
+
+local function solve(m,c)
+ local n = #m
+ if n ~= #c then
+ return copy(m)
+ end
+ local newm = copy(m)
+ local temp = copy(c)
+ for i=1,n do
+ insert(newm[i],temp[i])
+ end
+ return rowechelon(newm,1)
+end
+
+matrix.solve = solve
+
+-- find the inverse matrix of m
+
+local function inverse(m)
+ local n = #m
+ local temp = copy(m)
+ if n ~= #m[1] then
+ return temp
+ end
+ for i=1,n do
+ for j=1,n do
+ insert(temp[i],j == i and 1 or 0)
+ end
+ end
+ temp = rowechelon(temp,1)
+ for i=1,n do
+ for j=1,n do
+ remove(temp[i], 1)
+ end
+ end
+ return temp
+end
+
+matrix.inverse = inverse
+
+\stopluacode
+
+\stopmodule
+
+\unexpanded\def\ctxmodulematrix#1{\ctxlua{moduledata.matrix.#1}}
+
+\continueifinputfile{m-matrix.mkiv}
+
+\starttext
+
+\startluacode
+document.DemoMatrixA = {
+ { 0, 2, 4, -4, 1 },
+ { 0, 0, 2, 3, 4 },
+ { 2, 2, -6, 2, 4 },
+ { 2, 0, -6, 9, 7 },
+ { 2, 3, 4, 5, 6 },
+ { 6, 6, -6, 6, 6 },
+}
+
+document.DemoMatrixB = {
+ { 0, 2, 4, -4, 1 },
+ { 0, 0, 2, 3, 4 },
+ { 2, 2, -6, 2, 4 },
+ { 2, 0, -6, 9, 7 },
+ { 2, 2, -6, 2, 4 },
+ { 2, 2, -6, 2, 4 },
+}
+\stopluacode
+
+\startsubject[title={A symbolic matrix}]
+
+\ctxmodulematrix{typeset(moduledata.matrix.symbolic("a", "m", "n"))}
+\ctxmodulematrix{typeset(moduledata.matrix.symbolic("a", "m", "n", 4, 8))}
+
+\stopsubject
+
+\startsubject[title={Swap two rows (2 and 4)}]
+
+\startluacode
+moduledata.matrix.typeset(document.DemoMatrixA)
+context.blank()
+moduledata.matrix.swap(document.DemoMatrixA, 2, 4)
+context.blank()
+moduledata.matrix.typeset(document.DemoMatrixA)
+\stopluacode
+
+\stopsubject
+
+\startsubject[title={Multiply $3 \times r_2$}]
+
+\startluacode
+moduledata.matrix.typeset(document.DemoMatrixA)
+context.blank()
+moduledata.matrix.typeset(moduledata.matrix.multiply(document.DemoMatrixA, 2, 3))
+\stopluacode
+
+\stopsubject
+
+\startsubject[title={Row 2 + $4 \times r_3$}]
+
+\startluacode
+moduledata.matrix.typeset(document.DemoMatrixA)
+context.blank()
+moduledata.matrix.sumrow(document.DemoMatrixA, 2, 3, 4)
+context.blank()
+moduledata.matrix.typeset(document.DemoMatrixA)
+\stopluacode
+
+\stopsubject
+
+\startsubject[title={Transpose a matrix}]
+
+\startluacode
+moduledata.matrix.typeset(document.DemoMatrixA)
+context.blank()
+moduledata.matrix.typeset(moduledata.matrix.transpose(document.DemoMatrixA))
+\stopluacode
+
+\stopsubject
+
+\startsubject[title={The inner product of two vectors}]
+
+\startluacode
+context(moduledata.matrix.inner({ 1, 2, 3 }, { 3, 1, 2 }))
+context.blank()
+context(moduledata.matrix.inner({ 1, 2, 3 }, { 3, 1, 2, 4 }))
+\stopluacode
+
+\stopsubject
+
+\startsubject[title={The product of two matrices}]
+
+\startluacode
+moduledata.matrix.typeset(document.DemoMatrixA)
+context.blank()
+moduledata.matrix.typeset(moduledata.matrix.product(document.DemoMatrixA,document.DemoMatrixA))
+\stopluacode
+
+\stopsubject
+
+\startsubject[title={An Upper Triangular Matrix}]
+
+\ctxmodulematrix{typeset(moduledata.matrix.uppertri(document.DemoMatrixB))}
+
+\stopsubject
+
+\startsubject[title={A determinant}]
+
+\startluacode
+local m = {
+ { 1, 2, 4 },
+ { 0, 0, 2 },
+ { 2, 2, -6 },
+}
+context(moduledata.matrix.determinant(m))
+\stopluacode
+
+\stopsubject
+
+\startsubject[title={Row echelon form}]
+
+\startluacode
+local m = {
+ { 1, 3, -2, 0, 2, 0, 0 },
+ { 2, 6, -5, -2, 4, -3, -1 },
+ { 0, 0, 5, 10, 0, 15, 5 },
+ { 2, 6, 0, 8, 4, 18, 6 },
+}
+
+moduledata.matrix.typeset(m)
+moduledata.matrix.typeset(moduledata.matrix.rowechelon(m,1))
+\stopluacode
+
+\stopsubject
+
+\startsubject[title={Solving linear equation}]
+
+\startluacode
+local m = {
+ { 1, 3, -2, 0 },
+ { 2, 0, 1, 2 },
+ { 6, -5, -2, 4 },
+ { -3, -1, 5, 10 },
+}
+
+local c = { 5, 2, 6, 8 }
+
+moduledata.matrix.typeset(moduledata.matrix.solve(m,c))
+\stopluacode
+
+\stopsubject
+
+\startsubject[title={Inverse matrix}]
+
+\startcombination[2*1]
+ {\ctxlua{moduledata.matrix.typeset { { 1, 1, 1 }, { 0, 2, 3 }, { 3, 2, 1 } }}} {}
+ {\ctxlua{moduledata.matrix.typeset(moduledata.matrix.inverse { { 1, 1, 1 }, { 0, 2, 3 }, { 3, 2, 1 } })}} {}
+\stopcombination
+
+\stopsubject
+
+\stoptext
diff --git a/tex/context/base/m-morse.mkvi b/tex/context/base/m-morse.mkvi
deleted file mode 100644
index a2c20dff7..000000000
--- a/tex/context/base/m-morse.mkvi
+++ /dev/null
@@ -1,273 +0,0 @@
-%D \module
-%D [ file=m-morse,
-%D version=2010.12.10,
-%D title=\CONTEXT\ Extra Modules,
-%D subtitle=Morse,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-% todo: act upon the node list
-% make it a buffer operation
-% nice in cld manual
-
-\startluacode
-
-moduledata.morse = moduledata.morse or { }
-local morse = moduledata.morse
-
-local utfcharacters, gsub = string.utfcharacters, string.gsub
-local ucchars, shchars = characters.ucchars, characters.shchars
-
-local codes = {
-
- ["A"] = "·—",
- ["B"] = "—···",
- ["C"] = "—·—·",
- ["D"] = "—··",
- ["E"] = "·",
- ["F"] = "··—·",
- ["G"] = "——·",
- ["H"] = "····",
- ["I"] = "··",
- ["J"] = "·———",
- ["K"] = "—·—",
- ["L"] = "·—··",
- ["M"] = "——",
- ["N"] = "—·",
- ["O"] = "———",
- ["P"] = "·——·",
- ["Q"] = "——·—",
- ["R"] = "·—·",
- ["S"] = "···",
- ["T"] = "—",
- ["U"] = "··—",
- ["V"] = "···—",
- ["W"] = "·——",
- ["X"] = "—··—",
- ["Y"] = "—·——",
- ["Z"] = "——··",
-
- ["0"] = "—————",
- ["1"] = "·————",
- ["2"] = "··———",
- ["3"] = "···——",
- ["4"] = "····—",
- ["5"] = "·····",
- ["6"] = "—····",
- ["7"] = "——···",
- ["8"] = "———··",
- ["9"] = "————·",
-
- ["."] = "·—·—·—",
- [","] = "——··——",
- [":"] = "———···",
- [";"] = "—·—·—",
-
- ["?"] = "··——··",
- ["!"] = "—·—·——",
-
- ["-"] = "—····—",
- ["/"] = "—··—· ",
-
- ["("] = "—·——·",
- [")"] = "—·——·—",
-
- ["="] = "—···—",
- ["@"] = "·——·—·",
-
- ["'"] = "·————·",
- ['"'] = "·—··—·",
-
- ["À"] = "·——·—",
- ["Å"] = "·——·—",
- ["Ä"] = "·—·—",
- ["Æ"] = "·—·—",
- ["Ç"] = "—·—··",
- ["É"] = "··—··",
- ["È"] = "·—··—",
- ["Ñ"] = "——·——",
- ["Ö"] = "———·",
- ["Ø"] = "———·",
- ["Ü"] = "··——",
- ["ß"] = "··· ···",
-
-}
-
-morse.codes = codes
-
-local fallbackself = false
-
-local function codefallback(t,k)
- if k then
- local u = ucchars[k]
- local v = rawget(t,u) or rawget(t,shchars[u]) or false
- t[k] = v
- return v
- elseif fallbackself then
- return k
- else
- return false
- end
-end
-
-table.setmetatableindex(codes,codefallback)
-
-local MorseBetweenWords = context.MorseBetweenWords
-local MorseBetweenCharacters = context.MorseBetweenCharacters
-local MorseLong = context.MorseLong
-local MorseShort = context.MorseShort
-local MorseSpace = context.MorseSpace
-local MorseUnknown = context.MorseUnknown
-
-local function toverbose(str)
- str = gsub(str,"%s*+%s*","+")
- str = gsub(str,"%s+"," ")
- local done = false
- for m in utfcharacters(str) do
- if done then
- MorseBetweenCharacters()
- end
- if m == "·" or m == "." then
- MorseShort()
- done = true
- elseif m == "—" or m == "-" then
- MorseLong()
- done = true
- elseif m == " " then
- if done then
- MorseBetweenCharacters()
- end
- done = false
- elseif m == "+" then
- MorseBetweenWords()
- done = false
- else
- MorseUnknown(m)
- end
- end
-end
-
-local function toregular(str)
- local inmorse = false
- for s in utfcharacters(str) do
- local m = codes[s]
- if m then
- if inmorse then
- MorseBetweenWords()
- else
- inmorse = true
- end
- local done = false
- for m in utfcharacters(m) do
- if done then
- MorseBetweenCharacters()
- else
- done = true
- end
- if m == "·" then
- MorseShort()
- elseif m == "—" then
- MorseLong()
- elseif m == " " then
- MorseBetweenCharacters()
- end
- end
- inmorse = true
- elseif s == "\n" or s == " " then
- MorseSpace()
- inmorse = false
- else
- if inmorse then
- MorseBetweenWords()
- else
- inmorse = true
- end
- MorseUnknown(s)
- end
- end
-end
-
-local function tomorse(str,verbose)
- if verbose then
- toverbose(str)
- else
- toregular(str)
- end
-end
-
-morse.tomorse = tomorse
-
-function morse.filetomorse(name,verbose)
- tomorse(resolvers.loadtexfile(name),verbose)
-end
-
-function morse.showtable()
- context.starttabulate { "|l|l|" } -- { "|l|l|l|" }
- for k, v in table.sortedpairs(codes) do
- context.NC() context(k)
- -- context.NC() context(v)
- context.NC() tomorse(v,true)
- context.NC() context.NR()
- end
- context.stoptabulate()
-end
-
-\stopluacode
-
-\unprotect
-
-% todo: \setupmorse, but probably it's not worth the trouble.
-
-\def\MorseWidth {0.4em}
-\def\MorseHeight {0.2em}
-%def\MorseShort {\dontleavehmode\blackrule[\c!height=\MorseHeight,\c!width=\dimexpr\MorseWidth]}
-%def\MorseLong {\dontleavehmode\blackrule[\c!height=\MorseHeight,\c!width=3\dimexpr\MorseWidth]}
-\def\MorseShort {\dontleavehmode\vrule\!!width \dimexpr\MorseWidth\!!height\MorseHeight\!!depth\zeropoint\relax}
-\def\MorseLong {\dontleavehmode\vrule\!!width3\dimexpr\MorseWidth\!!height\MorseHeight\!!depth\zeropoint\relax}
-\def\MorseBetweenCharacters {\kern\MorseWidth}
-\def\MorseBetweenWords {\hskip3\dimexpr\MorseWidth\relax}
-\def\MorseSpace {\hskip7\dimexpr\MorseWidth\relax}
-\def\MorseUnknown #text{[\detokenize{#text}]}
-
-\unexpanded\def\MorseCode #text{\ctxlua{moduledata.morse.tomorse(\!!bs#text\!!es,true)}}
-\unexpanded\def\MorseString #text{\ctxlua{moduledata.morse.tomorse(\!!bs#text\!!es)}}
-\unexpanded\def\MorseFile #text{\ctxlua{moduledata.morse.filetomorse("#text")}}
-\unexpanded\def\MorseTable {\ctxlua{moduledata.morse.showtable()}}
-
-\let\Morse \MorseString
-
-%def\MorseShort {·}
-%def\MorseLong {—}
-
-\protect
-
-\continueifinputfile{m-morse.mkvi}
-
-\starttext
-
-\MorseTable
-
-\startlines
-\MorseCode{—·—· ——— —· — · —··— —+—— —·— ·· ···—}
-\MorseCode{—·—· ——— —· — · —··— — + —— —·— ·· ···—}
-\Morse{ÀÁÂÃÄÅàáâãäå}
-\Morse{ÆÇæç}
-\Morse{ÈÉÊËèéêë}
-\Morse{ÌÍÎÏìíîï}
-\Morse{Ññ}
-\Morse{ÒÓÔÕÖòóôõö}
-\Morse{Øø}
-\Morse{ÙÚÛÜùúû}
-\Morse{Ýýÿ}
-\Morse{ß}
-\Morse{Ţţ}
-\stoplines
-
-\Morse{A B C D E F G H I J K L M N O P Q R S T U V W X Y Z}
-
-\stoptext
diff --git a/tex/context/base/m-oldbibtex.mkiv b/tex/context/base/m-oldbibtex.mkiv
new file mode 100644
index 000000000..08c23e7cc
--- /dev/null
+++ b/tex/context/base/m-oldbibtex.mkiv
@@ -0,0 +1,16 @@
+%D \module
+%D [ file=m-oldbibtex,
+%D version=2013.12.12, % based on bibl-apa.tex and later xml variant
+%D title=Fallback on old method,
+%D subtitle=Publications,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is therefore copyrighted
+%C by \PRAGMA. See mreadme.pdf for details.
+
+\loadmarkfile{bibl-bib}
+\loadmarkfile{bibl-tra}
+
+\endinput
diff --git a/tex/context/base/m-oldfun.mkiv b/tex/context/base/m-oldfun.mkiv
index 1c5a1d29d..3f2ec0263 100644
--- a/tex/context/base/m-oldfun.mkiv
+++ b/tex/context/base/m-oldfun.mkiv
@@ -131,7 +131,7 @@
\unexpanded\def\DroppedCaps#1#2#3#4#5#6#7% does not yet handle accented chars
{\defconvertedargument\asciia{#7}%
\defconvertedcommand \asciib{\DroppedString}%
- \doifinstringelse\asciia\asciib
+ \doifelseinstring\asciia\asciib
{\noindentation
\dontleavehmode
\checkindentation % redo this one
diff --git a/tex/context/base/m-oldnum.mkiv b/tex/context/base/m-oldnum.mkiv
index efc0af472..382c56eb6 100644
--- a/tex/context/base/m-oldnum.mkiv
+++ b/tex/context/base/m-oldnum.mkiv
@@ -73,8 +73,8 @@
\chardef\digitoutputmode=1 % 0..6
\chardef\digitsignmode =0 % 0..3
-\def\setdigitmode{\chardef\digitoutputmode}
-\def\setdigitsign{\chardef\digitsignmode}
+\unexpanded\def\setdigitmode{\chardef\digitoutputmode}
+\unexpanded\def\setdigitsign{\chardef\digitsignmode}
%D The digit modes are:
%D
@@ -100,7 +100,7 @@
\unexpanded\def\digits
{\bgroup
\let~@%
- \doifnextbgroupelse\dodigits{\doifnextcharelse\normalmathshift\domathdigits\grabdigit}}
+ \doifelsenextbgroup\dodigits{\doifelsenextchar\normalmathshift\domathdigits\grabdigit}}
\def\dodigits#1%
{\grabdigit#1\relax}
@@ -118,7 +118,7 @@
\ifx\normalmathshift\undefined \let\normalmathshift=$ \fi
-\def\scandigit
+\unexpanded\def\scandigit
{\ifx\next\blankspace
\let\next\handledigits
\else\ifx\next\nextobeyedline % the indirect one
@@ -139,10 +139,10 @@
%D typeset it in superscript. The space placeholders are
%D replaced by a \type {@}.
-\def\savedigit#1#2%
+\unexpanded\def\savedigit#1#2%
{\edef#1{#1\saveddigits#2}\let\saveddigits\empty}
-\long\def\collectdigit#1%
+\unexpanded\def\collectdigit#1%
{\ifx#1~%
\savedigit\collecteddigits @%
\else\if#1_% tricky as can be several catcodes ... will become lua code anyway
@@ -158,13 +158,13 @@
\chardef\powerdigits\plusone
\else
\savedigit\collecteddigits#1%
- %\doifnumberelse{#1}
+ %\doifelsenumber{#1}
% {\savedigit\collecteddigits#1}
% {\def\saveddigits{#1}}%
\fi\fi\fi
\else
\savedigit\savedpowerdigits#1%
- %\doifnumberelse{#1}
+ %\doifelsenumber{#1}
% {\savedigit\savedpowerdigits#1}
% {\def\saveddigits{#1}}%
\fi\fi\fi\fi
@@ -173,7 +173,7 @@
\let\handlemathdigits\firstofoneargument
\let\handletextdigits\mathematics
-\def\handledigits
+\unexpanded\def\handledigits
{%\ifcase\powerdigits
% \edef\collecteddigits{\collecteddigits\saveddigits}%
%\else
@@ -235,7 +235,7 @@
% 0,- is invalid, should be =
% 0,-- is invalid, should be ==
-\def\digitzeroamount
+\unexpanded\def\digitzeroamount
{\digitsgn\zeroamount
\def\digitzeroamount
{\hphantom
@@ -243,7 +243,7 @@
\hskip-\wd\scratchbox}%
\let\digitzeroamount\empty}}
-\def\scandigits#1%
+\unexpanded\def\scandigits#1%
{\if#1.\digitsep1\else
\if#1,\digitsep2\else
\if#1@\digitnop \else
@@ -261,7 +261,7 @@
\newbox\digitsepbox \chardef\autodigitmode=1
-\def\digitsep#1%
+\unexpanded\def\digitsep#1%
{\ifcase\autodigitmode
\doscandigit#1%
\else
@@ -277,7 +277,7 @@
%
% while this works
-\def\digitnop
+\unexpanded\def\digitnop
{\hbox{\hphantom{\box\digitsepbox}}%
\hphantom{0}\chardef\skipdigit1\relax}
@@ -287,7 +287,7 @@
% {\hphantom{\box\digitsepbox0}%
% \chardef\skipdigit1\relax}
-\def\digitsgn#1%
+\unexpanded\def\digitsgn#1%
{\ifcase\digitsignmode#1\else
\hbox
{\setbox\scratchbox\hbox{0}%
@@ -404,11 +404,11 @@
%D \digittemplate 12.000.000,00 % \digittemplate .,
%D \stoptyping
-\def\digittemplate #1 %
+\unexpanded\def\digittemplate #1 %
{\chardef\digitinputmode\zerocount
\handletokens#1\with\scandigittemplate}
-\def\scandigittemplate#1%
+\unexpanded\def\scandigittemplate#1%
{\if #1.\ifcase\digitinputmode\chardef\digitinputmode\plusone \fi% period
\else\if#1,\ifcase\digitinputmode\chardef\digitinputmode\plustwo \fi% comma
\fi\fi}
diff --git a/tex/context/base/m-pipemode.mkiv b/tex/context/base/m-pipemode.mkiv
new file mode 100644
index 000000000..e96394c43
--- /dev/null
+++ b/tex/context/base/m-pipemode.mkiv
@@ -0,0 +1,7 @@
+% For Mojca: context --global m-pipemode.mkiv
+
+\disabledirectives[system.errorcontext]
+
+\starttext
+
+\let\stoptext\relax
diff --git a/tex/context/base/m-pstricks.mkii b/tex/context/base/m-pstricks.mkii
index bdcf13b24..d41f19871 100644
--- a/tex/context/base/m-pstricks.mkii
+++ b/tex/context/base/m-pstricks.mkii
@@ -43,7 +43,7 @@
{\input multido \relax
\input pstricks \relax
\input pst-plot \relax
- \loadpstrickscolors{colo-rgb}}
+ \loadpstrickscolors{colo-rgb.mkii}}
{\writestatus{pstricks}{using indirect method; enable write18}}
\catcode`\|=\oldbarcode
diff --git a/tex/context/base/m-punk.mkiv b/tex/context/base/m-punk.mkiv
index 6bf92e4c0..c8021a92f 100644
--- a/tex/context/base/m-punk.mkiv
+++ b/tex/context/base/m-punk.mkiv
@@ -162,6 +162,7 @@ function fonts.handlers.vf.combiner.commands.metafont(g,v)
end
g.properties.virtualized = true
g.variants = list
+ print(g)
end
fonts.definers.methods.install( "punk", {
@@ -177,14 +178,29 @@ fonts.definers.methods.install( "punkboldslanted", {
{ "metafont", "mfplain", "punkfont-boldslanted.mp", 10 },
} )
-typesetters.cases.register("RandomPunk", function(current)
- local used = fontdata[current.font].variants
+-- typesetters.cases.register("RandomPunk", function(current)
+-- local used = fontdata[current].variants
+-- if used then
+-- local f = math.random(1,#used)
+-- current.font = used[f]
+-- return current, true
+-- else
+-- return current, false
+-- end
+-- end)
+
+local getfont = nodes.nuts.getfont
+local setfield = nodes.nuts.setfield
+local random = math.random
+
+typesetters.cases.register("RandomPunk", function(start)
+ local used = fontdata[getfont(start)].variants
if used then
- local f = math.random(1,#used)
- current.font = used[f]
- return current, true
+ local f = random(1,#used)
+ setfield(start,"font",used[f])
+ return start, true
else
- return current, false
+ return start, false
end
end)
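+
+-- Note: the case handler now receives a direct node ("nut"), so the font is read with
+-- getfont and written back with setfield instead of indexing the font field directly.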
@@ -214,7 +230,7 @@ end)
\unexpanded\def\StartRandomPunk {\begingroup\EnableRandomPunk}
\unexpanded\def\StopRandomPunk {\endgroup}
-\starttypescript [serif] [punk] [default]
+\starttypescript [serif] [punk]
\definefontsynonym [Serif] [demo@punk]
\definefontsynonym [SerifBold] [demobold@punkbold]
\definefontsynonym [SerifSlanted] [demoslanted@punkslanted]
diff --git a/tex/context/base/m-scite.mkiv b/tex/context/base/m-scite.mkiv
new file mode 100644
index 000000000..7a8e8b06e
--- /dev/null
+++ b/tex/context/base/m-scite.mkiv
@@ -0,0 +1,275 @@
+%D \module
+%D [ file=m-scite,
+%D version=2014.04.28,
+%D title=\CONTEXT\ Extra Modules,
+%D subtitle=\SCITE\ lexers,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% We can simplify the scite lexers, as long as we're able to return the
+% lexed result table and provide a lexer module with the functions that
+% the lexer expects (so I need to decipher the cxx file).
+%
+% lexer._TOKENSTYLES : table
+% lexer._CHILDREN : flag
+% lexer._EXTRASTYLES : table
+% lexer._GRAMMAR : flag
+%
+% lexers.load : function
+% lexers.lex : function
+%
+% And some properties that map styles onto scintilla styling. I get the
+% impression that we end up with something simpler, a hybrid between the
+% scite lexing and the current context way, so we get an intermediate
+% step, with some penalty for context, but at least I don't have to
+% maintain two sets (three sets as we also have a line based series).
+
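+% A rough sketch (everything below is an assumption, only the field and function names
+% follow the list above) of what such a minimal lexer module could look like:
+%
+% local lexers = { }
+%
+% function lexers.load(name)
+%     -- hand back a lexer table carrying the expected fields
+%     return { _TOKENSTYLES = { }, _EXTRASTYLES = { }, _CHILDREN = false, _GRAMMAR = true }
+% end
+%
+% function lexers.lex(lexer,text,initial)
+%     -- hand back a flat { style, position, style, position, ... } array
+%     return { "text", #text + 1 }
+% end
+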
+% TODO: as these files are in tds we can locate them and set the lexer root
+% to that one. Currently we're on context/documents.
+
+% This is an experiment: eventually we need to hook it into the verbatim code
+% and deal with widow lines and so on.
+
+\startluacode
+
+-- todo: merge with collapse
+-- todo: prehash whitespaces
+
+-- todo: hook into the pretty print code
+-- todo: a simple catcode regime with only \ { }
+
+local gsub, sub, find = string.gsub, string.sub, string.find
+local concat = table.concat
+local formatters = string.formatters
+local lpegmatch = lpeg.match
+local setmetatableindex = table.setmetatableindex
+
+local scite = require("util-sci")
+buffers.scite = scite
+
+-- context output:
+
+local f_def_color = formatters["\\definecolor[slxc%s][h=%s%s%s]%%"]
+local f_fore_none = formatters["\\def\\slx%s#1{{\\slxc%s#1}}%%"]
+local f_fore_bold = formatters["\\def\\slx%s#1{{\\slxc%s\\bf#1}}%%"]
+local f_none_bold = formatters["\\def\\slx%s#1{{\\bf#1}}%%"]
+local f_none_none = formatters["\\def\\slx%s#1{{#1}}%%"]
+local f_texstyled = formatters["\\slx%s{%s}"]
+
+local f_mapping = [[
+\let\string\slxL\string\letterleftbrace
+\let\string\slxR\string\letterrightbrace
+\let\string\slxM\string\letterdollar
+\let\string\slxV\string\letterbar
+\let\string\slxU\string\letterhat
+\let\string\slxD\string\letterunderscore
+\let\string\slxH\string\letterhash
+\let\string\slxB\string\letterbackslash
+\let\string\slxP\string\letterpercent
+\let\string\slxS\string\fixedspace
+%]]
+
+local replacer = lpeg.replacer {
+ ["{"] = "\\slxL ",
+ ["}"] = "\\slxR ",
+ ["$"] = "\\slxM ",
+ ["^"] = "\\slxU ",
+ ["_"] = "\\slxD ",
+ ["|"] = "\\slxV ",
+ ["#"] = "\\slxH ",
+ ["\\"] = "\\slxB ",
+ ["%"] = "\\slxP ",
+ [" "] = "\\slxS ",
+}
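+
+-- For instance (illustration only): lpegmatch(replacer,"{x_1}") escapes the braces and
+-- the underscore to the slxL, slxD and slxR macros set up in f_mapping above, each
+-- followed by a space, so the lexed snippet can safely travel through the TeX end.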
+
+local colors = nil
+
+local function exportcolors()
+ if not colors then
+ scite.loadscitelexer()
+ local function black(f)
+ return (f[1] == f[2]) and (f[2] == f[3]) and (f[3] == '00')
+ end
+ local result, r = { f_mapping }, 1
+ for k, v in table.sortedhash(lexer.context.styles) do
+ local fore = v.fore
+ if fore and not black(fore) then
+ r = r + 1
+ result[r] = f_def_color(k,fore[1],fore[2],fore[3])
+ end
+ end
+ r = r + 1
+ result[r] = "%"
+ for k, v in table.sortedhash(lexer.context.styles) do
+ local bold = v.bold
+ local fore = v.fore
+ r = r + 1
+ if fore and not black(fore) then
+ if bold then
+ result[r] = f_fore_bold(k,k)
+ else
+ result[r] = f_fore_none(k,k)
+ end
+ else
+ if bold then
+ result[r] = f_none_bold(k)
+ else
+ result[r] = f_none_none(k)
+ end
+ end
+ end
+ colors = concat(result,"\n")
+ end
+ return colors
+end
+
+local function exportwhites()
+ return setmetatableindex(function(t,k)
+ local v = find(k,"white") and true or false
+ t[k] = v
+ return v
+ end)
+end
+
+local function exportstyled(lexer,text)
+ local result = lexer.lex(lexer,text,0)
+ local start = 1
+ local whites = exportwhites()
+ local buffer = { }
+ for i=1,#result,2 do
+ local style = result[i]
+ local position = result[i+1]
+ local txt = sub(text,start,position-1)
+ txt = lpegmatch(replacer,txt)
+ if whites[style] then
+ buffer[#buffer+1] = txt
+ else
+ buffer[#buffer+1] = f_texstyled(style,txt)
+ end
+ start = position
+ end
+ buffer = concat(buffer)
+ return buffer
+end
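+
+-- The result handed back by lexer.lex is assumed to be flat: a style name at the odd
+-- slots and at the even slots the position where the next run starts, which is why the
+-- loop above cuts the styled run out with sub(text,start,position-1).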
+
+function scite.installcommands()
+ context(exportcolors())
+end
+
+local function lexdata(data,lexname)
+ buffers.assign("lex",exportstyled(scite.loadedlexers[lexname],data or ""))
+end
+
+scite.lexdata = lexdata
+
+function scite.lexbuffer(name,lexname)
+ lexdata(buffers.getcontent(name) or "",lexname or "tex")
+end
+
+function scite.lexfile(filename,lexname)
+ lexdata(io.loaddata(filename) or "",lexname or file.suffix(filename))
+end
+
+-- html output
+
+\stopluacode
+
+% This is a preliminary interface.
+
+\unprotect
+
+\unexpanded\def\installscitecommands
+ {\ctxlua{buffers.scite.installcommands()}%
+ \let\installscitecommands\relax}
+
+\unexpanded\def\startscite{\startlines}
+\unexpanded\def\stopscite {\stoplines}
+
+\unexpanded\def\scitefile
+ {\dosingleargument\module_scite_file}
+
+\unexpanded\def\module_scite_file[#1]%
+ {\start
+ \ctxlua{buffers.scite.lexfile("#1")}%
+ \installscitecommands
+ \tt
+ \dontcomplain
+ \setcatcodetable\ctxcatcodes % needed in xml
+ \startscite
+ \getbuffer[lex]%
+ \stopscite
+ \stop}
+
+\unexpanded\def\scitebuffer
+ {\dodoubleargument\module_scite_buffer}
+
+\unexpanded\def\module_scite_buffer[#1][#2]%
+ {\start
+ \ifsecondargument
+ \ctxlua{buffers.scite.lexbuffer("#2","#1")}%
+ \else
+ \ctxlua{buffers.scite.lexbuffer("#1","tex")}%
+ \fi
+ \installscitecommands
+ \tt
+ \dontcomplain
+ \setcatcodetable\ctxcatcodes % needed in xml
+ \startscite
+ \getbuffer[lex]%
+ \stopscite
+ \stop}
+
+\protect
+
+\continueifinputfile{m-scite.mkiv}
+
+\setupbodyfont[dejavu,8pt]
+
+\setuplayout
+ [width=middle,
+ height=middle,
+ header=1cm,
+ footer=1cm,
+ topspace=1cm,
+ bottomspace=1cm,
+ backspace=1cm]
+
+\startbuffer[demo]
+\startsubsubject[title={oeps}]
+
+\startMPcode
+ draw fullcircle
+ scaled 2cm
+ withpen pencircle scaled 1mm
+ withcolor .5green;
+ draw textext (
+ lua (
+ "local function f(s) return string.upper(s) end mp.quoted(f('foo'))"
+ )
+ ) withcolor .5red ;
+\stopMPcode
+
+\startluacode
+ context("foo")
+\stopluacode
+
+\stopsubsubject
+\stopbuffer
+
+\starttext
+
+% \scitefile[../lexers/scite-context-lexer.lua] \page
+% \scitefile[t:/manuals/about/about-metafun.tex] \page
+% \scitefile[t:/sources/strc-sec.mkiv] \page
+% \scitefile[e:/tmp/mp.w] \page
+% \scitefile[t:/manuals/hybrid/tugboat.bib] \page
+\scitefile[e:/tmp/test.bib] \page
+
+% \getbuffer[demo] \scitebuffer[demo]
+
+\stoptext
diff --git a/tex/context/base/m-spreadsheet.lua b/tex/context/base/m-spreadsheet.lua
index f329acf9a..1b3c5cb34 100644
--- a/tex/context/base/m-spreadsheet.lua
+++ b/tex/context/base/m-spreadsheet.lua
@@ -129,10 +129,10 @@ function datacell(a,b,...)
end
local function checktemplate(s)
- if find(s,"%%") then
+ if find(s,"%",1,true) then
-- normal template
return s
- elseif find(s,"@") then
+ elseif find(s,"@",1,true) then
-- tex specific template
return gsub(s,"@","%%")
else
diff --git a/tex/context/base/m-spreadsheet.mkiv b/tex/context/base/m-spreadsheet.mkiv
index 5e0499184..914a2b57a 100644
--- a/tex/context/base/m-spreadsheet.mkiv
+++ b/tex/context/base/m-spreadsheet.mkiv
@@ -88,6 +88,8 @@
\unexpanded\def\doifelsespreadsheetcell
{\dosingleempty\module_spreadsheet_doifelse_cell}
+\let\doifspreadsheetcellelse\doifelsespreadsheetcell
+
\unexpanded\def\module_spreadsheet_doifelse_cell[#1]#2#3%
{\ctxlua{moduledata.spreadsheets.doifelsecell("#1",\number#2,\number#3)}}
@@ -118,7 +120,7 @@
\let\stoprow \module_spreadsheet_row_stop
\let\startcell\module_spreadsheet_cell_start
\let\stopcell \module_spreadsheet_cell_stop
- \doifassignmentelse{#1}
+ \doifelseassignment{#1}
{\module_spreadsheet_start
\directsetup{spreadsheet:before:\currentspreadsheet}%
\bTABLE[\c!align=\v!flushright,#1]}
@@ -136,7 +138,7 @@
\unexpanded\def\module_spreadsheet_row_stop {\eTR}
\unexpanded\def\module_spreadsheet_cell_start
- {\doifnextoptionalelse\module_spreadsheet_cell_start_yes\module_spreadsheet_cell_start_nop}
+ {\doifelsenextoptional\module_spreadsheet_cell_start_yes\module_spreadsheet_cell_start_nop}
\unexpanded\def\module_spreadsheet_cell_start_yes[#1]#2\stopcell
{\bTD[#1]\getspr{#2}\eTD}
diff --git a/tex/context/base/m-steps.lua b/tex/context/base/m-steps.lua
index 97759b799..8eb481550 100644
--- a/tex/context/base/m-steps.lua
+++ b/tex/context/base/m-steps.lua
@@ -10,7 +10,6 @@ if not modules then modules = { } end modules ['x-flow'] = {
moduledata.steps = moduledata.steps or { }
-local points = number.points -- number.pt
local variables = interfaces.variables
local trace_charts = false
@@ -100,22 +99,22 @@ function commands.step_make_chart(settings)
end
--
context("text_line_color := \\MPcolor{%s} ;", textsettings.framecolor)
- context("text_line_width := %s ;", points(textsettings.rulethickness))
+ context("text_line_width := %p ;", textsettings.rulethickness)
context("text_fill_color := \\MPcolor{%s} ;", textsettings.backgroundcolor)
- context("text_offset := %s ;", points(textsettings.offset))
- context("text_distance_set := %s ;", points(textsettings.distance))
+ context("text_offset := %p ;", textsettings.offset)
+ context("text_distance_set := %p ;", textsettings.distance)
--
context("cell_line_color := \\MPcolor{%s} ;", cellsettings.framecolor)
- context("cell_line_width := %s ;", points(cellsettings.rulethickness))
+ context("cell_line_width := %p ;", cellsettings.rulethickness)
context("cell_fill_color := \\MPcolor{%s} ;", cellsettings.backgroundcolor)
- context("cell_offset := %s ;", points(cellsettings.offset))
- context("cell_distance_x := %s ;", points(cellsettings.dx))
- context("cell_distance_y := %s ;", points(cellsettings.dy))
+ context("cell_offset := %p ;", cellsettings.offset)
+ context("cell_distance_x := %p ;", cellsettings.dx)
+ context("cell_distance_y := %p ;", cellsettings.dy)
--
context("line_line_color := \\MPcolor{%s} ;", linesettings.color)
- context("line_line_width := %s ;", points(linesettings.rulethickness))
- context("line_distance := %s ;", points(linesettings.distance))
- context("line_offset := %s ;", points(linesettings.offset))
+ context("line_line_width := %p ;", linesettings.rulethickness)
+ context("line_distance := %p ;", linesettings.distance)
+ context("line_offset := %p ;", linesettings.offset)
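+    -- the %p directive in the context formatter prints a scaled point value as points,
+    -- so the explicit number.points conversion is no longer needed; for example (sketch):
+    --
+    --   context("line_offset := %p ;", 65536 * 10) -- should come out as "line_offset := 10pt ;"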
--
for i=1,#steps do
local step = steps[i]
diff --git a/tex/context/base/m-steps.mkvi b/tex/context/base/m-steps.mkvi
index a07ece3ae..c9c5a0636 100644
--- a/tex/context/base/m-steps.mkvi
+++ b/tex/context/base/m-steps.mkvi
@@ -98,7 +98,7 @@
\def\module_steps_start_chart[#name][#settings]%
{\startnointerference
\iffirstargument
- \doifassignmentelse{#name}
+ \doifelseassignment{#name}
{\let\currentSTEPchart\empty
\xdef\module_steps_flush_chart{\module_steps_chart[][#name]}}
{\edef\currentSTEPchart{#name}%
diff --git a/tex/context/base/m-translate.mkiv b/tex/context/base/m-translate.mkiv
index f36f9a9fb..2e6cbe950 100644
--- a/tex/context/base/m-translate.mkiv
+++ b/tex/context/base/m-translate.mkiv
@@ -22,12 +22,34 @@
local compiled, list = nil, nil
+ -- function translators.register(from,to)
+ -- local l = lpeg.P(from)/to
+ -- if not list then
+ -- list = l
+ -- else
+ -- list = list + l
+ -- end
+ -- compiled = nil
+ -- end
+ --
+ -- function translators.translate(s)
+ -- if list then
+ -- if not compiled then
+ -- compiled = lpeg.Cs((list + lpeg.P(1))^0)
+ -- end
+ -- return compiled:match(s)
+ -- else
+ -- return s
+ -- end
+ -- end
+
+ -- local function prepare()
+
function translators.register(from,to)
- local l = lpeg.P(from)/to
if not list then
- list = l
+ list = { [from] = to }
else
- list = list + l
+ list[from] = to
end
compiled = nil
end
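+
+    -- Quick illustration (made-up words): after
+    --
+    --   translators.register("colour","color")
+    --   translators.register("centre","center")
+    --
+    -- the replacements sit in one table and translate (below) builds a single character
+    -- tree from that table with lpeg.utfchartabletopattern, instead of chaining one
+    -- P(from)/to alternative per registered pair as the old code did.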
@@ -35,7 +57,8 @@
function translators.translate(s)
if list then
if not compiled then
- compiled = lpeg.Cs((list + lpeg.P(1))^0)
+ local tree = lpeg.utfchartabletopattern(list)
+ compiled = lpeg.Cs((tree/list + lpeg.patterns.utf8character)^0 * lpeg.P(-1)) -- the P(1) is needed in order to accept non utf
end
return compiled:match(s)
else
diff --git a/tex/context/base/m-visual.mkiv b/tex/context/base/m-visual.mkiv
index 504c0d0c5..d50215966 100644
--- a/tex/context/base/m-visual.mkiv
+++ b/tex/context/base/m-visual.mkiv
@@ -161,7 +161,7 @@
{\freezerandomseed
\let\endstrut\relax
\let\begstrut\relax
- \doifinsetelse{#1}{\v!left,\v!right}
+ \doifelseinset{#1}{\v!left,\v!right}
{\fakewords{2}{4}}
{\fakewords{4}{10}}}%
{\doifinset{#1}{\v!left,\v!right}
@@ -192,8 +192,8 @@
{\dimen0\zeropoint
\getrandomcount\scratchcounter{3}{6}%
\dorecurse\scratchcounter
- {\getrandomdimen\scratchdimen{1em}{3em}%
- \mathinner{\red\fakerule\scratchdimen}%
+ {\getrandomdimen\scratchdimen{0.5em}{1.5em}%
+ \mathord{\red\fakerule\scratchdimen}%
\ifnum\recurselevel<\scratchcounter+\fi
\advance\scratchdimen\dimen0}%
=\mathinner{\red\fakerule\scratchdimen}}
@@ -762,7 +762,7 @@
\startoverlay
{\copy\scratchbox}
{\dodotagbox{#1}\scratchbox{\framed
- [\c!background=\v!screen,\c!backgroundscreen=1]{#2}}}
+ [\c!background=\v!color,\c!backgroundcolor=\v!gray]{#2}}}
\stopoverlay
\egroup
\nextboxwd\the\wd\scratchbox
diff --git a/tex/context/base/math-act.lua b/tex/context/base/math-act.lua
index 879480dce..d0ea78990 100644
--- a/tex/context/base/math-act.lua
+++ b/tex/context/base/math-act.lua
@@ -90,6 +90,8 @@ end
sequencers.appendaction("mathparameters","system","mathematics.scaleparameters")
+-- AccentBaseHeight vs FlattenedAccentBaseHeight
+
function mathematics.checkaccentbaseheight(target,original)
local mathparameters = target.mathparameters
if mathparameters and mathparameters.AccentBaseHeight == 0 then
@@ -103,15 +105,23 @@ function mathematics.checkprivateparameters(target,original)
local mathparameters = target.mathparameters
if mathparameters then
local parameters = target.parameters
+ local properties = target.properties
if parameters then
- if not mathparameters.FractionDelimiterSize then
- mathparameters.FractionDelimiterSize = 1.01 * parameters.size
- end
- if not mathparameters.FractionDelimiterDisplayStyleSize then
- mathparameters.FractionDelimiterDisplayStyleSize = 2.40 * parameters.size
+ local size = parameters.size
+ if size then
+ if not mathparameters.FractionDelimiterSize then
+ mathparameters.FractionDelimiterSize = 1.01 * size
+ end
+ if not mathparameters.FractionDelimiterDisplayStyleSize then
+ mathparameters.FractionDelimiterDisplayStyleSize = 2.40 * size
+ end
+ elseif properties then
+ report_math("invalid parameters in font %a",properties.fullname or "?")
+ else
+ report_math("invalid parameters in font")
end
- elseif target.properties then
- report_math("no parameters in font %a",target.properties.fullname or "?")
+ elseif properties then
+ report_math("no parameters in font %a",properties.fullname or "?")
else
report_math("no parameters and properties in font")
end
@@ -465,21 +475,17 @@ setmetatableindex(extensibles,function(extensibles,font)
return codes
end)
-function mathematics.extensiblecode(family,unicode)
+local function extensiblecode(family,unicode)
return extensibles[family_font(family or 0)][unicode][1]
end
-function commands.extensiblecode(family,unicode)
- context(extensibles[family_font(family or 0)][unicode][1])
-end
-
-- left : [head] ...
-- right : ... [head]
-- horizontal : [head] ... [head]
--
-- abs(right["start"] - right["end"]) | right.advance | characters[right.glyph].width
-function commands.horizontalcode(family,unicode)
+local function horizontalcode(family,unicode)
local font = family_font(family or 0)
local data = extensibles[font][unicode]
local kind = data[1]
@@ -503,13 +509,30 @@ function commands.horizontalcode(family,unicode)
loffset = abs((left ["start"] or 0) - (left ["end"] or 0))
roffset = abs((right["start"] or 0) - (right["end"] or 0))
end
- else
end
- texsetdimen("scratchleftoffset",loffset)
- texsetdimen("scratchrightoffset",roffset)
- context(kind)
+ return kind, loffset, roffset
end
+mathematics.extensiblecode = extensiblecode
+mathematics.horizontalcode = horizontalcode
+
+interfaces.implement {
+ name = "extensiblecode",
+ arguments = { "integer", "integer" },
+ actions = { extensiblecode, context }
+}
+
+interfaces.implement {
+ name = "horizontalcode",
+ arguments = { "integer", "integer" },
+ actions = function(family,unicode)
+ local kind, loffset, roffset = horizontalcode(family,unicode)
+ texsetdimen("scratchleftoffset", loffset)
+ texsetdimen("scratchrightoffset",roffset)
+ context(kind)
+ end
+}
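+
+-- Both helpers stay reachable from Lua as mathematics.extensiblecode and
+-- mathematics.horizontalcode; only the TeX facing entry points moved from the old
+-- commands namespace to interfaces.implement.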
+
-- experiment
-- check: when true, only set when present in font
@@ -517,85 +540,6 @@ end
local blocks = characters.blocks -- this will move to char-ini
-blocks["uppercasenormal"] = { first = 0x00041, last = 0x0005A }
-blocks["uppercasebold"] = { first = 0x1D400, last = 0x1D419 }
-blocks["uppercaseitalic"] = { first = 0x1D434, last = 0x1D44D }
-blocks["uppercasebolditalic"] = { first = 0x1D468, last = 0x1D481 }
-blocks["uppercasescript"] = { first = 0x1D49C, last = 0x1D4B5 }
-blocks["uppercaseboldscript"] = { first = 0x1D4D0, last = 0x1D4E9 }
-blocks["uppercasefraktur"] = { first = 0x1D504, last = 0x1D51D }
-blocks["uppercasedoublestruck"] = { first = 0x1D538, last = 0x1D551 }
-blocks["uppercaseboldfraktur"] = { first = 0x1D56C, last = 0x1D585 }
-blocks["uppercasesansserifnormal"] = { first = 0x1D5A0, last = 0x1D5B9 }
-blocks["uppercasesansserifbold"] = { first = 0x1D5D4, last = 0x1D5ED }
-blocks["uppercasesansserifitalic"] = { first = 0x1D608, last = 0x1D621 }
-blocks["uppercasesansserifbolditalic"] = { first = 0x1D63C, last = 0x1D655 }
-blocks["uppercasemonospace"] = { first = 0x1D670, last = 0x1D689 }
-blocks["uppercasegreeknormal"] = { first = 0x00391, last = 0x003AA }
-blocks["uppercasegreekbold"] = { first = 0x1D6A8, last = 0x1D6C1 }
-blocks["uppercasegreekitalic"] = { first = 0x1D6E2, last = 0x1D6FB }
-blocks["uppercasegreekbolditalic"] = { first = 0x1D71C, last = 0x1D735 }
-blocks["uppercasegreeksansserifbold"] = { first = 0x1D756, last = 0x1D76F }
-blocks["uppercasegreeksansserifbolditalic"] = { first = 0x1D790, last = 0x1D7A9 }
-
-blocks["lowercasenormal"] = { first = 0x00061, last = 0x0007A }
-blocks["lowercasebold"] = { first = 0x1D41A, last = 0x1D433 }
-blocks["lowercaseitalic"] = { first = 0x1D44E, last = 0x1D467 }
-blocks["lowercasebolditalic"] = { first = 0x1D482, last = 0x1D49B }
-blocks["lowercasescript"] = { first = 0x1D4B6, last = 0x1D4CF }
-blocks["lowercaseboldscript"] = { first = 0x1D4EA, last = 0x1D503 }
-blocks["lowercasefraktur"] = { first = 0x1D51E, last = 0x1D537 }
-blocks["lowercasedoublestruck"] = { first = 0x1D552, last = 0x1D56B }
-blocks["lowercaseboldfraktur"] = { first = 0x1D586, last = 0x1D59F }
-blocks["lowercasesansserifnormal"] = { first = 0x1D5BA, last = 0x1D5D3 }
-blocks["lowercasesansserifbold"] = { first = 0x1D5EE, last = 0x1D607 }
-blocks["lowercasesansserifitalic"] = { first = 0x1D622, last = 0x1D63B }
-blocks["lowercasesansserifbolditalic"] = { first = 0x1D656, last = 0x1D66F }
-blocks["lowercasemonospace"] = { first = 0x1D68A, last = 0x1D6A3 }
-blocks["lowercasegreeknormal"] = { first = 0x003B1, last = 0x003CA }
-blocks["lowercasegreekbold"] = { first = 0x1D6C2, last = 0x1D6DB }
-blocks["lowercasegreekitalic"] = { first = 0x1D6FC, last = 0x1D715 }
-blocks["lowercasegreekbolditalic"] = { first = 0x1D736, last = 0x1D74F }
-blocks["lowercasegreeksansserifbold"] = { first = 0x1D770, last = 0x1D789 }
-blocks["lowercasegreeksansserifbolditalic"] = { first = 0x1D7AA, last = 0x1D7C3 }
-
-blocks["digitsnormal"] = { first = 0x00030, last = 0x00039 }
-blocks["digitsbold"] = { first = 0x1D7CE, last = 0x1D7D8 }
-blocks["digitsdoublestruck"] = { first = 0x1D7D8, last = 0x1D7E2 }
-blocks["digitssansserifnormal"] = { first = 0x1D7E2, last = 0x1D7EC }
-blocks["digitssansserifbold"] = { first = 0x1D7EC, last = 0x1D805 }
-blocks["digitsmonospace"] = { first = 0x1D7F6, last = 0x1D80F }
-
-blocks["mathematicaloperators"] = { first = 0x02200, last = 0x022FF }
-blocks["miscellaneousmathematicalsymbolsa"] = { first = 0x027C0, last = 0x027EF }
-blocks["miscellaneousmathematicalsymbolsb"] = { first = 0x02980, last = 0x029FF }
-blocks["supplementalmathematicaloperators"] = { first = 0x02A00, last = 0x02AFF }
-blocks["letterlikesymbols"] = { first = 0x02100, last = 0x0214F }
-blocks["miscellaneoustechnical"] = { first = 0x02308, last = 0x0230B }
-blocks["geometricshapes"] = { first = 0x025A0, last = 0x025FF }
-blocks["miscellaneoussymbolsandarrows"] = { first = 0x02B30, last = 0x02B4C }
-blocks["mathematicalalphanumericsymbols"] = { first = 0x00400, last = 0x1D7FF }
-
-blocks["digitslatin"] = { first = 0x00030, last = 0x00039 }
-blocks["digitsarabicindic"] = { first = 0x00660, last = 0x00669 }
-blocks["digitsextendedarabicindic"] = { first = 0x006F0, last = 0x006F9 }
-------["digitsdevanagari"] = { first = 0x00966, last = 0x0096F }
-------["digitsbengali"] = { first = 0x009E6, last = 0x009EF }
-------["digitsgurmukhi"] = { first = 0x00A66, last = 0x00A6F }
-------["digitsgujarati"] = { first = 0x00AE6, last = 0x00AEF }
-------["digitsoriya"] = { first = 0x00B66, last = 0x00B6F }
-------["digitstamil"] = { first = 0x00030, last = 0x00039 } -- no zero
-------["digitstelugu"] = { first = 0x00C66, last = 0x00C6F }
-------["digitskannada"] = { first = 0x00CE6, last = 0x00CEF }
-------["digitsmalayalam"] = { first = 0x00D66, last = 0x00D6F }
-------["digitsthai"] = { first = 0x00E50, last = 0x00E59 }
-------["digitslao"] = { first = 0x00ED0, last = 0x00ED9 }
-------["digitstibetan"] = { first = 0x00F20, last = 0x00F29 }
-------["digitsmyanmar"] = { first = 0x01040, last = 0x01049 }
-------["digitsethiopic"] = { first = 0x01369, last = 0x01371 }
-------["digitskhmer"] = { first = 0x017E0, last = 0x017E9 }
-------["digitsmongolian"] = { first = 0x01810, last = 0x01809 }
-
-- operators : 0x02200
-- symbolsa : 0x02701
-- symbolsb : 0x02901
diff --git a/tex/context/base/math-ali.mkiv b/tex/context/base/math-ali.mkiv
index 6bfde57b6..bba55ba72 100644
--- a/tex/context/base/math-ali.mkiv
+++ b/tex/context/base/math-ali.mkiv
@@ -31,6 +31,8 @@
\newtoks\c_math_align_b
\newtoks\c_math_align_c
+\def\displayopenupvalue{.25\bodyfontsize}
+
\def\math_build_eqalign
{\scratchtoks\emptytoks
\dorecurse{\mathalignmentparameter\c!m}\math_build_eqalign_step
@@ -38,19 +40,25 @@
\def\math_build_eqalign_step
{\ifnum\recurselevel>\plusone
- %\appendtoks
- % \tabskip\mathalignmentparameter\c!distance\aligntab\tabskip\zeropoint
- %\to\scratchtoks
\scratchtoks\expandafter{\the\scratchtoks\tabskip\mathalignmentparameter\c!distance\aligntab\tabskip\zeropoint}%
\fi
\normalexpanded{\scratchtoks{\the\scratchtoks\the\c_math_align_a}}%
\dorecurse{\numexpr\mathalignmentparameter\c!n-\plusone\relax}
{\normalexpanded{\scratchtoks{\the\scratchtoks\the\c_math_align_b}}}}
-\def\math_math_in_eqalign#1{$\tabskip\zeropoint\everycr\emptytoks\displaystyle{{}#1{}}$}
-\def\math_text_in_eqalign#1{$\tabskip\zeropoint\everycr\emptytoks#1$}
+\def\math_math_in_eqalign#1%
+ {\startforceddisplaymath
+ \tabskip\zeropoint
+ \everycr\emptytoks
+ {{}#1{}}%
+ \stopforceddisplaymath}
-\def\displayopenupvalue{.25\bodyfontsize}
+\def\math_text_in_eqalign#1%
+ {\startimath
+ \tabskip\zeropoint
+ \everycr\emptytoks
+ #1%
+ \stopimath}
\def\eqalign#1% why no halign here, probably because of displaywidth
{\emptyhbox
@@ -58,7 +66,15 @@
\vcenter
{\math_openup\displayopenupvalue % was: \openup\jot
\mathsurround\zeropoint
- \ialign{\strut\hfil$\displaystyle{\alignmark\alignmark}$\aligntab$\displaystyle{{}\alignmark\alignmark{}}$\hfil\crcr#1\crcr}}%
+ \ialign{%
+ \strut
+ \hfil
+ \startforceddisplaymath{\alignmark\alignmark}\stopforceddisplaymath
+ \aligntab
+ \startforceddisplaymath{{}\alignmark\alignmark{}}\stopforceddisplaymath
+ \hfil\crcr
+ #1\crcr}%
+ }%
\mskip\thinmuskip}
% preamble is scanned for tabskips so we need the span to prevent an error message
@@ -257,7 +273,7 @@
%
-\def\numberedeqalign
+\unexpanded\def\numberedeqalign
{\doifelse{\formulaparameter\c!location}\v!left
\math_handle_eqalign_no_l_aligned
\math_handle_eqalign_no_r_aligned}
@@ -605,16 +621,27 @@
{\iffirstargument
\setupcurrentmathmatrix[#1]%
\fi
- \emptyhbox
+ % \emptyhbox % noted at 25-05-2014: what was that one doing here? it messed up spacing
\math_matrix_align_method_analyze
\mathmatrixleft
+ % new per 13-10-2014
+ \edef\p_strut{\mathmatrixparameter\c!strut}%
+ \ifx\p_strut\v!no
+ \let\m_matrix_strut\relax
+ \else
+ \let\m_matrix_strut\strut
+ \ifx\p_strut\v!yes\else
+ \spacing\p_strut
+ \fi
+ \fi
+ %
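+    % i.e. strut=no drops the strut, strut=yes keeps the normal one, and a numeric value
+    % (like the strut=1.25 used for bmatrix below) is passed on to \spacing for a
+    % proportionally larger strut.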
\mathmatrixbox\bgroup
\pushmacro\math_matrix_NC
\let\endmath\relax
\def\NC{\math_matrix_NC}%
\def\MC{\math_matrix_NC\ifmmode\else\startimath\let\endmath\stopimath\fi}%
\global\let\math_matrix_NC\math_matrix_NC_indeed
- \def\NR{\endmath\global\let\math_matrix_NC\math_matrix_NC_indeed\crcr}%
+ \def\NR{\endmath\global\let\math_matrix_NC\math_matrix_NC_indeed\m_matrix_strut \crcr}%
\normalbaselines
\mathsurround\zeropoint
\everycr\emptytoks
@@ -700,7 +727,7 @@
%D
%D \typebuffer \getbuffer
%D
-%D \definemathmatrix[bmatrix][left={\left[\mskip\thinmuskip},right={\mskip\thinmuskip\right]}]
+%D \definemathmatrix[bmatrix][left={\left[\mskip\thinmuskip},right={\mskip\thinmuskip\right]},strut=1.25]
%D
%D \startbuffer
%D \placeformula \startformula[-] \startbmatrix
@@ -1024,8 +1051,7 @@
\strc_formulas_place_number
\setbox\scratchbox\math_hbox to \displaywidth\bgroup
\mathinnerstrut
- $%
- \displaystyle
+ \startforceddisplaymath
\ifcase\mathraggedstatus\or\hfill\or\hfill\fi}
\def\math_box_llapped_math_no
@@ -1047,7 +1073,7 @@
\fi}
\unexpanded\def\stopmathbox
- {$%
+ {\stopforceddisplaymath
\ifcase\mathraggedstatus\or\or\hfill\or\hfill\fi
\egroup
\setbox0\hbox{\unhcopy\scratchbox}%
diff --git a/tex/context/base/math-arr.mkiv b/tex/context/base/math-arr.mkiv
index 6824c362e..0e3a53f32 100644
--- a/tex/context/base/math-arr.mkiv
+++ b/tex/context/base/math-arr.mkiv
@@ -63,7 +63,7 @@
\setvalue{\??matharrowsettings\v!big }{\def\m_math_arrows_extra{20}}
\setvalue{\??matharrowsettings\v!normal }{}
\setvalue{\??matharrowsettings }{}
-\setvalue{\??matharrowsettings\s!unknown}{\doifnumberelse\p_math_spacing{\let\m_math_arrows_extra\p_math_spacing}\donothing}
+\setvalue{\??matharrowsettings\s!unknown}{\doifelsenumber\p_math_spacing{\let\m_math_arrows_extra\p_math_spacing}\donothing}
\def\math_arrows_construct#1#2#3#4#5% hm, looks like we do a double mathrel (a bit cleaned up .. needs checking)
{\begingroup
diff --git a/tex/context/base/math-def.mkiv b/tex/context/base/math-def.mkiv
index 250986959..8247ac008 100644
--- a/tex/context/base/math-def.mkiv
+++ b/tex/context/base/math-def.mkiv
@@ -30,6 +30,9 @@
\definemathcommand [arccos] [nolop] {\mfunctionlabeltext{arccos}}
\definemathcommand [arcsin] [nolop] {\mfunctionlabeltext{arcsin}}
\definemathcommand [arctan] [nolop] {\mfunctionlabeltext{arctan}}
+\definemathcommand [acos] [nolop] {\mfunctionlabeltext{acos}}
+\definemathcommand [asin] [nolop] {\mfunctionlabeltext{asin}}
+\definemathcommand [atan] [nolop] {\mfunctionlabeltext{atan}}
\definemathcommand [arg] [nolop] {\mfunctionlabeltext{arg}}
\definemathcommand [cosh] [nolop] {\mfunctionlabeltext{cosh}}
\definemathcommand [cos] [nolop] {\mfunctionlabeltext{cos}}
@@ -64,6 +67,7 @@
\definemathcommand [sup] [limop] {\mfunctionlabeltext{sup}}
\definemathcommand [tanh] [nolop] {\mfunctionlabeltext{tanh}}
\definemathcommand [tan] [nolop] {\mfunctionlabeltext{tan}}
+\definemathcommand [diff] {\mfunctionlabeltext{diff}}
\let\normalmatharg\arg % todo: maybe automatically
diff --git a/tex/context/base/math-dir.lua b/tex/context/base/math-dir.lua
index 507a24e41..0f871beed 100644
--- a/tex/context/base/math-dir.lua
+++ b/tex/context/base/math-dir.lua
@@ -23,8 +23,19 @@ local trace_directions = false trackers.register("typesetters.directions.math
local report_directions = logs.reporter("typesetting","math directions")
-local insert_node_before = nodes.insert_before
-local insert_node_after = nodes.insert_after
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getlist = nuts.getlist
+local setfield = nuts.setfield
+local getattr = nuts.getattr
+
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
local nodecodes = nodes.nodecodes
local tasks = nodes.tasks
@@ -33,7 +44,7 @@ local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local new_textdir = nodepool.textdir
@@ -61,9 +72,9 @@ local function processmath(head)
stop = nil
end
while current do
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- local char = current.char
+ local char = getchar(current)
local cdir = chardirections[char]
if cdir == "en" or cdir == "an" then -- we could check for mathclass punctuation
if not start then
@@ -83,7 +94,7 @@ local function processmath(head)
if mirror then
local class = charclasses[char]
if class == "open" or class == "close" then
- current.char = mirror
+ setfield(current,"char",mirror)
if trace_directions then
report_directions("mirrored: %C to %C",char,mirror)
end
@@ -94,6 +105,13 @@ local function processmath(head)
end
elseif not start then
-- nothing
+if id == hlist_code or id == vlist_code then
+ local list, d = processmath(getlist(current))
+ setfield(current,"list",list)
+ if d then
+ done = true
+ end
+end
elseif start == stop then
start = nil
else
@@ -101,14 +119,14 @@ local function processmath(head)
-- math can pack things into hlists .. we need to make sure we don't process
-- too often: needs checking
if id == hlist_code or id == vlist_code then
- local list, d = processmath(current.list)
- current.list = list
+ local list, d = processmath(getlist(current))
+ setfield(current,"list",list)
if d then
done = true
end
end
end
- current = current.next
+ current = getnext(current)
end
if not start then
-- nothing
@@ -124,9 +142,11 @@ local enabled = false
function directions.processmath(head) -- style, penalties
if enabled then
- local a = head[a_mathbidi]
+ local h = tonut(head)
+ local a = getattr(h,a_mathbidi)
if a and a > 0 then
- return processmath(head)
+ local head, done = processmath(h)
+ return tonode(head), done
end
end
return head, false
@@ -142,4 +162,8 @@ function directions.setmath(n)
end
end
-commands.setmathdirection = directions.setmath
+interfaces.implement {
+ name = "setmathdirection",
+ actions = directions.setmath,
+ arguments = "integer"
+}
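+
+-- The conversion above follows the usual nuts recipe: convert the incoming list once
+-- with tonut, walk it with the accessors, and hand it back with tonode. A minimal
+-- sketch of that shape (purely illustrative, not tied to this mechanism):
+--
+-- local function process(head)
+--     local h       = tonut(head)
+--     local current = h
+--     local done    = false
+--     while current do
+--         if getid(current) == glyph_code then
+--             setfield(current,"char",getchar(current)) -- a no-op, just showing access
+--             done = true
+--         end
+--         current = getnext(current)
+--     end
+--     return tonode(h), done
+-- end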
diff --git a/tex/context/base/math-fbk.lua b/tex/context/base/math-fbk.lua
index bd9a1d315..76dd1ad9b 100644
--- a/tex/context/base/math-fbk.lua
+++ b/tex/context/base/math-fbk.lua
@@ -20,7 +20,6 @@ local virtualcharacters = { }
local identifiers = fonts.hashes.identifiers
local lastmathids = fonts.hashes.lastmathids
-local tounicode16 = fonts.mappings.tounicode16
-- we need a trick (todo): if we define scriptscript, script and text in
-- that order we could use their id's .. i.e. we could always add a font
@@ -133,10 +132,8 @@ function fallbacks.apply(target,original)
else
-- something else
end
- if trace_fallbacks then
- if characters[k] then
- report_fallbacks("extending math font %a with %U",target.properties.fullname,k)
- end
+ if trace_fallbacks and characters[k] then
+ report_fallbacks("extending math font %a with %U",target.properties.fullname,k)
end
end
end
@@ -182,22 +179,22 @@ end
-- virtualcharacters[0x208B] = 0x002B
virtualcharacters[0x207A] = function(data)
- data.replacement = 0x2212
+ data.replacement = 0x002B
return raised(data)
end
virtualcharacters[0x207B] = function(data)
- data.replacement = 0x002B
+ data.replacement = 0x2212
return raised(data)
end
virtualcharacters[0x208A] = function(data)
- data.replacement = 0x2212
+ data.replacement = 0x002B
return raised(data,true)
end
virtualcharacters[0x208B] = function(data)
- data.replacement = 0x002B
+ data.replacement = 0x2212
return raised(data,true)
end
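+
+-- i.e. the superscript and subscript plus (0x207A and 0x208A) are now raised or lowered
+-- copies of PLUS SIGN (0x002B) and the minus variants (0x207B and 0x208B) copies of
+-- MINUS SIGN (0x2212); the previous assignments had the two replacements swapped.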
@@ -332,11 +329,12 @@ end
-- we could move the defs from math-act here
-local function accent_to_extensible(target,newchr,original,oldchr,height,depth,swap,offset)
+local function accent_to_extensible(target,newchr,original,oldchr,height,depth,swap,offset,unicode)
local characters = target.characters
- local addprivate = fonts.helpers.addprivate
local olddata = characters[oldchr]
- if olddata and not olddata.commands then
+ -- brrr ... pagella has only next
+ if olddata and not olddata.commands and olddata.width > 0 then
+ local addprivate = fonts.helpers.addprivate
if swap then
swap = characters[swap]
height = swap.depth
@@ -351,6 +349,7 @@ local function accent_to_extensible(target,newchr,original,oldchr,height,depth,s
width = olddata.width,
height = height,
depth = depth,
+ unicode = unicode,
}
local glyphdata = newdata
local nextglyph = olddata.next
@@ -399,9 +398,9 @@ local function accent_to_extensible(target,newchr,original,oldchr,height,depth,s
end
end
end
- return glyphdata
+ return glyphdata, true
else
- return olddata
+ return olddata, false
end
end
@@ -416,7 +415,7 @@ virtualcharacters[0x203E] = function(data) -- could be FE33E instead
height = target.parameters.xheight/4
depth = height
end
- return accent_to_extensible(target,0x203E,data.original,0x0305,height,depth)
+ return accent_to_extensible(target,0x203E,data.original,0x0305,height,depth,nil,nil,0x203E)
end
virtualcharacters[0xFE33E] = virtualcharacters[0x203E] -- convenient
@@ -427,38 +426,40 @@ local function smashed(data,unicode,swap,private)
local original = data.original
local chardata = target.characters[unicode]
if chardata and chardata.height > target.parameters.xheight then
- return accent_to_extensible(target,private,original,unicode,0,0,swap)
+ return accent_to_extensible(target,private,original,unicode,0,0,swap,nil,unicode)
else
return original.characters[unicode]
end
end
-addextra(0xFE3DE, { description="EXTENSIBLE OF 0x03DE", unicodeslot=0xFE3DE, mathextensible = "r", mathstretch = "h" } )
-addextra(0xFE3DC, { description="EXTENSIBLE OF 0x03DC", unicodeslot=0xFE3DC, mathextensible = "r", mathstretch = "h" } )
-addextra(0xFE3B4, { description="EXTENSIBLE OF 0x03B4", unicodeslot=0xFE3B4, mathextensible = "r", mathstretch = "h" } )
+addextra(0xFE3DE, { description="EXTENSIBLE OF 0x03DE", unicodeslot=0xFE3DE, mathextensible = "r", mathstretch = "h", mathclass = "topaccent" } )
+addextra(0xFE3DC, { description="EXTENSIBLE OF 0x03DC", unicodeslot=0xFE3DC, mathextensible = "r", mathstretch = "h", mathclass = "topaccent" } )
+addextra(0xFE3B4, { description="EXTENSIBLE OF 0x03B4", unicodeslot=0xFE3B4, mathextensible = "r", mathstretch = "h", mathclass = "topaccent" } )
virtualcharacters[0xFE3DE] = function(data) return smashed(data,0x23DE,0x23DF,0xFE3DE) end
virtualcharacters[0xFE3DC] = function(data) return smashed(data,0x23DC,0x23DD,0xFE3DC) end
virtualcharacters[0xFE3B4] = function(data) return smashed(data,0x23B4,0x23B5,0xFE3B4) end
-addextra(0xFE3DF, { description="EXTENSIBLE OF 0x03DF", unicodeslot=0xFE3DF, mathextensible = "r", mathstretch = "h" } )
-addextra(0xFE3DD, { description="EXTENSIBLE OF 0x03DD", unicodeslot=0xFE3DD, mathextensible = "r", mathstretch = "h" } )
-addextra(0xFE3B5, { description="EXTENSIBLE OF 0x03B5", unicodeslot=0xFE3B5, mathextensible = "r", mathstretch = "h" } )
+addextra(0xFE3DF, { description="EXTENSIBLE OF 0x03DF", unicodeslot=0xFE3DF, mathextensible = "r", mathstretch = "h", mathclass = "botaccent" } )
+addextra(0xFE3DD, { description="EXTENSIBLE OF 0x03DD", unicodeslot=0xFE3DD, mathextensible = "r", mathstretch = "h", mathclass = "botaccent" } )
+addextra(0xFE3B5, { description="EXTENSIBLE OF 0x03B5", unicodeslot=0xFE3B5, mathextensible = "r", mathstretch = "h", mathclass = "botaccent" } )
-virtualcharacters[0xFE3DF] = function(data) return data.original.characters[0x23DF] end
-virtualcharacters[0xFE3DD] = function(data) return data.original.characters[0x23DD] end
-virtualcharacters[0xFE3B5] = function(data) return data.original.characters[0x23B5] end
+virtualcharacters[0xFE3DF] = function(data) local c = data.target.characters[0x23DF] if c then c.unicode = 0x23DF return c end end
+virtualcharacters[0xFE3DD] = function(data) local c = data.target.characters[0x23DD] if c then c.unicode = 0x23DD return c end end
+virtualcharacters[0xFE3B5] = function(data) local c = data.target.characters[0x23B5] if c then c.unicode = 0x23B5 return c end end
-- todo: add some more .. numbers might change
-addextra(0xFE302, { description="EXTENSIBLE OF 0x0302", unicodeslot=0xFE302, mathstretch = "h" } )
-addextra(0xFE303, { description="EXTENSIBLE OF 0x0303", unicodeslot=0xFE303, mathstretch = "h" } )
+addextra(0xFE302, { description="EXTENSIBLE OF 0x0302", unicodeslot=0xFE302, mathstretch = "h", mathclass = "topaccent" } )
+addextra(0xFE303, { description="EXTENSIBLE OF 0x0303", unicodeslot=0xFE303, mathstretch = "h", mathclass = "topaccent" } )
local function smashed(data,unicode,private)
local target = data.target
local height = target.parameters.xheight / 2
- local c = accent_to_extensible(target,private,data.original,unicode,height,0,nil,-height)
- c.top_accent = nil
+ local c, done = accent_to_extensible(target,private,data.original,unicode,height,0,nil,-height,unicode)
+ if done then
+ c.top_accent = nil -- or maybe also all the others
+ end
return c
end
@@ -466,15 +467,21 @@ virtualcharacters[0xFE302] = function(data) return smashed(data,0x0302,0xFE302)
virtualcharacters[0xFE303] = function(data) return smashed(data,0x0303,0xFE303) end
-- another crazy hack .. doesn't work as we define scrscr first .. we now have smaller
--- primes so we have smaller primes for the moment, big ones will become an option
+-- primes for the moment, big ones will become an option ..
+-- these primes in fonts are a real mess .. kind of a dead end, so don't wonder about
+-- the values below
+
+-- todo: check tounicodes
local function smashed(data,unicode,optional)
local oldchar = data.characters[unicode]
if oldchar then
- local height = 1.2 * data.target.parameters.xheight
+ local xheight = data.target.parameters.xheight
+ local height = 1.2 * xheight
+ local shift = oldchar.height - height
local newchar = {
commands = {
- { "down", oldchar.height - height },
+ { "down", shift },
{ "char", unicode },
},
height = height,
@@ -486,6 +493,30 @@ local function smashed(data,unicode,optional)
end
end
+-- -- relocate all but less flexible so not used .. instead some noad hackery plus
+-- -- the above
+--
+-- local function smashed(data,unicode,optional)
+-- local oldchar = data.characters[unicode]
+-- if oldchar then
+-- local xheight = data.target.parameters.xheight
+-- local height = oldchar.height
+-- local shift = oldchar.height < 1.5*xheight and -(1.8*xheight-height) or 0
+-- local newchar = {
+-- commands = {
+-- { "down", shift },
+-- { "char", unicode },
+-- },
+-- unicode = unicode,
+-- height = height,
+-- width = oldchar.width,
+-- }
+-- return newchar
+-- elseif not optional then
+-- report_fallbacks("missing %U prime in font %a",unicode,data.target.properties.fullname)
+-- end
+-- end
+
addextra(0xFE932, { description="SMASHED PRIME 0x02032", unicodeslot=0xFE932 } )
addextra(0xFE933, { description="SMASHED PRIME 0x02033", unicodeslot=0xFE933 } )
addextra(0xFE934, { description="SMASHED PRIME 0x02034", unicodeslot=0xFE934 } )
@@ -511,7 +542,7 @@ addextra(0xFE940, { category = "mn", description="SMALL ANNUITY SYMBOL", unicode
local function actuarian(data)
local characters = data.target.characters
local parameters = data.target.parameters
- local basechar = characters[0x0078] -- x (0x0058 X)
+ local basechar = characters[0x0078] -- x (0x0058 X) or 0x1D431
local linewidth = parameters.xheight / 10
local basewidth = basechar.width
local baseheight = basechar.height
@@ -519,7 +550,7 @@ local function actuarian(data)
-- todo: add alttext
-- compromise: lm has large hooks e.g. \actuarial{a}
width = basewidth + 4 * linewidth,
- tounicode = tounicode16(0x20E7),
+ unicode = 0x20E7,
commands = {
{ "right", 2 * linewidth },
{ "down", - baseheight - 3 * linewidth },
diff --git a/tex/context/base/math-fen.mkiv b/tex/context/base/math-fen.mkiv
index 94d93e4af..9570eac83 100644
--- a/tex/context/base/math-fen.mkiv
+++ b/tex/context/base/math-fen.mkiv
@@ -131,6 +131,14 @@
\definemathfence [angle] [\c!left="27E8,\c!right="27E9]
\definemathfence [doubleangle] [\c!left="27EA,\c!right="27EB]
\definemathfence [solidus] [\c!left="2044,\c!right="2044]
+\definemathfence [ceil] [\c!left="2308,\c!right="2309]
+\definemathfence [floor] [\c!left="230A,\c!right="230B]
+\definemathfence [moustache] [\c!left="23B0,\c!right="23B1]
+\definemathfence [uppercorner] [\c!left="231C,\c!right="231D]
+\definemathfence [lowercorner] [\c!left="231E,\c!right="231F]
+\definemathfence [group] [\c!left="27EE,\c!right="27EF]
+\definemathfence [openbracket] [\c!left="27E6,\c!right="27E7]
+
\definemathfence [nothing]
\definemathfence [mirrored] % \v!mirrored
@@ -144,36 +152,54 @@
\definemathfence [mirroredangle] [mirrored] [\c!right="27E8,\c!left="27E9]
\definemathfence [mirroreddoubleangle] [mirrored] [\c!right="27EA,\c!left="27EB]
\definemathfence [mirroredsolidus] [mirrored] [\c!right="2044,\c!left="2044]
-\definemathfence [mirrorednothing] [mirorred]
+\definemathfence [mirroredceil] [mirrored] [\c!right="2308,\c!left="2309]
+\definemathfence [mirroredfloor] [mirrored] [\c!right="230A,\c!left="230B]
+\definemathfence [mirroredmoustache] [mirrored] [\c!right="23B0,\c!left="23B1]
+\definemathfence [mirroreduppercorner] [mirrored] [\c!right="231C,\c!left="231D]
+\definemathfence [mirroredlowercorner] [mirrored] [\c!right="231E,\c!left="231F]
+\definemathfence [mirroredgroup] [mirrored] [\c!right="27EE,\c!left="27EF]
+\definemathfence [mirroredopenbracket] [mirrored] [\c!right="27E6,\c!left="27E7]
+
+\definemathfence [mirrorednothing] [mirrored]
%D A bonus:
-\unexpanded\def\Lparent {\math_fenced_fenced_start{parenthesis}} \unexpanded\def\Rparent {\math_fenced_fenced_stop{parenthesis}}
-\unexpanded\def\Lbracket {\math_fenced_fenced_start{bracket}} \unexpanded\def\Rbracket {\math_fenced_fenced_stop{bracket}}
-\unexpanded\def\Lbrace {\math_fenced_fenced_start{brace}} \unexpanded\def\Rbrace {\math_fenced_fenced_stop{brace}}
-\unexpanded\def\Langle {\math_fenced_fenced_start{angle}} \unexpanded\def\Rangle {\math_fenced_fenced_stop{angle}}
-\unexpanded\def\Ldoubleangle {\math_fenced_fenced_start{doubleangle}} \unexpanded\def\Rdoubleangle {\math_fenced_fenced_stop{doubleangle}}
-\unexpanded\def\Lbar {\math_fenced_fenced_start{bar}} \unexpanded\def\Rbar {\math_fenced_fenced_stop{bar}}
-\unexpanded\def\Ldoublebar {\math_fenced_fenced_start{doublebar}} \unexpanded\def\Rdoublebar {\math_fenced_fenced_stop{doublebar}}
-\unexpanded\def\Ltriplebar {\math_fenced_fenced_start{triplebar}} \unexpanded\def\Rtriplebar {\math_fenced_fenced_stop{triplebar}}
-\unexpanded\def\Lsolidus {\math_fenced_fenced_start{solidus}} \unexpanded\def\Rsolidus {\math_fenced_fenced_stop{solidus}}
-\unexpanded\def\Lnothing {\math_fenced_fenced_start{nothing}} \unexpanded\def\Rnothing {\math_fenced_fenced_stop{nothing}}
-
-\unexpanded\def\Lparentmirrored {\math_fenced_fenced_stop{mirroredparenthesis}} \unexpanded\def\Rparentmirrored {\math_fenced_fenced_start{mirroredparenthesis}}
-\unexpanded\def\Lbracketmirrored {\math_fenced_fenced_stop{mirroredbracket}} \unexpanded\def\Rbracketmirrored {\math_fenced_fenced_start{mirroredbracket}}
-\unexpanded\def\Lbracemirrored {\math_fenced_fenced_stop{mirroredbrace}} \unexpanded\def\Rbracemirrored {\math_fenced_fenced_start{mirroredbrace}}
-\unexpanded\def\Langlemirrored {\math_fenced_fenced_stop{mirroredangle}} \unexpanded\def\Ranglemirrored {\math_fenced_fenced_start{mirroredangle}}
-\unexpanded\def\Ldoubleanglemirrored {\math_fenced_fenced_stop{mirroreddoubleangle}} \unexpanded\def\Rdoubleanglemirrored {\math_fenced_fenced_start{mirroreddoubleangle}}
-\unexpanded\def\Lbarmirrored {\math_fenced_fenced_stop{mirroredbar}} \unexpanded\def\Rbarmirrored {\math_fenced_fenced_start{mirroredbar}}
-\unexpanded\def\Ldoublebarmirrored {\math_fenced_fenced_stop{mirroreddoublebar}} \unexpanded\def\Rdoublebarmirrored {\math_fenced_fenced_start{mirroreddoublebar}}
-\unexpanded\def\Ltriplebarmirrored {\math_fenced_fenced_stop{mirroredtriplebar}} \unexpanded\def\Rtriplebarmirrored {\math_fenced_fenced_start{mirroredtriplebar}}
-\unexpanded\def\Lsolidusmirrored {\math_fenced_fenced_stop{mirroredsolidus}} \unexpanded\def\Rsolidusmirrored {\math_fenced_fenced_start{mirroredsolidus}}
-\unexpanded\def\Lnothingmirrored {\math_fenced_fenced_stop{mirrorednothing}} \unexpanded\def\Rnothingmirrored {\math_fenced_fenced_start{mirrorednothing}}
-
-%D And another one:
+\unexpanded\def\Lparent {\math_fenced_fenced_start{parenthesis}} \unexpanded\def\Rparent {\math_fenced_fenced_stop {parenthesis}}
+\unexpanded\def\Lbracket {\math_fenced_fenced_start{bracket}} \unexpanded\def\Rbracket {\math_fenced_fenced_stop {bracket}}
+\unexpanded\def\Lbrace {\math_fenced_fenced_start{brace}} \unexpanded\def\Rbrace {\math_fenced_fenced_stop {brace}}
+\unexpanded\def\Langle {\math_fenced_fenced_start{angle}} \unexpanded\def\Rangle {\math_fenced_fenced_stop {angle}}
+\unexpanded\def\Ldoubleangle {\math_fenced_fenced_start{doubleangle}} \unexpanded\def\Rdoubleangle {\math_fenced_fenced_stop {doubleangle}}
+\unexpanded\def\Lbar {\math_fenced_fenced_start{bar}} \unexpanded\def\Rbar {\math_fenced_fenced_stop {bar}}
+\unexpanded\def\Ldoublebar {\math_fenced_fenced_start{doublebar}} \unexpanded\def\Rdoublebar {\math_fenced_fenced_stop {doublebar}}
+\unexpanded\def\Ltriplebar {\math_fenced_fenced_start{triplebar}} \unexpanded\def\Rtriplebar {\math_fenced_fenced_stop {triplebar}}
+\unexpanded\def\Lsolidus {\math_fenced_fenced_start{solidus}} \unexpanded\def\Rsolidus {\math_fenced_fenced_stop {solidus}}
+\unexpanded\def\Lfloor {\math_fenced_fenced_start{floor}} \unexpanded\def\Rfloor {\math_fenced_fenced_stop {floor}}
+\unexpanded\def\Lceil {\math_fenced_fenced_start{ceil}} \unexpanded\def\Rceil {\math_fenced_fenced_stop {ceil}}
+\unexpanded\def\Lmoustache {\math_fenced_fenced_start{moustache}} \unexpanded\def\Rmoustache {\math_fenced_fenced_stop {moustache}}
+\unexpanded\def\Luppercorner {\math_fenced_fenced_start{uppercorner}} \unexpanded\def\Ruppercorner {\math_fenced_fenced_stop {uppercorner}}
+\unexpanded\def\Llowercorner {\math_fenced_fenced_start{lowercorner}} \unexpanded\def\Rlowercorner {\math_fenced_fenced_stop {lowercorner}}
+\unexpanded\def\Lgroup {\math_fenced_fenced_start{group}} \unexpanded\def\Rgroup {\math_fenced_fenced_stop {group}}
+\unexpanded\def\Lopenbracket {\math_fenced_fenced_start{openbracket}} \unexpanded\def\Ropenbracket {\math_fenced_fenced_stop {openbracket}}
+\unexpanded\def\Lnothing {\math_fenced_fenced_start{nothing}} \unexpanded\def\Rnothing {\math_fenced_fenced_stop {nothing}}
+
+\unexpanded\def\Lparentmirrored {\math_fenced_fenced_stop {mirroredparenthesis}} \unexpanded\def\Rparentmirrored {\math_fenced_fenced_start{mirroredparenthesis}}
+\unexpanded\def\Lbracketmirrored {\math_fenced_fenced_stop {mirroredbracket}} \unexpanded\def\Rbracketmirrored {\math_fenced_fenced_start{mirroredbracket}}
+\unexpanded\def\Lbracemirrored {\math_fenced_fenced_stop {mirroredbrace}} \unexpanded\def\Rbracemirrored {\math_fenced_fenced_start{mirroredbrace}}
+\unexpanded\def\Langlemirrored {\math_fenced_fenced_stop {mirroredangle}} \unexpanded\def\Ranglemirrored {\math_fenced_fenced_start{mirroredangle}}
+\unexpanded\def\Ldoubleanglemirrored {\math_fenced_fenced_stop {mirroreddoubleangle}} \unexpanded\def\Rdoubleanglemirrored {\math_fenced_fenced_start{mirroreddoubleangle}}
+\unexpanded\def\Lbarmirrored {\math_fenced_fenced_stop {mirroredbar}} \unexpanded\def\Rbarmirrored {\math_fenced_fenced_start{mirroredbar}}
+\unexpanded\def\Ldoublebarmirrored {\math_fenced_fenced_stop {mirroreddoublebar}} \unexpanded\def\Rdoublebarmirrored {\math_fenced_fenced_start{mirroreddoublebar}}
+\unexpanded\def\Ltriplebarmirrored {\math_fenced_fenced_stop {mirroredtriplebar}} \unexpanded\def\Rtriplebarmirrored {\math_fenced_fenced_start{mirroredtriplebar}}
+\unexpanded\def\Lsolidusmirrored {\math_fenced_fenced_stop {mirroredsolidus}} \unexpanded\def\Rsolidusmirrored {\math_fenced_fenced_start{mirroredsolidus}}
+\unexpanded\def\Lfloormirrored {\math_fenced_fenced_stop {mirroredfloor}} \unexpanded\def\Rfloormirrored {\math_fenced_fenced_start{mirroredfloor}}
+\unexpanded\def\Lceilmirrored {\math_fenced_fenced_stop {mirroredceil}} \unexpanded\def\Rceilmirrored {\math_fenced_fenced_start{mirroredceil}}
+\unexpanded\def\Lmoustachemirrored {\math_fenced_fenced_stop {mirroredmoustache}} \unexpanded\def\Rmoustachemirrored {\math_fenced_fenced_start{mirroredmoustache}}
+\unexpanded\def\Luppercornermirrored {\math_fenced_fenced_stop {mirroreduppercorner}} \unexpanded\def\Ruppercornermirrored {\math_fenced_fenced_start{mirroreduppercorner}}
+\unexpanded\def\Llowercornermirrored {\math_fenced_fenced_stop {mirroredlowercorner}} \unexpanded\def\Rlowercornermirrored {\math_fenced_fenced_start{mirroredlowercorner}}
+\unexpanded\def\Lgroupmirrored {\math_fenced_fenced_stop {mirroredgroup}} \unexpanded\def\Rgroupmirrored {\math_fenced_fenced_start{mirroredgroup}}
+\unexpanded\def\Lopenbracketmirrored {\math_fenced_fenced_stop {mirroredopenbracket}} \unexpanded\def\Ropenbracketmirrored {\math_fenced_fenced_start{mirroredopenbracket}}
+\unexpanded\def\Lnothingmirrored {\math_fenced_fenced_stop {mirrorednothing}} \unexpanded\def\Rnothingmirrored {\math_fenced_fenced_start{mirrorednothing}}
-% \setupmathfences[color=darkgreen]
-%
% \startformula
% \left{ \frac{1}{a} \right}
% \left[ \frac{1}{b} \right]
@@ -193,6 +219,8 @@
\newconditional\c_math_fenced_done
\newconditional\c_math_fenced_unknown \settrue\c_math_fenced_unknown
+% maybe use \detokenize ...
+
\def\math_left
{\settrue\c_math_fenced_done
\edef\m_math_left{\meaning\nexttoken}%
@@ -208,6 +236,21 @@
\edef\m_math_middle{\meaning\nexttoken}%
\csname\??mathmiddle\ifcsname\??mathmiddle\m_math_middle\endcsname\m_math_middle\else\s!unknown\fi\endcsname}
+\unexpanded\def\lfence#1%
+ {\settrue\c_math_fenced_done
+ \edef\m_math_left{\meaning#1}%
+ \csname\??mathleft\ifcsname\??mathleft\m_math_left\endcsname\m_math_left\else\s!unknown\fi\endcsname}
+
+\unexpanded\def\rfence#1%
+ {\settrue\c_math_fenced_done
+ \edef\m_math_right{\meaning#1}%
+ \csname\??mathright\ifcsname\??mathright\m_math_right\endcsname\m_math_right\else\s!unknown\fi\endcsname}
+
+\unexpanded\def\mfence#1%
+ {\settrue\c_math_fenced_done
+ \edef\m_math_middle{\meaning#1}%
+ \csname\??mathmiddle\ifcsname\??mathmiddle\m_math_middle\endcsname\m_math_middle\else\s!unknown\fi\endcsname}
+
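+% Same lookup as \math_left and friends above, but with the fence token passed
+% explicitly as an argument instead of being picked up from \nexttoken.
+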
\setvalue{\??mathleft \s!unknown}{\setfalse\c_math_fenced_done\ifconditional\c_math_fenced_unknown\normalleft \nexttoken\fi}
\setvalue{\??mathright \s!unknown}{\setfalse\c_math_fenced_done\ifconditional\c_math_fenced_unknown\normalright \nexttoken\fi}
\setvalue{\??mathmiddle\s!unknown}{\setfalse\c_math_fenced_done\ifconditional\c_math_fenced_unknown\normalmiddle\nexttoken\fi}
@@ -216,67 +259,79 @@
{\expandafter\let\csname\??mathleft \meaning#1\endcsname#2%
\expandafter\let\csname\??mathright\meaning#3\endcsname#4}
-\expandafter\let\csname\??mathleft \meaning [\endcsname\Lbracket
-\expandafter\let\csname\??mathleft \meaning (\endcsname\Lparent
-\expandafter\let\csname\??mathleft \meaning <\endcsname\Langle
-\expandafter\let\csname\??mathleft \meaning ⟨\endcsname\Langle
-\expandafter\let\csname\??mathleft \meaning ⟪\endcsname\Ldoubleangle
-\expandafter\let\csname\??mathleft \meaning {\endcsname\Lbrace
-\expandafter\let\csname\??mathleft \meaning |\endcsname\Lbar
-\expandafter\let\csname\??mathleft \meaning ‖\endcsname\Ldoublebar
-\expandafter\let\csname\??mathleft \meaning ⦀\endcsname\Ltriplebar
-\expandafter\let\csname\??mathleft \meaning /\endcsname\Lsolidus
-\expandafter\let\csname\??mathleft \meaning .\endcsname\Lnothing
-
-\expandafter\let\csname\??mathright\meaning ]\endcsname\Rbracket
-\expandafter\let\csname\??mathright\meaning )\endcsname\Rparent
-\expandafter\let\csname\??mathright\meaning >\endcsname\Rangle
-\expandafter\let\csname\??mathright\meaning ⟩\endcsname\Rangle
-\expandafter\let\csname\??mathright\meaning ⟫\endcsname\Rdoubleangle
-\expandafter\let\csname\??mathright\meaning }\endcsname\Rbrace
-\expandafter\let\csname\??mathright\meaning |\endcsname\Rbar
-\expandafter\let\csname\??mathright\meaning ⦀\endcsname\Rtriplebar
-\expandafter\let\csname\??mathright\meaning /\endcsname\Rsolidus
-\expandafter\let\csname\??mathright\meaning .\endcsname\Rnothing
-
-\expandafter\let\csname\??mathright\meaning [\endcsname\Lbracketmirrored
-\expandafter\let\csname\??mathright\meaning (\endcsname\Lparentmirrored
-\expandafter\let\csname\??mathright\meaning <\endcsname\Langlemirrored
-\expandafter\let\csname\??mathright\meaning ⟨\endcsname\Langlemirrored
-\expandafter\let\csname\??mathright\meaning ⟪\endcsname\Ldoubleanglemirrored
-\expandafter\let\csname\??mathright\meaning {\endcsname\Lbracemirrored
-%expandafter\let\csname\??mathright\meaning |\endcsname\Lbarmirrored
-%expandafter\let\csname\??mathright\meaning ‖\endcsname\Ldoublebarmirrored
-%expandafter\let\csname\??mathright\meaning ⦀\endcsname\Ltriplebarmirrored
-\expandafter\let\csname\??mathright\meaning /\endcsname\Lsolidusmirrored
-\expandafter\let\csname\??mathright\meaning .\endcsname\Lnothingmirrored
-
-\expandafter\let\csname\??mathleft \meaning ]\endcsname\Rbracketmirrored
-\expandafter\let\csname\??mathleft \meaning )\endcsname\Rparentmirrored
-\expandafter\let\csname\??mathleft \meaning >\endcsname\Ranglemirrored
-\expandafter\let\csname\??mathleft \meaning ⟩\endcsname\Ranglemirrored
-\expandafter\let\csname\??mathleft \meaning ⟫\endcsname\Rdoubleanglemirrored
-\expandafter\let\csname\??mathleft \meaning }\endcsname\Rbracemirrored
-%expandafter\let\csname\??mathleft \meaning |\endcsname\Rbarmirrored
-%expandafter\let\csname\??mathleft \meaning ‖\endcsname\Rdoublebarmirrored
-%expandafter\let\csname\??mathleft \meaning ⦀\endcsname\Rtriplebarmirrored
-\expandafter\let\csname\??mathleft \meaning /\endcsname\Rsolidusmirrored
-\expandafter\let\csname\??mathleft \meaning .\endcsname\Rnothingmirrored
+\normalexpanded{\installmathfencepair {|\detokenize {|}} \Ldoublebar {|\detokenize {|}} \Rdoublebar}
+\normalexpanded{\installmathfencepair {|\detokenize{||}} \Ltriplebar {|\detokenize{||}} \Rtriplebar}
+
+\installmathfencepair \bgroup \Lbrace \egroup \Rbrace
+\installmathfencepair \egroup \Rbracemirrored \bgroup \Lbracemirrored
+
+\installmathfencepair . \Lnothing . \Rnothing
+\installmathfencepair . \Rnothingmirrored . \Lnothingmirrored
+
+\installmathfencepair [ \Lbracket ] \Rbracket
+\installmathfencepair ] \Rbracketmirrored [ \Lbracketmirrored
+
+\installmathfencepair ( \Lparent ) \Rparent
+\installmathfencepair ) \Rparentmirrored ( \Lparentmirrored
+
+\installmathfencepair < \Langle > \Rangle
+\installmathfencepair > \Ranglemirrored < \Langlemirrored
+
+\installmathfencepair / \Lsolidus / \Rsolidus
+%installmathfencepair / \Rsolidusmirrored / \Lsolidusmirrored
+
+\installmathfencepair | \Lbar | \Rbar
+%installmathfencepair | \Rbarmirrored | \Lbarmirrored
+
+\installmathfencepair ⌊ \Lfloor ⌋ \Rfloor
+\installmathfencepair ⌋ \Rfloormirrored ⌊ \Lfloormirrored
+\installmathfencepair ⌈ \Lceil ⌉ \Rceil
+\installmathfencepair ⌉ \Rceilmirrored ⌈ \Lceilmirrored
+
+\installmathfencepair ⟨ \Langle ⟩ \Rangle
+\installmathfencepair ⟩ \Ranglemirrored ⟨ \Langlemirrored
+
+\installmathfencepair ⟪ \Ldoubleangle ⟫ \Rdoubleangle
+\installmathfencepair ⟫ \Rdoubleanglemirrored ⟪ \Ldoubleanglemirrored
+
+\installmathfencepair ‖ \Ldoublebar ‖ \Rdoublebar
+%installmathfencepair ‖ \Rdoublebarmirrored ‖ \Ldoublebarmirrored
+
+\installmathfencepair ⦀ \Ltriplebar ⦀ \Rtriplebar
+%installmathfencepair ⦀ \Rtriplebarmirrored ⦀ \Ltriplebarmirrored
+
+% \installmathfencepair { \Lbrace } \Rbrace
+% \installmathfencepair } \Rbracemirrored { \Lbracemirrored
+
+\appendtoks
+ \ignorediscretionaries % so $\mtext{a|b}$ works, this is ok because it's an \hbox
+\to \everymathematics
% todo paren parent
\let\lbrack\lbracket
\let\rbrack\rbracket
-\installmathfencepair \lbrace \Lbrace \rbrace \Rbrace
-\installmathfencepair \lbracket \Lbracket \rbracket \Rbracket
-\installmathfencepair \lparen \Lparen \rparen \Rparen
-\installmathfencepair \lparent \Lparent \rparent \Rparent
-\installmathfencepair \langle \Langle \rangle \Rangle
-%installmathfencepair \lrangle \Ldoubleangle \rrangle \Rdoubleangle
-%installmathfencepair \lbar \Lbar \rbar \Rbar
-\installmathfencepair \vert \Lbar \vert \Rbar
-\installmathfencepair \solidus \Lsolidus \solidus \Rsolidus
+\installmathfencepair \lbrace \Lbrace \rbrace \Rbrace
+\installmathfencepair \lbracket \Lbracket \rbracket \Rbracket
+\installmathfencepair \lparen \Lparen \rparen \Rparen
+\installmathfencepair \lparent \Lparent \rparent \Rparent
+\installmathfencepair \langle \Langle \rangle \Rangle
+\installmathfencepair \lrangle \Ldoubleangle \rrangle \Rdoubleangle
+\installmathfencepair \lbar \Lbar \rbar \Rbar
+\installmathfencepair \lVert \Ldoublebar \rVert \Rdoublebar
+\installmathfencepair \vert \Lbar \vert \Rbar
+\installmathfencepair \solidus \Lsolidus \solidus \Rsolidus
+\installmathfencepair \lfloor \Lfloor \rfloor \Rfloor
+\installmathfencepair \lceil \Lceil \rceil \Rceil
+
+\installmathfencepair \ulcorner \Luppercorner \ulcorner \Ruppercorner
+\installmathfencepair \llcorner \Llowercorner \llcorner \Rlowercorner
+\installmathfencepair \lmoustache \Lmoustache \lmoustache \Rmoustache
+\installmathfencepair \llbracket \Lopenbracket \llbracket \Ropenbracket
+\installmathfencepair \lgroup \Lgroup \lgroup \Rgroup
+
+% \setupmathfences[color=darkgreen]
\unexpanded\def\{{\mathortext\lbrace \letterleftbrace } % or maybe a chardef
\unexpanded\def\}{\mathortext\rbrace \letterrightbrace } % or maybe a chardef
diff --git a/tex/context/base/math-frc.lua b/tex/context/base/math-frc.lua
index 4f531a530..639edc94b 100644
--- a/tex/context/base/math-frc.lua
+++ b/tex/context/base/math-frc.lua
@@ -25,13 +25,13 @@ table.setmetatableindex(resolved, function(t,k)
return v
end)
-local normalatop = context.normalatop
-local normalover = context.normalover
+local ctx_normalatop = context.normalatop
+local ctx_normalover = context.normalover
-function commands.math_frac(how,left,right,width)
- if how == v_no then
+local function mathfraction(how,left,right,width) -- of course we could use the scanners directly here which
+ if how == v_no then -- is faster but also less abstract ... maybe some day
if left == 0x002E and right == 0x002E then
- normalatop()
+ ctx_normalatop()
else
context("\\atopwithdelims%s%s",resolved[left],resolved[right])
end
@@ -43,9 +43,15 @@ function commands.math_frac(how,left,right,width)
end
else -- v_auto
if left == 0x002E and right == 0x002E then
- normalover()
+ ctx_normalover()
else
context("\\overwithdelims%s%s",resolved[left],resolved[right])
end
end
end
+
+interfaces.implement {
+ name = "mathfraction",
+ actions = mathfraction,
+ arguments = { "string", "number", "number", "dimen" }
+}
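+
+-- At the tex end this scanner becomes available as \clf_mathfraction; see the
+-- adapted \math_frac_command in math-frc.mkiv below.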
diff --git a/tex/context/base/math-frc.mkiv b/tex/context/base/math-frc.mkiv
index 65fa30942..cbe342b66 100644
--- a/tex/context/base/math-frc.mkiv
+++ b/tex/context/base/math-frc.mkiv
@@ -15,7 +15,14 @@
\unprotect
-%D todo: struts ... depends on demand
+%D I need to check it all again as there was a bug in luatex with dimensions that
+%D could result in side effects that made me mess with spacing.
+
+\unexpanded\def\topstrut{\vrule\s!width\zeropoint\s!height\strutht\s!depth\zeropoint\relax}
+\unexpanded\def\botstrut{\vrule\s!width\zeropoint\s!height\zeropoint\s!depth\strutdp\relax}
+
+\unexpanded\def\mathtopstrut{\setbox\scratchbox\mathstylehbox{(}\vrule\s!width\zeropoint\s!height\ht\scratchbox\s!depth\zeropoint\relax}
+\unexpanded\def\mathbotstrut{\setbox\scratchbox\mathstylehbox{(}\vrule\s!width\zeropoint\s!height\zeropoint\s!depth\dp\scratchbox\relax}
%D This module is reimplemented in \MKIV\ style.
@@ -106,19 +113,97 @@
\c!rulethickness=.25\exheight,
\c!left=0x2E,
\c!right=0x2E,
+ \c!strut=\v!yes,
+ \c!topdistance=,
+ \c!bottomdistance=,
\c!rule=\v!auto]
\appendtoks
\setuevalue{\currentmathfraction}{\math_frac{\currentmathfraction}}%
\to \everydefinemathfraction
+% Sometimes users want control over the distances:
+
+\let\math_fraction_set_distance\relax
+
+\appendtoks
+ \math_fraction_set_distance
+\to \everymathematics
+
+% why only displaystyle .. a bit weak
+
+\unexpanded\def\math_fraction_set_distance_top
+ {\Umathfractionnumup \displaystyle\m_math_fraction_distance_top
+ \relax}
+
+\unexpanded\def\math_fraction_set_distance_bot
+ {\Umathfractiondenomdown\displaystyle\m_math_fraction_distance_bot
+ \relax}
+
+\unexpanded\def\math_fraction_set_distance_all
+ {\Umathfractionnumup \displaystyle\m_math_fraction_distance_top
+ \Umathfractiondenomdown\displaystyle\m_math_fraction_distance_bot
+ \relax}
+
+\appendtoks
+ \ifx\currentmathfraction\empty
+ \edef\m_math_fraction_distance_top{\mathfractionparameter\c!topdistance}%
+ \edef\m_math_fraction_distance_bot{\mathfractionparameter\c!bottomdistance}%
+ \ifx\m_math_fraction_distance_top\empty
+ \ifx\m_math_fraction_distance_bot\empty
+ \let\math_fraction_set_distance\relax
+ \else
+ \let\math_fraction_set_distance\math_fraction_set_distance_bot
+ \fi
+ \else
+ \ifx\m_math_fraction_distance_bot\empty
+ \let\math_fraction_set_distance\math_fraction_set_distance_top
+ \else
+ \let\math_fraction_set_distance\math_fraction_set_distance_all
+ \fi
+ \fi
+ \fi
+\to \everysetupmathfraction
+
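+% An (untested) example of such a setup; the values are dimensions and, as remarked
+% above, they currently only kick in for display style:
+%
+% \setupmathfractions[topdistance=.25\exheight,bottomdistance=.25\exheight]
+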
+% So far for control.
+
+\installcorenamespace{mathfractionstrut}
+
+\setvalue{\??mathfractionstrut\v!yes}%
+ {\let\m_fractions_strut_top\mathstrut
+ \let\m_fractions_strut_bot\mathstrut}
+
+\setvalue{\??mathfractionstrut\v!math}%
+ {\let\m_fractions_strut_top\mathstrut
+ \let\m_fractions_strut_bot\mathstrut}
+
+\setvalue{\??mathfractionstrut\v!no}%
+ {\let\m_fractions_strut_top\relax
+ \let\m_fractions_strut_bot\relax}
+
+\setvalue{\??mathfractionstrut\v!tight}%
+ {\let\m_fractions_strut_top\mathbotstrut % indeed swapped name
+ \let\m_fractions_strut_bot\mathtopstrut} % indeed swapped name
+
+\let\m_fractions_strut_top\relax
+\let\m_fractions_strut_bot\relax
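+
+% e.g. (untested): \definemathfraction[tightfrac][strut=tight] % or no, yes, math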
+
\newdimen\d_math_fraction_margin
\unexpanded\def\math_frac#1%
{\begingroup
\edef\currentmathfraction{#1}%
+ %
\d_math_fraction_margin\mathfractionparameter\c!margin
+ %
\edef\p_math_fractions_color{\mathfractionparameter\c!color}%
+ %
+ \edef\p_math_fractions_strut{\mathfractionparameter\c!strut}%
+ \csname
+ \??mathfractionstrut
+ \ifcsname\??mathfractionstrut\p_math_fractions_strut\endcsname\p_math_fractions_strut\else\v!no\fi
+ \endcsname
+ %
\ifx\p_math_fractions_color\empty
\expandafter\math_frac_normal
\else
@@ -136,18 +221,17 @@
% we use utfchar anyway so we can as well do all at the lua end
\def\math_frac_command
- {\ctxcommand{math_frac(%
- "\mathfractionparameter\c!rule",%
- \number\mathfractionparameter\c!left,%
- \number\mathfractionparameter\c!right,%
- \number\dimexpr\mathfractionparameter\c!rulethickness%
- )}}
-
-% Having a \withmarginornot{#1}{#2} makes not much sense nor do
-% 4 tests or 4 redundant kerns (longer node lists plus possible
-% interference). A split in normal and margin also makes testing
-% easier. When left and right margins are needed we might merge
-% the variants again. After all, these are not real installers.
+ {\clf_mathfraction
+ {\mathfractionparameter\c!rule}%
+ \mathfractionparameter\c!left\space
+ \mathfractionparameter\c!right\space
+ \dimexpr\mathfractionparameter\c!rulethickness\relax
+ \relax}
+
+% Having a \withmarginornot{#1}{#2} makes not much sense nor do 4 tests or 4 redundant
+% kerns (longer node lists plus possible interference). A split in normal and margin
+% also makes testing easier. When left and right margins are needed we might merge the
+% variants again. After all, these are not real installers.
\setvalue{\??mathfractionalternative\v!inner}%
{\ifcase\d_math_fraction_margin
@@ -156,24 +240,6 @@
\expandafter\math_fraction_inner_margin
\fi}
-\def\math_fraction_inner_normal#1#2%
- {\Ustack{%
- {\usemathstyleparameter\mathfractionparameter{#1}}% we should store this one
- \math_frac_command
- {\usemathstyleparameter\mathfractionparameter{#2}}% and reuse it here
- }\endgroup}
-
-\def\math_fraction_inner_margin#1#2%
- {\Ustack{%
- {\kern\d_math_fraction_margin
- \usemathstyleparameter\mathfractionparameter{#1}% we should store this one
- \kern\d_math_fraction_margin}%
- \math_frac_command
- {\kern\d_math_fraction_margin
- \usemathstyleparameter\mathfractionparameter{#2}% and reuse it here
- \kern\d_math_fraction_margin}%
- }\endgroup}
-
\setvalue{\??mathfractionalternative\v!outer}%
{\ifcase\d_math_fraction_margin
\expandafter\math_fraction_outer_normal
@@ -181,21 +247,84 @@
\expandafter\math_fraction_outer_margin
\fi}
+\setvalue{\??mathfractionalternative\v!both}%
+ {\ifcase\d_math_fraction_margin
+ \expandafter\math_fraction_both_normal
+ \else
+ \expandafter\math_fraction_both_margin
+ \fi}
+
+% todo: store first state and reuse second time
+
+\def\math_fraction_inner_normal#1#2%
+ {\Ustack{%
+ {%
+ {\usemathstyleparameter\mathfractionparameter{\m_fractions_strut_top#1}}%
+ \math_frac_command
+ {\usemathstyleparameter\mathfractionparameter{\m_fractions_strut_bot#2}}%
+ }%
+ }\endgroup}
+
\def\math_fraction_outer_normal#1#2%
{\Ustack{%
\usemathstyleparameter\mathfractionparameter
- {{#1}\math_frac_command{#2}}%
+ {%
+ {\m_fractions_strut_top#1}%
+ \math_frac_command
+ {\m_fractions_strut_bot#2}%
+ }%
+ }\endgroup}
+
+\def\math_fraction_both_normal#1#2%
+ {\Ustack{%
+ \usemathstyleparameter\mathfractionparameter
+ {%
+ {\usemathstyleparameter\mathfractionparameter\m_fractions_strut_top#1}%
+ \math_frac_command
+ {\usemathstyleparameter\mathfractionparameter\m_fractions_strut_bot#2}%
+ }%
+ }\endgroup}
+
+\def\math_fraction_inner_margin#1#2%
+ {\Ustack{%
+ {%
+ {\kern\d_math_fraction_margin
+ \usemathstyleparameter\mathfractionparameter{\m_fractions_strut_top#1}%
+ \kern\d_math_fraction_margin}%
+ \math_frac_command
+ {\kern\d_math_fraction_margin
+ \usemathstyleparameter\mathfractionparameter{\m_fractions_strut_bot#2}%
+ \kern\d_math_fraction_margin}%
+ }%
}\endgroup}
\def\math_fraction_outer_margin#1#2%
{\Ustack{%
\usemathstyleparameter\mathfractionparameter
- {{\kern\d_math_fraction_margin#1\kern\d_math_fraction_margin}%
- \math_frac_command
- {\kern\d_math_fraction_margin#2\kern\d_math_fraction_margin}}%
+ {%
+ {\kern\d_math_fraction_margin
+ \m_fractions_strut_top#1%
+ \kern\d_math_fraction_margin}%
+ \math_frac_command
+ {\kern\d_math_fraction_margin
+ \m_fractions_strut_bot#2%
+ \kern\d_math_fraction_margin}%
+ }%
}\endgroup}
-\definemathfraction[frac][\c!mathstyle=]
+\def\math_fraction_both_margin#1#2%
+ {\Ustack{%
+ \usemathstyleparameter\mathfractionparameter
+ {%
+ {\kern\d_math_fraction_margin
+ \usemathstyleparameter\mathfractionparameter\m_fractions_strut_top#1%
+ \kern\d_math_fraction_margin}%
+ \math_frac_command
+ {\kern\d_math_fraction_margin
+ \usemathstyleparameter\mathfractionparameter\m_fractions_strut_bot#2%
+ \kern\d_math_fraction_margin}%
+ }%
+ }\endgroup}
\unexpanded\def\xfrac {\begingroup\let\xfrac\xxfrac\math_frac_alternative\scriptstyle}
\unexpanded\def\xxfrac{\begingroup \math_frac_alternative\scriptscriptstyle}
@@ -206,6 +335,40 @@
{\begingroup
\math_frac_alternative\scriptscriptstyle{#1}{\raise.25\exheight\hbox{$\scriptscriptstyle#2$}}}
+%D Spacing:
+
+\unexpanded\def\nomathfractiongaps {\normalexpanded{\math_no_fraction_gaps \triggermathstyle\mathstyle}} % maybe collect settings
+\unexpanded\def\overlaymathfractiongaps{\normalexpanded{\math_overlay_fraction_gaps\triggermathstyle\mathstyle}} % maybe collect settings
+
+\unexpanded\def\math_no_fraction_gaps#1%
+ {\Umathfractionnumup #1\zeropoint
+ \Umathfractiondenomdown#1\zeropoint}
+
+\unexpanded\def\math_overlay_fraction_gaps#1%
+ {\Umathfractionnumup #1\zeropoint
+ \Umathfractionnumvgap #1\zeropoint
+ %Umathfractionrule #1\zeropoint
+ \Umathfractiondenomvgap#1\zeropoint
+ \Umathfractiondenomdown#1\zeropoint}
+
+\installcorenamespace{mathfractiondistance}
+
+\letvalue{\??mathfractiondistance\v!none }\nomathfractiongaps
+\letvalue{\??mathfractiondistance\v!no }\nomathfractiongaps
+\letvalue{\??mathfractiondistance\v!overlay}\overlaymathfractiongaps
+
+\setupmathfractions
+ [\c!distance=\v!none]
+
+\appendtoks
+ \edef\p_distance{\rootmathfractionparameter\c!distance}%
+ \ifx\p_distance\empty\else
+ \ifcsname\??mathfractiondistance\p_distance\endcsname
+ \csname\??mathfractiondistance\p_distance\endcsname
+ \fi
+ \fi
+\to \everymathematics
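+
+% So the gaps can be killed or overlaid globally, for instance (untested):
+%
+% \setupmathfractions[distance=overlay] % or distance=none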
+
%D \macros
%D {dfrac, tfrac, frac, dbinom, tbinom, binom}
%D
@@ -232,9 +395,36 @@
% \unexpanded\def\dfrac #1#2{{\displaystyle {{#1}\normalover {#2}}}}
% \unexpanded\def\tfrac #1#2{{\textstyle {{#1}\normalover {#2}}}}
-\definemathfraction[dfrac][\c!alternative=\v!outer,\c!mathstyle=\s!display]
-\definemathfraction[tfrac][\c!alternative=\v!outer,\c!mathstyle=\s!text]
-\definemathfraction[sfrac][\c!alternative=\v!outer,\c!mathstyle=\s!script]
+\definemathfraction[i:frac] [\c!alternative=\v!inner,\c!mathstyle=] % was script and then small but nothing needed
+\definemathfraction[i:tfrac][\c!alternative=\v!inner,\c!mathstyle=\s!text] % was script (before luatex fix)
+\definemathfraction[i:sfrac][\c!alternative=\v!inner,\c!mathstyle=\s!scriptscript]
+\definemathfraction[i:dfrac][\c!alternative=\v!inner,\c!mathstyle=\s!display]
+
+\definemathfraction[d:frac] [\c!alternative=\v!inner,\c!mathstyle=\s!cramped] % was cramped,text
+\definemathfraction[d:tfrac][\c!alternative=\v!both ,\c!mathstyle={\s!cramped,\s!text}] % was cramped,script (before luatex fix)
+\definemathfraction[d:sfrac][\c!alternative=\v!both ,\c!mathstyle={\s!cramped,\s!scriptscript}]
+\definemathfraction[d:dfrac][\c!alternative=\v!inner,\c!mathstyle=\s!display]
+
+%D \unexpanded\def\ShowMathFractions#1#2%
+%D {\dontleavehmode
+%D \begingroup
+%D \showmathstruts
+%D \mathematics{x+\tfrac{#1}{#2}+1+\frac{#1}{#2}+2+\sfrac{#1}{#2}+g}%
+%D \endgroup}
+%D
+%D The default \type {tfrac}, \type {frac} and \type {sfrac} look like this:
+%D
+%D \blank
+%D \ShowMathFractions{a}{a}\par
+%D \ShowMathFractions{1}{x}\par
+%D \ShowMathFractions{a}{b}\par
+%D \ShowMathFractions{1}{b}\par
+%D \blank
+
+\unexpanded\def\frac {\csname\inlineordisplaymath id:frac\endcsname}
+\unexpanded\def\tfrac{\csname\inlineordisplaymath id:tfrac\endcsname}
+\unexpanded\def\sfrac{\csname\inlineordisplaymath id:sfrac\endcsname}
+\unexpanded\def\dfrac{\csname\inlineordisplaymath id:dfrac\endcsname}
% \definemathfraction[ddfrac][\c!mathstyle=\s!display]
% \definemathfraction[ttfrac][\c!mathstyle=\s!text]
@@ -274,7 +464,7 @@
%D \getbuffer
\unexpanded\def\cfrac
- {\doifnextoptionalelse\math_cfrac_yes\math_cfrac_nop}
+ {\doifelsenextoptionalcs\math_cfrac_yes\math_cfrac_nop}
\def\math_cfrac_nop {\math_cfrac_indeed[cc]}
\def\math_cfrac_yes[#1]{\math_cfrac_indeed[#1cc]}
@@ -319,6 +509,11 @@
\unexpanded\def\splitdfrac#1#2%
{{\displaystyle{{ #1\quad\hfill}\normalabove\zeropoint{ \hfill\quad\mathstrut#2}}}}
+%D For testing:
+
+% \unexpanded\def\ShowMathFractions#1#2%
+% {\mathematics{x+\tfrac{#1}{#2}+1+\frac{#1}{#2}+2+\sfrac{#1}{#2}+g}}
+
\protect \endinput
% I have no clue what \mthfrac and \mthsqrt are supposed to do but
diff --git a/tex/context/base/math-ini.lua b/tex/context/base/math-ini.lua
index 6be06e634..e6a35c39e 100644
--- a/tex/context/base/math-ini.lua
+++ b/tex/context/base/math-ini.lua
@@ -18,30 +18,33 @@ if not modules then modules = { } end modules ['math-ini'] = {
local formatters, find = string.formatters, string.find
local utfchar, utfbyte, utflength = utf.char, utf.byte, utf.length
local floor = math.floor
+local toboolean = toboolean
-local context = context
-local commands = commands
+local context = context
+local commands = commands
+local implement = interfaces.implement
-local contextsprint = context.sprint
-local contextfprint = context.fprint -- a bit inefficient
+local context_sprint = context.sprint
+----- context_fprint = context.fprint -- a bit inefficient
+local ctx_doifelsesomething = commands.doifelsesomething
-local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end)
+local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end)
-local report_math = logs.reporter("mathematics","initializing")
+local report_math = logs.reporter("mathematics","initializing")
-mathematics = mathematics or { }
-local mathematics = mathematics
+mathematics = mathematics or { }
+local mathematics = mathematics
-mathematics.extrabase = 0xFE000 -- here we push some virtuals
-mathematics.privatebase = 0xFF000 -- here we push the ex
+mathematics.extrabase = 0xFE000 -- here we push some virtuals
+mathematics.privatebase = 0xFF000 -- here we push the ex
-local unsetvalue = attributes.unsetvalue
-local allocate = utilities.storage.allocate
-local chardata = characters.data
+local unsetvalue = attributes.unsetvalue
+local allocate = utilities.storage.allocate
+local chardata = characters.data
-local texsetattribute = tex.setattribute
-local setmathcode = tex.setmathcode
-local setdelcode = tex.setdelcode
+local texsetattribute = tex.setattribute
+local setmathcode = tex.setmathcode
+local setdelcode = tex.setdelcode
local families = allocate {
mr = 0,
@@ -213,28 +216,28 @@ local f_char = formatters[ [[\Umathchardef\%s "%X "%X "%X ]] ]
local setmathsymbol = function(name,class,family,slot) -- hex is nicer for tracing
if class == classes.accent then
- contextsprint(f_accent(name,family,slot))
+ context_sprint(f_accent(name,family,slot))
elseif class == classes.topaccent then
- contextsprint(f_topaccent(name,family,slot))
+ context_sprint(f_topaccent(name,family,slot))
elseif class == classes.botaccent then
- contextsprint(f_botaccent(name,family,slot))
+ context_sprint(f_botaccent(name,family,slot))
elseif class == classes.over then
- contextsprint(f_over(name,family,slot))
+ context_sprint(f_over(name,family,slot))
elseif class == classes.under then
- contextsprint(f_under(name,family,slot))
+ context_sprint(f_under(name,family,slot))
elseif class == open_class or class == close_class or class == middle_class then
setdelcode("global",slot,{family,slot,0,0})
- contextsprint(f_fence(name,class,family,slot))
+ context_sprint(f_fence(name,class,family,slot))
elseif class == classes.delimiter then
setdelcode("global",slot,{family,slot,0,0})
- contextsprint(f_delimiter(name,family,slot))
+ context_sprint(f_delimiter(name,family,slot))
elseif class == classes.radical then
- contextsprint(f_radical(name,family,slot))
+ context_sprint(f_radical(name,family,slot))
elseif class == classes.root then
- contextsprint(f_root(name,family,slot))
+ context_sprint(f_root(name,family,slot))
else
-- beware, open/close and other specials should not end up here
- contextsprint(f_char(name,class,family,slot))
+ context_sprint(f_char(name,class,family,slot))
end
end
@@ -350,10 +353,12 @@ local utf8byte = lpeg.patterns.utf8byte * lpeg.P(-1)
local somechar = { }
table.setmetatableindex(somechar,function(t,k)
- local b = lpegmatch(utf8byte,k)
- local v = b and chardata[b] or false
- t[k] = v
- return v
+ if k then
+ local b = lpegmatch(utf8byte,k)
+ local v = b and chardata[b] or false
+ t[k] = v
+ return v
+ end
end)
local function utfmathclass(chr, default)
@@ -462,7 +467,7 @@ end
local function utfmathfiller(chr, default)
local cd = somechar[chr]
- local cmd = cd and (cd.mathfiller or cd.mathname)
+ local cmd = cd and cd.mathfiller -- or cd.mathname
return cmd or default or ""
end
@@ -470,23 +475,76 @@ mathematics.utfmathclass = utfmathclass
mathematics.utfmathstretch = utfmathstretch
mathematics.utfmathcommand = utfmathcommand
mathematics.utfmathfiller = utfmathfiller
+mathematics.utfmathaccent = utfmathaccent
-- interfaced
-function commands.utfmathclass (...) context(utfmathclass (...)) end
-function commands.utfmathstretch(...) context(utfmathstretch(...)) end
-function commands.utfmathcommand(...) context(utfmathcommand(...)) end
-function commands.utfmathfiller (...) context(utfmathfiller (...)) end
+implement {
+ name = "utfmathclass",
+ actions = { utfmathclass, context },
+ arguments = "string"
+}
-function commands.doifelseutfmathaccent(chr,asked)
- commands.doifelse(utfmathaccent(chr,nil,asked))
-end
+implement {
+ name = "utfmathstretch",
+ actions = { utfmathstretch, context },
+ arguments = "string"
+}
-function commands.utfmathcommandabove(asked) context(utfmathcommand(asked,nil,"topaccent","over" )) end
-function commands.utfmathcommandbelow(asked) context(utfmathcommand(asked,nil,"botaccent","under")) end
+implement {
+ name = "utfmathcommand",
+ actions = { utfmathcommand, context },
+ arguments = "string"
+}
-function commands.doifelseutfmathabove(chr) commands.doifelse(utfmathaccent(chr,nil,"topaccent","over" )) end
-function commands.doifelseutfmathbelow(chr) commands.doifelse(utfmathaccent(chr,nil,"botaccent","under")) end
+implement {
+ name = "utfmathfiller",
+ actions = { utfmathfiller, context },
+ arguments = "string"
+}
+
+implement {
+ name = "utfmathcommandabove",
+ actions = { utfmathcommand, context },
+ arguments = { "string", false, "'topaccent'","'over'" }
+}
+
+implement {
+ name = "utfmathcommandbelow",
+ actions = { utfmathcommand, context },
+ arguments = { "string", false, "'botaccent'","'under'" }
+}
+implement {
+ name = "utfmathcommandfiller",
+ actions = { utfmathfiller, context },
+ arguments = "string"
+}
+
+-- todo: make this a helper:
+
+implement {
+ name = "doifelseutfmathabove",
+ actions = { utfmathaccent, ctx_doifelsesomething },
+ arguments = { "string", false, "'topaccent'", "'over'" }
+}
+
+implement {
+ name = "doifelseutfmathbelow",
+ actions = { utfmathaccent, ctx_doifelsesomething },
+ arguments = { "string", false, "'botaccent'", "'under'" }
+}
+
+implement {
+ name = "doifelseutfmathaccent",
+ actions = { utfmathaccent, ctx_doifelsesomething },
+ arguments = "string",
+}
+
+implement {
+ name = "doifelseutfmathfiller",
+ actions = { utfmathfiller, ctx_doifelsesomething },
+ arguments = "string",
+}
-- helpers
--
@@ -596,9 +654,20 @@ local noffunctions = 1000 -- offset
categories.functions = functions
-function commands.taggedmathfunction(tag,label,apply)
- local delta = apply and 1000 or 0
- if label then
+implement {
+ name = "tagmfunctiontxt",
+ arguments = { "string", "conditional" },
+ actions = function(tag,apply)
+ local delta = apply and 1000 or 0
+ texsetattribute(a_mathcategory,1000 + delta)
+ end
+}
+
+implement {
+ name = "tagmfunctionlab",
+ arguments = { "string", "conditional" },
+ actions = function(tag,apply)
+ local delta = apply and 1000 or 0
local n = functions[tag]
if not n then
noffunctions = noffunctions + 1
@@ -608,18 +677,14 @@ function commands.taggedmathfunction(tag,label,apply)
else
texsetattribute(a_mathcategory,n + delta)
end
- context.mathlabeltext(tag)
- else
- texsetattribute(a_mathcategory,1000 + delta)
- context(tag)
end
-end
+}
--
local list
-function commands.resetmathattributes()
+function mathematics.resetattributes()
if not list then
list = { }
for k, v in next, attributes.numbers do
@@ -632,3 +697,16 @@ function commands.resetmathattributes()
texsetattribute(list[i],unsetvalue)
end
end
+
+implement {
+ name = "resetmathattributes",
+ actions = mathematics.resetattributes
+}
+
+-- weird to do this here but it's a side effect of math anyway
+
+interfaces.implement {
+ name = "enableasciimode",
+ onlyonce = true,
+ actions = resolvers.macros.enablecomment,
+}
diff --git a/tex/context/base/math-ini.mkiv b/tex/context/base/math-ini.mkiv
index bf9f5278c..83c7554eb 100644
--- a/tex/context/base/math-ini.mkiv
+++ b/tex/context/base/math-ini.mkiv
@@ -13,6 +13,18 @@
\writestatus{loading}{ConTeXt Math Macros / Initializations}
+% Todo in luatex maincontrol.w: also accept a number here:
+%
+% case set_math_param_cmd:
+% p = cur_chr;
+% get_token();
+% if (cur_cmd != math_style_cmd) {
+%
+% plus two new math styles: larger/smaller
+%
+% \unexpanded\def\Umathshow#1%
+% {\hbox{\infofont(\string#1:\the#1\textstyle,\the#1\scriptstyle,\the#1\scriptscriptstyle)}}
+
%D This module provides namespaces for math fonts, thereby permitting mixed usage of
%D math fonts. Although not strictly needed, we also provide a family name mapping
%D mechanism as used in the (original) AMS math definition files, but here these
@@ -26,6 +38,15 @@
%D restore a changed mathstyle so best avoid that one. However, there are cases where
%D we really need to use such grouping.
+% Weird, these fail, maybe amp is solved in a later state from char noads (needs a
+% fix in luatex):
+%
+% $\char"26$
+% $\a$
+% $\string&$
+
+% mathop applied to characters centers it vertically
+
\unprotect
%D We move these definitions into the format:
@@ -80,7 +101,35 @@
\setnewconstant\defaultmathfamily \zerocount % 255
-\unexpanded\def\resetmathattributes{\ctxcommand{resetmathattributes()}}
+\unexpanded\def\resetmathattributes{\clf_resetmathattributes}
+
+% handy
+
+\newconditional\indisplaymath
+
+\appendtoks
+ \setfalse\indisplaymath
+\to \everymath
+
+\appendtoks
+ \settrue\indisplaymath
+\to \everydisplay
+
+\def\inlineordisplaymath{\ifconditional\indisplaymath\expandafter\secondoftwoarguments\else\expandafter\firstoftwoarguments\fi}
+
+\unexpanded\def\forcedisplaymath
+ {\ifmmode
+ \displaystyle
+ \settrue\indisplaymath
+ \fi}
+
+\unexpanded\def\startforceddisplaymath
+ {\startimath
+ \displaystyle
+ \settrue\indisplaymath}
+
+\unexpanded\def\stopforceddisplaymath
+ {\stopimath}
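+
+% So, as an (untested) illustration, \inlineordisplaymath{A}{B} expands to A in
+% inline math and to B in display math; math-frc.mkiv below uses it to pick the
+% i: or d: fraction instance.
+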
% \unexpanded\def\rawmathcharacter#1% slow but only for tracing
% {\begingroup
@@ -94,9 +143,9 @@
\unexpanded\def\rawmathematics#1% slow but only for tracing
{\begingroup
\ifmmode
- \resetmathattributes#1%
+ \clf_resetmathattributes#1%
\else
- \startimath\resetmathattributes#1\stopimath
+ \startimath\clf_resetmathattributes#1\stopimath
\fi
\endgroup}
@@ -117,7 +166,7 @@
\installswitchcommandhandler \??mathematics {mathematics} \??mathematics
\unexpanded\def\startmathematics % no grouping, if ever then also an optional second
- {\doifnextoptionalelse\math_mathematics_start_yes\math_mathematics_start_nop}
+ {\doifelsenextoptionalcs\math_mathematics_start_yes\math_mathematics_start_nop}
\unexpanded\def\math_mathematics_start_yes[#1]%
{\pushmacro\currentmathematics
@@ -135,22 +184,149 @@
\definemathematics[\v!default] % not needed, but nicer when nesting back to normal
-% Normally this is applied to only one character.
+% Now we redefine \type {\mathematics} and \type {\m}:
+
+\unexpanded\def\mathematics
+ {\doifelsenextoptionalcs\math_m_yes\math_m_nop}
+
+\def\math_m_yes
+ {\relax
+ \ifmmode
+ \expandafter\math_m_yes_math
+ \else
+ \expandafter\math_m_yes_text
+ \fi}
+
+\def\math_m_yes_math[#1]#2%
+ {#2}
+
+\def\math_m_yes_text[#1]%
+ {\begingroup
+ \edef\currentmathematics{#1}% check for valid
+ \edef\p_openup{\mathematicsparameter\c!openup}%
+ \ifx\p_openup\v!yes
+ \expandafter\math_m_yes_text_openedup
+ \else
+ \expandafter\math_m_yes_text_normal
+ \fi}
+
+\def\math_m_yes_text_openedup#1%
+ {\setbox\scratchbox\hbox\bgroup
+ \normalstartimath
+ \the\everyswitchmathematics\relax
+ #1%
+ \normalstopimath
+ \egroup
+ \ifdim\ht\scratchbox>\strutht
+ \math_inline_openup_start_yes
+ \else\ifdim\dp\scratchbox>\strutdp
+ \math_inline_openup_start_yes
+ \else
+ \math_inline_openup_start_nop
+ \fi\fi
+ \unhbox\scratchbox % \normalstartimath#1\normalstopimath
+ \math_inline_openup_stop
+ \endgroup}
+
+\def\math_m_yes_text_normal#1%
+ {\normalstartimath
+ \the\everyswitchmathematics\relax
+ #1%
+ \normalstopimath
+ \endgroup}
+
+\def\math_m_nop#1%
+ {\relax
+ \ifmmode
+ #1%
+ \else
+ \normalstartimath
+ #1%
+ \normalstopimath
+ \fi}
+
+\let\m\mathematics
+
+% e.g.: \definemathematics[i:mp][setups=i:tight,openup=yes]
+
+\newmuskip\defaultthickmuskip \defaultthickmuskip 5mu plus 5mu
+\newmuskip\defaultmedmuskip \defaultmedmuskip 4mu plus 2mu minus 4mu
+\newmuskip\defaultthinmuskip \defaultthinmuskip 3mu
+
+\newmuskip\halfthickmuskip \halfthickmuskip 2.5mu plus 2.5mu
+\newmuskip\halfmedmuskip \halfmedmuskip 2.0mu plus 1.0mu minus 2.0mu
+\newmuskip\halfthinmuskip \halfthinmuskip 1.5mu
+
+\newcount \defaultrelpenalty \defaultrelpenalty 500
+\newcount \defaultbinoppenalty \defaultbinoppenalty 700
+
+
+\startsetups math:spacing:default
+ \thickmuskip \defaultthickmuskip
+ \medmuskip \defaultmedmuskip
+ \thinmuskip \defaultthinmuskip
+ \relpenalty \defaultrelpenalty
+ \binoppenalty \defaultbinoppenalty
+\stopsetups
+
+\startsetups math:spacing:half
+ \thickmuskip \halfthickmuskip
+ \medmuskip \halfmedmuskip
+ \thinmuskip \halfthinmuskip
+ \relpenalty \defaultrelpenalty
+ \binoppenalty \defaultbinoppenalty
+\stopsetups
+
+\startsetups math:spacing:tight
+ \ifcase\raggedstatus
+ \thickmuskip \halfthickmuskip
+ \medmuskip \halfmedmuskip
+ \thinmuskip \halfthinmuskip
+ \else
+ \thickmuskip 1\halfthickmuskip
+ \medmuskip 1\halfmedmuskip
+ \thinmuskip 1\halfthinmuskip
+ \fi
+ \relpenalty \defaultrelpenalty
+ \binoppenalty \maxdimen
+\stopsetups
+
+\startsetups math:spacing:fixed
+ \ifcase\raggedstatus
+ \thickmuskip \halfthickmuskip
+ \medmuskip \halfmedmuskip
+ \thinmuskip \halfthinmuskip
+ \else
+ \thickmuskip 1\halfthickmuskip
+ \medmuskip 1\halfmedmuskip
+ \thinmuskip 1\halfthinmuskip
+ \fi
+ \relpenalty \maxdimen
+ \binoppenalty \maxdimen
+\stopsetups
+
+% \dorecurse{80}{test \m[i:tight]{\red \fakeformula} test }
+
+\definemathematics[i:default][\c!setups=math:spacing:equal]
+\definemathematics[i:half] [\c!setups=math:spacing:half]
+\definemathematics[i:tight] [\c!setups=math:spacing:tight]
+\definemathematics[i:fixed] [\c!setups=math:spacing:fixed]
+
+% Normally the next is applied to only one character.
%
% $ABC$ $\cal ABC$ $\mathaltcal ABC$
% todo: only in mmode
-
% these commands are semi-public but should not be used directly (lua names will change)
-\unexpanded\def\math_set_attribute #1#2{\ifmmode\ctxcommand{setmathattribute("#1","#2")}\fi}
-\unexpanded\def\math_set_alphabet #1{\ifmmode\ctxcommand{setmathalphabet("#1")}\fi}
-\unexpanded\def\math_set_font_style #1{\ifmmode\ctxcommand{setmathstyle("#1")}\fi}
-\unexpanded\def\math_set_font_alternate#1{\ifmmode\ctxcommand{setmathalternate(\number\defaultmathfamily,"#1")}\fi}
+\unexpanded\def\math_set_attribute #1#2{\ifmmode\clf_setmathattribute{#1}{#2}\fi}
+\unexpanded\def\math_set_alphabet #1{\ifmmode\clf_setmathalphabet{#1}\fi}
+\unexpanded\def\math_set_font_style #1{\ifmmode\clf_setmathstyle{#1}\fi}
+\unexpanded\def\math_set_font_alternate#1{\ifmmode\clf_setmathalternate\defaultmathfamily{#1}\fi}
\installcorenamespace{mathstylealternate} % might become a setuphandler
-\unexpanded\def\math_set_font_style_alterternate#1%
+\unexpanded\def\math_set_font_style_alternate#1%
{\ifcsname\??mathstylealternate\fontclass:#1\endcsname
\expandafter\math_set_font_alternate\csname\??mathstylealternate\fontclass:#1\endcsname
\else\ifcsname\??mathstylealternate#1\endcsname
@@ -169,31 +345,31 @@
\unexpanded\def\mathaltcal{\math_set_font_alternate{cal}\cal} % ss01 in xits
-\let\setmathattribute \math_set_attribute
-\let\setmathalphabet \math_set_alphabet
-\let\setmathfontstyle \math_set_font_style
-\let\setmathfontalternate \math_set_font_alternate
-\let\setmathfontstylealterternate\math_set_font_style_alterternate
+\let\setmathattribute \math_set_attribute
+\let\setmathalphabet \math_set_alphabet
+\let\setmathfontstyle \math_set_font_style
+\let\setmathfontalternate \math_set_font_alternate
+\let\setmathfontstylealternate\math_set_font_style_alternate
-\let\mathalternate \math_set_font_alternate % obsolete
+\let\mathalternate \math_set_font_alternate % obsolete
-\unexpanded\def\mathupright {\math_set_attribute\s!regular\s!tf\math_set_font_style_alterternate\s!tf}
-\unexpanded\def\mathdefault {\math_set_attribute\s!regular\s!it\math_set_font_style_alterternate\s!it}
-\unexpanded\def\mathscript {\math_set_alphabet \s!script \math_set_font_style_alterternate\s!script}
-\unexpanded\def\mathfraktur {\math_set_alphabet \s!fraktur \math_set_font_style_alterternate\s!fraktur}
-\unexpanded\def\mathblackboard{\math_set_alphabet \s!blackboard \math_set_font_style_alterternate\s!blackboard}
+\unexpanded\def\mathupright {\math_set_attribute\s!regular\s!tf\math_set_font_style_alternate\s!tf}
+\unexpanded\def\mathdefault {\math_set_attribute\s!regular\s!it\math_set_font_style_alternate\s!it}
+\unexpanded\def\mathscript {\math_set_alphabet \s!script \math_set_font_style_alternate\s!script}
+\unexpanded\def\mathfraktur {\math_set_alphabet \s!fraktur \math_set_font_style_alternate\s!fraktur}
+\unexpanded\def\mathblackboard{\math_set_alphabet \s!blackboard \math_set_font_style_alternate\s!blackboard}
-\unexpanded\def\mathrm {\math_set_attribute\s!rm\s!tf \math_set_font_style_alterternate\s!tf}
-\unexpanded\def\mathss {\math_set_attribute\s!ss\s!tf \math_set_font_style_alterternate\s!tf}
-\unexpanded\def\mathtt {\math_set_attribute\s!tt\s!tf \math_set_font_style_alterternate\s!tf}
+\unexpanded\def\mathrm {\math_set_attribute\s!rm\s!tf \math_set_font_style_alternate\s!tf}
+\unexpanded\def\mathss {\math_set_attribute\s!ss\s!tf \math_set_font_style_alternate\s!tf}
+\unexpanded\def\mathtt {\math_set_attribute\s!tt\s!tf \math_set_font_style_alternate\s!tf}
-\unexpanded\def\mathtf {\math_set_font_style\s!tf \math_set_font_style_alterternate\s!tf}
-\unexpanded\def\mathsl {\math_set_font_style\s!it \math_set_font_style_alterternate\s!it} % no sl
-\unexpanded\def\mathit {\math_set_font_style\s!it \math_set_font_style_alterternate\s!it}
+\unexpanded\def\mathtf {\math_set_font_style\s!tf \math_set_font_style_alternate\s!tf}
+\unexpanded\def\mathsl {\math_set_font_style\s!it \math_set_font_style_alternate\s!it} % no sl
+\unexpanded\def\mathit {\math_set_font_style\s!it \math_set_font_style_alternate\s!it}
-\unexpanded\def\mathbf {\math_set_font_style\s!bf \math_set_font_style_alterternate\s!bf}
-\unexpanded\def\mathbs {\math_set_font_style\s!bi \math_set_font_style_alterternate\s!bi} % no sl
-\unexpanded\def\mathbi {\math_set_font_style\s!bi \math_set_font_style_alterternate\s!bi}
+\unexpanded\def\mathbf {\math_set_font_style\s!bf \math_set_font_style_alternate\s!bf}
+\unexpanded\def\mathbs {\math_set_font_style\s!bi \math_set_font_style_alternate\s!bi} % no sl
+\unexpanded\def\mathbi {\math_set_font_style\s!bi \math_set_font_style_alternate\s!bi}
\let\tfmath\mathtf % maybe a grouped command
\let\slmath\mathsl
@@ -216,12 +392,12 @@
\unexpanded\def\mathfrak#1{{\mathfraktur #1}} % for AMS compatibility
\unexpanded\def\mathbb #1{{\mathblackboard#1}} % for AMS compatibility
-\let\normaltf\tf \unexpanded\def\tf{\ifmmode\mathtf\else\normaltf\fi}
-\let\normalbf\bf \unexpanded\def\bf{\ifmmode\mathbf\else\normalbf\fi}
-\let\normalit\it \unexpanded\def\it{\ifmmode\mathit\else\normalit\fi}
-\let\normalsl\sl \unexpanded\def\sl{\ifmmode\mathsl\else\normalsl\fi}
-\let\normalbi\bi \unexpanded\def\bi{\ifmmode\mathbi\else\normalbi\fi}
-\let\normalbs\bs \unexpanded\def\bs{\ifmmode\mathbs\else\normalbs\fi}
+\ifdefined\normaltf\else\let\normaltf\tf\fi \unexpanded\def\tf{\ifmmode\mathtf\else\normaltf\fi}
+\ifdefined\normalbf\else\let\normalbf\bf\fi \unexpanded\def\bf{\ifmmode\mathbf\else\normalbf\fi}
+\ifdefined\normalit\else\let\normalit\it\fi \unexpanded\def\it{\ifmmode\mathit\else\normalit\fi}
+\ifdefined\normalsl\else\let\normalsl\sl\fi \unexpanded\def\sl{\ifmmode\mathsl\else\normalsl\fi}
+\ifdefined\normalbi\else\let\normalbi\bi\fi \unexpanded\def\bi{\ifmmode\mathbi\else\normalbi\fi}
+\ifdefined\normalbs\else\let\normalbs\bs\fi \unexpanded\def\bs{\ifmmode\mathbs\else\normalbs\fi}
\let\normalrm\rm \unexpanded\def\rm{\ifmmode\mathrm\else\normalrm\fi}
\let\normalss\ss \unexpanded\def\ss{\ifmmode\mathss\else\normalss\fi}
@@ -230,6 +406,12 @@
\ifdefined\mr \else \let\mr\relax \fi
\ifdefined\mb \else \let\mb\relax \fi
+% 1: $\setmathattribute{ss}{bf}3$
+% 2: $\setmathattribute{ss}{bf}\setmathfontstylealternate{bf}3$
+% 3: $\setmathattribute{ss}{bf}\setmathfontstyle{bf}3$
+% 4: $\setmathattribute{ss}{bf}\setmathfontstyle{bf}\setmathfontstylealternate{bf}3$
+% 5: $e=mc^2 \quad \mb e=mc^2$
+
\prependtoks
\mathdefault
\to \everymathematics
@@ -254,7 +436,7 @@
\newcount\c_math_renderings_attribute
\appendtoks
- \c_math_renderings_attribute\ctxcommand{mathrenderset("\mathematicsparameter\c!symbolset")}\relax
+ \c_math_renderings_attribute\clf_mathrenderset{\mathematicsparameter\c!symbolset}\relax
\to \everysetupmathematics % only in mathematics
\appendtoks
@@ -276,24 +458,26 @@
\unexpanded\def\boldsymbol
{\mathortext\mathboldsymbol\bold}
-%D Helpers:
-
-\def\utfmathclass #1{\ctxcommand{utfmathclass (\!!bs#1\!!es)}}
-\def\utfmathstretch#1{\ctxcommand{utfmathstretch(\!!bs#1\!!es)}}
-\def\utfmathcommand#1{\ctxcommand{utfmathcommand(\!!bs#1\!!es)}}
-\def\utfmathfiller #1{\ctxcommand{utfmathfiller (\!!bs#1\!!es)}}
+%D Helpers
-\def\utfmathclassfiltered #1#2{\ctxcommand{utfmathclass (\!!bs#1\!!es,nil,"#2")}}
-\def\utfmathcommandfiltered#1#2{\ctxcommand{utfmathcommand(\!!bs#1\!!es,nil,"#2")}}
+\def\utfmathclass #1{\clf_utfmathclass {#1}}
+\def\utfmathstretch#1{\clf_utfmathstretch{#1}}
+\def\utfmathcommand#1{\clf_utfmathcommand{#1}}
+\def\utfmathfiller #1{\clf_utfmathfiller {#1}}
-\def\utfmathcommandabove#1{\ctxcommand{utfmathcommandabove(\!!bs#1\!!es)}}
-\def\utfmathcommandbelow#1{\ctxcommand{utfmathcommandbelow(\!!bs#1\!!es)}}
+\def\utfmathcommandabove #1{\clf_utfmathcommandabove {#1}}
+\def\utfmathcommandbelow #1{\clf_utfmathcommandbelow {#1}}
+\def\utfmathcommandfiller#1{\clf_utfmathcommandfiller{#1}}
-\unexpanded\def\doifelseutfmathaccent #1{\ctxcommand{doifelseutfmathaccent(\!!bs#1\!!es)}}
-\unexpanded\def\doifelseutfmathaccentfiltered#1#2{\ctxcommand{doifelseutfmathaccent(\!!bs#1\!!es,"#2")}}
+\unexpanded\def\doifelseutfmathaccent#1{\clf_doifelseutfmathaccent{#1}}
+\unexpanded\def\doifelseutfmathabove #1{\clf_doifelseutfmathabove {#1}}
+\unexpanded\def\doifelseutfmathbelow #1{\clf_doifelseutfmathbelow {#1}}
+\unexpanded\def\doifelseutfmathfiller#1{\clf_doifelseutfmathfiller{#1}}
-\unexpanded\def\doifelseutfmathabove #1{\ctxcommand{doifelseutfmathabove(\!!bs#1\!!es)}}
-\unexpanded\def\doifelseutfmathbelow #1{\ctxcommand{doifelseutfmathbelow(\!!bs#1\!!es)}}
+\let\doifutfmathaccentelse \doifelseutfmathaccent
+\let\doifutfmathaboveelse \doifelseutfmathabove
+\let\doifutfmathbelowelse \doifelseutfmathbelow
+\let\doifutfmathfillerelse \doifelseutfmathfiller
%D Not used that much:
@@ -366,10 +550,39 @@
%D Let's define a few commands here:
-\definemathcommand [mathstrut] {\vphantom{(}}
+%definemathcommand [mathstrut] {\vphantom{(}}
%definemathcommand [joinrel] {\mathrel{\mkern-3mu}}
\definemathcommand [joinrel] [rel] {\mkern-3mu}
+\chardef\c_math_strut"28
+
+\unexpanded\def\math_strut_htdp#1%
+ {\s!height\fontcharht#1\c_math_strut
+ \s!depth \fontchardp#1\c_math_strut}
+
+\unexpanded\def\math_strut_normal
+ {\vrule
+ \normalexpanded{\math_strut_htdp{\mathstylefont\normalmathstyle}}%
+ \s!width \zeropoint
+ \relax}
+
+\unexpanded\def\math_strut_visual
+ {\hskip-.01\emwidth
+ \vrule
+ \normalexpanded{\math_strut_htdp{\mathstylefont\normalmathstyle}}%
+ \s!width .02\emwidth
+ \relax
+ \hskip-.01\emwidth}
+
+\unexpanded\def\showmathstruts % let's not overload \math_strut_normal
+ {\let\math_strut\math_strut_visual}
+
+\let\math_strut\math_strut_normal
+
+% \unexpanded\def\mathstrut{\mathcodecommand{nothing}{\math_strut}}
+
+\definemathcommand [mathstrut] {\math_strut}
+
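+% A quick (untested) check: {\showmathstruts \mathematics{x+\mathstrut y}} makes the
+% otherwise invisible strut rule visible.
+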
%D We could have an arg variant \unknown\ but not now.
\unexpanded\def\mathopwithlimits#1#2{\mathop{#1{#2}}\limits}
@@ -390,13 +603,14 @@
\unexpanded\def\mathop
{\normalmathop
\bgroup
- \let\rm\mf
+ % no: \let\rm\mf
\let\nexttoken=}
% this one too: \letvalue{\??mathcodecommand op}\mathop ?
\unexpanded\def\normalmbox
- {\normalhbox\bgroup\mf
+ {\normalhbox\bgroup
+ \usemathematicsstyleandcolor\c!textstyle\c!textcolor % new
\dowithnextboxcs\math_mbox_finish\normalhbox}
\def\math_mbox_finish
@@ -421,10 +635,13 @@
\startimath#1\stopimath
\egroup
\ht\scratchbox\strutht
- \dp\scratchbox\strutht
+ \dp\scratchbox\strutdp
\box\scratchbox
\endgroup}
+\unexpanded\def\mtext#1%
+ {\text{\usemathematicsstyleandcolor\c!textstyle\c!textcolor#1}}
+
%D The next hack is needed for sine, cosine etc.
\let\mathfunction\firstofoneunexpanded
@@ -438,19 +655,69 @@
\unexpanded\def\math_tags_mn#1{\begingroup\mathupright#1\endgroup}
\unexpanded\def\math_tags_ms#1{\begingroup\mathupright#1\endgroup}
-\unexpanded\def\mfunction #1{{\mathupright\math_tags_function{#1}}}
-\unexpanded\def\mfunctionlabeltext#1{{\mathupright\math_tags_functionlabeltext{#1}}}
-
% Once this is stable we can store the number at the tex end which is
% faster. Functions get numbers >= 1000.
-\expanded\def\math_tags_mathfunction_indeed #1{\ctxcommand{taggedmathfunction("#1",false,\ifconditional\c_apply_function true\else false\fi)}}
-\expanded\def\math_tags_mathfunctionlabeltext_indeed#1{\ctxcommand{taggedmathfunction("#1",true ,\ifconditional\c_apply_function true\else false\fi)}}
+\setupmathematics
+ [\c!textstyle=, % rm ss etc i.e. known alternatives, otherwise math
+ \c!textcolor=,
+ \c!functionstyle=, % rm ss etc i.e. known alternatives, otherwise math
+ \c!functioncolor=]
+
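+% These default to empty; an (untested) example of non-default settings:
+%
+% \setupmathematics[functionstyle=rm,functioncolor=darkred]
+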
+% \unexpanded\def\math_mfunction_styled
+% {\edef\m_math_text_choice_face{\textstyleface\normalmathstyle}%
+% \dowithnextbox
+% {\mathop{\box\nextbox}}%
+% \hbox\bgroup
+% \usemathematicsstyleandcolor\c!functionstyle\c!functioncolor
+% \m_math_text_choice_face
+% \let\next}
+
+\unexpanded\def\math_mfunction_styled
+ {\begingroup
+ \usemathematicscolorparameter\c!functioncolor
+ \edef\p_functionstyle{\mathematicsparameter\c!functionstyle}%
+ \ifx\p_functionstyle\empty
+ \expandafter\math_mfunction_styled_none
+ \else\ifcsname\??alternativestyles\p_functionstyle\endcsname
+ \doubleexpandafter\math_mfunction_styled_text
+ \else
+ \doubleexpandafter\math_mfunction_styled_math
+ \fi\fi}
-\expanded\def\math_tags_mo_indeed#1{\begingroup \attribute\mathcategoryattribute\plusone #1\endgroup}
-\expanded\def\math_tags_mi_indeed#1{\begingroup \attribute\mathcategoryattribute\plustwo #1\endgroup}
-\expanded\def\math_tags_mn_indeed#1{\begingroup\mathupright\attribute\mathcategoryattribute\plusthree#1\endgroup}
-\expanded\def\math_tags_ms_indeed#1{\begingroup\mathupright\attribute\mathcategoryattribute\plusfour #1\endgroup}
+\unexpanded\def\math_mfunction_styled_text#1%
+ {\mathoptext{\csname\??alternativestyles\p_functionstyle\endcsname#1}%
+ \endgroup}
+
+\unexpanded\def\math_mfunction_styled_math#1%
+ {\p_functionstyle
+ #1%
+ \endgroup}
+
+\unexpanded\def\math_mfunction_styled_none#1%
+ {\mathupright
+ #1%
+ \endgroup}
+
+\unexpanded\def\mfunction#1%
+ {\begingroup
+ \math_tags_mfunctiontxt{#1}\c_apply_function
+ \math_mfunction_styled{#1}%
+ \endgroup}
+
+\unexpanded\def\mfunctionlabeltext#1%
+ {\begingroup
+ \math_tags_mfunctionlab{#1}\c_apply_function
+ \math_mfunction_styled{\mathlabeltext{#1}}%
+ \endgroup}
+
+\let\math_tags_mfunctiontxt\gobbletwoarguments
+\let\math_tags_mfunctionlab\gobbletwoarguments
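+
+% e.g. (untested): $\mfunction{sinc}(x)$ or $\mfunctionlabeltext{sin}(x)$; the second
+% one picks up the (language dependent) math label text.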
+
+\unexpanded\def\math_tags_mo_indeed#1{\begingroup \attribute\mathcategoryattribute\plusone #1\endgroup}
+\unexpanded\def\math_tags_mi_indeed#1{\begingroup \attribute\mathcategoryattribute\plustwo #1\endgroup}
+\unexpanded\def\math_tags_mn_indeed#1{\begingroup\mathupright\attribute\mathcategoryattribute\plusthree#1\endgroup}
+\unexpanded\def\math_tags_ms_indeed#1{\begingroup\mathupright\attribute\mathcategoryattribute\plusfour #1\endgroup} % todo: mathoptext
\newconditional\c_apply_function
@@ -465,12 +732,12 @@
\endgroup}
\appendtoks
- \let\math_tags_function \math_tags_mathfunction_indeed
- \let\math_tags_functionlabeltext\math_tags_mathfunctionlabeltext_indeed
- \let\math_tags_mo \math_tags_mo_indeed
- \let\math_tags_mi \math_tags_mi_indeed
- \let\math_tags_mn \math_tags_mn_indeed
- \let\math_tags_ms \math_tags_ms_indeed
+ \let\math_tags_mfunctiontxt\clf_tagmfunctiontxt
+ \let\math_tags_mfunctionlab\clf_tagmfunctionlab
+ \let\math_tags_mo \math_tags_mo_indeed
+ \let\math_tags_mi \math_tags_mi_indeed
+ \let\math_tags_mn \math_tags_mn_indeed
+ \let\math_tags_ms \math_tags_ms_indeed
\to \everyenableelements
\appendtoks
@@ -484,21 +751,21 @@
% \def\mlimitsfunction #1{\mathlimopcomm{{\mr#1}}
% \def\mnolimitsfunction#1{\mathnolopcomm{{\mr#1}}
-%D Taco posted this solution as response to a mail by Olivier, so let's integrate
-%D it here.
-
-\def\currentmscaledstyle{rm} % will be plugged into the typeface text=ss option
-
-\unexpanded\def\math_function_style_opnolimits #1{\mathop{\mscaledtext{#1}}\nolimits}
-\unexpanded\def\math_function_style_mfunction #1{\mscaledtext{\math_tags_function{#1}}}
-\unexpanded\def\math_function_style_mfunctionlabeltext#1{\mscaledtext{\math_tags_functionlabeltext{#1}}}
-
-\unexpanded\def\setmathfunctionstyle#1% rm ss tt (can be made faster if needed)
- {\doifsomething{#1}
- {\def\currentmscaledstyle{#1}%
- \let\mathopnolimits \math_function_style_opnolimits
- \let\mfunction \math_function_style_mfunction
- \let\mfunctionlabeltext\math_function_style_mfunctionlabeltext}}
+% %D Taco posted this solution as response to a mail by Olivier, so let's integrate
+% %D it here.
+%
+% \def\currentmscaledstyle{rm} % will be plugged into the typeface text=ss option
+%
+% \unexpanded\def\math_function_style_opnolimits #1{\mathop{\mscaledtext{#1}}\nolimits}
+% \unexpanded\def\math_function_style_mfunction #1{\mscaledtext{\math_tags_function{#1}}}
+% \unexpanded\def\math_function_style_mfunctionlabeltext#1{\mscaledtext{\math_tags_functionlabeltext{#1}}}
+%
+% \unexpanded\def\setmathfunctionstyle#1% rm ss tt (can be made faster if needed)
+% {\doifsomething{#1}
+% {\def\currentmscaledstyle{#1}%
+% \let\mathopnolimits \math_function_style_opnolimits
+% \let\mfunction \math_function_style_mfunction
+% \let\mfunctionlabeltext\math_function_style_mfunctionlabeltext}}
\unexpanded\def\mscaledtext#1%
{\mathchoice
@@ -507,6 +774,9 @@
{\hbox{\csname\currentmscaledstyle\endcsname\tfx #1}}
{\hbox{\csname\currentmscaledstyle\endcsname\tfxx#1}}}
+\unexpanded\def\setmathfunctionstyle#1%
+ {\setupmathematics[\c!functionstyle=#1]} % for old times sake
+
%D We can force the way functions are typeset by manipulating the text option:
%D
%D \starttyping
@@ -788,17 +1058,16 @@
% \ifconditional\knuthmode\else\donknuthmode\fi}
\unexpanded\def\enableasciimode
- {\ctxlua{resolvers.macros.enablecomment()}%
- \glet\enableasciimode\relax}
+ {\clf_enableasciimode} % relaxes itself
\unexpanded\def\asciimode
{\catcodetable\txtcatcodes
- \enableasciimode}
+ \clf_enableasciimode}
\unexpanded\def\startasciimode
{\pushcatcodetable
\catcodetable\txtcatcodes
- \enableasciimode}
+ \clf_enableasciimode}
\unexpanded\def\stopasciimode
{\popcatcodetable}
@@ -816,7 +1085,7 @@
\to \everysetupmathematics
\setupmathematics
- [\c!compact=no]
+ [\c!compact=\v!no]
% \enabletrackers[typesetters.directions.math]
@@ -867,8 +1136,8 @@
\newcount\c_math_bidi
-\setvalue{\??mathbidi\v!no }{\ctxcommand{setmathdirection(0)}\c_math_bidi\attributeunsetvalue}
-\setvalue{\??mathbidi\v!yes}{\ctxcommand{setmathdirection(1)}\c_math_bidi\plusone}
+\setvalue{\??mathbidi\v!no }{\clf_setmathdirection\zerocount\relax\c_math_bidi\attributeunsetvalue}
+\setvalue{\??mathbidi\v!yes}{\clf_setmathdirection\plusone \relax\c_math_bidi\plusone}
\appendtoks
\edef\p_bidi{\mathematicsparameter\c!bidi}%
@@ -950,7 +1219,7 @@
\def\math_italics_initialize
{\ifnum\c_math_italics_attribute=\attributeunsetvalue \else
- \ctxcommand{setmathitalics()}% one time
+ \clf_setmathitalics % one time
\global\let\math_italics_initialize\relax
\fi}
@@ -1236,6 +1505,19 @@
\expandafter#3\else
\expandafter#1\fi}
+% \def\textstyleface#1% #1 is number (\normalmathstyle)
+% {\ifcase\numexpr#1\relax
+% \tf \or
+% \tf \or
+% \tf \or
+% \tf \or
+% \tfx \or
+% \tfx \or
+% \tfxx \or
+% \tfxx \else
+% \tf
+% \fi}
+
\unexpanded\def\verbosemathstyle#1% #1 is number (\normalmathstyle)
{{\normalexpanded{\relax\darkgray\ttxx[\number#1:\ifcase\numexpr#1\relax
display\or % 0
@@ -1267,7 +1549,9 @@
%D
%D \typebuffer \getbuffer
-\unexpanded\def\mathstylehbox#1%
+% to be tested: {#1} but it could have side effects
+
+\unexpanded\def\mathstylehbox#1% sensitive for: a \over b => {a\over b} or \frac{a}{b}
{\normalexpanded{\hbox\bgroup
\startimath\triggermathstyle\normalmathstyle}\mathsurround\zeropoint#1\stopimath\egroup}
@@ -1454,6 +1738,16 @@
\crampedscriptstyle \or
\fi}
+\newcount\c_math_saved_style
+
+\unexpanded\def\pushmathstyle % assumes begingroup .. endgroup
+ {\c_math_saved_style\mathstyle}
+
+\unexpanded\def\popmathstyle
+ {\ifnum\mathstyle=\c_math_saved_style\else
+ \triggermathstyle\c_math_saved_style
+ \fi}
+
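+% e.g. (untested): \begingroup\pushmathstyle \scriptstyle ... \popmathstyle\endgroup
+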
\installcorenamespace{mathstylecommand}
\installcorenamespace{mathstylecache}
@@ -1789,7 +2083,14 @@
\ifdefined\text\else \let\text\hbox \fi
-\unexpanded\def\mathoptext#1{\mathop{\text{#1}}}
+% \unexpanded\def\mathoptext#1{\mathop{\text{#1}}}
+
+\unexpanded\def\mathoptext
+ {\normalizebodyfontsize\m_math_text_choice_face{\mathstyleface\normalmathstyle}%
+ %\showmathstyle
+ \dowithnextbox
+ {\mathop{\box\nextbox}}%
+ \hbox\bgroup\font_basics_switchtobodyfont\m_math_text_choice_face\let\next}
% new:
@@ -1814,11 +2115,140 @@
% this should be a primitive:
-% \def\mathextensiblecode#1#2%
-% {\cldcontext{mathematics.extensiblecode(\number#1,\number#2)}}
+\def\mathextensiblecode#1#2{\clf_extensiblecode\numexpr#1\relax\numexpr#2\relax}
+\def\mathhorizontalcode#1#2{\clf_horizontalcode\numexpr#1\relax\numexpr#2\relax}
+
+% experimental:
+
+%D \starttyping
+%D \enabletrackers[math.openedup]
+%D
+%D \dorecurse{10}{\dorecurse{#1}{whatever }}
+%D
+%D \startitemize[packed]
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \stopitemize
+%D \startitemize[packed,columns]
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \stopitemize
+%D
+%D \dorecurse{5}{\dorecurse{#1}{whatever }\openedupimath{\frac{1}{2}} }
+%D
+%D \startitemize[packed,columns]
+%D \startitem whatever \openedupimath{1+2} whatever
+%D \startitem whatever \openedupimath{1+2} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{1+2} whatever
+%D \startitem whatever \openedupimath{1+2} whatever
+%D \stopitemize
+%D
+%D \dorecurse{5}{\dorecurse{#1}{whatever }\openedupimath{1+2} }
+%D
+%D \startitemize[packed]
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \startitem whatever \openedupimath{\frac{1}{2}} whatever
+%D \stopitemize
+%D
+%D \dorecurse{10}{whatever }
+%D \dorecurse {5}{\dorecurse{#1}{whatever }\openedupimath{\frac{1}{2}} }
+%D \dorecurse{10}{whatever }
+%D \stoptyping
+
+\def\m_math_inline_openup_ht{\dimexpr\ifinsidecolumns\strutdp\else\lineheight\fi/\plusfour\relax}
+\def\m_math_inline_openup_dp{\dimexpr\ifinsidecolumns\strutdp\else\lineheight\fi/\plusfour\relax}
+
+% \def\m_math_inline_openup_ht{\dimexpr\lineheight/\ifinsidecolumns\pluseight\else\plusfour\fi\relax}
+% \def\m_math_inline_openup_dp{\dimexpr\lineheight/\ifinsidecolumns\pluseight\else\plusfour\fi\relax}
+
+\installtextracker
+ {math.openedup}
+ {\let\math_inline_openup_start_yes\math_inline_openup_traced_start}
+ {\let\math_inline_openup_start_yes\math_inline_openup_normal_start}
+
+\unexpanded\def\math_inline_openup_normal_start
+ {\scratchheight\dimexpr\ht\scratchbox+\m_math_inline_openup_ht\relax
+ \scratchdepth \dimexpr\dp\scratchbox+\m_math_inline_openup_dp\relax
+ \vrule\s!width\zeropoint\s!height\scratchheight\s!depth\scratchdepth\relax
+ \begingroup
+ \let\math_inline_openup_stop\math_inline_openup_normal_stop}
+
+\unexpanded\def\math_inline_openup_normal_stop
+ {\endgroup
+ \vrule\s!width\zeropoint\s!height\scratchheight\s!depth\scratchdepth\relax}
+
+\unexpanded\def\math_inline_openup_traced_start
+ {\scratchtopoffset \ht\scratchbox
+ \scratchbottomoffset\dp\scratchbox
+ \scratchheight \dimexpr\scratchtopoffset +\m_math_inline_openup_ht\relax
+ \scratchdepth \dimexpr\scratchbottomoffset+\m_math_inline_openup_dp\relax
+ \vrule\s!width\zeropoint\s!height\scratchheight\s!depth\scratchdepth\relax
+ \begingroup
+ \dofastcoloractivation{darkred}%
+ \vrule\s!width\emwidth\s!height\scratchheight\s!depth-\scratchtopoffset\relax
+ \endgroup
+ \kern-\emwidth
+ \begingroup
+ \let\math_inline_openup_stop\math_inline_openup_traced_stop}
+
+\unexpanded\def\math_inline_openup_traced_stop
+ {\endgroup
+ \kern-\emwidth
+ \begingroup
+ \dofastcoloractivation{darkblue}%
+ \vrule\s!width\emwidth\s!height-\scratchbottomoffset\s!depth\scratchdepth\relax
+ \endgroup
+ \vrule\s!width\zeropoint\s!height\scratchheight\s!depth\scratchdepth\relax}
+
+\let\math_inline_openup_start_yes\math_inline_openup_normal_start
+\let\math_inline_openup_stop \relax
+
+\def\math_inline_openup_start_nop
+ {\let\math_inline_openup_stop\relax}
-\def\mathextensiblecode#1#2{\ctxcommand{extensiblecode(\number#1,\number#2)}}
-\def\mathhorizontalcode#1#2{\ctxcommand{horizontalcode(\number#1,\number#2)}}
+\unexpanded\def\openedupimath
+ {\dontleavehmode
+ \begingroup
+ \ifmmode
+ \expandafter\openedupimath_math
+ \else
+ \expandafter\openedupimath_text
+ \fi}
+
+\unexpanded\def\openedupimath_math#1%
+ {\setbox\scratchbox\mathstylehbox{#1}%
+ \ifdim\ht\scratchbox>\strutht
+ \math_inline_openup_start_yes
+ \else\ifdim\dp\scratchbox>\strutdp
+ \math_inline_openup_start_yes
+ \else
+ \math_inline_openup_start_nop
+ \fi\fi
+ #1%
+ \math_inline_openup_stop
+ \endgroup}
+
+\unexpanded\def\openedupimath_text#1%
+ {\setbox\scratchbox\hbox{\startimath#1\stopimath}%
+ \ifdim\ht\scratchbox>\strutht
+ \math_inline_openup_start_yes
+ \else\ifdim\dp\scratchbox>\strutdp
+ \math_inline_openup_start_yes
+ \else
+ \math_inline_openup_start_nop
+ \fi\fi
+ \startimath
+ #1%
+ \stopimath
+ \math_inline_openup_stop
+ \endgroup}
\protect \endinput
diff --git a/tex/context/base/math-int.mkiv b/tex/context/base/math-int.mkiv
index 6b480961b..6b65738ff 100644
--- a/tex/context/base/math-int.mkiv
+++ b/tex/context/base/math-int.mkiv
@@ -13,6 +13,8 @@
\writestatus{loading}{ConTeXt Math Macros / Integrals}
+% todo: int and sum etc can be stackers
+
\unprotect
%D \startbuffer
diff --git a/tex/context/base/math-map.lua b/tex/context/base/math-map.lua
index 8d301ac33..add6afa4c 100644
--- a/tex/context/base/math-map.lua
+++ b/tex/context/base/math-map.lua
@@ -44,8 +44,8 @@ local registerotffeature = otffeatures.register
local setmetatableindex = table.setmetatableindex
-local texgetattribute = tex.getattribute
-local texsetattribute = tex.setattribute
+local texgetattribute = tex.getattribute
+local texsetattribute = tex.setattribute
local trace_greek = false trackers.register("math.greek", function(v) trace_greek = v end)
local report_remapping = logs.reporter("mathematics","remapping")
@@ -53,13 +53,60 @@ local report_remapping = logs.reporter("mathematics","remapping")
mathematics = mathematics or { }
local mathematics = mathematics
+local implement = interfaces.implement
+
-- Unfortunately some alphabets have gaps (thereby troubling all applications that
-- need to deal with math). Somewhat strange considering all those weird symbols that
-- were added afterwards. The following trickery (and data) is only to be used for
-- diagnostics and quick and dirty alphabet tracing (s-mat-10.mkiv) as we deal with
-- it otherwise.
-mathematics.gaps = {
+-- todo: allocate
+
+mathematics.styles = allocate { "regular", "sansserif", "monospaced", "fraktur", "script", "blackboard" }
+mathematics.alternatives = allocate { "normal", "bold", "italic", "bolditalic" }
+mathematics.sets = allocate { "ucletters", "lcletters", "digits", "ucgreek", "lcgreek", "symbols" }
+
+mathematics.charactersets = allocate {
+ ucletters = {
+ 0x00041, 0x00042, 0x00043, 0x00044, 0x00045,
+ 0x00046, 0x00047, 0x00048, 0x00049, 0x0004A,
+ 0x0004B, 0x0004C, 0x0004D, 0x0004E, 0x0004F,
+ 0x00050, 0x00051, 0x00052, 0x00053, 0x00054,
+ 0x00055, 0x00056, 0x00057, 0x00058, 0x00059,
+ 0x0005A,
+ },
+ lcletters = {
+ 0x00061, 0x00062, 0x00063, 0x00064, 0x00065,
+ 0x00066, 0x00067, 0x00068, 0x00069, 0x0006A,
+ 0x0006B, 0x0006C, 0x0006D, 0x0006E, 0x0006F,
+ 0x00070, 0x00071, 0x00072, 0x00073, 0x00074,
+ 0x00075, 0x00076, 0x00077, 0x00078, 0x00079,
+ 0x0007A,
+ },
+ digits = {
+ 0x00030, 0x00031, 0x00032, 0x00033, 0x00034,
+ 0x00035, 0x00036, 0x00037, 0x00038, 0x00039,
+ },
+ ucgreek = {
+ 0x0391, 0x0392, 0x0393, 0x0394, 0x0395,
+ 0x0396, 0x0397, 0x0398, 0x0399, 0x039A,
+ 0x039B, 0x039C, 0x039D, 0x039E, 0x039F,
+ 0x03A0, 0x03A1, 0x03A3, 0x03A4, 0x03A5,
+ 0x03A6, 0x03A7, 0x03A8, 0x03A9
+ },
+ lcgreek = {
+ 0x03B1, 0x03B2, 0x03B3, 0x03B4, 0x03B5,
+ 0x03B6, 0x03B7, 0x03B8, 0x03B9, 0x03BA,
+ 0x03BB, 0x03BC, 0x03BD, 0x03BE, 0x03BF,
+ 0x03C0, 0x03C1, 0x03C2, 0x03C3, 0x03C4,
+ 0x03C5, 0x03C6, 0x03C7, 0x03C8, 0x03C9,
+ 0x03D1, 0x03D5, 0x03D6, 0x03F0, 0x03F1,
+ 0x03F4, 0x03F5
+ },
+}
+
+mathematics.gaps = allocate {
[0x1D455] = 0x0210E, -- ℎ h
[0x1D49D] = 0x0212C, -- ℬ script B
[0x1D4A0] = 0x02130, -- ℰ script E
@@ -112,9 +159,10 @@ registerotffeature {
-- following approach permits easier remapping of a-z, A-Z and 0-9 to
-- fallbacks; symbols is currently mostly greek
-local function todigit(n) local t = { } for i=0, 9 do t[0x00030+i] = n+i end return t end
-local function toupper(n) local t = { } for i=0,25 do t[0x00041+i] = n+i end return t end
-local function tolower(n) local t = { } for i=0,25 do t[0x00061+i] = n+i end return t end
+local function todigit (n) local t = { } for i=0, 9 do t[0x00030+i] = n+i end return t end
+local function toupper (n) local t = { } for i=0,25 do t[0x00041+i] = n+i end return t end
+local function tolower (n) local t = { } for i=0,25 do t[0x00061+i] = n+i end return t end
+local function tovector(t) return t end
local regular_tf = {
digits = todigit(0x00030),
@@ -138,12 +186,12 @@ local regular_tf = {
},
symbols = {
[0x2202]=0x2202, [0x2207]=0x2207,
- [0x0027]=0x2032, -- prime
+ [0x0027]=0x2032, -- prime
},
}
local regular_it = {
- digits = regular_tf.digits,
+ digits = tovector(regular_tf.digits),
ucletters = toupper(0x1D434),
lcletters = { -- H
[0x00061]=0x1D44E, [0x00062]=0x1D44F, [0x00063]=0x1D450, [0x00064]=0x1D451, [0x00065]=0x1D452,
@@ -202,7 +250,7 @@ local regular_bf= {
}
local regular_bi = {
- digits = regular_bf.digits,
+ digits = tovector(regular_bf.digits),
ucletters = toupper(0x1D468),
lcletters = tolower(0x1D482),
ucgreek = {
@@ -238,18 +286,18 @@ local sansserif_tf = {
digits = todigit(0x1D7E2),
ucletters = toupper(0x1D5A0),
lcletters = tolower(0x1D5BA),
- lcgreek = regular_tf.lcgreek,
- ucgreek = regular_tf.ucgreek,
- symbols = regular_tf.symbols,
+ lcgreek = tovector(regular_tf.lcgreek),
+ ucgreek = tovector(regular_tf.ucgreek),
+ symbols = tovector(regular_tf.symbols),
}
local sansserif_it = {
- digits = regular_tf.digits,
+ digits = tovector(regular_tf.digits),
ucletters = toupper(0x1D608),
lcletters = tolower(0x1D622),
- lcgreek = regular_tf.lcgreek,
- ucgreek = regular_tf.ucgreek,
- symbols = regular_tf.symbols,
+ lcgreek = tovector(regular_tf.lcgreek),
+ ucgreek = tovector(regular_tf.ucgreek),
+ symbols = tovector(regular_tf.symbols),
}
local sansserif_bf = {
@@ -279,7 +327,7 @@ local sansserif_bf = {
}
local sansserif_bi = {
- digits = sansserif_bf.digits,
+ digits = tovector(sansserif_bf.digits),
ucletters = toupper(0x1D63C),
lcletters = tolower(0x1D656),
ucgreek = {
@@ -315,16 +363,20 @@ local monospaced_tf = {
digits = todigit(0x1D7F6),
ucletters = toupper(0x1D670),
lcletters = tolower(0x1D68A),
- lcgreek = sansserif_tf.lcgreek,
- ucgreek = sansserif_tf.ucgreek,
- symbols = sansserif_tf.symbols,
+ lcgreek = tovector(sansserif_tf.lcgreek),
+ ucgreek = tovector(sansserif_tf.ucgreek),
+ symbols = tovector(sansserif_tf.symbols),
}
+local monospaced_it = tovector(sansserif_it)
+local monospaced_bf = tovector(sansserif_bf)
+local monospaced_bi = tovector(sansserif_bi)
+
local monospaced = {
tf = monospaced_tf,
- it = sansserif_tf,
- bf = sansserif_tf,
- bi = sansserif_bf,
+ it = monospaced_tf,
+ bf = monospaced_tf,
+ bi = monospaced_bf,
}
local blackboard_tf = {
@@ -362,7 +414,7 @@ local blackboard = {
}
local fraktur_tf= {
- digits = regular_tf.digits,
+ digits = tovector(regular_tf.digits),
ucletters = { -- C H I R Z
[0x00041]=0x1D504, [0x00042]=0x1D505, [0x00043]=0x0212D, [0x00044]=0x1D507, [0x00045]=0x1D508,
[0x00046]=0x1D509, [0x00047]=0x1D50A, [0x00048]=0x0210C, [0x00049]=0x02111, [0x0004A]=0x1D50D,
@@ -372,18 +424,18 @@ local fraktur_tf= {
[0x0005A]=0x02128,
},
lcletters = tolower(0x1D51E),
- lcgreek = regular_tf.lcgreek,
- ucgreek = regular_tf.ucgreek,
- symbols = regular_tf.symbols,
+ lcgreek = tovector(regular_tf.lcgreek),
+ ucgreek = tovector(regular_tf.ucgreek),
+ symbols = tovector(regular_tf.symbols),
}
local fraktur_bf = {
- digits = regular_bf.digits,
+ digits = tovector(regular_bf.digits),
ucletters = toupper(0x1D56C),
lcletters = tolower(0x1D586),
- lcgreek = regular_bf.lcgreek,
- ucgreek = regular_bf.ucgreek,
- symbols = regular_bf.symbols,
+ lcgreek = tovector(regular_bf.lcgreek),
+ ucgreek = tovector(regular_bf.ucgreek),
+ symbols = tovector(regular_bf.symbols),
}
local fraktur = { -- ok
@@ -394,7 +446,7 @@ local fraktur = { -- ok
}
local script_tf = {
- digits = regular_tf.digits,
+ digits = tovector(regular_tf.digits),
ucletters = { -- B E F H I L M R -- P 2118
[0x00041]=0x1D49C, [0x00042]=0x0212C, [0x00043]=0x1D49E, [0x00044]=0x1D49F, [0x00045]=0x02130,
[0x00046]=0x02131, [0x00047]=0x1D4A2, [0x00048]=0x0210B, [0x00049]=0x02110, [0x0004A]=0x1D4A5,
@@ -411,18 +463,18 @@ local script_tf = {
[0x00075]=0x1D4CA, [0x00076]=0x1D4CB, [0x00077]=0x1D4CC, [0x00078]=0x1D4CD, [0x00079]=0x1D4CE,
[0x0007A]=0x1D4CF,
},
- lcgreek = regular_tf.lcgreek,
- ucgreek = regular_tf.ucgreek,
- symbols = regular_tf.symbols,
+ lcgreek = tovector(regular_tf.lcgreek),
+ ucgreek = tovector(regular_tf.ucgreek),
+ symbols = tovector(regular_tf.symbols),
}
local script_bf = {
- digits = regular_bf.digits,
+ digits = tovector(regular_bf.digits),
ucletters = toupper(0x1D4D0),
lcletters = tolower(0x1D4EA),
- lcgreek = regular_bf.lcgreek,
- ucgreek = regular_bf.ucgreek,
- symbols = regular_bf.symbols,
+ lcgreek = tovector(regular_bf.lcgreek),
+ ucgreek = tovector(regular_bf.ucgreek),
+ symbols = tovector(regular_bf.symbols),
}
local script = {
@@ -441,80 +493,83 @@ local alphabets = allocate {
script = script,
}
-mathematics.alphabets = alphabets
+alphabets.tt = tovector(monospaced)
+alphabets.ss = tovector(sansserif)
+alphabets.rm = tovector(regular)
+alphabets.bb = tovector(blackboard)
+alphabets.fr = tovector(fraktur)
+alphabets.sr = tovector(script)
-local boldmap = { }
-
-local function remap(tf,bf)
- for _, alphabet in next, alphabets do
- local tfdata = alphabet[tf]
- local bfdata = alphabet[bf]
- if tfdata then
- for k, tfd in next, tfdata do
- if type(tfd) == "table" then
- local bfd = bfdata[k]
- if bfd then
- for n, u in next, tfd do
- local bn = bfd[n]
- if bn then
- boldmap[u] = bn
- end
- end
- end
- end
- end
- end
- end
-end
+monospaced.normal = tovector(monospaced_tf)
+monospaced.italic = tovector(monospaced_it)
+monospaced.bold = tovector(monospaced_bf)
+monospaced.bolditalic = tovector(monospaced_bi)
+
+sansserif.normal = tovector(sansserif_tf)
+sansserif.italic = tovector(sansserif_it)
+sansserif.bold = tovector(sansserif_bf)
+sansserif.bolditalic = tovector(sansserif_bi)
+
+regular.normal = tovector(regular_tf)
+regular.italic = tovector(regular_it)
+regular.bold = tovector(regular_bf)
+regular.bolditalic = tovector(regular_bi)
-remap("tf","bf")
-remap("it","bi")
+alphabets.serif = tovector(regular)
+alphabets.type = tovector(monospaced)
+alphabets.teletype = tovector(monospaced)
+
+mathematics.alphabets = alphabets
-mathematics.boldmap = boldmap
+local mathremap = allocate { }
+mathematics.mapremap = mathremap
-local mathremap = allocate { }
+local boldmap = allocate { }
+mathematics.boldmap = boldmap
+
+-- all math (a bit of redundancy here)
for alphabet, styles in next, alphabets do -- per 9/6/2011 we also have attr for missing
for style, data in next, styles do
-- let's keep the long names (for tracing)
local n = #mathremap + 1
- data.attribute = n
- data.alphabet = alphabet
- data.style = style
- mathremap[n] = data
+ local d = {
+ attribute = n,
+ alphabet = alphabet,
+ style = style,
+ }
+ styles[style] = d
+ setmetatableindex(d,data) -- we could use an alphadata table
+ mathremap[n] = d
end
end
-mathematics.mapremap = mathremap
-
--- beware, these are shared tables (no problem since they're not
--- in unicode)
-
-alphabets.tt = monospaced
-alphabets.ss = sansserif
-alphabets.rm = regular
-alphabets.bb = blackboard
-alphabets.fr = fraktur
-alphabets.sr = script
-
-alphabets.serif = regular
-alphabets.type = monospaced
-alphabets.teletype = monospaced
-
-regular.normal = regular_tf
-regular.italic = regular_it
-regular.bold = regular_bf
-regular.bolditalic = regular_bi
+-- bold math
+
+local function remapbold(tf,bf)
+ local styles = mathematics.styles
+ local sets = mathematics.sets
+ for i=1,#styles do
+ for j=1,#sets do
+ local one = styles[i]
+ local two = sets[j]
+ local a = alphabets[one]
+ local tf = a[tf][two]
+ local bf = a[bf][two]
+ if tf and bf then
+ for k, v in next, tf do
+ boldmap[v] = bf[k]
+ end
+ end
+ end
+ end
+end
-sansserif.normal = sansserif_tf
-sansserif.italic = sansserif_it
-sansserif.bold = sansserif_bf
-sansserif.bolditalic = sansserif_bi
+remapbold("tf","bf")
+remapbold("it","bi")
-monospaced.normal = monospaced_tf
-monospaced.italic = monospaced_it
-monospaced.bold = monospaced_bf
-monospaced.bolditalic = monospaced_bi
+-- table.save("e:/tmp/a.lua",alphabets)
+-- table.save("e:/tmp/b.lua",boldmap)
function mathematics.tostyle(attribute)
local r = mathremap[attribute]
@@ -564,6 +619,39 @@ function mathematics.syncname(alphabet)
texsetattribute(mathalphabet,data and data.attribute or texattribute[mathalphabet])
end
+implement {
+ name = "setmathattribute",
+ arguments = { "string", "string" },
+ actions = function(alphabet,style)
+ local data = alphabets[alphabet] or regular
+ data = data[style] or data.tf
+ texsetattribute(mathalphabet,data and data.attribute or texattribute[mathalphabet])
+ end
+}
+
+implement {
+ name = "setmathstyle",
+ arguments = "string",
+ actions = function(style)
+ local r = mathremap[texgetattribute(mathalphabet)]
+ local alphabet = r and r.alphabet or "regular"
+ local data = alphabets[alphabet][style]
+ texsetattribute(mathalphabet,data and data.attribute or texattribute[mathalphabet])
+ end
+}
+
+implement {
+ name = "setmathalphabet",
+ arguments = "string",
+ actions = function(alphabet)
+ -- local r = mathremap[mathalphabet]
+ local r = mathremap[texgetattribute(mathalphabet)]
+ local style = r and r.style or "tf"
+ local data = alphabets[alphabet][style]
+ texsetattribute(mathalphabet,data and data.attribute or texattribute[mathalphabet])
+ end
+}
+
local islcgreek = regular_tf.lcgreek
local isucgreek = regular_tf.ucgreek
local issygreek = regular_tf.symbols
@@ -689,9 +777,3 @@ function mathematics.addfallbacks(main)
checkedcopy(characters,regular.bi.ucgreek,regular.it.ucgreek)
checkedcopy(characters,regular.bi.lcgreek,regular.it.lcgreek)
end
-
--- interface
-
-commands.setmathattribute = mathematics.syncboth
-commands.setmathalphabet = mathematics.syncname
-commands.setmathstyle = mathematics.syncstyle
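
The remapbold function added above is easy to misread because the parameters tf and bf shadow the per-set vectors; what it computes is a map from each regular-target codepoint to its bold-target codepoint, built per style and per character set. The following is a minimal standalone sketch with hypothetical two-entry vectors (the real tables live in math-map.lua); it is an illustration, not the module's code.

    -- Sketch only: tiny hypothetical vectors stand in for the real alphabets table.
    local styles = { "regular" }                 -- mathematics.styles in the real code
    local sets   = { "ucletters" }               -- mathematics.sets in the real code

    local alphabets = {
        regular = {
            tf = { ucletters = { [0x41] = 0x00041, [0x42] = 0x00042 } }, -- upright A, B
            bf = { ucletters = { [0x41] = 0x1D400, [0x42] = 0x1D401 } }, -- bold A, B
            it = { ucletters = { [0x41] = 0x1D434, [0x42] = 0x1D435 } }, -- italic A, B
            bi = { ucletters = { [0x41] = 0x1D468, [0x42] = 0x1D469 } }, -- bold italic A, B
        },
    }

    local boldmap = { }

    local function remapbold(one,two) -- called as ("tf","bf") and ("it","bi")
        for i=1,#styles do
            for j=1,#sets do
                local a  = alphabets[styles[i]]
                local s  = sets[j]
                local tf = a[one] and a[one][s]
                local bf = a[two] and a[two][s]
                if tf and bf then
                    for k, v in next, tf do
                        boldmap[v] = bf[k] -- regular target -> bold target
                    end
                end
            end
        end
    end

    remapbold("tf","bf")
    remapbold("it","bi")

    -- boldmap[0x00041] == 0x1D400 (A -> bold A)
    -- boldmap[0x1D434] == 0x1D468 (italic A -> bold italic A)
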
diff --git a/tex/context/base/math-noa.lua b/tex/context/base/math-noa.lua
index f3987c12f..cdbbe36c3 100644
--- a/tex/context/base/math-noa.lua
+++ b/tex/context/base/math-noa.lua
@@ -28,44 +28,67 @@ local otf = fonts.handlers.otf
local otffeatures = fonts.constructors.newfeatures("otf")
local registerotffeature = otffeatures.register
-local trace_remapping = false trackers.register("math.remapping", function(v) trace_remapping = v end)
-local trace_processing = false trackers.register("math.processing", function(v) trace_processing = v end)
-local trace_analyzing = false trackers.register("math.analyzing", function(v) trace_analyzing = v end)
-local trace_normalizing = false trackers.register("math.normalizing", function(v) trace_normalizing = v end)
-local trace_collapsing = false trackers.register("math.collapsing", function(v) trace_collapsing = v end)
-local trace_goodies = false trackers.register("math.goodies", function(v) trace_goodies = v end)
-local trace_variants = false trackers.register("math.variants", function(v) trace_variants = v end)
-local trace_alternates = false trackers.register("math.alternates", function(v) trace_alternates = v end)
-local trace_italics = false trackers.register("math.italics", function(v) trace_italics = v end)
-local trace_families = false trackers.register("math.families", function(v) trace_families = v end)
-
-local check_coverage = true directives.register("math.checkcoverage", function(v) check_coverage = v end)
-
-local report_processing = logs.reporter("mathematics","processing")
-local report_remapping = logs.reporter("mathematics","remapping")
-local report_normalizing = logs.reporter("mathematics","normalizing")
-local report_collapsing = logs.reporter("mathematics","collapsing")
-local report_goodies = logs.reporter("mathematics","goodies")
-local report_variants = logs.reporter("mathematics","variants")
-local report_alternates = logs.reporter("mathematics","alternates")
-local report_italics = logs.reporter("mathematics","italics")
-local report_families = logs.reporter("mathematics","families")
-
-local a_mathrendering = attributes.private("mathrendering")
-local a_exportstatus = attributes.private("exportstatus")
-
-local mlist_to_hlist = node.mlist_to_hlist
-local font_of_family = node.family_font
-local insert_node_after = node.insert_after
-local insert_node_before = node.insert_before
-local free_node = node.free
-local new_node = node.new -- todo: pool: math_noad math_sub
-local copy_node = node.copy
+local privateattribute = attributes.private
+local registertracker = trackers.register
+local registerdirective = directives.register
+local logreporter = logs.reporter
+
+local trace_remapping = false registertracker("math.remapping", function(v) trace_remapping = v end)
+local trace_processing = false registertracker("math.processing", function(v) trace_processing = v end)
+local trace_analyzing = false registertracker("math.analyzing", function(v) trace_analyzing = v end)
+local trace_normalizing = false registertracker("math.normalizing", function(v) trace_normalizing = v end)
+local trace_collapsing = false registertracker("math.collapsing", function(v) trace_collapsing = v end)
+local trace_patching = false registertracker("math.patching", function(v) trace_patching = v end)
+local trace_goodies = false registertracker("math.goodies", function(v) trace_goodies = v end)
+local trace_variants = false registertracker("math.variants", function(v) trace_variants = v end)
+local trace_alternates = false registertracker("math.alternates", function(v) trace_alternates = v end)
+local trace_italics = false registertracker("math.italics", function(v) trace_italics = v end)
+local trace_families = false registertracker("math.families", function(v) trace_families = v end)
+
+local check_coverage = true registerdirective("math.checkcoverage", function(v) check_coverage = v end)
+
+local report_processing = logreporter("mathematics","processing")
+local report_remapping = logreporter("mathematics","remapping")
+local report_normalizing = logreporter("mathematics","normalizing")
+local report_collapsing = logreporter("mathematics","collapsing")
+local report_patching = logreporter("mathematics","patching")
+local report_goodies = logreporter("mathematics","goodies")
+local report_variants = logreporter("mathematics","variants")
+local report_alternates = logreporter("mathematics","alternates")
+local report_italics = logreporter("mathematics","italics")
+local report_families = logreporter("mathematics","families")
+
+local a_mathrendering = privateattribute("mathrendering")
+local a_exportstatus = privateattribute("exportstatus")
+
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+local tonut = nuts.tonut
+local nutstring = nuts.tostring
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+local free_node = nuts.free
+local new_node = nuts.new -- todo: pool: math_noad math_sub
+local copy_node = nuts.copy
+
+local mlist_to_hlist = nodes.mlist_to_hlist
-local new_kern = nodes.pool.kern
-local new_rule = nodes.pool.rule
+local font_of_family = node.family_font
-local topoints = number.points
+local new_kern = nodepool.kern
+local new_rule = nodepool.rule
local fonthashes = fonts.hashes
local fontdata = fonthashes.identifiers
@@ -126,23 +149,23 @@ local function process(start,what,n,parent)
if n then n = n + 1 else n = 0 end
local prev = nil
while start do
- local id = start.id
+ local id = getid(start)
if trace_processing then
if id == math_noad then
- report_processing("%w%S, class %a",n*2,start,noadcodes[start.subtype])
+ report_processing("%w%S, class %a",n*2,nutstring(start),noadcodes[getsubtype(start)])
elseif id == math_char then
- local char = start.char
- local fam = start.fam
+ local char = getchar(start)
+ local fam = getfield(start,"fam")
local font = font_of_family(fam)
- report_processing("%w%S, family %a, font %a, char %a, shape %c",n*2,start,fam,font,char,char)
+ report_processing("%w%S, family %a, font %a, char %a, shape %c",n*2,nutstring(start),fam,font,char,char)
else
- report_processing("%w%S",n*2,start)
+ report_processing("%w%S",n*2,nutstring(start))
end
end
local proc = what[id]
if proc then
-- report_processing("start processing")
- local done, newstart = proc(start,what,n,parent) -- prev is bugged: or start.prev
+ local done, newstart = proc(start,what,n,parent) -- prev is bugged: or getprev(start)
if newstart then
start = newstart
-- report_processing("stop processing (new start)")
@@ -154,60 +177,102 @@ local function process(start,what,n,parent)
elseif id == math_noad then
if prev then
-- we have no proper prev in math nodes yet
- start.prev = prev
+ setfield(start,"prev",prev)
end
- local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
- noad = start.sup if noad then process(noad,what,n,start) end -- list
- noad = start.sub if noad then process(noad,what,n,start) end -- list
+
+ local noad = getfield(start,"nucleus") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"sup") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"sub") if noad then process(noad,what,n,start) end -- list
elseif id == math_box or id == math_sub then
- -- local noad = start.list if noad then process(noad,what,n,start) end -- list
- local noad = start.head if noad then process(noad,what,n,start) end -- list
+ local noad = getfield(start,"list") if noad then process(noad,what,n,start) end -- list (not getlist !)
elseif id == math_fraction then
- local noad = start.num if noad then process(noad,what,n,start) end -- list
- noad = start.denom if noad then process(noad,what,n,start) end -- list
- noad = start.left if noad then process(noad,what,n,start) end -- delimiter
- noad = start.right if noad then process(noad,what,n,start) end -- delimiter
+ local noad = getfield(start,"num") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"denom") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"left") if noad then process(noad,what,n,start) end -- delimiter
+ noad = getfield(start,"right") if noad then process(noad,what,n,start) end -- delimiter
elseif id == math_choice then
- local noad = start.display if noad then process(noad,what,n,start) end -- list
- noad = start.text if noad then process(noad,what,n,start) end -- list
- noad = start.script if noad then process(noad,what,n,start) end -- list
- noad = start.scriptscript if noad then process(noad,what,n,start) end -- list
+ local noad = getfield(start,"display") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"text") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"script") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"scriptscript") if noad then process(noad,what,n,start) end -- list
elseif id == math_fence then
- local noad = start.delim if noad then process(noad,what,n,start) end -- delimiter
+ local noad = getfield(start,"delim") if noad then process(noad,what,n,start) end -- delimiter
elseif id == math_radical then
- local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
- noad = start.sup if noad then process(noad,what,n,start) end -- list
- noad = start.sub if noad then process(noad,what,n,start) end -- list
- noad = start.left if noad then process(noad,what,n,start) end -- delimiter
- noad = start.degree if noad then process(noad,what,n,start) end -- list
+ local noad = getfield(start,"nucleus") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"sup") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"sub") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"left") if noad then process(noad,what,n,start) end -- delimiter
+ noad = getfield(start,"degree") if noad then process(noad,what,n,start) end -- list
elseif id == math_accent then
- local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
- noad = start.sup if noad then process(noad,what,n,start) end -- list
- noad = start.sub if noad then process(noad,what,n,start) end -- list
- noad = start.accent if noad then process(noad,what,n,start) end -- list
- noad = start.bot_accent if noad then process(noad,what,n,start) end -- list
+ local noad = getfield(start,"nucleus") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"sup") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"sub") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"accent") if noad then process(noad,what,n,start) end -- list
+ noad = getfield(start,"bot_accent") if noad then process(noad,what,n,start) end -- list
elseif id == math_style then
-- has a next
else
-- glue, penalty, etc
end
prev = start
- start = start.next
+ start = getnext(start)
end
end
local function processnoads(head,actions,banner)
if trace_processing then
report_processing("start %a",banner)
- process(head,actions)
+ process(tonut(head),actions)
report_processing("stop %a",banner)
else
- process(head,actions)
+ process(tonut(head),actions)
end
end
noads.process = processnoads
+--
+
+local unknowns = { }
+local checked = { } -- simple case
+local tracked = false trackers.register("fonts.missing", function(v) tracked = v end)
+local cached = table.setmetatableindex("table") -- complex case
+
+local function errorchar(font,char)
+ local done = unknowns[char]
+ if done then
+ unknowns[char] = done + 1
+ else
+ unknowns[char] = 1
+ end
+ if tracked then
+ -- slower as we check each font too, and we always replace because math has
+ -- more demands than text
+ local fake = cached[font][char]
+ if fake then
+ return fake
+ else
+ local kind, fake = fonts.checkers.placeholder(font,char)
+ if not fake or kind ~= "char" then
+ fake = 0x3F
+ end
+ cached[font][char] = fake
+ return fake
+ end
+ else
+ -- only simple checking; we report at the end so one can still take
+ -- action ... we may miss a few checks but that is ok as there is
+ -- at least one reported
+ if not checked[char] then
+ if trace_normalizing then
+ report_normalizing("character %C is not available",char)
+ end
+ checked[char] = true
+ end
+ return 0x3F
+ end
+end
+
-- experiment (when not present fall back to fam 0) -- needs documentation
-- 0-2 regular
@@ -218,7 +283,7 @@ noads.process = processnoads
-- might as well do this
local families = { }
-local a_mathfamily = attributes.private("mathfamily")
+local a_mathfamily = privateattribute("mathfamily")
local boldmap = mathematics.boldmap
local familymap = { [0] =
@@ -234,36 +299,36 @@ local familymap = { [0] =
}
families[math_char] = function(pointer)
- if pointer.fam == 0 then
- local a = pointer[a_mathfamily]
+ if getfield(pointer,"fam") == 0 then
+ local a = getattr(pointer,a_mathfamily)
if a and a > 0 then
- pointer[a_mathfamily] = 0
+ setattr(pointer,a_mathfamily,0)
if a > 5 then
- local char = pointer.char
+ local char = getchar(pointer)
local bold = boldmap[char]
local newa = a - 3
if not bold then
if trace_families then
report_families("no bold replacement for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa])
end
- pointer.fam = newa
- elseif not fontcharacters[font_of_family(newa)][bold] then
+ setfield(pointer,"fam",newa)
+ elseif not fontcharacters[font_of_family(newa)][bold] then
if trace_families then
report_families("no bold character for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa])
end
if newa > 3 then
- pointer.fam = newa - 3
+ setfield(pointer,"fam",newa-3)
end
else
- pointer[a_exportstatus] = char
- pointer.char = bold
+ setattr(pointer,a_exportstatus,char)
+ setfield(pointer,"char",bold)
if trace_families then
report_families("replacing %C by bold %C, family %s with remap %s becomes %s with remap %s",char,bold,a,familymap[a],newa,familymap[newa])
end
- pointer.fam = newa
+ setfield(pointer,"fam",newa)
end
else
- local char = pointer.char
+ local char = getchar(pointer)
if not fontcharacters[font_of_family(a)][char] then
if trace_families then
report_families("no bold replacement for %C",char)
@@ -272,7 +337,7 @@ families[math_char] = function(pointer)
if trace_families then
report_families("family of %C becomes %s with remap %s",char,a,familymap[a])
end
- pointer.fam = a
+ setfield(pointer,"fam",a)
end
end
end
@@ -280,31 +345,31 @@ families[math_char] = function(pointer)
end
families[math_delim] = function(pointer)
- if pointer.small_fam == 0 then
- local a = pointer[a_mathfamily]
+ if getfield(pointer,"small_fam") == 0 then
+ local a = getattr(pointer,a_mathfamily)
if a and a > 0 then
- pointer[a_mathfamily] = 0
+ setattr(pointer,a_mathfamily,0)
if a > 5 then
-- no bold delimiters in unicode
a = a - 3
end
- local char = pointer.small_char
+ local char = getfield(pointer,"small_char")
local okay = fontcharacters[font_of_family(a)][char]
if okay then
- pointer.small_fam = a
+ setfield(pointer,"small_fam",a)
elseif a > 2 then
- pointer.small_fam = a - 3
+ setfield(pointer,"small_fam",a-3)
end
- local char = pointer.large_char
+ local char = getfield(pointer,"large_char")
local okay = fontcharacters[font_of_family(a)][char]
if okay then
- pointer.large_fam = a
+ setfield(pointer,"large_fam",a)
elseif a > 2 then
- pointer.large_fam = a - 3
+ setfield(pointer,"large_fam",a-3)
end
else
- pointer.small_fam = 0
- pointer.large_fam = 0
+ setfield(pointer,"small_fam",0)
+ setfield(pointer,"large_fam",0)
end
end
end
@@ -318,8 +383,8 @@ end
-- character remapping
-local a_mathalphabet = attributes.private("mathalphabet")
-local a_mathgreek = attributes.private("mathgreek")
+local a_mathalphabet = privateattribute("mathalphabet")
+local a_mathgreek = privateattribute("mathgreek")
processors.relocate = { }
@@ -332,8 +397,8 @@ local fallbackstyleattr = mathematics.fallbackstyleattr
local setnodecolor = nodes.tracers.colors.set
local function checked(pointer)
- local char = pointer.char
- local fam = pointer.fam
+ local char = getchar(pointer)
+ local fam = getfield(pointer,"fam")
local id = font_of_family(fam)
local tc = fontcharacters[id]
if not tc[char] then
@@ -346,37 +411,37 @@ local function checked(pointer)
if trace_analyzing then
setnodecolor(pointer,"font:isol")
end
- pointer[a_exportstatus] = char -- testcase: exponentiale
- pointer.char = newchar
+ setattr(pointer,a_exportstatus,char) -- testcase: exponentiale
+ setfield(pointer,"char",newchar)
return true
end
end
end
processors.relocate[math_char] = function(pointer)
- local g = pointer[a_mathgreek] or 0
- local a = pointer[a_mathalphabet] or 0
+ local g = getattr(pointer,a_mathgreek) or 0
+ local a = getattr(pointer,a_mathalphabet) or 0
+ local char = getchar(pointer)
+ local fam = getfield(pointer,"fam")
+ local font = font_of_family(fam)
+ local characters = fontcharacters[font]
if a > 0 or g > 0 then
if a > 0 then
- pointer[a_mathgreek] = 0
+ setattr(pointer,a_mathgreek,0)
end
if g > 0 then
- pointer[a_mathalphabet] = 0
+ setattr(pointer,a_mathalphabet,0)
end
- local char = pointer.char
local newchar = remapalphabets(char,a,g)
if newchar then
- local fam = pointer.fam
- local id = font_of_family(fam)
- local characters = fontcharacters[id]
if characters[newchar] then
if trace_remapping then
- report_remap("char",id,char,newchar)
+ report_remap("char",font,char,newchar)
end
if trace_analyzing then
setnodecolor(pointer,"font:isol")
end
- pointer.char = newchar
+ setfield(pointer,"char",newchar)
return true
else
local fallback = fallbackstyleattr(a)
@@ -385,25 +450,28 @@ processors.relocate[math_char] = function(pointer)
if newchar then
if characters[newchar] then
if trace_remapping then
- report_remap("char",id,char,newchar," (fallback remapping used)")
+ report_remap("char",font,char,newchar," (fallback remapping used)")
end
if trace_analyzing then
setnodecolor(pointer,"font:isol")
end
- pointer.char = newchar
+ setfield(pointer,"char",newchar)
return true
elseif trace_remapping then
- report_remap("char",id,char,newchar," fails (no fallback character)")
+ report_remap("char",font,char,newchar," fails (no fallback character)")
end
elseif trace_remapping then
- report_remap("char",id,char,newchar," fails (no fallback remap character)")
+ report_remap("char",font,char,newchar," fails (no fallback remap character)")
end
elseif trace_remapping then
- report_remap("char",id,char,newchar," fails (no fallback style)")
+ report_remap("char",font,char,newchar," fails (no fallback style)")
end
end
end
end
+ if not characters[char] then
+ setfield(pointer,"char",errorchar(font,char))
+ end
if trace_analyzing then
setnodecolor(pointer,"font:medi")
end
@@ -436,19 +504,19 @@ processors.render = { }
local rendersets = mathematics.renderings.numbers or { } -- store
processors.render[math_char] = function(pointer)
- local attr = pointer[a_mathrendering]
+ local attr = getattr(pointer,a_mathrendering)
if attr and attr > 0 then
- local char = pointer.char
+ local char = getchar(pointer)
local renderset = rendersets[attr]
if renderset then
local newchar = renderset[char]
if newchar then
- local fam = pointer.fam
- local id = font_of_family(fam)
- local characters = fontcharacters[id]
+ local fam = getfield(pointer,"fam")
+ local font = font_of_family(fam)
+ local characters = fontcharacters[font]
if characters and characters[newchar] then
- pointer.char = newchar
- pointer[a_exportstatus] = char
+ setfield(pointer,"char",newchar)
+ setattr(pointer,a_exportstatus,char)
end
end
end
@@ -470,24 +538,24 @@ end
-- todo: just replace the character by an ord noad
-- and remove the right delimiter as well
-local mathsize = attributes.private("mathsize")
+local mathsize = privateattribute("mathsize")
local resize = { } processors.resize = resize
resize[math_fence] = function(pointer)
- local subtype = pointer.subtype
+ local subtype = getsubtype(pointer)
if subtype == left_fence_code or subtype == right_fence_code then
- local a = pointer[mathsize]
+ local a = getattr(pointer,mathsize)
if a and a > 0 then
local method, size = div(a,100), a % 100
- pointer[mathsize] = 0
- local delimiter = pointer.delim
- local chr = delimiter.small_char
+ setattr(pointer,mathsize,0)
+ local delimiter = getfield(pointer,"delim")
+ local chr = getfield(delimiter,"small_char")
if chr > 0 then
- local fam = delimiter.small_fam
+ local fam = getfield(delimiter,"small_fam")
local id = font_of_family(fam)
if id > 0 then
- delimiter.small_char = mathematics.big(fontdata[id],chr,size,method)
+ setfield(delimiter,"small_char",mathematics.big(fontdata[id],chr,size,method))
end
end
end
@@ -499,148 +567,23 @@ function handlers.resize(head,style,penalties)
return true
end
-
-local collapse = { } processors.collapse = collapse
-
-local mathpairs = characters.mathpairs
-
-mathpairs[0x2032] = { [0x2032] = 0x2033, [0x2033] = 0x2034, [0x2034] = 0x2057 } -- (prime,prime) (prime,doubleprime) (prime,tripleprime)
-mathpairs[0x2033] = { [0x2032] = 0x2034, [0x2033] = 0x2057 } -- (doubleprime,prime) (doubleprime,doubleprime)
-mathpairs[0x2034] = { [0x2032] = 0x2057 } -- (tripleprime,prime)
-
-mathpairs[0x2035] = { [0x2035] = 0x2036, [0x2036] = 0x2037 } -- (reversedprime,reversedprime) (reversedprime,doublereversedprime)
-mathpairs[0x2036] = { [0x2035] = 0x2037 } -- (doublereversedprime,reversedprime)
-
-mathpairs[0x222B] = { [0x222B] = 0x222C, [0x222C] = 0x222D }
-mathpairs[0x222C] = { [0x222B] = 0x222D }
-
-mathpairs[0x007C] = { [0x007C] = 0x2016, [0x2016] = 0x2980 } -- bar+bar=double bar+double=triple
-mathpairs[0x2016] = { [0x007C] = 0x2980 } -- double+bar=triple
-
-local movesub = {
- -- primes
- [0x2032] = 0xFE932,
- [0x2033] = 0xFE933,
- [0x2034] = 0xFE934,
- [0x2057] = 0xFE957,
- -- reverse primes
- [0x2035] = 0xFE935,
- [0x2036] = 0xFE936,
- [0x2037] = 0xFE937,
-}
-
-local validpair = {
- [noad_rel] = true,
- [noad_ord] = true,
- [noad_opdisplaylimits] = true,
- [noad_oplimits] = true,
- [noad_opnolimits] = true,
-}
-
-local function movesubscript(parent,current_nucleus,current_char)
- local prev = parent.prev
- if prev and prev.id == math_noad then
- if not prev.sup and not prev.sub then
- current_nucleus.char = movesub[current_char or current_nucleus.char]
- -- {f} {'}_n => f_n^'
- local nucleus = parent.nucleus
- local sub = parent.sub
- local sup = parent.sup
- prev.sup = nucleus
- prev.sub = sub
- local dummy = copy_node(nucleus)
- dummy.char = 0
- parent.nucleus = dummy
- parent.sub = nil
- if trace_collapsing then
- report_collapsing("fixing subscript")
- end
- end
- end
-end
-
-local function collapsepair(pointer,what,n,parent,nested) -- todo: switch to turn in on and off
- if parent then
- if validpair[parent.subtype] then
- local current_nucleus = parent.nucleus
- if current_nucleus.id == math_char then
- local current_char = current_nucleus.char
- if not parent.sub and not parent.sup then
- local mathpair = mathpairs[current_char]
- if mathpair then
- local next_noad = parent.next
- if next_noad and next_noad.id == math_noad then
- if validpair[next_noad.subtype] then
- local next_nucleus = next_noad.nucleus
- if next_nucleus.id == math_char then
- local next_char = next_nucleus.char
- local newchar = mathpair[next_char]
- if newchar then
- local fam = current_nucleus.fam
- local id = font_of_family(fam)
- local characters = fontcharacters[id]
- if characters and characters[newchar] then
- if trace_collapsing then
- report_collapsing("%U + %U => %U",current_char,next_char,newchar)
- end
- current_nucleus.char = newchar
- local next_next_noad = next_noad.next
- if next_next_noad then
- parent.next = next_next_noad
- next_next_noad.prev = parent
- else
- parent.next = nil
- end
- parent.sup = next_noad.sup
- parent.sub = next_noad.sub
- next_noad.sup = nil
- next_noad.sub = nil
- free_node(next_noad)
- collapsepair(pointer,what,n,parent,true)
- if not nested and movesub[current_char] then
- movesubscript(parent,current_nucleus)
- end
- end
- end
- end
- end
- end
- elseif not nested and movesub[current_char] then
- movesubscript(parent,current_nucleus,current_char)
- end
- elseif not nested and movesub[current_char] then
- movesubscript(parent,current_nucleus,current_char)
- end
- end
- end
- end
-end
-
-collapse[math_char] = collapsepair
-
-function noads.handlers.collapse(head,style,penalties)
- processnoads(head,collapse,"collapse")
- return true
-end
-
-- normalize scripts
-local unscript = { } noads.processors.unscript = unscript
-
+local unscript = { } noads.processors.unscript = unscript
local superscripts = characters.superscripts
local subscripts = characters.subscripts
-
-local replaced = { }
+local fractions = characters.fractions
+local replaced = { }
local function replace(pointer,what,n,parent)
pointer = parent -- we're following the parent list (chars trigger this)
- local next = pointer.next
+ local next = getnext(pointer)
local start_super, stop_super, start_sub, stop_sub
local mode = "unset"
- while next and next.id == math_noad do
- local nextnucleus = next.nucleus
- if nextnucleus and nextnucleus.id == math_char and not next.sub and not next.sup then
- local char = nextnucleus.char
+ while next and getid(next) == math_noad do
+ local nextnucleus = getfield(next,"nucleus")
+ if nextnucleus and getid(nextnucleus) == math_char and not getfield(next,"sub") and not getfield(next,"sup") then
+ local char = getchar(nextnucleus)
local s = superscripts[char]
if s then
if not start_super then
@@ -650,8 +593,8 @@ local function replace(pointer,what,n,parent)
break
end
stop_super = next
- next = next.next
- nextnucleus.char = s
+ next = getnext(next)
+ setfield(nextnucleus,"char",s)
replaced[char] = (replaced[char] or 0) + 1
if trace_normalizing then
report_normalizing("superscript %C becomes %C",char,s)
@@ -666,8 +609,8 @@ local function replace(pointer,what,n,parent)
break
end
stop_sub = next
- next = next.next
- nextnucleus.char = s
+ next = getnext(next)
+ setfield(nextnucleus,"char",s)
replaced[char] = (replaced[char] or 0) + 1
if trace_normalizing then
report_normalizing("subscript %C becomes %C",char,s)
@@ -682,29 +625,29 @@ local function replace(pointer,what,n,parent)
end
if start_super then
if start_super == stop_super then
- pointer.sup = start_super.nucleus
+ setfield(pointer,"sup",getfield(start_super,"nucleus"))
else
local list = new_node(math_sub) -- todo attr
- list.head = start_super
- pointer.sup = list
+ setfield(list,"list",start_super)
+ setfield(pointer,"sup",list)
end
if mode == "super" then
- pointer.next = stop_super.next
+ setfield(pointer,"next",getnext(stop_super))
end
- stop_super.next = nil
+ setfield(stop_super,"next",nil)
end
if start_sub then
if start_sub == stop_sub then
- pointer.sub = start_sub.nucleus
+ setfield(pointer,"sub",getfield(start_sub,"nucleus"))
else
local list = new_node(math_sub) -- todo attr
- list.head = start_sub
- pointer.sub = list
+ setfield(list,"list",start_sub)
+ setfield(pointer,"sub",list)
end
if mode == "sub" then
- pointer.next = stop_sub.next
+ setfield(pointer,"next",getnext(stop_sub))
end
- stop_sub.next = nil
+ setfield(stop_sub,"next",nil)
end
-- we could return stop
end
@@ -713,18 +656,27 @@ unscript[math_char] = replace -- not noads as we need to recurse
function handlers.unscript(head,style,penalties)
processnoads(head,unscript,"unscript")
+-- processnoads(head,checkers,"checkers")
return true
end
-statistics.register("math script replacements", function()
- if next(replaced) then
+local function collected(list)
+ if list and next(list) then
local n, t = 0, { }
- for k, v in table.sortedpairs(replaced) do
+ for k, v in table.sortedpairs(list) do
n = n + v
t[#t+1] = formatters["%C"](k)
end
return formatters["% t (n=%s)"](t,n)
end
+end
+
+statistics.register("math script replacements", function()
+ return collected(replaced)
+end)
+
+statistics.register("unknown math characters", function()
+ return collected(unknowns)
end)
-- math alternates: (in xits lgf: $ABC$ $\cal ABC$ $\mathalternate{cal}\cal ABC$)
@@ -770,7 +722,7 @@ registerotffeature {
local getalternate = otf.getalternate
-local a_mathalternate = attributes.private("mathalternate")
+local a_mathalternate = privateattribute("mathalternate")
local alternate = { } -- processors.alternate = alternate
@@ -785,20 +737,20 @@ function mathematics.setalternate(fam,tag)
end
alternate[math_char] = function(pointer)
- local a = pointer[a_mathalternate]
+ local a = getattr(pointer,a_mathalternate)
if a and a > 0 then
- pointer[a_mathalternate] = 0
- local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata
+ setattr(pointer,a_mathalternate,0)
+ local tfmdata = fontdata[font_of_family(getfield(pointer,"fam"))] -- we can also have a famdata
local mathalternatesattributes = tfmdata.shared.mathalternatesattributes
if mathalternatesattributes then
local what = mathalternatesattributes[a]
- local alt = getalternate(tfmdata,pointer.char,what.feature,what.value)
+ local alt = getalternate(tfmdata,getchar(pointer),what.feature,what.value)
if alt then
if trace_alternates then
report_alternates("alternate %a, value %a, replacing glyph %U by glyph %U",
- tostring(what.feature),tostring(what.value),pointer.char,alt)
+ tostring(what.feature),tostring(what.value),getchar(pointer),alt)
end
- pointer.char = alt
+ setfield(pointer,"char",alt)
end
end
end
@@ -814,7 +766,12 @@ end
-- = we check for correction first because accessing nodes is slower
-- = the actual glyph is not that important (we can control it with numbers)
-local a_mathitalics = attributes.private("mathitalics")
+-- Italic correction in luatex math is a mess. There are all kinds of assumptions based on
+-- old fonts and new fonts. Eventually there should be a flag that can signal to ignore all
+-- those heuristics. We want to deal with it ourselves, also in the perspective of mixed math
+-- and text.
+
+local a_mathitalics = privateattribute("mathitalics")
local italics = { }
local default_factor = 1/20
@@ -882,21 +839,22 @@ local function getcorrection(method,font,char) -- -- or character.italic -- (thi
end
+local setcolor = nodes.tracers.colors.set
+local resetcolor = nodes.tracers.colors.reset
+local italic_kern = new_kern
+local c_positive_d = "trace:db"
+local c_negative_d = "trace:dr"
+
local function insert_kern(current,kern)
local sub = new_node(math_sub) -- todo: pool
local noad = new_node(math_noad) -- todo: pool
- sub.head = kern
- kern.next = noad
- noad.nucleus = current
+ setfield(sub,"list",kern)
+ setfield(kern,"next",noad)
+ setfield(noad,"nucleus",current)
return sub
end
-local setcolor = nodes.tracers.colors.set
-local italic_kern = new_kern
-local c_positive_d = "trace:db"
-local c_negative_d = "trace:dr"
-
-trackers.register("math.italics", function(v)
+registertracker("math.italics.visualize", function(v)
if v then
italic_kern = function(k,font)
local ex = 1.5 * fontexheights[font]
@@ -913,44 +871,46 @@ trackers.register("math.italics", function(v)
end)
italics[math_char] = function(pointer,what,n,parent)
- local method = pointer[a_mathitalics]
+ local method = getattr(pointer,a_mathitalics)
if method and method > 0 then
- local char = pointer.char
- local font = font_of_family(pointer.fam) -- todo: table
+ local char = getchar(pointer)
+ local font = font_of_family(getfield(pointer,"fam")) -- todo: table
local correction, visual = getcorrection(method,font,char)
if correction then
- local pid = parent.id
+ local pid = getid(parent)
local sub, sup
if pid == math_noad then
- sup = parent.sup
- sub = parent.sub
+ sup = getfield(parent,"sup")
+ sub = getfield(parent,"sub")
end
if sup or sub then
- local subtype = parent.subtype
+ local subtype = getsubtype(parent)
if subtype == noad_oplimits then
if sup then
- parent.sup = insert_kern(sup,italic_kern(correction,font))
+ setfield(parent,"sup",insert_kern(sup,italic_kern(correction,font)))
if trace_italics then
report_italics("method %a, adding %p italic correction for upper limit of %C",method,correction,char)
end
end
if sub then
local correction = - correction
- parent.sub = insert_kern(sub,italic_kern(correction,font))
+ setfield(parent,"sub",insert_kern(sub,italic_kern(correction,font)))
if trace_italics then
report_italics("method %a, adding %p italic correction for lower limit of %C",method,correction,char)
end
end
- else
- if sup then
- parent.sup = insert_kern(sup,italic_kern(correction,font))
+ elseif sup then
+ if pointer ~= sub then
+ setfield(parent,"sup",insert_kern(sup,italic_kern(correction,font)))
if trace_italics then
report_italics("method %a, adding %p italic correction before superscript after %C",method,correction,char)
end
+ else
+ -- otherwise we inject twice
end
end
else
- local next_noad = parent.next
+ local next_noad = getnext(parent)
if not next_noad then
if n== 1 then -- only at the outer level .. will become an option (always,endonly,none)
if trace_italics then
@@ -958,12 +918,12 @@ italics[math_char] = function(pointer,what,n,parent)
end
insert_node_after(parent,parent,italic_kern(correction,font))
end
- elseif next_noad.id == math_noad then
- local next_subtype = next_noad.subtype
+ elseif getid(next_noad) == math_noad then
+ local next_subtype = getsubtype(next_noad)
if next_subtype == noad_punct or next_subtype == noad_ord then
- local next_nucleus = next_noad.nucleus
- if next_nucleus.id == math_char then
- local next_char = next_nucleus.char
+ local next_nucleus = getfield(next_noad,"nucleus")
+ if getid(next_nucleus) == math_char then
+ local next_char = getchar(next_nucleus)
local next_data = chardata[next_char]
local visual = next_data.visual
if visual == "it" or visual == "bi" then
@@ -1026,6 +986,147 @@ function mathematics.resetitalics()
texsetattribute(a_mathitalics,unsetvalue)
end
+-- primes and such
+
+local collapse = { } processors.collapse = collapse
+
+local mathpairs = characters.mathpairs
+
+mathpairs[0x2032] = { [0x2032] = 0x2033, [0x2033] = 0x2034, [0x2034] = 0x2057 } -- (prime,prime) (prime,doubleprime) (prime,tripleprime)
+mathpairs[0x2033] = { [0x2032] = 0x2034, [0x2033] = 0x2057 } -- (doubleprime,prime) (doubleprime,doubleprime)
+mathpairs[0x2034] = { [0x2032] = 0x2057 } -- (tripleprime,prime)
+
+mathpairs[0x2035] = { [0x2035] = 0x2036, [0x2036] = 0x2037 } -- (reversedprime,reversedprime) (reversedprime,doublereversedprime)
+mathpairs[0x2036] = { [0x2035] = 0x2037 } -- (doublereversedprime,reversedprime)
+
+mathpairs[0x222B] = { [0x222B] = 0x222C, [0x222C] = 0x222D }
+mathpairs[0x222C] = { [0x222B] = 0x222D }
+
+mathpairs[0x007C] = { [0x007C] = 0x2016, [0x2016] = 0x2980 } -- bar+bar=double bar+double=triple
+mathpairs[0x2016] = { [0x007C] = 0x2980 } -- double+bar=triple
+
+local movesub = {
+ -- primes
+ [0x2032] = 0xFE932,
+ [0x2033] = 0xFE933,
+ [0x2034] = 0xFE934,
+ [0x2057] = 0xFE957,
+ -- reverse primes
+ [0x2035] = 0xFE935,
+ [0x2036] = 0xFE936,
+ [0x2037] = 0xFE937,
+}
+
+local validpair = {
+ [noad_rel] = true,
+ [noad_ord] = true,
+ [noad_opdisplaylimits] = true,
+ [noad_oplimits] = true,
+ [noad_opnolimits] = true,
+}
+
+local function movesubscript(parent,current_nucleus,current_char)
+ local prev = getfield(parent,"prev")
+ if prev and getid(prev) == math_noad then
+ if not getfield(prev,"sup") and not getfield(prev,"sub") then
+ -- {f} {'}_n => f_n^'
+ setfield(current_nucleus,"char",movesub[current_char or getchar(current_nucleus)])
+ local nucleus = getfield(parent,"nucleus")
+ local sub = getfield(parent,"sub")
+ local sup = getfield(parent,"sup")
+ setfield(prev,"sup",nucleus)
+ setfield(prev,"sub",sub)
+ local dummy = copy_node(nucleus)
+ setfield(dummy,"char",0)
+ setfield(parent,"nucleus",dummy)
+ setfield(parent,"sub",nil)
+ if trace_collapsing then
+ report_collapsing("fixing subscript")
+ end
+ elseif not getfield(prev,"sup") then
+ -- {f} {'}_n => f_n^'
+ setfield(current_nucleus,"char",movesub[current_char or getchar(current_nucleus)])
+ local nucleus = getfield(parent,"nucleus")
+ local sup = getfield(parent,"sup")
+ setfield(prev,"sup",nucleus)
+ local dummy = copy_node(nucleus)
+ setfield(dummy,"char",0)
+ setfield(parent,"nucleus",dummy)
+ if trace_collapsing then
+ report_collapsing("fixing subscript")
+ end
+ end
+ end
+end
+
+local function collapsepair(pointer,what,n,parent,nested) -- todo: switch to turn in on and off
+ if parent then
+ if validpair[getsubtype(parent)] then
+ local current_nucleus = getfield(parent,"nucleus")
+ if getid(current_nucleus) == math_char then
+ local current_char = getchar(current_nucleus)
+ if not getfield(parent,"sub") and not getfield(parent,"sup") then
+ local mathpair = mathpairs[current_char]
+ if mathpair then
+ local next_noad = getnext(parent)
+ if next_noad and getid(next_noad) == math_noad then
+ if validpair[getsubtype(next_noad)] then
+ local next_nucleus = getfield(next_noad,"nucleus")
+ local next_char = getchar(next_nucleus)
+ if getid(next_nucleus) == math_char then
+ local newchar = mathpair[next_char]
+ if newchar then
+ local fam = getfield(current_nucleus,"fam")
+ local id = font_of_family(fam)
+ local characters = fontcharacters[id]
+ if characters and characters[newchar] then
+ if trace_collapsing then
+ report_collapsing("%U + %U => %U",current_char,next_char,newchar)
+ end
+ setfield(current_nucleus,"char",newchar)
+ local next_next_noad = getnext(next_noad)
+ if next_next_noad then
+ setfield(parent,"next",next_next_noad)
+ setfield(next_next_noad,"prev",parent)
+ else
+ setfield(parent,"next",nil)
+ end
+ setfield(parent,"sup",getfield(next_noad,"sup"))
+ setfield(parent,"sub",getfield(next_noad,"sub"))
+ setfield(next_noad,"sup",nil)
+ setfield(next_noad,"sub",nil)
+ free_node(next_noad)
+ collapsepair(pointer,what,n,parent,true)
+ -- if not nested and movesub[current_char] then
+ -- movesubscript(parent,current_nucleus,current_char)
+ -- end
+ end
+ elseif not nested and movesub[current_char] then
+ movesubscript(parent,current_nucleus,current_char)
+ end
+ end
+ end
+ elseif not nested and movesub[current_char] then
+ movesubscript(parent,current_nucleus,current_char)
+ end
+ elseif not nested and movesub[current_char] then
+ movesubscript(parent,current_nucleus,current_char)
+ end
+ elseif not nested and movesub[current_char] then
+ movesubscript(parent,current_nucleus,current_char)
+ end
+ end
+ end
+ end
+end
+
+collapse[math_char] = collapsepair
+
+function noads.handlers.collapse(head,style,penalties)
+ processnoads(head,collapse,"collapse")
+ return true
+end
+
-- variants
local variants = { }
@@ -1047,15 +1148,15 @@ local validvariants = { -- fast check on valid
}
variants[math_char] = function(pointer,what,n,parent) -- also set export value
- local char = pointer.char
+ local char = getchar(pointer)
local selector = validvariants[char]
if selector then
- local next = parent.next
- if next and next.id == math_noad then
- local nucleus = next.nucleus
- if nucleus and nucleus.id == math_char and nucleus.char == selector then
+ local next = getnext(parent)
+ if next and getid(next) == math_noad then
+ local nucleus = getfield(next,"nucleus")
+ if nucleus and getid(nucleus) == math_char and getchar(nucleus) == selector then
local variant
- local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata
+ local tfmdata = fontdata[font_of_family(getfield(pointer,"fam"))] -- we can also have a famdata
local mathvariants = tfmdata.resources.variants -- and variantdata
if mathvariants then
mathvariants = mathvariants[selector]
@@ -1064,8 +1165,8 @@ variants[math_char] = function(pointer,what,n,parent) -- also set export value
end
end
if variant then
- pointer.char = variant
- pointer[a_exportstatus] = char -- we don't export the variant as it's visual markup
+ setfield(pointer,"char",variant)
+ setattr(pointer,a_exportstatus,char) -- we don't export the variant as it's visual markup
if trace_variants then
report_variants("variant (%U,%U) replaced by %U",char,selector,variant)
end
@@ -1074,8 +1175,8 @@ variants[math_char] = function(pointer,what,n,parent) -- also set export value
report_variants("no variant (%U,%U)",char,selector)
end
end
- next.prev = pointer
- parent.next = next.next
+ setfield(next,"prev",pointer)
+ setfield(parent,"next",getnext(next))
free_node(next)
end
end
@@ -1108,7 +1209,7 @@ local colors = {
}
classes[math_char] = function(pointer,what,n,parent)
- local color = colors[parent.subtype]
+ local color = colors[getsubtype(parent)]
if color then
setcolor(pointer,color)
else
@@ -1121,7 +1222,7 @@ function handlers.classes(head,style,penalties)
return true
end
-trackers.register("math.classes",function(v) tasks.setaction("math","noads.handlers.classes",v) end)
+registertracker("math.classes",function(v) tasks.setaction("math","noads.handlers.classes",v) end)
-- just for me
@@ -1129,7 +1230,7 @@ function handlers.showtree(head,style,penalties)
inspect(nodes.totree(head))
end
-trackers.register("math.showtree",function(v) tasks.setaction("math","noads.handlers.showtree",v) end)
+registertracker("math.showtree",function(v) tasks.setaction("math","noads.handlers.showtree",v) end)
-- the normal builder
@@ -1184,6 +1285,20 @@ end)
-- interface
-commands.setmathalternate = mathematics.setalternate
-commands.setmathitalics = mathematics.setitalics
-commands.resetmathitalics = mathematics.resetitalics
+local implement = interfaces.implement
+
+implement {
+ name = "setmathalternate",
+ actions = mathematics.setalternate,
+ arguments = { "integer", "string" }
+}
+
+implement {
+ name = "setmathitalics",
+ actions = mathematics.setitalics
+}
+
+implement {
+ name = "resetmathitalics",
+ actions = mathematics.resetitalics
+}
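
The mathpairs data above (prime plus prime gives a double prime, bar plus bar gives a double bar, and so on) drives collapsepair, which folds adjacent noads left to right. Below is a standalone sketch of that folding on a plain array of characters, ignoring noads, scripts and the font coverage check that the real handler performs; the helper names are illustrative only.

    -- Sketch only: fold adjacent characters using the same pair-table idea.
    local mathpairs = {
        [0x2032] = { [0x2032] = 0x2033, [0x2033] = 0x2034, [0x2034] = 0x2057 }, -- primes
        [0x2033] = { [0x2032] = 0x2034, [0x2033] = 0x2057 },
        [0x2034] = { [0x2032] = 0x2057 },
    }

    local function collapse(chars)
        local result, i = { }, 1
        while i <= #chars do
            local current = chars[i]
            local pair    = mathpairs[current]
            while pair do
                local nextchar = chars[i+1]
                local combined = nextchar and pair[nextchar]
                if not combined then
                    break
                end
                current = combined
                pair    = mathpairs[current]
                i       = i + 1
            end
            result[#result+1] = current
            i = i + 1
        end
        return result
    end

    -- collapse { 0x2032, 0x2032 }         --> { 0x2033 }  prime prime         -> double prime
    -- collapse { 0x2032, 0x2032, 0x2032 } --> { 0x2034 }  three primes        -> triple prime
    -- collapse { 0x2033, 0x2033 }         --> { 0x2057 }  double double prime -> quadruple prime
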
diff --git a/tex/context/base/math-rad.mkvi b/tex/context/base/math-rad.mkvi
index c6053071e..23e056c1f 100644
--- a/tex/context/base/math-rad.mkvi
+++ b/tex/context/base/math-rad.mkvi
@@ -28,13 +28,13 @@
\def\root#1\of{\rootradical{#1}} % #2
-\unexpanded\def\sqrt{\doifnextoptionalelse\rootwithdegree\rootwithoutdegree}
+\unexpanded\def\sqrt{\doifelsenextoptionalcs\rootwithdegree\rootwithoutdegree}
-\def\styledrootradical#1#2% so that \text works ok ... \rootradical behaves somewhat weird
+\unexpanded\def\styledrootradical#1#2% so that \text works ok ... \rootradical behaves somewhat weird
{\normalexpanded{\rootradical{\normalunexpanded{#1}}{\noexpand\triggermathstyle{\normalmathstyle}\normalunexpanded{#2}}}}
-\def\rootwithdegree[#1]{\rootradical{#1}}
-\def\rootwithoutdegree {\rootradical {}}
+\unexpanded\def\rootwithdegree[#1]{\rootradical{#1}}
+\unexpanded\def\rootwithoutdegree {\rootradical {}}
%D Even older stuff:
@@ -62,7 +62,7 @@
\unexpanded\def\math_radical_handle#tag%
{\begingroup
\edef\currentmathradical{#tag}%
- \doifnextoptionalelse\math_radical_degree_yes\math_radical_degree_nop}
+ \doifelsenextoptionalcs\math_radical_degree_yes\math_radical_degree_nop}
\def\math_radical_alternative{\csname\??mathradicalalternative\mathradicalparameter\c!alternative\endcsname}
@@ -74,8 +74,8 @@
\def\math_radical_indeed#body%
{\math_radical_alternative{#body}\endgroup}
-\setvalue{\??mathradicalalternative\v!default}% #1%
- {\rootradical{\currentmathradicaldegree}}
+\setvalue{\??mathradicalalternative\v!default}% #body%
+ {\rootradical{\currentmathradicaldegree}} % {#body}}
\setvalue{\??mathradicalalternative\v!normal}#body%
{\edef\p_color{\mathradicalparameter\c!color}%
@@ -173,11 +173,11 @@
\setbox\nextbox\mathstylehbox{#body}%
 % we use the \overlay variables as these are passed anyway and
% it's more efficient than using parameters
- \edef\overlaywidth {\the\wd\nextbox}%
- \edef\overlayheight {\the\ht\nextbox}%
- \edef\overlaydepth {\the\dp\nextbox}%
- \edef\overlayoffset {\the\scratchoffset}%
- \edef\overlaylinewidth{\the\linewidth}%
+ \d_overlay_width \wd\nextbox
+ \d_overlay_height \ht\nextbox
+ \d_overlay_depth \dp\nextbox
+ \d_overlay_offset \scratchoffset
+ \d_overlay_linewidth\linewidth
\edef\overlaylinecolor{\mathradicalparameter\c!color}%
%
\edef\p_mp{\mathradicalparameter\c!mp}%
@@ -233,11 +233,11 @@
{\begingroup
\scratchoffset\mathornamentparameter\c!mpoffset
\setbox\nextbox\mathstylehbox{#body}%
- \edef\overlaywidth {\the\wd\nextbox}%
- \edef\overlayheight {\the\ht\nextbox}%
- \edef\overlaydepth {\the\dp\nextbox}%
- \edef\overlayoffset {\the\scratchoffset}%
- \edef\overlaylinewidth{\the\linewidth}%
+ \d_overlay_width \wd\nextbox
+ \d_overlay_height \ht\nextbox
+ \d_overlay_depth \dp\nextbox
+ \d_overlay_offset \scratchoffset
+ \d_overlay_linewidth\linewidth
\edef\overlaylinecolor{\mathornamentparameter\c!color}%
\edef\p_mp{\mathornamentparameter\c!mp}%
 % the width of the graphic determines the width of the final result
diff --git a/tex/context/base/math-ren.lua b/tex/context/base/math-ren.lua
index 5c4c13369..4628ffe55 100644
--- a/tex/context/base/math-ren.lua
+++ b/tex/context/base/math-ren.lua
@@ -60,6 +60,8 @@ end
mathematics.renderset = renderset
-function commands.mathrenderset(list)
- context(renderset(list))
-end
+interfaces.implement {
+ name = "mathrenderset",
+ actions = { renderset, context },
+ arguments = "string",
+}
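
The commands.mathrenderset wrapper is replaced by an implement registration whose action list stands for the same composition: { renderset, context } plays the role of context(renderset(list)). The following is a minimal stand-in, not the real interfaces.implement, that mimics that chaining; renderset and context are hypothetical stubs here.

    -- Sketch only: a toy implement that chains a list of actions.
    local registered = { }

    local function implement(specification)
        local actions = specification.actions
        if type(actions) == "table" then
            local list = actions
            actions = function(...)
                local result = list[1](...)
                for i=2,#list do
                    result = list[i](result)
                end
                return result
            end
        end
        registered[specification.name] = actions
    end

    -- hypothetical stand-ins for the real renderset and context:
    local function renderset(list) return "rendering " .. list end
    local function context(str)    print(str)                  end

    implement {
        name      = "mathrenderset",
        actions   = { renderset, context },
        arguments = "string", -- ignored by the toy; in ConTeXt it describes how arguments are scanned
    }

    registered.mathrenderset("oldstylenums") -- behaves like context(renderset("oldstylenums"))
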
diff --git a/tex/context/base/math-stc.mkvi b/tex/context/base/math-stc.mkvi
index 76a07db5c..a879d157f 100644
--- a/tex/context/base/math-stc.mkvi
+++ b/tex/context/base/math-stc.mkvi
@@ -16,6 +16,12 @@
\unprotect
+%D WARNING: If the code here changes, the export needs to be checked! Stackers are rather
+%D special because the order in mathml matters, so we flush in [base under over] order. We
+%D also do some analysis at the \TEX\ end (passing the right variant). It's easy to deal
+%D with this in the export, but in the pdf stream it is less trivial as we don't actually
+%D analyze there.
+
%D At some point the \MKII\ arrow mechanism has been converted to \MKIV, but we kept
%D most of the logic. We now have a more generic variant dealing with extensibles.
%D There are a few demands that we need to meet:
@@ -78,7 +84,7 @@
{\mathstylehbox{\usemathstackerscolorparameter\c!color
\Umathaccent\fam\zerocount\scratchunicode{\hskip\hsize}}}
-% these delimiters are a unuseable as theu don't center for small arguments:
+% these delimiters are unusable as they don't center for small arguments:
%
% $\Umathaccent 0 0 "2190{x}$ \par $\Umathaccent 0 0 "27F8{x}$\par
% $\Udelimiterunder 0 "2190{x}$ \par $\Udelimiterunder 0 "27F8{x}$\par
@@ -121,6 +127,18 @@
\def\math_stackers_skip_indeed#amount%
{\filledhboxk{\unsetteststrut\strut\hskip#amount}} % \dontshowstruts
+\let\math_stackers_start_tagged_mid\relax
+\let\math_stackers_start_tagged_top\relax
+\let\math_stackers_start_tagged_bot\relax
+\let\math_stackers_stop_tagged \relax
+
+\appendtoks
+ \def\math_stackers_start_tagged_mid{\dostarttagged\t!mathstackermid\empty\hbox\bgroup}%
+ \def\math_stackers_start_tagged_top{\dostarttagged\t!mathstackertop\empty\hbox\bgroup}%
+ \def\math_stackers_start_tagged_bot{\dostarttagged\t!mathstackerbot\empty\hbox\bgroup}%
+ \def\math_stackers_stop_tagged {\egroup\dostoptagged}%
+\to \everysetuptagging
+
%D We define a full featured command handler.
\installcorenamespace {mathstackers}
@@ -139,9 +157,10 @@
\c!mpoffset=.25\exheight,
\c!voffset=.25\exheight,
\c!hoffset=.5\emwidth,
+ \c!distance=\mathstackersparameter\c!voffset, % distance between symbol and base (can be different from voffset)
\c!minheight=\exheight,
\c!mindepth=\zeropoint,
- \c!minwidth=\emwidth,
+ \c!minwidth=.5\emwidth,
\c!order=\v!normal,
\c!strut=,
\c!color=, % todo: when I need it
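
The new distance key defaults to the voffset but can be tuned independently. A
setup sketch at the user level (the values are only illustrative):

\setupmathstackers
  [both]
  [voffset=.25\exheight,
   distance=.5\exheight] % gap between the extensible and the base
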
@@ -203,11 +222,11 @@
\setvalue{\??mathstackersalternative\v!mp}%
{\hbox\bgroup % todo: add code key + tag
- \edef\overlaywidth {\the\scratchwidth}%
- \edef\overlayheight {\the\dimexpr\mathstackersparameter\c!mpheight}%
- \edef\overlaydepth {\the\dimexpr\mathstackersparameter\c!mpdepth}%
- \edef\overlayoffset {\the\dimexpr\mathstackersparameter\c!mpoffset}%
- \edef\overlaylinewidth{\the\linewidth}%
+ \d_overlay_width \scratchwidth
+ \d_overlay_height \dimexpr\mathstackersparameter\c!mpheight
+ \d_overlay_depth \dimexpr\mathstackersparameter\c!mpdepth
+ \d_overlay_offset \dimexpr\mathstackersparameter\c!mpoffset
+ \d_overlay_linewidth\linewidth
\edef\overlaylinecolor{\mathstackersparameter\c!color}%
\edef\p_mp{\mathstackersparameter\c!mp}%
\uniqueMPgraphic{\p_mp}%
@@ -261,9 +280,11 @@
\fi}
\unexpanded\def\math_stackers_triplet#method#category#codepoint#toptext#bottomtext%
+ %{\math_stackers_start_group{#category}%
{\begingroup
\edef\currentmathstackers{#category}%
\mathstackersparameter\c!left\relax
+ \dostarttagged\t!mathstacker\currentmathstackers
\ifmmode\math_class_by_parameter\mathstackersparameter\else\dontleavehmode\fi
{\edef\p_offset {\mathstackersparameter\c!offset}%
\edef\p_location {\mathstackersparameter\c!location}%
@@ -311,7 +332,11 @@
\fi
\scratchwidth\wd
\ifdim\wd\scratchboxone>\wd\scratchboxtwo
- \scratchboxone
+ \ifdim\wd\scratchboxone>\wd\scratchboxthree
+ \scratchboxone
+ \else
+ \scratchboxthree
+ \fi
\else\ifdim\wd\scratchboxtwo>\wd\scratchboxthree
\scratchboxtwo
\else
@@ -327,7 +352,9 @@
\advance\scratchwidth2\scratchhoffset
%
\ifcase#method\relax
+ \dostarttagged\t!mathstackermid\empty
\setbox\scratchboxthree\csname\??mathstackersalternative\p_alternative\endcsname
+ \dostoptagged
\fi
%
\ifdim\wd\scratchboxone<\scratchwidth
@@ -370,30 +397,54 @@
\fi
%
\math_stackers_normalize_three
- %
- \math_stackers_middle\bgroup
- \box\scratchboxthree
- \egroup
- %
- \ifdim\htdp\scratchboxone>\zeropoint
- \scratchoffset\dimexpr\scratchvoffset
- \kern-\scratchwidth
- \math_stackers_top\bgroup
- \raise\dimexpr\dp\scratchboxone+\scratchheight+\scratchoffset+\scratchtopoffset\relax
- \box\scratchboxone
+ % analysis
+ \ifdim\htdp\scratchboxtwo>\zeropoint
+ \ifdim\htdp\scratchboxone>\zeropoint
+ \dosettagproperty\s!subtype\t!munderover
+ \else
+ \dosettagproperty\s!subtype\t!munder
+ \fi
+ \else
+ \ifdim\htdp\scratchboxone>\zeropoint
+ \dosettagproperty\s!subtype\t!mover
+ \else
+ % brrr
+ \fi
+ \fi
+ % base
+ \math_stackers_start_tagged_mid
+ \math_stackers_middle\bgroup
+ \box\scratchboxthree
\egroup
+ \math_stackers_stop_tagged
+ % under
+ \ifdim\htdp\scratchboxtwo>\zeropoint
+ \math_stackers_start_tagged_bot
+ \scratchoffset\scratchvoffset
+ \kern-\scratchwidth
+ \math_stackers_bottom\bgroup
+ \lower\dimexpr\ht\scratchboxtwo+\scratchdepth+\scratchoffset+\scratchbottomoffset\relax
+ \box\scratchboxtwo
+ \egroup
+ \math_stackers_stop_tagged
+ \fi
+ % over
+ \ifdim\htdp\scratchboxone>\zeropoint
+ \math_stackers_start_tagged_top
+ \scratchoffset\scratchvoffset
+ \kern-\scratchwidth
+ \math_stackers_top\bgroup
+ \raise\dimexpr\dp\scratchboxone+\scratchheight+\scratchoffset+\scratchtopoffset\relax
+ \box\scratchboxone
+ \egroup
+ \math_stackers_stop_tagged
\fi
%
- \ifdim\htdp\scratchboxtwo>\zeropoint
- \scratchoffset\dimexpr\scratchvoffset
- \kern-\scratchwidth
- \math_stackers_bottom\bgroup
- \lower\dimexpr\ht\scratchboxtwo+\scratchdepth+\scratchoffset+\scratchbottomoffset\relax
- \box\scratchboxtwo
- \egroup
- \fi}%
+ }%
+ \dostoptagged
\mathstackersparameter\c!right\relax
\endgroup}
+ %\math_stackers_start_group}
\unexpanded\def\definemathextensible
{\dotripleempty\math_stackers_define_normal}
@@ -449,13 +500,25 @@
\def\math_class_by_parameter_indeed#1%
{\csname\??mathclasses\ifcsname\??mathclasses#1\endcsname#1\fi\endcsname}
-\unexpanded\def\math_stackers_make_double#top#bottom#category#codepoint#codeextra#text%
+% 1 0 name n 0 | 0 1 name n 0 | 1 1 name n n
+
+\unexpanded\def\math_stackers_start_group#category%
{\begingroup
\edef\currentmathstackers{#category}%
+ \edef\p_limits{\mathstackersparameter\c!mathlimits}%
+ \ifx\p_limits\v!yes
+ \def\math_stackers_stop_group{\egroup\endgroup\limits}%
+ \mathop\bgroup
+ \else
+ \let\math_stackers_stop_group\endgroup
+ \fi}
+
+\unexpanded\def\math_stackers_make_double#top#bottom#category#codepoint#codeextra#text%
+ {\math_stackers_start_group{#category}%
\mathstackersparameter\c!left\relax
+ \dostarttagged\t!mathstacker\currentmathstackers
\ifmmode\math_class_by_parameter\mathstackersparameter\else\dontleavehmode\fi
- {\edef\currentmathstackers{#category}%
- \edef\m_math_stackers_text_middle {#text}%
+ {\edef\m_math_stackers_text_middle {#text}%
%
\edef\p_offset {\mathstackersparameter\c!offset}%
\edef\p_location {\mathstackersparameter\c!location}%
@@ -467,7 +530,7 @@
%
\math_stackers_check_unicode{#codepoint}%
%
- \ifx\currentmathtext\empty
+ \ifx\math_stackers_middle\empty
\setbox\scratchboxthree\emptyhbox
\else
\setmathtextbox\scratchboxthree\hbox{\math_stackers_middletext}%
@@ -480,44 +543,74 @@
\fi
\advance\scratchwidth2\scratchhoffset
%
- \setbox\scratchboxtwo \csname\??mathstackersalternative\p_alternative\endcsname
+ \setbox\scratchboxtwo\csname\??mathstackersalternative\p_alternative\endcsname
\setbox\scratchboxthree\hbox to \scratchwidth{\hss\box\scratchboxthree\hss}%
%
- \math_stackers_normalize_three
+ \scratchunicode#codeextra\relax
+ \ifcase\scratchunicode\else
+ \setbox\scratchboxone\csname\??mathstackersalternative\p_alternative\endcsname
+ \fi
%
- \math_stackers_middle\bgroup
- \box\scratchboxthree
- \egroup
+ \math_stackers_normalize_three
+ % analysis
+ \ifcase#bottom\relax
+ \ifcase#top\relax
+ \dosettagproperty\s!subtype\t!munderover
+ \else
+ \dosettagproperty\s!subtype\t!mover
+ \fi
+ \else
+ \ifcase#top\relax
+ \dosettagproperty\s!subtype\t!munder
+ \else
+ % brrr
+ \fi
+ \fi
+ % base
+ \math_stackers_start_tagged_mid
+ \math_stackers_middle\bgroup
+ \box\scratchboxthree
+ \egroup
+ \math_stackers_stop_tagged
%
\ifdim\htdp\scratchboxtwo>\zeropoint
- \kern-\scratchwidth
- \ifcase#top\else
- \math_stackers_top\bgroup
- % \raise\dimexpr\scratchheight+\scratchtopoffset\relax
- \raise\dimexpr\scratchheight+\mathstackersparameter\c!voffset\relax
- \box\scratchboxtwo
- \egroup
- \fi
- \scratchunicode#codeextra\relax
- \ifcase\scratchunicode\else
+ \ifcase#bottom\else
\kern-\scratchwidth
- \setbox\scratchboxtwo\csname\??mathstackersalternative\p_alternative\endcsname
+ % under
+ \math_stackers_start_tagged_bot
+ \math_stackers_bottom\bgroup
+ \lower\dimexpr
+ \scratchdepth
+ +\ht\scratchboxtwo
+ +\mathstackersparameter\c!distance % was \c!voffset
+ \relax
+ \ifcase#top\relax
+ \box\scratchboxtwo
+ \else
+ \box\scratchboxone
+ \fi
+ \egroup
+ \math_stackers_stop_tagged
\fi
- \ifcase#bottom\else
- \math_stackers_bottom\bgroup
- % \lower\dimexpr\scratchdepth+\ht\scratchboxtwo+\scratchbottomoffset\relax
- \lower\dimexpr\scratchdepth+\ht\scratchboxtwo+\mathstackersparameter\c!voffset\relax
- \box\scratchboxtwo
- \egroup
+ \ifcase#top\else
+ \kern-\scratchwidth
+ % over
+ \math_stackers_start_tagged_top
+ \math_stackers_top\bgroup
+ \raise\dimexpr
+ \scratchheight
+ +\dp\scratchboxtwo % new
+ +\mathstackersparameter\c!distance % was \c!voffset
+ \relax
+ \box\scratchboxtwo
+ \egroup
+ \math_stackers_stop_tagged
\fi
+ %
\fi}%
+ \dostoptagged
\mathstackersparameter\c!right\relax
- \edef\p_limits{\mathstackersparameter\c!mathlimits}%
- \ifx\p_limits\v!yes
- \expandafter\endgroup\expandafter\limits
- \else
- \expandafter\endgroup
- \fi}
+ \math_stackers_stop_group}
\unexpanded\def\definemathoverextensible {\dotripleempty \math_extensibles_define_over }
\unexpanded\def\definemathunderextensible {\dotripleempty \math_extensibles_define_under}
@@ -551,14 +644,173 @@
\def\math_stackers_handle_over[#category]%
{\math_stackers_direct_double\plusone\zerocount{\iffirstargument#category\else\v!top \fi}} % will be defined later on
-\def\math_stackers_handle_under[#category]#codepoint#bottomtext%
+\def\math_stackers_handle_under[#category]%
{\math_stackers_direct_double\zerocount\plusone{\iffirstargument#category\else\v!bottom\fi}} % will be defined later on
-\def\math_stackers_handle_double[#category]#codepoint#bottomtext%
+\def\math_stackers_handle_double[#category]%
{\math_stackers_direct_double\plusone\plusone {\iffirstargument#category\else\v!bottom\fi}} % will be defined later on
\def\math_stackers_direct_double#top#bottom#category#codepoint#text%
- {\math_stackers_make_double#top#bottom{#category}{#codepoint}{#text}%
+ {\math_stackers_make_double#top#bottom{#category}{#codepoint}{0}{#text}%
+ \endgroup}
+
+%D A relatively new one is a combination of accents and text (as needed in mathml):
+
+\unexpanded\def\math_stackers_make_double_text#where#category#codepoint#text#extra%
+ {\math_stackers_start_group{#category}%
+ \mathstackersparameter\c!left\relax
+ \dostarttagged\t!mathstacker\currentmathstackers
+ \ifmmode\math_class_by_parameter\mathstackersparameter\else\dontleavehmode\fi
+ {\edef\currentmathstackers{#category}%
+ %
+ \edef\p_offset {\mathstackersparameter\c!offset}%
+ \edef\p_location {\mathstackersparameter\c!location}%
+ \edef\p_strut {\mathstackersparameter\c!strut}%
+ \edef\p_alternative{\mathstackersparameter\c!alternative}%
+ %
+ \scratchleftoffset \zeropoint
+ \scratchrightoffset\zeropoint
+ %
+ \edef\m_math_stackers_text_middle{#text}%
+ \math_stackers_check_unicode{#codepoint}%
+ \scratchunicode#codepoint\relax
+ %
+ \ifx\math_stackers_middle\empty
+ \setbox\scratchboxthree\emptyhbox
+ \else
+ \setmathtextbox\scratchboxthree\hbox{\math_stackers_middletext}%
+ \fi
+ %
+ \ifcase#where\relax
+ \edef\m_math_stackers_text_top{#extra}%
+ \ifx\math_stackers_top\empty
+ \setbox\scratchboxone\emptyhbox
+ \else
+ \setmathsmalltextbox\scratchboxone\hbox{\math_stackers_toptext}%
+ \fi
+ \else
+ \edef\m_math_stackers_text_bottom{#extra}%
+ \ifx\math_stackers_bottom\empty
+ \setbox\scratchboxone\emptyhbox
+ \else
+ \setmathsmalltextbox\scratchboxone\hbox{\math_stackers_bottomtext}%
+ \fi
+ \fi
+ %
+ \scratchwidth\wd
+ \ifdim\wd\scratchboxone>\wd\scratchboxthree
+ \scratchboxone
+ \else
+ \scratchboxthree
+ \fi
+ \relax
+ \scratchdimen\mathstackersparameter\c!minwidth\relax
+ \ifdim\scratchwidth<\scratchdimen
+ \scratchwidth\scratchdimen
+ \fi
+ \advance\scratchwidth2\scratchhoffset
+ %
+ \ifdim\wd\scratchboxone<\scratchwidth
+ \setbox\scratchboxone\hbox to \scratchwidth{\hss\unhbox\scratchboxone\hss}%
+ \fi
+ \ifdim\wd\scratchboxthree<\scratchwidth
+ \setbox\scratchboxthree\hbox to \scratchwidth{\hss\unhbox\scratchboxthree\hss}%
+ \fi
+ %
+ \math_stackers_normalize_three
+ % analysis
+ \dosettagproperty\s!subtype\t!munderover
+ % base
+ \math_stackers_start_tagged_mid
+ \math_stackers_middle\bgroup
+ \box\scratchboxthree
+ \egroup
+ \math_stackers_stop_tagged
+ %
+ \setbox\scratchboxtwo\csname\??mathstackersalternative\p_alternative\endcsname
+ \kern-\scratchwidth
+ \ifcase#where\relax
+ % under
+ \math_stackers_start_tagged_bot
+ \math_stackers_bottom\bgroup
+ \lower\dimexpr
+ \scratchdepth
+ +\ht\scratchboxtwo
+ +\mathstackersparameter\c!distance
+ \relax
+ \box\scratchboxtwo % accent
+ \egroup
+ \math_stackers_stop_tagged
+ \kern-\scratchwidth
+ % over
+ \math_stackers_start_tagged_top
+ \math_stackers_top\bgroup
+ \raise\dimexpr
+ \scratchheight
+ +\dp\scratchboxone
+ +\mathstackersparameter\c!voffset
+ \relax
+ \box\scratchboxone % toptext
+ \egroup
+ \math_stackers_stop_tagged
+ \else
+ % under
+ \math_stackers_start_tagged_bot
+ \math_stackers_bottom\bgroup
+ \lower\dimexpr
+ \scratchdepth
+ +\ht\scratchboxone
+ +\mathstackersparameter\c!voffset
+ \relax
+ \box\scratchboxone % bottext
+ \egroup
+ \math_stackers_stop_tagged
+ \kern-\scratchwidth
+ % over
+ \math_stackers_start_tagged_top
+ \math_stackers_top\bgroup
+ \raise\dimexpr
+ \scratchheight
+ +\dp\scratchboxtwo % new
+ +\mathstackersparameter\c!distance
+ \relax
+ \box\scratchboxtwo % accent
+ \egroup
+ \math_stackers_stop_tagged
+ \fi
+ }%
+ \dostoptagged
+ \mathstackersparameter\c!right\relax
+ \math_stackers_stop_group}
+
+\unexpanded\def\definemathovertextextensible {\dotripleempty\math_extensibles_define_over_text }
+\unexpanded\def\definemathundertextextensible{\dotripleempty\math_extensibles_define_under_text}
+
+\def\math_extensibles_define_over_text[#1][#2][#3]%
+ {\ifthirdargument
+ \setuevalue{#2}{\math_stackers_make_double_text\plusone {#1}{\number#3}}%
+ \else
+ \setuevalue{#1}{\math_stackers_make_double_text\plusone \noexpand\currentmathstackers{\number#2}}%
+ \fi}
+
+\def\math_extensibles_define_under_text[#1][#2][#3]%
+ {\ifthirdargument
+ \setuevalue{#2}{\math_stackers_make_double_text\zerocount{#1}{\number#3}}%
+ \else
+ \setuevalue{#1}{\math_stackers_make_double_text\zerocount\noexpand\currentmathstackers{\number#2}}%
+ \fi}
+
+\unexpanded\def\mathovertext {\begingroup\dosingleempty\math_stackers_handle_over_text }
+\unexpanded\def\mathundertext{\begingroup\dosingleempty\math_stackers_handle_under_text }
+
+\def\math_stackers_handle_over_text[#category]%
+ {\math_stackers_direct_double_text\plusone {\iffirstargument#category\else\v!top \fi}} % will be defined later on
+
+\def\math_stackers_handle_under_text[#category]%
+ {\math_stackers_direct_double_text\zerocount{\iffirstargument#category\else\v!bottom\fi}} % will be defined later on
+
+\def\math_stackers_direct_double_text#where#category#codepoint#text#extra%%
+ {\math_stackers_make_double_text#where{#category}{#codepoint}{#text}{#extra}%
\endgroup}
%D Here is a bonus macro that takes three texts. It can be used to get consistent
@@ -654,11 +906,23 @@
[\v!both]
\definemathstackers
- [vfenced]
+ [\v!vfenced]
[\v!both]
[\c!mathclass=\s!ord,
\c!mathlimits=\v!yes]
+% these are needed for mathml:
+
+% \setupmathstackers
+% [\v!both]
+% [\c!hoffset=1pt,
+% \c!voffset=1pt]
+
+\definemathstackers
+ [\v!bothtext]
+ [\v!both]
+ [\c!strut=\v!yes]
+
% These are compatibility definitions, math only.
% todo: top= bottom= middle= is nicer (compare math-fen)
@@ -761,6 +1025,24 @@
\definemathextensible [\v!mathematics] [mrightleftharpoons] ["21CC]
\definemathextensible [\v!mathematics] [mtriplerel] ["2261]
+\definemathextensible [\v!mathematics] [eleftarrowfill] ["2190] % ["27F5]
+\definemathextensible [\v!mathematics] [erightarrowfill] ["2192] % ["27F6]
+\definemathextensible [\v!mathematics] [eleftrightarrowfill] ["27F7]
+\definemathextensible [\v!mathematics] [etwoheadrightarrowfill] ["27F9]
+\definemathextensible [\v!mathematics] [eleftharpoondownfill] ["21BD]
+\definemathextensible [\v!mathematics] [eleftharpoonupfill] ["21BC]
+\definemathextensible [\v!mathematics] [erightharpoondownfill] ["21C1]
+\definemathextensible [\v!mathematics] [erightharpoonupfill] ["21C0]
+
+\definemathextensible [\v!mathematics] [eoverbarfill] ["FE33E]
+\definemathextensible [\v!mathematics] [eunderbarfill] ["FE33F]
+\definemathextensible [\v!mathematics] [eoverbracefill] ["FE3DE]
+\definemathextensible [\v!mathematics] [eunderbracefill] ["FE3DF]
+\definemathextensible [\v!mathematics] [eoverparentfill] ["FE3DC]
+\definemathextensible [\v!mathematics] [eunderparentfill] ["FE3DD]
+\definemathextensible [\v!mathematics] [eoverbracketfill] ["FE3B4]
+\definemathextensible [\v!mathematics] [eunderbracketfill] ["FE3B5]
+
\definemathextensible [\v!text] [trel] ["002D]
\definemathextensible [\v!text] [tequal] ["003D]
\definemathextensible [\v!text] [tmapsto] ["21A6]
@@ -819,23 +1101,39 @@
% alternatively we can move the original to FE*
\definemathoverextensible [vfenced] [overbar] ["FE33E] % ["203E]
-\definemathunderextensible [vfenced] [underbar] ["FE33F] % ["203E]
+\definemathunderextensible [vfenced] [underbar] ["FE33F] % ["203E]
\definemathdoubleextensible [vfenced] [doublebar] ["FE33E] ["FE33F]
\definemathoverextensible [vfenced] [overbrace] ["FE3DE] % ["023DE]
-\definemathunderextensible [vfenced] [underbrace] ["FE3DF] % ["023DF]
+\definemathunderextensible [vfenced] [underbrace] ["FE3DF] % ["023DF]
\definemathdoubleextensible [vfenced] [doublebrace] ["FE3DE] ["FE3DF]
\definemathoverextensible [vfenced] [overparent] ["FE3DC] % ["023DC]
-\definemathunderextensible [vfenced] [underparent] ["FE3DD] % ["023DD]
+\definemathunderextensible [vfenced] [underparent] ["FE3DD] % ["023DD]
\definemathdoubleextensible [vfenced] [doubleparent] ["FE3DC] ["FE3DD]
\definemathoverextensible [vfenced] [overbracket] ["FE3B4] % ["023B4]
-\definemathunderextensible [vfenced] [underbracket] ["FE3B5] % ["023B5]
+\definemathunderextensible [vfenced] [underbracket] ["FE3B5] % ["023B5]
\definemathdoubleextensible [vfenced] [doublebracket] ["FE3B4] ["FE3B5]
% \unexpanded\def\mathopwithlimits#1#2{\mathop{#1{#2}}\limits}
+%D For mathml:
+
+\definemathdoubleextensible [both] [overbarunderbar] ["FE33E] ["FE33F]
+\definemathdoubleextensible [both] [overbraceunderbrace] ["FE3DE] ["FE3DF]
+\definemathdoubleextensible [both] [overparentunderparent] ["FE3DC] ["FE3DD]
+\definemathdoubleextensible [both] [overbracketunderbracket] ["FE3B4] ["FE3B5]
+
+\definemathovertextextensible [bothtext] [overbartext] ["FE33E]
+\definemathundertextextensible [bothtext] [underbartext] ["FE33F]
+\definemathovertextextensible [bothtext] [overbracetext] ["FE3DE]
+\definemathundertextextensible [bothtext] [underbracetext] ["FE3DF]
+\definemathovertextextensible [bothtext] [overparenttext] ["FE3DC]
+\definemathundertextextensible [bothtext] [underparenttext] ["FE3DD]
+\definemathovertextextensible [bothtext] [overbrackettext] ["FE3B4]
+\definemathundertextextensible [bothtext] [underbrackettext] ["FE3B5]
+
%D Some bonus ones (for the moment here):
\definemathstackers
@@ -927,6 +1225,15 @@
\defineextensiblefiller [Leftrightarrowfill] ["27FA]
\defineextensiblefiller [Leftrightarrowfill] ["27FA]
+%defineextensiblefiller [overbarfill] ["FE33E] % untested
+%defineextensiblefiller [underbarfill] ["FE33F] % untested
+\defineextensiblefiller [overbracefill] ["FE3DE] % untested
+\defineextensiblefiller [underbracefill] ["FE3DF] % untested
+\defineextensiblefiller [overparentfill] ["FE3DC] % untested
+\defineextensiblefiller [underparentfill] ["FE3DD] % untested
+\defineextensiblefiller [overbracketfill] ["FE3B4] % untested
+\defineextensiblefiller [underbracketfill] ["FE3B5] % untested
+
%D Extra:
\unexpanded\edef\singlebond{\mathematics{\mathsurround\zeropoint\char\number"002D}}
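
A usage sketch of the extensibles introduced above. As far as the definitions
suggest, the double variants take the base as one argument, while the *text
variants take the base plus the attached text:

\starttext
\startformula
    \overbraceunderbrace{a + b + c} \qquad
    \overbartext{a + b}{sum} \qquad
    \underbracetext{x_1 + \cdots + x_n}{terms}
\stopformula
\stoptext
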
diff --git a/tex/context/base/math-tag.lua b/tex/context/base/math-tag.lua
index ab5902dd4..0d900b3a1 100644
--- a/tex/context/base/math-tag.lua
+++ b/tex/context/base/math-tag.lua
@@ -6,15 +6,30 @@ if not modules then modules = { } end modules ['math-tag'] = {
license = "see context related readme files"
}
+-- todo: have a local list with local tags that then get appended
+
-- use lpeg matchers
local find, match = string.find, string.match
-local insert, remove = table.insert, table.remove
+local insert, remove, concat = table.insert, table.remove, table.concat
+
+local attributes = attributes
+local nodes = nodes
-local attributes, nodes = attributes, nodes
+local nuts = nodes.nuts
+local tonut = nuts.tonut
-local set_attributes = nodes.setattributes
-local traverse_nodes = node.traverse
+local getnext = nuts.getnext
+local getid = nuts.getid
+local getchar = nuts.getchar
+local getlist = nuts.getlist
+local getfield = nuts.getfield
+local getsubtype = nuts.getsubtype
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local set_attributes = nuts.setattributes
+local traverse_nodes = nuts.traverse
local nodecodes = nodes.nodecodes
@@ -31,15 +46,32 @@ local math_style_code = nodecodes.style -- attr style
local math_choice_code = nodecodes.choice -- attr display text script scriptscript
local math_fence_code = nodecodes.fence -- attr subtype
+local accentcodes = nodes.accentcodes
+
+local math_fixed_top = accentcodes.fixedtop
+local math_fixed_bottom = accentcodes.fixedbottom
+local math_fixed_both = accentcodes.fixedboth
+
+local kerncodes = nodes.kerncodes
+
+local fontkern_code = kerncodes.fontkern
+
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
local glue_code = nodecodes.glue
+local kern_code = nodecodes.kern
+local math_code = nodecodes.math
+
+local processnoads = noads.process
local a_tagged = attributes.private('tagged')
+local a_taggedpar = attributes.private('taggedpar')
local a_exportstatus = attributes.private('exportstatus')
local a_mathcategory = attributes.private('mathcategory')
local a_mathmode = attributes.private('mathmode')
+local a_fontkern = attributes.private('fontkern')
local tags = structures.tags
@@ -55,31 +87,53 @@ local mathcodes = mathematics.codes
local ordinary_code = mathcodes.ordinary
local variable_code = mathcodes.variable
+local fromunicode16 = fonts.mappings.fromunicode16
+local font_of_family = node.family_font
+local fontcharacters = fonts.hashes.characters
+
+local report_tags = logs.reporter("structure","tags")
+
local process
local function processsubsup(start)
-- At some point we might need to add an attribute signaling the
-- super- and subscripts because TeX and MathML use a different
- -- order.
- local nucleus, sup, sub = start.nucleus, start.sup, start.sub
+ -- order. The mrows are needed to keep mn's separated.
+ local nucleus = getfield(start,"nucleus")
+ local sup = getfield(start,"sup")
+ local sub = getfield(start,"sub")
if sub then
if sup then
- start[a_tagged] = start_tagged("msubsup")
+ setattr(start,a_tagged,start_tagged("msubsup"))
+ -- start_tagged("mrow")
process(nucleus)
+ -- stop_tagged()
+ start_tagged("mrow")
process(sub)
+ stop_tagged()
+ start_tagged("mrow")
process(sup)
stop_tagged()
+ stop_tagged()
else
- start[a_tagged] = start_tagged("msub")
+ setattr(start,a_tagged,start_tagged("msub"))
+ -- start_tagged("mrow")
process(nucleus)
+ -- stop_tagged()
+ start_tagged("mrow")
process(sub)
stop_tagged()
+ stop_tagged()
end
elseif sup then
- start[a_tagged] = start_tagged("msup")
+ setattr(start,a_tagged,start_tagged("msup"))
+ -- start_tagged("mrow")
process(nucleus)
+ -- stop_tagged()
+ start_tagged("mrow")
process(sup)
stop_tagged()
+ stop_tagged()
else
process(nucleus)
end
@@ -90,254 +144,407 @@ end
-- todo: variants -> original
local actionstack = { }
+local fencesstack = { }
+
+-- glyph nodes and such can happen in under and over stuff
+
+local function getunicode(n) -- instead of getchar
+ local char = getchar(n)
+ local font = font_of_family(getfield(n,"fam")) -- font_of_family
+ local data = fontcharacters[font][char]
+ return data.unicode or char
+end
+
+-------------------
+
+local content = { }
+local found = false
+
+content[math_char_code] = function() found = true end
+
+local function hascontent(head)
+ found = false
+ processnoads(head,content,"content")
+ return found
+end
+
+--------------------
+
+local function showtag(n,id)
+ local attr = getattr(n,a_tagged)
+ report_tags("%s = %s",nodecodes[id or getid(n)],attr and taglist[attr].tagname or "?")
+end
process = function(start) -- we cannot use the processor as we have no finalizers (yet)
+ local mtexttag = nil
while start do
- local id = start.id
- if id == math_char_code then
- local char = start.char
- -- check for code
- local a = start[a_mathcategory]
- if a then
- a = { detail = a }
+ local id = getid(start)
+
+-- showtag(start,id)
+
+ if id == glyph_code or id == disc_code then
+ if not mtexttag then
+ mtexttag = start_tagged("mtext")
end
- local code = getmathcode(char)
- if code then
- code = code[1]
+ setattr(start,a_tagged,mtexttag)
+ elseif mtexttag and id == kern_code and (getsubtype(start) == fontkern_code or getattr(start,a_fontkern)) then
+ setattr(start,a_tagged,mtexttag)
+ else
+ if mtexttag then
+ stop_tagged()
+ mtexttag = nil
end
- local tag
- if code == ordinary_code or code == variable_code then
- local ch = chardata[char]
- local mc = ch and ch.mathclass
- if mc == "number" then
- tag = "mn"
- elseif mc == "variable" or not mc then -- variable is default
- tag = "mi"
+ if id == math_char_code then
+ local char = getchar(start)
+ local code = getmathcode(char)
+ if code then
+ code = code[1]
+ end
+ local tag
+ if code == ordinary_code or code == variable_code then
+ local ch = chardata[char]
+ local mc = ch and ch.mathclass
+ if mc == "number" then
+ tag = "mn"
+ elseif mc == "variable" or not mc then -- variable is default
+ tag = "mi"
+ else
+ tag = "mo"
+ end
else
tag = "mo"
end
- else
- tag = "mo"
- end
- start[a_tagged] = start_tagged(tag,a)
- stop_tagged()
- break -- okay?
- elseif id == math_textchar_code then
- -- check for code
- local a = start[a_mathcategory]
- if a then
- start[a_tagged] = start_tagged("ms",{ detail = a })
- else
- start[a_tagged] = start_tagged("ms")
- end
- stop_tagged()
- break
- elseif id == math_delim_code then
- -- check for code
- start[a_tagged] = start_tagged("mo")
- stop_tagged()
- break
- elseif id == math_style_code then
- -- has a next
- elseif id == math_noad_code then
- processsubsup(start)
- elseif id == math_box_code or id == hlist_code or id == vlist_code then
- -- keep an eye on math_box_code and see what ends up in there
- local attr = start[a_tagged]
- local last = attr and taglist[attr]
- if last and find(last[#last],"formulacaption[:%-]") then
- -- leave alone, will nicely move to the outer level
- else
- local text = start_tagged("mtext")
- start[a_tagged] = text
- local list = start.list
- if not list then
- -- empty list
- elseif not attr then
- -- box comes from strange place
- set_attributes(list,a_tagged,text)
+ local a = getattr(start,a_mathcategory)
+ if a then
+ setattr(start,a_tagged,start_tagged(tag,{ mathcategory = a }))
else
- -- Beware, the first node in list is the actual list so we definitely
- -- need to nest. This approach is a hack, maybe I'll make a proper
- -- nesting feature to deal with this at another level. Here we just
- -- fake structure by enforcing the inner one.
- local tagdata = taglist[attr]
- local common = #tagdata + 1
- local function runner(list) -- quite inefficient
- local cache = { } -- we can have nested unboxed mess so best local to runner
- for n in traverse_nodes(list) do
- local id = n.id
- local aa = n[a_tagged]
- if aa then
- local ac = cache[aa]
- if not ac then
- local tagdata = taglist[aa]
- local extra = #tagdata
- if common <= extra then
- for i=common,extra do
- ac = restart_tagged(tagdata[i]) -- can be made faster
- end
- for i=common,extra do
- stop_tagged() -- can be made faster
+ setattr(start,a_tagged,start_tagged(tag)) -- todo: a_mathcategory
+ end
+ stop_tagged()
+ break -- okay?
+ elseif id == math_textchar_code then -- or id == glyph_code
+ -- check for code
+ local a = getattr(start,a_mathcategory)
+ if a then
+ setattr(start,a_tagged,start_tagged("ms",{ mathcategory = a })) -- mtext
+ else
+ setattr(start,a_tagged,start_tagged("ms")) -- mtext
+ end
+ stop_tagged()
+ break
+ elseif id == math_delim_code then
+ -- check for code
+ setattr(start,a_tagged,start_tagged("mo"))
+ stop_tagged()
+ break
+ elseif id == math_style_code then
+ -- has a next
+ elseif id == math_noad_code then
+ processsubsup(start)
+ elseif id == math_box_code or id == hlist_code or id == vlist_code then
+ -- keep an eye on math_box_code and see what ends up in there
+ local attr = getattr(start,a_tagged)
+ local specification = taglist[attr]
+ local tag = specification.tagname
+ if tag == "formulacaption" then
+ -- skip
+ elseif tag == "mstacker" then
+ local list = getfield(start,"list")
+ if list then
+ process(list)
+ end
+ else
+ if tag ~= "mstackertop" and tag ~= "mstackermid" and tag ~= "mstackerbot" then
+ tag = "mtext"
+ end
+ local text = start_tagged(tag)
+ setattr(start,a_tagged,text)
+ local list = getfield(start,"list")
+ if not list then
+ -- empty list
+ elseif not attr then
+ -- box comes from strange place
+ set_attributes(list,a_tagged,text) -- only the first node ?
+ else
+ -- Beware, the first node in list is the actual list so we definitely
+ -- need to nest. This approach is a hack, maybe I'll make a proper
+ -- nesting feature to deal with this at another level. Here we just
+ -- fake structure by enforcing the inner one.
+ --
+ -- todo: have a local list with local tags that then get appended
+ --
+ local tagdata = specification.taglist
+ local common = #tagdata + 1
+ local function runner(list,depth) -- quite inefficient
+ local cache = { } -- we can have nested unboxed mess so best local to runner
+ local keep = nil
+                            -- local keep = { } -- in case we might need to move keep outside
+ for n in traverse_nodes(list) do
+ local id = getid(n)
+ local mth = id == math_code and getsubtype(n)
+ if mth == 0 then
+ -- insert(keep,text)
+ keep = text
+ text = start_tagged("mrow")
+ common = common + 1
+ end
+ local aa = getattr(n,a_tagged)
+ if aa then
+ local ac = cache[aa]
+ if not ac then
+ local tagdata = taglist[aa].taglist
+ local extra = #tagdata
+ if common <= extra then
+ for i=common,extra do
+ ac = restart_tagged(tagdata[i]) -- can be made faster
+ end
+ for i=common,extra do
+ stop_tagged() -- can be made faster
+ end
+ else
+ ac = text
end
- else
- ac = text
+ cache[aa] = ac
end
- cache[aa] = ac
+ setattr(n,a_tagged,ac)
+ else
+ setattr(n,a_tagged,text)
+ end
+
+ if id == hlist_code or id == vlist_code then
+ runner(getlist(n),depth+1)
+ elseif id == glyph_code then
+ runner(getfield(n,"components"),depth+1) -- this should not be needed
+ elseif id == disc_code then
+ runner(getfield(n,"pre"),depth+1) -- idem
+ runner(getfield(n,"post"),depth+1) -- idem
+ runner(getfield(n,"replace"),depth+1) -- idem
+ end
+ if mth == 1 then
+ stop_tagged()
+ -- text = remove(keep)
+ text = keep
+ common = common - 1
end
- n[a_tagged] = ac
- else
- n[a_tagged] = text
end
- if id == hlist_code or id == vlist_code then
- runner(n.list)
+ end
+ runner(list,0)
+ end
+ stop_tagged()
+ end
+ elseif id == math_sub_code then -- normally a hbox
+ local list = getfield(start,"list")
+ if list then
+ local attr = getattr(start,a_tagged)
+ local last = attr and taglist[attr]
+ if last then
+ local tag = last.tagname
+ local detail = last.detail
+ if tag == "maction" then
+ if detail == "" then
+ setattr(start,a_tagged,start_tagged("mrow"))
+ process(list)
+ stop_tagged()
+                    elseif actionstack[#actionstack] == detail then -- 'detail', the old local 'action' is gone
+ setattr(start,a_tagged,start_tagged("mrow"))
+ process(list)
+ stop_tagged()
+ else
+                        insert(actionstack,detail)
+                        setattr(start,a_tagged,start_tagged("mrow",{ detail = detail }))
+ process(list)
+ stop_tagged()
+ remove(actionstack)
end
+ elseif tag == "mstacker" then -- or tag == "mstackertop" or tag == "mstackermid" or tag == "mstackerbot" then
+ -- looks like it gets processed twice
+-- do we still end up here ?
+ setattr(start,a_tagged,restart_tagged(attr)) -- so we just reuse the attribute
+ process(list)
+ stop_tagged()
+ else
+ setattr(start,a_tagged,start_tagged("mrow"))
+ process(list)
+ stop_tagged()
end
+ else -- never happens, we're always document
+ setattr(start,a_tagged,start_tagged("mrow"))
+ process(list)
+ stop_tagged()
end
- runner(list)
end
+ elseif id == math_fraction_code then
+ local num = getfield(start,"num")
+ local denom = getfield(start,"denom")
+ local left = getfield(start,"left")
+ local right = getfield(start,"right")
+ if left then
+ setattr(left,a_tagged,start_tagged("mo"))
+ process(left)
+ stop_tagged()
+ end
+ setattr(start,a_tagged,start_tagged("mfrac"))
+ process(num)
+ process(denom)
stop_tagged()
- end
- elseif id == math_sub_code then
- local list = start.list
- if list then
- local attr = start[a_tagged]
- local last = attr and taglist[attr]
- local action = last and match(last[#last],"maction:(.-)%-")
- if action and action ~= "" then
- if actionstack[#actionstack] == action then
- start[a_tagged] = start_tagged("mrow")
- process(list)
+ if right then
+ setattr(right,a_tagged,start_tagged("mo"))
+ process(right)
+ stop_tagged()
+ end
+ elseif id == math_choice_code then
+ local display = getfield(start,"display")
+ local text = getfield(start,"text")
+ local script = getfield(start,"script")
+ local scriptscript = getfield(start,"scriptscript")
+ if display then
+ process(display)
+ end
+ if text then
+ process(text)
+ end
+ if script then
+ process(script)
+ end
+ if scriptscript then
+ process(scriptscript)
+ end
+ elseif id == math_fence_code then
+ local delim = getfield(start,"delim")
+ local subtype = getfield(start,"subtype")
+ if subtype == 1 then
+ -- left
+ local properties = { }
+ insert(fencesstack,properties)
+ setattr(start,a_tagged,start_tagged("mfenced",{ properties = properties })) -- needs checking
+ if delim then
+ start_tagged("ignore")
+ local chr = getfield(delim,"small_char")
+ if chr ~= 0 then
+ properties.left = chr
+ end
+ process(delim)
stop_tagged()
- else
- insert(actionstack,action)
- start[a_tagged] = start_tagged("mrow",{ detail = action })
- process(list)
+ end
+ start_tagged("mrow") -- begin of subsequence
+ elseif subtype == 2 then
+ -- middle
+ if delim then
+ start_tagged("ignore")
+ local top = fencesstack[#fencesstack]
+ local chr = getfield(delim,"small_char")
+ if chr ~= 0 then
+ local mid = top.middle
+ if mid then
+ mid[#mid+1] = chr
+ else
+ top.middle = { chr }
+ end
+ end
+ process(delim)
stop_tagged()
- remove(actionstack)
end
- else
- start[a_tagged] = start_tagged("mrow")
- process(list)
+ stop_tagged() -- end of subsequence
+ start_tagged("mrow") -- begin of subsequence
+ elseif subtype == 3 then
+ local properties = remove(fencesstack)
+ if not properties then
+ report_tags("missing right fence")
+ properties = { }
+ end
+ if delim then
+ start_tagged("ignore")
+ local chr = getfield(delim,"small_char")
+ if chr ~= 0 then
+ properties.right = chr
+ end
+ process(delim)
+ stop_tagged()
+ end
+ stop_tagged() -- end of subsequence
stop_tagged()
+ else
+ -- can't happen
end
- end
- elseif id == math_fraction_code then
- local num, denom, left, right = start.num, start.denom, start.left, start.right
- if left then
- left[a_tagged] = start_tagged("mo")
- process(left)
- stop_tagged()
- end
- start[a_tagged] = start_tagged("mfrac")
- process(num)
- process(denom)
- stop_tagged()
- if right then
- right[a_tagged] = start_tagged("mo")
- process(right)
- stop_tagged()
- end
- elseif id == math_choice_code then
- local display, text, script, scriptscript = start.display, start.text, start.script, start.scriptscript
- if display then
- process(display)
- end
- if text then
- process(text)
- end
- if script then
- process(script)
- end
- if scriptscript then
- process(scriptscript)
- end
- elseif id == math_fence_code then
- local delim = start.delim
- local subtype = start.subtype
- if subtype == 1 then
- -- left
- start[a_tagged] = start_tagged("mfenced")
- if delim then
- start[a_tagged] = start_tagged("mleft")
- process(delim)
+ elseif id == math_radical_code then
+ local left = getfield(start,"left")
+ local degree = getfield(start,"degree")
+ if left then
+ start_tagged("ignore")
+ process(left) -- root symbol, ignored
stop_tagged()
end
- elseif subtype == 2 then
- -- middle
- if delim then
- start[a_tagged] = start_tagged("mmiddle")
- process(delim)
+ if degree and hascontent(degree) then
+ setattr(start,a_tagged,start_tagged("mroot"))
+ processsubsup(start)
+ process(degree)
stop_tagged()
- end
- elseif subtype == 3 then
- if delim then
- start[a_tagged] = start_tagged("mright")
- process(delim)
+ else
+ setattr(start,a_tagged,start_tagged("msqrt"))
+ processsubsup(start)
stop_tagged()
end
- stop_tagged()
- else
- -- can't happen
- end
- elseif id == math_radical_code then
- local left, degree = start.left, start.degree
- if left then
- start_tagged("")
- process(left) -- root symbol, ignored
- stop_tagged()
- end
- if degree then -- not good enough, can be empty mlist
- start[a_tagged] = start_tagged("mroot")
- processsubsup(start)
- process(degree)
- stop_tagged()
- else
- start[a_tagged] = start_tagged("msqrt")
- processsubsup(start)
- stop_tagged()
- end
- elseif id == math_accent_code then
- local accent, bot_accent = start.accent, start.bot_accent
- if bot_accent then
- if accent then
- start[a_tagged] = start_tagged("munderover",{ detail = "accent" })
+ elseif id == math_accent_code then
+ local accent = getfield(start,"accent")
+ local bot_accent = getfield(start,"bot_accent")
+ local subtype = getsubtype(start)
+ if bot_accent then
+ if accent then
+ setattr(start,a_tagged,start_tagged("munderover", {
+ accent = true,
+ top = getunicode(accent),
+ bottom = getunicode(bot_accent),
+ topfixed = subtype == math_fixed_top or subtype == math_fixed_both,
+ bottomfixed = subtype == math_fixed_bottom or subtype == math_fixed_both,
+ }))
+ processsubsup(start)
+ process(bot_accent)
+ process(accent)
+ stop_tagged()
+ else
+ setattr(start,a_tagged,start_tagged("munder", {
+ accent = true,
+ bottom = getunicode(bot_accent),
+ bottomfixed = subtype == math_fixed_bottom or subtype == math_fixed_both,
+ }))
+ processsubsup(start)
+ process(bot_accent)
+ stop_tagged()
+ end
+ elseif accent then
+ setattr(start,a_tagged,start_tagged("mover", {
+ accent = true,
+ top = getunicode(accent),
+ topfixed = subtype == math_fixed_top or subtype == math_fixed_both,
+ }))
processsubsup(start)
- process(bot_accent)
process(accent)
stop_tagged()
else
- start[a_tagged] = start_tagged("munder",{ detail = "accent" })
processsubsup(start)
- process(bot_accent)
- stop_tagged()
end
- elseif accent then
- start[a_tagged] = start_tagged("mover",{ detail = "accent" })
- processsubsup(start)
- process(accent)
+ elseif id == glue_code then
+ -- local spec = getfield(start,"spec")
+ -- setattr(start,a_tagged,start_tagged("mspace",{ width = getfield(spec,"width") }))
+ setattr(start,a_tagged,start_tagged("mspace"))
stop_tagged()
else
- processsubsup(start)
+            setattr(start,a_tagged,start_tagged("merror", { detail = nodecodes[id] })) -- 'id' (the original 'i' is undefined)
+ stop_tagged()
end
- elseif id == glue_code then
- start[a_tagged] = start_tagged("mspace")
- stop_tagged()
- else
- start[a_tagged] = start_tagged("merror", { detail = nodecodes[i] })
- stop_tagged()
end
- start = start.next
+ start = getnext(start)
+ end
+ if mtexttag then
+ stop_tagged()
end
end
function noads.handlers.tags(head,style,penalties)
- local v_math = start_tagged("math")
- local v_mrow = start_tagged("mrow")
- local v_mode = head[a_mathmode]
- head[a_tagged] = v_math
- head[a_tagged] = v_mrow
- tags.setattributehash(v_math,"mode",v_mode == 1 and "display" or "inline")
+ head = tonut(head)
+ local v_mode = getattr(head,a_mathmode)
+ local v_math = start_tagged("math", { mode = v_mode == 1 and "display" or "inline" })
+ setattr(head,a_tagged,start_tagged("mrow"))
process(head)
stop_tagged()
stop_tagged()
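
None of the tagging above has any effect unless structure tags are generated in
the first place. A minimal document that exercises this pass (standard export
setup, nothing specific to this patch):

\setupbackend [export=yes]   % produces the XML/MathML export
\setuptagging [state=start]  % structure tags (also implied by the export)

\starttext
\startformula
    x_{n+1} = \frac{a}{b} + \sqrt[3]{x}
\stopformula
\stoptext
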
diff --git a/tex/context/base/math-vfu.lua b/tex/context/base/math-vfu.lua
index 6d9a9f903..a683e02cf 100644
--- a/tex/context/base/math-vfu.lua
+++ b/tex/context/base/math-vfu.lua
@@ -41,6 +41,8 @@ local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
local formatters = string.formatters
+local chardata = characters.data
+
local mathencodings = allocate()
fonts.encodings.math = mathencodings -- better is then: fonts.encodings.vectors
local vfmath = allocate()
@@ -432,6 +434,14 @@ local function repeated(main,characters,id,size,unicode,u,n,private,fraction) --
end
end
+local function cloned(main,characters,id,size,source,target)
+ local data = characters[source]
+ if data then
+ characters[target] = data
+ return true
+ end
+end
+
-- we use the fact that context defines the smallest sizes first .. a real dirty and ugly hack
local data_of_smaller = nil
@@ -451,10 +461,11 @@ function vfmath.addmissing(main,id,size)
-- here id is the index in fonts (normally 14 or so) and that slot points to self
- local characters = main.characters
- local shared = main.shared
- local variables = main.goodies.mathematics and main.goodies.mathematics.variables or { }
+ local characters = main.characters
+ local shared = main.shared
+ local variables = main.goodies.mathematics and main.goodies.mathematics.variables or { }
local joinrelfactor = variables.joinrelfactor or 3
+
for i=0x7A,0x7D do
make(main,characters,id,size,i,1)
end
@@ -536,23 +547,24 @@ function vfmath.addmissing(main,id,size)
-- 21CB leftrightharpoon
-- 21CC rightleftharpoon
- stack (main,characters,id,size,0x2259,0x0003D,3,0x02227) -- \buildrel\wedge\over=
- jointwo (main,characters,id,size,0x22C8,0x022B3,joinrelfactor,0x022B2) -- \mathrel\triangleright\joinrel\mathrel\triangleleft (4 looks better than 3)
- jointwo (main,characters,id,size,0x22A7,0x0007C,joinrelfactor,0x0003D) -- \mathrel|\joinrel=
- jointwo (main,characters,id,size,0x2260,0x00338,0,0x0003D) -- \not\equal
- jointwo (main,characters,id,size,0x2284,0x00338,0,0x02282) -- \not\subset
- jointwo (main,characters,id,size,0x2285,0x00338,0,0x02283) -- \not\supset
- jointwo (main,characters,id,size,0x2209,0x00338,0,0x02208) -- \not\in
- jointwo (main,characters,id,size,0x2254,0x03A,0,0x03D) -- := (≔)
+ stack(main,characters,id,size,0x2259,0x0003D,3,0x02227) -- \buildrel\wedge\over=
+
+ jointwo(main,characters,id,size,0x22C8,0x022B3,joinrelfactor,0x022B2) -- \mathrel\triangleright\joinrel\mathrel\triangleleft (4 looks better than 3)
+ jointwo(main,characters,id,size,0x22A7,0x0007C,joinrelfactor,0x0003D) -- \mathrel|\joinrel=
+ jointwo(main,characters,id,size,0x2260,0x00338,0,0x0003D) -- \not\equal
+ jointwo(main,characters,id,size,0x2284,0x00338,0,0x02282) -- \not\subset
+ jointwo(main,characters,id,size,0x2285,0x00338,0,0x02283) -- \not\supset
+ jointwo(main,characters,id,size,0x2209,0x00338,0,0x02208) -- \not\in
+ jointwo(main,characters,id,size,0x2254,0x03A,0,0x03D) -- := (≔)
repeated(main,characters,id,size,0x222C,0x222B,2,0xFF800,1/3)
repeated(main,characters,id,size,0x222D,0x222B,3,0xFF810,1/3)
- characters[0xFE325] = fastcopy(characters[0x2032])
-
- raise (main,characters,id,size,0x02032,0xFE325,1,id_of_smaller) -- prime
- raise (main,characters,id,size,0x02033,0xFE325,2,id_of_smaller) -- double prime
- raise (main,characters,id,size,0x02034,0xFE325,3,id_of_smaller) -- triple prime
+ if cloned(main,characters,id,size,0x2032,0xFE325) then
+ raise(main,characters,id,size,0x2032,0xFE325,1,id_of_smaller) -- prime
+ raise(main,characters,id,size,0x2033,0xFE325,2,id_of_smaller) -- double prime
+ raise(main,characters,id,size,0x2034,0xFE325,3,id_of_smaller) -- triple prime
+ end
-- there are more (needs discussion first):
@@ -882,7 +894,7 @@ function vfmath.define(specification,set,goodies)
local ru = rv[unicode]
if not ru then
if trace_virtual then
- report_virtual("unicode slot %U has no index %H in vector %a for font %a",unicode,index,vectorname,fontname)
+ report_virtual("unicode slot %U has no index %H in vector %a for font %a (%S)",unicode,index,vectorname,fontname,chardata[unicode].description)
elseif not already_reported then
report_virtual("the mapping is incomplete for %a at %p",name,size)
already_reported = true
diff --git a/tex/context/base/meta-fig.mkiv b/tex/context/base/meta-fig.mkiv
index 46dc4cffc..bf37aa7bf 100644
--- a/tex/context/base/meta-fig.mkiv
+++ b/tex/context/base/meta-fig.mkiv
@@ -47,6 +47,9 @@
\c!command=\meta_process_graphic_instance{\fittingpageparameter\c!instance},
\c!instance=]
+\unexpanded\def\setupMPpage
+ {\setupfittingpage[MPpage]}
+
%D \macros
%D {MPfigure}
%D
@@ -56,7 +59,7 @@
{\bgroup
\getfiguredimensions[#1]% [\c!object=\v!no] already set
\startMPcode
- externalfigure "#1"
+ draw externalfigure "#1"
xscaled \the\dimexpr\figurewidth \relax\space % must be points
yscaled \the\dimexpr\figureheight\relax\space % must be points
#2 ;
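
The new \setupMPpage is just the forward to \setupfittingpage[MPpage] shown
above, and the added draw makes sure the external figure really ends up in the
current picture. A usage sketch (cow.pdf is one of the sample figures shipped
with ConTeXt):

\startMPpage
    draw externalfigure "cow.pdf" xscaled 4cm yscaled 3cm ;
    draw boundingbox currentpicture withpen pencircle scaled 1pt ;
\stopMPpage
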
diff --git a/tex/context/base/meta-fnt.lua b/tex/context/base/meta-fnt.lua
index cf47f0c92..95bdfa6d9 100644
--- a/tex/context/base/meta-fnt.lua
+++ b/tex/context/base/meta-fnt.lua
@@ -21,32 +21,18 @@ mpfonts.version = mpfonts.version or 1.20
mpfonts.inline = true
mpfonts.cache = containers.define("fonts", "mp", mpfonts.version, true)
-metapost.fonts = metapost.fonts or { }
+metapost.fonts = metapost.fonts or { }
+
+local function unicodetoactualtext(...)
+ unicodetoactualtext = backends.codeinjections.unicodetoactualtext
+ return unicodetoactualtext(...)
+end
-- a few glocals
local characters, descriptions = { }, { }
local factor, code, slot, width, height, depth, total, variants, bbox, llx, lly, urx, ury = 100, { }, 0, 0, 0, 0, 0, 0, true, 0, 0, 0, 0
--- The next variant of ActualText is what Taco and I could come up with
--- eventually. As of September 2013 Acrobat copies okay, Summatra copies a
--- question mark, pdftotext injects an extra space and Okular adds a
--- newline plus space.
-
--- return formatters["BT /Span << /ActualText (CONTEXT) >> BDC [] TJ % t EMC ET"](code)
-
-local function topdf(n,code)
- if n < 0x10000 then
- return formatters["BT /Span << /ActualText >> BDC [] TJ % t EMC ET"](n,code)
- else
- return formatters["BT /Span << /ActualText >> BDC [] TJ % t EMC ET"](n/1024+0xD800,n%1024+0xDC00,code)
- end
-end
-
--- local function topdf(n,code)
--- return formatters["/Span << /ActualText (CTX) >> BDC % t EMC"](code)
--- end
-
local flusher = {
startfigure = function(_chr_,_llx_,_lly_,_urx_,_ury_)
code = { }
@@ -68,6 +54,7 @@ local flusher = {
end,
stopfigure = function()
local cd = chardata[n]
+ local code = unicodetoactualtext(slot,concat(code," ")) or ""
descriptions[slot] = {
-- unicode = slot,
name = cd and cd.adobename,
@@ -79,7 +66,7 @@ local flusher = {
if inline then
characters[slot] = {
commands = {
- { "special", "pdf: " .. topdf(slot,code) },
+ { "special", "pdf:" .. code },
}
}
else
@@ -88,13 +75,14 @@ local flusher = {
{
"image",
{
- stream = topdf(slot,code),
+ stream = code,
bbox = { 0, -depth * 65536, width * 65536, height * 65536 }
},
},
}
}
end
+ code = nil -- no need to keep that
end
}
@@ -261,7 +249,16 @@ function metapost.fonts.define(specification)
} )
end
-commands.definemetafont = metapost.fonts.define
+interfaces.implement {
+ name = "definemetafont",
+ actions = metapost.fonts.define,
+ arguments = {
+ {
+ { "fontname" },
+ { "filename" },
+ }
+ }
+}
-- metapost.fonts.define {
-- fontname = "bidi",
diff --git a/tex/context/base/meta-fnt.mkiv b/tex/context/base/meta-fnt.mkiv
index 603fcf14d..e54c0be0a 100644
--- a/tex/context/base/meta-fnt.mkiv
+++ b/tex/context/base/meta-fnt.mkiv
@@ -21,11 +21,11 @@
{\dotripleempty\meta_font_define}
\def\meta_font_define[#1][#2][#3]%
- {\ctxcommand{definemetafont {
- fontname = "#1",
- filename = "#2"
+ {\clf_definemetafont
+ fontname {#1}%
+ filename {#2}%
% no #3 settings yet (compose, instances)
- }}}
+ \relax}
% \startluacode
% metapost.fonts.define { fontname = "bidi-symbols", filename = "bidi-symbols.mp" }
diff --git a/tex/context/base/meta-fun.lua b/tex/context/base/meta-fun.lua
index 78ee25baf..7aaaf2818 100644
--- a/tex/context/base/meta-fun.lua
+++ b/tex/context/base/meta-fun.lua
@@ -26,7 +26,7 @@ function metafun.topath(t,connector)
if type(ti) == "string" then
context(ti)
else
- context("(%s,%s)",ti.x or ti[1] or 0,ti.y or ti[2] or 0)
+ context("(%F,%F)",ti.x or ti[1] or 0,ti.y or ti[2] or 0)
end
end
else
@@ -47,7 +47,7 @@ function metafun.interpolate(f,b,e,s,c)
else
done = true
end
- context("(%s,%s)",i,d(i))
+ context("(%F,%F)",i,d(i))
end
end
if not done then
diff --git a/tex/context/base/meta-imp-dum.mkiv b/tex/context/base/meta-imp-dum.mkiv
index 1daff57ac..e6ccc234c 100644
--- a/tex/context/base/meta-imp-dum.mkiv
+++ b/tex/context/base/meta-imp-dum.mkiv
@@ -60,6 +60,8 @@
% \stopuseMPgraphic
\startuseMPgraphic{figure:placeholder}{width,height,reduction,color}
+ begingroup ;
+ save w, h, d, r, p, c, b ;
numeric w, h, d, r ; path p ;
if cmykcolor \MPvar{color} :
cmykcolor c, b ; b := (0,0,0,0)
@@ -80,6 +82,7 @@
withcolor r[c randomized(.3,.9),b] ;
endfor ;
clip currentpicture to p ;
+ endgroup ;
\stopuseMPgraphic
\defineoverlay
diff --git a/tex/context/base/meta-imp-tab.mkiv b/tex/context/base/meta-imp-tab.mkiv
new file mode 100644
index 000000000..a4affbea4
--- /dev/null
+++ b/tex/context/base/meta-imp-tab.mkiv
@@ -0,0 +1,73 @@
+%D \module
+%D [ file=meta-tab,
+%D version=2003.03.21, % very old but now with splitter
+%D title=\METAPOST\ Graphics,
+%D subtitle=Dummy (External) Graphics,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\startuseMPgraphic{TallyBar}
+ height := (10/12) * LineHeight ;
+ span := ( 4/10) * LineHeight ;
+ drift := ( 1/10) * LineHeight ;
+ def d = (uniformdeviate drift) enddef ;
+ for i := 1 upto \MPvar{n} :
+ draw
+ if (i mod 5)=0 :
+ ((-d-4.5span,d)--(+d-0.5span,height-d))
+ else :
+ ((-d,+d)--(+d,height-d))
+ fi
+ shifted (span*i,d-drift) withpen pencircle ;
+ endfor ;
+ currentpicture := currentpicture scaled .75 ;
+\stopuseMPgraphic
+
+\setupMPvariables
+ [TallyBar]
+ [n=0]
+
+\unexpanded\def\tallynumeral#1%
+ {\dontleavehmode
+ \lower.25\exheight\hbox{\useMPgraphic{TallyBar}{n=#1}}}
+
+\unexpanded\def\tallynumerals#1%
+ {\dontleavehmode
+ \begingroup
+ \scratchcounter#1\relax
+ \doloop
+ {\ifnum\scratchcounter>\plusfive
+ \lower.25\exheight\hbox{\useMPgraphic{TallyBar}{n=5}}%
+ \advance\scratchcounter-\plusfive
+ \space
+ \else
+ \lower.25\exheight\hbox{\useMPgraphic{TallyBar}{n=\the\scratchcounter}}%
+ \exitloop
+ \fi}%
+ \endgroup}
+
+\let\FunnyBar\tallynumeral
+
+\defineconversion[tally][\tallynumerals]
+
+\continueifinputfile{meta-imp-tab.mkiv}
+
+\starttext
+
+ \starttabulate[|pr|c|]
+ \NC \tallynumerals {24} \NC \NR
+ \NC \tallynumerals {12} \NC \times \NR
+ \HL
+ \NC \tallynumerals{288} \NC = \NR
+ \stoptabulate
+
+ \tallynumerals{"FFFF}
+
+\stoptext
+
+\endinput
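
Since the tally rendering is also registered as a conversion, it can be used
through the regular conversion interface; a small sketch in addition to the test
at the end of the file:

\starttext
    \tallynumerals{12} \blank
    \convertnumber{tally}{7}
\stoptext
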
diff --git a/tex/context/base/meta-imp-txt.mkiv b/tex/context/base/meta-imp-txt.mkiv
index bcfc5513f..7069d21a4 100644
--- a/tex/context/base/meta-imp-txt.mkiv
+++ b/tex/context/base/meta-imp-txt.mkiv
@@ -104,7 +104,7 @@
\unexpanded\def\getshapecharacteristics
{\doglobal\increment\currentshapetext
- \doifdefinedelse{parlines:\currentshapetext}
+ \doifelsedefined{parlines:\currentshapetext}
{\global\parlines \getvalue{parlines:\currentshapetext}%
\global\chardef\parfirst \getvalue{parfirst:\currentshapetext}%
\global\parvoffset \getvalue{parvoffset:\currentshapetext}%
@@ -152,7 +152,7 @@
%%%%%%% rotfont nog definieren
-\doifundefined{RotFont}{\definefont[RotFont][RegularBold]}
+\doifundefined{RotFont}{\definefont[RotFont][RegularBold*default]}
\unexpanded\def\processfollowingtoken#1% strut toegevoegd
{\appendtoks#1\to\MPtoks
@@ -174,9 +174,7 @@
{\vbox\bgroup
\forgetall
\dontcomplain
- \startMPenvironment
- \doifundefined{RotFont}{\definefont[RotFont][RegularBold]}%
- \stopMPenvironment
+ \doifundefined{RotFont}{\definefont[RotFont][RegularBold*default]}%
\MPtoks\emptytoks
\resetMPdrawing
\startMPdrawing
@@ -225,6 +223,7 @@
withpen pencircle scaled .50pt withcolor green ;
fi ;
endfor ;
+% fill boundingbox currentpicture ;
\stopMPdrawing
\MPdrawingdonetrue
\getMPdrawing
diff --git a/tex/context/base/meta-ini.lua b/tex/context/base/meta-ini.lua
index 713ba3d5d..8f7131263 100644
--- a/tex/context/base/meta-ini.lua
+++ b/tex/context/base/meta-ini.lua
@@ -15,34 +15,6 @@ local context = context
metapost = metapost or { }
--- for the moment downward compatible
-
-local report_metapost = logs.reporter ("metapost")
-local status_metapost = logs.messenger("metapost")
-
-local patterns = { "meta-imp-%s.mkiv", "meta-imp-%s.tex", "meta-%s.mkiv", "meta-%s.tex" } -- we are compatible
-
-local function action(name,foundname)
- status_metapost("library %a is loaded",name)
- context.startreadingfile()
- context.input(foundname)
- context.stopreadingfile()
-end
-
-local function failure(name)
- report_metapost("library %a is unknown or invalid",name)
-end
-
-function commands.useMPlibrary(name)
- commands.uselibrary {
- name = name,
- patterns = patterns,
- action = action,
- failure = failure,
- onlyonce = true,
- }
-end
-
-- experimental
local colorhash = attributes.list[attributes.private('color')]
diff --git a/tex/context/base/meta-ini.mkiv b/tex/context/base/meta-ini.mkiv
index 28ba9e901..299f37cef 100644
--- a/tex/context/base/meta-ini.mkiv
+++ b/tex/context/base/meta-ini.mkiv
@@ -106,7 +106,7 @@
{\dosinglegroupempty\meta_start_extensions}
\def\meta_start_extensions#1#2\stopMPextensions % we could use buffers instead
- {\ctxlua{metapost.setextensions("#1",\!!bs#2\!!es)}}
+ {\clf_setmpextensions{#1}{#2}}
\let\stopMPextensions\relax
@@ -155,12 +155,12 @@
\global\t_meta_inclusions\expandafter{\the\t_meta_inclusions#2}%
\let\currentMPinstance\m_meta_saved_instance}
-\def\meta_preset_definitions
- {\edef\overlaywidth {\overlaywidth \space}%
- \edef\overlayheight {\overlayheight \space}%
- \edef\overlaylinewidth{\overlaylinewidth\space}%
- \edef\currentwidth {\the\hsize \space}%
- \edef\currentheight {\the\vsize \space}}
+% \def\meta_preset_definitions
+% {\edef\overlaywidth {\overlaywidth \space}%
+% \edef\overlayheight {\overlayheight \space}%
+% \edef\overlaylinewidth{\overlaylinewidth\space}}
+
+\let\meta_preset_definitions\relax
\installcommandhandler \??mpinstance {MPinstance} \??mpinstance
@@ -217,13 +217,22 @@
\def\currentMPinstance {\defaultMPinstance}
\def\currentMPformat {\currentMPinstance}
-\defineMPinstance[metafun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes]
-\defineMPinstance[extrafun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes]
-\defineMPinstance[doublefun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes,\c!method=\s!double]
-\defineMPinstance[decimalfun][\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes,\c!method=\s!decimal]
-\defineMPinstance[mprun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes]
-\defineMPinstance[metapost] [\s!format=mpost]
-\defineMPinstance[nofun] [\s!format=mpost]
+\defineMPinstance[metafun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes]
+\defineMPinstance[extrafun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes]
+\defineMPinstance[lessfun] [\s!format=metafun]
+\defineMPinstance[doublefun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes,\c!method=\s!double]
+\defineMPinstance[binaryfun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes,\c!method=\s!binary]
+\defineMPinstance[decimalfun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes,\c!method=\s!decimal]
+
+\defineMPinstance[mprun] [\s!format=metafun,\s!extensions=\v!yes,\s!initializations=\v!yes]
+
+\defineMPinstance[metapost] [\s!format=mpost]
+\defineMPinstance[nofun] [\s!format=mpost]
+\defineMPinstance[doublepost] [\s!format=mpost,\c!method=\s!double]
+\defineMPinstance[binarypost] [\s!format=mpost,\c!method=\s!binary]
+\defineMPinstance[decimalpost][\s!format=mpost,\c!method=\s!decimal]
+
+%defineMPinstance[megapost] [\s!format=mpost,\c!method=\s!decimal]
\newconditional\c_meta_include_initializations
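
Instances are selected by name when starting MetaPost code; the *fun ones load
MetaFun, the *post ones run on top of plain mpost, and the method key maps to the
number system. A usage sketch:

\starttext

\startMPcode{doublefun}
    % metafun is loaded, numbers are doubles
    draw textext(decimal (1/3)) ;
\stopMPcode

\startMPcode{decimalpost}
    % plain mpost format, decimal number system
    draw fullcircle scaled 1cm withpen pencircle scaled 1pt ;
\stopMPcode

\stoptext
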
@@ -264,7 +273,10 @@
\ifx\p_setups\empty \else
\setups[\p_setups]%
\fi
- \useMPinstancestyleandcolor\c!textstyle\c!textcolor}
+ \useMPinstancestyleparameter\c!textstyle}
+
+\def\meta_set_current_color
+ {\useMPinstancecolorparameter\c!textcolor}
\def\meta_stop_current_graphic
{\global\t_meta_definitions\emptytoks
@@ -291,20 +303,19 @@
\forgetall
\edef\p_extensions{\MPinstanceparameter\s!extensions}%
\meta_process_graphic_start
- \normalexpanded{\noexpand\ctxlua{metapost.graphic {
- instance = "\currentMPinstance",
- format = "\currentMPformat",
- data = \!!bs#1;\!!es,
- initializations = \!!bs\meta_flush_current_initializations\!!es,
-% useextensions = "\MPinstanceparameter\s!extensions",
-\ifx\p_extensions\v!yes
- extensions = \!!bs\ctxcommand{getmpextensions("\currentMPinstance")}\!!es,
-\fi
- inclusions = \!!bs\meta_flush_current_inclusions\!!es,
- definitions = \!!bs\meta_flush_current_definitions\!!es,
- figure = "\MPaskedfigure",
- method = "\MPinstanceparameter\c!method",
- }}}%
+ \normalexpanded{\noexpand\clf_mpgraphic
+ instance {\currentMPinstance}%
+ format {\currentMPformat}%
+ data {#1;}%
+ initializations {\meta_flush_current_initializations}%
+ \ifx\p_extensions\v!yes
+ extensions {\clf_getmpextensions{\currentMPinstance}}% goes through tex again
+ \fi
+ inclusions {\meta_flush_current_inclusions}%
+ definitions {\meta_flush_current_definitions}%
+ figure {\MPaskedfigure}%
+ method {\MPinstanceparameter\c!method}%
+ \relax}%
\meta_process_graphic_stop
\meta_stop_current_graphic}
@@ -325,23 +336,43 @@
\newif\ifsetMPrandomseed \setMPrandomseedtrue % false by default
+\let\theMPrandomseed\empty
+
\def\setMPrandomseed
- {\let\theMPrandomseed\empty
- \ifsetMPrandomseed \ifx\getrandomnumber\undefined \else
- \getrandomnumber\localMPseed\zerocount{4095}%
- \def\theMPrandomseed{randomseed:=\localMPseed}%
- \fi\fi}
+ {\ifsetMPrandomseed
+ \def\theMPrandomseed{randomseed:=\mprandomnumber;}%
+ \else
+ \let\theMPrandomseed\empty
+ \fi}
%D Calling up previously defined graphics.
+% \def\includeMPgraphic#1% gets expanded !
+% {\ifcsname\??mpgraphic#1\endcsname
+% \csname\??mpgraphic#1\endcsname ; % ; is safeguard
+% \fi}
+%
+% \unexpanded\def\meta_enable_include % public
+% {\let\meta_handle_use_graphic \thirdofthreearguments
+% \let\meta_handle_reusable_graphic\thirdofthreearguments}
+%
+% but ... we want this too:
+%
+% \startuseMPgraphic{x}
+% draw textext("\externalfigure[foo.pdf]") ;
+% \stopuseMPgraphic
+%
+% \useMPgraphic{x}
+%
+% so we cannot overload unless we let back the original meanings for
+% each graphic ... a better solution is:
+
\def\includeMPgraphic#1% gets expanded !
{\ifcsname\??mpgraphic#1\endcsname
- \csname\??mpgraphic#1\endcsname ; % ; is safeguard
+ \doubleexpandafter\fourthoffourarguments\csname\??mpgraphic#1\endcsname ; % ; is safeguard
\fi}
-\unexpanded\def\meta_enable_include % public
- {\let\meta_handle_use_graphic \thirdofthreearguments
- \let\meta_handle_reusable_graphic\thirdofthreearguments}
+\let\meta_enable_include\relax
%D Drawings (stepwise built):
@@ -397,20 +428,13 @@
\unexpanded\def\meta_grab_clip_path#1#2#3#4% #4 is alternative (called in backend code)
{\begingroup
- \edef\width {#2\space}\let\overlaywidth \width
- \edef\height{#3\space}\let\overlayheight\height
- \ifcsname\??mpclip#1\endcsname
- \meta_start_current_graphic
- \xdef\MPclippath{\normalexpanded{\noexpand\ctxlua{metapost.theclippath {
- instance = "\currentMPinstance",
- format = "\currentMPformat",
- data = \!!bs\getvalue{\??mpclip#1}\!!es,
- initializations = \!!bs\meta_flush_current_initializations\!!es,
- useextensions = "\MPinstanceparameter\s!extensions",
- inclusions = \!!bs\meta_flush_current_inclusions\!!es,
- method = "\MPinstanceparameter\c!method",
- }}}}%
- \meta_stop_current_graphic
+ \edef\width {#2}% \let\overlaywidth \width
+ \edef\height{#3}% \let\overlayheight\height
+ \d_overlay_width #2\onebasepoint
+ \d_overlay_height#3\onebasepoint
+ \edef\currentMPclip{#1}%
+ \ifcsname\??mpclip\currentMPclip\endcsname
+ \meta_grab_clip_path_indeed
\ifx\MPclippath\empty
\xdef\MPclippath{#4}%
\fi
@@ -419,6 +443,19 @@
\fi
\endgroup}
+\def\meta_grab_clip_path_indeed
+ {\meta_start_current_graphic
+ \normalexpanded{\noexpand\clf_mpsetclippath
+ instance {\currentMPinstance}%
+ format {\currentMPformat}%
+ data {\csname\??mpclip\currentMPclip\endcsname}%
+ initializations {\meta_flush_current_initializations}%
+ useextensions {\MPinstanceparameter\s!extensions}%
+ inclusions {\meta_flush_current_inclusions}%
+ method {\MPinstanceparameter\c!method}%
+ \relax}%
+ \meta_stop_current_graphic}
+
%D Since we want labels to follow the document settings, we
%D also set the font related variables.
@@ -426,7 +463,7 @@
{\dontleavehmode
\begingroup
\definedfont[#1]%
- \hskip\cldcontext{fonts.hashes.parameters[font.current()].designsize}sp\relax
+ \hskip\clf_currentdesignsize\scaledpoint\relax
\endgroup}
\definefontsynonym[MetafunDefault][Regular*default]
@@ -434,7 +471,7 @@
\startMPinitializations % scale is not yet ok
defaultfont:="\truefontname{MetafunDefault}";
% defaultscale:=\the\bodyfontsize/10pt; % only when hard coded 10pt
- defaultscale:=1;
+ % defaultscale:=1;
\stopMPinitializations
% watch out, this is a type1 font because mp can only handle 8 bit fonts
@@ -567,7 +604,7 @@
\def\meta_prepare_variable_yes
{\defconvertedcommand\ascii\m_meta_current_variable % otherwise problems
- \doifcolorelse \ascii % with 2\bodyfontsize
+ \doifelsecolor \ascii % with 2\bodyfontsize
{\meta_prepare_variable_color}
{\begingroup
\setbox\b_meta_variable_box\hbox{\scratchdimen\m_meta_current_variable sp}%
@@ -596,7 +633,13 @@
%D \stoptyping
\def\overlaystamp % watch the \MPcolor, since colors can be redefined
- {\overlaywidth:\overlayheight:\overlaydepth:\overlayoffset:\overlaylinewidth:\MPcolor\overlaycolor:\MPcolor\overlaylinecolor}
+ {\the\d_overlay_width :%
+ \the\d_overlay_height :%
+ \the\d_overlay_depth :%
+ \the\d_overlay_offset :%
+ \the\d_overlay_linewidth:%
+ \MPcolor\overlaycolor :% % todo, expand once \m_overlaycolor
+ \MPcolor\overlaylinecolor} % todo, expand once \m_overlaylinecolor
%D A better approach is to let additional variables play a role
%D in determining the uniqueness. In the next macro, the
@@ -779,19 +822,25 @@
\let\reuseMPgraphic \useMPgraphic % we can save a setup here if needed
\let\reusableMPgraphic\reuseMPgraphic % we can save a setup here if needed
-\unexpanded\def\meta_enable_include
- {\let\meta_handle_use_graphic \thirdofthreearguments
- \let\meta_handle_reusable_graphic\thirdofthreearguments}
-
%D \macros
%D {startuniqueMPpagegraphic,uniqueMPpagegraphic}
%D
%D Experimental.
-\def\m_meta_page_prefix{\doifoddpageelse oe}
+\def\m_meta_page_prefix{\doifelseoddpage oe}
+
+% \def\overlaypagestamp
+% {\m_meta_page_prefix:\overlaywidth:\overlayheight:\overlaydepth:\MPcolor\overlaycolor:\MPcolor\overlaylinecolor}
\def\overlaypagestamp
- {\m_meta_page_prefix:\overlaywidth:\overlayheight:\overlaydepth:\MPcolor\overlaycolor:\MPcolor\overlaylinecolor}
+ {\m_meta_page_prefix :%
+ \the\d_overlay_width :%
+ \the\d_overlay_height :%
+ \the\d_overlay_depth :%
+ \the\d_overlay_offset :%
+ \the\d_overlay_linewidth:%
+ \MPcolor\overlaycolor :%
+ \MPcolor\overlaylinecolor}
\unexpanded\def\startuniqueMPpagegraphic
{\dodoublegroupempty\meta_start_unique_page_graphic}
@@ -855,12 +904,21 @@
% makempy.registerfile(filename)
-\startMPinitializations
- boolean collapse_data; collapse_data:=true;
- def data_mpd_file = "\MPdataMPDfile" enddef ;
- def data_mpo_file = "\MPdataMPOfile" enddef ;
- def data_mpy_file = "\MPdataMPYfile" enddef ;
-\stopMPinitializations
+% % hm: extensions get expanded, so the wrong names then end up in the format
+%
+% \startMPinitializations
+% boolean collapse_data; collapse_data:=true ; % will be obsolete
+% def data_mpd_file = "\MPdataMPDfile" enddef ; % will go via lua
+% def data_mpo_file = "\MPdataMPOfile" enddef ;
+% def data_mpy_file = "\MPdataMPYfile" enddef ;
+% \stopMPinitializations
+
+\startMPextensions
+ boolean collapse_data; collapse_data:=true ; % will be obsolete
+ def data_mpd_file = "\noexpand\MPdataMPDfile" enddef ; % will go via lua
+ def data_mpo_file = "\noexpand\MPdataMPOfile" enddef ;
+ def data_mpy_file = "\noexpand\MPdataMPYfile" enddef ;
+\stopMPextensions
\unexpanded\def\getMPdata
{\let\MPdata\secondoftwoarguments
@@ -880,7 +938,7 @@
\def\meta_process_buffer[#1]%
{\meta_begin_graphic_group{#1}%
- \meta_process_graphic{\ctxcommand{feedback("\currentMPgraphicname")}}%
+ \meta_process_graphic{\clf_feedback{\currentMPgraphicname}}%
\meta_end_graphic_group}
\unexpanded\def\runMPbuffer
@@ -924,15 +982,15 @@
\else\ifx\m_meta_option\!!plustoken
#2% % use in main doc too
\fi\fi\fi
- \ctxlua{metapost.tex.set(\!!bs\detokenize{#2}\!!es)}}
+ \clf_mptexset{\detokenize{#2}}}
\let\stopMPenvironment\relax
\unexpanded\def\resetMPenvironment
- {\ctxlua{metapost.tex.reset()}}
+ {\clf_mptexreset}
\unexpanded\def\useMPenvironmentbuffer[#1]%
- {\ctxlua{metapost.tex.set(buffers.content("#1"))}}
+ {\clf_mptexsetfrombuffer{#1}}
%D This command takes \type {[reset]} as an optional
%D argument.
@@ -1054,7 +1112,7 @@
%D Loading specific \METAPOST\ related definitions is
%D accomplished by:
-\unexpanded\def\useMPlibrary[#1]{\ctxcommand{useMPlibrary(\!!bs#1\!!es)}}
+\unexpanded\def\useMPlibrary[#1]{\clf_useMPlibrary{#1}}
%D \macros
%D {setMPtext, MPtext, MPstring, MPbetex}
@@ -1084,46 +1142,62 @@
% \setupcolors[state=stop,conversion=never] % quite tricky ... type mismatch
-% A dirty trick, ** in colo-ini.lua (mpcolor).
+% \startMPextensions
+% color OverlayColor,OverlayLineColor;
+% \stopMPextensions
-\def\m_meta_colo_initializations{% no vardef, goes wrong with spot colors
- def OverlayLineColor=\MPcolor{\overlaylinecolor} enddef;
- def OverlayColor=\MPcolor{\overlaycolor} enddef;
-}
+\startMPinitializations
+ CurrentLayout:="\currentlayout";
+\stopMPinitializations
-\startMPextensions
- color OverlayColor,OverlayLineColor;
-\stopMPextensions
+% \startMPinitializations
+% OverlayWidth:=\overlaywidth;
+% OverlayHeight:=\overlayheight;
+% OverlayDepth:=\overlaydepth;
+% OverlayLineWidth:=\overlaylinewidth;
+% OverlayOffset:=\overlayoffset;
+% \stopMPinitializations
+
+% A dirty trick, ** in colo-ini.lua (mpcolor). We cannot use a vardef, because
+% that fails with spot colors.
\startMPinitializations
- CurrentLayout:="\currentlayout";
- OverlayWidth:=\overlaywidth;
- OverlayHeight:=\overlayheight;
- OverlayDepth:=\overlaydepth;
- OverlayLineWidth:=\overlaylinewidth;
- OverlayOffset:=\overlayoffset;
- %
- \m_meta_colo_initializations
- %
- BaseLineSkip:=\the\baselineskip;
- LineHeight:=\the\baselineskip;
- BodyFontSize:=\the\bodyfontsize;
- %
- TopSkip:=\the\topskip;
- StrutHeight:=\strutheight;
- StrutDepth:=\strutdepth;
- %
- CurrentWidth:=\the\hsize;
- CurrentHeight:=\the\vsize;
- %
- EmWidth:=\the\emwidth;
- ExHeight:=\the\exheight;
- %
- PageNumber:=\the\pageno;
- RealPageNumber:=\the\realpageno;
- LastPageNumber:= \lastpage;
+ def OverlayLineColor=\ifx\overlaylinecolor\empty black \else\MPcolor{\overlaylinecolor} \fi enddef;
+ def OverlayColor =\ifx\overlaycolor \empty black \else\MPcolor{\overlaycolor} \fi enddef;
\stopMPinitializations
+% \newcount\c_overlay_colormodel
+% \newcount\c_overlay_color
+% \newcount\c_overlay_transparency
+% \newcount\c_overlay_linecolor
+% \newcount\c_overlay_linetransparency
+
+% \appendtoks
+% \c_overlay_colormodel \attribute\colormodelattribute
+% \c_overlay_color \colo_helpers_inherited_current_ca\overlaycolor
+% \c_overlay_transparency \colo_helpers_inherited_current_ta\overlaycolor
+% \c_overlay_linecolor \colo_helpers_inherited_current_ca\overlaylinecolor
+% \c_overlay_linetransparency\colo_helpers_inherited_current_ta\overlaylinecolor
+% \to \everyMPgraphic
+
+% \startMPinitializations
+% BaseLineSkip:=\the\baselineskip;
+% LineHeight:=\the\baselineskip;
+% BodyFontSize:=\the\bodyfontsize;
+% %
+% TopSkip:=\the\topskip;
+% StrutHeight:=\strutheight;
+% StrutDepth:=\strutdepth;
+% %
+% CurrentWidth:=\the\hsize;
+% CurrentHeight:=\the\vsize;
+% HSize:=\the\hsize ;
+% VSize:=\the\vsize ;
+% %
+% EmWidth:=\the\emwidth;
+% ExHeight:=\the\exheight;
+% \stopMPinitializations
+
\appendtoks
\disablediscretionaries
\disablecompoundcharacters
@@ -1153,11 +1227,6 @@
\let \} \letterclosebrace
\to \everyMPgraphic
-\startMPinitializations
- prologues:=0;
- mpprocset:=1;
-\stopMPinitializations
-
%D \macros
%D {PDFMPformoffset}
%D
@@ -1184,21 +1253,36 @@
enddef;
\stopMPextensions
-\startMPinitializations
- HSize:=\the\hsize ;
- VSize:=\the\vsize ;
-\stopMPinitializations
-
\startMPextensions
vardef ForegroundBox =
unitsquare xysized(HSize,VSize)
enddef ;
- PageFraction := 1 ;
\stopMPextensions
-\startMPinitializations
- PageFraction := if \lastpage>1: (\realfolio-1)/(\lastpage-1) else: 1 fi ;
-\stopMPinitializations
+% \startMPextensions
+% PageFraction := 1 ;
+% \stopMPextensions
+
+% \startMPinitializations
+% PageFraction := if \lastpage>1: (\realfolio-1)/(\lastpage-1) else: 1 fi ;
+% \stopMPinitializations
+
+\startMPdefinitions {metapost}
+ if unknown context_bare : input mp-bare.mpiv ; fi ;
+\stopMPdefinitions
+\startMPdefinitions {binarypost}
+ if unknown context_bare : input mp-bare.mpiv ; fi ;
+\stopMPdefinitions
+\startMPdefinitions {decimalpost}
+ if unknown context_bare : input mp-bare.mpiv ; fi ;
+\stopMPdefinitions
+\startMPdefinitions {doublepost}
+ if unknown context_bare : input mp-bare.mpiv ; fi ;
+\stopMPdefinitions
+
+% \startMPdefinitions {nofun}
+% if unknown context_bare : input mp-bare.mpiv ; fi ;
+% \stopMPdefinitions
%D And some more. These are not really needed since we
%D don't use the normal figure inclusion macros any longer.
@@ -1359,12 +1443,16 @@
\newconstant\MPcolormethod
\appendtoks
- \ctxlua{metapost.setoutercolor(\number\MPcolormethod,\number\attribute\colormodelattribute,\number\attribute\colorattribute,\number\dogetattribute{transparency})}%
+ \clf_mpsetoutercolor
+ \MPcolormethod\space
+ \attribute\colormodelattribute\space
+ \attribute\colorattribute\space
+ \dogetattribute{transparency}\relax
\to \everyMPgraphic
-\startMPinitializations
- defaultcolormodel := \ifcase\MPcolormethod1\or1\or3\else3\fi;
-\stopMPinitializations
+% \startMPinitializations
+% defaultcolormodel := \ifcase\MPcolormethod1\or1\or3\else3\fi;
+% \stopMPinitializations
%D \macros
%D {mprunvar,mpruntab,mprunset}
@@ -1390,9 +1478,9 @@
%D $(x,y) = (\MPrunset{point}{,})$
%D \stoptyping
-\def\MPrunvar #1{\ctxcommand{mprunvar("#1")}} \let\mprunvar\MPrunvar
-\def\MPruntab#1#2{\ctxcommand{mprunvar("#1",\number#2)}} \let\mpruntab\MPruntab
-\def\MPrunset#1#2{\ctxcommand{mprunvar("#1","#2")}} \let\mprunset\MPrunset
+\def\MPrunvar #1{\clf_mprunvar{#1}} \let\mprunvar\MPrunvar
+\def\MPruntab#1#2{\clf_mpruntab{#1}#2\relax} \let\mpruntab\MPruntab % #2 is number
+\def\MPrunset#1#2{\clf_mprunset{#1}{#2}} \let\mprunset\MPrunset
%D We also provide an outputless run:
@@ -1461,7 +1549,7 @@
\edef\p_mpy{\directMPgraphicsparameter{mpy}}%
\ifx\p_mpy\empty \else
\let\MPdataMPYfile\p_mpy
- \ctxlua{metapost.makempy.registerfile("\p_mpy")}%
+ \clf_registermpyfile{\p_mpy}%
\fi
\to \everysetupMPgraphics
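
The mkiv code above now talks to Lua through \clf_... calls whose keyword {...} pairs are collected into a single specification table by a scanner declared with interfaces.implement (see the mpgraphic and mpsetclippath scanners in mlib-ctx.lua further down). A minimal sketch of that pattern, with a made-up name mpdemo and made-up fields that are not part of this patch, could look like this:

local implement = interfaces.implement
local report    = logs.reporter("metapost","demo")

implement {
    name      = "mpdemo", -- sketch only: "mpdemo" is not a scanner in the patch
    arguments = {
        {
            { "instance" },
            { "data" },
            { "method" },
        }
    },
    actions   = function(specification)
        -- all keyword {value} pairs scanned after \clf_mpdemo end up in this table
        report("instance %a, method %a, %i bytes of data",
            specification.instance,
            specification.method,
            #(specification.data or ""))
    end,
}

On the TeX side such a scanner would be called as \clf_mpdemo instance {metafun} data {draw origin ;} method {double} \relax, which is the same calling convention as \clf_mpgraphic above.
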
diff --git a/tex/context/base/meta-pag.mkiv b/tex/context/base/meta-pag.mkiv
index a25353b18..6b6abd211 100644
--- a/tex/context/base/meta-pag.mkiv
+++ b/tex/context/base/meta-pag.mkiv
@@ -23,82 +23,76 @@
%D pagebody looks.
\startMPextensions
- boolean PageStateAvailable,OnRightPage,InPageBody;
+ boolean PageStateAvailable;
PageStateAvailable:=true;
\stopMPextensions
-\startMPinitializations
- OnRightPage:=true;
- InPageBody:=\ifinpagebody true \else false \fi;
-\stopMPinitializations
-
-\startMPinitializations
- CurrentColumn:=\number\mofcolumns;
- NOfColumns:=\number\nofcolumns;
- % todo: ColumnDistance
-\stopMPinitializations
-
% maybe always set as frozen anyway
-\startMPinitializations
- % def LoadPageState =
- OnRightPage:=\MPonrightpage;
- OnOddPage:=\MPonoddpage;
- RealPageNumber:=\the\realpageno;
- PageNumber:=\the\pageno;
- NOfPages:=\lastpage;
- PaperHeight:=\the\paperheight;
- PaperWidth:=\the\paperwidth;
- PrintPaperHeight:=\the\printpaperheight;
- PrintPaperWidth:=\the\printpaperwidth;
- TopSpace:=\the\topspace;
- BottomSpace:=\the\bottomspace;
- BackSpace:=\the\backspace;
- CutSpace:=\the\cutspace;
- MakeupHeight:=\the\makeupheight;
- MakeupWidth:=\the\makeupwidth;
- TopHeight:=\the\topheight;
- TopDistance:=\the\topdistance;
- HeaderHeight:=\the\headerheight;
- HeaderDistance:=\the\headerdistance;
- TextHeight:=\the\textheight;
- FooterDistance:=\the\footerdistance;
- FooterHeight:=\the\footerheight;
- BottomDistance:=\the\bottomdistance;
- BottomHeight:=\the\bottomheight;
- LeftEdgeWidth:=\the\leftedgewidth;
- LeftEdgeDistance:=\the\leftedgedistance;
- LeftMarginWidth:=\the\leftmarginwidth;
- LeftMarginDistance:=\the\leftmargindistance;
- TextWidth:=\the\textwidth ;
- RightMarginDistance:=\the\rightmargindistance;
- RightMarginWidth:=\the\rightmarginwidth;
- RightEdgeDistance:=\the\rightedgedistance;
- RightEdgeWidth:=\the\rightedgewidth;
- InnerMarginDistance:=\the\innermargindistance;
- InnerMarginWidth:=\the\innermarginwidth;
- OuterMarginDistance:=\the\outermargindistance;
- OuterMarginWidth:=\the\outermarginwidth;
- InnerEdgeDistance:=\the\inneredgedistance;
- InnerEdgeWidth:=\the\inneredgewidth;
- OuterEdgeDistance:=\the\outeredgedistance;
- OuterEdgeWidth:=\the\outeredgewidth;
- PageOffset:=\the\pagebackgroundoffset;
- PageDepth:=\the\pagebackgrounddepth;
- LayoutColumns:=\the\layoutcolumns;
- LayoutColumnDistance:=\the\layoutcolumndistance;
- LayoutColumnWidth:=\the\layoutcolumnwidth;
- def LoadPageState =
- % now always set .. this dummy can move to the mp code
- enddef;
-\stopMPinitializations
+% \startMPinitializations
+% PaperHeight:=\the\paperheight;
+% PaperWidth:=\the\paperwidth;
+% PrintPaperHeight:=\the\printpaperheight;
+% PrintPaperWidth:=\the\printpaperwidth;
+% TopSpace:=\the\topspace;
+% BottomSpace:=\the\bottomspace;
+% BackSpace:=\the\backspace;
+% CutSpace:=\the\cutspace;
+% MakeupHeight:=\the\makeupheight;
+% MakeupWidth:=\the\makeupwidth;
+% TopHeight:=\the\topheight;
+% TopDistance:=\the\topdistance;
+% HeaderHeight:=\the\headerheight;
+% HeaderDistance:=\the\headerdistance;
+% TextHeight:=\the\textheight;
+% FooterDistance:=\the\footerdistance;
+% FooterHeight:=\the\footerheight;
+% BottomDistance:=\the\bottomdistance;
+% BottomHeight:=\the\bottomheight;
+% LeftEdgeWidth:=\the\leftedgewidth;
+% LeftEdgeDistance:=\the\leftedgedistance;
+% LeftMarginWidth:=\the\leftmarginwidth;
+% LeftMarginDistance:=\the\leftmargindistance;
+% TextWidth:=\the\textwidth;
+% RightMarginDistance:=\the\rightmargindistance;
+% RightMarginWidth:=\the\rightmarginwidth;
+% RightEdgeDistance:=\the\rightedgedistance;
+% RightEdgeWidth:=\the\rightedgewidth;
+% InnerMarginDistance:=\the\innermargindistance;
+% InnerMarginWidth:=\the\innermarginwidth;
+% OuterMarginDistance:=\the\outermargindistance;
+% OuterMarginWidth:=\the\outermarginwidth;
+% InnerEdgeDistance:=\the\inneredgedistance;
+% InnerEdgeWidth:=\the\inneredgewidth;
+% OuterEdgeDistance:=\the\outeredgedistance;
+% OuterEdgeWidth:=\the\outeredgewidth;
+% PageOffset:=\the\pagebackgroundoffset;
+% PageDepth:=\the\pagebackgrounddepth;
+% LayoutColumns:=\the\layoutcolumns;
+% LayoutColumnDistance:=\the\layoutcolumndistance;
+% LayoutColumnWidth:=\the\layoutcolumnwidth;
+% %
+% boolean OnRightPage,OnOddPage,InPageBody;
+% %
+% OnRightPage:=\MPonrightpage;
+% OnOddPage:=\MPonoddpage;
+% InPageBody:=\ifinpagebody true \else false \fi;
+% %
+% RealPageNumber:=\the\realpageno;
+% PageNumber:=\the\pageno;
+% NOfPages:=\lastpage;
+% LastPageNumber:=\lastpage;
+% %
+% CurrentColumn:=\number\mofcolumns;
+% NOfColumns:=\number\nofcolumns;
+% \stopMPinitializations
\def\MPonrightpage{true}
\def\MPonoddpage {true}
\def\freezeMPpagelayout
{\edef\MPonrightpage{\doifbothsides {tru}{tru}{fals}e}%
- \edef\MPonoddpage {\doifoddpageelse {tru}{fals}e}}
+ \edef\MPonoddpage {\doifelseoddpage {tru}{fals}e}}
%D We need to freeze the pagelayout before the backgrounds
%D are built, because the overlay will temporarily become
@@ -121,7 +115,7 @@
% \to \everyMPgraphic
\prependtoks
- \calculatereducedvsizes % this is really needed
+ \calculatereducedvsizes % bah, this is really needed
\to \everyMPgraphic
\protect \endinput
diff --git a/tex/context/base/meta-pdf.lua b/tex/context/base/meta-pdf.lua
index 46e20ad31..3cbff63b1 100644
--- a/tex/context/base/meta-pdf.lua
+++ b/tex/context/base/meta-pdf.lua
@@ -38,8 +38,8 @@ local mptopdf = metapost.mptopdf
mptopdf.nofconverted = 0
-local f_translate = formatters["1 0 0 0 1 %f %f cm"] -- no %s due to 1e-035 issues
-local f_concat = formatters["%f %f %f %f %f %f cm"] -- no %s due to 1e-035 issues
+local f_translate = formatters["1 0 0 0 1 %F %F cm"] -- no %s due to 1e-035 issues
+local f_concat = formatters["%F %F %F %F %F %F cm"] -- no %s due to 1e-035 issues
local m_path, m_stack, m_texts, m_version, m_date, m_shortcuts = { }, { }, { }, 0, 0, false
@@ -329,7 +329,7 @@ handlers[50] = function() report_mptopdf("skipping special %s",50) end
--end of not supported
function mps.setrgbcolor(r,g,b) -- extra check
- r, g = tonumber(r), tonumber(g) -- needed when we use lpeg
+ r, g, b = tonumber(r), tonumber(g), tonumber(b) -- needed when we use lpeg
if r == 0.0123 and g < 0.1 then
g, b = round(g*10000), round(b*10000)
local s = specials[b]
@@ -411,7 +411,8 @@ function mps.fshow(str,font,scale) -- lpeg parser
mps.textext(font,scale,lpegmatch(package,str))
end
-local cnumber = lpegC(number)
+----- cnumber = lpegC(number)
+local cnumber = number/tonumber -- we now expect numbers (feeds into %F)
local cstring = lpegC(nonspace)
local specials = (lpegP("%%MetaPostSpecials:") * sp * (cstring * sp^0)^0 * eol) / mps.specials
@@ -572,3 +573,11 @@ statistics.register("mps conversion time",function()
return nil
end
end)
+
+-- interface
+
+interfaces.implement {
+ name = "convertmpstopdf",
+ arguments = "string",
+ actions = mptopdf.convertmpstopdf
+}
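
Two details in this meta-pdf.lua hunk are easy to miss: the %f formatters become %F, ConTeXt's own float formatter, and cnumber now converts its captures with /tonumber so that the formatters receive numbers rather than strings. A small self-contained illustration of the /tonumber capture, not taken from the patch:

local lpeg = require("lpeg")
local P, R, C = lpeg.P, lpeg.R, lpeg.C

local digits  = R("09")^1 * (P(".") * R("09")^1)^-1
local cnumber = C(digits)         -- captures "12.5" as a string
local nnumber = digits / tonumber -- captures 12.5 as a number

print(lpeg.match(cnumber,"12.5"))     --> 12.5 (a string)
print(lpeg.match(nnumber,"12.5") + 1) --> 13.5 (a number, so arithmetic works)
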
diff --git a/tex/context/base/meta-pdf.mkiv b/tex/context/base/meta-pdf.mkiv
index 3469419d4..50eb1dd72 100644
--- a/tex/context/base/meta-pdf.mkiv
+++ b/tex/context/base/meta-pdf.mkiv
@@ -46,7 +46,7 @@
\forgetall
\offinterlineskip
\setbox\MPbox\vbox\bgroup
- \ctxlua{metapost.mptopdf.convertmpstopdf("\MPfilename")}%
+ \clf_convertmpstopdf{\MPfilename}%
\removeunwantedspaces % not that needed
\egroup
\finalizeMPbox
diff --git a/tex/context/base/meta-pdh.mkiv b/tex/context/base/meta-pdh.mkiv
index b65fe6ac6..f3db5b388 100644
--- a/tex/context/base/meta-pdh.mkiv
+++ b/tex/context/base/meta-pdh.mkiv
@@ -374,17 +374,17 @@
%D
%D \startbuffer
%D \startuniqueMPgraphic{CircularShade}
-%D path p ; p := unitsquare xscaled \overlaywidth yscaled \overlayheight ;
+%D path p ; p := unitsquare xscaled OverlayWidth yscaled OverlayHeight ;
%D circular_shade(p,0,.2red,.9red) ;
%D \stopuniqueMPgraphic
%D
%D \startuniqueMPgraphic{LinearShade}
-%D path p ; p := unitsquare xscaled \overlaywidth yscaled \overlayheight ;
+%D path p ; p := unitsquare xscaled OverlayWidth yscaled OverlayHeight ;
%D linear_shade(p,0,.2blue,.9blue) ;
%D \stopuniqueMPgraphic
%D
%D \startuniqueMPgraphic{DuotoneShade}
-%D path p ; p := unitsquare xscaled \overlaywidth yscaled \overlayheight ;
+%D path p ; p := unitsquare xscaled OverlayWidth yscaled OverlayHeight ;
%D linear_shade(p,2,.5green,.5red) ;
%D \stopuniqueMPgraphic
%D \stopbuffer
@@ -429,8 +429,8 @@
%D
%D \def\SomeShade#1#2#3#4#5%
%D {\startuniqueMPgraphic{Shade-#1}
-%D width := \overlaywidth ;
-%D height := \overlayheight ;
+%D width := OverlayWidth ;
+%D height := OverlayHeight ;
%D path p ; p := unitsquare xscaled width yscaled height ;
%D #2_shade(p,#3,#4,#5) ;
%D \stopuniqueMPgraphic
diff --git a/tex/context/base/meta-tex.lua b/tex/context/base/meta-tex.lua
index 7a4123abb..1008e45c0 100644
--- a/tex/context/base/meta-tex.lua
+++ b/tex/context/base/meta-tex.lua
@@ -6,12 +6,15 @@ if not modules then modules = { } end modules ['meta-tex'] = {
license = "see context related readme files"
}
+local tostring = tostring
local format, gsub, find, match = string.format, string.gsub, string.find, string.match
local formatters = string.formatters
local P, S, R, C, Cs, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cs, lpeg.match
metapost = metapost or { }
+local implement = interfaces.implement
+
-- local left = P("[")
-- local right = P("]")
-- local space = P(" ")
@@ -39,6 +42,12 @@ function metapost.escaped(str)
context(lpegmatch(pattern,str))
end
+implement {
+ name = "metapostescaped",
+ actions = metapost.escaped,
+ arguments = "string"
+}
+
local simplify = true
-- local function strip(n,e)
@@ -108,10 +117,6 @@ local enumber = number * S("eE") * number
local cleaner = Cs((P("@@")/"@" + P("@")/"%%" + P(1))^0)
-function format_n(fmt,...)
- return
-end
-
context = context or { exponent = function(...) print(...) end }
function metapost.format_string(fmt,...)
@@ -142,6 +147,9 @@ function metapost.nvformat(fmt,str)
metapost.format_number(fmt,metapost.untagvariable(str,false))
end
+implement { name = "metapostformatted", actions = metapost.svformat, arguments = { "string", "string" } }
+implement { name = "metapostgraphformat", actions = metapost.nvformat, arguments = { "string", "string" } }
+
-- local function test(fmt,n)
-- logs.report("mp format test","fmt: %s, n: %s, result: %s, \\exponent{%s}{%s}",fmt,n,
-- formatters[lpegmatch(cleaner,fmt)](n),
@@ -167,3 +175,34 @@ end
-- test("@j","1.2e+102")
-- test("@j","1.23e+102")
-- test("@j","1.234e+102")
+
+local f_textext = formatters[ [[textext("%s")]] ]
+local f_mthtext = formatters[ [[textext("\mathematics{%s}")]] ]
+local f_exptext = formatters[ [[textext("\mathematics{%s\times10^{%s}}")]] ]
+
+local mpprint = mp.print
+
+function mp.format(fmt,str)
+ fmt = lpegmatch(cleaner,fmt)
+ mpprint(f_textext(formatters[fmt](metapost.untagvariable(str,false))))
+end
+
+function mp.formatted(fmt,num) -- svformat
+ fmt = lpegmatch(cleaner,fmt)
+ mpprint(f_textext(formatters[fmt](tonumber(num) or num)))
+end
+
+function mp.graphformat(fmt,num) -- nvformat
+ fmt = lpegmatch(cleaner,fmt)
+ local number = tonumber(num)
+ if number then
+ local base, exponent = lpegmatch(enumber,number)
+ if base and exponent then
+ mpprint(f_exptext(base,exponent))
+ else
+ mpprint(f_mthtext(num))
+ end
+ else
+ mpprint(f_textext(tostring(num)))
+ end
+end
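
The new mp.format, mp.formatted and mp.graphformat helpers all run their template through the cleaner defined earlier in this file, which lets templates use "@" where a format string would normally use "%" (awkward to type in a TeX or MetaPost context) and "@@" for a literal "@". A standalone sketch of just that translation step, not part of the patch:

local lpeg = require("lpeg")
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match

-- same idea as the cleaner in meta-tex.lua: "@" -> "%", "@@" -> "@"
local cleaner = Cs((P("@@")/"@" + P("@")/"%%" + P(1))^0)

print(lpegmatch(cleaner,"@0.3f"))                    --> %0.3f
print(lpegmatch(cleaner,"mail@@host: @s"))           --> mail@host: %s
print(string.format(lpegmatch(cleaner,"@0.3f"),1/3)) --> 0.333
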
diff --git a/tex/context/base/meta-tex.mkiv b/tex/context/base/meta-tex.mkiv
index deac883c8..0f5a27ff8 100644
--- a/tex/context/base/meta-tex.mkiv
+++ b/tex/context/base/meta-tex.mkiv
@@ -28,7 +28,7 @@
\let\stopTeXtexts\relax
-\def\TeXtext
+\unexpanded\def\TeXtext
{\dosingleempty\meta_textext}
\def\meta_textext[#1]#2#3% contrary to mkii we don't process yet but we do expand
@@ -68,7 +68,7 @@
\unexpanded\def\definetextext[#1]%
{\def\currenttextext{#1}%
- \doifnextoptionalelse\meta_textext_define_one\meta_textext_define_zero}
+ \doifelsenextoptionalcs\meta_textext_define_one\meta_textext_define_zero}
\def\meta_textext_define_one {\setvalue{\??graphictexarguments1:\currenttextext}}
\def\meta_textext_define_zero{\setvalue{\??graphictexarguments0:\currenttextext}}
@@ -76,10 +76,10 @@
\def\sometxt#1#{\meta_some_txt{#1}}
\def\meta_some_txt#1#2% we need to capture embedded quotes (a bit messy as later on textext is filtered
- {textext.drt("\mpsometxt#1{\ctxlua{metapost.escaped(\!!bs#2\!!es)}}")}
+ {textext.drt("\mpsometxt#1{\clf_metapostescaped{#2}}")}
\unexpanded\def\mpsometxt % no _ catcode
- {\doifnextoptionalelse\meta_some_txt_indeed_yes\meta_some_txt_indeed_nop}
+ {\doifelsenextoptionalcs\meta_some_txt_indeed_yes\meta_some_txt_indeed_nop}
\def\meta_some_txt_indeed_yes[#1]%
{\def\currenttextext{#1}%
@@ -116,8 +116,8 @@
%
% \unexpanded\def\definetextext[#1]#2{\setvalue{@@st@@#1}{#2}}
%
-% \long\def\sometxt #1#{\dosometxt{#1}} % grab optional [args]
-% \long\def\dosometxt #1#2{\ctxlua{metapost.sometxt(\!!bs#1\!!es,\!!bs#2\!!es)}}
+% \def\sometxt #1#{\dosometxt{#1}} % grab optional [args]
+% \def\dosometxt #1#2{\ctxlua{metapost.sometxt(\!!bs#1\!!es,\!!bs#2\!!es)}}
%
% \def\sometxta #1{textext.drt("#1")}
% \def\sometxtb #1#2{textext.drt("\getvalue{@@st@@#1}{#2}")}
@@ -139,7 +139,7 @@
% {\showstruts\useMPgraphic{testgraphic}}
\unexpanded\def\MPexponent #1#2{\mathematics{#1\times10^{#2}}}
-\unexpanded\def\MPformatted #1#2{\ctxlua{metapost.svformat("#1","#2")}}
-\unexpanded\def\MPgraphformat#1#2{\ctxlua{metapost.nvformat("#1","#2")}}
+\unexpanded\def\MPformatted #1#2{\clf_metapostformatted{#1}{#2}}
+\unexpanded\def\MPgraphformat#1#2{\clf_metapostgraphformat{#1}{#2}}
\protect \endinput
diff --git a/tex/context/base/mlib-ctx.lua b/tex/context/base/mlib-ctx.lua
index a1a4e645a..b437e1212 100644
--- a/tex/context/base/mlib-ctx.lua
+++ b/tex/context/base/mlib-ctx.lua
@@ -6,21 +6,28 @@ if not modules then modules = { } end modules ['mlib-ctx'] = {
license = "see context related readme files",
}
--- todo
+-- for the moment we have the scanners here but they might be moved to
+-- the other modules
+local type, tostring = type, tostring
local format, concat = string.format, table.concat
local settings_to_hash = utilities.parsers.settings_to_hash
local report_metapost = logs.reporter("metapost")
-local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
-local mplib = mplib
+local mplib = mplib
-metapost = metapost or {}
-local metapost = metapost
+metapost = metapost or {}
+local metapost = metapost
-local v_no = interfaces.variables.no
+local setters = tokens.setters
+local setmacro = setters.macro
+local implement = interfaces.implement
+
+local v_no = interfaces.variables.no
metapost.defaultformat = "metafun"
metapost.defaultinstance = "metafun"
@@ -78,15 +85,155 @@ function metapost.getextensions(instance,state)
end
end
-function commands.getmpextensions(instance,state)
- context(metapost.getextensions(instance,state))
+-- function commands.getmpextensions(instance,state)
+-- context(metapost.getextensions(instance,state))
+-- end
+
+implement {
+ name = "setmpextensions",
+ actions = metapost.setextensions,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "getmpextensions",
+ actions = { metapost.getextensions, context } ,
+ arguments = "string"
+}
+
+local report_metapost = logs.reporter ("metapost")
+local status_metapost = logs.messenger("metapost")
+
+local patterns = {
+ "meta-imp-%s.mkiv",
+ "meta-imp-%s.tex",
+ -- obsolete:
+ "meta-%s.mkiv",
+ "meta-%s.tex"
+}
+
+local function action(name,foundname)
+ status_metapost("library %a is loaded",name)
+ context.startreadingfile()
+ context.input(foundname)
+ context.stopreadingfile()
+end
+
+local function failure(name)
+ report_metapost("library %a is unknown or invalid",name)
end
+implement {
+ name = "useMPlibrary",
+ arguments = "string",
+ actions = function(name)
+ resolvers.uselibrary {
+ name = name,
+ patterns = patterns,
+ action = action,
+ failure = failure,
+ onlyonce = true,
+ }
+ end
+}
+
+-- metapost.variables = { } -- to be stacked
+
+implement {
+ name = "mprunvar",
+ arguments = "string",
+ actions = function(name)
+ local value = metapost.variables[name]
+ if value ~= nil then
+ local tvalue = type(value)
+ if tvalue == "table" then
+ context(concat(value," "))
+ elseif tvalue == "number" or tvalue == "boolean" then
+ context(tostring(value))
+ elseif tvalue == "string" then
+ context(value)
+ end
+ end
+ end
+}
+
+implement {
+ name = "mpruntab",
+ arguments = { "string", "integer" },
+ actions = function(name,n)
+ local value = metapost.variables[name]
+ if value ~= nil then
+ local tvalue = type(value)
+ if tvalue == "table" then
+ context(value[n])
+ elseif tvalue == "number" or tvalue == "boolean" then
+ context(tostring(value))
+ elseif tvalue == "string" then
+ context(value)
+ end
+ end
+ end
+}
+
+implement {
+ name = "mprunset",
+ arguments = { "string", "string" },
+ actions = function(name,connector)
+ local value = metapost.variables[name]
+ if value ~= nil then
+ local tvalue = type(value)
+ if tvalue == "table" then
+ context(concat(value,connector))
+ elseif tvalue == "number" or tvalue == "boolean" then
+ context(tostring(value))
+ elseif tvalue == "string" then
+ context(value)
+ end
+ end
+ end
+}
+
+-- we need to move more from pps to here as pps is the plugin .. the order is a mess
+-- or just move the scanners to pps
+
function metapost.graphic(specification)
- setmpsformat(specification)
- metapost.graphic_base_pass(specification)
+ metapost.graphic_base_pass(setmpsformat(specification))
end
+implement {
+ name = "mpgraphic",
+ actions = metapost.graphic,
+ arguments = {
+ {
+ { "instance" },
+ { "format" },
+ { "data" },
+ { "initializations" },
+ { "extensions" },
+ { "inclusions" },
+ { "definitions" },
+ { "figure" },
+ { "method" },
+ }
+ }
+}
+
+implement {
+ name = "mpsetoutercolor",
+ actions = function(...) metapost.setoutercolor(...) end, -- not yet implemented
+ arguments = { "integer", "integer", "integer", "integer" }
+}
+
+implement {
+ name = "mpflushreset",
+ actions = function() metapost.flushreset() end -- not yet implemented
+}
+
+implement {
+ name = "mpflushliteral",
+ actions = function(str) metapost.flushliteral(str) end, -- not yet implemented
+ arguments = "string",
+}
+
function metapost.getclippath(specification) -- why not a special instance for this
setmpsformat(specification)
local mpx = specification.mpx
@@ -135,20 +282,42 @@ end
function metapost.theclippath(...)
local result = metapost.getclippath(...)
if result then -- we could just print the table
- result = concat(metapost.flushnormalpath(result),"\n")
- context(result)
+-- return concat(metapost.flushnormalpath(result),"\n")
+ return concat(metapost.flushnormalpath(result)," ")
+ else
+ return ""
end
end
+implement {
+ name = "mpsetclippath",
+ actions = function(specification)
+ setmacro("MPclippath",metapost.theclippath(specification),"global")
+ end,
+ arguments = {
+ {
+ { "instance" },
+ { "format" },
+ { "data" },
+ { "initializations" },
+ { "useextensions" },
+ { "inclusions" },
+ { "method" },
+ },
+ }
+}
+
statistics.register("metapost processing time", function()
local n = metapost.n
if n and n > 0 then
local nofconverted = metapost.makempy.nofconverted
local elapsedtime = statistics.elapsedtime
local elapsed = statistics.elapsed
- local str = format("%s seconds, loading: %s, execution: %s, n: %s, average: %s",
+ local instances, memory = metapost.getstatistics(true)
+ local str = format("%s seconds, loading: %s, execution: %s, n: %s, average: %s, instances: %i, memory: %0.3f M",
elapsedtime(metapost), elapsedtime(mplib), elapsedtime(metapost.exectime), n,
- elapsedtime((elapsed(metapost) + elapsed(mplib) + elapsed(metapost.exectime)) / n))
+ elapsedtime((elapsed(metapost) + elapsed(mplib) + elapsed(metapost.exectime)) / n),
+ instances, memory/(1024*1024))
if nofconverted > 0 then
return format("%s, external: %s (%s calls)",
str, elapsedtime(metapost.makempy), nofconverted)
@@ -163,17 +332,44 @@ end)
-- only used in graphictexts
metapost.tex = metapost.tex or { }
+local mptex = metapost.tex
local environments = { }
-function metapost.tex.set(str)
+function mptex.set(str)
environments[#environments+1] = str
end
-function metapost.tex.reset()
- environments = { }
+function mptex.setfrombuffer(name)
+ environments[#environments+1] = buffers.content(name)
end
-function metapost.tex.get()
+function mptex.get()
return concat(environments,"\n")
end
+
+function mptex.reset()
+ environments = { }
+end
+
+implement {
+ name = "mptexset",
+ arguments = "string",
+ actions = mptex.set
+}
+
+implement {
+ name = "mptexsetfrombuffer",
+ arguments = "string",
+ actions = mptex.setfrombuffer
+}
+
+implement {
+ name = "mptexget",
+ actions = { mptex.get, context }
+}
+
+implement {
+ name = "mptexreset",
+ actions = mptex.reset
+}
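
For reference, the mprunvar, mpruntab and mprunset scanners above read back values that a MetaPost run has left behind in metapost.variables (filled elsewhere, in the runtime plugin code). Assuming the table were to contain entries like the ones below (the names are invented for this sketch), the three calls behave as noted in the comments:

-- sketch only: metapost.variables is normally filled by the graphic itself
metapost.variables = {
    point = { 12.5, 7.25 }, -- a table value
    label = "origin",       -- a string value
    seen  = true,           -- a boolean value
}

-- \clf_mprunvar{point}    -> "12.5 7.25"  (table, concatenated with spaces)
-- \clf_mprunvar{label}    -> "origin"
-- \clf_mprunvar{seen}     -> "true"
-- \clf_mpruntab{point}{2} -> "7.25"       (the second entry)
-- \clf_mprunset{point}{,} -> "12.5,7.25"  (table, joined with the given connector)
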
diff --git a/tex/context/base/mlib-ctx.mkiv b/tex/context/base/mlib-ctx.mkiv
index 75ff45488..a7bb612c8 100644
--- a/tex/context/base/mlib-ctx.mkiv
+++ b/tex/context/base/mlib-ctx.mkiv
@@ -18,6 +18,8 @@
\registerctxluafile{mlib-run}{1.001}
\registerctxluafile{mlib-ctx}{1.001}
+\registerctxluafile{mlib-lua}{1.001}
+\registerctxluafile{mlib-int}{1.001} % here ?
\unprotect
diff --git a/tex/context/base/mlib-int.lua b/tex/context/base/mlib-int.lua
new file mode 100644
index 000000000..6d219fe04
--- /dev/null
+++ b/tex/context/base/mlib-int.lua
@@ -0,0 +1,153 @@
+if not modules then modules = { } end modules ['mlib-int'] = {
+ version = 1.001,
+ comment = "companion to mlib-ctx.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+local factor = number.dimenfactors.bp
+local mpprint = mp.print
+local mpboolean = mp.boolean
+local mpquoted = mp.quoted
+local getdimen = tex.getdimen
+local getcount = tex.getcount
+local get = tex.get
+local mpcolor = attributes.colors.mpcolor
+local emwidths = fonts.hashes.emwidths
+local exheights = fonts.hashes.exheights
+
+function mp.PaperHeight () mpprint(getdimen("paperheight") *factor) end
+function mp.PaperWidth () mpprint(getdimen("paperwidth") *factor) end
+function mp.PrintPaperHeight () mpprint(getdimen("printpaperheight") *factor) end
+function mp.PrintPaperWidth () mpprint(getdimen("printpaperwidth") *factor) end
+function mp.TopSpace () mpprint(getdimen("topspace") *factor) end
+function mp.BottomSpace () mpprint(getdimen("bottomspace") *factor) end
+function mp.BackSpace () mpprint(getdimen("backspace") *factor) end
+function mp.CutSpace () mpprint(getdimen("cutspace") *factor) end
+function mp.MakeupHeight () mpprint(getdimen("makeupheight") *factor) end
+function mp.MakeupWidth () mpprint(getdimen("makeupwidth") *factor) end
+function mp.TopHeight () mpprint(getdimen("topheight") *factor) end
+function mp.TopDistance () mpprint(getdimen("topdistance") *factor) end
+function mp.HeaderHeight () mpprint(getdimen("headerheight") *factor) end
+function mp.HeaderDistance () mpprint(getdimen("headerdistance") *factor) end
+function mp.TextHeight () mpprint(getdimen("textheight") *factor) end
+function mp.FooterDistance () mpprint(getdimen("footerdistance") *factor) end
+function mp.FooterHeight () mpprint(getdimen("footerheight") *factor) end
+function mp.BottomDistance () mpprint(getdimen("bottomdistance") *factor) end
+function mp.BottomHeight () mpprint(getdimen("bottomheight") *factor) end
+function mp.LeftEdgeWidth () mpprint(getdimen("leftedgewidth") *factor) end
+function mp.LeftEdgeDistance () mpprint(getdimen("leftedgedistance") *factor) end
+function mp.LeftMarginWidth () mpprint(getdimen("leftmarginwidth") *factor) end
+function mp.LeftMarginDistance () mpprint(getdimen("leftmargindistance") *factor) end
+function mp.TextWidth () mpprint(getdimen("textwidth") *factor) end
+function mp.RightMarginDistance () mpprint(getdimen("rightmargindistance") *factor) end
+function mp.RightMarginWidth () mpprint(getdimen("rightmarginwidth") *factor) end
+function mp.RightEdgeDistance () mpprint(getdimen("rightedgedistance") *factor) end
+function mp.RightEdgeWidth () mpprint(getdimen("rightedgewidth") *factor) end
+function mp.InnerMarginDistance () mpprint(getdimen("innermargindistance") *factor) end
+function mp.InnerMarginWidth () mpprint(getdimen("innermarginwidth") *factor) end
+function mp.OuterMarginDistance () mpprint(getdimen("outermargindistance") *factor) end
+function mp.OuterMarginWidth () mpprint(getdimen("outermarginwidth") *factor) end
+function mp.InnerEdgeDistance () mpprint(getdimen("inneredgedistance") *factor) end
+function mp.InnerEdgeWidth () mpprint(getdimen("inneredgewidth") *factor) end
+function mp.OuterEdgeDistance () mpprint(getdimen("outeredgedistance") *factor) end
+function mp.OuterEdgeWidth () mpprint(getdimen("outeredgewidth") *factor) end
+function mp.PageOffset () mpprint(getdimen("pagebackgroundoffset")*factor) end
+function mp.PageDepth () mpprint(getdimen("pagebackgrounddepth") *factor) end
+function mp.LayoutColumns () mpprint(getcount("layoutcolumns")) end
+function mp.LayoutColumnDistance() mpprint(getdimen("layoutcolumndistance")*factor) end
+function mp.LayoutColumnWidth () mpprint(getdimen("layoutcolumnwidth") *factor) end
+function mp.SpineWidth () mpprint(getdimen("spinewidth") *factor) end
+function mp.PaperBleed () mpprint(getdimen("paperbleed") *factor) end
+
+function mp.PageNumber () mpprint(getcount("pageno")) end
+function mp.RealPageNumber () mpprint(getcount("realpageno")) end
+function mp.NOfPages () mpprint(getcount("lastpageno")) end
+
+function mp.CurrentColumn () mpprint(getcount("mofcolumns")) end
+function mp.NOfColumns () mpprint(getcount("nofcolumns")) end
+
+function mp.BaseLineSkip () mpprint(getdimen("baselineskip") *factor) end
+function mp.LineHeight () mpprint(getdimen("lineheight") *factor) end
+function mp.BodyFontSize () mpprint(getdimen("bodyfontsize") *factor) end
+
+function mp.TopSkip () mpprint(getdimen("topskip") *factor) end
+function mp.StrutHeight () mpprint(getdimen("strutht") *factor) end
+function mp.StrutDepth () mpprint(getdimen("strutdp") *factor) end
+
+function mp.CurrentWidth () mpprint(get("hsize") *factor) end
+function mp.CurrentHeight () mpprint(get("vsize") *factor) end
+
+function mp.EmWidth () mpprint(emwidths [false]*factor) end
+function mp.ExHeight () mpprint(exheights[false]*factor) end
+
+mp.HSize = mp.CurrentWidth
+mp.VSize = mp.CurrentHeight
+mp.LastPageNumber = mp.NOfPages
+
+function mp.PageFraction ()
+ local lastpage = getcount("lastpageno")
+ if lastpage > 1 then
+ mpprint((getcount("realpageno")-1)/(lastpage-1))
+ else
+ mpprint(1)
+ end
+end
+
+-- locals
+
+local on_right = structures.pages.on_right
+local is_odd = structures.pages.is_odd
+local in_body = structures.pages.in_body
+
+mp.OnRightPage = function() mpprint(on_right()) end -- needs checking
+mp.OnOddPage = function() mpprint(is_odd ()) end -- needs checking
+mp.InPageBody = function() mpprint(in_body ()) end -- needs checking
+
+-- mp.CurrentLayout : \currentlayout
+
+function mp.OverlayWidth () mpprint(getdimen("d_overlay_width") *factor) end
+function mp.OverlayHeight () mpprint(getdimen("d_overlay_height") *factor) end
+function mp.OverlayDepth () mpprint(getdimen("d_overlay_depth") *factor) end
+function mp.OverlayLineWidth () mpprint(getdimen("d_overlay_linewidth")*factor) end
+function mp.OverlayOffset () mpprint(getdimen("d_overlay_offset") *factor) end
+
+function mp.defaultcolormodel()
+ local colormethod = getcount("MPcolormethod")
+ -- if colormethod == 0 then
+ -- return 1
+ -- elseif colormethod == 1 then
+ -- return 1
+ -- elseif colormethod == 2 then
+ -- return 3
+ -- else
+ -- return 3
+ -- end
+ return (colormethod == 0 or colormethod == 1) and 1 or 3
+end
+
+-- not much difference (even with 10000 calls in a graphic), as expansion seems to
+-- win over defining the macro etc, so let's not waste counters on this
+
+-- function mp.OverlayColor()
+-- local c = mpcolor(
+-- getcount("c_overlay_colormodel"),
+-- getcount("c_overlay_color"),
+-- getcount("c_overlay_transparency")
+-- )
+-- mpquoted(c)
+-- end
+--
+-- function mp.OverlayLineColor()
+-- local c = mpcolor(
+-- getcount("c_overlay_colormodel"),
+-- getcount("c_overlay_linecolor"),
+-- getcount("c_overlay_linetransparency")
+-- )
+-- mpquoted(c)
+-- end
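
The new mlib-int.lua replaces most of the big MPinitializations blocks commented out above: instead of freezing values into the format or feeding them into every run, each quantity becomes a small Lua function that fetches the current TeX value on demand and reports it in big points. Adding one more accessor follows the same recipe; the name FooterTotal below is made up for the example and is not part of the patch:

local factor   = number.dimenfactors.bp
local mpprint  = mp.print
local getdimen = tex.getdimen

-- sketch only: footer height plus footer distance, in big points like the others
function mp.FooterTotal()
    mpprint((getdimen("footerheight") + getdimen("footerdistance")) * factor)
end
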
diff --git a/tex/context/base/mlib-lua.lua b/tex/context/base/mlib-lua.lua
new file mode 100644
index 000000000..7e0568463
--- /dev/null
+++ b/tex/context/base/mlib-lua.lua
@@ -0,0 +1,383 @@
+if not modules then modules = { } end modules ['mlib-lua'] = {
+ version = 1.001,
+ comment = "companion to mlib-ctx.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- This is very preliminary code!
+
+-- maybe we need mplib.model, but how with instances
+
+local type, tostring, select, loadstring = type, tostring, select, loadstring
+local find, gsub = string.find, string.gsub
+
+local formatters = string.formatters
+local concat = table.concat
+local lpegmatch = lpeg.match
+
+local P, S, Ct = lpeg.P, lpeg.S, lpeg.Ct
+
+local report_luarun = logs.reporter("metapost","lua")
+
+local trace_luarun = false trackers.register("metapost.lua",function(v) trace_luarun = v end)
+local trace_enabled = true
+
+local be_tolerant = true directives.register("metapost.lua.tolerant",function(v) be_tolerant = v end)
+
+mp = mp or { } -- system namespace
+MP = MP or { } -- user namespace
+
+local buffer, n, max = { }, 0, 10 -- we reuse up to max
+
+function mp._f_()
+ if trace_enabled and trace_luarun then
+ local result = concat(buffer," ",1,n)
+ if n > max then
+ buffer = { }
+ end
+ n = 0
+ report_luarun("data: %s",result)
+ return result
+ else
+ if n == 0 then
+ return ""
+ end
+ local result
+ if n == 1 then
+ result = buffer[1]
+ else
+ result = concat(buffer," ",1,n)
+ end
+ if n > max then
+ buffer = { }
+ end
+ n = 0
+ return result
+ end
+end
+
+local f_code = formatters["%s return mp._f_()"]
+
+local f_numeric = formatters["%.16f"]
+local f_pair = formatters["(%.16f,%.16f)"]
+local f_triplet = formatters["(%.16f,%.16f,%.16f)"]
+local f_quadruple = formatters["(%.16f,%.16f,%.16f,%.16f)"]
+
+function mp.print(...)
+ for i=1,select("#",...) do
+ local value = select(i,...)
+ if value ~= nil then
+ n = n + 1
+ local t = type(value)
+ if t == "number" then
+ buffer[n] = f_numeric(value)
+ elseif t == "string" then
+ buffer[n] = value
+ elseif t == "table" then
+ buffer[n] = "(" .. concat(value,",") .. ")"
+ else -- boolean or whatever
+ buffer[n] = tostring(value)
+ end
+ end
+ end
+end
+
+function mp.boolean(b)
+ n = n + 1
+ buffer[n] = b and "true" or "false"
+end
+
+function mp.numeric(v)
+ n = n + 1
+ buffer[n] = v and f_numeric(v) or "0"
+end
+
+function mp.pair(x,y)
+ n = n + 1
+ if type(x) == "table" then
+ buffer[n] = f_pair(x[1],x[2])
+ else
+ buffer[n] = f_pair(x,y)
+ end
+end
+
+function mp.triplet(x,y,z)
+ n = n + 1
+ if type(x) == "table" then
+ buffer[n] = f_triplet(x[1],x[2],x[3])
+ else
+ buffer[n] = f_triplet(x,y,z)
+ end
+end
+
+function mp.quadruple(w,x,y,z)
+ n = n + 1
+ if type(w) == "table" then
+ buffer[n] = f_quadruple(w[1],w[2],w[3],w[4])
+ else
+ buffer[n] = f_quadruple(w,x,y,z)
+ end
+end
+
+function mp.path(t,connector,cycle)
+ if type(t) == "table" then
+ local tn = #t
+ if tn > 0 then
+ if connector == true then
+ connector = "--"
+ cycle = true
+ elseif not connector then
+ connector = "--"
+ end
+ local ti = t[1]
+ n = n + 1 ; buffer[n] = f_pair(ti[1],ti[2])
+ for i=2,tn do
+ local ti = t[i]
+ n = n + 1 ; buffer[n] = connector
+ n = n + 1 ; buffer[n] = f_pair(ti[1],ti[2])
+ end
+ if cycle then
+ n = n + 1 ; buffer[n] = connector
+ n = n + 1 ; buffer[n] = "cycle"
+ end
+ end
+ end
+end
+
+function mp.size(t)
+ n = n + 1
+ buffer[n] = type(t) == "table" and f_numeric(#t) or "0"
+end
+
+-- experiment: names can change
+
+local datasets = { }
+mp.datasets = datasets
+
+function datasets.load(tag,filename)
+ if not filename then
+ tag, filename = file.basename(tag), tag
+ end
+ local data = mp.dataset(io.loaddata(filename) or "")
+ datasets[tag] = {
+ Data = data,
+ Line = function(n) mp.path(data[n or 1]) end,
+ Size = function() mp.size(data) end,
+ }
+end
+
+--
+
+local replacer = lpeg.replacer("@","%%")
+
+function mp.format(fmt,...)
+ n = n + 1
+ if not find(fmt,"%%") then
+ fmt = lpegmatch(replacer,fmt)
+ end
+ buffer[n] = formatters[fmt](...)
+end
+
+function mp.quoted(fmt,s,...)
+ n = n + 1
+ if s then
+ if not find(fmt,"%%") then
+ fmt = lpegmatch(replacer,fmt)
+ end
+ buffer[n] = '"' .. formatters[fmt](s,...) .. '"'
+ else
+ buffer[n] = '"' .. fmt .. '"'
+ end
+end
+
+function mp.n(t)
+ return type(t) == "table" and #t or 0
+end
+
+local whitespace = lpeg.patterns.whitespace
+local newline = lpeg.patterns.newline
+local setsep = newline^2
+local comment = (S("#%") + P("--")) * (1-newline)^0 * (whitespace - setsep)^0
+local value = (1-whitespace)^1 / tonumber
+local entry = Ct( value * whitespace * value)
+local set = Ct((entry * (whitespace-setsep)^0 * comment^0)^1)
+local series = Ct((set * whitespace^0)^1)
+
+local pattern = whitespace^0 * series
+
+function mp.dataset(str)
+ return lpegmatch(pattern,str)
+end
+
+-- \startluacode
+-- local str = [[
+-- 10 20 20 20
+-- 30 40 40 60
+-- 50 10
+--
+-- 10 10 20 30
+-- 30 50 40 50
+-- 50 20 -- the last one
+--
+-- 10 20 % comment
+-- 20 10
+-- 30 40 # comment
+-- 40 20
+-- 50 10
+-- ]]
+--
+-- MP.myset = mp.dataset(str)
+--
+-- inspect(MP.myset)
+-- \stopluacode
+--
+-- \startMPpage
+-- color c[] ; c[1] := red ; c[2] := green ; c[3] := blue ;
+-- for i=1 upto lua("mp.print(mp.n(MP.myset))") :
+-- draw lua("mp.path(MP.myset[" & decimal i & "])") withcolor c[i] ;
+-- endfor ;
+-- \stopMPpage
+
+-- function metapost.runscript(code)
+-- local f = loadstring(f_code(code))
+-- if f then
+-- local result = f()
+-- if result then
+-- local t = type(result)
+-- if t == "number" then
+-- return f_numeric(result)
+-- elseif t == "string" then
+-- return result
+-- else
+-- return tostring(result)
+-- end
+-- end
+-- end
+-- return ""
+-- end
+
+local cache, n = { }, 0 -- todo: when > n then reset cache or make weak
+
+function metapost.runscript(code)
+ local trace = trace_enabled and trace_luarun
+ if trace then
+ report_luarun("code: %s",code)
+ end
+ local f
+ if n > 100 then
+ cache = nil -- forget about caching
+ f = loadstring(f_code(code))
+ if not f and be_tolerant then
+ f = loadstring(code)
+ end
+ else
+ f = cache[code]
+ if not f then
+ f = loadstring(f_code(code))
+ if f then
+ n = n + 1
+ cache[code] = f
+ elseif be_tolerant then
+ f = loadstring(code)
+ if f then
+ n = n + 1
+ cache[code] = f
+ end
+ end
+ end
+ end
+ if f then
+ local result = f()
+ if result then
+ local t = type(result)
+ if t == "number" then
+ t = f_numeric(result)
+ elseif t == "string" then
+ t = result
+ else
+ t = tostring(result)
+ end
+ if trace then
+ report_luarun("result: %s",code)
+ end
+ return t
+ elseif trace then
+ report_luarun("no result")
+ end
+ else
+ report_luarun("no result, invalid code")
+ end
+ return ""
+end
+
+-- function metapost.initializescriptrunner(mpx)
+-- mp.numeric = function(s) return mpx:get_numeric(s) end
+-- mp.string = function(s) return mpx:get_string (s) end
+-- mp.boolean = function(s) return mpx:get_boolean(s) end
+-- mp.number = mp.numeric
+-- end
+
+local get_numeric = mplib.get_numeric
+local get_string = mplib.get_string
+local get_boolean = mplib.get_boolean
+local get_number = get_numeric
+
+-- function metapost.initializescriptrunner(mpx)
+-- mp.numeric = function(s) return get_numeric(mpx,s) end
+-- mp.string = function(s) return get_string (mpx,s) end
+-- mp.boolean = function(s) return get_boolean(mpx,s) end
+-- mp.number = mp.numeric
+-- end
+
+local currentmpx = nil
+
+local get = { }
+mp.get = get
+
+get.numeric = function(s) return get_numeric(currentmpx,s) end
+get.string = function(s) return get_string (currentmpx,s) end
+get.boolean = function(s) return get_boolean(currentmpx,s) end
+get.number = get.numeric
+
+function metapost.initializescriptrunner(mpx,trialrun)
+ currentmpx = mpx
+ if trace_luarun then
+ report_luarun("type of run: %s", trialrun and "trial" or "final")
+ end
+ -- trace_enabled = not trialrun blocks too much
+end
+
+-- texts:
+
+local factor = 65536*(7227/7200)
+local textexts = nil
+local mptriplet = mp.triplet
+
+function mp.tt_initialize(tt)
+ textexts = tt
+end
+
+-- function mp.tt_wd(n)
+-- local box = textexts and textexts[n]
+-- mpprint(box and box.width/factor or 0)
+-- end
+-- function mp.tt_ht(n)
+-- local box = textexts and textexts[n]
+-- mpprint(box and box.height/factor or 0)
+-- end
+-- function mp.tt_dp(n)
+-- local box = textexts and textexts[n]
+-- mpprint(box and box.depth/factor or 0)
+-- end
+
+function mp.tt_dimensions(n)
+ local box = textexts[n]
+ if box then
+ -- could be made faster with nuts but not critical
+ mptriplet(box.width/factor,box.height/factor,box.depth/factor)
+ else
+ mptriplet(0,0,0)
+ end
+end
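
The pieces in mlib-lua.lua fit together as follows: a lua(...) call in a graphic hands its string to metapost.runscript, which wraps it with f_code ("... return mp._f_()"), runs it, and returns whatever the chunk pushed into the buffer via mp.print, mp.pair, mp.path and friends. A tiny user-side sketch (the MP.shift name is invented here):

-- sketch only: a made-up user function in the MP namespace that feeds a pair
-- back to MetaPost through the buffer shown above
function MP.shift(dx,dy)
    mp.pair(dx,dy) -- ends up as "(dx,dy)" in the string that mp._f_ returns
end

-- in a graphic one could then write something like:
--
--   draw fullcircle scaled 1cm shifted lua("MP.shift(10,20)") ;
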
diff --git a/tex/context/base/mlib-pdf.lua b/tex/context/base/mlib-pdf.lua
index 6bb08bd1d..8de09f42a 100644
--- a/tex/context/base/mlib-pdf.lua
+++ b/tex/context/base/mlib-pdf.lua
@@ -19,13 +19,19 @@ local report_metapost = logs.reporter("metapost")
local trace_variables = false trackers.register("metapost.variables",function(v) trace_variables = v end)
-local mplib, context = mplib, context
+local mplib = mplib
+local context = context
local allocate = utilities.storage.allocate
local copy_node = node.copy
local write_node = node.write
+local pen_info = mplib.pen_info
+local object_fields = mplib.fields
+
+local save_table = false
+
metapost = metapost or { }
local metapost = metapost
@@ -33,7 +39,6 @@ metapost.flushers = metapost.flushers or { }
local pdfflusher = { }
metapost.flushers.pdf = pdfflusher
-metapost.multipass = false -- to be stacked
metapost.n = 0
metapost.optimize = true -- false
@@ -41,9 +46,31 @@ local experiment = true -- uses context(node) that already does delayed nod
local savedliterals = nil -- needs checking
local mpsliteral = nodes.pool.register(node.new("whatsit",nodes.whatsitcodes.pdfliteral)) -- pdfliteral.mode = 1
-local pdfliteral = function(s)
+local f_f = formatters["%F"]
+
+local f_m = formatters["%F %F m"]
+local f_c = formatters["%F %F %F %F %F %F c"]
+local f_l = formatters["%F %F l"]
+local f_cm = formatters["%F %F %F %F %F %F cm"]
+local f_M = formatters["%F M"]
+local f_j = formatters["%i j"]
+local f_J = formatters["%i J"]
+local f_d = formatters["[%s] %F d"]
+local f_w = formatters["%F w"]
+
+directives.register("metapost.savetable",function(v)
+ if type(v) == "string" then
+ save_table = file.addsuffix(v,"mpl")
+ elseif v then
+ save_table = file.addsuffix(environment.jobname .. "-graphic","mpl")
+ else
+ save_table = false
+ end
+end)
+
+local pdfliteral = function(pdfcode)
local literal = copy_node(mpsliteral)
- literal.data = s
+ literal.data = pdfcode
return literal
end
@@ -52,18 +79,19 @@ end
-- get a new result table and the stored objects are forgotten. Otherwise they
-- are reused.
-local function getobjects(result,figure,f)
+local function getobjects(result,figure,index)
if metapost.optimize then
- local objects = result.objects
- if not objects then
- result.objects = { }
+ local robjects = result.objects
+ if not robjects then
+ robjects = { }
+ result.objects = robjects
end
- objects = result.objects[f]
- if not objects then
- objects = figure:objects()
- result.objects[f] = objects
+ local fobjects = robjects[index or 1]
+ if not fobjects then
+ fobjects = figure:objects()
+ robjects[index] = fobjects
end
- return objects
+ return fobjects
else
return figure:objects()
end
@@ -71,15 +99,20 @@ end
function metapost.convert(result, trialrun, flusher, multipass, askedfig)
if trialrun then
- metapost.multipass = false
- metapost.parse(result, askedfig)
- if multipass and not metapost.multipass and metapost.optimize then
- metapost.flush(result, flusher, askedfig) -- saves a run
+ local multipassindeed = metapost.parse(result,askedfig)
+ if multipass and not multipassindeed and metapost.optimize then
+ if save_table then
+ table.save(save_table,metapost.totable(result,1)) -- direct
+ end
+ metapost.flush(result,flusher,askedfig) -- saves a run
else
return false
end
else
- metapost.flush(result, flusher, askedfig)
+ if save_table then
+ table.save(save_table,metapost.totable(result,1)) -- direct
+ end
+ metapost.flush(result,flusher,askedfig)
end
return true -- done
end
@@ -119,7 +152,7 @@ end
function pdfflusher.startfigure(n,llx,lly,urx,ury,message)
savedliterals = nil
metapost.n = metapost.n + 1
- context.startMPLIBtoPDF(llx,lly,urx,ury)
+ context.startMPLIBtoPDF(f_f(llx),f_f(lly),f_f(urx),f_f(ury))
if message then pdfflusher.comment(message) end
end
@@ -156,8 +189,6 @@ local bend_tolerance = 131/65536
local rx, sx, sy, ry, tx, ty, divider = 1, 0, 0, 1, 0, 0, 1
-local pen_info = mplib.pen_info
-
local function pen_characteristics(object)
local t = pen_info(object)
rx, ry, sx, sy, tx, ty = t.rx, t.ry, t.sx, t.sy, t.tx, t.ty
@@ -192,11 +223,11 @@ local function flushnormalpath(path, t, open)
nt = nt + 1
pth = path[i]
if not ith then
- t[nt] = formatters["%f %f m"](pth.x_coord,pth.y_coord)
+ t[nt] = f_m(pth.x_coord,pth.y_coord)
elseif curved(ith,pth) then
- t[nt] = formatters["%f %f %f %f %f %f c"](ith.right_x,ith.right_y,pth.left_x,pth.left_y,pth.x_coord,pth.y_coord)
+ t[nt] = f_c(ith.right_x,ith.right_y,pth.left_x,pth.left_y,pth.x_coord,pth.y_coord)
else
- t[nt] = formatters["%f %f l"](pth.x_coord,pth.y_coord)
+ t[nt] = f_l(pth.x_coord,pth.y_coord)
end
ith = pth
end
@@ -204,15 +235,15 @@ local function flushnormalpath(path, t, open)
nt = nt + 1
local one = path[1]
if curved(pth,one) then
- t[nt] = formatters["%f %f %f %f %f %f c"](pth.right_x,pth.right_y,one.left_x,one.left_y,one.x_coord,one.y_coord )
+ t[nt] = f_c(pth.right_x,pth.right_y,one.left_x,one.left_y,one.x_coord,one.y_coord )
else
- t[nt] = formatters["%f %f l"](one.x_coord,one.y_coord)
+ t[nt] = f_l(one.x_coord,one.y_coord)
end
elseif #path == 1 then
-- special case .. draw point
local one = path[1]
nt = nt + 1
- t[nt] = formatters["%f %f l"](one.x_coord,one.y_coord)
+ t[nt] = f_l(one.x_coord,one.y_coord)
end
return t
end
@@ -226,18 +257,18 @@ local function flushconcatpath(path, t, open)
nt = 0
end
nt = nt + 1
- t[nt] = formatters["%f %f %f %f %f %f cm"](sx,rx,ry,sy,tx,ty)
+ t[nt] = f_cm(sx,rx,ry,sy,tx,ty)
for i=1,#path do
nt = nt + 1
pth = path[i]
if not ith then
- t[nt] = formatters["%f %f m"](mpconcat(pth.x_coord,pth.y_coord))
+ t[nt] = f_m(mpconcat(pth.x_coord,pth.y_coord))
elseif curved(ith,pth) then
local a, b = mpconcat(ith.right_x,ith.right_y)
local c, d = mpconcat(pth.left_x,pth.left_y)
- t[nt] = formatters["%f %f %f %f %f %f c"](a,b,c,d,mpconcat(pth.x_coord,pth.y_coord))
+ t[nt] = f_c(a,b,c,d,mpconcat(pth.x_coord,pth.y_coord))
else
- t[nt] = formatters["%f %f l"](mpconcat(pth.x_coord, pth.y_coord))
+ t[nt] = f_l(mpconcat(pth.x_coord, pth.y_coord))
end
ith = pth
end
@@ -247,15 +278,15 @@ local function flushconcatpath(path, t, open)
if curved(pth,one) then
local a, b = mpconcat(pth.right_x,pth.right_y)
local c, d = mpconcat(one.left_x,one.left_y)
- t[nt] = formatters["%f %f %f %f %f %f c"](a,b,c,d,mpconcat(one.x_coord, one.y_coord))
+ t[nt] = f_c(a,b,c,d,mpconcat(one.x_coord, one.y_coord))
else
- t[nt] = formatters["%f %f l"](mpconcat(one.x_coord,one.y_coord))
+ t[nt] = f_l(mpconcat(one.x_coord,one.y_coord))
end
elseif #path == 1 then
-- special case .. draw point
nt = nt + 1
local one = path[1]
- t[nt] = formatters["%f %f l"](mpconcat(one.x_coord,one.y_coord))
+ t[nt] = f_l(mpconcat(one.x_coord,one.y_coord))
end
return t
end
@@ -311,32 +342,8 @@ local variable =
local pattern_lst = (variable * newline^0)^0
-metapost.variables = { } -- to be stacked
-metapost.llx = 0 -- to be stacked
-metapost.lly = 0 -- to be stacked
-metapost.urx = 0 -- to be stacked
-metapost.ury = 0 -- to be stacked
-
-function commands.mprunvar(key,n) -- should be defined in another lib
- local value = metapost.variables[key]
- if value ~= nil then
- local tvalue = type(value)
- if tvalue == "table" then
- local ntype = type(n)
- if ntype == "number" then
- context(value[n])
- elseif ntype == "string" then
- context(concat(value,n))
- else
- context(concat(value," "))
- end
- elseif tvalue == "number" or tvalue == "boolean" then
- context(tostring(value))
- elseif tvalue == "string" then
- context(value)
- end
- end
-end
+metapost.variables = { } -- to be stacked
+metapost.properties = { } -- to be stacked
function metapost.untagvariable(str,variables) -- will be redone
if variables == false then
@@ -362,6 +369,30 @@ function metapost.processspecial(str)
end
end
+local function setproperties(figure)
+ local boundingbox = figure:boundingbox()
+ local properties = {
+ llx = boundingbox[1],
+ lly = boundingbox[2],
+ urx = boundingbox[3],
+ ury = boundingbox[4],
+ slot = figure:charcode(),
+ width = figure:width(),
+ height = figure:height(),
+ depth = figure:depth(),
+ italic = figure:italcorr(),
+ number = figure:charcode() or 0,
+ }
+ metapost.properties = properties
+ return properties
+end
+
+local function setvariables(figure)
+ local variables = { }
+ metapost.variables = variables
+ return variables
+end
+
function metapost.flush(result,flusher,askedfig)
if result then
local figures = result.fig
@@ -376,42 +407,27 @@ function metapost.flush(result,flusher,askedfig)
local flushfigure = flusher.flushfigure
local textfigure = flusher.textfigure
local processspecial = flusher.processspecial or metapost.processspecial
- for f=1,#figures do
- local figure = figures[f]
- local objects = getobjects(result,figure,f)
- local fignum = figure:charcode() or 0
- if askedfig == "direct" or askedfig == "all" or askedfig == fignum then
- local t = { }
+            local variables = setvariables(figure) -- also resets them in case of not found
+ for index=1,#figures do
+ local figure = figures[index]
+ local properties = setproperties(figure)
+ if askedfig == "direct" or askedfig == "all" or askedfig == properties.number then
+ local objects = getobjects(result,figure,index)
+ local result = { }
local miterlimit, linecap, linejoin, dashed = -1, -1, -1, false
- local bbox = figure:boundingbox()
- local llx, lly, urx, ury = bbox[1], bbox[2], bbox[3], bbox[4]
- local variables = { }
- metapost.variables = variables
- metapost.properties = {
- llx = llx,
- lly = lly,
- urx = urx,
- ury = ury,
- slot = figure:charcode(),
- width = figure:width(),
- height = figure:height(),
- depth = figure:depth(),
- italic = figure:italcorr(),
- }
- -- replaced by the above
- metapost.llx = llx
- metapost.lly = lly
- metapost.urx = urx
- metapost.ury = ury
+ local llx = properties.llx
+ local lly = properties.lly
+ local urx = properties.urx
+ local ury = properties.ury
if urx < llx then
-- invalid
- startfigure(fignum,0,0,0,0,"invalid",figure)
+ startfigure(properties.number,0,0,0,0,"invalid",figure)
stopfigure()
else
- startfigure(fignum,llx,lly,urx,ury,"begin",figure)
- t[#t+1] = "q"
+ startfigure(properties.number,llx,lly,urx,ury,"begin",figure)
+ result[#result+1] = "q"
if objects then
- resetplugins(t) -- we should move the colorinitializer here
+ resetplugins(result) -- we should move the colorinitializer here
for o=1,#objects do
local object = objects[o]
local objecttype = object.type
@@ -422,20 +438,21 @@ function metapost.flush(result,flusher,askedfig)
processspecial(object.prescript)
end
elseif objecttype == "start_clip" then
- t[#t+1] = "q"
- flushnormalpath(object.path,t,false)
- t[#t+1] = "W n"
+ local evenodd = not object.istext and object.postscript == "evenodd"
+ result[#result+1] = "q"
+ flushnormalpath(object.path,result,false)
+ result[#result+1] = evenodd and "W* n" or "W n"
elseif objecttype == "stop_clip" then
- t[#t+1] = "Q"
- miterlimit, linecap, linejoin, dashed = -1, -1, -1, false
+ result[#result+1] = "Q"
+ miterlimit, linecap, linejoin, dashed = -1, -1, -1, "" -- was false
elseif objecttype == "text" then
- t[#t+1] = "q"
+ result[#result+1] = "q"
local ot = object.transform -- 3,4,5,6,1,2
- t[#t+1] = formatters["%f %f %f %f %f %f cm"](ot[3],ot[4],ot[5],ot[6],ot[1],ot[2]) -- TH: formatters["%f %f m %f %f %f %f 0 0 cm"](unpack(ot))
- flushfigure(t) -- flush accumulated literals
- t = { }
+ result[#result+1] = f_cm(ot[3],ot[4],ot[5],ot[6],ot[1],ot[2]) -- TH: formatters["%F %F m %F %F %F %F 0 0 cm"](unpack(ot))
+ flushfigure(result) -- flush accumulated literals
+ result = { }
textfigure(object.font,object.dsize,object.text,object.width,object.height,object.depth)
- t[#t+1] = "Q"
+ result[#result+1] = "Q"
else
-- we use an indirect table as we want to overload
-- entries but this is not possible in userdata
@@ -451,32 +468,32 @@ function metapost.flush(result,flusher,askedfig)
local before, after = processplugins(object)
local objecttype = object.type -- can have changed
if before then
- t = pluginactions(before,t,flushfigure)
+ result = pluginactions(before,result,flushfigure)
end
local ml = object.miterlimit
if ml and ml ~= miterlimit then
miterlimit = ml
- t[#t+1] = formatters["%f M"](ml)
+ result[#result+1] = f_M(ml)
end
local lj = object.linejoin
if lj and lj ~= linejoin then
linejoin = lj
- t[#t+1] = formatters["%i j"](lj)
+ result[#result+1] = f_j(lj)
end
local lc = object.linecap
if lc and lc ~= linecap then
linecap = lc
- t[#t+1] = formatters["%i J"](lc)
+ result[#result+1] = f_J(lc)
end
local dl = object.dash
if dl then
- local d = formatters["[%s] %f d"](concat(dl.dashes or {}," "),dl.offset)
+ local d = f_d(concat(dl.dashes or {}," "),dl.offset)
if d ~= dashed then
dashed = d
- t[#t+1] = dashed
+ result[#result+1] = d
end
- elseif dashed then
- t[#t+1] = "[] 0 d"
+ elseif dashed ~= false then -- was just dashed test
+ result[#result+1] = "[] 0 d"
dashed = false
end
local path = object.path -- newpath
@@ -486,7 +503,7 @@ function metapost.flush(result,flusher,askedfig)
if pen then
if pen.type == 'elliptical' then
transformed, penwidth = pen_characteristics(original) -- boolean, value
- t[#t+1] = formatters["%f w"](penwidth) -- todo: only if changed
+ result[#result+1] = f_w(penwidth) -- todo: only if changed
if objecttype == 'fill' then
objecttype = 'both'
end
@@ -495,58 +512,59 @@ function metapost.flush(result,flusher,askedfig)
end
end
if transformed then
- t[#t+1] = "q"
+ result[#result+1] = "q"
end
+ local evenodd = not object.istext and object.postscript == "evenodd"
if path then
if transformed then
- flushconcatpath(path,t,open)
+ flushconcatpath(path,result,open)
else
- flushnormalpath(path,t,open)
+ flushnormalpath(path,result,open)
end
if objecttype == "fill" then
- t[#t+1] = "h f"
+ result[#result+1] = evenodd and "h f*" or "h f" -- f* = eo
elseif objecttype == "outline" then
- t[#t+1] = (open and "S") or "h S"
+ result[#result+1] = open and "S" or "h S"
elseif objecttype == "both" then
- t[#t+1] = "h B"
+                            result[#result+1] = evenodd and "h B*" or "h B" -- B* = eo -- b includes closepath
end
end
if transformed then
- t[#t+1] = "Q"
+ result[#result+1] = "Q"
end
local path = object.htap
if path then
if transformed then
- t[#t+1] = "q"
+ result[#result+1] = "q"
end
if transformed then
- flushconcatpath(path,t,open)
+ flushconcatpath(path,result,open)
else
- flushnormalpath(path,t,open)
+ flushnormalpath(path,result,open)
end
if objecttype == "fill" then
- t[#t+1] = "h f"
+ result[#result+1] = evenodd and "h f*" or "h f" -- f* = eo
elseif objecttype == "outline" then
- t[#t+1] = (open and "S") or "h S"
+ result[#result+1] = open and "S" or "h S"
elseif objecttype == "both" then
- t[#t+1] = "h B"
+                                result[#result+1] = evenodd and "h B*" or "h B" -- B* = eo -- b includes closepath
end
if transformed then
- t[#t+1] = "Q"
+ result[#result+1] = "Q"
end
end
if after then
- t = pluginactions(after,t,flushfigure)
+ result = pluginactions(after,result,flushfigure)
end
if object.grouped then
-- can be qQ'd so changes can end up in groups
- miterlimit, linecap, linejoin, dashed = -1, -1, -1, false
+ miterlimit, linecap, linejoin, dashed = -1, -1, -1, "" -- was false
end
end
end
end
- t[#t+1] = "Q"
- flushfigure(t)
+ result[#result+1] = "Q"
+ flushfigure(result)
stopfigure("end")
end
if askedfig ~= "all" then
@@ -562,20 +580,18 @@ function metapost.parse(result,askedfig)
if result then
local figures = result.fig
if figures then
+ local multipass = false
local analyzeplugins = metapost.analyzeplugins -- each object
- for f=1,#figures do
- local figure = figures[f]
- local fignum = figure:charcode() or 0
- if askedfig == "direct" or askedfig == "all" or askedfig == fignum then
- local bbox = figure:boundingbox()
- metapost.llx = bbox[1]
- metapost.lly = bbox[2]
- metapost.urx = bbox[3]
- metapost.ury = bbox[4]
- local objects = getobjects(result,figure,f)
+ for index=1,#figures do
+ local figure = figures[index]
+ local properties = setproperties(figure)
+ if askedfig == "direct" or askedfig == "all" or askedfig == properties.number then
+ local objects = getobjects(result,figure,index)
if objects then
for o=1,#objects do
- analyzeplugins(objects[o])
+ if analyzeplugins(objects[o]) then
+ multipass = true
+ end
end
end
if askedfig ~= "all" then
@@ -583,24 +599,24 @@ function metapost.parse(result,askedfig)
end
end
end
+ return multipass
end
end
end
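-- Illustrative note (not part of the patch): the returned flag replaces the old
-- metapost.multipass global; metapost.convert above now acts on it directly:
--
--   if multipass and not metapost.parse(result,askedfig) and metapost.optimize then
--       -- no plugin asked for a second pass, so flushing right away saves a run
--   end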
-- tracing:
-local t = { }
+local result = { }
local flusher = {
startfigure = function()
- t = { }
+ result = { }
context.startnointerference()
end,
flushfigure = function(literals)
- local n = #t
- for i=1, #literals do
- n = n + 1
- t[n] = literals[i]
+ local n = #result
+ for i=1,#literals do
+ result[n+i] = literals[i]
end
end,
stopfigure = function()
@@ -610,30 +626,35 @@ local flusher = {
function metapost.pdfliterals(result)
metapost.flush(result,flusher)
- return t
+ return result
end
--- so far
-
-function metapost.totable(result)
- local figure = result and result.fig and result.fig[1]
+function metapost.totable(result,askedfig)
+ local askedfig = askedfig or 1
+ local figure = result and result.fig and result.fig[1]
if figure then
- local t = { }
- local objects = figure:objects()
+ local results = { }
+ -- local objects = figure:objects()
+ local objects = getobjects(result,figure,askedfig)
for o=1,#objects do
local object = objects[o]
- local tt = { }
- local fields = mplib.fields(object)
+ local result = { }
+ local fields = object_fields(object) -- hm, is this the whole list, if so, we can get it once
for f=1,#fields do
local field = fields[f]
- tt[field] = object[field]
+ result[field] = object[field]
end
- t[o] = tt
+ results[o] = result
end
- local b = figure:boundingbox()
+ local boundingbox = figure:boundingbox()
return {
- boundingbox = { llx = b[1], lly = b[2], urx = b[3], ury = b[4] },
- objects = t
+ boundingbox = {
+ llx = boundingbox[1],
+ lly = boundingbox[2],
+ urx = boundingbox[3],
+ ury = boundingbox[4],
+ },
+ objects = results
}
else
return nil
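-- Illustrative note (not part of the patch): the table that metapost.totable builds
-- (and that the "metapost.savetable" directive dumps to an .mpl file) looks like
-- this; the field names come from mplib's object fields, the values are made up:
--
--   {
--     boundingbox = { llx = 0, lly = 0, urx = 100, ury = 50 },
--     objects     = {
--       { type = "outline", path = { ... }, ... },
--       { type = "fill",    path = { ... }, ... },
--     },
--   }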
diff --git a/tex/context/base/mlib-pdf.mkiv b/tex/context/base/mlib-pdf.mkiv
index 0913b3699..92bf86ea9 100644
--- a/tex/context/base/mlib-pdf.mkiv
+++ b/tex/context/base/mlib-pdf.mkiv
@@ -88,7 +88,7 @@
% MPLIB specific:
-\def\MPLIBtoPDF#1{\ctxlua{metapost.flushliteral(#1)}}
+\def\MPLIBtoPDF{\clf_mpflushliteral}
\def\startMPLIBtoPDF#1#2#3#4%
{\meta_process_graphic_figure_start
@@ -109,7 +109,7 @@
\meta_process_graphic_figure_stop}
\def\MPLIBflushreset % This can (will) move to the Lua end.
- {\ctxlua{metapost.flushreset()}}
+ {\clf_mpflushreset}
%D Kind of special:
%
@@ -134,27 +134,50 @@
\unexpanded\def\directMPgraphic
{\dodoublegroupempty\mlib_direct_graphic}
+% \def\mlib_direct_graphic#1#2% makes pages (todo: make boxes)
+% {\meta_begin_graphic_group{#1}%
+% \let\startMPLIBtoPDF\directstartMPLIBtoPDF
+% \let\stopMPLIBtoPDF \directstopMPLIBtoPDF
+% \meta_start_current_graphic
+% \forgetall
+% \edef\p_extensions{\MPinstanceparameter\s!extensions}%
+% \normalexpanded{\noexpand\ctxlua{metapost.graphic {
+% instance = "\currentMPinstance",
+% format = "\currentMPformat",
+% data = \!!bs#2;\!!es,
+% initializations = \!!bs\meta_flush_current_initializations\!!es,
+% % useextensions = "\MPinstanceparameter\s!extensions",
+% \ifx\p_extensions\v!yes
+% extensions = \!!bs\clf_getmpextensions{\currentMPinstance}\!!es,
+% \fi
+% inclusions = \!!bs\meta_flush_current_inclusions\!!es,
+% definitions = \!!bs\meta_flush_current_definitions\!!es,
+% figure = "all",
+% method = "\MPinstanceparameter\c!method",
+% }}}%
+% \meta_stop_current_graphic
+% \meta_end_graphic_group}
+
\def\mlib_direct_graphic#1#2% makes pages (todo: make boxes)
{\meta_begin_graphic_group{#1}%
\let\startMPLIBtoPDF\directstartMPLIBtoPDF
\let\stopMPLIBtoPDF \directstopMPLIBtoPDF
\meta_start_current_graphic
- \forgetall
- \edef\p_extensions{\MPinstanceparameter\s!extensions}%
- \normalexpanded{\noexpand\ctxlua{metapost.graphic {
- instance = "\currentMPinstance",
- format = "\currentMPformat",
- data = \!!bs#2;\!!es,
- initializations = \!!bs\meta_flush_current_initializations\!!es,
-% useextensions = "\MPinstanceparameter\s!extensions",
-\ifx\p_extensions\v!yes
- extensions = \!!bs\ctxcommand{getmpextensions("\currentMPinstance")}\!!es,
-\fi
- inclusions = \!!bs\meta_flush_current_inclusions\!!es,
- definitions = \!!bs\meta_flush_current_definitions\!!es,
- figure = "all",
- method = "\MPinstanceparameter\c!method",
- }}}%
+ \forgetall
+ \edef\p_extensions{\MPinstanceparameter\s!extensions}%
+ \normalexpanded{\noexpand\clf_mpgraphic
+ instance {\currentMPinstance}%
+ format {\currentMPformat}%
+ data {#2;}%
+ initializations {\meta_flush_current_initializations}%
+ \ifx\p_extensions\v!yes
+ extensions {\clf_getmpextensions{\currentMPinstance}}%
+ \fi
+ inclusions {\meta_flush_current_inclusions}%
+ definitions {\meta_flush_current_definitions}%
+ figure {all}%
+ method {\MPinstanceparameter\c!method}%
+ \relax}%
\meta_stop_current_graphic
\meta_end_graphic_group}
diff --git a/tex/context/base/mlib-pps.lua b/tex/context/base/mlib-pps.lua
index 385fb3ece..ab56699b9 100644
--- a/tex/context/base/mlib-pps.lua
+++ b/tex/context/base/mlib-pps.lua
@@ -6,10 +6,8 @@ if not modules then modules = { } end modules ['mlib-pps'] = {
license = "see context related readme files",
}
--- todo: pass multipass nicer
-
local format, gmatch, match, split = string.format, string.gmatch, string.match, string.split
-local tonumber, type = tonumber, type
+local tonumber, type, unpack = tonumber, type, unpack
local round = math.round
local insert, remove, concat = table.insert, table.remove, table.concat
local Cs, Cf, C, Cg, Ct, P, S, V, Carg = lpeg.Cs, lpeg.Cf, lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.S, lpeg.V, lpeg.Carg
@@ -18,8 +16,15 @@ local formatters = string.formatters
local mplib, metapost, lpdf, context = mplib, metapost, lpdf, context
+local context = context
+local context_setvalue = context.setvalue
+
+local implement = interfaces.implement
+local setmacro = interfaces.setmacro
+
local texgetbox = tex.getbox
local texsetbox = tex.setbox
+local textakebox = tex.takebox
local copy_list = node.copy_list
local free_list = node.flush_list
local setmetatableindex = table.setmetatableindex
@@ -37,11 +42,19 @@ local report_textexts = logs.reporter("metapost","textexts")
local report_scripts = logs.reporter("metapost","scripts")
local colors = attributes.colors
+local defineprocesscolor = colors.defineprocesscolor
+local definespotcolor = colors.definespotcolor
+local definemultitonecolor = colors.definemultitonecolor
+local colorvalue = colors.value
-local rgbtocmyk = colors.rgbtocmyk or function() return 0,0,0,1 end
-local cmyktorgb = colors.cmyktorgb or function() return 0,0,0 end
-local rgbtogray = colors.rgbtogray or function() return 0 end
-local cmyktogray = colors.cmyktogray or function() return 0 end
+local transparencies = attributes.transparencies
+local registertransparency = transparencies.register
+local transparencyvalue = transparencies.value
+
+local rgbtocmyk = colors.rgbtocmyk -- or function() return 0,0,0,1 end
+local cmyktorgb = colors.cmyktorgb -- or function() return 0,0,0 end
+local rgbtogray = colors.rgbtogray -- or function() return 0 end
+local cmyktogray = colors.cmyktogray -- or function() return 0 end
metapost.makempy = metapost.makempy or { nofconverted = 0 }
local makempy = metapost.makempy
@@ -56,11 +69,6 @@ local innertransparency = nooutertransparency
local pdfcolor = lpdf.color
local pdftransparency = lpdf.transparency
-local registercolor = colors.register
-local registerspotcolor = colors.registerspotcolor
-
-local transparencies = attributes.transparencies
-local registertransparency = transparencies.register
function metapost.setoutercolor(mode,colormodel,colorattribute,transparencyattribute)
-- has always to be called before conversion
@@ -82,12 +90,19 @@ function metapost.setoutercolor(mode,colormodel,colorattribute,transparencyattri
innertransparency = outertransparency -- not yet used
end
-local f_gray = formatters["%.3f g %.3f G"]
-local f_rgb = formatters["%.3f %.3f %.3f rg %.3f %.3f %.3f RG"]
-local f_cmyk = formatters["%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K"]
-local f_cm = formatters["q %f %f %f %f %f %f cm"]
+-- todo: get this from the lpdf module
+
+local f_f = formatters["%F"]
+local f_f3 = formatters["%.3F"]
+
+local f_gray = formatters["%.3F g %.3F G"]
+local f_rgb = formatters["%.3F %.3F %.3F rg %.3F %.3F %.3F RG"]
+local f_cmyk = formatters["%.3F %.3F %.3F %.3F k %.3F %.3F %.3F %.3F K"]
+local f_cm = formatters["q %F %F %F %F %F %F cm"]
local f_shade = formatters["MpSh%s"]
+local f_spot = formatters["/%s cs /%s CS %s SCN %s scn"]
+
local function checked_color_pair(color,...)
if not color then
return innercolor, outercolor
@@ -137,15 +152,6 @@ local function normalize(ca,cb)
end
end
--- todo: check for the same colorspace (actually a backend issue), now we can
--- have several similar resources
---
--- normalize(ca,cb) fails for spotcolors
-
-local function spotcolorconverter(parent, n, d, p)
- registerspotcolor(parent)
- return pdfcolor(colors.model,registercolor(nil,'spot',parent,n,d,p)), outercolor
-end
local commasplitter = tsplitat(",")
@@ -211,21 +217,34 @@ local function checkandconvert(ca,cb)
end
end
+-- We keep textexts in a shared list (as it's easier that way and we also had that in
+-- the beginning). Each graphic gets its own (1 based) subtable so that we can also
+-- handle multiple conversions in one go which is needed when we process mp files
+-- directly.
+
local stack = { } -- quick hack, we will pass topofstack around
local top = nil
local nofruns = 0 -- askedfig: "all", "first", number
-local function startjob(texmode)
- top = {
- textexts = { }, -- all boxes, optionally with a different color
- texslots = { }, -- references to textexts in order or usage
- texorder = { }, -- references to textexts by mp index
+local function preset(t,k)
+ -- references to textexts by mp index
+ local v = {
textrial = 0,
texfinal = 0,
- -- used by tx plugin
+ texslots = { },
+ texorder = { },
texhash = { },
+ }
+ t[k] = v
+ return v
+end
+
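-- Illustrative sketch (not part of the patch): top.texdata is a lazy table, so the
-- first access for a given figure number creates that figure's administration. In
-- plain Lua terms (setmetatableindex with preset does the same job; the local name
-- lazytexdata is made up):

local lazytexdata = setmetatable({ }, { __index = function(t,k)
    local v = { textrial = 0, texfinal = 0, texslots = { }, texorder = { }, texhash = { } }
    t[k] = v
    return v
end })

lazytexdata[3].textrial = lazytexdata[3].textrial + 1 -- slot 3 springs into existence here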
+local function startjob(plugmode)
+ top = {
+ textexts = { }, -- all boxes, optionally with a different color
texlast = 0,
- texmode = texmode, -- some day we can then skip all pre/postscripts
+ texdata = setmetatableindex({},preset), -- references to textexts in order or usage
+ plugmode = plugmode, -- some day we can then skip all pre/postscripts
}
insert(stack,top)
if trace_runs then
@@ -239,7 +258,7 @@ local function stopjob()
for n, tn in next, top.textexts do
free_list(tn)
if trace_textexts then
- report_textexts("freeing box %s",n)
+ report_textexts("freeing text %s",n)
end
end
if trace_runs then
@@ -251,27 +270,41 @@ local function stopjob()
end
end
-function metapost.settextexts () end -- obsolete
-function metapost.resettextexts() end -- obsolete
+function metapost.getjobdata()
+ return top
+end
-- end of new
-function metapost.settext(box,slot)
- top.textexts[slot] = copy_list(texgetbox(box))
- texsetbox(box,nil)
- -- this will become
- -- top.textexts[slot] = texgetbox(box)
- -- unsetbox(box)
+local function settext(box,slot)
+ if top then
+ top.textexts[slot] = copy_list(texgetbox(box))
+ texsetbox(box,nil)
+ -- this can become
+ -- top.textexts[slot] = textakebox(box)
+ else
+ -- weird error
+ end
end
-function metapost.gettext(box,slot)
- texsetbox(box,copy_list(top.textexts[slot]))
- if trace_textexts then
- report_textexts("putting text %s in box %s",slot,box)
+local function gettext(box,slot)
+ if top then
+ texsetbox(box,copy_list(top.textexts[slot]))
+ if trace_textexts then
+ report_textexts("putting text %s in box %s",slot,box)
+ end
+ -- top.textexts[slot] = nil -- no, pictures can be placed several times
+ else
+ -- weird error
end
- -- top.textexts[slot] = nil -- no, pictures can be placed several times
end
+metapost.settext = settext
+metapost.gettext = gettext
+
+implement { name = "mpsettext", actions = settext, arguments = { "integer", "integer" } } -- box slot
+implement { name = "mpgettext", actions = gettext, arguments = { "integer", "integer" } } -- box slot
+
-- rather generic pdf, so use this elsewhere too it no longer pays
-- off to distinguish between outline and fill (we now have both
-- too, e.g. in arrows)
@@ -461,7 +494,7 @@ end
 -- currently a one-liner produces less code
-- textext.*(".*") can have "'s but tricky parsing as we can have concatenated strings
--- so this is something for a boring plain or train trip and we might assume proper mp
+-- so this is something for a boring plane or train trip and we might assume proper mp
-- input anyway
local parser = Cs((
@@ -471,57 +504,95 @@ local parser = Cs((
+ 1
)^0)
+local checking_enabled = true directives.register("metapost.checktexts",function(v) checking_enabled = v end)
+
local function checktexts(str)
- found, forced = false, false
- return lpegmatch(parser,str), found, forced
+ if checking_enabled then
+ found, forced = false, false
+ return lpegmatch(parser,str), found, forced
+ else
+ return str
+ end
end
metapost.checktexts = checktexts
local factor = 65536*(7227/7200)
-function metapost.edefsxsy(wd,ht,dp) -- helper for figure
- local hd = ht + dp
- context.setvalue("sx",wd ~= 0 and factor/wd or 0)
- context.setvalue("sy",hd ~= 0 and factor/hd or 0)
-end
+-- function metapost.edefsxsy(wd,ht,dp) -- helper for figure
+-- local hd = ht + dp
+-- context_setvalue("sx",wd ~= 0 and factor/wd or 0)
+-- context_setvalue("sy",hd ~= 0 and factor/hd or 0)
+-- end
+
+implement {
+ name = "mpsetsxsy",
+ arguments = { "dimen", "dimen", "dimen" },
+ actions = function(wd,ht,dp)
+ local hd = ht + dp
+ setmacro("sx",wd ~= 0 and factor/wd or 0)
+ setmacro("sy",hd ~= 0 and factor/hd or 0)
+ end
+}
local function sxsy(wd,ht,dp) -- helper for text
local hd = ht + dp
return (wd ~= 0 and factor/wd) or 0, (hd ~= 0 and factor/hd) or 0
end
-local no_first_run = "mfun_first_run := false ;"
-local do_first_run = "mfun_first_run := true ;"
-local no_trial_run = "mfun_trial_run := false ;"
-local do_trial_run = "mfun_trial_run := true ;"
+-- for stock mp we need to declare the booleans first
+
+local no_first_run = "boolean mfun_first_run ; mfun_first_run := false ;"
+local do_first_run = "boolean mfun_first_run ; mfun_first_run := true ;"
+local no_trial_run = "boolean mfun_trial_run ; mfun_trial_run := false ;"
+local do_trial_run = "boolean mfun_trial_run ; mfun_trial_run := true ;"
local do_begin_fig = "; beginfig(1) ; "
local do_end_fig = "; endfig ;"
local do_safeguard = ";"
-local f_text_data = formatters["mfun_tt_w[%i] := %f ; mfun_tt_h[%i] := %f ; mfun_tt_d[%i] := %f ;"]
+-- local f_text_data = formatters["mfun_tt_w[%i] := %f ; mfun_tt_h[%i] := %f ; mfun_tt_d[%i] := %f ;"]
+--
+-- function metapost.textextsdata()
+-- local textexts = top.textexts
+-- local collected = { }
+-- local nofcollected = 0
+-- for k, data in sortedhash(top.texdata) do -- sort is nicer in trace
+-- local texorder = data.texorder
+-- for n=1,#texorder do
+-- local box = textexts[texorder[n]]
+-- if box then
+-- local wd, ht, dp = box.width/factor, box.height/factor, box.depth/factor
+-- if trace_textexts then
+-- report_textexts("passed data item %s:%s > (%p,%p,%p)",k,n,wd,ht,dp)
+-- end
+-- nofcollected = nofcollected + 1
+-- collected[nofcollected] = f_text_data(n,wd,n,ht,n,dp)
+-- else
+-- break
+-- end
+-- end
+-- end
+-- return collected
+-- end
function metapost.textextsdata()
- local texorder = top.texorder
- local textexts = top.textexts
- local collected = { }
- local nofcollected = 0
- for n=1,#texorder do
- local box = textexts[texorder[n]]
- if box then
- local wd, ht, dp = box.width/factor, box.height/factor, box.depth/factor
- if trace_textexts then
- report_textexts("passed data item %s: (%p,%p,%p)",n,wd,ht,dp)
+ local textexts = top.textexts
+ local collected = { }
+ for k, data in sortedhash(top.texdata) do -- sort is nicer in trace
+ local texorder = data.texorder
+ for n=1,#texorder do
+ local box = textexts[texorder[n]]
+ if box then
+ collected[n] = box
+ else
+ break
end
- nofcollected = nofcollected + 1
- collected[nofcollected] = f_text_data(n,wd,n,ht,n,dp)
- else
- break
end
end
- return collected
+ mp.tt_initialize(collected)
end
+
metapost.intermediate = metapost.intermediate or { }
metapost.intermediate.actions = metapost.intermediate.actions or { }
@@ -552,16 +623,16 @@ local function extrapass()
if trace_runs then
report_metapost("second run of job %s, asked figure %a",top.nofruns,top.askedfig)
end
+ local textexts = metapost.textextsdata()
processmetapost(top.mpx, {
top.wrappit and do_begin_fig or "",
no_trial_run,
- concat(metapost.textextsdata()," ;\n"),
+ textexts and concat(textexts," ;\n") or "",
top.initializations,
do_safeguard,
top.data,
top.wrappit and do_end_fig or "",
}, false, nil, false, true, top.askedfig)
- -- context.MPLIBresettexts() -- must happen afterwards
end
function metapost.graphic_base_pass(specification) -- name will change (see mlib-ctx.lua)
@@ -585,17 +656,18 @@ function metapost.graphic_base_pass(specification) -- name will change (see mlib
top.nofruns = nofruns
--
local done_1, done_2, done_3, forced_1, forced_2, forced_3
- data, done_1, forced_1 = checktexts(data)
- -- we had preamble = extensions + inclusions
- if extensions == "" then
- extensions, done_2, forced_2 = "", false, false
- else
- extensions, done_2, forced_2 = checktexts(extensions)
- end
- if inclusions == "" then
- inclusions, done_3, forced_3 = "", false, false
- else
- inclusions, done_3, forced_3 = checktexts(inclusions)
+ if checking_enabled then
+ data, done_1, forced_1 = checktexts(data)
+ if extensions == "" then
+ extensions, done_2, forced_2 = "", false, false
+ else
+ extensions, done_2, forced_2 = checktexts(extensions)
+ end
+ if inclusions == "" then
+ inclusions, done_3, forced_3 = "", false, false
+ else
+ inclusions, done_3, forced_3 = checktexts(inclusions)
+ end
end
top.intermediate = false
         top.multipass = false -- not needed here
@@ -680,6 +752,12 @@ function makempy.registerfile(filename)
mpyfilename = filename
end
+implement {
+ name = "registermpyfile",
+ actions = makempy.registerfile,
+ arguments = "string"
+}
+
function makempy.processgraphics(graphics)
if #graphics == 0 then
return
@@ -720,13 +798,13 @@ end
-- -- the new plugin handler -- --
-local sequencers = utilities.sequencers
-local appendgroup = sequencers.appendgroup
-local appendaction = sequencers.appendaction
+local sequencers = utilities.sequencers
+local appendgroup = sequencers.appendgroup
+local appendaction = sequencers.appendaction
-local resetter = nil
-local analyzer = nil
-local processor = nil
+local resetter = nil
+local analyzer = nil
+local processor = nil
local resetteractions = sequencers.new { arguments = "t" }
local analyzeractions = sequencers.new { arguments = "object,prescript" }
@@ -773,7 +851,7 @@ end
-- end
function metapost.pluginactions(what,t,flushfigure) -- before/after object, depending on what
- if top.texmode then
+ if top.plugmode then -- hm, what about other features
for i=1,#what do
local wi = what[i]
if type(wi) == "function" then
@@ -790,7 +868,7 @@ function metapost.pluginactions(what,t,flushfigure) -- before/after object, depe
end
 function metapost.resetplugins(t) -- initialize plugins, before figure
- if top.texmode then
+ if top.plugmode then
-- plugins can have been added
resetter = resetteractions.runner
analyzer = analyzeractions.runner
@@ -801,16 +879,18 @@ function metapost.resetplugins(t) -- intialize plugins, before figure
end
function metapost.analyzeplugins(object) -- each object (first pass)
- if top.texmode then
+ if top.plugmode then
local prescript = object.prescript -- specifications
if prescript and #prescript > 0 then
- return analyzer(object,splitprescript(prescript))
+ analyzer(object,splitprescript(prescript))
+ return top.multipass
end
end
+ return false
end
function metapost.processplugins(object) -- each object (second pass)
- if top.texmode then
+ if top.plugmode then
local prescript = object.prescript -- specifications
if prescript and #prescript > 0 then
local before = { }
@@ -854,19 +934,33 @@ end
local function tx_reset()
if top then
+ -- why ?
top.texhash = { }
top.texlast = 0
end
end
local fmt = formatters["%s %s %s % t"]
-local pat = tsplitat(":")
+----- pat = tsplitat(":")
+local pat = lpeg.tsplitter(":",tonumber) -- so that %F can do its work
+
+local f_gray_yes = formatters["s=%F,a=%F,t=%F"]
+local f_gray_nop = formatters["s=%F"]
+local f_rgb_yes = formatters["r=%F,g=%F,b=%F,a=%F,t=%F"]
+local f_rgb_nop = formatters["r=%F,g=%F,b=%F"]
+local f_cmyk_yes = formatters["c=%F,m=%F,y=%F,k=%F,a=%F,t=%F"]
+local f_cmyk_nop = formatters["c=%F,m=%F,y=%F,k=%F"]
+
+local ctx_MPLIBsetNtext = context.MPLIBsetNtext
+local ctx_MPLIBsetCtext = context.MPLIBsetCtext
+local ctx_MPLIBsettext = context.MPLIBsettext
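-- Illustrative note (not part of the patch): pat splits the colon separated color
-- string that MetaPost passes along and converts the pieces with tonumber, so the
-- %F based formatters above receive numbers, e.g.
--
--   lpegmatch(pat,"1:0.5:0.25")  -->  { 1, 0.5, 0.25 }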
local function tx_analyze(object,prescript) -- todo: hash content and reuse them
+ local data = top.texdata[metapost.properties.number]
local tx_stage = prescript.tx_stage
if tx_stage == "trial" then
- local tx_trial = top.textrial + 1
- top.textrial = tx_trial
+ local tx_trial = data.textrial + 1
+ data.textrial = tx_trial
local tx_number = tonumber(prescript.tx_number)
local s = object.postscript or ""
local c = object.color -- only simple ones, no transparency
@@ -876,79 +970,80 @@ local function tx_analyze(object,prescript) -- todo: hash content and reuse them
c = lpegmatch(pat,txc)
end
end
- local a = prescript.tr_alternative
- local t = prescript.tr_transparency
+ local a = tonumber(prescript.tr_alternative)
+ local t = tonumber(prescript.tr_transparency)
local h = fmt(tx_number,a or "-",t or "-",c or "-")
- local n = top.texhash[h] -- todo: hashed variant with s (nicer for similar labels)
+ local n = data.texhash[h] -- todo: hashed variant with s (nicer for similar labels)
if not n then
local tx_last = top.texlast + 1
top.texlast = tx_last
+ -- report_textexts("tex string: %s",s)
if not c then
- -- no color
+ ctx_MPLIBsetNtext(tx_last,s)
elseif #c == 1 then
if a and t then
- s = formatters["\\directcolored[s=%f,a=%f,t=%f]%s"](c[1],a,t,s)
+ ctx_MPLIBsetCtext(tx_last,f_gray_yes(c[1],a,t),s)
else
- s = formatters["\\directcolored[s=%f]%s"](c[1],s)
+ ctx_MPLIBsetCtext(tx_last,f_gray_nop(c[1]),s)
end
elseif #c == 3 then
if a and t then
- s = formatters["\\directcolored[r=%f,g=%f,b=%f,a=%f,t=%f]%s"](c[1],c[2],c[3],a,t,s)
+                        ctx_MPLIBsetCtext(tx_last,f_rgb_yes(c[1],c[2],c[3],a,t),s)
else
- s = formatters["\\directcolored[r=%f,g=%f,b=%f]%s"](c[1],c[2],c[3],s)
+ ctx_MPLIBsetCtext(tx_last,f_rgb_nop(c[1],c[2],c[3]),s)
end
elseif #c == 4 then
if a and t then
- s = formatters["\\directcolored[c=%f,m=%f,y=%f,k=%f,a=%f,t=%f]%s"](c[1],c[2],c[3],c[4],a,t,s)
+ ctx_MPLIBsetCtext(tx_last,f_cmyk_yes(c[1],c[2],c[3],c[4],a,t),s)
else
- s = formatters["\\directcolored[c=%f,m=%f,y=%f,k=%f]%s"](c[1],c[2],c[3],c[4],s)
+ ctx_MPLIBsetCtext(tx_last,f_cmyk_nop(c[1],c[2],c[3],c[4]),s)
end
+ else
+ ctx_MPLIBsetNtext(tx_last,s)
end
- context.MPLIBsettext(tx_last,s)
top.multipass = true
- metapost.multipass = true -- ugly
- top.texhash[h] = tx_last
- top.texslots[tx_trial] = tx_last
- top.texorder[tx_number] = tx_last
+ data.texhash [h] = tx_last
+ data.texslots[tx_trial] = tx_last
+ data.texorder[tx_number] = tx_last
if trace_textexts then
- report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,tx_trial,tx_number,tx_last,h)
+ report_textexts("stage %a, usage %a, number %a, new %a, hash %a, text %a",tx_stage,tx_trial,tx_number,tx_last,h,s)
end
else
- top.texslots[tx_trial] = n
+ data.texslots[tx_trial] = n
if trace_textexts then
- report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,tx_trial,tx_number,n,h)
+ report_textexts("stage %a, usage %a, number %a, old %a, hash %a, text %a",tx_stage,tx_trial,tx_number,n,h,s)
end
end
elseif tx_stage == "extra" then
- local tx_trial = top.textrial + 1
- top.textrial = tx_trial
+ local tx_trial = data.textrial + 1
+ data.textrial = tx_trial
local tx_number = tonumber(prescript.tx_number)
- if not top.texorder[tx_number] then
+ if not data.texorder[tx_number] then
local s = object.postscript or ""
local tx_last = top.texlast + 1
top.texlast = tx_last
- context.MPLIBsettext(tx_last,s)
+ ctx_MPLIBsettext(tx_last,s)
top.multipass = true
- metapost.multipass = true -- ugly
- top.texslots[tx_trial] = tx_last
- top.texorder[tx_number] = tx_last
+ data.texslots[tx_trial] = tx_last
+ data.texorder[tx_number] = tx_last
if trace_textexts then
- report_textexts("stage %a, usage %a, number %a, extra %a",tx_stage,tx_trial,tx_number,tx_last)
+ report_textexts("stage %a, usage %a, number %a, extra %a, text %a",tx_stage,tx_trial,tx_number,tx_last,s)
end
end
end
end
local function tx_process(object,prescript,before,after)
- local tx_number = prescript.tx_number
+ local data = top.texdata[metapost.properties.number]
+ local tx_number = tonumber(prescript.tx_number)
if tx_number then
- tx_number = tonumber(tx_number)
local tx_stage = prescript.tx_stage
if tx_stage == "final" then
- top.texfinal = top.texfinal + 1
- local n = top.texslots[top.texfinal]
+ local tx_final = data.texfinal + 1
+ data.texfinal = tx_final
+ local n = data.texslots[tx_final]
if trace_textexts then
- report_textexts("stage %a, usage %a, number %a, use %a",tx_stage,top.texfinal,tx_number,n)
+ report_textexts("stage %a, usage %a, number %a, use %a",tx_stage,tx_final,tx_number,n)
end
local sx, rx, ry, sy, tx, ty = cm(object) -- needs to be frozen outside the function
local box = top.textexts[n]
@@ -956,12 +1051,12 @@ local function tx_process(object,prescript,before,after)
before[#before+1] = function()
-- flush always happens, we can have a special flush function injected before
context.MPLIBgettextscaledcm(n,
- format("%f",sx), -- bah ... %s no longer checks
- format("%f",rx), -- bah ... %s no longer checks
- format("%f",ry), -- bah ... %s no longer checks
- format("%f",sy), -- bah ... %s no longer checks
- format("%f",tx), -- bah ... %s no longer checks
- format("%f",ty), -- bah ... %s no longer checks
+ f_f(sx), -- bah ... %s no longer checks
+ f_f(rx), -- bah ... %s no longer checks
+ f_f(ry), -- bah ... %s no longer checks
+ f_f(sy), -- bah ... %s no longer checks
+ f_f(tx), -- bah ... %s no longer checks
+ f_f(ty), -- bah ... %s no longer checks
sxsy(box.width,box.height,box.depth))
end
else
@@ -972,8 +1067,9 @@ local function tx_process(object,prescript,before,after)
if not trace_textexts then
object.path = false -- else: keep it
end
- object.color = false
+ object.color = false
object.grouped = true
+ object.istext = true
end
end
end
@@ -996,7 +1092,6 @@ local function gt_analyze(object,prescript)
graphics[gt_index] = formatters["\\MPLIBgraphictext{%s}"](object.postscript or "")
top.intermediate = true
top.multipass = true
- metapost.multipass = true -- ugly
end
end
@@ -1012,9 +1107,9 @@ local function sh_process(object,prescript,before,after)
local sh_type = prescript.sh_type
if sh_type then
nofshades = nofshades + 1
- local domain = lpegmatch(domainsplitter,prescript.sh_domain)
- local centera = lpegmatch(centersplitter,prescript.sh_center_a)
- local centerb = lpegmatch(centersplitter,prescript.sh_center_b)
+ local domain = lpegmatch(domainsplitter,prescript.sh_domain or "0 1")
+ local centera = lpegmatch(centersplitter,prescript.sh_center_a or "0 0")
+ local centerb = lpegmatch(centersplitter,prescript.sh_center_b or "0 0")
--
local sh_color_a = prescript.sh_color_a or "1"
local sh_color_b = prescript.sh_color_b or "1"
@@ -1063,8 +1158,9 @@ local function sh_process(object,prescript,before,after)
local coordinates = { centera[1], centera[2], centerb[1], centerb[2] }
lpdf.linearshade(name,domain,ca,cb,1,colorspace,coordinates,separation) -- backend specific (will be renamed)
elseif sh_type == "circular" then
- local radiusa = tonumber(prescript.sh_radius_a)
- local radiusb = tonumber(prescript.sh_radius_b)
+ local factor = tonumber(prescript.sh_factor) or 1
+ local radiusa = factor * tonumber(prescript.sh_radius_a)
+ local radiusb = factor * tonumber(prescript.sh_radius_b)
local coordinates = { centera[1], centera[2], radiusa, centerb[1], centerb[2], radiusb }
lpdf.circularshade(name,domain,ca,cb,1,colorspace,coordinates,separation) -- backend specific (will be renamed)
else
@@ -1109,8 +1205,9 @@ local function ps_process(object,prescript,before,after)
local first, third = op[1], op[3]
local x, y = first.x_coord, first.y_coord
local w, h = third.x_coord - x, third.y_coord - y
- x = x - metapost.llx
- y = metapost.ury - y
+ local properties = metapost.properties
+ x = x - properties.llx
+ y = properties.ury - y
before[#before+1] = function()
context.MPLIBpositionwhd(ps_label,x,y,w,h)
end
@@ -1136,7 +1233,7 @@ end
-- color and transparency
local value = Cs ( (
- (Carg(1) * C((1-P(","))^1)) / function(a,b) return format("%0.3f",a * tonumber(b)) end
+ (Carg(1) * C((1-P(","))^1)) / function(a,b) return f_f3(a * tonumber(b)) end
+ P(","))^1
)
@@ -1145,6 +1242,12 @@ local value = Cs ( (
local t_list = attributes.list[attributes.private('transparency')]
local c_list = attributes.list[attributes.private('color')]
+local remappers = {
+ [1] = formatters["s=%s"],
+ [3] = formatters["r=%s,g=%s,b=%s"],
+ [4] = formatters["c=%s,m=%s,y=%s,k=%s"],
+}
+
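-- Illustrative sketch (not part of the patch): how a colon separated spot value is
-- turned into a ConTeXt color specification (split and unpack are the locals from
-- the top of this file; the value "0.3:0.4:0.5" is made up):

local sp_list = split("0.3:0.4:0.5",":")             -- { "0.3", "0.4", "0.5" }
local sp_spec = remappers[#sp_list](unpack(sp_list)) -- "r=0.3,g=0.4,b=0.5"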
local function tr_process(object,prescript,before,after)
-- before can be shortcut to t
local tr_alternative = prescript.tr_alternative
@@ -1160,56 +1263,83 @@ local function tr_process(object,prescript,before,after)
local sp_type = prescript.sp_type
if not sp_type then
c_b, c_a = colorconverter(cs)
- elseif sp_type == "spot" or sp_type == "multitone" then
- local sp_name = prescript.sp_name or "black"
- local sp_fractions = prescript.sp_fractions or 1
- local sp_components = prescript.sp_components or ""
- local sp_value = prescript.sp_value or "1"
- local cf = cs[1]
- if cf ~= 1 then
- -- beware, we do scale the spotcolors but not the alternative representation
- sp_value = lpegmatch(value,sp_value,1,cf) or sp_value
- end
- c_b, c_a = spotcolorconverter(sp_name,sp_fractions,sp_components,sp_value)
- elseif sp_type == "named" then
- -- we might move this to another namespace .. also, named can be a spotcolor
- -- so we need to check for that too ... also we need to resolve indirect
- -- colors so we might need the second pass for this (draw dots with \MPcolor)
+ else
local sp_name = prescript.sp_name or "black"
- if not tr_alternative then
- -- todo: sp_name is not yet registered at this time
- local t = t_list[sp_name] -- string or attribute
- local v = t and attributes.transparencies.value(t)
- if v then
- before[#before+1] = formatters["/Tr%s gs"](registertransparency(nil,v[1],v[2],true))
- after[#after+1] = "/Tr0 gs" -- outertransparency
+ if sp_type == "spot" then
+ local sp_value = prescript.sp_value or "s:1"
+ local sp_temp = formatters["mp:%s"](sp_value)
+ local s = split(sp_value,":")
+ local r = remappers[#s]
+ defineprocesscolor(sp_temp,r and r(unpack(s)) or "s=0",true,true)
+ definespotcolor(sp_name,sp_temp,"p=1",true)
+ sp_type = "named"
+ elseif sp_type == "multitone" then
+ local sp_value = prescript.sp_value or "s:1"
+ local sp_spec = { }
+ local sp_list = split(sp_value," ")
+ for i=1,#sp_list do
+ local v = sp_list[i]
+ local t = formatters["mp:%s"](v)
+ local s = split(v,":")
+ local r = remappers[#s]
+ defineprocesscolor(t,r and r(unpack(s)) or "s=0",true,true)
+ local tt = formatters["ms:%s"](v)
+ definespotcolor(tt,t,"p=1",true)
+ sp_spec[#sp_spec+1] = formatters["%s=1"](t)
end
+ sp_spec = concat(sp_spec,",")
+ definemultitonecolor(sp_name,sp_spec,"","",true)
+ sp_type = "named"
end
- local c = c_list[sp_name] -- string or attribute
- local v = c and attributes.colors.value(c)
- if v then
- -- all=1 gray=2 rgb=3 cmyk=4
- local colorspace = v[1]
- local f = cs[1]
- if colorspace == 2 then
- local s = f*v[2]
- c_b, c_a = checked_color_pair(f_gray,s,s)
- elseif colorspace == 3 then
- local r, g, b = f*v[3], f*v[4], f*v[5]
- c_b, c_a = checked_color_pair(f_rgb,r,g,b,r,g,b)
- elseif colorspace == 4 or colorspace == 1 then
- local c, m, y, k = f*v[6], f*v[7], f*v[8], f*v[9]
- c_b, c_a = checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
- else
- local s = f*v[2]
- c_b, c_a = checked_color_pair(f_gray,s,s)
+ if sp_type == "named" then
+ -- we might move this to another namespace .. also, named can be a spotcolor
+ -- so we need to check for that too ... also we need to resolve indirect
+ -- colors so we might need the second pass for this (draw dots with \MPcolor)
+ if not tr_alternative then
+ -- todo: sp_name is not yet registered at this time
+ local t = t_list[sp_name] -- string or attribute
+ local v = t and transparencyvalue(t)
+ if v then
+ before[#before+1] = formatters["/Tr%s gs"](registertransparency(nil,v[1],v[2],true))
+ after[#after+1] = "/Tr0 gs" -- outertransparency
+ end
+ end
+ local c = c_list[sp_name] -- string or attribute
+ local v = c and colorvalue(c)
+ if v then
+ -- all=1 gray=2 rgb=3 cmyk=4
+ local colorspace = v[1]
+ local f = cs[1]
+ if colorspace == 2 then
+ local s = f*v[2]
+ c_b, c_a = checked_color_pair(f_gray,s,s)
+ elseif colorspace == 3 then
+ local r, g, b = f*v[3], f*v[4], f*v[5]
+ c_b, c_a = checked_color_pair(f_rgb,r,g,b,r,g,b)
+ elseif colorspace == 4 or colorspace == 1 then
+ local c, m, y, k = f*v[6], f*v[7], f*v[8], f*v[9]
+ c_b, c_a = checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
+ elseif colorspace == 5 then
+ -- not all viewers show the fractions ok
+ local name = v[10]
+ local value = split(v[13],",")
+ if f ~= 1 then
+ for i=1,#value do
+ value[i] = f * (tonumber(value[i]) or 1)
+ end
+ end
+ value = concat(value," ")
+ c_b, c_a = checked_color_pair(f_spot,name,name,value,value)
+ else
+ local s = f*v[2]
+ c_b, c_a = checked_color_pair(f_gray,s,s)
+ end
end
end
- --
end
if c_a and c_b then
before[#before+1] = c_b
- after[#after+1] = c_a
+ after [#after +1] = c_a
end
end
end
diff --git a/tex/context/base/mlib-pps.mkiv b/tex/context/base/mlib-pps.mkiv
index e16827585..07ac84b8d 100644
--- a/tex/context/base/mlib-pps.mkiv
+++ b/tex/context/base/mlib-pps.mkiv
@@ -33,55 +33,43 @@
\newbox \MPtextbox
\newtoks\everyMPLIBsettext % not used
-% \def\MPLIBsettext#1% #2%
-% {\dowithnextbox{\ctxlua{metapost.settext(\number\nextbox,#1)}}\hbox}
-%
-% \def\MPLIBresettexts
-% {\ctxlua{metapost.resettextexts()}}
-%
-% \newconditional\MPLIBtextgetdone
-%
-% \def\MPLIBsettext#1% #2%
-% {\ifconditional\MPLIBtextgetdone
-% \else
-% \cldcontext{metapost.tex.get()}% MPenvironments are depricated
-% \settrue\MPLIBtextgetdone % no \global needed
-% \fi
-% \dowithnextbox{\ctxlua{metapost.settext(\number\nextbox,#1)}}\hbox}
-%
-% \def\MPLIBresettexts
-% {\ctxlua{metapost.resettextexts()}%
-% \setfalse\MPLIBtextgetdone}
-
\def\doMPLIBflushenvironment
{%\writestatus\m!metapost{flushing environment}%
- \cldcontext{metapost.tex.get()}%
+ \clf_mptexget
    \let\MPLIBflushenvironment\relax}% MPenvironments are deprecated}
\let\MPLIBflushenvironment\doMPLIBflushenvironment
-\def\MPLIBsettext#1% #2%
+\unexpanded\def\MPLIBsetNtext#1% #2% box text
+ {\MPLIBflushenvironment
+ \dowithnextbox{\clf_mpsettext\nextbox #1}\hbox\bgroup
+ \meta_set_current_color
+ \let\MPLIBflushenvironment\doMPLIBflushenvironment
+ \let\next} % gobble open brace
+
+\unexpanded\def\MPLIBsetCtext#1#2% #3% box colorspec text
{\MPLIBflushenvironment
- \dowithnextbox{\ctxlua{metapost.settext(\number\nextbox,#1)}}\hbox\bgroup
+ \dowithnextbox{\clf_mpsettext\nextbox #1}\hbox\bgroup
+ \directcolored[#2]%
+ \meta_set_current_color % so, textcolor wins !
\let\MPLIBflushenvironment\doMPLIBflushenvironment
\let\next} % gobble open brace
-\def\MPLIBresettexts
- {\ctxlua{metapost.resettextexts()}}
+\let\MPLIBsettext\MPLIBsetNtext
-\def\MPLIBgettextscaled#1#2#3% why a copy .. can be used more often
- {\ctxlua{metapost.gettext(\number\MPtextbox,#1)}%
+\unexpanded\def\MPLIBgettextscaled#1#2#3% why a copy .. can be used more often
+ {\clf_mpgettext\MPtextbox #1%
\vbox to \zeropoint{\vss\hbox to \zeropoint{\scale[\c!sx=#2,\c!sy=#3]{\raise\dp\MPtextbox\box\MPtextbox}\forcecolorhack\hss}}}
-\def\MPLIBfigure#1#2%
+\unexpanded\def\MPLIBfigure#1#2%
{\setbox\scratchbox\hbox{\externalfigure[#1][\c!mask=#2]}%
- \ctxlua{metapost.edefsxsy(\number\wd\scratchbox,\number\ht\scratchbox,0)}%
+ \clf_mpsetsxsy\wd\scratchbox\ht\scratchbox\zeropoint
\vbox to \zeropoint{\vss\hbox to \zeropoint{\scale[\c!sx=\sx,\c!sy=\sy]{\box\scratchbox}\hss}}}
% horrible (we could inline scale and matrix code):
-\def\MPLIBgettextscaledcm#1#2#3#4#5#6#7#8#9% 2-7: sx,rx,ry,sy,tx,ty
- {\ctxlua{metapost.gettext(\number\MPtextbox,#1)}%
+\unexpanded\def\MPLIBgettextscaledcm#1#2#3#4#5#6#7#8#9% 2-7: sx,rx,ry,sy,tx,ty
+ {\clf_mpgettext\MPtextbox #1%
\setbox\MPbox\hbox\bgroup
\dotransformnextbox{#2}{#3}{#4}{#5}{#6}{#7}% does push pop ... will be changed to proper lua call (avoid small numbers)
\vbox to \zeropoint\bgroup
@@ -103,7 +91,7 @@
\smashbox\MPbox
\box\MPbox}
-\def\MPLIBgraphictext#1% use at mp end
+\unexpanded\def\MPLIBgraphictext#1% use at mp end
{\startTEXpage[\c!scale=10000]#1\stopTEXpage}
%D \startbuffer
@@ -132,7 +120,7 @@
%D
%D \typebuffer \startlinecorrection \getbuffer \stoplinecorrection
-\def\MPLIBpositionwhd#1#2#3#4#5% bp !
+\unexpanded\def\MPLIBpositionwhd#1#2#3#4#5% bp !
{\dosavepositionwhd{#1}\zerocount{#2\onebasepoint}{#3\onebasepoint}{#4\onebasepoint}{#5\onebasepoint}\zeropoint}
% \def\MPLIBextrapass#1%
@@ -158,9 +146,9 @@
\box\scratchbox
\endgroup}
-\def\MPLIBstartgroup#1#2#3#4#5#6% isolated 0/1, knockout 0/1 llx lly urx ury
+\unexpanded\def\MPLIBstartgroup#1#2#3#4#5#6% isolated 0/1, knockout 0/1 llx lly urx ury
{\begingroup
\setbox\scratchbox\hbox\bgroup
- \def\MPLIBstopgroup{\doMPLIBstopgroup{#1}{#2}{#3}{#4}{#5}{#6}}}
+ \unexpanded\def\MPLIBstopgroup{\doMPLIBstopgroup{#1}{#2}{#3}{#4}{#5}{#6}}}
\protect \endinput
diff --git a/tex/context/base/mlib-run.lua b/tex/context/base/mlib-run.lua
index f30ed0c9f..121c32ae9 100644
--- a/tex/context/base/mlib-run.lua
+++ b/tex/context/base/mlib-run.lua
@@ -44,12 +44,12 @@ local texerrormessage = logs.texerrormessage
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
+local formatters = string.formatters
+
local mplib = mplib
metapost = metapost or { }
local metapost = metapost
-local mplibone = tonumber(mplib.version()) <= 1.50
-
metapost.showlog = false
metapost.lastlog = ""
metapost.collapse = true -- currently mplib cannot deal with begingroup/endgroup mismatch in stepwise processing
@@ -84,77 +84,74 @@ local mpbasepath = lpeg.instringchecker(P("/metapost/") * (P("context") + P("bas
-- mplib has no real io interface so we have a different mechanism than
-- tex (as soon as we have more control, we will use the normal code)
-
-local finders = { }
-mplib.finders = finders
-
+--
-- for some reason mp sometimes calls this function twice which is inefficient
-- but we cannot catch this
-local function preprocessed(name)
- if not mpbasepath(name) then
- -- we could use the via file but we don't have a complete io interface yet
- local data, found, forced = metapost.checktexts(io.loaddata(name) or "")
- if found then
- local temp = luatex.registertempfile(name,true)
- io.savedata(temp,data)
- return temp
+do
+
+ local finders = { }
+ mplib.finders = finders -- also used in meta-lua.lua
+
+ local new_instance = mplib.new
+ local resolved_file = resolvers.findfile
+
+ local function preprocessed(name)
+ if not mpbasepath(name) then
+ -- we could use the via file but we don't have a complete io interface yet
+ local data, found, forced = metapost.checktexts(io.loaddata(name) or "")
+ if found then
+ local temp = luatex.registertempfile(name,true)
+ io.savedata(temp,data)
+ return temp
+ end
end
+ return name
end
- return name
-end
-mplib.preprocessed = preprocessed -- helper
+ mplib.preprocessed = preprocessed -- helper
-finders.file = function(specification,name,mode,ftype)
- return preprocessed(resolvers.findfile(name,ftype))
-end
+ local function validftype(ftype)
+ if ftype == "" then
+ -- whatever
+ elseif ftype == 0 then
+ -- mplib bug
+ else
+ return ftype
+ end
+ end
-local function i_finder(name,mode,ftype) -- fake message for mpost.map and metafun.mpvi
- local specification = url.hashed(name)
- local finder = finders[specification.scheme] or finders.file
- return finder(specification,name,mode,ftype)
-end
+ finders.file = function(specification,name,mode,ftype)
+ return preprocessed(resolvers.findfile(name,validftype(ftype)))
+ end
-local function o_finder(name,mode,ftype)
- -- report_metapost("output file %a, mode %a, ftype %a",name,mode,ftype)
- return name
-end
+ local function i_finder(name,mode,ftype) -- fake message for mpost.map and metafun.mpvi
+ local specification = url.hashed(name)
+ local finder = finders[specification.scheme] or finders.file
+ return finder(specification,name,mode,validftype(ftype))
+ end
-local function finder(name, mode, ftype)
- if mode == "w" then
- return o_finder(name,mode,ftype)
- else
- return i_finder(name,mode,ftype)
+ local function o_finder(name,mode,ftype)
+ return name
end
-end
-local i_limited = false
-local o_limited = false
+ o_finder = sandbox.register(o_finder,sandbox.filehandlerone,"mplib output finder")
-directives.register("system.inputmode", function(v)
- if not i_limited then
- local i_limiter = io.i_limiter(v)
- if i_limiter then
- i_finder = i_limiter.protect(i_finder)
- i_limited = true
- end
+ local function finder(name,mode,ftype)
+ return (mode == "w" and o_finder or i_finder)(name,mode,validftype(ftype))
end
-end)
-
-directives.register("system.outputmode", function(v)
- if not o_limited then
- local o_limiter = io.o_limiter(v)
- if o_limiter then
- o_finder = o_limiter.protect(o_finder)
- o_limited = true
- end
+
+ function mplib.new(specification)
+ specification.find_file = finder -- so we block an overload
+ return new_instance(specification)
end
-end)
--- -- --
+ mplib.finder = finder
+
+end
-metapost.finder = finder
+local new_instance = mplib.new
+local find_file = mplib.finder
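-- Illustrative note (not part of the patch): every instance created through the
-- wrapped mplib.new now uses this finder, so read requests go through the resolver
-- and get preprocessed, while write requests are only passed to the sandbox:
--
--   finder("metafun.mpiv","r","mp")   -- resolved via findfile, btex/etex rewritten
--   finder("foo.log","w")             -- returned as is (subject to the sandbox)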
function metapost.reporterror(result)
if not result then
@@ -182,173 +179,93 @@ function metapost.reporterror(result)
return true
end
-if mplibone then
-
- report_metapost("fatal error: mplib is too old")
-
- os.exit()
-
- -- local preamble = [[
- -- boolean mplib ; mplib := true ;
- -- string mp_parent_version ; mp_parent_version := "%s" ;
- -- input "%s" ; dump ;
- -- ]]
- --
- -- metapost.parameters = {
- -- hash_size = 100000,
- -- main_memory = 4000000,
- -- max_in_open = 50,
- -- param_size = 100000,
- -- }
- --
- -- function metapost.make(name, target, version)
- -- starttiming(mplib)
- -- target = file.replacesuffix(target or name, "mem") -- redundant
- -- local mpx = mplib.new ( table.merged (
- -- metapost.parameters,
- -- {
- -- ini_version = true,
- -- find_file = finder,
- -- job_name = file.removesuffix(target),
- -- }
- -- ) )
- -- if mpx then
- -- starttiming(metapost.exectime)
- -- local result = mpx:execute(format(preamble,version or "unknown",name))
- -- stoptiming(metapost.exectime)
- -- mpx:finish()
- -- end
- -- stoptiming(mplib)
- -- end
- --
- -- function metapost.load(name)
- -- starttiming(mplib)
- -- local mpx = mplib.new ( table.merged (
- -- metapost.parameters,
- -- {
- -- ini_version = false,
- -- mem_name = file.replacesuffix(name,"mem"),
- -- find_file = finder,
- -- -- job_name = "mplib",
- -- }
- -- ) )
- -- local result
- -- if not mpx then
- -- result = { status = 99, error = "out of memory"}
- -- end
- -- stoptiming(mplib)
- -- return mpx, result
- -- end
- --
- -- function metapost.checkformat(mpsinput)
- -- local mpsversion = environment.version or "unset version"
- -- local mpsinput = file.addsuffix(mpsinput or "metafun", "mp")
- -- local mpsformat = file.removesuffix(file.basename(texconfig.formatname or (tex and tex.formatname) or mpsinput))
- -- local mpsbase = file.removesuffix(file.basename(mpsinput))
- -- if mpsbase ~= mpsformat then
- -- mpsformat = mpsformat .. "-" .. mpsbase
- -- end
- -- mpsformat = file.addsuffix(mpsformat, "mem")
- -- local mpsformatfullname = caches.getfirstreadablefile(mpsformat,"formats","metapost") or ""
- -- if mpsformatfullname ~= "" then
- -- report_metapost("loading %a from %a", mpsinput, mpsformatfullname)
- -- local mpx, result = metapost.load(mpsformatfullname)
- -- if mpx then
- -- local result = mpx:execute("show mp_parent_version ;")
- -- if not result.log then
- -- metapost.reporterror(result)
- -- else
- -- local version = match(result.log,">> *(.-)[\n\r]") or "unknown"
- -- version = gsub(version,"[\'\"]","")
- -- if version ~= mpsversion then
- -- report_metapost("version mismatch: %s <> %s", version or "unknown", mpsversion)
- -- else
- -- return mpx
- -- end
- -- end
- -- else
- -- report_metapost("error in loading %a from %a", mpsinput, mpsformatfullname)
- -- metapost.reporterror(result)
- -- end
- -- end
- -- local mpsformatfullname = caches.setfirstwritablefile(mpsformat,"formats")
- -- report_metapost("making %a into %a", mpsinput, mpsformatfullname)
- -- metapost.make(mpsinput,mpsformatfullname,mpsversion) -- somehow return ... fails here
- -- if lfs.isfile(mpsformatfullname) then
- -- report_metapost("loading %a from %a", mpsinput, mpsformatfullname)
- -- return metapost.load(mpsformatfullname)
- -- else
- -- report_metapost("problems with %a from %a", mpsinput, mpsformatfullname)
- -- end
- -- end
-
-else
-
- -- let end = relax ;
-
- local preamble = [[
- boolean mplib ; mplib := true ;
- let dump = endinput ;
- input "%s" ;
- ]]
-
- local methods = {
- double = "double",
- scaled = "scaled",
- default = "scaled",
- decimal = false, -- for the moment
- }
+local f_preamble = formatters [ [[
+ boolean mplib ; mplib := true ;
+ let dump = endinput ;
+ input "%s" ;
+]] ]
+
+local methods = {
+ double = "double",
+ scaled = "scaled",
+ binary = "binary",
+ decimal = "decimal",
+ default = "scaled",
+}
- function metapost.load(name,method)
- starttiming(mplib)
- method = method and methods[method] or "scaled"
- local mpx = mplib.new {
- ini_version = true,
- find_file = finder,
- math_mode = method,
- }
- report_metapost("initializing number mode %a",method)
- local result
- if not mpx then
- result = { status = 99, error = "out of memory"}
- else
- result = mpx:execute(format(preamble, file.addsuffix(name,"mp"))) -- addsuffix is redundant
- end
- stoptiming(mplib)
- metapost.reporterror(result)
- return mpx, result
+function metapost.runscript(code)
+ return code
+end
+
+function metapost.scripterror(str)
+ report_metapost("script error: %s",str)
+end
+
+-- todo: random_seed
+
+local f_textext = formatters[ [[rawtextext("%s")]] ]
+
+function metapost.maketext(s,mode)
+ if mode and mode == 1 then
+ -- report_metapost("ignoring verbatimtex: %s",s)
+ else
+ -- report_metapost("handling btex ... etex: %s",s)
+ s = gsub(s,'"','"&ditto&"')
+ return f_textext(s)
end
+end
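-- For illustration only (an assumed call, not part of the patch): with the handler
-- above, a btex ... etex fragment containing double quotes comes out ditto-safe, e.g.
--
--   metapost.maketext('a "quoted" word', 0)
--     --> rawtextext("a "&ditto&"quoted"&ditto&" word")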
- function metapost.checkformat(mpsinput,method)
- local mpsversion = environment.version or "unset version"
- local mpsinput = mpsinput or "metafun"
- local foundfile = ""
- if file.suffix(mpsinput) ~= "" then
- foundfile = finder(mpsinput) or ""
- end
- if foundfile == "" then
- foundfile = finder(file.replacesuffix(mpsinput,"mpvi")) or ""
- end
- if foundfile == "" then
- foundfile = finder(file.replacesuffix(mpsinput,"mpiv")) or ""
- end
- if foundfile == "" then
- foundfile = finder(file.replacesuffix(mpsinput,"mp")) or ""
- end
- if foundfile == "" then
- report_metapost("loading %a fails, format not found",mpsinput)
+function metapost.load(name,method)
+ starttiming(mplib)
+ method = method and methods[method] or "scaled"
+ local mpx = new_instance {
+ ini_version = true,
+ math_mode = method,
+ run_script = metapost.runscript,
+ script_error = metapost.scripterror,
+ make_text = metapost.maketext,
+ extensions = 1,
+ }
+ report_metapost("initializing number mode %a",method)
+ local result
+ if not mpx then
+ result = { status = 99, error = "out of memory"}
+ else
+ result = mpx:execute(f_preamble(file.addsuffix(name,"mp"))) -- addsuffix is redundant
+ end
+ stoptiming(mplib)
+ metapost.reporterror(result)
+ return mpx, result
+end
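-- A minimal usage sketch (arguments assumed; normally metapost.checkformat locates
-- the file and calls this):
--
--   local mpx, result = metapost.load("metafun", "double")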
+
+function metapost.checkformat(mpsinput,method)
+ local mpsversion = environment.version or "unset version"
+ local mpsinput = mpsinput or "metafun"
+ local foundfile = ""
+ if file.suffix(mpsinput) ~= "" then
+ foundfile = find_file(mpsinput) or ""
+ end
+ if foundfile == "" then
+ foundfile = find_file(file.replacesuffix(mpsinput,"mpvi")) or ""
+ end
+ if foundfile == "" then
+ foundfile = find_file(file.replacesuffix(mpsinput,"mpiv")) or ""
+ end
+ if foundfile == "" then
+ foundfile = find_file(file.replacesuffix(mpsinput,"mp")) or ""
+ end
+ if foundfile == "" then
+ report_metapost("loading %a fails, format not found",mpsinput)
+ else
+ report_metapost("loading %a as %a using method %a",mpsinput,foundfile,method or "default")
+ local mpx, result = metapost.load(foundfile,method)
+ if mpx then
+ return mpx
else
- report_metapost("loading %a as %a using method %a",mpsinput,foundfile,method or "default")
- local mpx, result = metapost.load(foundfile,method)
- if mpx then
- return mpx
- else
- report_metapost("error in loading %a",mpsinput)
- metapost.reporterror(result)
- end
+ report_metapost("error in loading %a",mpsinput)
+ metapost.reporterror(result)
end
end
-
end
function metapost.unload(mpx)
@@ -398,27 +315,39 @@ function metapost.reset(mpx)
end
end
-local mp_inp, mp_log, mp_tag = { }, { }, 0
+local mp_tra = { }
+local mp_tag = 0
-- key/values
+if not metapost.initializescriptrunner then
+ function metapost.initializescriptrunner() end
+end
+
function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass, askedfig)
local converted, result = false, { }
if type(mpx) == "string" then
mpx = metapost.format(mpx) -- goody
end
if mpx and data then
+ local tra = nil
starttiming(metapost)
+ metapost.initializescriptrunner(mpx,trialrun)
if trace_graphics then
- if not mp_inp[mpx] then
+ tra = mp_tra[mpx]
+ if not tra then
mp_tag = mp_tag + 1
local jobname = tex.jobname
- mp_inp[mpx] = io.open(format("%s-mplib-run-%03i.mp", jobname,mp_tag),"w")
- mp_log[mpx] = io.open(format("%s-mplib-run-%03i.log",jobname,mp_tag),"w")
+ tra = {
+ inp = io.open(formatters["%s-mplib-run-%03i.mp"] (jobname,mp_tag),"w"),
+ log = io.open(formatters["%s-mplib-run-%03i.log"](jobname,mp_tag),"w"),
+ }
+ mp_tra[mpx] = tra
end
- local banner = format("%% begin graphic: n=%s, trialrun=%s, multipass=%s, isextrapass=%s\n\n", metapost.n, tostring(trialrun), tostring(multipass), tostring(isextrapass))
- mp_inp[mpx]:write(banner)
- mp_log[mpx]:write(banner)
+ local banner = formatters["%% begin graphic: n=%s, trialrun=%s, multipass=%s, isextrapass=%s\n\n"](
+ metapost.n, tostring(trialrun), tostring(multipass), tostring(isextrapass))
+ tra.inp:write(banner)
+ tra.log:write(banner)
end
if type(data) == "table" then
-- this hack is needed because the library currently barks on \n\n
@@ -455,17 +384,17 @@ function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass,
-- d = string.gsub(d,"\r","")
if d then
if trace_graphics then
- mp_inp[mpx]:write(format("\n%% begin snippet %s\n",i))
- mp_inp[mpx]:write(d)
- mp_inp[mpx]:write(format("\n%% end snippet %s\n",i))
+ tra.inp:write(formatters["\n%% begin snippet %s\n"](i))
+ tra.inp:write(d)
+ tra.inp:write(formatters["\n%% end snippet %s\n"](i))
end
starttiming(metapost.exectime)
- result = mpx:execute(d)
+ result = mpx:execute(d) -- some day we will use a coroutine with textexts
stoptiming(metapost.exectime)
if trace_graphics and result then
local str = result.log or result.error
if str and str ~= "" then
- mp_log[mpx]:write(str)
+ tra.log:write(str)
end
end
if not metapost.reporterror(result) then
@@ -489,7 +418,7 @@ function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass,
data = "tracingall;" .. data
end
if trace_graphics then
- mp_inp[mpx]:write(data)
+ tra.inp:write(data)
end
starttiming(metapost.exectime)
result = mpx:execute(data)
@@ -497,7 +426,7 @@ function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass,
if trace_graphics and result then
local str = result.log or result.error
if str and str ~= "" then
- mp_log[mpx]:write(str)
+ tra.log:write(str)
end
end
-- todo: error message
@@ -517,8 +446,8 @@ function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass,
end
if trace_graphics then
local banner = "\n% end graphic\n\n"
- mp_inp[mpx]:write(banner)
- mp_log[mpx]:write(banner)
+ tra.inp:write(banner)
+ tra.log:write(banner)
end
stoptiming(metapost)
end
@@ -580,7 +509,7 @@ function metapost.directrun(formatname,filename,outputformat,astable,mpdata)
else
output = figures[v]:svg() -- (3) for prologues
end
- local outname = format("%s-%s.%s",basename,v,outputformat)
+ local outname = formatters["%s-%s.%s"](basename,v,outputformat)
report_metapost("saving %s bytes in %a",#output,outname)
io.savedata(outname,output)
end
@@ -613,7 +542,7 @@ function metapost.quickanddirty(mpxformat,data)
stopfigure = function()
end
}
- local data = format("; beginfig(1) ;\n %s\n ; endfig ;",data)
+ local data = formatters["; beginfig(1) ;\n %s\n ; endfig ;"](data)
metapost.process(mpxformat, { data }, false, flusher, false, false, "all")
if code then
return {
@@ -625,3 +554,20 @@ function metapost.quickanddirty(mpxformat,data)
report_metapost("invalid quick and dirty run")
end
end
+
+function metapost.getstatistics(memonly)
+ if memonly then
+ local n, m = 0, 0
+ for name, mpx in next, mpxformats do
+ n = n + 1
+ m = m + mpx:statistics().memory
+ end
+ return n, m
+ else
+ local t = { }
+ for name, mpx in next, mpxformats do
+ t[name] = mpx:statistics()
+ end
+ return t
+ end
+end
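-- A minimal usage sketch for the new statistics helper (the reporting line is assumed,
-- not part of the patch):
--
--   local n, m = metapost.getstatistics(true) -- number of instances, summed memory
--   report_metapost("%s mplib instances use %s memory", n, m)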
diff --git a/tex/context/base/mtx-context-arrange.tex b/tex/context/base/mtx-context-arrange.tex
index 49920293f..fb53406d8 100644
--- a/tex/context/base/mtx-context-arrange.tex
+++ b/tex/context/base/mtx-context-arrange.tex
@@ -27,10 +27,11 @@
% --printformat : 2UP, etc
% --paperformat=spec : paper*print or paperxprint
%
-% example: context --extra=arrange --printformat=2UP --paperformat=A4,A3,landscape myfile
+% example: context --extra=arrange --printformat=2UP --paperformat=A4*A3,landscape myfile
%
% end help
+
\input mtx-context-common.tex
\doifdocumentargument {paperoffset} {
@@ -46,6 +47,7 @@
\setdocumentargument{sided}{singlesided}
}
+
\setuppapersize
[\getdocumentargument{paperformat_paper}]
[\getdocumentargument{paperformat_print}]
@@ -95,16 +97,17 @@
\starttext
\startluacode
- local format = string.format
- local fprint = function(...) tex.sprint(tex.ctxcatcodes,format(...)) end
-
- if #document.files > 0 then
- if document.arguments.sort then
- table.sort(document.files)
+ local arguments = document.arguments
+ local files = document.files
+ local noffiles = #files
+ if noffiles > 0 then
+ if arguments.sort then
+ table.sort(files)
end
- local emptypages = document.arguments.addempty or ""
- local textwidth = document.arguments.textwidth or "0cm"
- for _, filename in ipairs(document.files) do
+ local emptypages = arguments.addempty or ""
+ local textwidth = arguments.textwidth or "0cm"
+ for i=1,noffiles do
+ local filename = files[i]
if not string.find(filename,"^mtx%-context%-") then
context.insertpages (
{ filename },
@@ -114,7 +117,7 @@
end
end
else
- fprint("no files given")
+ context("no files given")
end
\stopluacode
diff --git a/tex/context/base/mtx-context-listing.tex b/tex/context/base/mtx-context-listing.tex
index d69db9934..583aa2b8f 100644
--- a/tex/context/base/mtx-context-listing.tex
+++ b/tex/context/base/mtx-context-listing.tex
@@ -20,16 +20,24 @@
% --sort : sort filenames first
% --topspace=dimension : distance above first line
% --backspace=dimension : distance before left margin
-% --pretty : pretty print comform suffix (temporarily disabled)
+% --pretty : pretty print conforming to suffix
+% --scite : pretty print conforming to suffix using the scite lexer
% --bodyfont=list : additional bodyfont settings
% --paperformat=spec : paper*print or paperxprint
+% --compact : small margins, small font
%
% end help
\input mtx-context-common.tex
+\doifdocumentargument {compact} {
+ \setdocumentargument{topspace} {5mm}
+ \setdocumentargument{backspace}{5mm}
+ \setdocumentargument{bodyfont} {8pt}
+}
+
\setupbodyfont
- [11pt,tt,\getdocumentargument{bodyfont}]
+ [dejavu,11pt,tt,\getdocumentargument{bodyfont}] % dejavu is more complete
\setuptyping
[lines=yes]
@@ -70,16 +78,22 @@
}
local pattern = document.arguments.pattern
+ local scite = document.arguments.scite
if pattern then
document.files = dir.glob(pattern)
end
+ if scite then
+ context.usemodule { "scite" }
+ end
+
if #document.files > 0 then
if document.arguments.sort then
table.sort(document.files)
end
- for _, filename in ipairs(document.files) do
+ for i=1,#document.files do
+ local filename = document.files[i]
if not string.find(filename,"^mtx%-context%-") then
local pretty = document.arguments.pretty
if pretty == true then
@@ -94,19 +108,24 @@
{ function() context.detokenize(pattern and filename or file.basename(filename)) return true end },
{ function() context.pagenumber() return true end }
)
- if pretty then
+ if scite then
+ context.scitefile { filename } -- here { }
+ elseif pretty then
if type(pretty) ~= "string" or pretty == "" then
context.setuptyping { option = "color" }
else
context.setuptyping { option = types[pretty] or pretty }
end
+ context.typefile(filename)
+ else
+ context.typefile(filename)
end
- context.typefile(filename)
end
end
else
context("no files given")
end
+
\stopluacode
\stoptext
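Following the help convention of these mtx-context-* files, a plausible invocation that
exercises the new switches would be (the file name is made up):

% example: context --extra=listing --compact --scite mylisting.lua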
diff --git a/tex/context/base/mtx-context-precache.tex b/tex/context/base/mtx-context-precache.tex
new file mode 100644
index 000000000..9cbb46cf2
--- /dev/null
+++ b/tex/context/base/mtx-context-precache.tex
@@ -0,0 +1,161 @@
+%D \module
+%D [ file=mtx-context-precache,
+%D version=2014.12.24,
+%D title=\CONTEXT\ Extra Trickery,
+%D subtitle=Precaching Fonts,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% begin help
+%
+% usage: context --extra=precache [no options yet]
+%
+% example: context --extra=precache
+%
+% end help
+
+\startluacode
+
+local lower = string.lower
+local filesuffix = file.suffix
+local findfile = resolvers.find_file
+
+local report = logs.reporter("fonts","precache")
+
+function fonts.names.precache()
+ local handlers = fonts.handlers
+ if not handlers then
+ report("no handlers available")
+ return
+ end
+ local otfloader = handlers.otf and handlers.otf.load
+ local afmloader = handlers.afm and handlers.afm.load
+ if not (otfloader or afmloader) then
+ report("no otf or afm handler available")
+ return
+ end
+ fonts.names.load()
+ local data = fonts.names.data
+ if not data then
+ report("no font data available")
+ return
+ end
+ local specifications = data.specifications
+ if not specifications then
+ report("no font specifications available")
+ return
+ end
+ local n = 0
+ for i=1,#specifications do
+ local specification = specifications[i]
+ local filename = specification.filename
+ local cleanfilename = specification.cleanfilename
+ local foundfile = findfile(filename)
+ if foundfile and foundfile ~= "" then
+ local suffix = lower(filesuffix(foundfile))
+ if suffix == "otf" or suffix == "ttf" then
+ if otfloader then
+ report("caching otf file: %s",foundfile)
+ otfloader(foundfile) -- todo: ttc/sub
+ n = n + 1
+ end
+ elseif suffix == "afm" then
+ if afmloader then
+ report("caching afm file: %s",foundfile)
+ afmloader(foundfile)
+ n = n + 1
+ end
+ end
+ end
+ end
+ report("%s files out of %s cached",n,#specifications)
+end
+
+\stopluacode
+
+\starttext
+
+\setuppapersize
+ [A4,landscape]
+
+\setuplayout
+ [width=middle,
+ height=middle,
+ footer=0pt,
+ header=1cm,
+ headerdistance=0cm,
+ backspace=5mm,
+ topspace=5mm]
+
+\setupbodyfont
+ [dejavu,6pt,tt]
+
+\startmode[*first]
+ \startluacode
+ fonts.names.precache()
+ \stopluacode
+\stopmode
+
+\startluacode
+ fonts.names.load()
+
+ local specifications = fonts.names.data.specifications
+
+ local sorted = { }
+ local hashed = { }
+
+ for i=1,#specifications do
+ local filename = specifications[i].cleanfilename
+ sorted[i] = filename
+ hashed[filename] = i
+ end
+
+ table.sort(sorted)
+
+ local context = context
+ local basename = file.basename
+
+ local NC = context.NC
+ local NR = context.NR
+ local HL = context.HL
+ local bold = context.bold
+
+ context.starttabulate { "||||||||||" }
+ HL()
+ NC() bold("format")
+ NC() bold("cleanfilename")
+ NC() bold("filename")
+ -- NC() bold("familyname")
+ -- NC() bold("fontname")
+ NC() bold("fullname")
+ NC() bold("rawname")
+ NC() bold("style")
+ NC() bold("variant")
+ NC() bold("weight")
+ NC() bold("width")
+ NC() NR()
+ HL()
+ for i=1,#sorted do
+ local specification = specifications[hashed[sorted[i]]]
+ NC() context(specification.format)
+ NC() context(specification.cleanfilename)
+ NC() context(basename(specification.filename))
+ -- NC() context(specification.familyname)
+ -- NC() context(specification.fontname)
+ NC() context(specification.fullname)
+ NC() context(specification.rawname)
+ NC() context(specification.style)
+ NC() context(specification.variant)
+ NC() context(specification.weight)
+ NC() context(specification.width)
+ NC() NR()
+ end
+ context.stoptabulate()
+\stopluacode
+
+\stoptext
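A quick sanity check of what the precache run will walk over, using only calls that
appear in the new module above (a sketch, not part of the patch):

    \startluacode
        fonts.names.load()
        logs.reporter("fonts","precache")("%s specifications in the database",
            #fonts.names.data.specifications)
    \stopluacode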
diff --git a/tex/context/base/mult-aux.lua b/tex/context/base/mult-aux.lua
index bdc626d4c..353b5e69c 100644
--- a/tex/context/base/mult-aux.lua
+++ b/tex/context/base/mult-aux.lua
@@ -54,7 +54,7 @@ function namespaces.define(namespace,settings)
if trace_namespaces then
report_namespaces("namespace %a for %a uses parent %a",namespace,name,parent)
end
- if not find(parent,"\\") then
+ if not find(parent,"\\",1,true) then
parent = "\\" .. prefix .. parent
-- todo: check if defined
end
@@ -154,3 +154,15 @@ function namespaces.list()
local keys = { "type", "name", "comment", "version", "parent", "definition", "setup", "style" }
utilities.formatters.list(data,"namespace",keys)
end
+
+
+interfaces.implement {
+ name = "definenamespace",
+ arguments = { "string", "string" },
+ actions = namespaces.define
+}
+
+interfaces.implement {
+ name = "listnamespaces",
+ actions = namespaces.list
+}
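These two implement calls follow the pattern used throughout this patch: a function
registered at the Lua end becomes reachable from the macro end as a \clf_... command
(here \clf_definenamespace and \clf_listnamespaces, used in the mult-aux.mkiv changes
below). A sketch of the same pattern with a hypothetical name:

    interfaces.implement {
        name      = "tracenamespace",  -- hypothetical, for illustration only
        arguments = { "string" },
        actions   = function(name) logs.reporter("interface","namespaces")(name) end,
    }
    -- would then be callable from the macro end as \clf_tracenamespace{...}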
diff --git a/tex/context/base/mult-aux.mkiv b/tex/context/base/mult-aux.mkiv
index 6c44a0ec9..b69d7f370 100644
--- a/tex/context/base/mult-aux.mkiv
+++ b/tex/context/base/mult-aux.mkiv
@@ -106,10 +106,14 @@
\doubleexpandafter\gobbleoneargument
\else
\mult_interfaces_get_parameters_assign#1==\empty\_e_o_p_
- \doubleexpandafter\mult_interfaces_get_parameters_item
+ % \doubleexpandafter\mult_interfaces_get_parameters_item % saves skipping when at end
\fi\fi#2}
-\def\mult_interfaces_get_parameters_error#1#2#3%
+\def\mult_interfaces_get_parameters_error#1#2% #3%
+ {\mult_interfaces_get_parameters_error_indeed{#1}{#2}%
+ \gobbleoneargument}
+
+\def\mult_interfaces_get_parameters_error_indeed#1#2%
{\showassignerror{#2}{\the\inputlineno\space(#1)}}
\def\mult_interfaces_get_parameters_assign#1=#2=#3#4\_e_o_p_
@@ -118,9 +122,54 @@
\else\ifx#3\empty
\doubleexpandafter\mult_interfaces_get_parameters_error
\else
- \doubleexpandafter\dosetvalue
+ \doubleexpandafter\mult_interfaces_def
\fi\fi
- \m_mult_interfaces_namespace{#1}{#2}}
+ \m_mult_interfaces_namespace{#1}{#2}%
+ \doubleexpandafter\mult_interfaces_get_parameters_item}
+
+\startinterface english
+
+ % some 10% faster
+
+ \let\mult_interfaces_get_parameters_error\undefined
+
+ \def\mult_interfaces_get_parameters_error_one#1\csname#2#3\endcsname#4%
+ {\mult_interfaces_get_parameters_error_indeed{#2}{#3}\iftrue}
+
+ \def\mult_interfaces_get_parameters_error_two#1\csname#2#3\endcsname#4%
+ {\mult_interfaces_get_parameters_error_indeed{#2}{#3}}
+
+ \def\mult_interfaces_get_parameters_assign#1=#2=#3#4\_e_o_p_
+ {\ifx\empty#1\empty
+ \mult_interfaces_get_parameters_error_one
+ \else\ifx#3\empty
+ \mult_interfaces_get_parameters_error_two
+ \else
+ \expandafter\def\csname\m_mult_interfaces_namespace#1\endcsname{#2}%
+ \fi\fi
+ \doubleexpandafter\mult_interfaces_get_parameters_item}
+
+ % interesting but not faster
+ %
+ % \def\mult_interfaces_get_parameters_error_one#1\m_mult_interfaces_namespace#2\fi\fi%
+ % {\mult_interfaces_get_parameters_error_indeed\m_mult_interfaces_namespace{#2}\m_mult_interfaces_namespace\s!dummy\fi}
+ %
+ % \def\mult_interfaces_get_parameters_error_two#1\m_mult_interfaces_namespace#2\fi\fi%
+ % {\mult_interfaces_get_parameters_error_indeed\m_mult_interfaces_namespace{#2}\m_mult_interfaces_namespace\s!dummy\fi\fi}
+ %
+ % \def\mult_interfaces_get_parameters_assign#1=#2=#3#4\_e_o_p_
+ % {\expandafter\def\csname
+ % \ifx\empty#1\empty
+ % \mult_interfaces_get_parameters_error_one
+ % \else\ifx#3\empty
+ % \mult_interfaces_get_parameters_error_two
+ % \else
+ % \m_mult_interfaces_namespace#1%
+ % \fi\fi
+ % \endcsname{#2}
+ % \doubleexpandafter\mult_interfaces_get_parameters_item}
+
+\stopinterface
\newif\ifassignment
@@ -132,6 +181,24 @@
% End of experimental code.
+\unexpanded\def\mult_interfaces_let #1#2{\expandafter\let \csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
+\unexpanded\def\mult_interfaces_lete#1#2{\expandafter\let \csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname\empty}
+\unexpanded\def\mult_interfaces_def #1#2{\expandafter\def \csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
+\unexpanded\def\mult_interfaces_edef#1#2{\expandafter\edef\csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
+\unexpanded\def\mult_interfaces_gdef#1#2{\expandafter\gdef\csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
+\unexpanded\def\mult_interfaces_xdef#1#2{\expandafter\xdef\csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
+
+\startinterface english
+
+ \unexpanded\def\mult_interfaces_let #1#2{\expandafter \let\csname#1#2\endcsname}
+ \unexpanded\def\mult_interfaces_lete#1#2{\expandafter \let\csname#1#2\endcsname\empty}
+ \unexpanded\def\mult_interfaces_def #1#2{\expandafter \def\csname#1#2\endcsname}
+ \unexpanded\def\mult_interfaces_edef#1#2{\expandafter\edef\csname#1#2\endcsname}
+ \unexpanded\def\mult_interfaces_gdef#1#2{\expandafter\gdef\csname#1#2\endcsname}
+ \unexpanded\def\mult_interfaces_xdef#1#2{\expandafter\xdef\csname#1#2\endcsname}
+
+\stopinterface
+
% the commented detokenized variant that backtracks ... needs testing usage first
%
% \let\whatever\relax
@@ -156,14 +223,30 @@
\def#8##1{\csname\ifcsname#1#2:##1\endcsname#1#2:##1\else\s!empty\fi\endcsname}%
\def#9##1{\csname#1#2:##1\endcsname}}
+% pre-expansion can be a bit faster but has hardly any effect on a normal run so let's go for
+% saving some memory
+%
+% \unexpanded\def\mult_interfaces_install_parameter_handler#1#2#3#4#5#6#7#8#9% inlining \csname*\endcsname is more efficient (#3 and #6 only)
+% {\ifx#2\relax\let#2\empty\fi % it is hardly faster but produces less expansion tracing
+% %\def#3##1{\csname#4{#1#2}{##1}\endcsname}%
+% \edef#3##1{\noexpand\csname\noexpand\ifcsname#1\noexpand#2:##1\endcsname#1\noexpand#2:##1\noexpand\else\noexpand\expandafter\noexpand#5\noexpand\csname#1\noexpand#2:\s!parent\endcsname{##1}\noexpand\fi\endcsname}%
+% \edef#4##1##2{\noexpand\ifcsname##1:##2\endcsname##1:##2\noexpand\else\noexpand\expandafter\noexpand#5\noexpand\csname##1:\s!parent\endcsname{##2}\noexpand\fi}%
+% \def #5##1##2{\ifx##1\relax\s!empty\else#4{##1}{##2}\fi}% is {} needed around ##1 ?
+% \edef#6##1##2{\noexpand\csname\noexpand\ifcsname#1##1:##2\endcsname#1##1:##2\noexpand\else\noexpand\expandafter\noexpand#5\noexpand\csname#1##1:\s!parent\endcsname{##2}\noexpand\fi\endcsname}%
+% \def#7##1{\detokenize\expandafter\expandafter\expandafter{\csname#1#2:##1\endcsname}}% always root, no backtrack
+% % \def#7##1{\mult_interfaces_detokenize{\csname#4{#1#2}{##1}\endcsname}}% compact version
+% % \def#7##1{\mult_interfaces_detokenize{\csname\ifcsname#1#2:##1\endcsname#1#2:##1\else\expandafter#5\csname#1#2:\s!parent\endcsname{##1}\fi\endcsname}}%
+% \edef#8##1{\noexpand\csname\noexpand\ifcsname#1\noexpand#2:##1\endcsname#1\noexpand#2:##1\noexpand\else\s!empty\noexpand\fi\endcsname}%
+% \edef#9##1{\noexpand\csname#1#2:##1\endcsname}}
+
\unexpanded\def\installparameterhandler#1#2%
{\normalexpanded
{\mult_interfaces_install_parameter_handler
{\noexpand#1}% \??aa
\expandafter\noexpand\csname current#2\endcsname
\expandafter\noexpand\csname #2parameter\endcsname
- \expandafter\noexpand\csname do#2parameter\endcsname % or : #2_parameter_hash
- \expandafter\noexpand\csname do#2parentparameter\endcsname % or : #2_parent_parameter_hash
+ \expandafter\noexpand\csname do#2parameter\endcsname % or : #2_parameter
+ \expandafter\noexpand\csname do#2parentparameter\endcsname % or : #2_parent_parameter
\expandafter\noexpand\csname named#2parameter\endcsname
\expandafter\noexpand\csname detokenized#2parameter\endcsname
\expandafter\noexpand\csname strict#2parameter\endcsname % checked
@@ -207,14 +290,14 @@
% In \MKIV\ we can probably use the english variant for all other
% languages too.
-% todo: inline the \do*value
+% todo: inline the def/let
\unexpanded\def\mult_interfaces_install_parameter_set_handler#1#2#3#4#5#6%
{\ifx#2\relax\let#2\empty\fi
- \unexpanded\def#3{\dosetvalue {#1#2:}}% ##1 {##2} (braces are mandate)
- \unexpanded\def#4{\dosetevalue{#1#2:}}% ##1 {##2} (braces are mandate)
- \unexpanded\def#5{\doletvalue {#1#2:}}% ##1 ##2
- \unexpanded\def#6{\doletvalue {#1#2:}\empty}}% ##1
+ \unexpanded\def#3{\mult_interfaces_def {#1#2:}}% ##1 {##2} (braces are mandatory)
+ \unexpanded\def#4{\mult_interfaces_edef{#1#2:}}% ##1 {##2} (braces are mandatory)
+ \unexpanded\def#5{\mult_interfaces_let {#1#2:}}% ##1 ##2
+ \unexpanded\def#6{\mult_interfaces_lete{#1#2:}}}% ##1
\startinterface english
@@ -272,6 +355,11 @@
\expandafter\edef\csname#1#4:\s!parent\endcsname{#2}%
\fi \fi}
+\def\mult_interfaces_chain#1#2{\ifcsname#1#2:\s!chain\endcsname\csname#1#2:\s!chain\endcsname\space\fi}
+\def\getparentchain #1#2{\ifcsname#1#2:\s!chain\endcsname\csname#1#2:\s!chain\endcsname\fi}
+\def\getcurrentparentchain#1#2{\csname#1#2:\s!chain\endcsname} % for the moment test:
+\def\getcurrentparentchain#1#2{\ifcsname#1#2:\s!chain\endcsname\csname#1#2:\s!chain\endcsname\fi}
+
\unexpanded\def\mult_interfaces_install_define_handler#1#2#3#4#5#6#7#8#9% why is \expanded still needed in clones
{\ifx#4\relax\let#4\empty\fi % see \defineregister
\unexpanded\def#2{\dotripleempty#5}%
@@ -284,6 +372,7 @@
\the#6% predefine
\edef#8{##2}%
\mult_check_for_parent{#1}{#3}#4#8%
+ \expandafter\edef\csname#1#4:\s!chain\endcsname{\mult_interfaces_chain#1{##2}##1}%
\expandafter\edef\csname#1#4:\s!parent\endcsname{#1##2}%
\mult_interfaces_get_parameters{#1#4:}[##3]%
\else\ifsecondargument
@@ -291,16 +380,19 @@
\expandafter\mult_check_for_assignment_indeed\detokenize{##2}=@@\_end_
\ifassignment
\let#8\empty
+ \expandafter\edef\csname#1#4:\s!chain\endcsname{##1}%
\expandafter\edef\csname#1#4:\s!parent\endcsname{#3}%
\mult_interfaces_get_parameters{#1#4:}[##2]%
\else
\edef#8{##2}%
\mult_check_for_parent{#1}{#3}#4#8%
+ \expandafter\edef\csname#1#4:\s!chain\endcsname{\mult_interfaces_chain#1{##2}##1}%
\expandafter\edef\csname#1#4:\s!parent\endcsname{#1##2}%
\fi
\else
\the#6% predefine
\let#8\empty
+ \expandafter\edef\csname#1#4:\s!chain\endcsname{##1}%
\expandafter\edef\csname#1#4:\s!parent\endcsname{#3}%
\fi\fi
\the#7%
@@ -548,10 +640,10 @@
\expandafter\noexpand\csname everysetup#2\endcsname}}
\unexpanded\def\mult_interfaces_install_direct_parameter_set_handler#1#2#3#4#5%
- {\unexpanded\def#2{\dosetvalue #1}%
- \unexpanded\def#3{\dosetevalue#1}%
- \unexpanded\def#4{\doletvalue #1}%
- \unexpanded\def#5{\doletvalue #1\empty}}%
+ {\unexpanded\def#2{\mult_interfaces_def #1}%
+ \unexpanded\def#3{\mult_interfaces_edef#1}%
+ \unexpanded\def#4{\mult_interfaces_let #1}%
+ \unexpanded\def#5{\mult_interfaces_let #1\empty}}%
\startinterface english
@@ -629,7 +721,7 @@
\edef#2{##1}%
#3[##2]%
\else\iffirstargument
- \doifassignmentelse{##1}
+ \doifelseassignment{##1}
{\let#2\empty
#3[##1]}%
{\edef#2{##1}}%
@@ -691,12 +783,11 @@
\else
\global\advance\c_mult_interfaces_n_of_namespaces\plusone
\expandafter\edef\csname ??#1\endcsname{\v_interfaces_prefix_template}%
- \ctxcommand{registernamespace(\number\c_mult_interfaces_n_of_namespaces,"#1")}%
+ \clf_registernamespace\c_mult_interfaces_n_of_namespaces{#1}%
\fi}
-\def\mult_interfaces_get_parameters_error#1#2#3% redefined
- {\ctxcommand{showassignerror("#1","#2","#3",\the\inputlineno)}%
- \waitonfatalerror}
+\def\mult_interfaces_get_parameters_error_indeed#1#2%
+ {\clf_showassignerror{#1}{#2}\inputlineno} % no longer \waitonfatalerror
% We install two core namespaces here, as we want nice error messages. Maybe
% we will reserve the first 9.
@@ -778,10 +869,10 @@
{\dodoubleargument\mult_interfaces_define_name_space}
\def\mult_interfaces_define_name_space[#1][#2]% namespace settings
- {\ctxlua{interfaces.namespaces.define(\!!bs#1\!!es,\!!bs#2\!!es)}}
+ {\clf_definenamespace{#1}{#2}}
\def\listnamespaces
- {\ctxlua{interfaces.namespaces.list()}}
+ {\clf_listnamespaces}
%D Helper:
%D
@@ -807,6 +898,8 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doifcommandhandlerelse\doifelsecommandhandler
+
\unexpanded\def\doifcommandhandler#1#2% namespace name
{\ifcsname#1#2:\s!parent\endcsname
\expandafter\firstofoneargument
@@ -825,10 +918,10 @@
% another set of (fast) helpers (grep for usage):
-\def\expandnamespaceparameter#1#2#3% \??xx \getp \c!xx \c!yy
+\def\expandnamespaceparameter#1#2#3% \??xx \getp \c!xx \v!yy
{\csname#1\ifcsname#1\expandafter\expandafter\expandafter\mult_aux_expand_namespace_parameter#2#3}
-\def\mult_aux_expand_namespace_parameter#1#2% \cs \c!yy
+\def\mult_aux_expand_namespace_parameter#1#2% \cs \v!yy
{#1\endcsname#1\else#2\fi\endcsname}
\def\expandnamespacemacro#1#2#3% \??xx \some_edefed_cs \c!yy
@@ -856,4 +949,302 @@
%D \edef\m_class_whatever{whatever}
%D \stoptyping
+% experiment: in principle this is faster but not that noticeable as we don't do that
+% many assignments and the mechanisms that do are also slow; the advantage is mostly
+% nicer tracing
+
+\def\s!simple{simple}
+\def\s!single{single}
+\def\s!double{double}
+\def\s!triple{triple}
+
+\unexpanded\def\syst_helpers_double_empty#1#2#3%
+ {\syst_helpers_argument_reset
+ \doifelsenextoptional
+ {\syst_helpers_double_empty_one_yes_mult#2#3}%
+ {\syst_helpers_double_empty_one_nop_mult#1}}
+
+\def\syst_helpers_double_empty_one_yes_mult#1#2[#3]%
+ {\firstargumenttrue
+ \doifelsenextoptional
+ {\secondargumenttrue#2[{#3}]}%
+ {\syst_helpers_double_empty_two_nop_mult#1{#3}}}
+
+\def\syst_helpers_double_empty_one_nop_mult% #1%
+ {\firstargumentfalse
+ \secondargumentfalse
+ }% #1}
+
+\def\syst_helpers_double_empty_two_nop_mult
+ {\secondargumentfalse
+ \if_next_blank_space_token
+ \expandafter\syst_helpers_double_empty_one_spaced_mult
+ \else
+ \expandafter\syst_helpers_double_empty_one_normal_mult
+ \fi}
+
+\def\syst_helpers_double_empty_one_spaced_mult#1#2{#1[{#2}] }
+\def\syst_helpers_double_empty_one_normal_mult#1#2{#1[{#2}]}
+
+\unexpanded\def\mult_interfaces_install_setup_handler#1#2#3#4#5#6#7#8%
+ {\ifx#3\relax\let#3\empty\fi
+ \unexpanded\def#5{\mult_interfaces_get_parameters{#1#3:}}% no every ! don't change it
+ \newtoks#4%
+ \newtoks#7%
+ \edef\m_mult_interface_setup{\strippedcsname#2_}%
+ \unexpanded\edef#2{\syst_helpers_double_empty
+ \csname\m_mult_interface_setup\s!simple\endcsname
+ \csname\m_mult_interface_setup\s!single\endcsname
+ \csname\m_mult_interface_setup\s!double\endcsname}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!double\endcsname[##1][##2]%
+ {\let#6#3%
+ \def#8####1% we will have a simple one as well
+ {\edef#3{####1}%
+ \mult_interfaces_get_parameters{#1#3:}[##2]%
+ \the#4}%
+ \processcommalist[##1]#8%
+ \let#3#6%
+ \the#7}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!single\endcsname[##1]%
+ {\let#6#3%
+ \let#3\empty
+ \mult_interfaces_get_parameters{#1:}[##1]%
+ \the#4%
+ \let#3#6%
+ \the#7}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!simple\endcsname%
+ {\let#6#3%
+ \let#3\empty
+ \the#4%
+ \let#3#6%
+ \the#7}}
+
+\unexpanded\def\installsetuphandler#1#2%
+ {\normalexpanded
+ {\mult_interfaces_install_setup_handler
+ {\noexpand#1}% \??aa
+ \expandafter\noexpand\csname setup#2\endcsname
+ \expandafter\noexpand\csname current#2\endcsname
+ \expandafter\noexpand\csname everysetup#2\endcsname
+ \expandafter\noexpand\csname setupcurrent#2\endcsname
+ \expandafter\noexpand\csname saved_setup_current#2\endcsname
+ \expandafter\noexpand\csname everysetup#2root\endcsname
+ \expandafter\noexpand\csname nested_setup_current#2\endcsname}}
+
+\unexpanded\def\syst_helpers_triple_empty#1#2#3#4%
+ {\syst_helpers_argument_reset
+ \doifelsenextoptional
+ {\syst_helpers_triple_empty_one_yes_mult#2#3#4}%
+ {\syst_helpers_triple_empty_one_nop_mult#1}}
+
+\def\syst_helpers_triple_empty_one_yes_mult#1#2#3[#4]%
+ {\firstargumenttrue
+ \doifelsenextoptional
+ {\syst_helpers_triple_empty_two_yes_mult#2#3{#4}}%
+ {\syst_helpers_triple_empty_two_nop_mult#1{#4}}}
+
+\def\syst_helpers_triple_empty_two_yes_mult#1#2#3[#4]%
+ {\secondargumenttrue
+ \doifelsenextoptional
+ {\thirdargumenttrue#2[{#3}][{#4}]}%
+ {\syst_helpers_triple_empty_three_nop_mult#1{#3}{#4}}}
+
+\def\syst_helpers_triple_empty_one_nop_mult % #1%
+ {\firstargumentfalse
+ \secondargumentfalse
+ \thirdargumentfalse
+ } % #1
+
+\def\syst_helpers_triple_empty_two_nop_mult
+ {\secondargumentfalse
+ \thirdargumentfalse
+ \if_next_blank_space_token
+ \expandafter\syst_helpers_triple_empty_two_spaced_mult
+ \else
+ \expandafter\syst_helpers_triple_empty_two_normal_mult
+ \fi}
+
+\def\syst_helpers_triple_empty_three_nop_mult
+ {\thirdargumentfalse
+ \if_next_blank_space_token
+ \expandafter\syst_helpers_triple_empty_three_spaced_mult
+ \else
+ \expandafter\syst_helpers_triple_empty_three_normal_mult
+ \fi}
+
+\def\syst_helpers_triple_empty_two_spaced_mult #1#2{#1[{#2}] }
+\def\syst_helpers_triple_empty_two_normal_mult #1#2{#1[{#2}]}
+\def\syst_helpers_triple_empty_three_spaced_mult#1#2#3{#1[{#2}][{#3}] }
+\def\syst_helpers_triple_empty_three_normal_mult#1#2#3{#1[{#2}][{#3}]}
+
+\unexpanded\def\mult_interfaces_install_auto_setup_handler#1#2#3#4#5#6#7#8%
+ {\ifx#3\relax\let#3\empty\fi
+ \unexpanded\def#5{\mult_interfaces_get_parameters{#1#3:}}%
+ \newtoks#4%
+ \edef\m_mult_interface_setup{\strippedcsname#2_}%
+ \unexpanded\edef#2{\syst_helpers_triple_empty
+ \csname\m_mult_interface_setup\s!simple\endcsname
+ \csname\m_mult_interface_setup\s!single\endcsname
+ \csname\m_mult_interface_setup\s!double\endcsname
+ \csname\m_mult_interface_setup\s!triple\endcsname}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!triple\endcsname[##1][##2][##3]%
+ {\let#7#3%
+ \def#8####1%
+ {\edef#3{####1}%
+ \expandafter\def\csname#1#3:\s!parent\endcsname{#1##2}%
+ \mult_interfaces_get_parameters{#1#3:}[##3]% always sets parent
+ \the#4}%
+ \processcommalist[##1]#8%
+ \let#3#7}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!double\endcsname[##1][##2]%
+ {\let#7#3%
+ \def#8####1%
+ {\edef#3{####1}%
+ #6% checks parent and sets if needed
+ \mult_interfaces_get_parameters{#1#3:}[##2]%
+ \the#4}%
+ \processcommalist[##1]#8%
+ \let#3#7}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!single\endcsname[##1]%
+ {\let#7#3%
+ \let#3\empty
+ \mult_interfaces_get_parameters{#1:}[##1]%
+ \the#4%
+ \let#3#7}%
+ \unexpanded\expandafter\def\csname\m_mult_interface_setup\s!simple\endcsname%
+ {\let#7#3%
+ \let#3\empty
+ \the#4%
+ \let#3#7}}
+
+\unexpanded\def\installautosetuphandler#1#2%
+ {\normalexpanded
+ {\mult_interfaces_install_auto_setup_handler
+ {\noexpand#1}% \??aa
+ \expandafter\noexpand\csname setup#2\endcsname
+ \expandafter\noexpand\csname current#2\endcsname
+ \expandafter\noexpand\csname everysetup#2\endcsname
+ \expandafter\noexpand\csname setupcurrent#2\endcsname
+ \expandafter\noexpand\csname check#2parent\endcsname
+ \expandafter\noexpand\csname saved_setup_current#2\endcsname
+ \expandafter\noexpand\csname nested_setup_current#2\endcsname}}
+
+% okay, we can also get rid of the #9, but this code looks pretty bad, while the previous is
+% still okay given that we can also use #6 as setup (so in fact we can save some cs again and
+% only use one extra)
+%
+% \global\advance\commalevel \plusone
+% \expandafter\def\csname\??nextcommalevel\the\commalevel\endcsname####1,%
+% {\edef#3{####1}%
+% \mult_interfaces_get_parameters{#1#3:}[##2]%
+% \the#5%
+% \syst_helpers_do_process_comma_item}%
+% \expandafter\syst_helpers_do_do_process_comma_item\gobbleoneargument\relax##1,]\relax
+% % \syst_helpers_do_do_process_comma_item##1,]\relax
+% \global\advance\commalevel \minusone
+
+% The next one is experimental (and used in publications):
+
+\let\c_mult_set\relax
+
+\unexpanded\def\mult_interfaces_install_definition_set#1#2#3#4#5#6#7%
+ {\newcount#3%
+ \let#6\empty
+ \unexpanded\def#2%
+ {\expandafter\let\expandafter\c_mult_set\csname #1_t_#6\endcsname
+ \ifx\c_mult_set\relax
+ \expandafter\newtoks\c_mult_set
+ \expandafter\let\csname #1_t_#6\endcsname\c_mult_set
+ \fi}
+ \unexpanded\def#4##1%
+ {\pushmacro#6%
+ \advance#3\plusone
+ \edef#6{##1}%
+ \unprotect}%
+ \unexpanded\def#5%
+ {\protect
+ \advance#3\minusone
+ \popmacro#6}%
+ \unexpanded\def#7##1%
+ {\edef#6{##1}%
+ #2%
+ \the\c_mult_set\relax}}
+
+\unexpanded\def\installdefinitionset#1#2%
+ {\normalexpanded
+ {\mult_interfaces_install_definition_set
+ {\noexpand#1}% \??aa
+ \expandafter\noexpand\csname set_#2_toks\endcsname
+ \expandafter\noexpand\csname #2_nesting_depth\endcsname
+ \expandafter\noexpand\csname push#2\endcsname
+ \expandafter\noexpand\csname pop#2\endcsname
+ \expandafter\noexpand\csname current#2\endcsname
+ \expandafter\noexpand\csname use#2\endcsname}}
+
+\unexpanded\def\mult_interfaces_install_definition_set_member#1#2#3#4#5#6#7#8#9% no everysetups etc
+ {\let#5#2%
+ \unexpanded\def#2%
+ {\ifcase#4\relax\expandafter#5\else\expandafter#6\fi}%
+ \unexpanded\def#6%
+ {\dodoubleempty#7}%
+ \unexpanded\def#7[##1][##2]%
+ {\ifsecondargument
+ #3\c_mult_set\expandafter{\the\c_mult_set#9[##1][##2]}%
+ \else\iffirstargument
+ #3\c_mult_set\expandafter{\the\c_mult_set#8[##1]}%
+ \fi\fi}}
+
+\unexpanded\def\installdefinitionsetmember#1#2#3#4%
+ {\normalexpanded
+ {\mult_interfaces_install_definition_set_member
+ {\noexpand#3}% \??aa
+ \expandafter\noexpand\csname setup#4\endcsname
+ \expandafter\noexpand\csname set_#2_toks\endcsname
+ \expandafter\noexpand\csname #2_nesting_depth\endcsname
+ \expandafter\noexpand\csname normal_setup_#4\endcsname
+ \expandafter\noexpand\csname delayed_setup_#4\endcsname
+ \expandafter\noexpand\csname do_delayed_setup_#4\endcsname
+ \expandafter\noexpand\csname setup#4_\s!single\endcsname
+ \expandafter\noexpand\csname setup#4_\s!double\endcsname}}
+
+%D Another experiment:
+
+\unexpanded\def\mult_interfaces_install_parent_injector#1#2#3#4%
+ {\unexpanded\def#4##1%
+ {\ifx#3\empty
+ \expandafter\def\csname#1#2:\s!parent\endcsname{#1##1}%
+ \fi}}
+
+\unexpanded\def\installparentinjector#1#2%
+ {\normalexpanded{\mult_interfaces_install_parent_injector
+ {\noexpand#1}%
+ \expandafter\noexpand\csname current#2\endcsname
+ \expandafter\noexpand\csname current#2parent\endcsname
+ \expandafter\noexpand\csname inject#2parent\endcsname}}
+
\protect
+
+%\unprotect
+% \installcorenamespace {test} \installcommandhandler \??test {test} \??test
+% \unexpanded\def\TestMeA[#1]%
+% {\edef\currenttest{#1}
+% \edef\p_before{\testparameter\c!before}%
+% \ifx\p_before\empty \relax \else \relax \fi}
+% \unexpanded\def\TestMeB[#1]%
+% {\edef\currenttest{#1}
+% \doifelsenothing{\testparameter\c!before}\relax\relax}
+% \unexpanded\def\TestMeC[#1]%
+% {\edef\currenttest{#1}
+% \expandafter\expandafter\expandafter\ifx\testparameter\c!before\empty \relax \else \relax \fi}
+% \unexpanded\def\TestMeD[#1]%
+% {\edef\currenttest{#1}
+% \doubleexpandafter\ifx\testparameter\c!before\empty \relax \else \relax \fi}
+% \protect
+%
+% \starttext
+% \definetest[foo] \definetest[bar][foo] \setuptest[bar][before=indeed]
+% \resettimer \dorecurse{100000}{\TestMeA[bar]} A:\elapsedtime \par % 0.502
+% \resettimer \dorecurse{100000}{\TestMeB[bar]} B:\elapsedtime \par % 0.530
+% \resettimer \dorecurse{100000}{\TestMeC[bar]} C:\elapsedtime \par % 0.487
+% \resettimer \dorecurse{100000}{\TestMeD[bar]} D:\elapsedtime \par % 0.493
+% \stoptext
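A worked sketch of the new chain bookkeeping, reusing the test namespace from the
commented benchmark above (the expansions are inferred from the definitions, not
verified):

    % \definetest[foo]                     foo:chain  -> {foo}
    % \definetest[bar][foo]                bar:chain  -> {foo bar}
    % \getcurrentparentchain\??test{bar}   expands to: foo bar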
diff --git a/tex/context/base/mult-chk.lua b/tex/context/base/mult-chk.lua
index 2a2dfcd4b..44a9f739f 100644
--- a/tex/context/base/mult-chk.lua
+++ b/tex/context/base/mult-chk.lua
@@ -16,7 +16,8 @@ local allocate = utilities.storage.allocate
local report_interface = logs.reporter("interface","checking")
-interfaces = interfaces or { }
+local interfaces = interfaces
+local implement = interfaces.implement
interfaces.syntax = allocate {
test = { keys = table.tohash { "a","b","c","d","e","f","g" } }
@@ -48,6 +49,18 @@ function interfaces.addvalidkeys(category,list)
end
end
+implement {
+ name = "setvalidinterfacekeys",
+ actions = interfaces.setvalidkeys,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "addvalidinterfacekeys",
+ actions = interfaces.addvalidkeys,
+ arguments = { "string", "string" }
+}
+
-- weird code, looks incomplete ... probably an experiment
local prefix, category, keys
@@ -73,4 +86,8 @@ function interfaces.getcheckedparameters(k,p,s)
end
end
--- _igcp_ = interfaces.getcheckedparameters
+implement {
+ name = "getcheckedinterfaceparameters",
+ actions = interfaces.getcheckedparameters,
+ arguments = { "string", "string", "string" }
+}
diff --git a/tex/context/base/mult-chk.mkiv b/tex/context/base/mult-chk.mkiv
index 1d02f166d..9208a73e1 100644
--- a/tex/context/base/mult-chk.mkiv
+++ b/tex/context/base/mult-chk.mkiv
@@ -38,8 +38,8 @@
\unexpanded\def\setvalidparameterkeys{\dodoubleargument\mult_checkers_set_valid_parameter_keys}
\unexpanded\def\addvalidparameterkeys{\dodoubleargument\mult_checkers_add_valid_parameter_keys}
-\def\mult_checkers_set_valid_parameter_keys[#1][#2]{\ctxlua{interfaces.setvalidkeys("#1",\!!bs#2\!!es)}}
-\def\mult_checkers_add_valid_parameter_keys[#1][#2]{\ctxlua{interfaces.addvalidkeys("#1",\!!bs#2\!!es)}}
+\def\mult_checkers_set_valid_parameter_keys[#1][#2]{\clf_setvalidinterfacekeys{#1}{#2}}
+\def\mult_checkers_add_valid_parameter_keys[#1][#2]{\clf_addvalidinterfacekeys{#1}{#2}}
\def\mult_checkers_get_checked_parameters_yes[#1]#2[#3]#4[#5%
{\if\noexpand#5]%
@@ -50,8 +50,7 @@
\fi{#1}{#3}#5}
\def\mult_checkers_get_checked_parameters_yes_indeed#1#2#3]%
- %{\ctxlua{_igcp_("#1","#2",\!!bs\detokenize{#3}\!!es)}}
- {\ctxlua{interfaces.getcheckedparameters("#1","#2",\!!bs\detokenize{#3}\!!es)}}
+ {\clf_getcheckedinterfaceparameters{#1}{#2}{\detokenize{#3}}}
\def\mult_checkers_get_checked_parameters_nop[#1]#2[#3]#4[#5%
{\if\noexpand#5]%
diff --git a/tex/context/base/mult-de.mkii b/tex/context/base/mult-de.mkii
index 5f2714ce6..90aae390e 100644
--- a/tex/context/base/mult-de.mkii
+++ b/tex/context/base/mult-de.mkii
@@ -180,6 +180,7 @@
\setinterfacevariable{flushleft}{flushleft}
\setinterfacevariable{flushouter}{flushouter}
\setinterfacevariable{flushright}{flushright}
+\setinterfacevariable{followingpage}{followingpage}
\setinterfacevariable{footer}{fusszeile}
\setinterfacevariable{footnote}{fussnote}
\setinterfacevariable{force}{zwinge}
@@ -301,6 +302,7 @@
\setinterfacevariable{mirrored}{gespiegelt}
\setinterfacevariable{monday}{montag}
\setinterfacevariable{mono}{mono}
+\setinterfacevariable{monobold}{monofett}
\setinterfacevariable{month}{monat}
\setinterfacevariable{more}{more}
\setinterfacevariable{morehyphenation}{morehyphenation}
@@ -360,6 +362,7 @@
\setinterfacevariable{positive}{positiv}
\setinterfacevariable{postponing}{verschieben}
\setinterfacevariable{postscript}{postscript}
+\setinterfacevariable{precedingpage}{precedingpage}
\setinterfacevariable{preference}{einstellung}
\setinterfacevariable{preview}{vorschau}
\setinterfacevariable{previous}{vorig}
@@ -419,7 +422,7 @@
\setinterfacevariable{september}{september}
\setinterfacevariable{serif}{serif}
\setinterfacevariable{serried}{kleinerabstand}
-\setinterfacevariable{setups}{impostazioni}
+\setinterfacevariable{setups}{setups}
\setinterfacevariable{sheet}{sheet}
\setinterfacevariable{short}{kurz}
\setinterfacevariable{simplefonts}{simplefonts}
@@ -651,7 +654,7 @@
\setinterfaceconstant{coupling}{verknuepfung}
\setinterfaceconstant{couplingway}{verkopplungsart}
\setinterfaceconstant{criterium}{kriterium}
-\setinterfaceconstant{css}{css}
+\setinterfaceconstant{cssfile}{cssfile}
\setinterfaceconstant{current}{aktuell}
\setinterfaceconstant{cutspace}{cutspace}
\setinterfaceconstant{dash}{strich}
@@ -686,7 +689,7 @@
\setinterfaceconstant{equalwidth}{equalwidth}
\setinterfaceconstant{escape}{escape}
\setinterfaceconstant{evenmargin}{geraderand}
-\setinterfaceconstant{exitoffset}{labeloffset}
+\setinterfaceconstant{exitoffset}{exitoffset}
\setinterfaceconstant{expansion}{expansion}
\setinterfaceconstant{export}{export}
\setinterfaceconstant{extras}{extras}
@@ -701,6 +704,7 @@
\setinterfaceconstant{file}{datei}
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
+\setinterfaceconstant{finalpubsep}{finalpubsep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
\setinterfaceconstant{firstpage}{ersteseite}
\setinterfaceconstant{focus}{focus}
@@ -752,6 +756,7 @@
\setinterfaceconstant{indenting}{einziehen}
\setinterfaceconstant{indentnext}{ziehefolgendeein}
\setinterfaceconstant{indicator}{indikator}
+\setinterfaceconstant{initialsep}{initialsep}
\setinterfaceconstant{inner}{innen}
\setinterfaceconstant{innermargin}{innermargin}
\setinterfaceconstant{inputfile}{inputfile}
@@ -857,6 +862,7 @@
\setinterfaceconstant{nright}{nrechts}
\setinterfaceconstant{ntop}{noben}
\setinterfaceconstant{number}{nummer}
+\setinterfaceconstant{numberalign}{numberalign}
\setinterfaceconstant{numbercolor}{nummernfarbe}
\setinterfaceconstant{numbercommand}{nummerbefehl}
\setinterfaceconstant{numberconversion}{numberconversion}
@@ -1045,6 +1051,8 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
@@ -1278,6 +1286,7 @@
\setinterfacecommand{definetabletemplate}{definieretabellenvorlage}
\setinterfacecommand{definetabulate}{definieretabulator}
\setinterfacecommand{definetext}{definieretext}
+\setinterfacecommand{definetextbackground}{definetextbackground}
\setinterfacecommand{definetextposition}{definetextposition}
\setinterfacecommand{definetextvariable}{definetextvariable}
\setinterfacecommand{definetype}{definetype}
@@ -1438,7 +1447,6 @@
\setinterfacecommand{paperheight}{papierhoehe}
\setinterfacecommand{paperwidth}{papierbreite}
\setinterfacecommand{periods}{punkt}
-\setinterfacecommand{plaatsruwelijst}{placerawlist}
\setinterfacecommand{placebookmarks}{platzierebookmarks}
\setinterfacecommand{placecombinedlist}{platzierezusammengestellteliste}
\setinterfacecommand{placefloat}{placefloat}
@@ -1448,11 +1456,13 @@
\setinterfacecommand{placeheadtext}{placeheadtext}
\setinterfacecommand{placelegend}{platzierelegende}
\setinterfacecommand{placelist}{platziereliste}
+\setinterfacecommand{placelistofsynonyms}{placelistofsynonyms}
\setinterfacecommand{placelocalfootnotes}{platzierelokalefussnoten}
\setinterfacecommand{placelogos}{platzierelogo}
\setinterfacecommand{placeongrid}{amgitterausrichten}
\setinterfacecommand{placeontopofeachother}{platziereuntereinander}
\setinterfacecommand{placepagenumber}{placepagenumber}
+\setinterfacecommand{placerawlist}{placerawlist}
\setinterfacecommand{placereferencelist}{placereferencelist}
\setinterfacecommand{placeregister}{platziereregister}
\setinterfacecommand{placerule}{placerule}
@@ -1599,7 +1609,6 @@
\setinterfacecommand{setupregister}{stelleregisterein}
\setinterfacecommand{setuprotate}{stelledrehenein}
\setinterfacecommand{setuprule}{setuprule}
-\setinterfacecommand{setups}{einstellungen}
\setinterfacecommand{setupscreens}{stellerasterein}
\setinterfacecommand{setupsection}{stelleabschnittein}
\setinterfacecommand{setupsectionblock}{stelleabschnittsblockein}
@@ -1617,6 +1626,7 @@
\setinterfacecommand{setuptables}{stelletabellenein}
\setinterfacecommand{setuptabulate}{stelletabulatorein}
\setinterfacecommand{setuptext}{stelletextein}
+\setinterfacecommand{setuptextbackground}{setuptextbackground}
\setinterfacecommand{setuptextposition}{setuptextposition}
\setinterfacecommand{setuptextrules}{stelletextumrissein}
\setinterfacecommand{setuptexttexts}{stelletexttexteein}
@@ -1663,6 +1673,7 @@
\setinterfacecommand{startdocument}{startdokument}
\setinterfacecommand{startenvironment}{startumgebung}
\setinterfacecommand{startfigure}{startabbildung}
+\setinterfacecommand{startframed}{startframed}
\setinterfacecommand{startglobal}{startglobal}
\setinterfacecommand{startline}{startzeile}
\setinterfacecommand{startlinecorrection}{startzeilenkorrektur}
@@ -1689,6 +1700,7 @@
\setinterfacecommand{starttable}{starttabelle}
\setinterfacecommand{starttables}{starttabellen}
\setinterfacecommand{starttext}{starttext}
+\setinterfacecommand{starttextbackground}{starttextbackground}
\setinterfacecommand{starttextrule}{starttextlinie}
\setinterfacecommand{startunpacked}{startgrosserdurchschuss}
\setinterfacecommand{startversion}{startversion}
@@ -1703,6 +1715,7 @@
\setinterfacecommand{stopcomponent}{stopkomponente}
\setinterfacecommand{stopdocument}{stopdokument}
\setinterfacecommand{stopenvironment}{stopumgebung}
+\setinterfacecommand{stopframed}{stopframed}
\setinterfacecommand{stopglobal}{stopglobal}
\setinterfacecommand{stopline}{stopzeile}
\setinterfacecommand{stoplinecorrection}{stopzeilenkorrektur}
@@ -1728,6 +1741,7 @@
\setinterfacecommand{stoptable}{stoptabelle}
\setinterfacecommand{stoptables}{stoptabellen}
\setinterfacecommand{stoptext}{stoptext}
+\setinterfacecommand{stoptextbackground}{stoptextbackground}
\setinterfacecommand{stoptextrule}{stoptextlinie}
\setinterfacecommand{stopunpacked}{stopgrosserdurchschuss}
\setinterfacecommand{stopversion}{stopversion}
diff --git a/tex/context/base/mult-def.lua b/tex/context/base/mult-def.lua
index afd466531..c0831de2d 100644
--- a/tex/context/base/mult-def.lua
+++ b/tex/context/base/mult-def.lua
@@ -1275,6 +1275,10 @@ return {
["pe"]="تعریفمترادفها",
["ro"]="definestesinonim",
},
+ ["placelistofsynonyms"]={
+ ["en"]="placelistofsynonyms",
+ ["nl"]="plaatslijstmetsynoniemen",
+ },
["definetabletemplate"]={
["cs"]="definujsablonutabulky",
["de"]="definieretabellenvorlage",
@@ -2905,7 +2909,7 @@ return {
["pe"]="نقطهها",
["ro"]="puncte",
},
- ["plaatsruwelijst"]={
+ ["placerawlist"]={
["cs"]="placerawlist",
["de"]="placerawlist",
["en"]="placerawlist",
@@ -3055,7 +3059,7 @@ return {
["pe"]="درجشمارهصفحه",
["ro"]="punenumarpagina",
},
- ["placereferencelist"]={
+ ["placereferencelist"]={ -- not in mkiv
["cs"]="placereferencelist",
["de"]="placereferencelist",
["en"]="placereferencelist",
@@ -4509,16 +4513,6 @@ return {
["pe"]="بارگذاریخط",
["ro"]="seteazarigla",
},
- ["setups"]={
- ["cs"]="nastaveni",
- ["de"]="einstellungen",
- ["en"]="setups",
- ["fr"]="reglages",
- ["it"]="impostazioni",
- ["nl"]="instellingen",
- ["pe"]="بارگذاریها",
- ["ro"]="setari",
- },
["setupscreens"]={
["cs"]="nastavrastr",
["de"]="stellerasterein",
@@ -5039,6 +5033,30 @@ return {
["pe"]="شروعتنظیم",
["ro"]="startaliniere",
},
+ ["starttextbackground"]={
+ ["en"]="starttextbackground",
+ ["nl"]="starttekstachtergrond",
+ },
+ ["stoptextbackground"]={
+ ["en"]="stoptextbackground",
+ ["nl"]="stoptekstachtergrond",
+ },
+ ["setuptextbackground"]={
+ ["en"]="setuptextbackground",
+ ["nl"]="steltekstachtergrondin",
+ },
+ ["definetextbackground"]={
+ ["en"]="definetextbackground",
+ ["nl"]="definieertekstachtergrond",
+ },
+ ["startframed"]={
+ ["en"]="startframed",
+ ["nl"]="startomlijnd",
+ },
+ ["stopframed"]={
+ ["en"]="stopframed",
+ ["nl"]="stopomlijnd",
+ },
["startbackground"]={
["cs"]="startpozadi",
["de"]="starthintergrund",
@@ -6454,6 +6472,10 @@ return {
},
},
["constants"]={
+ ["setups"]={
+ ["comment"]="no translations",
+ ["en"]="setups",
+ },
-- select/simplefonts
["regularfont"] ={ ["en"]="regularfont" },
["boldfont"] ={ ["en"]="boldfont" },
@@ -6508,8 +6530,8 @@ return {
["export"] = {
["en"]="export",
},
- ["css"] = {
- ["en"]="css",
+ ["cssfile"] = {
+ ["en"]="cssfile",
},
["xhtml"] = {
["en"]="xhtml",
@@ -6522,7 +6544,7 @@ return {
["en"]="labeloffset",
},
["exitoffset"]={
- ["en"]="labeloffset",
+ ["en"]="exitoffset",
},
["commentoffset"]={
["en"]="commentoffset",
@@ -6558,6 +6580,10 @@ return {
["en"]="headalign",
["nl"]="kopuitlijnen",
},
+ ["numberalign"]={
+ ["en"]="numberalign",
+ ["nl"]="nummeruitlijnen",
+ },
["alignsymbol"]={
["en"]="alignsymbol",
},
@@ -6613,6 +6639,9 @@ return {
["firstnamesep"]={
["en"]="firstnamesep",
},
+ ["surnamefirstnamesep"]={
+ ["en"]="surnamefirstnamesep",
+ },
["vonsep"]={
["en"]="vonsep",
},
@@ -6622,6 +6651,12 @@ return {
["surnamesep"]={
["en"]="surnamesep",
},
+ ["initialsep"]={
+ ["en"]="initialsep",
+ },
+ ["surnameinitialsep"]={
+ ["en"]="surnameinitialsep",
+ },
["lastnamesep"]={
["en"]="lastnamesep",
},
@@ -6637,6 +6672,9 @@ return {
["lastpubsep"]={
["en"]="lastpubsep",
},
+ ["finalpubsep"]={
+ ["en"]="finalpubsep",
+ },
["refcommand"]={
["en"]="refcommand",
},
@@ -8935,7 +8973,7 @@ return {
["de"]="mindepth",
["en"]="mindepth",
["fr"]="profondeurmin",
- ["it"]="mindeoth",
+ ["it"]="mindepth",
["nl"]="mindiepte",
["pe"]="کمترینعمق",
["ro"]="mindepth",
@@ -9702,7 +9740,7 @@ return {
["en"]="reference",
["fr"]="reference",
["it"]="riferimento",
- ["nl"]="verwijzing",
+ ["nl"]="referentie",
["pe"]="مرجع",
["ro"]="referinta",
},
@@ -10124,16 +10162,6 @@ return {
["pe"]="قراربده",
["ro"]="set",
},
- ["setups"]={
- ["cs"]="setups",
- ["de"]="setups",
- ["en"]="setups",
- ["fr"]="reglages",
- ["it"]="setups",
- ["nl"]="setups",
- ["pe"]="بارگذاریها",
- ["ro"]="setups",
- },
["shrink"]={
["en"]="shrink",
["nl"]="krimp",
@@ -10911,7 +10939,7 @@ return {
["en"]="unknownreference",
["fr"]="referenceinconnue",
["it"]="riferimentoingoto",
- ["nl"]="onbekendeverwijzing",
+ ["nl"]="onbekendereferentie",
["pe"]="مرجعناشناس",
["ro"]="referintanecunoscuta",
},
@@ -11393,6 +11421,18 @@ return {
},
},
["variables"]={
+ ["setups"]={
+ ["comment"]="no translations",
+ ["en"]="setups",
+ },
+ ["followingpage"]={
+ ["en"]="followingpage",
+ ["nl"]="opvolgendepagina",
+ },
+ ["precedingpage"]={
+ ["en"]="precedingpage",
+ ["nl"]="voorafgaandepagina",
+ },
["math"]={
["en"]="math",
},
@@ -11509,21 +11549,27 @@ return {
},
["maxheight"]={
["en"]="maxheight",
+ ["nl"]="maxhoogte",
},
["maxdepth"]={
["en"]="maxdepth",
+ ["nl"]="maxdiepte",
},
["maxwidth"]={
["en"]="maxwidth",
+ ["nl"]="maxbreedte",
},
["minheight"]={
["en"]="minheight",
+ ["nl"]="minhoogte",
},
["mindepth"]={
["en"]="mindepth",
+ ["nl"]="mindiepte",
},
["minwidth"]={
["en"]="minwidth",
+ ["nl"]="minbreedte",
},
["short"]={
["nl"]="kort",
@@ -14378,6 +14424,16 @@ return {
["pe"]="مونو",
["ro"]="mono",
},
+ ["monobold"]={
+ ["cs"]="monotucne",
+ ["de"]="monofett",
+ ["en"]="monobold",
+ ["fr"]="monogras",
+ ["it"]="monograssetto",
+ ["nl"]="monovet",
+ ["pe"]="monobold",
+ ["ro"]="monoaldin",
+ },
["month"]={
["cs"]="mesic",
["de"]="monat",
@@ -15475,16 +15531,6 @@ return {
["pe"]="تنگهم",
["ro"]="serried",
},
- ["setups"]={
- ["cs"]="einstellungen",
- ["de"]="impostazioni",
- ["en"]="setups",
- ["fr"]="reglages",
- ["it"]="nastaveni",
- ["nl"]="instellingen",
- ["pe"]="بارگذاریها",
- ["ro"]="setari",
- },
["sheet"]={
["cs"]="sheet",
["de"]="sheet",
diff --git a/tex/context/base/mult-def.mkiv b/tex/context/base/mult-def.mkiv
index 192a380ee..d547a7b81 100644
--- a/tex/context/base/mult-def.mkiv
+++ b/tex/context/base/mult-def.mkiv
@@ -30,76 +30,130 @@
% \input mult-\userinterfacetag \relax
% \input mult-m\userresponsestag \relax
-\ctxlua{interfaces.setuserinterface("\userinterfacetag","\userresponsestag")}
-
-% start todo:
-
-\def\c!fences {fences}
-\def\c!keeptogether {keeptogether}
-
-\def\c!dataset {dataset}
-\def\c!sectionblock {sectionblock}
-\def\c!language {language}
-\def\c!compressseparator{compressseparator}
-\def\c!renderingsetup {renderingsetup}
-\def\c!filler {filler}
-\def\c!resources {resources}
-\def\c!first {first}
-\def\c!last {last}
-\def\c!quotechar {quotechar}
-\def\c!commentchar {commentchar}
-\def\c!symbolcommand {symbolcommand}
-\def\c!xmlsetup {xmlsetup}
-\def\c!comma {comma}
-\def\c!period {period}
-\def\c!monthconversion {monthconversion}
-\def\c!comment {comment}
-\def\c!textalign {textalign}
-\def\c!up {up}
-\def\c!down {down}
-\def\c!instance {instance}
-\def\c!database {database}
-\def\c!group {group}
-\def\c!groupsuffix {groupsuffix}
-
-\def\v!compressseparator{compressseparator}
-\def\v!notation {notation}
-\def\v!endnote {endnote}
-\def\v!interactive {interactive}
-\def\v!autopunctuation {autopunctuation}
-\def\v!integral {integral}
-\def\v!shiftup {shiftup}
-\def\v!shiftdown {shiftdown}
-\def\v!construction {construction}
-\def\v!unframed {unframed}
-\def\v!chemical {chemical}
-\def\v!chemicals {chemicals}
-\def\v!words {words}
-\def\v!combination {combination}
-\def\v!norepeat {norepeat}
-\def\v!mixed {mixed}
-
-\def\s!lcgreek {lcgreek}
-\def\s!ucgreek {ucgreek}
-\def\s!sygreek {sygreek}
-\def\s!italics {italics}
-\def\s!integral {integral}
-\def\s!insert {insert} % maybe insertclass
-\def\s!marker {marker}
-
-\def\s!mixedcolumn {mixedcolumn}
-
-\def\s!double {double}
-\def\s!decimal {decimal}
-
-\def\s!current {current}
-
-\def\s!rel {rel}
-\def\s!ord {ord}
-
-\def\c!HL {HL}
-\def\c!VL {VL}
-\def\c!NL {NL}
+\clf_setuserinterface{\userinterfacetag}{\userresponsestag}
+
+% start todo in mult-def.lua:
+
+\def\c!openup {openup}
+
+\def\v!serifnormal {serifnormal}
+\def\v!serifbold {serifbold}
+\def\v!sansnormal {sansnormal}
+%def\v!sansbold {sansbold}
+\def\v!mononormal {mononormal}
+\def\v!monobold {monobold}
+
+\def\c!functionstyle {functionstyle}
+\def\c!functioncolor {functioncolor}
+
+\def\v!extremestretch {extremestretch}
+
+\def\v!alphabetic {alphabetic}
+\def\v!Alphabetic {Alphabetic}
+
+\def\c!svgstyle {svgstyle}
+
+\def\c!translate {translate}
+
+\def\c!nextleft {nextleft}
+\def\c!nextright {nextright}
+\def\c!nextleftquotation {nextleftquotation}
+\def\c!nextrightquotation{nextrightquotation}
+
+\def\c!profile {profile}
+
+\def\c!fences {fences}
+\def\c!words {words}
+\def\c!characters {characters}
+\def\c!hyphens {hyphens}
+\def\c!joiners {joiners}
+\def\c!leftwords {leftwords}
+\def\c!rightwords {rightwords}
+\def\c!keeptogether {keeptogether}
+\def\c!viewerprefix {viewerprefix}
+
+\def\v!display {display}
+\def\v!inline {inline}
+
+\def\v!camel {camel}
+
+\def\c!dataset {dataset}
+\def\c!sectionblock {sectionblock}
+\def\c!language {language}
+\def\c!compressseparator {compressseparator}
+\def\c!renderingsetup {renderingsetup}
+\def\c!filler {filler}
+\def\c!resources {resources}
+\def\c!first {first}
+\def\c!last {last}
+\def\c!quotechar {quotechar}
+\def\c!commentchar {commentchar}
+\def\c!symbolcommand {symbolcommand}
+\def\c!xmlsetup {xmlsetup}
+\def\c!comma {comma}
+\def\c!period {period}
+\def\c!monthconversion {monthconversion}
+\def\c!authorconversion {authorconversion}
+\def\c!comment {comment}
+\def\c!textalign {textalign}
+\def\c!up {up}
+\def\c!down {down}
+\def\c!instance {instance}
+\def\c!database {database}
+\def\c!group {group}
+\def\c!groupsuffix {groupsuffix}
+\def\c!properties {properties}
+\def\c!journalconversion {journalconversion}
+\def\c!register {register}
+\def\c!note {note}
+\def\c!field {field}
+\def\c!ignore {ignore}
+\def\c!specification {specification}
+
+\def\c!pageleft {pageleft}
+\def\c!pageright {pageright}
+\def\c!pagesep {pagesep}
+\def\c!lastpagesep {lastpagesep}
+\def\c!finalpagesep {finalpagesep}
+\def\c!pageconnector {pageconnector}
+
+\def\c!referencemethod {referencemethod} % forward both
+
+\def\v!dataset {dataset}
+\def\v!compressseparator {compressseparator}
+\def\v!notation {notation}
+\def\v!endnote {endnote}
+\def\v!interactive {interactive}
+\def\v!autopunctuation {autopunctuation}
+\def\v!integral {integral}
+\def\v!shiftup {shiftup}
+\def\v!shiftdown {shiftdown}
+\def\v!construction {construction}
+\def\v!unframed {unframed}
+\def\v!chemical {chemical}
+\def\v!chemicals {chemicals}
+\def\v!words {words}
+\def\v!combination {combination}
+\def\v!norepeat {norepeat}
+\def\v!mixed {mixed}
+\def\v!centerlast {centerlast}
+\def\v!long {long}
+\def\v!box {box}
+
+\def\v!noline {noline}
+\def\v!noheight {noheight}
+\def\v!nodepth {nodepth}
+
+\def\v!bookmark {bookmark}
+
+\def\v!vfenced {vfenced}
+\def\v!bothtext {bothtext}
+
+\def\s!traditional {traditional}
+
+\def\c!HL {HL}
+\def\c!VL {VL}
+\def\c!NL {NL}
\ifdefined\v!kerncharacters\else \def\v!kerncharacters{kerncharacters} \fi % no time now for translations should be a e! actually
\ifdefined\v!letterspacing \else \def\v!letterspacing {letterspacing} \fi % no time now for translations should be a e! actually
@@ -112,6 +166,11 @@
\def\c!etallimit {etallimit}
\def\c!etaldisplay{etaldisplay}
\def\c!etaltext {etaltext}
+\def\c!etaloption {etaloption}
+
+\ifdefined\v!simplelist\else \def\v!simplelist{simplelist} \fi
+\ifdefined\v!sorting \else \def\v!sorting {sorting} \fi
+\ifdefined\v!synonym \else \def\v!synonym {synonym} \fi
% stop todo
diff --git a/tex/context/base/mult-en.mkii b/tex/context/base/mult-en.mkii
index 97732dab7..b08070ba0 100644
--- a/tex/context/base/mult-en.mkii
+++ b/tex/context/base/mult-en.mkii
@@ -180,6 +180,7 @@
\setinterfacevariable{flushleft}{flushleft}
\setinterfacevariable{flushouter}{flushouter}
\setinterfacevariable{flushright}{flushright}
+\setinterfacevariable{followingpage}{followingpage}
\setinterfacevariable{footer}{footer}
\setinterfacevariable{footnote}{footnote}
\setinterfacevariable{force}{force}
@@ -301,6 +302,7 @@
\setinterfacevariable{mirrored}{mirrored}
\setinterfacevariable{monday}{monday}
\setinterfacevariable{mono}{mono}
+\setinterfacevariable{monobold}{monobold}
\setinterfacevariable{month}{month}
\setinterfacevariable{more}{more}
\setinterfacevariable{morehyphenation}{morehyphenation}
@@ -360,6 +362,7 @@
\setinterfacevariable{positive}{positive}
\setinterfacevariable{postponing}{postponing}
\setinterfacevariable{postscript}{postscript}
+\setinterfacevariable{precedingpage}{precedingpage}
\setinterfacevariable{preference}{preference}
\setinterfacevariable{preview}{preview}
\setinterfacevariable{previous}{previous}
@@ -651,7 +654,7 @@
\setinterfaceconstant{coupling}{coupling}
\setinterfaceconstant{couplingway}{couplingway}
\setinterfaceconstant{criterium}{criterium}
-\setinterfaceconstant{css}{css}
+\setinterfaceconstant{cssfile}{cssfile}
\setinterfaceconstant{current}{current}
\setinterfaceconstant{cutspace}{cutspace}
\setinterfaceconstant{dash}{dash}
@@ -686,7 +689,7 @@
\setinterfaceconstant{equalwidth}{equalwidth}
\setinterfaceconstant{escape}{escape}
\setinterfaceconstant{evenmargin}{evenmargin}
-\setinterfaceconstant{exitoffset}{labeloffset}
+\setinterfaceconstant{exitoffset}{exitoffset}
\setinterfaceconstant{expansion}{expansion}
\setinterfaceconstant{export}{export}
\setinterfaceconstant{extras}{extras}
@@ -701,6 +704,7 @@
\setinterfaceconstant{file}{file}
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
+\setinterfaceconstant{finalpubsep}{finalpubsep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
\setinterfaceconstant{firstpage}{firstpage}
\setinterfaceconstant{focus}{focus}
@@ -752,6 +756,7 @@
\setinterfaceconstant{indenting}{indenting}
\setinterfaceconstant{indentnext}{indentnext}
\setinterfaceconstant{indicator}{indicator}
+\setinterfaceconstant{initialsep}{initialsep}
\setinterfaceconstant{inner}{inner}
\setinterfaceconstant{innermargin}{innermargin}
\setinterfaceconstant{inputfile}{inputfile}
@@ -857,6 +862,7 @@
\setinterfaceconstant{nright}{nright}
\setinterfaceconstant{ntop}{ntop}
\setinterfaceconstant{number}{number}
+\setinterfaceconstant{numberalign}{numberalign}
\setinterfaceconstant{numbercolor}{numbercolor}
\setinterfaceconstant{numbercommand}{numbercommand}
\setinterfaceconstant{numberconversion}{numberconversion}
@@ -1045,6 +1051,8 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
@@ -1278,6 +1286,7 @@
\setinterfacecommand{definetabletemplate}{definetabletemplate}
\setinterfacecommand{definetabulate}{definetabulate}
\setinterfacecommand{definetext}{definetext}
+\setinterfacecommand{definetextbackground}{definetextbackground}
\setinterfacecommand{definetextposition}{definetextposition}
\setinterfacecommand{definetextvariable}{definetextvariable}
\setinterfacecommand{definetype}{definetype}
@@ -1438,7 +1447,6 @@
\setinterfacecommand{paperheight}{paperheight}
\setinterfacecommand{paperwidth}{paperwidth}
\setinterfacecommand{periods}{periods}
-\setinterfacecommand{plaatsruwelijst}{placerawlist}
\setinterfacecommand{placebookmarks}{placebookmarks}
\setinterfacecommand{placecombinedlist}{placecombinedlist}
\setinterfacecommand{placefloat}{placefloat}
@@ -1448,11 +1456,13 @@
\setinterfacecommand{placeheadtext}{placeheadtext}
\setinterfacecommand{placelegend}{placelegend}
\setinterfacecommand{placelist}{placelist}
+\setinterfacecommand{placelistofsynonyms}{placelistofsynonyms}
\setinterfacecommand{placelocalfootnotes}{placelocalfootnotes}
\setinterfacecommand{placelogos}{placelogos}
\setinterfacecommand{placeongrid}{placeongrid}
\setinterfacecommand{placeontopofeachother}{placeontopofeachother}
\setinterfacecommand{placepagenumber}{placepagenumber}
+\setinterfacecommand{placerawlist}{placerawlist}
\setinterfacecommand{placereferencelist}{placereferencelist}
\setinterfacecommand{placeregister}{placeregister}
\setinterfacecommand{placerule}{placerule}
@@ -1599,7 +1609,6 @@
\setinterfacecommand{setupregister}{setupregister}
\setinterfacecommand{setuprotate}{setuprotate}
\setinterfacecommand{setuprule}{setuprule}
-\setinterfacecommand{setups}{setups}
\setinterfacecommand{setupscreens}{setupscreens}
\setinterfacecommand{setupsection}{setupsection}
\setinterfacecommand{setupsectionblock}{setupsectionblock}
@@ -1617,6 +1626,7 @@
\setinterfacecommand{setuptables}{setuptables}
\setinterfacecommand{setuptabulate}{setuptabulate}
\setinterfacecommand{setuptext}{setuptext}
+\setinterfacecommand{setuptextbackground}{setuptextbackground}
\setinterfacecommand{setuptextposition}{setuptextposition}
\setinterfacecommand{setuptextrules}{setuptextrules}
\setinterfacecommand{setuptexttexts}{setuptexttexts}
@@ -1663,6 +1673,7 @@
\setinterfacecommand{startdocument}{startdocument}
\setinterfacecommand{startenvironment}{startenvironment}
\setinterfacecommand{startfigure}{startfigure}
+\setinterfacecommand{startframed}{startframed}
\setinterfacecommand{startglobal}{startglobal}
\setinterfacecommand{startline}{startline}
\setinterfacecommand{startlinecorrection}{startlinecorrection}
@@ -1689,6 +1700,7 @@
\setinterfacecommand{starttable}{starttable}
\setinterfacecommand{starttables}{starttables}
\setinterfacecommand{starttext}{starttext}
+\setinterfacecommand{starttextbackground}{starttextbackground}
\setinterfacecommand{starttextrule}{starttextrule}
\setinterfacecommand{startunpacked}{startunpacked}
\setinterfacecommand{startversion}{startversion}
@@ -1703,6 +1715,7 @@
\setinterfacecommand{stopcomponent}{stopcomponent}
\setinterfacecommand{stopdocument}{stopdocument}
\setinterfacecommand{stopenvironment}{stopenvironment}
+\setinterfacecommand{stopframed}{stopframed}
\setinterfacecommand{stopglobal}{stopglobal}
\setinterfacecommand{stopline}{stopline}
\setinterfacecommand{stoplinecorrection}{stoplinecorrection}
@@ -1728,6 +1741,7 @@
\setinterfacecommand{stoptable}{stoptable}
\setinterfacecommand{stoptables}{stoptables}
\setinterfacecommand{stoptext}{stoptext}
+\setinterfacecommand{stoptextbackground}{stoptextbackground}
\setinterfacecommand{stoptextrule}{stoptextrule}
\setinterfacecommand{stopunpacked}{stopunpacked}
\setinterfacecommand{stopversion}{stopversion}
diff --git a/tex/context/base/mult-fr.mkii b/tex/context/base/mult-fr.mkii
index 520f8e1a6..d76da18d9 100644
--- a/tex/context/base/mult-fr.mkii
+++ b/tex/context/base/mult-fr.mkii
@@ -180,6 +180,7 @@
\setinterfacevariable{flushleft}{flushleft}
\setinterfacevariable{flushouter}{flushouter}
\setinterfacevariable{flushright}{flushright}
+\setinterfacevariable{followingpage}{followingpage}
\setinterfacevariable{footer}{pdp}
\setinterfacevariable{footnote}{notepdp}
\setinterfacevariable{force}{force}
@@ -301,6 +302,7 @@
\setinterfacevariable{mirrored}{reflete}
\setinterfacevariable{monday}{lundi}
\setinterfacevariable{mono}{mono}
+\setinterfacevariable{monobold}{monogras}
\setinterfacevariable{month}{mois}
\setinterfacevariable{more}{more}
\setinterfacevariable{morehyphenation}{morehyphenation}
@@ -360,6 +362,7 @@
\setinterfacevariable{positive}{positif}
\setinterfacevariable{postponing}{postponing}
\setinterfacevariable{postscript}{postscript}
+\setinterfacevariable{precedingpage}{precedingpage}
\setinterfacevariable{preference}{preference}
\setinterfacevariable{preview}{previsualisation}
\setinterfacevariable{previous}{precedent}
@@ -419,7 +422,7 @@
\setinterfacevariable{september}{septembre}
\setinterfacevariable{serif}{serif}
\setinterfacevariable{serried}{serried}
-\setinterfacevariable{setups}{reglages}
+\setinterfacevariable{setups}{setups}
\setinterfacevariable{sheet}{sheet}
\setinterfacevariable{short}{short}
\setinterfacevariable{simplefonts}{simplefonts}
@@ -651,7 +654,7 @@
\setinterfaceconstant{coupling}{couplage}
\setinterfaceconstant{couplingway}{modecouplage}
\setinterfaceconstant{criterium}{critere}
-\setinterfaceconstant{css}{css}
+\setinterfaceconstant{cssfile}{cssfile}
\setinterfaceconstant{current}{courant}
\setinterfaceconstant{cutspace}{cutspace}
\setinterfaceconstant{dash}{pointille}
@@ -686,7 +689,7 @@
\setinterfaceconstant{equalwidth}{equalwidth}
\setinterfaceconstant{escape}{escape}
\setinterfaceconstant{evenmargin}{margepaire}
-\setinterfaceconstant{exitoffset}{labeloffset}
+\setinterfaceconstant{exitoffset}{exitoffset}
\setinterfaceconstant{expansion}{expansion}
\setinterfaceconstant{export}{export}
\setinterfaceconstant{extras}{extras}
@@ -701,6 +704,7 @@
\setinterfaceconstant{file}{fichier}
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
+\setinterfaceconstant{finalpubsep}{finalpubsep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
\setinterfaceconstant{firstpage}{premierepage}
\setinterfaceconstant{focus}{focus}
@@ -752,6 +756,7 @@
\setinterfaceconstant{indenting}{composeenalinea}
\setinterfaceconstant{indentnext}{indentesuivant}
\setinterfaceconstant{indicator}{indicateur}
+\setinterfaceconstant{initialsep}{initialsep}
\setinterfaceconstant{inner}{interieur}
\setinterfaceconstant{innermargin}{margeinterieure}
\setinterfaceconstant{inputfile}{fichierentree}
@@ -857,6 +862,7 @@
\setinterfaceconstant{nright}{ndroite}
\setinterfaceconstant{ntop}{nsup}
\setinterfaceconstant{number}{numero}
+\setinterfaceconstant{numberalign}{numberalign}
\setinterfaceconstant{numbercolor}{couleurnumero}
\setinterfaceconstant{numbercommand}{commandenumero}
\setinterfaceconstant{numberconversion}{numberconversion}
@@ -1001,7 +1007,7 @@
\setinterfaceconstant{separatorcolor}{separatorcolor}
\setinterfaceconstant{separatorstyle}{separatorstyle}
\setinterfaceconstant{set}{set}
-\setinterfaceconstant{setups}{reglages}
+\setinterfaceconstant{setups}{setups}
\setinterfaceconstant{shrink}{shrink}
\setinterfaceconstant{side}{cote}
\setinterfaceconstant{sidealign}{sidealign}
@@ -1045,6 +1051,8 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
@@ -1278,6 +1286,7 @@
\setinterfacecommand{definetabletemplate}{definittrametableau}
\setinterfacecommand{definetabulate}{definittabulation}
\setinterfacecommand{definetext}{definittexte}
+\setinterfacecommand{definetextbackground}{definetextbackground}
\setinterfacecommand{definetextposition}{definitpositiontexte}
\setinterfacecommand{definetextvariable}{definitvariabletexte}
\setinterfacecommand{definetype}{definittype}
@@ -1438,7 +1447,6 @@
\setinterfacecommand{paperheight}{hauteurpapier}
\setinterfacecommand{paperwidth}{largeurpapier}
\setinterfacecommand{periods}{periodes}
-\setinterfacecommand{plaatsruwelijst}{placerawlist}
\setinterfacecommand{placebookmarks}{placemarquespages}
\setinterfacecommand{placecombinedlist}{placelisteinmbriquee}
\setinterfacecommand{placefloat}{placeflottant}
@@ -1448,11 +1456,13 @@
\setinterfacecommand{placeheadtext}{placetextetete}
\setinterfacecommand{placelegend}{placelegende}
\setinterfacecommand{placelist}{placeliste}
+\setinterfacecommand{placelistofsynonyms}{placelistofsynonyms}
\setinterfacecommand{placelocalfootnotes}{placenotespdplocales}
\setinterfacecommand{placelogos}{placelogos}
\setinterfacecommand{placeongrid}{placesurgrille}
\setinterfacecommand{placeontopofeachother}{placelesunsaudessusdesautres}
\setinterfacecommand{placepagenumber}{placenumeropage}
+\setinterfacecommand{placerawlist}{placerawlist}
\setinterfacecommand{placereferencelist}{placelistereference}
\setinterfacecommand{placeregister}{placeregistre}
\setinterfacecommand{placerule}{placeregle}
@@ -1599,7 +1609,6 @@
\setinterfacecommand{setupregister}{regleregistre}
\setinterfacecommand{setuprotate}{regleoriente}
\setinterfacecommand{setuprule}{regleregle}
-\setinterfacecommand{setups}{reglages}
\setinterfacecommand{setupscreens}{regleecrans}
\setinterfacecommand{setupsection}{reglesection}
\setinterfacecommand{setupsectionblock}{regleblocsection}
@@ -1617,6 +1626,7 @@
\setinterfacecommand{setuptables}{regletableaux}
\setinterfacecommand{setuptabulate}{regletabulation}
\setinterfacecommand{setuptext}{regletexte}
+\setinterfacecommand{setuptextbackground}{setuptextbackground}
\setinterfacecommand{setuptextposition}{reglepositiontexte}
\setinterfacecommand{setuptextrules}{reglelignesreglestexte}
\setinterfacecommand{setuptexttexts}{regletextestexte}
@@ -1663,6 +1673,7 @@
\setinterfacecommand{startdocument}{demarredocument}
\setinterfacecommand{startenvironment}{demarreenvironement}
\setinterfacecommand{startfigure}{demarrefigure}
+\setinterfacecommand{startframed}{startframed}
\setinterfacecommand{startglobal}{demarreglobal}
\setinterfacecommand{startline}{demarreligne}
\setinterfacecommand{startlinecorrection}{demarrecorrectionligne}
@@ -1689,6 +1700,7 @@
\setinterfacecommand{starttable}{demarretableau}
\setinterfacecommand{starttables}{demarretableaux}
\setinterfacecommand{starttext}{demarretexte}
+\setinterfacecommand{starttextbackground}{starttextbackground}
\setinterfacecommand{starttextrule}{demarreligneregleetexte}
\setinterfacecommand{startunpacked}{demarredegroupe}
\setinterfacecommand{startversion}{demarreversion}
@@ -1703,6 +1715,7 @@
\setinterfacecommand{stopcomponent}{stoppecomposant}
\setinterfacecommand{stopdocument}{stoppedocument}
\setinterfacecommand{stopenvironment}{stoppeenvironement}
+\setinterfacecommand{stopframed}{stopframed}
\setinterfacecommand{stopglobal}{stoppeglobal}
\setinterfacecommand{stopline}{stoppeligne}
\setinterfacecommand{stoplinecorrection}{stoppecorrectionligne}
@@ -1728,6 +1741,7 @@
\setinterfacecommand{stoptable}{stoppetableau}
\setinterfacecommand{stoptables}{stoppetableaux}
\setinterfacecommand{stoptext}{stoppetexte}
+\setinterfacecommand{stoptextbackground}{stoptextbackground}
\setinterfacecommand{stoptextrule}{stoppeligneregleetexte}
\setinterfacecommand{stopunpacked}{stoppedegroupe}
\setinterfacecommand{stopversion}{stoppeversion}
diff --git a/tex/context/base/mult-fun.lua b/tex/context/base/mult-fun.lua
index 2101b95e9..27aa32055 100644
--- a/tex/context/base/mult-fun.lua
+++ b/tex/context/base/mult-fun.lua
@@ -4,26 +4,30 @@ return {
"nocolormodel", "greycolormodel", "graycolormodel", "rgbcolormodel", "cmykcolormodel",
"shadefactor",
"textextoffset",
- "normaltransparent", "multiplytransparent", "screentransparent", "overlaytransparent", "softlighttransparent",
- "hardlighttransparent", "colordodgetransparent", "colorburntransparent", "darkentransparent", "lightentransparent",
- "differencetransparent", "exclusiontransparent", "huetransparent", "saturationtransparent", "colortransparent", "luminositytransparent",
--- "originlength", "tickstep ", "ticklength",
--- "autoarrows", "ahfactor",
--- "angleoffset", anglelength", anglemethod",
+ "normaltransparent", "multiplytransparent", "screentransparent", "overlaytransparent",
+ "softlighttransparent", "hardlighttransparent", "colordodgetransparent", "colorburntransparent",
+ "darkentransparent", "lightentransparent", "differencetransparent", "exclusiontransparent",
+ "huetransparent", "saturationtransparent", "colortransparent", "luminositytransparent",
+ -- "originlength", "tickstep ", "ticklength",
+ -- "autoarrows", "ahfactor",
+ -- "angleoffset", anglelength", anglemethod",
"metapostversion",
"maxdimensions",
},
commands = {
+ "transparency",
--
"sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian",
"tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos",
- "invsin", "invcos", "acosh", "asinh", "sinh", "cosh",
+ "invsin", "invcos", "invtan", "acosh", "asinh", "sinh", "cosh",
+ "zmod",
"paired", "tripled",
"unitcircle", "fulldiamond", "unitdiamond", "fullsquare",
-- "halfcircle", "quartercircle",
"llcircle", "lrcircle", "urcircle", "ulcircle",
"tcircle", "bcircle", "lcircle", "rcircle",
"lltriangle", "lrtriangle", "urtriangle", "ultriangle",
+ "uptriangle", "downtriangle", "lefttriangle", "righttriangle", "triangle",
"smoothed", "cornered", "superellipsed", "randomized", "squeezed", "enlonged", "shortened",
"punked", "curved", "unspiked", "simplified", "blownup", "stretched",
"enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged",
@@ -36,24 +40,32 @@ return {
"xsized", "ysized", "xysized", "sized", "xyscaled",
"intersection_point", "intersection_found", "penpoint",
"bbwidth", "bbheight",
- "withshade", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor", "withshading", "shadedinto",
- "withcircularshade", "withlinearshade",
+ "withshade", "withcircularshade", "withlinearshade", -- old but kept
+ "defineshade", "shaded",
+ -- "withshading", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor",
+ "shadedinto", "withshadecolors", "withshadedomain", "withshademethod", "withshadefactor", "withshadevector", "withshadecenter",
"cmyk", "spotcolor", "multitonecolor", "namedcolor",
"drawfill", "undrawfill",
"inverted", "uncolored", "softened", "grayed", "greyed",
"onlayer",
"along",
- "graphictext", "loadfigure", "externalfigure", "withmask", "figure", "register", "bitmapimage",
+ "graphictext", "loadfigure", "externalfigure", "figure", "register",
+ "withmask", "bitmapimage",
"colordecimals", "ddecimal", "dddecimal", "ddddecimal",
- "textext", "thetextext", "rawtextext", "textextoffset", "verbatim", "thelabel", "label", "autoalign",
+ "textext", "thetextext", "rawtextext", "textextoffset",
+ "verbatim",
+ "thelabel", "label",
+ "autoalign",
"transparent", "withtransparency",
"property", "properties", "withproperties",
"asgroup",
- "infont", -- redefined usign textext
- -- "property", "withproperties", "properties", -- not yet
- "set_linear_vector", "linear_shade", "define_linear_shade", "define_circular_linear_shade", "define_sampled_linear_shade",
- "set_circular_vector", "circular_shade", "define_circular_shade", "define_circular_linear_shade", "define_sampled_circular_shade",
- "space", "CRLF",
+ "infont", -- redefined using textext
+ -- "set_linear_vector", "set_circular_vector",
+ -- "linear_shade", "circular_shade",
+ -- "define_linear_shade", "define_circular_shade",
+ -- "define_circular_linear_shade", "define_circular_linear_shade",
+ -- "define_sampled_linear_shade", "define_sampled_circular_shade",
+ "space", "crlf", "dquote", "percent", "SPACE", "CRLF", "DQUOTE", "PERCENT",
"grayscale", "greyscale", "withgray", "withgrey",
"colorpart",
"readfile",
@@ -63,10 +75,12 @@ return {
"break",
"xstretched", "ystretched", "snapped",
--
- "pathconnectors", "function", "constructedpath", "constructedpairs",
- "punkedfunction", "curvedfunction", "tightfunction",
- "punkedpath", "curvedpath", "tightpath",
- "punkedpairs", "curvedpairs", "tightpairs",
+ "pathconnectors", "function",
+ "constructedfunction", "constructedpath", "constructedpairs",
+ -- "punkedfunction", "punkedpath", "punkedpairs",
+ "straightfunction", "straightpath", "straightpairs",
+ "curvedfunction", "curvedpath", "curvedpairs",
+ -- "tightfunction", "tightpath", "tightpairs",
--
"evenly", "oddly",
--
@@ -75,26 +89,31 @@ return {
"pushcurrentpicture", "popcurrentpicture",
--
"arrowpath",
--- "colorlike", "dowithpath", "rangepath", "straightpath", "addbackground",
--- "cleanstring", "asciistring", "setunstringed", "getunstringed", "unstringed",
--- "showgrid",
--- "phantom",
--- "xshifted", "yshifted",
--- "drawarrowpath", "midarrowhead", "arrowheadonpath",
--- "drawxticks", "drawyticks", "drawticks",
--- "pointarrow",
--- "thefreelabel", "freelabel", "freedotlabel",
--- "anglebetween", "colorcircle",
--- "remapcolors", "normalcolors", "resetcolormap", "remapcolor", "remappedcolor",
--- "recolor", "refill", "redraw", "retext", "untext", "restroke", "reprocess", "repathed",
+ -- "colorlike", "dowithpath", "rangepath", "straightpath", "addbackground",
+ -- "cleanstring", "asciistring", "setunstringed", "getunstringed", "unstringed",
+ -- "showgrid",
+ -- "phantom",
+ -- "xshifted", "yshifted",
+ -- "drawarrowpath", "midarrowhead", "arrowheadonpath",
+ -- "drawxticks", "drawyticks", "drawticks",
+ -- "pointarrow",
+ -- "thefreelabel", "freelabel", "freedotlabel",
+ -- "anglebetween", "colorcircle",
+ -- "remapcolors", "normalcolors", "resetcolormap", "remapcolor", "remappedcolor",
+ -- "recolor", "refill", "redraw", "retext", "untext", "restroke", "reprocess", "repathed",
"tensecircle", "roundedsquare",
- "colortype", "whitecolor", "blackcolor",
+ "colortype", "whitecolor", "blackcolor", "basiccolors",
--
--- "swappointlabels",
+ -- "swappointlabels",
"normalfill", "normaldraw", "visualizepaths", "naturalizepaths",
- "drawboundary", "drawwholepath", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox",
- "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels",
- "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions",
+ "drawboundary", "drawwholepath",
+ "visualizeddraw", "visualizedfill",
+ "draworigin", "drawboundingbox",
+ "drawpath",
+ "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines",
+ "drawpointlabels",
+ "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions",
+ "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions",
--
"undashed",
--
@@ -102,5 +121,8 @@ return {
--
"passvariable", "passarrayvariable", "tostring", "format", "formatted",
"startpassingvariable", "stoppassingvariable",
+ --
+ "eofill", "eoclip",
+ "area",
},
}
diff --git a/tex/context/base/mult-ini.lua b/tex/context/base/mult-ini.lua
index e3ff904a6..bd3b1d38b 100644
--- a/tex/context/base/mult-ini.lua
+++ b/tex/context/base/mult-ini.lua
@@ -12,6 +12,7 @@ local serialize = table.serialize
local context = context
local commands = commands
+local implement = interfaces.implement
local allocate = utilities.storage.allocate
local mark = utilities.storage.mark
@@ -240,9 +241,17 @@ function interfaces.setuserinterface(interface,response)
end
report_interface("definitions: %a constants, %a variables, %a elements, %a commands, %a formats, %a translations",
nofconstants,nofvariables,nofelements,nofcommands,nofformats,noftranslations)
+ else
+ report_interface("the language(s) can only be set when making the format")
end
end
+interfaces.implement {
+ name = "setuserinterface",
+ actions = interfaces.setuserinterface,
+ arguments = { "string", "string" }
+}
+
interfaces.cachedsetups = interfaces.cachedsetups or { }
interfaces.hashedsetups = interfaces.hashedsetups or { }
@@ -265,9 +274,15 @@ function interfaces.cachesetup(t)
end
end
-function interfaces.is_command(str)
- return (str and str ~= "" and token.csname_name(token.create(str)) ~= "") or false -- there will be a proper function for this
-end
+-- if token.lookup then
+-- interfaces.is_command = token.lookup
+-- else
+
+ function interfaces.is_command(str)
+ return (str and str ~= "" and token.csname_name(token.create(str)) ~= "") or false -- there will be a proper function for this
+ end
+
+-- end
function interfaces.interfacedcommand(name)
local command = complete.commands[name]
@@ -276,41 +291,56 @@ end
-- interface
-function commands.writestatus(category,message,...)
- local r = reporters[category]
- if r then
- r(message,...)
- end
+function interfaces.writestatus(category,message)
+ reporters[category](message) -- could also be a setmetatablecall
end
-commands.registernamespace = interfaces.registernamespace
-commands.setinterfaceconstant = interfaces.setconstant
-commands.setinterfacevariable = interfaces.setvariable
-commands.setinterfaceelement = interfaces.setelement
-commands.setinterfacemessage = interfaces.setmessage
-commands.setinterfacemessages = interfaces.setmessages
-commands.showmessage = interfaces.showmessage
+implement { name = "registernamespace", actions = interfaces.registernamespace, arguments = { "integer", "string" } }
+implement { name = "setinterfaceconstant", actions = interfaces.setconstant, arguments = { "string", "string" } }
+implement { name = "setinterfacevariable", actions = interfaces.setvariable, arguments = { "string", "string" } }
+implement { name = "setinterfaceelement", actions = interfaces.setelement, arguments = { "string", "string" } }
+implement { name = "setinterfacemessage", actions = interfaces.setmessage, arguments = { "string", "string", "string" } }
+implement { name = "setinterfacemessages", actions = interfaces.setmessages, arguments = { "string", "string" } }
+implement { name = "showmessage", actions = interfaces.showmessage, arguments = { "string", "string", "string" } }
+
+implement {
+ name = "doifelsemessage",
+ actions = { interfaces.doifelsemessage, commands.doifelse },
+ arguments = { "string", "string" },
+}
-function commands.doifelsemessage(category,tag)
- commands.doifelse(interfaces.doifelsemessage(category,tag))
-end
+implement {
+ name = "getmessage",
+ actions = { interfaces.getmessage, context },
+ arguments = { "string", "string", "string" },
+}
-function commands.getmessage(category,tag,default)
- context(interfaces.getmessage(category,tag,default))
-end
+implement {
+ name = "writestatus",
+ overload = true,
+ actions = interfaces.writestatus,
+ arguments = { "string", "string" },
+}
-function commands.showassignerror(namespace,key,value,line)
- local ns, instance = match(namespace,"^(%d+)[^%a]+(%a+)")
+local function showassignerror(namespace,key,line)
+ local ns, instance = match(namespace,"^(%d+)[^%a]+(%a*)")
if ns then
namespace = corenamespaces[tonumber(ns)] or ns
end
- if instance then
+ -- injected in the stream for timing:
+ if instance and instance ~= "" then
context.writestatus("setup",formatters["error in line %a, namespace %a, instance %a, key %a"](line,namespace,instance,key))
else
context.writestatus("setup",formatters["error in line %a, namespace %a, key %a"](line,namespace,key))
end
end
+implement {
+ name = "showassignerror",
+ actions = showassignerror,
+ arguments = { "string", "string", "integer" },
+}
+
-- a simple helper
local settings_to_hash = utilities.parsers.settings_to_hash
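The hunks above replace the old commands.* bridge (reached from TeX via \ctxcommand) with interfaces.implement, which registers a Lua action under a name that the TeX end then calls directly as \clf_<name>, as the mult-ini.mkiv changes further down show. A minimal Lua sketch of that pattern, illustrative only and not part of this patch; the demo_greeting name and its reporter category are invented:

    local implement   = interfaces.implement           -- as aliased at the top of mult-ini.lua
    local report_demo = logs.reporter("demo","sketch")  -- invented category, only for the example

    local function demo_greeting(name)                  -- plain Lua function doing the work
        report_demo("hello %a",name)
    end

    implement {
        name      = "demo_greeting",                    -- exposed to TeX as \clf_demo_greeting
        actions   = demo_greeting,
        arguments = { "string" },                       -- one braced string argument
    }

From the TeX end this would be called as \clf_demo_greeting{world}, the same shape as the \clf_setinterfacemessage{#1}{#2}{#3} calls introduced in mult-ini.mkiv below.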
diff --git a/tex/context/base/mult-ini.mkiv b/tex/context/base/mult-ini.mkiv
index 09fc5daf0..1dd5a696a 100644
--- a/tex/context/base/mult-ini.mkiv
+++ b/tex/context/base/mult-ini.mkiv
@@ -365,10 +365,10 @@
{\bgroup
\ifcsname\m!prefix!#2\endcsname\else\setgvalue{\m!prefix!#2}{#2}\fi
\catcode\endoflineasciicode\activecatcode
- \doifinsetelse{#1}{\currentresponses,all}\mult_messages_start_yes\mult_messages_start_nop{#2}}
+ \doifelseinset{#1}{\currentresponses,all}\mult_messages_start_yes\mult_messages_start_nop{#2}}
\def\mult_messages_start_yes#1#2\stopmessages
- {\ctxcommand{setinterfacemessages("#1",\!!bs#2\!!es)}%
+ {\clf_setinterfacemessages{#1}{#2}%
\egroup}
\def\mult_messages_start_nop#1#2\stopmessages
@@ -378,13 +378,15 @@
\unexpanded\def\setinterfacemessage#1#2#3%
{\ifcsname\m!prefix!#1\endcsname\else\setgvalue{\m!prefix!#1}{#1}\fi
- \ctxcommand{setinterfacemessage("#1","#2",\!!bs#3\!!es)}}
+ \clf_setinterfacemessage{#1}{#2}{#3}}
-\unexpanded\def\setmessagetext #1#2{\edef\currentmessagetext{\ctxcommand{getmessage("#1","#2")}}}
-\unexpanded\def\getmessage #1#2{\ctxcommand{getmessage("#1","#2")}}
-\unexpanded\def\doifelsemessage #1#2{\ctxcommand{doifelsemessage("#1","#2")}}
-\unexpanded\def\showmessage #1#2#3{\ctxcommand{showmessage("#1","#2",\!!bs#3\!!es)}}
-\unexpanded\def\writestatus #1#2{\ctxcommand{writestatus("#1",\!!bs#2\!!es)}}
+\unexpanded\def\setmessagetext #1#2{\edef\currentmessagetext{\clf_getmessage{#1}{#2}}}
+\unexpanded\def\getmessage #1#2{\clf_getmessage{#1}{#2}}
+\unexpanded\def\doifelsemessage #1#2{\clf_doifelsemessage{#1}{#2}}
+\unexpanded\def\showmessage #1#2#3{\clf_showmessage{#1}{#2}{#3}}
+\unexpanded\def\writestatus #1#2{\clf_writestatus{#1}{#2}}
+
+\let\doifmessageelse\doifelsemessage
%D \macros
%D {ifshowwarnings, ifshowmessages}
@@ -730,11 +732,11 @@
% temporary mkiv hack (we can best just store the whole table in memory)
\unexpanded\def\setinterfaceconstant#1#2%
- {\ctxcommand{setinterfaceconstant("#1","#2")}%
+ {\clf_setinterfaceconstant{#1}{#2}%
\expandafter\def\csname\c!prefix!#1\endcsname{#1}}
\unexpanded\def\setinterfacevariable#1#2%
- {\ctxcommand{setinterfacevariable("#1","#2")}%
+ {\clf_setinterfacevariable{#1}{#2}%
\expandafter\def\csname\v!prefix!#1\endcsname{#2}}
%D \macros
@@ -763,7 +765,7 @@
%D part is needed, we use a \type{-}:
\unexpanded\def\setinterfaceelement#1#2%
- {\ctxcommand{setinterfaceelement("#1","#2")}%
+ {\clf_setinterfaceelement{#1}{#2}%
\ifcsname\e!prefix!#1\endcsname
\doifnotvalue{\e!prefix!#1}{#2}{\setvalue{\e!prefix!#1}{#2}}%
\else
diff --git a/tex/context/base/mult-it.mkii b/tex/context/base/mult-it.mkii
index 2b31e8e10..6474d93c4 100644
--- a/tex/context/base/mult-it.mkii
+++ b/tex/context/base/mult-it.mkii
@@ -180,6 +180,7 @@
\setinterfacevariable{flushleft}{flushleft}
\setinterfacevariable{flushouter}{flushouter}
\setinterfacevariable{flushright}{flushright}
+\setinterfacevariable{followingpage}{followingpage}
\setinterfacevariable{footer}{piedipagina}
\setinterfacevariable{footnote}{notapdp}
\setinterfacevariable{force}{forza}
@@ -301,6 +302,7 @@
\setinterfacevariable{mirrored}{riflesso}
\setinterfacevariable{monday}{lunedi}
\setinterfacevariable{mono}{mono}
+\setinterfacevariable{monobold}{monograssetto}
\setinterfacevariable{month}{mese}
\setinterfacevariable{more}{more}
\setinterfacevariable{morehyphenation}{morehyphenation}
@@ -360,6 +362,7 @@
\setinterfacevariable{positive}{positivo}
\setinterfacevariable{postponing}{posporre}
\setinterfacevariable{postscript}{postscript}
+\setinterfacevariable{precedingpage}{precedingpage}
\setinterfacevariable{preference}{preferenza}
\setinterfacevariable{preview}{anteprima}
\setinterfacevariable{previous}{precedente}
@@ -419,7 +422,7 @@
\setinterfacevariable{september}{settembre}
\setinterfacevariable{serif}{serif}
\setinterfacevariable{serried}{vicino}
-\setinterfacevariable{setups}{nastaveni}
+\setinterfacevariable{setups}{setups}
\setinterfacevariable{sheet}{sheet}
\setinterfacevariable{short}{short}
\setinterfacevariable{simplefonts}{simplefonts}
@@ -651,7 +654,7 @@
\setinterfaceconstant{coupling}{accoppiamento}
\setinterfaceconstant{couplingway}{modoaccoppiamento}
\setinterfaceconstant{criterium}{criterio}
-\setinterfaceconstant{css}{css}
+\setinterfaceconstant{cssfile}{cssfile}
\setinterfaceconstant{current}{corrente}
\setinterfaceconstant{cutspace}{cutspace}
\setinterfaceconstant{dash}{dash}
@@ -686,7 +689,7 @@
\setinterfaceconstant{equalwidth}{equalwidth}
\setinterfaceconstant{escape}{escape}
\setinterfaceconstant{evenmargin}{marginepari}
-\setinterfaceconstant{exitoffset}{labeloffset}
+\setinterfaceconstant{exitoffset}{exitoffset}
\setinterfaceconstant{expansion}{espansione}
\setinterfaceconstant{export}{export}
\setinterfaceconstant{extras}{extras}
@@ -701,6 +704,7 @@
\setinterfaceconstant{file}{file}
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
+\setinterfaceconstant{finalpubsep}{finalpubsep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
\setinterfaceconstant{firstpage}{primapagina}
\setinterfaceconstant{focus}{focus}
@@ -752,6 +756,7 @@
\setinterfaceconstant{indenting}{rientro}
\setinterfaceconstant{indentnext}{rientrasuccessivo}
\setinterfaceconstant{indicator}{indicatore}
+\setinterfaceconstant{initialsep}{initialsep}
\setinterfaceconstant{inner}{interno}
\setinterfaceconstant{innermargin}{margineinterno}
\setinterfaceconstant{inputfile}{inputfile}
@@ -833,7 +838,7 @@
\setinterfaceconstant{middletext}{testocentro}
\setinterfaceconstant{midsentence}{midsentence}
\setinterfaceconstant{min}{min}
-\setinterfaceconstant{mindepth}{mindeoth}
+\setinterfaceconstant{mindepth}{mindepth}
\setinterfaceconstant{minheight}{altezzamin}
\setinterfaceconstant{minwidth}{ampiezzamin}
\setinterfaceconstant{moffset}{moffset}
@@ -857,6 +862,7 @@
\setinterfaceconstant{nright}{ndestra}
\setinterfaceconstant{ntop}{ncima}
\setinterfaceconstant{number}{numero}
+\setinterfaceconstant{numberalign}{numberalign}
\setinterfaceconstant{numbercolor}{colorenumero}
\setinterfaceconstant{numbercommand}{comandonumero}
\setinterfaceconstant{numberconversion}{numberconversion}
@@ -1045,6 +1051,8 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
@@ -1278,6 +1286,7 @@
\setinterfacecommand{definetabletemplate}{definiscimodellotabella}
\setinterfacecommand{definetabulate}{definiscitabulato}
\setinterfacecommand{definetext}{definiscitesto}
+\setinterfacecommand{definetextbackground}{definetextbackground}
\setinterfacecommand{definetextposition}{definisciposizionetesto}
\setinterfacecommand{definetextvariable}{definiscivariabiletesto}
\setinterfacecommand{definetype}{definiscitype}
@@ -1438,7 +1447,6 @@
\setinterfacecommand{paperheight}{altezzacarta}
\setinterfacecommand{paperwidth}{ampiezzacarta}
\setinterfacecommand{periods}{punti}
-\setinterfacecommand{plaatsruwelijst}{placerawlist}
\setinterfacecommand{placebookmarks}{mettisegnalibro}
\setinterfacecommand{placecombinedlist}{mettielencocombinato}
\setinterfacecommand{placefloat}{placefloat}
@@ -1448,11 +1456,13 @@
\setinterfacecommand{placeheadtext}{posizionatestotesta}
\setinterfacecommand{placelegend}{mettilegenda}
\setinterfacecommand{placelist}{mettielenco}
+\setinterfacecommand{placelistofsynonyms}{placelistofsynonyms}
\setinterfacecommand{placelocalfootnotes}{mettinotepdplocali}
\setinterfacecommand{placelogos}{mettiloghi}
\setinterfacecommand{placeongrid}{mettiingriglia}
\setinterfacecommand{placeontopofeachother}{mettiunosullaltro}
\setinterfacecommand{placepagenumber}{mettinumeropagina}
+\setinterfacecommand{placerawlist}{placerawlist}
\setinterfacecommand{placereferencelist}{placereferencelist}
\setinterfacecommand{placeregister}{mettiregistro}
\setinterfacecommand{placerule}{mettilinea}
@@ -1599,7 +1609,6 @@
\setinterfacecommand{setupregister}{impostaregistro}
\setinterfacecommand{setuprotate}{impostarotazione}
\setinterfacecommand{setuprule}{impostalinea}
-\setinterfacecommand{setups}{impostazioni}
\setinterfacecommand{setupscreens}{impostaschermi}
\setinterfacecommand{setupsection}{impostasezione}
\setinterfacecommand{setupsectionblock}{impostabloccosezione}
@@ -1617,6 +1626,7 @@
\setinterfacecommand{setuptables}{impostatabelle}
\setinterfacecommand{setuptabulate}{impostatabulato}
\setinterfacecommand{setuptext}{impostatesto}
+\setinterfacecommand{setuptextbackground}{setuptextbackground}
\setinterfacecommand{setuptextposition}{impostaposizionetesto}
\setinterfacecommand{setuptextrules}{impostalineetesto}
\setinterfacecommand{setuptexttexts}{impostatestotesti}
@@ -1663,6 +1673,7 @@
\setinterfacecommand{startdocument}{iniziadocumento}
\setinterfacecommand{startenvironment}{iniziaambiente}
\setinterfacecommand{startfigure}{iniziafigura}
+\setinterfacecommand{startframed}{startframed}
\setinterfacecommand{startglobal}{iniziaglobale}
\setinterfacecommand{startline}{iniziariga}
\setinterfacecommand{startlinecorrection}{iniziacorrezioneriga}
@@ -1689,6 +1700,7 @@
\setinterfacecommand{starttable}{iniziatabella}
\setinterfacecommand{starttables}{iniziatabelle}
\setinterfacecommand{starttext}{iniziatesto}
+\setinterfacecommand{starttextbackground}{starttextbackground}
\setinterfacecommand{starttextrule}{inizialineatesto}
\setinterfacecommand{startunpacked}{iniziaunpacked}
\setinterfacecommand{startversion}{iniziaversione}
@@ -1703,6 +1715,7 @@
\setinterfacecommand{stopcomponent}{terminacomponente}
\setinterfacecommand{stopdocument}{terminadocumento}
\setinterfacecommand{stopenvironment}{terminaambiente}
+\setinterfacecommand{stopframed}{stopframed}
\setinterfacecommand{stopglobal}{terminaglobale}
\setinterfacecommand{stopline}{terminariga}
\setinterfacecommand{stoplinecorrection}{terminacorrezioneriga}
@@ -1728,6 +1741,7 @@
\setinterfacecommand{stoptable}{terminatabella}
\setinterfacecommand{stoptables}{terminatabelle}
\setinterfacecommand{stoptext}{terminatesto}
+\setinterfacecommand{stoptextbackground}{stoptextbackground}
\setinterfacecommand{stoptextrule}{terminalineatesto}
\setinterfacecommand{stopunpacked}{terminaunpacked}
\setinterfacecommand{stopversion}{terminaversioni}
diff --git a/tex/context/base/mult-low.lua b/tex/context/base/mult-low.lua
index f82be039c..9a05e59d9 100644
--- a/tex/context/base/mult-low.lua
+++ b/tex/context/base/mult-low.lua
@@ -13,7 +13,7 @@ return {
--
"zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive",
"plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred",
- "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard",
+ "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard", "maxcardminusone",
"zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint",
"points", "halfpoint",
"zeroskip",
@@ -31,7 +31,9 @@ return {
--
"fmtname", "fmtversion", "texengine", "texenginename", "texengineversion",
"luatexengine", "pdftexengine", "xetexengine", "unknownengine",
- "etexversion", "pdftexversion", "xetexversion", "xetexrevision",
+ -- "etexversion",
+ -- "pdftexversion", "pdftexrevision",
+ -- "xetexversion", "xetexrevision",
--
"activecatcode",
--
@@ -47,7 +49,7 @@ return {
"inicatcodes",
"ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes",
"prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes",
- "xmlcatcodes",
+ "xmlcatcodes", "ctdcatcodes",
--
"escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode",
"endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode",
@@ -60,7 +62,7 @@ return {
"lessthanasciicode", "morethanasciicode", "doublecommentsignal",
"atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode",
"doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode",
- "primeasciicode",
+ "primeasciicode", "hyphenasciicode",
--
"activemathcharcode",
--
@@ -85,23 +87,27 @@ return {
"fontexheight", "fontemwidth", "fontextraspace", "slantperpoint",
"interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace",
"mathsupdisplay", "mathsupnormal", "mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight",
+ "muquad",
--
-- maybe a different class
--
"startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset",
- "doifmode", "doifmodeelse", "doifnotmode",
- "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifallmodeselse", "doifnotallmodes",
+ "doifmode", "doifelsemode", "doifmodeelse", "doifnotmode",
+ "startmodeset","stopmodeset",
+ "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes",
+ "doifallmodes", "doifelseallmodes", "doifallmodeselse", "doifnotallmodes",
"startenvironment", "stopenvironment", "environment",
"startcomponent", "stopcomponent", "component",
"startproduct", "stopproduct", "product",
"startproject", "stopproject", "project",
"starttext", "stoptext", "startnotext", "stopnotext","startdocument", "stopdocument", "documentvariable", "setupdocument",
"startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule","setupmodule","currentmoduleparameter","moduleparameter",
+ "everystarttext", "everystoptext",
--
"startTEXpage", "stopTEXpage",
-- "startMPpage", "stopMPpage", -- already catched by nested lexer
--
- "enablemode", "disablemode", "preventmode",
+ "enablemode", "disablemode", "preventmode", "definemode",
"globalenablemode", "globaldisablemode", "globalpreventmode",
"pushmode", "popmode",
--
@@ -119,11 +125,12 @@ return {
--
"lefttorightmark", "righttoleftmark",
--
- "breakablethinspace", "nobreakspace", "narrownobreakspace", "zerowidthnobreakspace",
+ "breakablethinspace", "nobreakspace", "nonbreakablespace", "narrownobreakspace", "zerowidthnobreakspace",
"ideographicspace", "ideographichalffillspace",
"twoperemspace", "threeperemspace", "fourperemspace", "fiveperemspace", "sixperemspace",
"figurespace", "punctuationspace", "hairspace",
"zerowidthspace", "zerowidthnonjoiner", "zerowidthjoiner", "zwnj", "zwj",
+ "optionalspace", "asciispacechar",
},
["helpers"] = {
--
@@ -136,9 +143,11 @@ return {
"starttexdefinition", "stoptexdefinition",
"starttexcode", "stoptexcode",
"startcontextcode", "stopcontextcode",
+ "startcontextdefinitioncode", "stopcontextdefinitioncode",
+ "texdefinition",
--
- "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup",
- "doifelsecommandhandler","doifnotcommandhandler","doifcommandhandler",
+ "doifelsesetups", "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "fastsetup",
+ "doifelsecommandhandler", "doifcommandhandlerelse", "doifnotcommandhandler", "doifcommandhandler",
--
"newmode", "setmode", "resetmode",
"newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode",
@@ -150,7 +159,7 @@ return {
"then",
"begcsname",
--
- "strippedcsname",
+ "strippedcsname","checkedstrippedcsname",
--
"firstargumentfalse", "firstargumenttrue",
"secondargumentfalse", "secondargumenttrue",
@@ -165,6 +174,8 @@ return {
--
"donetrue", "donefalse",
--
+ "inlineordisplaymath","indisplaymath","forcedisplaymath","startforceddisplaymath","stopforceddisplaymath","reqno",
+ --
"htdp",
"unvoidbox",
"hfilll", "vfilll",
@@ -196,7 +207,7 @@ return {
--
"normalbaselineskip", "normallineskip", "normallineskiplimit",
--
- "availablehsize", "localhsize", "setlocalhsize",
+ "availablehsize", "localhsize", "setlocalhsize", "distributedhsize", "hsizefraction",
--
"nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs",
--
@@ -218,23 +229,36 @@ return {
"scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip",
--
"doif", "doifnot", "doifelse",
- "doifinset", "doifnotinset", "doifinsetelse",
- "doifnextcharelse", "doifnextoptionalelse", "doifnextbgroupelse", "doifnextparenthesiselse", "doiffastoptionalcheckelse",
- "doifundefinedelse", "doifdefinedelse", "doifundefined", "doifdefined",
+ "doifinset", "doifnotinset",
+ "doifelseinset", "doifinsetelse",
+ "doifelsenextchar", "doifnextcharelse",
+ "doifelsenextoptional", "doifnextoptionalelse",
+ "doifelsenextoptionalcs", "doifnextoptionalcselse",
+ "doifelsefastoptionalcheck", "doiffastoptionalcheckelse",
+ "doifelsenextbgroup", "doifnextbgroupelse",
+ "doifelsenextbgroupcs", "doifnextbgroupcselse",
+ "doifelsenextparenthesis", "doifnextparenthesiselse",
+ "doifelseundefined", "doifundefinedelse",
+ "doifelsedefined", "doifdefinedelse",
+ "doifundefined", "doifdefined",
"doifelsevalue", "doifvalue", "doifnotvalue",
- "doifnothing", "doifsomething", "doifelsenothing", "doifsomethingelse",
- "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing",
- "doifdimensionelse", "doifnumberelse", "doifnumber", "doifnotnumber",
- "doifcommonelse", "doifcommon", "doifnotcommon",
- "doifinstring", "doifnotinstring", "doifinstringelse",
- "doifassignmentelse", "docheckassignment",
+ "doifnothing", "doifsomething",
+ "doifelsenothing", "doifnothingelse",
+ "doifelsesomething", "doifsomethingelse",
+ "doifvaluenothing", "doifvaluesomething",
+ "doifelsevaluenothing", "doifvaluenothingelse",
+ "doifelsedimension", "doifdimensionelse",
+ "doifelsenumber", "doifnumberelse", "doifnumber", "doifnotnumber",
+ "doifelsecommon", "doifcommonelse", "doifcommon", "doifnotcommon",
+ "doifinstring", "doifnotinstring", "doifelseinstring", "doifinstringelse",
+ "doifelseassignment", "doifassignmentelse", "docheckassignment",
--
"tracingall", "tracingnone", "loggingall",
--
"removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to",
--
- "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace",
- "obeyspaces", "obeylines", "obeyedspace", "obeyedline",
+ "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace", "nbsp",
+ "obeyspaces", "obeylines", "obeyedspace", "obeyedline", "obeyedtab", "obeyedpage",
"normalspace",
--
"executeifdefined",
@@ -273,13 +297,17 @@ return {
"firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments",
--
"firstofoneunexpanded",
+ "firstoftwounexpanded", "secondoftwounexpanded",
+ "firstofthreeunexpanded", "secondofthreeunexpanded", "thirdofthreeunexpanded",
--
"gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments",
"gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals",
--
"dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith",
--
- "newconstant", "setnewconstant", "newconditional", "settrue", "setfalse", "setconstant",
+ "newconstant", "setnewconstant", "setconstant", "setconstantvalue",
+ "newconditional", "settrue", "setfalse", "settruevalue", "setfalsevalue",
+ --
"newmacro", "setnewmacro", "newfraction",
"newsignal",
--
@@ -292,7 +320,7 @@ return {
--
"modulonumber", "dividenumber",
--
- "getfirstcharacter", "doiffirstcharelse",
+ "getfirstcharacter", "doifelsefirstchar", "doiffirstcharelse",
--
"startnointerference", "stopnointerference",
--
@@ -300,7 +328,9 @@ return {
--
"leftorright",
--
- "strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight",
+ "offinterlineskip", "oninterlineskip", "nointerlineskip",
+ --
+ "strut", "halfstrut", "quarterstrut", "depthstrut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight",
--
"ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing",
"ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing",
@@ -334,13 +364,17 @@ return {
"uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette",
"mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox",
"mathtext", "setmathsmalltextbox", "setmathtextbox",
+ "pushmathstyle", "popmathstyle",
--
"triggerdisplaystyle", "triggertextstyle", "triggerscriptstyle", "triggerscriptscriptstyle",
"triggeruncrampedstyle", "triggercrampedstyle",
"triggersmallstyle", "triggeruncrampedsmallstyle", "triggercrampedsmallstyle",
"triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle",
--
- "luaexpr", "expdoifelse", "expdoif", "expdoifnot", "expdoifcommonelse", "expdoifinsetelse",
+ "luaexpr",
+ "expelsedoif", "expdoif", "expdoifnot",
+ "expdoifelsecommon", "expdoifcommonelse",
+ "expdoifelseinset", "expdoifinsetelse",
--
"ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport",
"ctxlua", "luacode", "lateluacode", "directluacode",
@@ -351,6 +385,8 @@ return {
"definenamedlua",
"obeylualines", "obeyluatokens",
"startluacode", "stopluacode", "startlua", "stoplua",
+ "startctxfunction","stopctxfunction","ctxfunction",
+ "startctxfunctiondefinition","stopctxfunctiondefinition", "installctxfunction",
--
"carryoverpar",
--
@@ -365,5 +401,8 @@ return {
--
"lesshyphens", "morehyphens", "nohyphens", "dohyphens",
--
+ "Ucheckedstartdisplaymath", "Ucheckedstopdisplaymath",
+ --
+ "nobreak", "allowbreak", "goodbreak",
}
}
diff --git a/tex/context/base/mult-mes.lua b/tex/context/base/mult-mes.lua
index d9ee151a8..64184f8b0 100644
--- a/tex/context/base/mult-mes.lua
+++ b/tex/context/base/mult-mes.lua
@@ -996,6 +996,21 @@ return {
["publications:7"] = {
en = "placing all entries, use 'text' to be more selective",
},
+ ["publications:10"] = {
+ en = "unknown command %a, using built-in context variant %a",
+ },
+ ["publications:11"] = {
+ en = "unknown command %a",
+ },
+ ["publications:12"] = {
+ en = "missing setup: %s",
+ },
+ ["publications:13"] = {
+ en = "no field %a for tag %a in dataset %a",
+ },
+ ["publications:14"] = {
+ en = "unknown rendering alternative %a",
+ },
["pushing level: %a"] = {
nl = "niveau omhoog: %a",
},
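The new publications:* entries above use the same category:tag keying as the rest of this table and are reached through the message helpers rewired in mult-ini.lua earlier in this patch. A small illustrative sketch, not part of the patch; the chosen tag and fallback text are arbitrary:

    -- resolves the "publications:12" entry ("missing setup: %s") for the
    -- current interface; signature as in the commands.getmessage wrapper
    -- that this patch removes
    local text = interfaces.getmessage("publications","12","unknown")
    context(text)

    -- TeX side, via the macro redefined in mult-ini.mkiv:
    --   \getmessage{publications}{12}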
diff --git a/tex/context/base/mult-mps.lua b/tex/context/base/mult-mps.lua
index 104b9d42e..a6bebc266 100644
--- a/tex/context/base/mult-mps.lua
+++ b/tex/context/base/mult-mps.lua
@@ -3,7 +3,7 @@ return {
"btex", "etex", "verbatimtex",
},
shortcuts = {
- "..", "...", "--", "---", "&",
+ "..", "...", "--", "---", "&", "\\",
},
primitives = { -- to be checked
"charcode", "day", "linecap", "linejoin", "miterlimit", "month", "pausing",
@@ -31,8 +31,10 @@ return {
"def", "vardef", "enddef", "expr", "suffix", "text", "primary", "secondary",
"tertiary", "primarydef", "secondarydef", "tertiarydef",
"randomseed", "also", "contour", "doublepath",
- "withcolor", "withcmykcolor", "withpen", "dashed", "if", "else", "elseif", "fi", "for", "endfor", "forever", "exitif", "within",
- "forsuffixes", "downto", "upto", "step", "until",
+ "withcolor", "withcmykcolor", "withpen",
+ "dashed",
+ "if", "else", "elseif", "fi", "for", "endfor", "forever", "exitif", "within",
+ "forsuffixes", "step", "until",
"charlist", "extensible", "fontdimen", "headerbyte", "kern", "ligtable",
"boundarychar", "chardp", "charext", "charht", "charic", "charwd", "designsize",
"fontmaking", "charexists",
@@ -50,27 +52,34 @@ return {
"withprescript", "withpostscript",
"top", "bot", "lft", "rt", "ulft", "urt", "llft", "lrt",
--
- "redpart", "greenpart", "bluepart", "cyanpart", "magentapart", "yellowpart", "blackpart", "greypart",
+ "redpart", "greenpart", "bluepart",
+ "cyanpart", "magentapart", "yellowpart",
+ "blackpart",
"prescriptpart", "postscriptpart",
- "rgbcolor", "cmykcolor", "greycolor", "graycolor",
- "colormodel", "graypart",
+ "rgbcolor", "cmykcolor", -- "greycolor", "graycolor",
+ "colormodel", "graypart", "greypart", "greycolor", "graycolor",
"dashpart", "penpart",
-- "colorpart",
"stroked", "filled", "textual", "clipped", "bounded", "pathpart",
"expandafter",
- "minute", "hour", "outputformat", "outputtemplate", "filenametemplate", "fontmapfile", "fontmapline",
+ "minute", "hour",
+ "outputformat", "outputtemplate", "filenametemplate", "fontmapfile", "fontmapline",
"fontpart", "fontsize", "glyph", "restoreclipcolor", "troffmode",
+ --
+ "runscript", "maketext",
},
commands = {
+ "upto", "downto",
"beginfig", "endfig",
- "beginglyph", "endglyph", "charscale",
+ "beginglyph", "endglyph", -- actually a mult-fun one
"rotatedaround", "reflectedabout",
"arrowhead",
"currentpen", "currentpicture", "cuttings",
"defaultfont", "extra_beginfig", "extra_endfig",
"ditto", "EOF", "down",
"evenly", "fullcircle", "halfcircle", "identity", "in", "left",
- "origin", "pensquare", "quartercircle", "right",
+ "pensquare", "penrazor", "penspec",
+ "origin", "quartercircle", "right",
"unitsquare", "up", "withdots",
"abs", "bbox", "ceiling", "center", "cutafter", "cutbefore", "dir",
"directionpoint", "div", "dotprod", "intersectionpoint", "inverse", "mod",
@@ -97,14 +106,13 @@ return {
"counterclockwise", "tensepath", "takepower", "direction",
"softjoin", -- "magstep",
"makelabel", -- "laboff",
- "rotatedabout", "flex", "superellipse", "erase", "image",
+ "rotatedabout", "flex", "superellipse", "image",
"nullpen", "savepen", "clearpen", "penpos", "penlabels", -- "clear_pen_memory",
- "range", "numtok", "thru",
+ "range", "thru",
"z", "laboff",
"bye",
--
"red", "green", "blue", "cyan", "magenta", "yellow", "black", "white", "background",
- "graypart", "graycolor",
--
"mm", "pt", "dd", "bp", "cm", "pc", "cc", "in",
--
@@ -114,8 +122,41 @@ return {
--
"mitered", "rounded", "beveled", "butt", "squared",
"eps", "epsilon", "infinity",
- "bboxmargin", "ahlength", "ahangle", "labeloffset", "dotlabeldiam", "defaultpen", "defaultscale", "join_radius",
+ "bboxmargin",
+ "ahlength", "ahangle",
+ "labeloffset", "dotlabeldiam",
+ "defaultpen", "defaultscale",
+ "join_radius",
+ "charscale", -- actually a mult-fun one
--
"pen_lft", "pen_rt", "pen_top", "pen_bot", -- "pen_count_",
},
+ metafont = {
+ -- :: =: =:| =:|> |=: |=:> |=:| |=:|> |=:|>> ||:
+ "autorounding", "beginchar", "blacker", "boundarychar", "capsule_def",
+ "capsule_end", "change_width", "chardp", "chardx", "chardy", "charexists",
+ "charext", "charht", "charic", "charlist", "charwd", "cull", "cullit",
+ "currenttransform", "currentwindow", "define_blacker_pixels",
+ "define_corrected_pixels", "define_good_x_pixels", "define_good_y_pixels",
+ "define_horizontal_corrected_pixels", "define_pixels",
+ "define_whole_blacker_pixels", "define_whole_pixels",
+ "define_whole_vertical_blacker_pixels", "define_whole_vertical_pixels",
+ "designsize", "display", "displaying", "endchar", "extensible", "extra_beginchar",
+ "extra_endchar", "extra_setup", "fillin", "font_coding_scheme",
+ "font_extra_space", "font_identifier", "font_normal_shrink",
+ "font_normal_space", "font_normal_stretch", "font_quad", "font_size",
+ "font_slant", "font_x_height", "fontdimen", "fontmaking", "gfcorners",
+ "granularity", "grayfont", "headerbyte", "hppp", "hround", "imagerules",
+ "italcorr", "kern", "labelfont", "ligtable", "lowres_fix", "makebox",
+ "makegrid", "maketicks", "mode_def", "mode_setup", "nodisplays",
+ "notransforms", "numspecial", "o_correction", "openit", "openwindow",
+ "pixels_per_inch", "proofing", "proofoffset", "proofrule", "proofrulethickness",
+ "rulepen", "screenchars", "screenrule", "screenstrokes", "screen_cols", "screen_rows",
+ "showit", "slantfont", "smode", "smoothing", "titlefont", "totalweight",
+ "tracingedges", "tracingpens", "turningcheck", "unitpixel", "vppp", "vround",
+ "xoffset", "yoffset",
+ },
+ disabled = {
+ "verbatimtex", "troffmode"
+ }
}
diff --git a/tex/context/base/mult-nl.mkii b/tex/context/base/mult-nl.mkii
index 9f91515cb..22350dc50 100644
--- a/tex/context/base/mult-nl.mkii
+++ b/tex/context/base/mult-nl.mkii
@@ -180,6 +180,7 @@
\setinterfacevariable{flushleft}{lijnlinks}
\setinterfacevariable{flushouter}{lijnbuiten}
\setinterfacevariable{flushright}{lijnrechts}
+\setinterfacevariable{followingpage}{opvolgendepagina}
\setinterfacevariable{footer}{voet}
\setinterfacevariable{footnote}{voetnoot}
\setinterfacevariable{force}{forceer}
@@ -287,20 +288,21 @@
\setinterfacevariable{mathematics}{wiskunde}
\setinterfacevariable{mathmatrix}{wiskundematrix}
\setinterfacevariable{max}{max}
-\setinterfacevariable{maxdepth}{maxdepth}
-\setinterfacevariable{maxheight}{maxheight}
-\setinterfacevariable{maxwidth}{maxwidth}
+\setinterfacevariable{maxdepth}{maxdiepte}
+\setinterfacevariable{maxheight}{maxhoogte}
+\setinterfacevariable{maxwidth}{maxbreedte}
\setinterfacevariable{may}{mei}
\setinterfacevariable{mediaeval}{mediaeval}
\setinterfacevariable{medium}{middel}
\setinterfacevariable{middle}{midden}
\setinterfacevariable{min}{min}
-\setinterfacevariable{mindepth}{mindepth}
+\setinterfacevariable{mindepth}{mindiepte}
\setinterfacevariable{minheight}{minhoogte}
\setinterfacevariable{minwidth}{minbreedte}
\setinterfacevariable{mirrored}{gespiegeld}
\setinterfacevariable{monday}{maandag}
\setinterfacevariable{mono}{mono}
+\setinterfacevariable{monobold}{monovet}
\setinterfacevariable{month}{maand}
\setinterfacevariable{more}{meer}
\setinterfacevariable{morehyphenation}{morehyphenation}
@@ -360,6 +362,7 @@
\setinterfacevariable{positive}{positief}
\setinterfacevariable{postponing}{uitstellen}
\setinterfacevariable{postscript}{postscript}
+\setinterfacevariable{precedingpage}{voorafgaandepagina}
\setinterfacevariable{preference}{voorkeur}
\setinterfacevariable{preview}{preview}
\setinterfacevariable{previous}{vorige}
@@ -419,7 +422,7 @@
\setinterfacevariable{september}{september}
\setinterfacevariable{serif}{serif}
\setinterfacevariable{serried}{aanelkaar}
-\setinterfacevariable{setups}{instellingen}
+\setinterfacevariable{setups}{setups}
\setinterfacevariable{sheet}{sheet}
\setinterfacevariable{short}{kort}
\setinterfacevariable{simplefonts}{simplefonts}
@@ -651,7 +654,7 @@
\setinterfaceconstant{coupling}{koppeling}
\setinterfaceconstant{couplingway}{koppelwijze}
\setinterfaceconstant{criterium}{criterium}
-\setinterfaceconstant{css}{css}
+\setinterfaceconstant{cssfile}{cssfile}
\setinterfaceconstant{current}{huidige}
\setinterfaceconstant{cutspace}{snijwit}
\setinterfaceconstant{dash}{streep}
@@ -686,7 +689,7 @@
\setinterfaceconstant{equalwidth}{equalwidth}
\setinterfaceconstant{escape}{escape}
\setinterfaceconstant{evenmargin}{evenmarge}
-\setinterfaceconstant{exitoffset}{labeloffset}
+\setinterfaceconstant{exitoffset}{exitoffset}
\setinterfaceconstant{expansion}{expansie}
\setinterfaceconstant{export}{exporteer}
\setinterfaceconstant{extras}{extras}
@@ -701,6 +704,7 @@
\setinterfaceconstant{file}{file}
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
+\setinterfaceconstant{finalpubsep}{finalpubsep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
\setinterfaceconstant{firstpage}{eerstepagina}
\setinterfaceconstant{focus}{focus}
@@ -752,6 +756,7 @@
\setinterfaceconstant{indenting}{inspringen}
\setinterfaceconstant{indentnext}{springvolgendein}
\setinterfaceconstant{indicator}{aanduiding}
+\setinterfaceconstant{initialsep}{initialsep}
\setinterfaceconstant{inner}{binnen}
\setinterfaceconstant{innermargin}{binnenmarge}
\setinterfaceconstant{inputfile}{inputfile}
@@ -857,6 +862,7 @@
\setinterfaceconstant{nright}{nrechts}
\setinterfaceconstant{ntop}{nboven}
\setinterfaceconstant{number}{nummer}
+\setinterfaceconstant{numberalign}{nummeruitlijnen}
\setinterfaceconstant{numbercolor}{nummerkleur}
\setinterfaceconstant{numbercommand}{nummercommando}
\setinterfaceconstant{numberconversion}{numberconversion}
@@ -944,7 +950,7 @@
\setinterfaceconstant{reduction}{reductie}
\setinterfaceconstant{ref}{ref}
\setinterfaceconstant{refcommand}{refcommand}
-\setinterfaceconstant{reference}{verwijzing}
+\setinterfaceconstant{reference}{referentie}
\setinterfaceconstant{referenceprefix}{referenceprefix}
\setinterfaceconstant{referencing}{refereren}
\setinterfaceconstant{region}{gebied}
@@ -1045,6 +1051,8 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixscheider}
\setinterfaceconstant{suffixstopper}{suffixafsluiter}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
@@ -1092,7 +1100,7 @@
\setinterfaceconstant{totalnumber}{totalnumber}
\setinterfaceconstant{type}{type}
\setinterfaceconstant{unit}{eenheid}
-\setinterfaceconstant{unknownreference}{onbekendeverwijzing}
+\setinterfaceconstant{unknownreference}{onbekendereferentie}
\setinterfaceconstant{urlalternative}{urlvariant}
\setinterfaceconstant{urlspace}{urlspatie}
\setinterfaceconstant{validate}{valideer}
@@ -1278,6 +1286,7 @@
\setinterfacecommand{definetabletemplate}{definieertabelvorm}
\setinterfacecommand{definetabulate}{definieertabulatie}
\setinterfacecommand{definetext}{definieertekst}
+\setinterfacecommand{definetextbackground}{definieertekstachtergrond}
\setinterfacecommand{definetextposition}{definieertekstpositie}
\setinterfacecommand{definetextvariable}{definieertekstvariabele}
\setinterfacecommand{definetype}{definieertype}
@@ -1438,7 +1447,6 @@
\setinterfacecommand{paperheight}{papierhoogte}
\setinterfacecommand{paperwidth}{papierbreedte}
\setinterfacecommand{periods}{punten}
-\setinterfacecommand{plaatsruwelijst}{plaatsruwelijst}
\setinterfacecommand{placebookmarks}{plaatsbookmarks}
\setinterfacecommand{placecombinedlist}{plaatssamengesteldelijst}
\setinterfacecommand{placefloat}{plaatsplaatsblok}
@@ -1448,11 +1456,13 @@
\setinterfacecommand{placeheadtext}{plaatskoptekst}
\setinterfacecommand{placelegend}{plaatslegenda}
\setinterfacecommand{placelist}{plaatslijst}
+\setinterfacecommand{placelistofsynonyms}{plaatslijstmetsynoniemen}
\setinterfacecommand{placelocalfootnotes}{plaatslokalevoetnoten}
\setinterfacecommand{placelogos}{plaatsbeeldmerken}
\setinterfacecommand{placeongrid}{plaatsopgrid}
\setinterfacecommand{placeontopofeachother}{plaatsonderelkaar}
\setinterfacecommand{placepagenumber}{plaatspaginanummer}
+\setinterfacecommand{placerawlist}{plaatsruwelijst}
\setinterfacecommand{placereferencelist}{plaatsreferentielijst}
\setinterfacecommand{placeregister}{plaatsregister}
\setinterfacecommand{placerule}{plaatslijn}
@@ -1599,7 +1609,6 @@
\setinterfacecommand{setupregister}{stelregisterin}
\setinterfacecommand{setuprotate}{stelroterenin}
\setinterfacecommand{setuprule}{stellijnin}
-\setinterfacecommand{setups}{instellingen}
\setinterfacecommand{setupscreens}{stelrastersin}
\setinterfacecommand{setupsection}{stelsectiein}
\setinterfacecommand{setupsectionblock}{stelsectieblokin}
@@ -1617,6 +1626,7 @@
\setinterfacecommand{setuptables}{steltabellenin}
\setinterfacecommand{setuptabulate}{steltabulatiein}
\setinterfacecommand{setuptext}{steltekstin}
+\setinterfacecommand{setuptextbackground}{steltekstachtergrondin}
\setinterfacecommand{setuptextposition}{steltekstpositiein}
\setinterfacecommand{setuptextrules}{steltekstlijnenin}
\setinterfacecommand{setuptexttexts}{stelteksttekstenin}
@@ -1663,6 +1673,7 @@
\setinterfacecommand{startdocument}{startdocument}
\setinterfacecommand{startenvironment}{startomgeving}
\setinterfacecommand{startfigure}{startfiguur}
+\setinterfacecommand{startframed}{startomlijnd}
\setinterfacecommand{startglobal}{startglobaal}
\setinterfacecommand{startline}{startregel}
\setinterfacecommand{startlinecorrection}{startregelcorrectie}
@@ -1689,6 +1700,7 @@
\setinterfacecommand{starttable}{starttabel}
\setinterfacecommand{starttables}{starttabellen}
\setinterfacecommand{starttext}{starttekst}
+\setinterfacecommand{starttextbackground}{starttekstachtergrond}
\setinterfacecommand{starttextrule}{starttekstlijn}
\setinterfacecommand{startunpacked}{startvanelkaar}
\setinterfacecommand{startversion}{startversie}
@@ -1703,6 +1715,7 @@
\setinterfacecommand{stopcomponent}{stoponderdeel}
\setinterfacecommand{stopdocument}{stopdocument}
\setinterfacecommand{stopenvironment}{stopomgeving}
+\setinterfacecommand{stopframed}{stopomlijnd}
\setinterfacecommand{stopglobal}{stopglobaal}
\setinterfacecommand{stopline}{stopregel}
\setinterfacecommand{stoplinecorrection}{stopregelcorrectie}
@@ -1728,6 +1741,7 @@
\setinterfacecommand{stoptable}{stoptabel}
\setinterfacecommand{stoptables}{stoptabellen}
\setinterfacecommand{stoptext}{stoptekst}
+\setinterfacecommand{stoptextbackground}{stoptekstachtergrond}
\setinterfacecommand{stoptextrule}{stoptekstlijn}
\setinterfacecommand{stopunpacked}{stopvanelkaar}
\setinterfacecommand{stopversion}{stopversie}
diff --git a/tex/context/base/mult-pe.mkii b/tex/context/base/mult-pe.mkii
index 240130cdf..32cf32db1 100644
--- a/tex/context/base/mult-pe.mkii
+++ b/tex/context/base/mult-pe.mkii
@@ -180,6 +180,7 @@
\setinterfacevariable{flushleft}{پمپچپ}
\setinterfacevariable{flushouter}{پمپخارجی}
\setinterfacevariable{flushright}{پمپراست}
+\setinterfacevariable{followingpage}{followingpage}
\setinterfacevariable{footer}{تهبرگ}
\setinterfacevariable{footnote}{پانوشت}
\setinterfacevariable{force}{اجبار}
@@ -301,6 +302,7 @@
\setinterfacevariable{mirrored}{منعکس}
\setinterfacevariable{monday}{دوشنبه}
\setinterfacevariable{mono}{مونو}
+\setinterfacevariable{monobold}{monobold}
\setinterfacevariable{month}{ماه}
\setinterfacevariable{more}{more}
\setinterfacevariable{morehyphenation}{شکستکلماتبیشتر}
@@ -360,6 +362,7 @@
\setinterfacevariable{positive}{مثبت}
\setinterfacevariable{postponing}{تاخیر}
\setinterfacevariable{postscript}{پستاسکریپت}
+\setinterfacevariable{precedingpage}{precedingpage}
\setinterfacevariable{preference}{ترجیح}
\setinterfacevariable{preview}{پیشدید}
\setinterfacevariable{previous}{قبلی}
@@ -419,7 +422,7 @@
\setinterfacevariable{september}{سپتامبر}
\setinterfacevariable{serif}{سریف}
\setinterfacevariable{serried}{تنگهم}
-\setinterfacevariable{setups}{بارگذاریها}
+\setinterfacevariable{setups}{setups}
\setinterfacevariable{sheet}{ورقه}
\setinterfacevariable{short}{short}
\setinterfacevariable{simplefonts}{simplefonts}
@@ -651,7 +654,7 @@
\setinterfaceconstant{coupling}{تزویج}
\setinterfaceconstant{couplingway}{روشتزویج}
\setinterfaceconstant{criterium}{criterium}
-\setinterfaceconstant{css}{css}
+\setinterfaceconstant{cssfile}{cssfile}
\setinterfaceconstant{current}{جاری}
\setinterfaceconstant{cutspace}{فضایبرش}
\setinterfaceconstant{dash}{دش}
@@ -686,7 +689,7 @@
\setinterfaceconstant{equalwidth}{عرضیکسان}
\setinterfaceconstant{escape}{فرار}
\setinterfaceconstant{evenmargin}{حاشیهزوج}
-\setinterfaceconstant{exitoffset}{labeloffset}
+\setinterfaceconstant{exitoffset}{exitoffset}
\setinterfaceconstant{expansion}{گسترش}
\setinterfaceconstant{export}{export}
\setinterfaceconstant{extras}{extras}
@@ -701,6 +704,7 @@
\setinterfaceconstant{file}{پرونده}
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
+\setinterfaceconstant{finalpubsep}{finalpubsep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
\setinterfaceconstant{firstpage}{صفحهاول}
\setinterfaceconstant{focus}{تمرکز}
@@ -752,6 +756,7 @@
\setinterfaceconstant{indenting}{تورفتگی}
\setinterfaceconstant{indentnext}{متنتورفته}
\setinterfaceconstant{indicator}{اندیکاتور}
+\setinterfaceconstant{initialsep}{initialsep}
\setinterfaceconstant{inner}{داخلی}
\setinterfaceconstant{innermargin}{حاشیهداخلی}
\setinterfaceconstant{inputfile}{پروندهورودی}
@@ -857,6 +862,7 @@
\setinterfaceconstant{nright}{nright}
\setinterfaceconstant{ntop}{ntop}
\setinterfaceconstant{number}{شماره}
+\setinterfaceconstant{numberalign}{numberalign}
\setinterfaceconstant{numbercolor}{رنگشماره}
\setinterfaceconstant{numbercommand}{فرمانشماره}
\setinterfaceconstant{numberconversion}{numberconversion}
@@ -1001,7 +1007,7 @@
\setinterfaceconstant{separatorcolor}{separatorcolor}
\setinterfaceconstant{separatorstyle}{separatorstyle}
\setinterfaceconstant{set}{قراربده}
-\setinterfaceconstant{setups}{بارگذاریها}
+\setinterfaceconstant{setups}{setups}
\setinterfaceconstant{shrink}{shrink}
\setinterfaceconstant{side}{کنار}
\setinterfaceconstant{sidealign}{تنظیمکنار}
@@ -1045,6 +1051,8 @@
\setinterfaceconstant{suffix}{پسوند}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
@@ -1278,6 +1286,7 @@
\setinterfacecommand{definetabletemplate}{تعریفالگویجدول}
\setinterfacecommand{definetabulate}{تعریفجدولبندی}
\setinterfacecommand{definetext}{تعریفمتن}
+\setinterfacecommand{definetextbackground}{definetextbackground}
\setinterfacecommand{definetextposition}{تعریفمکانمتن}
\setinterfacecommand{definetextvariable}{تعریفمتغیرمتن}
\setinterfacecommand{definetype}{تعریفتایپ}
@@ -1438,7 +1447,6 @@
\setinterfacecommand{paperheight}{ارتفاعبرگ}
\setinterfacecommand{paperwidth}{عرضبرگ}
\setinterfacecommand{periods}{نقطهها}
-\setinterfacecommand{plaatsruwelijst}{درجلیستخام}
\setinterfacecommand{placebookmarks}{درجچوبخط}
\setinterfacecommand{placecombinedlist}{درجلیستمختلط}
\setinterfacecommand{placefloat}{درجشناور}
@@ -1448,11 +1456,13 @@
\setinterfacecommand{placeheadtext}{درجمتنسر}
\setinterfacecommand{placelegend}{درجراهنما}
\setinterfacecommand{placelist}{درجلیست}
+\setinterfacecommand{placelistofsynonyms}{placelistofsynonyms}
\setinterfacecommand{placelocalfootnotes}{درجپانوشتهایموضعی}
\setinterfacecommand{placelogos}{درجآرمها}
\setinterfacecommand{placeongrid}{درجدرتوری}
\setinterfacecommand{placeontopofeachother}{درجدربالاییکدیگر}
\setinterfacecommand{placepagenumber}{درجشمارهصفحه}
+\setinterfacecommand{placerawlist}{درجلیستخام}
\setinterfacecommand{placereferencelist}{درجلیستمرجع}
\setinterfacecommand{placeregister}{درجثبت}
\setinterfacecommand{placerule}{درجخط}
@@ -1599,7 +1609,6 @@
\setinterfacecommand{setupregister}{بارگذاریثبت}
\setinterfacecommand{setuprotate}{بارگذاریدوران}
\setinterfacecommand{setuprule}{بارگذاریخط}
-\setinterfacecommand{setups}{بارگذاریها}
\setinterfacecommand{setupscreens}{بارگذاریپردهها}
\setinterfacecommand{setupsection}{بارگذاریبخش}
\setinterfacecommand{setupsectionblock}{بارگذاریبلوکبخش}
@@ -1617,6 +1626,7 @@
\setinterfacecommand{setuptables}{بارگذاریجدولها}
\setinterfacecommand{setuptabulate}{بارگذاریجدولبندی}
\setinterfacecommand{setuptext}{بارگذاریمتن}
+\setinterfacecommand{setuptextbackground}{setuptextbackground}
\setinterfacecommand{setuptextposition}{بارگذاریمکانمتن}
\setinterfacecommand{setuptextrules}{بارگذاریخطهایمتن}
\setinterfacecommand{setuptexttexts}{بارگذاریمتنمتنها}
@@ -1663,6 +1673,7 @@
\setinterfacecommand{startdocument}{شروعنوشتار}
\setinterfacecommand{startenvironment}{شروعمحیط}
\setinterfacecommand{startfigure}{شروعشکل}
+\setinterfacecommand{startframed}{startframed}
\setinterfacecommand{startglobal}{شروعسراسری}
\setinterfacecommand{startline}{شروعخط}
\setinterfacecommand{startlinecorrection}{شروعتصحیحخط}
@@ -1689,6 +1700,7 @@
\setinterfacecommand{starttable}{شروعجدول}
\setinterfacecommand{starttables}{شروعجدولها}
\setinterfacecommand{starttext}{شروعمتن}
+\setinterfacecommand{starttextbackground}{starttextbackground}
\setinterfacecommand{starttextrule}{شروعخطمتن}
\setinterfacecommand{startunpacked}{شروعغیرفشرده}
\setinterfacecommand{startversion}{شروعنسخه}
@@ -1703,6 +1715,7 @@
\setinterfacecommand{stopcomponent}{پایانمولفه}
\setinterfacecommand{stopdocument}{پایاننوشتار}
\setinterfacecommand{stopenvironment}{پایانمحیط}
+\setinterfacecommand{stopframed}{stopframed}
\setinterfacecommand{stopglobal}{پایانسراسری}
\setinterfacecommand{stopline}{پایانخط}
\setinterfacecommand{stoplinecorrection}{پایانتصحیحخط}
@@ -1728,6 +1741,7 @@
\setinterfacecommand{stoptable}{پایانجدول}
\setinterfacecommand{stoptables}{پایانجدولها}
\setinterfacecommand{stoptext}{پایانمتن}
+\setinterfacecommand{stoptextbackground}{stoptextbackground}
\setinterfacecommand{stoptextrule}{پایانخطمتن}
\setinterfacecommand{stopunpacked}{پایانغیرفشرده}
\setinterfacecommand{stopversion}{پایاننسخه}
diff --git a/tex/context/base/mult-prm.lua b/tex/context/base/mult-prm.lua
index e6fa4abcc..3b4d84356 100644
--- a/tex/context/base/mult-prm.lua
+++ b/tex/context/base/mult-prm.lua
@@ -232,9 +232,10 @@ return {
"latelua",
"luaescapestring",
"luastartup",
- "luatexdatestamp",
+ "luatexbanner",
"luatexrevision",
"luatexversion",
+ "luafunction",
"mathstyle",
"nokerns",
"noligs",
@@ -252,6 +253,7 @@ return {
"suppressifcsnameerror",
"suppresslongerror",
"suppressoutererror",
+ "suppressmathparerror",
"synctex",
},
["omega"]={
@@ -573,10 +575,10 @@ return {
"catcodetable",
"char",
"chardef",
- "chardp",
- "charht",
- "charit",
- "charwd",
+--"chardp",
+--"charht",
+--"charit",
+--"charwd",
"cleaders",
"clearmarks",
"closein",
@@ -772,7 +774,7 @@ return {
"lpcode",
"luaescapestring",
"luastartup",
- "luatexdatestamp",
+ "luatexbanner",
"luatexrevision",
"luatexversion",
"mag",
diff --git a/tex/context/base/mult-ro.mkii b/tex/context/base/mult-ro.mkii
index 3b7206e44..a5d90033f 100644
--- a/tex/context/base/mult-ro.mkii
+++ b/tex/context/base/mult-ro.mkii
@@ -180,6 +180,7 @@
\setinterfacevariable{flushleft}{flushleft}
\setinterfacevariable{flushouter}{flushouter}
\setinterfacevariable{flushright}{flushright}
+\setinterfacevariable{followingpage}{followingpage}
\setinterfacevariable{footer}{subsol}
\setinterfacevariable{footnote}{notasubsol}
\setinterfacevariable{force}{fortat}
@@ -301,6 +302,7 @@
\setinterfacevariable{mirrored}{oglindit}
\setinterfacevariable{monday}{luni}
\setinterfacevariable{mono}{mono}
+\setinterfacevariable{monobold}{monoaldin}
\setinterfacevariable{month}{luna}
\setinterfacevariable{more}{more}
\setinterfacevariable{morehyphenation}{morehyphenation}
@@ -360,6 +362,7 @@
\setinterfacevariable{positive}{positiv}
\setinterfacevariable{postponing}{postponing}
\setinterfacevariable{postscript}{postscript}
+\setinterfacevariable{precedingpage}{precedingpage}
\setinterfacevariable{preference}{preferinta}
\setinterfacevariable{preview}{previzualizare}
\setinterfacevariable{previous}{precedent}
@@ -419,7 +422,7 @@
\setinterfacevariable{september}{septembrie}
\setinterfacevariable{serif}{serif}
\setinterfacevariable{serried}{serried}
-\setinterfacevariable{setups}{setari}
+\setinterfacevariable{setups}{setups}
\setinterfacevariable{sheet}{sheet}
\setinterfacevariable{short}{short}
\setinterfacevariable{simplefonts}{simplefonts}
@@ -651,7 +654,7 @@
\setinterfaceconstant{coupling}{cuplare}
\setinterfaceconstant{couplingway}{modcuplare}
\setinterfaceconstant{criterium}{criteriu}
-\setinterfaceconstant{css}{css}
+\setinterfaceconstant{cssfile}{cssfile}
\setinterfaceconstant{current}{curent}
\setinterfaceconstant{cutspace}{cutspace}
\setinterfaceconstant{dash}{dash}
@@ -686,7 +689,7 @@
\setinterfaceconstant{equalwidth}{equalwidth}
\setinterfaceconstant{escape}{escape}
\setinterfaceconstant{evenmargin}{marginepara}
-\setinterfaceconstant{exitoffset}{labeloffset}
+\setinterfaceconstant{exitoffset}{exitoffset}
\setinterfaceconstant{expansion}{expansiune}
\setinterfaceconstant{export}{export}
\setinterfaceconstant{extras}{extras}
@@ -701,6 +704,7 @@
\setinterfaceconstant{file}{fisier}
\setinterfaceconstant{filtercommand}{filtercommand}
\setinterfaceconstant{finalnamesep}{finalnamesep}
+\setinterfaceconstant{finalpubsep}{finalpubsep}
\setinterfaceconstant{firstnamesep}{firstnamesep}
\setinterfaceconstant{firstpage}{primapagina}
\setinterfaceconstant{focus}{focus}
@@ -752,6 +756,7 @@
\setinterfaceconstant{indenting}{aliniat}
\setinterfaceconstant{indentnext}{aliniaturmator}
\setinterfaceconstant{indicator}{indicator}
+\setinterfaceconstant{initialsep}{initialsep}
\setinterfaceconstant{inner}{intern}
\setinterfaceconstant{innermargin}{innermargin}
\setinterfaceconstant{inputfile}{inputfile}
@@ -857,6 +862,7 @@
\setinterfaceconstant{nright}{ndreapta}
\setinterfaceconstant{ntop}{nsus}
\setinterfaceconstant{number}{numar}
+\setinterfaceconstant{numberalign}{numberalign}
\setinterfaceconstant{numbercolor}{culoarenumar}
\setinterfaceconstant{numbercommand}{comandanumar}
\setinterfaceconstant{numberconversion}{numberconversion}
@@ -1045,6 +1051,8 @@
\setinterfaceconstant{suffix}{suffix}
\setinterfaceconstant{suffixseparator}{suffixseparator}
\setinterfaceconstant{suffixstopper}{suffixstopper}
+\setinterfaceconstant{surnamefirstnamesep}{surnamefirstnamesep}
+\setinterfaceconstant{surnameinitialsep}{surnameinitialsep}
\setinterfaceconstant{surnamesep}{surnamesep}
\setinterfaceconstant{sx}{sx}
\setinterfaceconstant{sy}{sy}
@@ -1278,6 +1286,7 @@
\setinterfacecommand{definetabletemplate}{definestesablontabel}
\setinterfacecommand{definetabulate}{definestetabulatori}
\setinterfacecommand{definetext}{definestetext}
+\setinterfacecommand{definetextbackground}{definetextbackground}
\setinterfacecommand{definetextposition}{definestepozitietext}
\setinterfacecommand{definetextvariable}{definestevariabilatext}
\setinterfacecommand{definetype}{definetype}
@@ -1438,7 +1447,6 @@
\setinterfacecommand{paperheight}{inaltimehartie}
\setinterfacecommand{paperwidth}{latimehartie}
\setinterfacecommand{periods}{puncte}
-\setinterfacecommand{plaatsruwelijst}{placerawlist}
\setinterfacecommand{placebookmarks}{plaseazasemnecarte}
\setinterfacecommand{placecombinedlist}{punelistacombinata}
\setinterfacecommand{placefloat}{placefloat}
@@ -1448,11 +1456,13 @@
\setinterfacecommand{placeheadtext}{placeheadtext}
\setinterfacecommand{placelegend}{punelegenda}
\setinterfacecommand{placelist}{punelista}
+\setinterfacecommand{placelistofsynonyms}{placelistofsynonyms}
\setinterfacecommand{placelocalfootnotes}{punenotesubsollocale}
\setinterfacecommand{placelogos}{punelogouri}
\setinterfacecommand{placeongrid}{plaseazapegrid}
\setinterfacecommand{placeontopofeachother}{punedeasuprafiecareia}
\setinterfacecommand{placepagenumber}{punenumarpagina}
+\setinterfacecommand{placerawlist}{placerawlist}
\setinterfacecommand{placereferencelist}{placereferencelist}
\setinterfacecommand{placeregister}{puneregistru}
\setinterfacecommand{placerule}{punerigla}
@@ -1599,7 +1609,6 @@
\setinterfacecommand{setupregister}{seteazaregistru}
\setinterfacecommand{setuprotate}{seteazarotare}
\setinterfacecommand{setuprule}{seteazarigla}
-\setinterfacecommand{setups}{setari}
\setinterfacecommand{setupscreens}{seteazaecrane}
\setinterfacecommand{setupsection}{seteazasectiune}
\setinterfacecommand{setupsectionblock}{seteazablocsectiune}
@@ -1617,6 +1626,7 @@
\setinterfacecommand{setuptables}{seteazatabele}
\setinterfacecommand{setuptabulate}{seteazatabulatori}
\setinterfacecommand{setuptext}{seteazatext}
+\setinterfacecommand{setuptextbackground}{setuptextbackground}
\setinterfacecommand{setuptextposition}{seteazapozitietext}
\setinterfacecommand{setuptextrules}{seteazarigletext}
\setinterfacecommand{setuptexttexts}{seteazatextetext}
@@ -1663,6 +1673,7 @@
\setinterfacecommand{startdocument}{startdocument}
\setinterfacecommand{startenvironment}{startmediu}
\setinterfacecommand{startfigure}{startfigura}
+\setinterfacecommand{startframed}{startframed}
\setinterfacecommand{startglobal}{startglobal}
\setinterfacecommand{startline}{startlinie}
\setinterfacecommand{startlinecorrection}{startcorectielinie}
@@ -1689,6 +1700,7 @@
\setinterfacecommand{starttable}{starttabel}
\setinterfacecommand{starttables}{starttabele}
\setinterfacecommand{starttext}{starttext}
+\setinterfacecommand{starttextbackground}{starttextbackground}
\setinterfacecommand{starttextrule}{startriglatext}
\setinterfacecommand{startunpacked}{startneimpachetat}
\setinterfacecommand{startversion}{startversiune}
@@ -1703,6 +1715,7 @@
\setinterfacecommand{stopcomponent}{stopcomponenta}
\setinterfacecommand{stopdocument}{stopdocument}
\setinterfacecommand{stopenvironment}{stopmediu}
+\setinterfacecommand{stopframed}{stopframed}
\setinterfacecommand{stopglobal}{stopblobal}
\setinterfacecommand{stopline}{stoplinie}
\setinterfacecommand{stoplinecorrection}{stopcorectielinie}
@@ -1728,6 +1741,7 @@
\setinterfacecommand{stoptable}{stoptabel}
\setinterfacecommand{stoptables}{stoptabele}
\setinterfacecommand{stoptext}{stoptext}
+\setinterfacecommand{stoptextbackground}{stoptextbackground}
\setinterfacecommand{stoptextrule}{stopriglatext}
\setinterfacecommand{stopunpacked}{stopneimpachetat}
\setinterfacecommand{stopversion}{stopversiune}
diff --git a/tex/context/base/mult-sys.mkiv b/tex/context/base/mult-sys.mkiv
index 8c1bff2bc..b1475f2dc 100644
--- a/tex/context/base/mult-sys.mkiv
+++ b/tex/context/base/mult-sys.mkiv
@@ -124,7 +124,7 @@
\definesystemconstant {SansSlanted}
\definesystemconstant {SansBoldSlanted}
\definesystemconstant {SansCaps}
-% \definesystemconstant {SansCapsSlanted}
+%definesystemconstant {SansCapsSlanted}
\definesystemconstant {Mono}
\definesystemconstant {MonoBold}
@@ -133,8 +133,8 @@
\definesystemconstant {MonoSlanted}
\definesystemconstant {MonoBoldSlanted}
\definesystemconstant {MonoCaps}
-% \definesystemconstant {MonoCapsSlanted}
-% \definesystemconstant {MonoVariable}
+%definesystemconstant {MonoCapsSlanted}
+%definesystemconstant {MonoVariable}
\definesystemconstant {DefaultFont}
@@ -261,6 +261,20 @@
\definesystemconstant {single}
\definesystemconstant {multi}
\definesystemconstant {indeed}
+\definesystemconstant {internal}
+\definesystemconstant {current}
+\definesystemconstant {chain}
+
+% translating setups is asking for a mess so we keep them as-is:
+
+\definesystemconstant {setups}
+
+\definesystemconstant {cite}
+\definesystemconstant {nocite}
+\definesystemconstant {list}
+\definesystemconstant {register}
+\definesystemconstant {author}
+\definesystemconstant {numbering}
% \def\s!parent{->} % 1% faster / => does not work in assignments
% \def\s!child {<-} % 1% faster / <= does not work in assignments
@@ -279,12 +293,16 @@
\definesystemconstant {bold}
\definesystemconstant {italic}
\definesystemconstant {slanted}
-
-\definesystemconstant {default}
\definesystemconstant {smallcaps}
+\definesystemconstant {lcgreek}
+\definesystemconstant {ucgreek}
+\definesystemconstant {sygreek}
+\definesystemconstant {italics}
\definesystemconstant {run}
+\definesystemconstant {default}
+
\definesystemconstant {mode}
\definesystemconstant {setup}
\definesystemconstant {environment}
@@ -298,11 +316,22 @@
\definesystemconstant {uncramped}
\definesystemconstant {cramped}
+\definesystemconstant {hyphenmin}
\definesystemconstant {lefthyphenmin}
\definesystemconstant {righthyphenmin}
\definesystemconstant {lefthyphenchar}
\definesystemconstant {righthyphenchar}
+\definesystemconstant {head}
+\definesystemconstant {symbol}
+\definesystemconstant {sub}
+\definesystemconstant {margin}
+\definesystemconstant {edge}
+
+\definesystemconstant {double}
+\definesystemconstant {decimal}
+\definesystemconstant {binary}
+
%definesystemconstant {skewchar}
%definesystemconstant {hyphenchar}
\definesystemconstant {catcodes}
@@ -322,6 +351,12 @@
\definesystemconstant {designsize}
%definesystemconstant {background}
%definesystemconstant {ucmap}
+\definesystemconstant {level}
+\definesystemconstant {integral}
+\definesystemconstant {insert} % maybe insertclass
+\definesystemconstant {marker}
+
+\definesystemconstant {mixedcolumn}
%definesystemconstant {property}
%definesystemconstant {overprint}
@@ -382,8 +417,13 @@
\definesystemconstant {size}
\definesystemconstant {depth}
+\definesystemconstant {height}
+\definesystemconstant {noheight}
\definesystemconstant {nodepth}
+\definesystemconstant {rel}
+\definesystemconstant {ord}
+
%D Just to be complete we define the standard \TEX\ units.
\definesystemconstant {cm}
@@ -538,6 +578,13 @@
\defineinterfacevariable {c} {c}
\defineinterfacevariable {d} {d}
+%D For tagging:
+
+\definesystemconstant {subtype}
+\definesystemconstant {top}
+\definesystemconstant {bottom}
+\definesystemconstant {both}
+
%D Special purpose variables:
\def\v!oddeven#1{\ifodd#1\v!odd\else\v!even\fi}
diff --git a/tex/context/base/node-acc.lua b/tex/context/base/node-acc.lua
index 81ae496b2..e684aeb7b 100644
--- a/tex/context/base/node-acc.lua
+++ b/tex/context/base/node-acc.lua
@@ -11,10 +11,27 @@ local nodes, node = nodes, node
local nodecodes = nodes.nodecodes
local tasks = nodes.tasks
-local traverse_nodes = node.traverse
-local traverse_id = node.traverse_id
-local copy_node = node.copy
-local free_nodelist = node.flush_list
+local nuts = nodes.nuts
+local tonut = nodes.tonut
+local tonode = nodes.tonode
+
+local getid = nuts.getid
+local getfield = nuts.getfield
+local getattr = nuts.getattr
+local getlist = nuts.getlist
+local getchar = nuts.getchar
+local getnext = nuts.getnext
+
+local setfield = nuts.setfield
+local setattr = nuts.setattr
+
+local traverse_nodes = nuts.traverse
+local traverse_id = nuts.traverse_id
+local copy_node = nuts.copy
+local free_nodelist = nuts.flush_list
+local insert_after = nuts.insert_after
+
+local new_gluespec = nuts.pool.gluespec -- temp hack
local glue_code = nodecodes.glue
local kern_code = nodecodes.kern
@@ -24,62 +41,85 @@ local vlist_code = nodecodes.vlist
local a_characters = attributes.private("characters")
-local threshold = 65536
+local threshold = 65536 -- not used
+local nofreplaced = 0
-- todo: nbsp etc
-- todo: collapse kerns
+-- p_id
+
local function injectspaces(head)
- local p
+ local p, p_id
local n = head
while n do
- local id = n.id
+ local id = getid(n)
if id == glue_code then -- todo: check for subtype related to spacing (13/14 but most seems to be 0)
- -- if n.spec.width > 0 then -- threshold
- if p and p.id == glyph_code then
+ -- if getfield(getfield(n,"spec"),"width") > 0 then -- threshold
+-- if p and p_id == glyph_code then
+ if p and getid(p) == glyph_code then
local g = copy_node(p)
- local c = g.components
+ local c = getfield(g,"components")
if c then -- it happens that we copied a ligature
free_nodelist(c)
- g.components = nil
- g.subtype = 256
+ setfield(g,"components",nil)
+ setfield(g,"subtype",256)
end
- local a = n[a_characters]
- local s = copy_node(n.spec)
- g.char, n.spec = 32, s
- p.next, g.prev = g, p
- g.next, n.prev = n, g
- s.width = s.width - g.width
+ local a = getattr(n,a_characters)
+ -- local s = copy_node(getfield(n,"spec"))
+ -- this will be fixed in luatex but for now a temp hack (zero test)
+ local s = getfield(n,"spec")
+ s = s == 0 and new_gluespec(0) or copy_node(s)
+ --
+ setfield(g,"char",32)
+ setfield(n,"spec",s)
+ -- insert_after(p,p,g)
+ setfield(p,"next",g)
+ setfield(g,"prev",p)
+ setfield(g,"next",n)
+ setfield(n,"prev",g)
+ setfield(s,"width",getfield(s,"width") - getfield(g,"width"))
if a then
- g[a_characters] = a
+ setattr(g,a_characters,a)
end
- s[a_characters] = 0
- n[a_characters] = 0
+ setattr(s,a_characters,0)
+ setattr(n,a_characters,0)
+ nofreplaced = nofreplaced + 1
end
-- end
elseif id == hlist_code or id == vlist_code then
- injectspaces(n.list,attribute)
+ injectspaces(getlist(n),attribute)
-- elseif id == kern_code then -- the backend already collapses
-- local first = n
-- while true do
- -- local nn = n.next
- -- if nn and nn.id == kern_code then
+ -- local nn = getnext(n)
+ -- if nn and getid(nn) == kern_code then
-- -- maybe we should delete kerns but who cares at this stage
- -- first.kern = first.kern + nn.kern
- -- nn.kern = 0
+    --         setfield(first,"kern",getfield(first,"kern") + getfield(nn,"kern"))
+ -- setfield(nn,"kern",0)
-- n = nn
-- else
-- break
-- end
-- end
end
+ p_id = id
p = n
- n = n.next
+ n = getnext(n)
end
- return head, true
+ return head, true -- always done anyway
+end
+
+nodes.handlers.accessibility = function(head)
+ local head, done = injectspaces(tonut(head))
+ return tonode(head), done
end
-nodes.handlers.accessibility = injectspaces
+statistics.register("inserted spaces in output",function()
+ if nofreplaced > 0 then
+ return nofreplaced
+ end
+end)
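The wrapper above (nodes.handlers.accessibility) shows the conversion pattern used throughout this patch: the worker runs on nuts (direct nodes) and only the boundary converts back and forth. A minimal editorial sketch of that pattern, not part of the patch; the handler name "demo" is made up:

local nuts    = nodes.nuts
local tonut   = nuts.tonut
local tonode  = nuts.tonode
local getid   = nuts.getid
local getnext = nuts.getnext

local function demo_worker(head)         -- head is a nut here
    local n = head
    while n do
        local id = getid(n)              -- instead of n.id
        -- inspect or rewrite n here
        n = getnext(n)                   -- instead of n.next
    end
    return head, false
end

nodes.handlers.demo = function(head)     -- head is a regular node here
    local h, done = demo_worker(tonut(head))
    return tonode(h), done
end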
-- todo:
@@ -90,16 +130,18 @@ nodes.handlers.accessibility = injectspaces
-- local function compact(n)
-- local t = { }
-- for n in traverse_id(glyph_code,n) do
--- t[#t+1] = utfchar(n.char) -- check for unicode
+-- t[#t+1] = utfchar(getchar(n)) -- check for unicode
-- end
-- return concat(t,"")
-- end
--
-- local function injectspans(head)
--- for n in traverse_nodes(head) do
--- local id = n.id
+-- local done = false
+--     for n in traverse_nodes(tonut(head)) do
+-- local id = getid(n)
-- if id == disc then
--- local r, p = n.replace, n.pre
+-- local r = getfield(n,"replace")
+-- local p = getfield(n,"pre")
-- if r and p then
-- local str = compact(r)
-- local hsh = hyphenated[str]
@@ -108,13 +150,14 @@ nodes.handlers.accessibility = injectspaces
-- hyphenated[str] = hsh
-- codes[hsh] = str
-- end
--- n[a_hyphenated] = hsh
+-- setattr(n,a_hyphenated,hsh)
+-- done = true
-- end
-- elseif id == hlist_code or id == vlist_code then
--- injectspans(n.list)
+-- injectspans(getlist(n))
-- end
-- end
--- return head, true
+-- return tonodes(head), done
-- end
--
-- nodes.injectspans = injectspans
@@ -122,19 +165,22 @@ nodes.handlers.accessibility = injectspaces
-- tasks.appendaction("processors", "words", "nodes.injectspans")
--
-- local function injectspans(head)
--- for n in traverse_nodes(head) do
--- local id = n.id
+-- local done = false
+-- for n in traverse_nodes(tonut(head)) do
+-- local id = getid(n)
-- if id == disc then
--- local a = n[a_hyphenated]
+-- local a = getattr(n,a_hyphenated)
-- if a then
-- local str = codes[a]
-- local b = new_pdfliteral(format("/Span << /ActualText %s >> BDC", lpdf.tosixteen(str)))
-- local e = new_pdfliteral("EMC")
--- node.insert_before(head,n,b)
--- node.insert_after(head,n,e)
+-- insert_before(head,n,b)
+-- insert_after(head,n,e)
+-- done = true
-- end
-- elseif id == hlist_code or id == vlist_code then
--- injectspans(n.list)
+-- injectspans(getlist(n))
-- end
-- end
+-- return tonodes(head), done
-- end
diff --git a/tex/context/base/node-aux.lua b/tex/context/base/node-aux.lua
index 443c78547..12da8ea8a 100644
--- a/tex/context/base/node-aux.lua
+++ b/tex/context/base/node-aux.lua
@@ -22,82 +22,153 @@ local vlist_code = nodecodes.vlist
local attributelist_code = nodecodes.attributelist -- temporary
local math_code = nodecodes.math
-local nodepool = nodes.pool
-
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+local vianuts = nuts.vianuts
+
+local getbox = nuts.getbox
+local getnext = nuts.getnext
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getlist = nuts.getlist
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getattr = nuts.getattr
+local setfield = nuts.setfield
+local setattr = nuts.setattr
+
+local traverse_nodes = nuts.traverse
+local traverse_id = nuts.traverse_id
+local free_node = nuts.free
+local hpack_nodes = nuts.hpack
+local unset_attribute = nuts.unset_attribute
+local first_glyph = nuts.first_glyph
+local copy_node = nuts.copy
+local copy_node_list = nuts.copy_list
+local find_tail = nuts.tail
+local insert_node_after = nuts.insert_after
+local isnode = nuts.is_node
+local getbox = nuts.getbox
+
+local nodes_traverse_id = nodes.traverse_id
+local nodes_first_glyph = nodes.first_glyph
+
+local nodepool = nuts.pool
local new_glue = nodepool.glue
local new_glyph = nodepool.glyph
-local traverse_nodes = node.traverse
-local traverse_id = node.traverse_id
-local free_node = node.free
-local hpack_nodes = node.hpack
-local unset_attribute = node.unset_attribute
-local first_glyph = node.first_glyph or node.first_character
-local copy_node = node.copy
-local copy_node_list = node.copy_list
-local slide_nodes = node.slide
-local insert_node_after = node.insert_after
-local isnode = node.is_node
-
local unsetvalue = attributes.unsetvalue
local current_font = font.current
-local texgetbox = tex.getbox
+local texsetbox = tex.setbox
local report_error = logs.reporter("node-aux:error")
-function nodes.repackhlist(list,...)
---~ nodes.showsimplelist(list)
+-- At some point we figured that copying before using was the safest bet
+-- when dealing with boxes at the tex end. This is because tex also needs
+-- to manage the grouping (i.e. savestack). However, there is an easy
+-- solution that keeps the tex end happy as tex.setbox deals with this. The
+-- overhead of one temporary list node is negligible.
+--
+-- function tex.takebox(id)
+-- local box = tex.getbox(id)
+-- if box then
+-- local copy = node.copy(box)
+-- local list = box.list
+-- copy.list = list
+-- box.list = nil
+-- tex.setbox(id,nil)
+-- return copy
+-- end
+-- end
+
+local function takebox(id)
+ local box = getbox(id)
+ if box then
+ local copy = copy_node(box)
+ local list = getlist(box)
+ setfield(copy,"list",list)
+ setfield(box,"list",nil)
+ texsetbox(id,nil)
+ return copy
+ end
+end
+
+function nodes.takebox(id)
+ local b = takebox(id)
+ if b then
+ return tonode(b)
+ end
+end
+
+nuts.takebox = takebox
+tex.takebox = nodes.takebox -- sometimes more clear
+
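A minimal usage sketch of the takebox variant defined above, not part of the patch; box register 0 and target register 2 are arbitrary examples:

local b = nodes.takebox(0)   -- detaches the list of box 0 and voids that register
if b then
    tex.setbox(2,b)          -- the cheap copy can be handed straight back to TeX
end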
+-- so far
+
+local function repackhlist(list,...)
local temp, b = hpack_nodes(list,...)
- list = temp.list
- temp.list = nil
+ list = getlist(temp)
+ setfield(temp,"list",nil)
free_node(temp)
return list, b
end
+nuts.repackhlist = repackhlist
+
+function nodes.repackhlist(list,...)
+ local list, b = repackhlist(tonut(list),...)
+ return tonode(list), b
+end
+
local function set_attributes(head,attr,value)
for n in traverse_nodes(head) do
- n[attr] = value
- local id = n.id
+ setattr(n,attr,value)
+ local id = getid(n)
if id == hlist_node or id == vlist_node then
- set_attributes(n.list,attr,value)
+ set_attributes(getlist(n),attr,value)
end
end
end
local function set_unset_attributes(head,attr,value)
for n in traverse_nodes(head) do
- if not n[attr] then
- n[attr] = value
+ if not getattr(n,attr) then
+ setattr(n,attr,value)
end
- local id = n.id
+ local id = getid(n)
if id == hlist_code or id == vlist_code then
- set_unset_attributes(n.list,attr,value)
+ set_unset_attributes(getlist(n),attr,value)
end
end
end
local function unset_attributes(head,attr)
for n in traverse_nodes(head) do
- n[attr] = unsetvalue
- local id = n.id
+ setattr(n,attr,unsetvalue)
+ local id = getid(n)
if id == hlist_code or id == vlist_code then
- unset_attributes(n.list,attr)
+ unset_attributes(getlist(n),attr)
end
end
end
-nodes.setattribute = node.set_attribute
-nodes.getattribute = node.has_attribute
-nodes.unsetattribute = node.unset_attribute
-nodes.has_attribute = node.has_attribute
+-- for old times sake
+
+nuts.setattribute = nuts.setattr nodes.setattribute = nodes.setattr
+nuts.getattribute = nuts.getattr nodes.getattribute = nodes.getattr
+nuts.unsetattribute = nuts.unset_attribute nodes.unsetattribute = nodes.unset_attribute
+nuts.has_attribute = nuts.has_attribute nodes.has_attribute = nodes.has_attribute
+nuts.firstglyph = nuts.first_glyph nodes.firstglyph = nodes.first_glyph
-nodes.firstglyph = first_glyph
-nodes.setattributes = set_attributes
-nodes.setunsetattributes = set_unset_attributes
-nodes.unsetattributes = unset_attributes
+nuts.setattributes = set_attributes nodes.setattributes = vianuts(set_attributes)
+nuts.setunsetattributes = set_unset_attributes nodes.setunsetattributes = vianuts(set_unset_attributes)
+nuts.unsetattributes = unset_attributes nodes.unsetattributes = vianuts(unset_attributes)
+-- history:
+--
-- function nodes.is_skipable(a,id) -- skipable nodes at the margins during character protrusion
-- return (
-- id ~= glyph_node
@@ -106,29 +177,26 @@ nodes.unsetattributes = unset_attributes
-- or id == adjust_node
-- or id == penalty_node
-- or (id == glue_node and a.spec.writable)
--- or (id == disc_node and a.pre == nil and a.post == nil and a.replace == nil)
--- or (id == math_node and a.surround == 0)
--- or (id == kern_node and (a.kern == 0 or a.subtype == NORMAL))
--- or (id == hlist_node and a.width == 0 and a.height == 0 and a.depth == 0 and a.list == nil)
--- or (id == whatsit_node and a.subtype ~= pdf_refximage_node and a.subtype ~= pdf_refxform_node)
+-- or (id == disc_node and getfield(a,"pre") == nil and getfield(a,"post") == nil and getfield(a,"replace") == nil)
+-- or (id == math_node and getfield(a,"surround") == 0)
+--         or (id == kern_node and (getfield(a,"kern") == 0 or getsubtype(a) == NORMAL))
+-- or (id == hlist_node and getfield(a,"width") == 0 and getfield(a,"height") == 0 and getfield(a,"depth") == 0 and getlist(a) == nil)
+-- or (id == whatsit_node and getsubtype(a) ~= pdf_refximage_node and getsubtype(a) ~= pdf_refxform_node)
-- )
-- end
-
--- history:
---
--
-- local function glyph_width(a)
--- local ch = chardata[a.font][a.char]
+-- local ch = chardata[getfont(a)][getchar(a)]
-- return (ch and ch.width) or 0
-- end
--
-- local function glyph_total(a)
--- local ch = chardata[a.font][a.char]
+-- local ch = chardata[getfont(a)][getchar(a)]
-- return (ch and (ch.height+ch.depth)) or 0
-- end
--
-- local function non_discardable(a) -- inline
--- return a.id < math_node -- brrrr
+--     return getid(a) < math_node -- brrrr
-- end
--
-- local function calculate_badness(t,s)
@@ -183,8 +251,36 @@ nodes.unsetattributes = unset_attributes
-- return -u
-- end
-- end
+--
+-- if not node.end_of_math then
+-- function node.end_of_math(n)
+--         for n in traverse_id(math_code,getnext(n)) do
+-- return n
+-- end
+-- end
+-- end
+--
+-- nodes.endofmath = node.end_of_math
+--
+-- local function firstline(n)
+-- while n do
+-- local id = getid(n)
+-- if id == hlist_code then
+-- if getsubtype(n) == line_code then
+-- return n
+-- else
+-- return firstline(getlist(n))
+-- end
+-- elseif id == vlist_code then
+-- return firstline(getlist(n))
+-- end
+-- n = getnext(n)
+-- end
+-- end
+--
+-- nodes.firstline = firstline
-function nodes.firstcharacter(n,untagged) -- tagged == subtype > 255
+function nuts.firstcharacter(n,untagged) -- tagged == subtype > 255
if untagged then
return first_glyph(n)
else
@@ -194,43 +290,41 @@ function nodes.firstcharacter(n,untagged) -- tagged == subtype > 255
end
end
-function nodes.firstcharinbox(n)
- local l = texgetbox(n).list
+-- function nodes.firstcharacter(n,untagged) -- tagged == subtype > 255
+-- if untagged then
+-- return nodes_first_glyph(n)
+-- else
+-- for g in nodes_traverse_id(glyph_code,n) do
+-- return g
+-- end
+-- end
+-- end
+
+local function firstcharinbox(n)
+ local l = getlist(getbox(n))
if l then
for g in traverse_id(glyph_code,l) do
- return g.char
+ return getchar(g)
end
end
return 0
end
-if not node.end_of_math then
- function node.end_of_math(n)
- for n in traverse_id(math_code,n.next) do
- return n
+nuts .firstcharinbox = firstcharinbox
+nodes.firstcharinbox = firstcharinbox
+nodes.firstcharacter = vianuts(nuts.firstcharacter)
+
+interfaces.implement {
+ name = "buildtextaccent",
+ arguments = "integer",
+ actions = function(n) -- Is this crap really used? Or was it an experiment?
+ local char = firstcharinbox(n)
+ if char > 0 then
+ -- context.accent(false,char)
+ context([[\accent%s\relax]],char)
end
end
-end
-
-nodes.endofmath = node.end_of_math
-
--- local function firstline(n)
--- while n do
--- local id = n.id
--- if id == hlist_code then
--- if n.subtype == line_code then
--- return n
--- else
--- return firstline(n.list)
--- end
--- elseif id == vlist_code then
--- return firstline(n.list)
--- end
--- n = n.next
--- end
--- end
-
--- nodes.firstline = firstline
+}
-- this depends on fonts, so we have a funny dependency ... will be
-- sorted out .. we could make tonodes a plugin into this
@@ -242,10 +336,8 @@ local function tonodes(str,fnt,attr) -- (str,template_glyph) -- moved from blob-
local head, tail, space, fnt, template = nil, nil, nil, nil, nil
if not fnt then
fnt = current_font()
- elseif type(fnt) ~= "number" and fnt.id == "glyph" then
- fnt, template = nil, fnt
- -- else
- -- already a number
+ elseif type(fnt) ~= "number" and getid(fnt) == glyph_code then -- so it has to be a real node
+ fnt, template = nil, tonut(fnt)
end
for s in utfvalues(str) do
local n
@@ -259,12 +351,13 @@ local function tonodes(str,fnt,attr) -- (str,template_glyph) -- moved from blob-
end
elseif template then
n = copy_node(template)
- n.char = s
+            setfield(n,"char",s)
else
n = new_glyph(fnt,s)
end
if attr then -- normally false when template
- n.attr = copy_node_list(attr)
+ -- setfield(n,"attr",copy_node_list(attr))
+ setfield(n,"attr",attr)
end
if head then
insert_node_after(head,tail,n)
@@ -276,69 +369,130 @@ local function tonodes(str,fnt,attr) -- (str,template_glyph) -- moved from blob-
return head, tail
end
-nodes.tonodes = tonodes
+nuts.tonodes = tonodes
-local function link(list,currentfont,currentattr,head,tail)
+nodes.tonodes = function(str,fnt,attr)
+ local head, tail = tonodes(str,fnt,attr)
+ return tonode(head), tonode(tail)
+end
+
+-- local function link(list,currentfont,currentattr,head,tail)
+-- for i=1,#list do
+-- local n = list[i]
+-- if n then
+-- local tn = isnode(n)
+-- if not tn then
+-- local tn = type(n)
+-- if tn == "number" then
+-- if not currentfont then
+-- currentfont = current_font()
+-- end
+-- local h, t = tonodes(tostring(n),currentfont,currentattr)
+-- if not h then
+-- -- skip
+-- elseif not head then
+-- head = h
+-- tail = t
+-- else
+-- setfield(tail,"next",h)
+-- setfield(h,"prev",t)
+-- tail = t
+-- end
+-- elseif tn == "string" then
+-- if #tn > 0 then
+-- if not currentfont then
+-- currentfont = current_font()
+-- end
+-- local h, t = tonodes(n,currentfont,currentattr)
+-- if not h then
+-- -- skip
+-- elseif not head then
+-- head, tail = h, t
+-- else
+-- setfield(tail,"next",h)
+-- setfield(h,"prev",t)
+-- tail = t
+-- end
+-- end
+-- elseif tn == "table" then
+-- if #tn > 0 then
+-- if not currentfont then
+-- currentfont = current_font()
+-- end
+-- head, tail = link(n,currentfont,currentattr,head,tail)
+-- end
+-- end
+-- elseif not head then
+-- head = n
+-- tail = find_tail(n)
+-- elseif getid(n) == attributelist_code then
+-- -- weird case
+-- report_error("weird node type in list at index %s:",i)
+-- for i=1,#list do
+-- local l = list[i]
+-- report_error("%3i: %s %S",i,getid(l) == attributelist_code and "!" or ">",l)
+-- end
+-- os.exit()
+-- else
+-- setfield(tail,"next",n)
+-- setfield(n,"prev",tail)
+-- if getnext(n) then
+-- tail = find_tail(n)
+-- else
+-- tail = n
+-- end
+-- end
+-- else
+-- -- permitting nil is convenient
+-- end
+-- end
+-- return head, tail
+-- end
+
+local function link(list,currentfont,currentattr,head,tail) -- an oldie, might be replaced
for i=1,#list do
local n = list[i]
if n then
- local tn = isnode(n)
- if not tn then
- local tn = type(n)
- if tn == "number" then
+ local tn = type(n)
+ if tn == "string" then
+ if #tn > 0 then
if not currentfont then
currentfont = current_font()
end
- local h, t = tonodes(tostring(n),currentfont,currentattr)
+ local h, t = tonodes(n,currentfont,currentattr)
if not h then
-- skip
elseif not head then
head, tail = h, t
else
- tail.next, h.prev, tail = h, t, t
- end
- elseif tn == "string" then
- if #tn > 0 then
- if not currentfont then
- currentfont = current_font()
- end
- local h, t = tonodes(n,currentfont,currentattr)
- if not h then
- -- skip
- elseif not head then
- head, tail = h, t
- else
- tail.next, h.prev, tail = h, t, t
- end
+ setfield(tail,"next",h)
+ setfield(h,"prev",t)
+ tail = t
end
- elseif tn == "table" then
- if #tn > 0 then
- if not currentfont then
- currentfont = current_font()
- end
- head, tail = link(n,currentfont,currentattr,head,tail)
+ end
+ elseif tn == "table" then
+ if #tn > 0 then
+ if not currentfont then
+ currentfont = current_font()
end
+ head, tail = link(n,currentfont,currentattr,head,tail)
end
elseif not head then
head = n
- if n.next then
- tail = slide_nodes(n)
- else
- tail = n
- end
- elseif n.id == attributelist_code then
+ tail = find_tail(n)
+ elseif getid(n) == attributelist_code then
-- weird case
report_error("weird node type in list at index %s:",i)
for i=1,#list do
local l = list[i]
- report_error("%3i: %s %S",i,l.id == attributelist_code and "!" or ">",l)
+ report_error("%3i: %s %S",i,getid(l) == attributelist_code and "!" or ">",l)
end
os.exit()
else
- tail.next = n
- n.prev = tail
- if n.next then
- tail = slide_nodes(n)
+ setfield(tail,"next",n)
+ setfield(n,"prev",tail)
+ if getnext(n) then
+ tail = find_tail(n)
else
tail = n
end
@@ -350,17 +504,22 @@ local function link(list,currentfont,currentattr,head,tail)
return head, tail
end
-nodes.link = link
+nuts.link = link
+
+nodes.link = function(list,currentfont,currentattr,head,tail)
+ local head, tail = link(list,currentfont,currentattr,tonut(head),tonut(tail))
+ return tonode(head), tonode(tail)
+end
local function locate(start,wantedid,wantedsubtype)
for n in traverse_nodes(start) do
- local id = n.id
+ local id = getid(n)
if id == wantedid then
- if not wantedsubtype or n.subtype == wantedsubtype then
+ if not wantedsubtype or getsubtype(n) == wantedsubtype then
return n
end
elseif id == hlist_code or id == vlist_code then
- local found = locate(n.list,wantedid,wantedsubtype)
+ local found = locate(getlist(n),wantedid,wantedsubtype)
if found then
return found
end
@@ -368,7 +527,12 @@ local function locate(start,wantedid,wantedsubtype)
end
end
-nodes.locate = locate
+nuts.locate = locate
+
+nodes.locate = function(start,wantedid,wantedsubtype)
+ local found = locate(tonut(start),wantedid,wantedsubtype)
+ return found and tonode(found)
+end
-- I have no use for this yet:
--
@@ -381,10 +545,12 @@ nodes.locate = locate
-- return (badness/100)^(1/3)
-- end
--
--- function tex.stretch_amount(skip,badness)
+-- function tex.stretch_amount(skip,badness) -- node no nut
-- if skip.id == gluespec_code then
-- return skip.width + (badness and (badness/100)^(1/3) or 1) * skip.stretch
-- else
-- return 0
-- end
-- end
+
+
diff --git a/tex/context/base/node-bck.lua b/tex/context/base/node-bck.lua
index feaa2c684..99992de09 100644
--- a/tex/context/base/node-bck.lua
+++ b/tex/context/base/node-bck.lua
@@ -11,6 +11,8 @@ if not modules then modules = { } end modules ['node-bck'] = {
local attributes, nodes, node = attributes, nodes, node
+local tasks = nodes.tasks
+
local nodecodes = nodes.nodecodes
local listcodes = nodes.listcodes
@@ -19,11 +21,25 @@ local vlist_code = nodecodes.vlist
local glyph_code = nodecodes.glyph
local cell_code = listcodes.cell
-local traverse = node.traverse
-local traverse_id = node.traverse_id
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getsubtype = nuts.getsubtype
+
+local traverse = nuts.traverse
+local traverse_id = nuts.traverse_id
-local nodepool = nodes.pool
-local tasks = nodes.tasks
local new_rule = nodepool.rule
local new_glue = nodepool.glue
@@ -37,50 +53,50 @@ local a_alignbackground = attributes.private('alignbackground')
local function add_backgrounds(head) -- rather old code .. to be redone
local current = head
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code or id == vlist_code then
- local list = current.list
+ local list = getlist(current)
if list then
local head = add_backgrounds(list)
if head then
- current.list = head
+ setfield(current,"list",head)
list = head
end
end
- local width = current.width
+ local width = getfield(current,"width")
if width > 0 then
- local background = current[a_background]
+ local background = getattr(current,a_background)
if background then
-- direct to hbox
-- colorspace is already set so we can omit that and stick to color
- local mode = current[a_colorspace]
+ local mode = getattr(current,a_colorspace)
if mode then
- local height = current.height
- local depth = current.depth
+ local height = getfield(current,"height")
+ local depth = getfield(current,"depth")
local skip = id == hlist_code and width or (height + depth)
local glue = new_glue(-skip)
local rule = new_rule(width,height,depth)
- local color = current[a_color]
- local transparency = current[a_transparency]
- rule[a_colorspace] = mode
+ local color = getattr(current,a_color)
+ local transparency = getattr(current,a_transparency)
+ setattr(rule,a_colorspace,mode)
if color then
- rule[a_color] = color
+ setattr(rule,a_color,color)
end
if transparency then
- rule[a_transparency] = transparency
+ setattr(rule,a_transparency,transparency)
end
- rule.next = glue
- glue.prev = rule
+ setfield(rule,"next",glue)
+ setfield(glue,"prev",rule)
if list then
- glue.next = list
- list.prev = glue
+ setfield(glue,"next",list)
+ setfield(list,"prev",glue)
end
- current.list = rule
+ setfield(current,"list",rule)
end
end
end
end
- current = current.next
+ current = getnext(current)
end
return head, true
end
@@ -88,16 +104,16 @@ end
local function add_alignbackgrounds(head)
local current = head
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code then
- local list = current.list
+ local list = getlist(current)
if not list then
-- no need to look
- elseif current.subtype == cell_code then
+ elseif getsubtype(current) == cell_code then
local background = nil
local found = nil
-- for l in traverse(list) do
- -- background = l[a_alignbackground]
+ -- background = getattr(l,a_alignbackground)
-- if background then
-- found = l
-- break
@@ -106,7 +122,7 @@ local function add_alignbackgrounds(head)
-- we know that it's a fake hlist (could be user node)
-- but we cannot store tables in user nodes yet
for l in traverse_id(hpack_code,list) do
- background = l[a_alignbackground]
+ background = getattr(l,a_alignbackground)
if background then
found = l
end
@@ -115,28 +131,28 @@ local function add_alignbackgrounds(head)
--
if background then
-- current has subtype 5 (cell)
- local width = current.width
+ local width = getfield(current,"width")
if width > 0 then
- local mode = found[a_colorspace]
+ local mode = getattr(found,a_colorspace)
if mode then
local glue = new_glue(-width)
- local rule = new_rule(width,current.height,current.depth)
- local color = found[a_color]
- local transparency = found[a_transparency]
- rule[a_colorspace] = mode
+ local rule = new_rule(width,getfield(current,"height"),getfield(current,"depth"))
+ local color = getattr(found,a_color)
+ local transparency = getattr(found,a_transparency)
+ setattr(rule,a_colorspace,mode)
if color then
- rule[a_color] = color
+ setattr(rule,a_color,color)
end
if transparency then
- rule[a_transparency] = transparency
+ setattr(rule,a_transparency,transparency)
end
- rule.next = glue
- glue.prev = rule
+ setfield(rule,"next",glue)
+ setfield(glue,"prev",rule)
if list then
- glue.next = list
- list.prev = glue
+ setfield(glue,"next",list)
+ setfield(list,"prev",glue)
end
- current.list = rule
+ setfield(current,"list",rule)
end
end
end
@@ -144,18 +160,37 @@ local function add_alignbackgrounds(head)
add_alignbackgrounds(list)
end
elseif id == vlist_code then
- local list = current.list
+ local list = getlist(current)
if list then
add_alignbackgrounds(list)
end
end
- current = current.next
+ current = getnext(current)
end
return head, true
end
-nodes.handlers.backgrounds = add_backgrounds
-nodes.handlers.alignbackgrounds = add_alignbackgrounds
+-- nodes.handlers.backgrounds = add_backgrounds
+-- nodes.handlers.alignbackgrounds = add_alignbackgrounds
-tasks.appendaction("shipouts","normalizers","nodes.handlers.backgrounds")
-tasks.appendaction("shipouts","normalizers","nodes.handlers.alignbackgrounds")
+nodes.handlers.backgrounds = function(head) local head, done = add_backgrounds (tonut(head)) return tonode(head), done end
+nodes.handlers.alignbackgrounds = function(head) local head, done = add_alignbackgrounds(tonut(head)) return tonode(head), done end
+
+-- elsewhere: needs checking
+
+-- tasks.appendaction("shipouts","normalizers","nodes.handlers.backgrounds")
+-- tasks.appendaction("shipouts","normalizers","nodes.handlers.alignbackgrounds")
+
+interfaces.implement {
+ name = "enablebackgroundboxes",
+ onlyonce = true,
+ actions = nodes.tasks.enableaction,
+ arguments = { "'shipouts'", "'nodes.handlers.backgrounds'" }
+}
+
+interfaces.implement {
+ name = "enablebackgroundalign",
+ onlyonce = true,
+ actions = nodes.tasks.enableaction,
+ arguments = { "'shipouts'", "'nodes.handlers.alignbackgrounds'" }
+}
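+
+-- at the tex end the above become \clf_enablebackgroundboxes and \clf_enablebackgroundalign
+-- (used in node-bck.mkiv); the onlyonce flag should make sure the enabling happens only once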
diff --git a/tex/context/base/node-bck.mkiv b/tex/context/base/node-bck.mkiv
index 25739c560..ff0de6a5d 100644
--- a/tex/context/base/node-bck.mkiv
+++ b/tex/context/base/node-bck.mkiv
@@ -21,14 +21,6 @@
\registerctxluafile{node-bck}{1.001}
-\def\node_backgrounds_boxes_initialize % will move to lua
- {\ctxlua{nodes.tasks.enableaction("shipouts","nodes.handlers.backgrounds")}%
- \glet\node_backgrounds_boxes_initialize\donothing}
-
-\def\node_backgrounds_align_initialize % will move to lua
- {\ctxlua{nodes.tasks.enableaction("shipouts","nodes.handlers.alignbackgrounds")}%
- \glet\node_backgrounds_align_initialize\donothing}
-
% \backgroundvbox[green] {\input tufte } \par
% \backgroundvbox[blue] {\input ward } \par
% \backgroundvbox[red] {\input knuth } \par
@@ -71,7 +63,7 @@
% \def\node_backgrounds_boxes_add#1[#2]%
% {\begingroup
-% \node_backgrounds_boxes_initialize
+% \clf_enablebackgroundboxes
% \dousecolorparameter{#2}%
% \normalexpanded{\endgroup#1
% attr \backgroundattribute \plusone
@@ -82,13 +74,13 @@
% more efficient:
\def\node_backgrounds_boxes_add#1[#2]%
- {\node_backgrounds_boxes_initialize
+ {\clf_enablebackgroundboxes
#1\backgroundcolorattr{#2}}
% less argument carry over:
%
% \def\node_backgrounds_boxes_add#1[#2]%
-% {\node_backgrounds_boxes_initialize#1%
+% {\clf_enablebackgroundboxes#1%
% \ifcsname\??colorattribute\currentcolorprefix#2\endcsname
% \thebackgroundcolorattr{\currentcolorprefix#2}%
% \else\ifcsname\??colorattribute#2\endcsname
diff --git a/tex/context/base/node-fin.lua b/tex/context/base/node-fin.lua
index 63a5ef83e..84c5b9a2f 100644
--- a/tex/context/base/node-fin.lua
+++ b/tex/context/base/node-fin.lua
@@ -8,42 +8,63 @@ if not modules then modules = { } end modules ['node-fin'] = {
-- this module is being reconstructed
-- local functions, only slightly slower
+--
+-- leaders are also triggers ... see colo-ext for an example (negate a box)
local next, type, format = next, type, string.format
local attributes, nodes, node = attributes, nodes, node
-local copy_node = node.copy
-local find_tail = node.slide
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getleader = nuts.getleader
+local getattr = nuts.getattr
+
+local copy_node = nuts.copy
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
-local nodecodes = nodes.nodecodes
-local whatcodes = nodes.whatcodes
+local nodecodes = nodes.nodecodes
+local whatcodes = nodes.whatcodes
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local glue_code = nodecodes.glue
-local rule_code = nodecodes.rule
-local whatsit_code = nodecodes.whatsit
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local glue_code = nodecodes.glue
+local rule_code = nodecodes.rule
+local whatsit_code = nodecodes.whatsit
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
-local pdfliteral_code = whatcodes.pdfliteral
+local pdfliteral_code = whatcodes.pdfliteral
-local states = attributes.states
-local numbers = attributes.numbers
-local a_trigger = attributes.private('trigger')
-local triggering = false
+local states = attributes.states
+local numbers = attributes.numbers
+local a_trigger = attributes.private('trigger')
+local triggering = false
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-local loadstripped = utilities.lua.loadstripped
-local unsetvalue = attributes.unsetvalue
+local implement = interfaces.implement
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+local loadstripped = utilities.lua.loadstripped
+local unsetvalue = attributes.unsetvalue
-- these two will be like trackers
function states.enabletriggering () triggering = true end
function states.disabletriggering() triggering = false end
+implement { name = "enablestatetriggering", actions = states.enabletriggering }
+implement { name = "disablestatetriggering", actions = states.disabletriggering }
+
nodes.plugindata = nil
-- inheritance: -0x7FFFFFFF -- we can best use nil and skip !
@@ -102,14 +123,17 @@ function nodes.installattributehandler(plugin)
return loadstripped(template)()
end
--- the injectors
+-- for the moment:
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
+local function copied(n)
+ return copy_node(tonut(n))
+end
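+
+-- the namespace data (nsdata, nsnone) is still made of regular nodes, hence the
+-- tonut before copying; this helper can go once those become nuts as well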
+
+-- the injectors
local nsdata, nsnone, nslistwise, nsforced, nsselector, nstrigger
local current, current_selector, done = 0, 0, false -- nb, stack has a local current !
-local nsbegin, nsend
+local nsbegin, nsend, nsreset
function states.initialize(namespace,attribute,head)
nsdata = namespace.data
@@ -123,6 +147,7 @@ function states.initialize(namespace,attribute,head)
done = false -- todo: done cleanup
nsstep = namespace.resolve_step
if nsstep then
+ nsreset = namespace.resolve_reset
nsbegin = namespace.resolve_begin
nsend = namespace.resolve_end
nspush = namespace.push
@@ -132,23 +157,24 @@ end
function states.finalize(namespace,attribute,head) -- is this one ok?
if current > 0 and nsnone then
- local id = head.id
+ head = tonut(head)
+ local id = getid(head)
if id == hlist_code or id == vlist_code then
- local list = head.list
+ local list = getlist(head)
if list then
- head.list = insert_node_before(list,list,copy_node(nsnone))
+ list = insert_node_before(list,list,copied(nsnone)) -- two return values
+ setfield(head,"list",list)
end
else
- head = insert_node_before(head,head,copy_node(nsnone))
+ head = insert_node_before(head,head,copied(nsnone))
end
- return head, true, true
+ return tonode(head), true, true
end
return head, false, false
end
--- disc nodes can be ignored
-- we need to deal with literals too (reset as well as oval)
--- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then
+-- if id == glyph_code or (id == whatsit_code and getsubtype(stack) == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then
local function process(namespace,attribute,head,inheritance,default) -- one attribute
local stack = head
@@ -156,53 +182,59 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
local check = false
local leader = nil
while stack do
- local id = stack.id
+ local id = getid(stack)
if id == glyph_code then
check = true
+ -- elseif id == disc_code then
+ -- check = true -- no longer needed as we flatten replace
elseif id == glue_code then
- leader = stack.leader
+ leader = getleader(stack)
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = stack.list
+ local content = getlist(stack)
if content then
-- begin nested --
- local ok
- if nstrigger and stack[nstrigger] then
- local outer = stack[attribute]
+ if nstrigger and getattr(stack,nstrigger) then
+ local outer = getattr(stack,attribute)
if outer ~= inheritance then
- stack.list, ok = process(namespace,attribute,content,inheritance,outer)
+ local list, ok = process(namespace,attribute,content,inheritance,outer)
+ setfield(stack,"list",list)
+ done = done or ok
else
- stack.list, ok = process(namespace,attribute,content,inheritance,default)
+ local list, ok = process(namespace,attribute,content,inheritance,default)
+ setfield(stack,"list",list)
+ done = done or ok
end
else
- stack.list, ok = process(namespace,attribute,content,inheritance,default)
+ local list, ok = process(namespace,attribute,content,inheritance,default)
+ setfield(stack,"list",list)
+ done = done or ok
end
-- end nested --
- done = done or ok
end
elseif id == rule_code then
- check = stack.width ~= 0
+ check = getfield(stack,"width") ~= 0
end
-- much faster this way than using a check() and nested() function
if check then
- local c = stack[attribute]
+ local c = getattr(stack,attribute)
if c then
if default and c == inheritance then
if current ~= default then
- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+ head = insert_node_before(head,stack,copied(nsdata[default]))
current = default
done = true
end
elseif current ~= c then
- head = insert_node_before(head,stack,copy_node(nsdata[c]))
+ head = insert_node_before(head,stack,copied(nsdata[c]))
current = c
done = true
end
if leader then
local savedcurrent = current
- local ci = leader.id
+ local ci = getid(leader)
if ci == hlist_code or ci == vlist_code then
-- else we reset inside a box unneeded, okay, the downside is
-- that we trigger color in each repeated box, so there is room
@@ -210,41 +242,48 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
current = 0
end
-- begin nested --
- local ok = false
- if nstrigger and stack[nstrigger] then
- local outer = stack[attribute]
+ if nstrigger and getattr(stack,nstrigger) then
+ local outer = getattr(stack,attribute)
if outer ~= inheritance then
- stack.leader, ok = process(namespace,attribute,leader,inheritance,outer)
+ local list, ok = process(namespace,attribute,leader,inheritance,outer)
+ setfield(stack,"leader",list)
+ done = done or ok
else
- stack.leader, ok = process(namespace,attribute,leader,inheritance,default)
+ local list, ok = process(namespace,attribute,leader,inheritance,default)
+ setfield(stack,"leader",list)
+ done = done or ok
end
else
- stack.leader, ok = process(namespace,attribute,leader,inheritance,default)
+ local list, ok = process(namespace,attribute,leader,inheritance,default)
+ setfield(stack,"leader",list)
+ done = done or ok
end
-- end nested --
- done = done or ok
current = savedcurrent
leader = false
end
elseif default and inheritance then
if current ~= default then
- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+ head = insert_node_before(head,stack,copied(nsdata[default]))
current = default
done = true
end
elseif current > 0 then
- head = insert_node_before(head,stack,copy_node(nsnone))
+ head = insert_node_before(head,stack,copied(nsnone))
current = 0
done = true
end
check = false
end
- stack = stack.next
+ stack = getnext(stack)
end
return head, done
end
-states.process = process
+states.process = function(namespace,attribute,head,default)
+ local head, done = process(namespace,attribute,tonut(head),default)
+ return tonode(head), done
+end
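+
+-- process (and selective, stacked and stacker below) work on nuts internally; the
+-- exported states.* functions convert at the boundary so callers can keep passing
+-- regular nodes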
-- we can force a selector, e.g. document wide color spaces, saves a little
-- watch out, we need to check both the selector state (like colorspace) and
@@ -258,93 +297,105 @@ local function selective(namespace,attribute,head,inheritance,default) -- two at
local check = false
local leader = nil
while stack do
- local id = stack.id
+ local id = getid(stack)
if id == glyph_code then
check = true
+ -- elseif id == disc_code then
+ -- check = true -- no longer needed as we flatten replace
elseif id == glue_code then
- leader = stack.leader
+ leader = getleader(stack)
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = stack.list
+ local content = getlist(stack)
if content then
- local ok = false
-- begin nested
- if nstrigger and stack[nstrigger] then
- local outer = stack[attribute]
+ if nstrigger and getattr(stack,nstrigger) then
+ local outer = getattr(stack,attribute)
if outer ~= inheritance then
- stack.list, ok = selective(namespace,attribute,content,inheritance,outer)
+ local list, ok = selective(namespace,attribute,content,inheritance,outer)
+ setfield(stack,"list",list)
+ done = done or ok
else
- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
+ local list, ok = selective(namespace,attribute,content,inheritance,default)
+ setfield(stack,"list",list)
+ done = done or ok
end
else
- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
+ local list, ok = selective(namespace,attribute,content,inheritance,default)
+ setfield(stack,"list",list)
+ done = done or ok
end
-- end nested
- done = done or ok
end
elseif id == rule_code then
- check = stack.width ~= 0
+ check = getfield(stack,"width") ~= 0
end
if check then
- local c = stack[attribute]
+ local c = getattr(stack,attribute)
if c then
if default and c == inheritance then
if current ~= default then
local data = nsdata[default]
- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+ head = insert_node_before(head,stack,copied(data[nsforced or getattr(stack,nsselector) or nsselector]))
current = default
done = true
end
else
- local s = stack[nsselector]
+ local s = getattr(stack,nsselector)
if current ~= c or current_selector ~= s then
local data = nsdata[c]
- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+ head = insert_node_before(head,stack,copied(data[nsforced or getattr(stack,nsselector) or nsselector]))
current = c
current_selector = s
done = true
end
end
if leader then
- local ok = false
-- begin nested
- if nstrigger and stack[nstrigger] then
- local outer = stack[attribute]
+ if nstrigger and getattr(stack,nstrigger) then
+                        local outer = getattr(stack,attribute)
if outer ~= inheritance then
- stack.leader, ok = selective(namespace,attribute,leader,inheritance,outer)
+ local list, ok = selective(namespace,attribute,leader,inheritance,outer)
+ setfield(stack,"leader",list)
+ done = done or ok
else
- stack.leader, ok = selective(namespace,attribute,leader,inheritance,default)
+ local list, ok = selective(namespace,attribute,leader,inheritance,default)
+ setfield(stack,"leader",list)
+ done = done or ok
end
else
- stack.leader, ok = selective(namespace,attribute,leader,inheritance,default)
+ local list, ok = selective(namespace,attribute,leader,inheritance,default)
+ setfield(stack,"leader",list)
+ done = done or ok
end
-- end nested
- done = done or ok
leader = false
end
elseif default and inheritance then
if current ~= default then
local data = nsdata[default]
- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+ head = insert_node_before(head,stack,copied(data[nsforced or getattr(stack,nsselector) or nsselector]))
current = default
done = true
end
elseif current > 0 then
- head = insert_node_before(head,stack,copy_node(nsnone))
+ head = insert_node_before(head,stack,copied(nsnone))
current, current_selector, done = 0, 0, true
end
check = false
end
-
- stack = stack.next
+ stack = getnext(stack)
end
return head, done
end
-states.selective = selective
+states.selective = function(namespace,attribute,head,default)
+ local head, done = selective(namespace,attribute,tonut(head),default)
+ return tonode(head), done
+end
-- Ideally the next one should be merged with the previous but keeping it separate is
-- safer. We deal with two situations: efficient boxwise (layoutareas) and mixed layers
@@ -363,135 +414,145 @@ local function stacked(namespace,attribute,head,default) -- no triggering, no in
local check = false
local leader = false
while stack do
- local id = stack.id
+ local id = getid(stack)
if id == glyph_code then
check = true
elseif id == glue_code then
- leader = stack.leader
+ leader = getleader(stack)
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = stack.list
+ local content = getlist(stack)
if content then
-- the problem is that broken lines gets the attribute which can be a later one
if nslistwise then
- local a = stack[attribute]
+ local a = getattr(stack,attribute)
if a and current ~= a and nslistwise[a] then -- viewerlayer / needs checking, see below
local p = current
- current, done = a, true
- head = insert_node_before(head,stack,copy_node(nsdata[a]))
- stack.list = stacked(namespace,attribute,content,current)
- head, stack = insert_node_after(head,stack,copy_node(nsnone))
+ current = a
+ head = insert_node_before(head,stack,copied(nsdata[a]))
+ local list = stacked(namespace,attribute,content,current) -- two return values
+ setfield(stack,"list",list)
+ done = true
+ head, stack = insert_node_after(head,stack,copied(nsnone))
current = p
else
- local ok = false
- stack.list, ok = stacked(namespace,attribute,content,current)
+ local list, ok = stacked(namespace,attribute,content,current)
+ setfield(stack,"list",list) -- only if ok
done = done or ok
end
else
- local ok = false
- stack.list, ok = stacked(namespace,attribute,content,current)
+ local list, ok = stacked(namespace,attribute,content,current)
+ setfield(stack,"list",list) -- only if ok
done = done or ok
end
end
elseif id == rule_code then
- check = stack.width ~= 0
+ check = getfield(stack,"width") ~= 0
end
if check then
- local a = stack[attribute]
+ local a = getattr(stack,attribute)
if a then
if current ~= a then
- head = insert_node_before(head,stack,copy_node(nsdata[a]))
+ head = insert_node_before(head,stack,copied(nsdata[a]))
depth = depth + 1
current, done = a, true
end
if leader then
- local ok = false
- stack.leader, ok = stacked(namespace,attribute,content,current)
+                local list, ok = stacked(namespace,attribute,leader,current)
+ setfield(stack,"leader",list) -- only if ok
done = done or ok
leader = false
end
elseif default > 0 then
--
elseif current > 0 then
- head = insert_node_before(head,stack,copy_node(nsnone))
+ head = insert_node_before(head,stack,copied(nsnone))
depth = depth - 1
current, done = 0, true
end
check = false
end
-
- stack = stack.next
+ stack = getnext(stack)
end
while depth > 0 do
- head = insert_node_after(head,stack,copy_node(nsnone))
+ head = insert_node_after(head,stack,copied(nsnone))
depth = depth - 1
end
return head, done
end
-states.stacked = stacked
+states.stacked = function(namespace,attribute,head,default)
+ local head, done = stacked(namespace,attribute,tonut(head),default)
+ return tonode(head), done
+end
-- experimental
local function stacker(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
- nsbegin()
+
+-- nsbegin()
+ local stacked = false
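+    -- nsbegin is deferred till the first attribute change so that a run without
+    -- relevant attributes pushes and pops nothing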
+
local current = head
local previous = head
local done = false
- local okay = false
local attrib = default or unsetvalue
local check = false
local leader = false
+
while current do
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
check = true
elseif id == glue_code then
- leader = current.leader
+ leader = getleader(current)
if leader then
check = true
end
elseif id == hlist_code or id == vlist_code then
- local content = current.list
+ local content = getlist(current)
if not content then
-- skip
elseif nslistwise then
- local a = current[attribute]
+ local a = getattr(current,attribute)
if a and attrib ~= a and nslistwise[a] then -- viewerlayer
+ head = insert_node_before(head,current,copied(nsdata[a]))
+ local list = stacker(namespace,attribute,content,a)
+ setfield(current,"list",list)
done = true
- head = insert_node_before(head,current,copy_node(nsdata[a]))
- current.list = stacker(namespace,attribute,content,a)
- head, current = insert_node_after(head,current,copy_node(nsnone))
+ head, current = insert_node_after(head,current,copied(nsnone))
else
- local ok = false
- current.list, ok = stacker(namespace,attribute,content,attrib)
+ local list, ok = stacker(namespace,attribute,content,attrib)
+ setfield(current,"list",list)
done = done or ok
end
else
- local ok = false
- current.list, ok = stacker(namespace,attribute,content,default)
+ local list, ok = stacker(namespace,attribute,content,default)
+ setfield(current,"list",list)
done = done or ok
end
elseif id == rule_code then
- check = current.width ~= 0
+ check = getfield(current,"width") ~= 0
end
if check then
- local a = current[attribute] or unsetvalue
+ local a = getattr(current,attribute) or unsetvalue
if a ~= attrib then
+ if not stacked then
+ stacked = true
+ nsbegin()
+ end
local n = nsstep(a)
if n then
- -- !!!! TEST CODE !!!!
- -- head = insert_node_before(head,current,copy_node(nsdata[tonumber(n)])) -- a
- head = insert_node_before(head,current,n) -- a
+ head = insert_node_before(head,current,tonut(n)) -- a
end
- attrib, done, okay = a, true, true
+ attrib, done = a, true
if leader then
-- tricky as a leader has to be a list so we cannot inject before
- local _, ok = stacker(namespace,attribute,leader,attrib)
+ local list, ok = stacker(namespace,attribute,leader,attrib)
done = done or ok
leader = false
end
@@ -500,20 +561,27 @@ local function stacker(namespace,attribute,head,default) -- no triggering, no in
end
previous = current
- current = current.next
+ current = getnext(current)
end
- if okay then
- local n = nsend()
- if n then
- -- !!!! TEST CODE !!!!
- -- head = insert_node_after(head,previous,copy_node(nsdata[tostring(n)]))
- head = insert_node_after(head,previous,n)
- end
+
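+    -- there can be more than one pending entry at the end, so nsend is drained in
+    -- a loop rather than called once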
+    if stacked then
+        local n = nsend()
+        while n do
+            head = insert_node_after(head,previous,tonut(n))
+            n = nsend()
+        end
+    end
+
return head, done
end
-states.stacker = stacker
+states.stacker = function(namespace,attribute,head,default)
+ local head, done = stacker(namespace,attribute,tonut(head),default)
+ nsreset()
+ return tonode(head), done
+end
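+
+-- nsreset (resolve_reset, set up in states.initialize) clears whatever state the
+-- namespace keeps around for nsbegin/nsstep/nsend between runs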
-- -- --
diff --git a/tex/context/base/node-fin.mkiv b/tex/context/base/node-fin.mkiv
index 2eb033fc1..7c95699dd 100644
--- a/tex/context/base/node-fin.mkiv
+++ b/tex/context/base/node-fin.mkiv
@@ -23,12 +23,12 @@
% we might have two variants at some point (efficiency)
-\unexpanded\def\finalizeobjectbox #1{\ctxcommand{finalizebox(\number#1)}}
-\unexpanded\def\finalizeshipoutbox#1{\ctxcommand{finalizebox(\number#1)}}
+\unexpanded\def\finalizeobjectbox #1{\clf_finalizebox#1\relax}
+\unexpanded\def\finalizeshipoutbox#1{\clf_finalizebox#1\relax}
% Experimental (for Aditya):
-\unexpanded\def\cleanupbox#1{\ctxcommand{cleanupbox(\number#1)}}
+\unexpanded\def\cleanupbox#1{\clf_cleanupbox#1\relax}
% Tricky stuff: this might become obsolete.
@@ -61,12 +61,12 @@
\stopinheritattributes}
\def\enableattributeinheritance
- {\ctxlua{attributes.states.enabletriggering()}%
+ {\clf_enablestatetriggering
\let\attributedcopy\doattributedcopy
\let\attributedbox \doattributedbox}
\def\disableattributeinheritance
- {\ctxlua{attributes.states.disabletriggering()}%
+ {\clf_disablestatetriggering
\let\attributedcopy\copy
\let\attributedbox \box}
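+
+% the \clf_... commands above are defined at the lua end by the interfaces.implement
+% calls in node-fin.lua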
diff --git a/tex/context/base/node-fnt.lua b/tex/context/base/node-fnt.lua
index 2f59d513c..774a68718 100644
--- a/tex/context/base/node-fnt.lua
+++ b/tex/context/base/node-fnt.lua
@@ -13,8 +13,11 @@ local concat, keys = table.concat, table.keys
local nodes, node, fonts = nodes, node, fonts
-local trace_characters = false trackers.register("nodes.characters", function(v) trace_characters = v end)
-local trace_fontrun = false trackers.register("nodes.fontrun", function(v) trace_fontrun = v end)
+local trace_characters = false trackers .register("nodes.characters", function(v) trace_characters = v end)
+local trace_fontrun = false trackers .register("nodes.fontrun", function(v) trace_fontrun = v end)
+
+local force_discrun = true directives.register("nodes.discrun", function(v) force_discrun = v end)
+local force_basepass = true directives.register("nodes.basepass", function(v) force_basepass = v end)
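+
+-- "nodes.discrun" toggles the extra pass over discretionaries below, while
+-- "nodes.basepass" controls falling back on the built-in ligaturing and kerning
+-- for fonts that have no lua processing assigned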
local report_fonts = logs.reporter("fonts","processing")
@@ -23,12 +26,27 @@ local fontdata = fonthashes.identifiers
local otf = fonts.handlers.otf
-local traverse_id = node.traverse_id
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
+
local nodecodes = nodes.nodecodes
local handlers = nodes.handlers
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getattr = nuts.getattr
+local getid = nuts.getid
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getfield = nuts.getfield
+
+local traverse_id = nuts.traverse_id
+
local glyph_code = nodecodes.glyph
local disc_code = nodecodes.disc
@@ -99,68 +117,153 @@ fonts.hashes.processes = fontprocesses
-- inside a run which means that we need to keep track of this which in turn complicates matters
-- in a way i don't like
+-- we need to deal with the basemode fonts here and can only run over ranges as we
+-- otherwise get luatex crashes due to all kinds of asserts in the disc/lig builder
+
+local ligaturing = node.ligaturing
+local kerning = node.kerning
+
function handlers.characters(head)
-- either next or not, but definitely no already processed list
starttiming(nodes)
- local usedfonts, attrfonts = { }, { }
- local a, u, prevfont, prevattr, done = 0, 0, nil, 0, false
+
+ local usedfonts = { }
+ local attrfonts = { }
+ local basefonts = { }
+ local a, u, b = 0, 0, 0
+ local basefont = nil
+ local prevfont = nil
+ local prevattr = 0
+ local done = false
+
if trace_fontrun then
run = run + 1
report_fonts()
report_fonts("checking node list, run %s",run)
report_fonts()
- local n = head
+ local n = tonut(head)
while n do
- local id = n.id
+ local id = getid(n)
if id == glyph_code then
- local font = n.font
- local attr = n[0] or 0
- report_fonts("font %03i, dynamic %03i, glyph %C",font,attr,n.char)
+ local font = getfont(n)
+ local attr = getattr(n,0) or 0
+ report_fonts("font %03i, dynamic %03i, glyph %C",font,attr,getchar(n))
elseif id == disc_code then
report_fonts("[disc] %s",nodes.listtoutf(n,true,false,n))
else
report_fonts("[%s]",nodecodes[id])
end
- n = n.next
+ n = getnext(n)
end
end
- for n in traverse_id(glyph_code,head) do
- -- if n.subtype<256 then -- all are 1
- local font = n.font
- local attr = n[0] or 0 -- zero attribute is reserved for fonts in context
- if font ~= prevfont or attr ~= prevattr then
- if attr > 0 then
- local used = attrfonts[font]
- if not used then
- used = { }
- attrfonts[font] = used
+
+ local nuthead = tonut(head)
+
+ for n in traverse_id(glyph_code,nuthead) do
+ if getsubtype(n) < 256 then -- all are 1
+ local font = getfont(n)
+ local attr = getattr(n,0) or 0 -- zero attribute is reserved for fonts in context
+ if font ~= prevfont or attr ~= prevattr then
+ if basefont then
+ basefont[2] = tonode(getprev(n)) -- todo, save p
end
- if not used[attr] then
- local fd = setfontdynamics[font]
- if fd then
- used[attr] = fd[attr]
- a = a + 1
+ if attr > 0 then
+ local used = attrfonts[font]
+ if not used then
+ used = { }
+ attrfonts[font] = used
+ end
+ if not used[attr] then
+ local fd = setfontdynamics[font]
+ if fd then
+ used[attr] = fd[attr]
+ a = a + 1
+ elseif force_basepass then
+ b = b + 1
+ basefont = { tonode(n), nil }
+ basefonts[b] = basefont
+ end
+ end
+ else
+ local used = usedfonts[font]
+ if not used then
+ local fp = fontprocesses[font]
+ if fp then
+ usedfonts[font] = fp
+ u = u + 1
+ elseif force_basepass then
+ b = b + 1
+ basefont = { tonode(n), nil }
+ basefonts[b] = basefont
+ end
end
end
- else
- local used = usedfonts[font]
- if not used then
- local fp = fontprocesses[font]
- if fp then
- usedfonts[font] = fp
- u = u + 1
- end
+ prevfont = font
+ prevattr = attr
+ end
+ end
+ end
+
+    -- could be an optional pass: seldom needed, only for documentation, as a discretionary
+    -- with pre/post/replace will normally not occur on its own
+
+ if force_discrun then
+
+    -- basefont is not supported in disc-only runs ... it would mean a lot of
+    -- ranges .. we could try to run basemode as a separate processor run but
+    -- not for now (we can consider it when the new node code is tested)
+
+ -- local prevfont = nil
+ -- local prevattr = 0
+
+ for d in traverse_id(disc_code,nuthead) do
+ -- we could use first_glyph
+            local r = getfield(d,"replace") -- good enough
+ if r then
+ for n in traverse_id(glyph_code,r) do
+ if getsubtype(n) < 256 then -- all are 1
+ local font = getfont(n)
+ local attr = getattr(n,0) or 0 -- zero attribute is reserved for fonts in context
+ if font ~= prevfont or attr ~= prevattr then
+ if attr > 0 then
+ local used = attrfonts[font]
+ if not used then
+ used = { }
+ attrfonts[font] = used
+ end
+ if not used[attr] then
+ local fd = setfontdynamics[font]
+ if fd then
+ used[attr] = fd[attr]
+ a = a + 1
+ end
+ end
+ else
+ local used = usedfonts[font]
+ if not used then
+ local fp = fontprocesses[font]
+ if fp then
+ usedfonts[font] = fp
+ u = u + 1
+ end
+ end
+ end
+ prevfont = font
+ prevattr = attr
+ end
+ end
+ break
end
end
- prevfont = font
- prevattr = attr
end
- -- end
+
end
+
if trace_fontrun then
report_fonts()
- report_fonts("statics : %s",(u > 0 and concat(keys(usedfonts)," ")) or "none")
- report_fonts("dynamics: %s",(a > 0 and concat(keys(attrfonts)," ")) or "none")
+ report_fonts("statics : %s",u > 0 and concat(keys(usedfonts)," ") or "none")
+ report_fonts("dynamics: %s",a > 0 and concat(keys(attrfonts)," ") or "none")
+ report_fonts("built-in: %s",b > 0 and b or "none")
report_fonts()
end
-- in context we always have at least 2 processors
@@ -212,6 +315,49 @@ function handlers.characters(head)
end
end
end
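+    -- finally the collected base mode ranges (basefonts) get the built-in ligaturing
+    -- and kerning applied; we only run over these slices, see the remark above about
+    -- the disc/lig builder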
+ if b == 0 then
+ -- skip
+ elseif b == 1 then
+ -- only one font
+        local range = basefonts[1]
+        local start = range[1]
+        local stop  = range[2]
+        local front = head == start
+ if stop then
+ start, stop = ligaturing(start,stop)
+ start, stop = kerning(start,stop)
+ elseif start then -- safeguard
+ start = ligaturing(start)
+ start = kerning(start)
+ else
+ -- something bad happened
+ end
+ if front then
+ -- shouldn't happen
+ head = start
+ end
+ else
+ -- multiple fonts
+ local front = head == start
+ for i=1,b do
+ local range = basefonts[i]
+ local start = range[1]
+ local stop = range[2]
+ if stop then
+ start, stop = ligaturing(start,stop)
+ start, stop = kerning(start,stop)
+ elseif start then -- safeguard
+ start = ligaturing(start)
+ start = kerning(start)
+ else
+ -- something bad happened
+ end
+ end
+ if front then
+ -- shouldn't happen
+ head = start
+ end
+ end
stoptiming(nodes)
if trace_characters then
nodes.report(head,done)
@@ -280,7 +426,9 @@ end
-- return false
-- end
-- end)
-
+--
+-- -- TODO: basepasses!
+--
-- function handlers.characters(head)
-- -- either next or not, but definitely no already processed list
-- starttiming(nodes)
@@ -391,5 +539,18 @@ end
-- return head, true
-- end
-handlers.protectglyphs = node.protect_glyphs
-handlers.unprotectglyphs = node.unprotect_glyphs
+local d_protect_glyphs = nuts.protect_glyphs
+local d_unprotect_glyphs = nuts.unprotect_glyphs
+
+handlers.protectglyphs = function(n) return d_protect_glyphs (tonut(n)) end
+handlers.unprotectglyphs = function(n) return d_unprotect_glyphs(tonut(n)) end
+
+-- function handlers.protectglyphs(h)
+-- local h = tonut(h)
+-- for n in traverse_id(disc_code,h) do
+-- local d = getfield(n,"pre") if d then d_protect_glyphs(d) end
+-- local d = getfield(n,"post") if d then d_protect_glyphs(d) end
+-- local d = getfield(n,"replace") if d then d_protect_glyphs(d) end
+-- end
+-- return d_protect_glyphs(h)
+-- end
diff --git a/tex/context/base/node-ini.lua b/tex/context/base/node-ini.lua
index 652b46caf..02d4c7a3f 100644
--- a/tex/context/base/node-ini.lua
+++ b/tex/context/base/node-ini.lua
@@ -154,9 +154,9 @@ local disccodes = allocate {
[0] = "discretionary", -- \discretionary
[1] = "explicit", -- \-
[2] = "automatic", -- following a -
- [3] = "regular", -- simple
- [4] = "first", -- hard first item
- [5] = "second", -- hard second item
+ [3] = "regular", -- by hyphenator: simple
+ [4] = "first", -- by hyphenator: hard first item
+ [5] = "second", -- by hyphenator: hard second item
}
local accentcodes = allocate {
@@ -206,7 +206,7 @@ nodes.whatcodes = whatcodes nodes.whatsitcodes = whatcodes -- more offici
nodes.listcodes = listcodes
nodes.glyphcodes = glyphcodes
nodes.kerncodes = kerncodes
-nodes.penaltycodes = kerncodes
+nodes.penaltycodes = penaltycodes
nodes.mathcodes = mathcodes
nodes.fillcodes = fillcodes
nodes.margincodes = margincodes
@@ -220,6 +220,8 @@ listcodes.column = listcodes.alignment
kerncodes.italiccorrection = kerncodes.userkern
kerncodes.kerning = kerncodes.fontkern
+whatcodes.textdir = whatcodes.dir
+
nodes.codes = allocate { -- mostly for listing
glue = skipcodes,
noad = noadcodes,
diff --git a/tex/context/base/node-ini.mkiv b/tex/context/base/node-ini.mkiv
index e99653327..d04e647de 100644
--- a/tex/context/base/node-ini.mkiv
+++ b/tex/context/base/node-ini.mkiv
@@ -19,10 +19,9 @@
\registerctxluafile{node-ini}{1.001}
\registerctxluafile{node-met}{1.001}
-
-\ctxlua{if nodes.gonuts then context.registerctxluafile("node-nut","1.001") end}
-
+\registerctxluafile{node-nut}{1.001}
\registerctxluafile{node-res}{1.001}
+\registerctxluafile{node-ppt}{1.001} % experimental
\registerctxluafile{node-dir}{1.001}
\registerctxluafile{node-aux}{1.001}
\registerctxluafile{node-tst}{1.001}
@@ -36,6 +35,8 @@
\registerctxluafile{node-acc}{1.001} % experimental
%registerctxluafile{node-prp}{1.001} % makes no sense (yet)
+\doifelsefile{node-ppt.lua}{\registerctxluafile{node-ppt}{1.001}}{}
+
\newcount\c_node_tracers_show_box % box number
\unexpanded\def\shownextnodes{\afterassignment\node_tracers_show_next\c_node_tracers_show_box}
diff --git a/tex/context/base/node-inj.lua b/tex/context/base/node-inj.lua
index ae48150a6..402403529 100644
--- a/tex/context/base/node-inj.lua
+++ b/tex/context/base/node-inj.lua
@@ -8,10 +8,9 @@ if not modules then modules = { } end modules ['node-inj'] = {
-- This is very experimental (this will change when we have luatex > .50 and
-- a few pending thingies are available. Also, Idris needs to make a few more
--- test fonts. Btw, future versions of luatex will have extended glyph properties
--- that can be of help. Some optimizations can go away when we have faster machines.
+-- test fonts. Some optimizations can go away when we have faster machines.
--- todo: make a special one for context
+-- todo: ignore kerns between disc and glyph
local next = next
local utfchar = utf.char
@@ -31,12 +30,30 @@ local injections = nodes.injections
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local kern_code = nodecodes.kern
-local nodepool = nodes.pool
+
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
local newkern = nodepool.kern
-local traverse_id = node.traverse_id
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local setfield = nuts.setfield
+local setattr = nuts.setattr
+
+local traverse_id = nuts.traverse_id
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
local a_kernpair = attributes.private('kernpair')
local a_ligacomp = attributes.private('ligacomp')
@@ -47,6 +64,8 @@ local a_cursbase = attributes.private('cursbase')
local a_curscurs = attributes.private('curscurs')
local a_cursdone = attributes.private('cursdone')
+local unsetvalue = attributes.unsetvalue
+
-- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as
-- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner
-- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure
@@ -67,12 +86,39 @@ local kerns = { }
-- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs
-- checking with husayni (volt and fontforge).
+function injections.reset(n)
+-- if getattr(n,a_kernpair) then
+-- setattr(n,a_kernpair,unsetvalue)
+-- end
+-- if getattr(n,a_markdone) then
+-- setattr(n,a_markbase,unsetvalue)
+-- setattr(n,a_markmark,unsetvalue)
+-- setattr(n,a_markdone,unsetvalue)
+-- end
+-- if getattr(n,a_cursdone) then
+-- setattr(n,a_cursbase,unsetvalue)
+-- setattr(n,a_curscurs,unsetvalue)
+-- setattr(n,a_cursdone,unsetvalue)
+-- end
+-- if getattr(n,a_ligacomp) then
+-- setattr(n,a_ligacomp,unsetvalue)
+-- end
+end
+
+function injections.setligaindex(n,index)
+ setattr(n,a_ligacomp,index)
+end
+
+function injections.getligaindex(n,default)
+ return getattr(n,a_ligacomp) or default
+end
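+
+-- these helpers let callers set and query the ligature component index (the
+-- a_ligacomp attribute) without poking attributes directly; reset is a stub for now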
+
function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
local ws, wn = tfmstart.width, tfmnext.width
local bound = #cursives + 1
- start[a_cursbase] = bound
- nxt[a_curscurs] = bound
+ setattr(start,a_cursbase,bound)
+ setattr(nxt,a_curscurs,bound)
cursives[bound] = { rlmode, dx, dy, ws, wn }
return dx, dy, bound
end
@@ -81,14 +127,14 @@ function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
-- dy = y - h
if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
- local bound = current[a_kernpair]
+ local bound = getattr(current,a_kernpair)
if bound then
local kb = kerns[bound]
-- inefficient but singles have less, but weird anyway, needs checking
kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h
else
bound = #kerns + 1
- current[a_kernpair] = bound
+ setattr(current,a_kernpair,bound)
kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width }
end
return x, y, w, h, bound
@@ -100,7 +146,7 @@ function injections.setkern(current,factor,rlmode,x,tfmchr)
local dx = factor*x
if dx ~= 0 then
local bound = #kerns + 1
- current[a_kernpair] = bound
+ setattr(current,a_kernpair,bound)
kerns[bound] = { rlmode, dx }
return dx, bound
else
@@ -108,9 +154,9 @@ function injections.setkern(current,factor,rlmode,x,tfmchr)
end
end
-function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark) -- ba=baseanchor, ma=markanchor
- local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this
- local bound = base[a_markbase] -- fails again we should pass it
+function injections.setmark(start,base,factor,rlmode,ba,ma) -- ba=baseanchor, ma=markanchor
+ local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2])
+ local bound = getattr(base,a_markbase)
local index = 1
if bound then
local mb = marks[bound]
@@ -118,20 +164,19 @@ function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark) --
-- if not index then index = #mb + 1 end
index = #mb + 1
mb[index] = { dx, dy, rlmode }
- start[a_markmark] = bound
- start[a_markdone] = index
+ setattr(start,a_markmark,bound)
+ setattr(start,a_markdone,index)
return dx, dy, bound
else
- report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
+ report_injections("possible problem, %U is base mark without data (id %a)",getchar(base),bound)
end
end
--- index = index or 1
index = index or 1
bound = #marks + 1
- base[a_markbase] = bound
- start[a_markmark] = bound
- start[a_markdone] = index
- marks[bound] = { [index] = { dx, dy, rlmode, baseismark } }
+ setattr(base,a_markbase,bound)
+ setattr(start,a_markmark,bound)
+ setattr(start,a_markdone,index)
+ marks[bound] = { [index] = { dx, dy, rlmode } }
return dx, dy, bound
end
@@ -142,15 +187,15 @@ end
local function trace(head)
report_injections("begin run")
for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local kp = n[a_kernpair]
- local mb = n[a_markbase]
- local mm = n[a_markmark]
- local md = n[a_markdone]
- local cb = n[a_cursbase]
- local cc = n[a_curscurs]
- local char = n.char
- report_injections("font %s, char %U, glyph %c",n.font,char,char)
+ if getsubtype(n) < 256 then
+ local kp = getattr(n,a_kernpair)
+ local mb = getattr(n,a_markbase)
+ local mm = getattr(n,a_markmark)
+ local md = getattr(n,a_markdone)
+ local cb = getattr(n,a_cursbase)
+ local cc = getattr(n,a_curscurs)
+ local char = getchar(n)
+ report_injections("font %s, char %U, glyph %c",getfont(n),char,char)
if kp then
local k = kerns[kp]
if k[3] then
@@ -198,22 +243,24 @@ local function show_result(head)
local current = head
local skipping = false
while current do
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset)
+ report_injections("char: %C, width %p, xoffset %p, yoffset %p",
+ getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset"))
skipping = false
elseif id == kern_code then
- report_injections("kern: %p",current.kern)
+ report_injections("kern: %p",getfield(current,"kern"))
skipping = false
elseif not skipping then
report_injections()
skipping = true
end
- current = current.next
+ current = getnext(current)
end
end
function injections.handler(head,where,keep)
+ head = tonut(head)
local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
if has_marks or has_cursives then
if trace_injections then
@@ -224,17 +271,18 @@ function injections.handler(head,where,keep)
if has_kerns then -- move outside loop
local nf, tm = nil, nil
for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts
- if n.subtype < 256 then
+ if getsubtype(n) < 256 then
nofvalid = nofvalid + 1
valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
+ local f = getfont(n)
+ if f ~= nf then
+ nf = f
+ tm = fontdata[nf].resources.marks -- other hash in ctx
end
if tm then
- mk[n] = tm[n.char]
+ mk[n] = tm[getchar(n)]
end
- local k = n[a_kernpair]
+ local k = getattr(n,a_kernpair)
if k then
local kk = kerns[k]
if kk then
@@ -254,15 +302,16 @@ function injections.handler(head,where,keep)
else
local nf, tm = nil, nil
for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
+ if getsubtype(n) < 256 then
nofvalid = nofvalid + 1
valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
+ local f = getfont(n)
+ if f ~= nf then
+ nf = f
+ tm = fontdata[nf].resources.marks -- other hash in ctx
end
if tm then
- mk[n] = tm[n.char]
+ mk[n] = tm[getchar(n)]
end
end
end
@@ -272,7 +321,7 @@ function injections.handler(head,where,keep)
local cx = { }
if has_kerns and next(ky) then
for n, k in next, ky do
- n.yoffset = k
+ setfield(n,"yoffset",k)
end
end
-- todo: reuse t and use maxt
@@ -283,9 +332,9 @@ function injections.handler(head,where,keep)
for i=1,nofvalid do -- valid == glyphs
local n = valid[i]
if not mk[n] then
- local n_cursbase = n[a_cursbase]
+ local n_cursbase = getattr(n,a_cursbase)
if p_cursbase then
- local n_curscurs = n[a_curscurs]
+ local n_curscurs = getattr(n,a_curscurs)
if p_cursbase == n_curscurs then
local c = cursives[n_curscurs]
if c then
@@ -310,20 +359,20 @@ function injections.handler(head,where,keep)
end
end
elseif maxt > 0 then
- local ny = n.yoffset
+ local ny = getfield(n,"yoffset")
for i=maxt,1,-1 do
ny = ny + d[i]
local ti = t[i]
- ti.yoffset = ti.yoffset + ny
+ setfield(ti,"yoffset",getfield(ti,"yoffset") + ny)
end
maxt = 0
end
if not n_cursbase and maxt > 0 then
- local ny = n.yoffset
+ local ny = getfield(n,"yoffset")
for i=maxt,1,-1 do
ny = ny + d[i]
local ti = t[i]
- ti.yoffset = ny
+ setfield(ti,"yoffset",ny) -- maybe add to current yoffset
end
maxt = 0
end
@@ -331,11 +380,11 @@ function injections.handler(head,where,keep)
end
end
if maxt > 0 then
- local ny = n.yoffset
+ local ny = getfield(n,"yoffset") -- hm, n unset ?
for i=maxt,1,-1 do
ny = ny + d[i]
local ti = t[i]
- ti.yoffset = ny
+ setfield(ti,"yoffset",ny)
end
maxt = 0
end
@@ -346,57 +395,83 @@ function injections.handler(head,where,keep)
if has_marks then
for i=1,nofvalid do
local p = valid[i]
- local p_markbase = p[a_markbase]
+ local p_markbase = getattr(p,a_markbase)
if p_markbase then
- local mrks = marks[p_markbase]
- local nofmarks = #mrks
- for n in traverse_id(glyph_code,p.next) do
- local n_markmark = n[a_markmark]
+ local mrks = marks[p_markbase]
+ local nofmarks = #mrks
+ for n in traverse_id(glyph_code,getnext(p)) do
+ local n_markmark = getattr(n,a_markmark)
if p_markbase == n_markmark then
- local index = n[a_markdone] or 1
+ local index = getattr(n,a_markdone) or 1
local d = mrks[index]
if d then
local rlmode = d[3]
--
local k = wx[p]
+ local px = getfield(p,"xoffset")
+ local ox = 0
if k then
local x = k[2]
local w = k[4]
if w then
if rlmode and rlmode >= 0 then
-- kern(x) glyph(p) kern(w-x) mark(n)
- n.xoffset = p.xoffset - p.width + d[1] - (w-x)
+ ox = px - getfield(p,"width") + d[1] - (w-x)
+ -- report_injections("l2r case 1: %p",ox)
else
-- kern(w-x) glyph(p) kern(x) mark(n)
- n.xoffset = p.xoffset - d[1] - x
+ ox = px - d[1] - x
+ -- report_injections("r2l case 1: %p",ox)
end
else
if rlmode and rlmode >= 0 then
-- okay for husayni
- n.xoffset = p.xoffset - p.width + d[1]
+ ox = px - getfield(p,"width") + d[1]
+ -- report_injections("r2l case 2: %p",ox)
else
-- needs checking: is x ok here?
- n.xoffset = p.xoffset - d[1] - x
+ ox = px - d[1] - x
+ -- report_injections("r2l case 2: %p",ox)
end
end
else
+ -- if rlmode and rlmode >= 0 then
+ -- ox = px - getfield(p,"width") + d[1]
+ -- -- report_injections("l2r case 3: %p",ox)
+ -- else
+ -- ox = px - d[1]
+ -- -- report_injections("r2l case 3: %p",ox)
+ -- end
+ --
+ -- we need to deal with fonts that have marks with width
+ --
+ local wp = getfield(p,"width")
+ local wn = getfield(n,"width") -- in arial marks have widths
if rlmode and rlmode >= 0 then
- n.xoffset = p.xoffset - p.width + d[1]
+ ox = px - wp + d[1]
+ -- report_injections("l2r case 3: %p",ox)
else
- n.xoffset = p.xoffset - d[1]
+ ox = px - d[1]
+ -- report_injections("r2l case 3: %p",ox)
end
- local w = n.width
- if w ~= 0 then
- insert_node_before(head,n,newkern(-w/2))
- insert_node_after(head,n,newkern(-w/2))
+ if wn ~= 0 then
+ -- bad: we should center
+ insert_node_before(head,n,newkern(-wn/2))
+ insert_node_after(head,n,newkern(-wn/2))
+ -- wx[n] = { 0, -wn/2, 0, -wn }
end
+ -- so far
end
- -- --
+ setfield(n,"xoffset",ox)
+ --
+ local py = getfield(p,"yoffset")
+ local oy = 0
if mk[p] then
- n.yoffset = p.yoffset + d[2]
+ oy = py + d[2]
else
- n.yoffset = n.yoffset + p.yoffset + d[2]
+ oy = getfield(n,"yoffset") + py + d[2]
end
+ setfield(n,"yoffset",oy)
--
if nofmarks == 1 then
break
@@ -404,6 +479,8 @@ function injections.handler(head,where,keep)
nofmarks = nofmarks - 1
end
end
+ elseif not n_markmark then
+ break -- HH: added 2013-09-12: no need to deal with non marks
else
-- KE: there can be sequences in ligatures
end
@@ -465,7 +542,7 @@ function injections.handler(head,where,keep)
-- if trace_injections then
-- show_result(head)
-- end
- return head, true
+ return tonode(head), true
elseif not keep then
kerns, cursives, marks = { }, { }, { }
end
@@ -474,14 +551,14 @@ function injections.handler(head,where,keep)
trace(head)
end
for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local k = n[a_kernpair]
+ if getsubtype(n) < 256 then
+ local k = getattr(n,a_kernpair)
if k then
local kk = kerns[k]
if kk then
local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
if y and y ~= 0 then
- n.yoffset = y -- todo: h ?
+ setfield(n,"yoffset",y) -- todo: h ?
end
if w then
-- copied from above
@@ -518,9 +595,9 @@ function injections.handler(head,where,keep)
-- if trace_injections then
-- show_result(head)
-- end
- return head, true
+ return tonode(head), true
else
-- no tracing needed
end
- return head, false
+ return tonode(head), false
end
diff --git a/tex/context/base/node-ltp.lua b/tex/context/base/node-ltp.lua
index c52e001df..e4956f7df 100644
--- a/tex/context/base/node-ltp.lua
+++ b/tex/context/base/node-ltp.lua
@@ -18,7 +18,6 @@ if not modules then modules = { } end modules ['node-par'] = {
-- todo: add a couple of plugin hooks
-- todo: maybe split expansion code paths
-- todo: fix line numbers (cur_list.pg_field needed)
--- todo: make kerns stretch an option and disable it by default (definitely not shrink)
-- todo: check and improve protrusion
-- todo: arabic etc (we could use pretty large scales there) .. marks and cursive
@@ -73,7 +72,8 @@ if not modules then modules = { } end modules ['node-par'] = {
To be honest, I slowly start to grasp the magic here as normally I start from scratch when implementing
something (as it's the only way I can understand things). This time I had a recently acquired stack of
- Porcupine Tree disks to get me through.
+    Porcupine Tree disks to get me through, although I must admit that watching their DVDs is more fun
+    than coding.
Picking up this effort was inspired by discussions between Luigi Scarso and me about efficiency of Lua
code and we needed some stress tests to compare regular LuaTeX and LuajitTeX. One of the tests was
@@ -121,6 +121,13 @@ if not modules then modules = { } end modules ['node-par'] = {
is enabled, but in the Lua variant the extra overhead is way less significant. This means that when we
retrofit the same approach into the core, the overhead of expansion can be sort of nilled.
+    In 2013 the expansion factor method was also adopted at the TeX end, so I could complete the code
+    here, and indeed, expansion works quite well now (not compatible of course, because we use floats at
+    the Lua end). The Lua base variant is still slower but quite ok, especially if we go nuts.
+
+    A next iteration will provide plug-ins and more control. I will also explore the possibility of
+    avoiding the redundant hpack calculations (easier now, although I've only done some quick and dirty
+    experiments).
+
]]--
local utfchar = utf.char
@@ -180,22 +187,38 @@ local chardata = fonthashes.characters
local quaddata = fonthashes.quads
local parameters = fonthashes.parameters
-local slide_nodes = node.slide
-local new_node = node.new
-local copy_node = node.copy
-local copy_node_list = node.copy_list
-local flush_node = node.free
-local flush_node_list = node.flush_list
-local hpack_nodes = node.hpack
-local xpack_nodes = node.hpack
-local replace_node = nodes.replace
-local insert_node_after = node.insert_after
-local insert_node_before = node.insert_before
-local traverse_by_id = node.traverse_id
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getlist = nuts.getlist
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getattr = nuts.getattr
+
+local slide_nodelist = nuts.slide -- get rid of this, probably ok > 78.2
+local find_tail = nuts.tail
+local new_node = nuts.new
+local copy_node = nuts.copy
+local copy_nodelist = nuts.copy_list
+local flush_node = nuts.free
+local flush_nodelist = nuts.flush_list
+local hpack_nodes = nuts.hpack
+local xpack_nodes = nuts.hpack
+local replace_node = nuts.replace
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+local traverse_by_id = nuts.traverse_id
local setnodecolor = nodes.tracers.colors.set
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local nodecodes = nodes.nodecodes
local whatcodes = nodes.whatcodes
@@ -287,7 +310,8 @@ local glyphdir_is_equal = nodes.glyphdir_is_equal
local dir_pops = nodes.dir_is_pop
local dir_negations = nodes.dir_negation
-local is_skipable = node.protrusion_skippable
+local is_skipable = nuts.protrusion_skippable
+
local a_fontkern = attributes.private('fontkern')
-- helpers --
@@ -308,12 +332,12 @@ local function checked_line_dir(stack,current)
local n = stack.n + 1
stack.n = n
stack[n] = current
- return current.dir
+ return getfield(current,"dir")
elseif n > 0 then
local n = stack.n
local dirnode = stack[n]
dirstack.n = n - 1
- return dirnode.dir
+ return getfield(dirnode,"dir")
else
report_parbuilders("warning: missing pop node (%a)",1) -- in line ...
end
@@ -328,8 +352,8 @@ local function inject_dirs_at_end_of_line(stack,current,start,stop)
local n = stack.n
local h = nil
while start and start ~= stop do
- if start.id == whatsit_code and start.subtype == dir_code then
- if not dir_pops[start.dir] then
+ if getid(start) == whatsit_code and getsubtype(start) == dir_code then
+ if not dir_pops[getfield(start,"dir")] then -- weird, what is this #
n = n + 1
stack[n] = start
elseif n > 0 then
@@ -338,10 +362,10 @@ local function inject_dirs_at_end_of_line(stack,current,start,stop)
report_parbuilders("warning: missing pop node (%a)",2) -- in line ...
end
end
- start = start.next
+ start = getnext(start)
end
for i=n,1,-1 do
- h, current = insert_node_after(current,current,new_dir(dir_negations[stack[i].dir]))
+ h, current = insert_node_after(current,current,new_dir(dir_negations[getfield(stack[i],"dir")]))
end
stack.n = n
return current
@@ -382,22 +406,8 @@ end
-- resolvers --
-local whatsiters = {
- get_width = { },
- get_dimensions = { },
-}
-
-local get_whatsit_width = whatsiters.get_width
-local get_whatsit_dimensions = whatsiters.get_dimensions
-
-local function get_width (n) return n.width end
-local function get_dimensions(n) return n.width, n.height, n.depth end
-
-get_whatsit_width[pdfrefximage_code] = get_width
-get_whatsit_width[pdfrefxform_code ] = get_width
-
-get_whatsit_dimensions[pdfrefximage_code] = get_dimensions
-get_whatsit_dimensions[pdfrefxform_code ] = get_dimensions
+local get_whatsit_width = nodes.whatsitters.getters.width
+local get_whatsit_dimensions = nodes.whatsitters.getters.dimensions
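+
+-- the whatsit width and dimension getters now come from a shared table that is
+-- populated elsewhere (the removed local variants handled pdf ref images and forms)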
-- expansion etc --
@@ -414,13 +424,13 @@ end
local function check_shrinkage(par,n)
-- called often, so maybe move inline -- use NORMAL
- if n.shrink_order ~= 0 and n.shrink ~= 0 then
+ if getfield(n,"shrink_order") ~= 0 and getfield(n,"shrink") ~= 0 then
if par.no_shrink_error_yet then
par.no_shrink_error_yet = false
report_parbuilders("infinite glue shrinkage found in a paragraph and removed")
end
n = copy_node(n)
- n.shrink_order = 0
+ setfield(n,"shrink_order",0)
end
return n
end
@@ -467,48 +477,10 @@ setmetatableindex(expansions,function(t,font) -- we can store this in tfmdata if
end
end)
--- local function char_stretch_shrink(p)
--- local data = expansions[p.font][p.char]
--- if data then
--- return data.glyphstretch, data.glyphshrink
--- else
--- return 0, 0
--- end
--- end
---
--- local cal_margin_kern_var = char_stretch_shrink
-
--- local function kern_stretch_shrink(p,d)
--- local l = p.prev
--- if l and l.id == glyph_code then -- how about disc nodes?
--- local r = p.next
--- if r and r.id == glyph_code then
--- local lf, rf = l.font, r.font
--- if lf == rf then
--- local data = expansions[lf][l.char]
--- if data then
--- local stretch = data.stretch
--- local shrink = data.shrink
--- if stretch ~= 0 then
--- -- stretch = data.factor * (d * stretch - d)
--- stretch = data.factor * d * (stretch - 1)
--- end
--- if shrink ~= 0 then
--- -- shrink = data.factor * (d * shrink - d)
--- shrink = data.factor * d * (shrink - 1)
--- end
--- return stretch, shrink
--- end
--- end
--- end
--- end
--- return 0, 0
--- end
-
local function kern_stretch_shrink(p,d)
- local left = p.prev
- if left and left.id == glyph_code then -- how about disc nodes?
- local data = expansions[left.font][left.char]
+ local left = getprev(p)
+ if left and getid(left) == glyph_code then -- how about disc nodes?
+ local data = expansions[getfont(left)][getchar(left)]
if data then
local stretch = data.stretch
local shrink = data.shrink
@@ -526,14 +498,8 @@ local function kern_stretch_shrink(p,d)
return 0, 0
end
--- local function kern_stretch_shrink(p,d)
--- -- maybe make it an option in luatex where we also need to check for attribute fontkern but in general
--- -- it makes no sense to scale kerns
--- return 0, 0
--- end
-
local expand_kerns = false
--- local expand_kerns = "both"
+----- expand_kerns = "both"
directives.register("builders.paragraphs.adjusting.kerns",function(v)
if not v then
@@ -623,18 +589,18 @@ end
local function find(head) -- do we really want to recurse into an hlist?
while head do
- local id = head.id
+ local id = getid(head)
if id == glyph_code then
return head
elseif id == hlist_code then
- local found = find(head.list)
+ local found = find(getlist(head))
if found then
return found
else
- head = head.next
+ head = getnext(head)
end
elseif is_skipable(head) then
- head = head.next
+ head = getnext(head)
else
return head
end
@@ -643,38 +609,38 @@ local function find(head) -- do we really want to recurse into an hlist?
end
local function find_protchar_left(l) -- weird function
- local ln = l.next
- if ln and ln.id == hlist_code and not ln.list and ln.width == 0 and ln.height == 0 and ln.depth == 0 then
- l = l.next
+ local ln = getnext(l)
+ if ln and getid(ln) == hlist_code and not getlist(ln) and getfield(ln,"width") == 0 and getfield(ln,"height") == 0 and getfield(ln,"depth") == 0 then
+ l = getnext(l)
else -- if d then -- was always true
- local id = l.id
+ local id = getid(l)
while ln and not (id == glyph_code or id < math_code) do -- is there always a glyph?
l = ln
- ln = l.next
- id = ln.id
+ ln = getnext(l)
+ id = getid(ln)
end
end
- -- if l.id == glyph_code then
+ -- if getid(l) == glyph_code then
-- return l
-- end
return find(l) or l
end
local function find(head,tail)
- local tail = tail or slide_nodes(head)
+ local tail = tail or find_tail(head)
while tail do
- local id = tail.id
+ local id = getid(tail)
if id == glyph_code then
return tail
elseif id == hlist_code then
- local found = find(tail.list)
+ local found = find(getlist(tail))
if found then
return found
else
- tail = tail.prev
+ tail = getprev(tail)
end
elseif is_skipable(tail) then
- tail = tail.prev
+ tail = getprev(tail)
else
return tail
end
@@ -687,8 +653,8 @@ local function find_protchar_right(l,r)
end
local function left_pw(p)
- local font = p.font
- local prot = chardata[font][p.char].left_protruding
+ local font = getfont(p)
+ local prot = chardata[font][getchar(p)].left_protruding
if not prot or prot == 0 then
return 0
end
@@ -696,8 +662,8 @@ local function left_pw(p)
end
local function right_pw(p)
- local font = p.font
- local prot = chardata[font][p.char].right_protruding
+ local font = getfont(p)
+ local prot = chardata[font][getchar(p)].right_protruding
if not prot or prot == 0 then
return 0
end
@@ -721,17 +687,17 @@ local function add_to_width(line_break_dir,checked_expansion,s) -- split into tw
local adjust_stretch = 0
local adjust_shrink = 0
while s do
- local id = s.id
+ local id = getid(s)
if id == glyph_code then
if is_rotated[line_break_dir] then -- can be shared
- size = size + s.height + s.depth
+ size = size + getfield(s,"height") + getfield(s,"depth")
else
- size = size + s.width
+ size = size + getfield(s,"width")
end
if checked_expansion then
- local data = checked_expansion[s.font]
+ local data = checked_expansion[getfont(s)]
if data then
- data = data[s.char]
+ data = data[getchar(s)]
if data then
adjust_stretch = adjust_stretch + data.glyphstretch
adjust_shrink = adjust_shrink + data.glyphshrink
@@ -739,16 +705,16 @@ local function add_to_width(line_break_dir,checked_expansion,s) -- split into tw
end
end
elseif id == hlist_code or id == vlist_code then
- if is_parallel[s.dir][line_break_dir] then
- size = size + s.width
+ if is_parallel[getfield(s,"dir")][line_break_dir] then
+ size = size + getfield(s,"width")
else
- size = size + s.depth + s.height
+ size = size + getfield(s,"height") + getfield(s,"depth")
end
elseif id == kern_code then
- local d = s.kern
- if d ~= 0 then
- if checked_expansion and expand_kerns and (s.subtype == kerning_code or s[a_fontkern]) then
- local stretch, shrink = kern_stretch_shrink(s,d)
+ local kern = getfield(s,"kern")
+ if kern ~= 0 then
+                if checked_expansion and expand_kerns and (getsubtype(s) == kerning_code or getattr(s,a_fontkern)) then
+ local stretch, shrink = kern_stretch_shrink(s,kern)
if expand_kerns == "stretch" then
adjust_stretch = adjust_stretch + stretch
elseif expand_kerns == "shrink" then
@@ -758,14 +724,14 @@ local function add_to_width(line_break_dir,checked_expansion,s) -- split into tw
adjust_shrink = adjust_shrink + shrink
end
end
- size = size + d
+ size = size + kern
end
elseif id == rule_code then
- size = size + s.width
- else
+ size = size + getfield(s,"width")
+ elseif trace_unsupported then
report_parbuilders("unsupported node at location %a",6)
end
- s = s.next
+ s = getnext(s)
end
return size, adjust_stretch, adjust_shrink
end
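-- A plain-table analogue (illustrative only, not the patch) of what add_to_width
-- collects for a pre/post/replace sublist: the natural size plus whatever glyph
-- expansion could stretch or shrink. Rotated directions are ignored here; the real
-- code adds height+depth instead of width in that case.

local function add_to_width_plain(list)
    local size, adjust_stretch, adjust_shrink = 0, 0, 0
    for i=1,#list do
        local n = list[i]
        size           = size           + (n.width       or 0)
        adjust_stretch = adjust_stretch + (n.glyphstretch or 0)
        adjust_shrink  = adjust_shrink  + (n.glyphshrink  or 0)
    end
    return size, adjust_stretch, adjust_shrink
end

print(add_to_width_plain {
    { width = 655360, glyphstretch = 13107, glyphshrink = 6553 }, -- a 10pt glyph
    { width = 327680 },                                           -- a 5pt kern
}) -- 983040  13107  6553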
@@ -779,14 +745,14 @@ local function compute_break_width(par,break_type,p) -- split in two
local break_size = break_width.size + disc_width.size
local break_adjust_stretch = break_width.adjust_stretch + disc_width.adjust_stretch
local break_adjust_shrink = break_width.adjust_shrink + disc_width.adjust_shrink
- local replace = p.replace
+ local replace = getfield(p,"replace")
if replace then
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,replace)
break_size = break_size - size
break_adjust_stretch = break_adjust_stretch - adjust_stretch
break_adjust_shrink = break_adjust_shrink - adjust_shrink
end
- local post = p.post
+ local post = getfield(p,"post")
if post then
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,post)
break_size = break_size + size
@@ -797,56 +763,56 @@ local function compute_break_width(par,break_type,p) -- split in two
break_width.adjust_stretch = break_adjust_stretch
break_width.adjust_shrink = break_adjust_shrink
if not post then
- p = p.next
+ p = getnext(p)
else
return
end
end
while p do -- skip spacing etc
- local id = p.id
+ local id = getid(p)
if id == glyph_code then
return -- happens often
elseif id == glue_code then
- local spec = p.spec
- local order = stretch_orders[spec.stretch_order]
- break_width.size = break_width.size - spec.width
- break_width[order] = break_width[order] - spec.stretch
- break_width.shrink = break_width.shrink - spec.shrink
+ local spec = getfield(p,"spec")
+ local order = stretch_orders[getfield(spec,"stretch_order")]
+ break_width.size = break_width.size - getfield(spec,"width")
+ break_width[order] = break_width[order] - getfield(spec,"stretch")
+ break_width.shrink = break_width.shrink - getfield(spec,"shrink")
elseif id == penalty_code then
-- do nothing
elseif id == kern_code then
- if p.subtype == userkern_code then
- break_width.size = break_width.size - p.kern
+ if getsubtype(p) == userkern_code then
+ break_width.size = break_width.size - getfield(p,"kern")
else
return
end
elseif id == math_code then
- break_width.size = break_width.size - p.surround
+ break_width.size = break_width.size - getfield(p,"surround")
else
return
end
- p = p.next
+ p = getnext(p)
end
end
local function append_to_vlist(par, b)
local prev_depth = par.prev_depth
if prev_depth > par.ignored_dimen then
- if b.id == hlist_code then
- local d = par.baseline_skip.width - prev_depth - b.height -- deficiency of space between baselines
- local s = d < par.line_skip_limit and new_lineskip(tex.lineskip) or new_baselineskip(d)
+ if getid(b) == hlist_code then
+ local d = getfield(par.baseline_skip,"width") - prev_depth - getfield(b,"height") -- deficiency of space between baselines
+ local s = d < par.line_skip_limit and new_lineskip(par.lineskip) or new_baselineskip(d)
-- local s = d < par.line_skip_limit
-- if s then
-- s = new_lineskip()
- -- s.spec = tex.lineskip
+ -- setfield(s,"spec",tex.lineskip)
-- else
-- s = new_baselineskip(d)
-- end
local head_field = par.head_field
if head_field then
- local n = slide_nodes(head_field)
- n.next = s
- s.prev = n
+ local n = slide_nodelist(head_field) -- todo: find_tail
+ setfield(n,"next",s)
+ setfield(s,"prev",n)
else
par.head_field = s
end
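-- A small numeric sketch (made-up dimensions, not part of the patch) of the decision
-- coded above: the "deficiency" is what remains of \baselineskip once the previous
-- depth and the new box height are accounted for; when it falls below \lineskiplimit
-- the \lineskip glue is used instead of a baselineskip of that deficiency.

local sp = 65536 -- scaled points per TeX point

local function interline_glue(baselineskip,prev_depth,height,lineskiplimit,lineskip)
    local d = baselineskip - prev_depth - height   -- deficiency between baselines
    if d < lineskiplimit then
        return "lineskip", lineskip
    else
        return "baselineskip", d
    end
end

print(interline_glue(12*sp, 2*sp, 7*sp, 0, 1*sp)) -- baselineskip  196608 (3pt left)
print(interline_glue(12*sp, 4*sp, 9*sp, 0, 1*sp)) -- lineskip      65536  (1pt)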
@@ -854,14 +820,14 @@ local function append_to_vlist(par, b)
end
local head_field = par.head_field
if head_field then
- local n = slide_nodes(head_field)
- n.next = b
- b.prev = n
+ local n = slide_nodelist(head_field) -- todo: find_tail
+ setfield(n,"next",b)
+ setfield(b,"prev",n)
else
par.head_field = b
end
- if b.id == hlist_code then
- local pd = b.depth
+ if getid(b) == hlist_code then
+ local pd = getfield(b,"depth")
par.prev_depth = pd
texnest[texnest.ptr].prevdepth = pd
end
@@ -870,9 +836,9 @@ end
local function append_list(par, b)
local head_field = par.head_field
if head_field then
- local n = slide_nodes(head_field)
- n.next = b
- b.prev = n
+ local n = slide_nodelist(head_field) -- todo: find_tail
+ setfield(n,"next",b)
+ setfield(b,"prev",n)
else
par.head_field = b
end
@@ -884,14 +850,18 @@ end
local hztolerance = 2500
local hzwarned = false
+local function used_skip(s)
+ return s and (getfield(s,"width") ~= 0 or getfield(s,"stretch") ~= 0 or getfield(s,"shrink") ~= 0) and s or nil
+end
+
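-- A plain-Lua analogue (tables standing in for glue nuts, not part of the patch) of
-- the used_skip optimizer above: a skip that contributes nothing becomes nil, so the
-- line building code can avoid inserting a left or right skip node at all.

local function used_skip_plain(s)
    return s and (s.width ~= 0 or s.stretch ~= 0 or s.shrink ~= 0) and s or nil
end

assert(used_skip_plain { width = 0,     stretch = 0, shrink = 0 } == nil)
assert(used_skip_plain { width = 65536, stretch = 0, shrink = 0 } ~= nil)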
local function initialize_line_break(head,display)
local hang_indent = tex.hangindent or 0
local hsize = tex.hsize or 0
local hang_after = tex.hangafter or 0
local par_shape_ptr = tex.parshape
- local left_skip = tex.leftskip -- nodes
- local right_skip = tex.rightskip -- nodes
+ local left_skip = tonut(tex.leftskip) -- nodes
+ local right_skip = tonut(tex.rightskip) -- nodes
local pretolerance = tex.pretolerance
local tolerance = tex.tolerance
local adjust_spacing = tex.pdfadjustspacing
@@ -899,7 +869,7 @@ local function initialize_line_break(head,display)
local last_line_fit = tex.lastlinefit
local newhead = new_temp()
- newhead.next = head
+ setfield(newhead,"next",head)
local adjust_spacing_status = adjust_spacing > 1 and -1 or 0
@@ -966,13 +936,13 @@ local function initialize_line_break(head,display)
last_line_depth = tex.pdflastlinedepth or 0, -- this will go away
ignored_dimen = tex.pdfignoreddimen or 0, -- this will go away
- baseline_skip = tex.baselineskip or 0,
- lineskip = tex.lineskip or 0,
- line_skip_limit = tex.lineskiplimit or 0,
+ baseline_skip = tonut(tex.baselineskip),
+ lineskip = tonut(tex.lineskip),
+ line_skip_limit = tex.lineskiplimit,
prev_depth = texnest[texnest.ptr].prevdepth,
- final_par_glue = slide_nodes(head), -- todo: we know tail already, slow
+ final_par_glue = slide_nodelist(head), -- todo: we know tail already, slow
par_break_dir = tex.pardir,
line_break_dir = tex.pardir,
@@ -1041,6 +1011,13 @@ local function initialize_line_break(head,display)
}
+ -- optimizers
+
+ par.used_left_skip = used_skip(par.left_skip)
+ par.used_right_skip = used_skip(par.right_skip)
+
+ -- so far
+
if adjust_spacing > 1 then
local checked_expansion = { par = par }
setmetatableindex(checked_expansion,check_expand_pars)
@@ -1062,13 +1039,13 @@ local function initialize_line_break(head,display)
local l = check_shrinkage(par,left_skip)
local r = check_shrinkage(par,right_skip)
- local l_order = stretch_orders[l.stretch_order]
- local r_order = stretch_orders[r.stretch_order]
+ local l_order = stretch_orders[getfield(l,"stretch_order")]
+ local r_order = stretch_orders[getfield(r,"stretch_order")]
- background.size = l.width + r.width
- background.shrink = l.shrink + r.shrink
- background[l_order] = l.stretch
- background[r_order] = r.stretch + background[r_order]
+ background.size = getfield(l,"width") + getfield(r,"width")
+ background.shrink = getfield(l,"shrink") + getfield(r,"shrink")
+ background[l_order] = getfield(l,"stretch")
+ background[r_order] = getfield(r,"stretch") + background[r_order]
-- this will move up so that we can assign the whole par table
@@ -1148,185 +1125,193 @@ local function initialize_line_break(head,display)
return par
end
+-- there are still all kinds of artefacts in here (a side effect, I guess, of pdftex,
+-- etex, omega and other extensions that got obscured by patching)
+
local function post_line_break(par)
local prevgraf = texnest[texnest.ptr].prevgraf
- local cur_line = prevgraf + 1 -- the current line number being justified
- local cur_p = nil
+ local current_line = prevgraf + 1 -- the current line number being justified
local adjust_spacing = par.adjust_spacing
local protrude_chars = par.protrude_chars
local statistics = par.statistics
- local p, s, k, w -- check when local
+ local stack = new_dir_stack()
+
+ local leftskip = par.used_left_skip -- used or normal ?
+ local rightskip = par.right_skip
+ local parshape = par.par_shape_ptr
+ local ignored_dimen = par.ignored_dimen
- local q = par.best_bet.break_node
- repeat -- goto first breakpoint
- local r = q
- q = q.prev_break
- r.prev_break = cur_p
- cur_p = r
- until not q
+ local adapt_width = par.adapt_width
- local stack = new_dir_stack()
+ -- reverse the links of the relevant passive nodes, goto first breakpoint
+ local current_break = nil
+
+ local break_node = par.best_bet.break_node
repeat
+ local first_break = break_node
+ break_node = break_node.prev_break
+ first_break.prev_break = current_break
+ current_break = first_break
+ until not break_node
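-- A standalone illustration (dummy records, not part of the patch) of the pointer
-- reversal above: best_bet links the passive break records from the last break back
-- to the first via prev_break, and the loop flips that chain so the lines can then
-- be built in document order.

local function reverse_breaks(break_node)
    local current_break = nil
    repeat
        local first_break = break_node
        break_node = break_node.prev_break
        first_break.prev_break = current_break
        current_break = first_break
    until not break_node
    return current_break -- now the first break of the paragraph
end

local b1 = { serial = 1 }
local b2 = { serial = 2, prev_break = b1 }
local b3 = { serial = 3, prev_break = b2 }

local first = reverse_breaks(b3) -- b3 was the last break found
assert(first.serial == 1 and first.prev_break.serial == 2)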
+
+ local head = par.head
- inject_dirs_at_begin_of_line(stack,par.head)
+ -- maybe : each_...
- local q = nil
- local r = cur_p.cur_break
+ while current_break do
+
+ inject_dirs_at_begin_of_line(stack,head)
local disc_break = false
local post_disc_break = false
local glue_break = false
- if not r then
- r = slide_nodes(par.head)
- if r == par.final_par_glue then
- q = r -- q refers to the last node of the line (and paragraph)
- r = r.prev -- r refers to the node after which the dir nodes should be closed
+        local lineend  = nil                            -- was q: the last node of the line (and paragraph)
+        local lastnode = current_break.cur_break        -- was r: the node after which the dir nodes should be closed
+
+ if not lastnode then
+ -- only at the end
+ lastnode = slide_nodelist(head) -- todo: find_tail
+ if lastnode == par.final_par_glue then
+ lineend = lastnode
+ lastnode = getprev(lastnode)
end
- else
- local id = r.id
+ else -- todo: use insert_list_after
+ local id = getid(lastnode)
if id == glue_code then
- -- r is normal skip
- r = replace_node(r,new_rightskip(par.right_skip))
+ -- lastnode is normal skip
+ lastnode = replace_node(lastnode,new_rightskip(rightskip))
glue_break = true
- q = r -- q refers to the last node of the line
- r = r.prev -- r refers to the node after which the dir nodes should be closed
+ lineend = lastnode
+                lastnode = getprev(lastnode)
elseif id == disc_code then
- -- todo: use insert_before/after
- local prev_r = r.prev
- local next_r = r.next
- local subtype = r.subtype
- local pre = r.pre
- local post = r.post
- local replace = r.replace
+ local prevlast = getprev(lastnode)
+ local nextlast = getnext(lastnode)
+ local subtype = getsubtype(lastnode)
+ local pre = getfield(lastnode,"pre")
+ local post = getfield(lastnode,"post")
+ local replace = getfield(lastnode,"replace")
if subtype == second_disc_code then
- if not (prev_r.id == disc_code and prev_r.subtype == first_disc_code) then
+ if not (getid(prevlast) == disc_code and getsubtype(prevlast) == first_disc_code) then
report_parbuilders('unsupported disc at location %a',3)
end
if pre then
- flush_node_list(pre)
- r.pre = nil
- pre = nil -- signal
+ flush_nodelist(pre)
+ setfield(lastnode,"pre",nil)
+ pre = nil -- signal
end
if replace then
- local n = slide_nodes(replace)
- prev_r.next = replace
- replace.prev = prev_r
- n.next = r
- r.prev = n
- r.replace = nil
- replace = nil -- signal
+ local n = find_tail(replace)
+ setfield(prevlast,"next",replace)
+ setfield(replace,"prev",prevlast)
+ setfield(n,"next",lastnode)
+ setfield(lastnode,"prev",n)
+ setfield(lastnode,"replace",nil)
+ replace = nil -- signal
end
- local pre = prev_r.pre
- local post = prev_r.post
- local replace = prev_r.replace
+ local pre = getfield(prevlast,"pre")
+ local post = getfield(prevlast,"post")
+ local replace = getfield(prevlast,"replace")
if pre then
- flush_node_list(pre)
- prev_r.pre = nil
+ flush_nodelist(pre)
+ setfield(prevlast,"pre",nil)
end
if replace then
- flush_node_list(replace)
- prev_r.replace = nil
+ flush_nodelist(replace)
+ setfield(prevlast,"replace",nil)
end
if post then
- flush_node_list(post)
- prev_r.post = nil
+ flush_nodelist(post)
+ setfield(prevlast,"post",nil)
end
elseif subtype == first_disc_code then
- if not (v.id == disc_code and v.subtype == second_disc_code) then
+                    -- the disc that follows must be the matching second_disc
+                    if not (getid(nextlast) == disc_code and getsubtype(nextlast) == second_disc_code) then
report_parbuilders('unsupported disc at location %a',4)
end
- next_r.subtype = regular_disc_code
- next_r.replace = post
- r.post = nil
+ setfield(nextlast,"subtype",regular_disc_code)
+ setfield(nextlast,"replace",post)
+ setfield(lastnode,"post",nil)
end
if replace then
- r.replace = nil -- free
- flush_node_list(replace)
+ setfield(lastnode,"replace",nil) -- free
+ flush_nodelist(replace)
end
if pre then
- local n = slide_nodes(pre)
- prev_r.next = pre
- pre.prev = prev_r
- n.next = r
- r.prev = n
- r.pre = nil
+ local n = find_tail(pre)
+ setfield(prevlast,"next",pre)
+ setfield(pre,"prev",prevlast)
+ setfield(n,"next",lastnode)
+ setfield(lastnode,"prev",n)
+ setfield(lastnode,"pre",nil)
end
if post then
- local n = slide_nodes(post)
- r.next = post
- post.prev = r
- n.next = next_r
- next_r.prev = n
- r.post = nil
+ local n = find_tail(post)
+ setfield(lastnode,"next",post)
+ setfield(post,"prev",lastnode)
+ setfield(n,"next",nextlast)
+ setfield(nextlast,"prev",n)
+ setfield(lastnode,"post",nil)
post_disc_break = true
end
disc_break = true
elseif id == kern_code then
- r.kern = 0
- elseif r.id == math_code then
- r.surround = 0
+ setfield(lastnode,"kern",0)
+ elseif getid(lastnode) == math_code then
+ setfield(lastnode,"surround",0)
end
end
- r = inject_dirs_at_end_of_line(stack,r,par.head.next,cur_p.cur_break)
- local crb = cur_p.passive_right_box
- if crb then
- local s = copy_node(crb)
- local e = r.next
- r.next = s
- s.prev = r
- s.next = e
- if e then
- e.prev = s
- end
- r = s
+ lastnode = inject_dirs_at_end_of_line(stack,lastnode,getnext(head),current_break.cur_break)
+ local rightbox = current_break.passive_right_box
+ if rightbox then
+ lastnode = insert_node_after(lastnode,lastnode,copy_node(rightbox))
end
- if not q then
- q = r
+ if not lineend then
+ lineend = lastnode
end
- if q and q ~= par.head and protrude_chars > 0 then
- local id = q.id
- local c = (disc_break and (id == glyph_code or id ~= disc_code) and q) or q.prev
- local p = find_protchar_right(par.head.next,c)
- if p and p.id == glyph_code then
+ if lineend and lineend ~= head and protrude_chars > 0 then
+ local id = getid(lineend)
+ local c = (disc_break and (id == glyph_code or id ~= disc_code) and lineend) or getprev(lineend)
+ local p = find_protchar_right(getnext(head),c)
+ if p and getid(p) == glyph_code then
local w, last_rightmost_char = right_pw(p)
if last_rightmost_char and w ~= 0 then
- -- so we inherit attributes, q is new pseudo head
- q, c = insert_node_after(q,c,new_rightmarginkern(copy_node(last_rightmost_char),-w))
+ -- so we inherit attributes, lineend is new pseudo head
+ lineend, c = insert_node_after(lineend,c,new_rightmarginkern(copy_node(last_rightmost_char),-w))
end
end
end
+ -- we finish the line
+ local r = getnext(lineend)
+ setfield(lineend,"next",nil)
if not glue_break then
- local h
- h, q = insert_node_after(q,q,new_rightskip(par.right_skip)) -- q moves on as pseudo head
- end
- r = q.next
- q.next = nil
- local phead = par.head
- q = phead.next
- phead.next = r
+ if rightskip then
+                insert_node_after(lineend,lineend,new_rightskip(rightskip)) -- lineend moves on as pseudo head
+ end
+ end
+ -- each time ?
+ local q = getnext(head)
+ setfield(head,"next",r)
if r then
- r.prev = phead
- end
- local clb = cur_p.passive_left_box
- if clb then -- here we miss some prev links
- local s = copy_node(cb)
- s = q.next
- r.next = q
- q = r
- if s and cur_line == (par.first_line + 1) and s.id == hlist_code and not s.list then
- q = q.next
- r.next = s.next
- s.next = r
+ setfield(r,"prev",head)
+ end
+ -- insert leftbox (if needed after parindent)
+ local leftbox = current_break.passive_left_box
+ if leftbox then
+ local first = getnext(q)
+ if first and current_line == (par.first_line + 1) and getid(first) == hlist_code and not getlist(first) then
+ insert_node_after(q,q,copy_node(leftbox))
+ else
+ q = insert_node_before(q,q,copy_node(leftbox))
end
end
if protrude_chars > 0 then
local p = find_protchar_left(q)
- if p and p.id == glyph_code then
+ if p and getid(p) == glyph_code then
local w, last_leftmost_char = left_pw(p)
if last_leftmost_char and w ~= 0 then
-- so we inherit attributes, q is pseudo head and moves back
@@ -1334,32 +1319,35 @@ local function post_line_break(par)
end
end
end
- local ls = par.left_skip
- if ls and (ls.width ~= 0 or ls.stretch ~= 0 or ls.shrink ~= 0) then
- q = insert_node_before(q,q,new_leftskip(ls))
+ if leftskip then
+ q = insert_node_before(q,q,new_leftskip(leftskip))
end
- local curwidth, cur_indent
- if cur_line > par.last_special_line then
+ local cur_width, cur_indent
+ if current_line > par.last_special_line then
cur_indent = par.second_indent
cur_width = par.second_width
+ elseif parshape then
+ local shape = parshape[current_line]
+ cur_indent = shape[1]
+ cur_width = shape[2]
else
- local psp = par.par_shape_ptr
- if psp then
- cur_indent = psp[cur_line][1]
- cur_width = psp[cur_line][2]
- else
- cur_indent = par.first_indent
- cur_width = par.first_width
- end
+ cur_indent = par.first_indent
+ cur_width = par.first_width
end
+
+ if adapt_width then -- extension
+ local l, r = adapt_width(par,current_line)
+ cur_indent = cur_indent + l
+ cur_width = cur_width - l - r
+ end
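-- A hedged sketch of the adapt_width extension used above; the patch only shows the
-- call site, so assume some wrapper stores a function in the par table. It receives
-- the par table and the line number and returns extra left and right margins (in
-- scaled points) that narrow the line accordingly.

local function example_adapt_width(par,line) -- hypothetical callback
    if line <= 3 then
        return 3 * 65536, 0 -- push the first three lines 3pt to the right
    else
        return 0, 0
    end
end

-- par.adapt_width = example_adapt_width -- assumed wiring, for illustration only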
+
statistics.noflines = statistics.noflines + 1
+ local finished_line = nil
if adjust_spacing > 0 then
statistics.nofadjustedlines = statistics.nofadjustedlines + 1
- -- in the built-in hpack cal_expand_ratio will later on call subst_ext_font
- -- in the alternative approach we can do both in one run
- just_box = xpack_nodes(q,cur_width,"cal_expand_ratio",par.par_break_dir) -- ,cur_p.analysis)
+ finished_line = xpack_nodes(q,cur_width,"cal_expand_ratio",par.par_break_dir,par.first_line,current_line) -- ,current_break.analysis)
else
- just_box = xpack_nodes(q,cur_width,"exactly",par.par_break_dir) -- ,cur_p.analysis)
+ finished_line = xpack_nodes(q,cur_width,"exactly",par.par_break_dir,par.first_line,current_line) -- ,current_break.analysis)
end
if protrude_chars > 0 then
statistics.nofprotrudedlines = statistics.nofprotrudedlines + 1
@@ -1368,39 +1356,42 @@ local function post_line_break(par)
local adjust_head = texlists.adjust_head
local pre_adjust_head = texlists.pre_adjust_head
--
- just_box.shift = cur_indent
- if par.each_line_height ~= par.ignored_dimen then
- just_box.height = par.each_line_height
+ setfield(finished_line,"shift",cur_indent)
+ -- this will probably go away:
+ if par.each_line_height ~= ignored_dimen then
+ setfield(finished_line,"height",par.each_line_height)
end
- if par.each_line_depth ~= par.ignored_dimen then
- just_box.depth = par.each_line_depth
+ if par.each_line_depth ~= ignored_dimen then
+ setfield(finished_line,"depth",par.each_line_depth)
end
- if par.first_line_height ~= par.ignored_dimen and (cur_line == par.first_line + 1) then
- just_box.height = par.first_line_height
+ if par.first_line_height ~= ignored_dimen and (current_line == par.first_line + 1) then
+ setfield(finished_line,"height",par.first_line_height)
end
- if par.last_line_depth ~= par.ignored_dimen and cur_line + 1 == par.best_line then
- just_box.depth = par.last_line_depth
+ if par.last_line_depth ~= ignored_dimen and current_line + 1 == par.best_line then
+ setfield(finished_line,"depth",par.last_line_depth)
end
+ --
if texlists.pre_adjust_head ~= pre_adjust_head then
append_list(par, texlists.pre_adjust_head)
texlists.pre_adjust_head = pre_adjust_head
end
- append_to_vlist(par, just_box)
+ append_to_vlist(par,finished_line)
if texlists.adjust_head ~= adjust_head then
append_list(par, texlists.adjust_head)
texlists.adjust_head = adjust_head
end
+ --
local pen
- if cur_line + 1 ~= par.best_line then
- if cur_p.passive_pen_inter then
- pen = cur_p.passive_pen_inter
+ if current_line + 1 ~= par.best_line then
+ if current_break.passive_pen_inter then
+ pen = current_break.passive_pen_inter
else
pen = par.inter_line_penalty
end
- if cur_line == prevgraf + 1 then
+ if current_line == prevgraf + 1 then
pen = pen + par.club_penalty
end
- if cur_line + 2 == par.best_line then
+ if current_line + 2 == par.best_line then
if par.display then
pen = pen + par.display_widow_penalty
else
@@ -1408,56 +1399,62 @@ local function post_line_break(par)
end
end
if disc_break then
- if cur_p.passive_pen_broken ~= 0 then
- pen = pen + cur_p.passive_pen_broken
+ if current_break.passive_pen_broken ~= 0 then
+ pen = pen + current_break.passive_pen_broken
else
pen = pen + par.broken_penalty
end
end
if pen ~= 0 then
append_to_vlist(par,new_penalty(pen))
- end
+            end
end
- cur_line = cur_line + 1
- cur_p = cur_p.prev_break
- if cur_p and not post_disc_break then
- local phead = par.head
- local r = phead
+ current_line = current_line + 1
+ current_break = current_break.prev_break
+ if current_break and not post_disc_break then
+ local current = head
+ local next = nil
while true do
- q = r.next
- if q == cur_p.cur_break or q.id == glyph_code then
+ next = getnext(current)
+ if next == current_break.cur_break or getid(next) == glyph_code then
break
end
- local id = q.id
- if not (id == whatsit_code and q.subtype == localpar_code) then
- if id < math_code or (id == kern_code and q.subtype ~= userkern_code) then
- break
- end
+ local id = getid(next)
+ local subtype = getsubtype(next)
+ if id == whatsit_code and subtype == localpar_code then
+ -- nothing
+ elseif id < math_code then
+                    -- messy criterion
+ break
+                elseif id == math_code then
+ -- keep the math node
+ setfield(next,"surround",0)
+ break
+ elseif id == kern_code and (subtype ~= userkern_code and not getattr(next,a_fontkern)) then
+ -- fontkerns and accent kerns as well as otf injections
+ break
end
- r = q
+ current = next
end
- if r ~= phead then
- r.next = nil
- flush_node_list(phead.next)
- phead.next = q
- if q then
- q.prev = phead
+ if current ~= head then
+ setfield(current,"next",nil)
+ flush_nodelist(getnext(head))
+ setfield(head,"next",next)
+ if next then
+ setfield(next,"prev",head)
end
end
end
- until not cur_p
- if cur_line ~= par.best_line then -- or not par.head.next then
- report_parbuilders("line breaking")
- end
- if par.head then -- added
--- flush_node(par.head) -- the localpar_code whatsit
- par.head = nil
end
- cur_line = cur_line - 1
+ -- if current_line ~= par.best_line then
+ -- report_parbuilders("line breaking")
+ -- end
+ par.head = nil -- needs checking
+ current_line = current_line - 1
if trace_basic then
- report_parbuilders("paragraph broken into %a lines",cur_line)
+ report_parbuilders("paragraph broken into %a lines",current_line)
end
- texnest[texnest.ptr].prevgraf = cur_line
+ texnest[texnest.ptr].prevgraf = current_line
end
local function wrap_up(par)
@@ -1475,11 +1472,11 @@ local function wrap_up(par)
par.do_last_line_fit = false
else
local glue = par.final_par_glue
- local spec = copy_node(glue.spec)
- spec.width = spec.width + active_short - active_glue
- spec.stretch = 0
- -- flush_node(glue.spec) -- brrr, when we do this we can get an "invalid id stretch message", maybe dec refcount
- glue.spec = spec
+ local spec = copy_node(getfield(glue,"spec"))
+ setfield(spec,"width",getfield(spec,"width") + active_short - active_glue)
+ setfield(spec,"stretch",0)
+ -- flush_node(getfield(glue,"spec")) -- brrr, when we do this we can get an "invalid id stretch message", maybe dec refcount
+ setfield(glue,"spec",spec)
if trace_lastlinefit then
report_parbuilders("applying last line fit, short %a, glue %p",active_short,active_glue)
end
@@ -1487,8 +1484,8 @@ local function wrap_up(par)
end
     -- we have a bunch of glue and temp nodes not freed
local head = par.head
- if head.id == temp_code then
- par.head = head.next
+ if getid(head) == temp_code then
+ par.head = getnext(head)
flush_node(head)
end
post_line_break(par)
@@ -1498,7 +1495,8 @@ local function wrap_up(par)
end
-- we could do active nodes differently ... table instead of linked list or a list
--- with prev nodes
+-- with prev nodes, but it doesn't save much (we would still need to keep indices
+-- in the next fields)
local function deactivate_node(par,prev_prev_r,prev_r,r,cur_active_width,checked_expansion) -- no need for adjust if disabled
local active = par.active
@@ -1616,18 +1614,26 @@ local function lastlinecrap(shortfall,active_short,active_glue,cur_active_width,
end
end
-local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
+-- todo: statistics .. count tries and so on
- if pi >= infinite_penalty then
- return -- this breakpoint is inhibited by infinite penalty
- elseif pi <= -infinite_penalty then
- pi = eject_penalty -- this breakpoint will be forced
+local trialcount = 0
+
+local function try_break(pi, break_type, par, first_p, current, checked_expansion)
+
+-- trialcount = trialcount + 1
+-- print(trialcount,pi,break_type,current,nuts.tostring(current))
+
+ if pi >= infinite_penalty then -- this breakpoint is inhibited by infinite penalty
+ local p_active = par.active
+ return p_active, p_active and p_active.next
+ elseif pi <= -infinite_penalty then -- this breakpoint will be forced
+ pi = eject_penalty
end
local prev_prev_r = nil -- a step behind prev_r, if type(prev_r)=delta_code
local prev_r = par.active -- stays a step behind r
local r = nil -- runs through the active list
- local no_break_yet = true -- have we found a feasible break at cur_p?
+ local no_break_yet = true -- have we found a feasible break at current?
local node_r_stays_active = false -- should node r remain in the active list?
local line_width = 0 -- the current line will be justified to this width
local line_number = 0 -- line number of current active node
@@ -1648,6 +1654,10 @@ local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
local tracing_paragraphs = par.tracing_paragraphs
-- local par_active = par.active
+ local adapt_width = par.adapt_width
+
+ local parshape = par.par_shape_ptr
+
local cur_active_width = checked_expansion and { -- distance from current active node
size = active_width.size,
stretch = active_width.stretch,
@@ -1702,8 +1712,8 @@ local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
break_width.adjust_stretch = 0
break_width.adjust_shrink = 0
end
- if cur_p then
- compute_break_width(par,break_type,cur_p)
+ if current then
+ compute_break_width(par,break_type,current)
end
end
if prev_r.id == delta_code then
@@ -1769,14 +1779,14 @@ local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
end
for fit_class = fit_very_loose_class, fit_tight_class do
if minimal_demerits[fit_class] <= minimum_demerits then
- -- insert a new active node from best_place[fit_class] to cur_p
+ -- insert a new active node from best_place[fit_class] to current
par.pass_number = par.pass_number + 1
local prev_break = best_place[fit_class]
local passive = {
id = passive_code,
subtype = nosubtype_code,
next = par.passive,
- cur_break = cur_p,
+ cur_break = current,
serial = par.pass_number,
prev_break = prev_break,
passive_pen_inter = par.internal_pen_inter,
@@ -1811,7 +1821,7 @@ local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
prev_r.next = q
prev_r = q
if tracing_paragraphs then
- diagnostics.break_node(par,q,fit_class,break_type,cur_p)
+ diagnostics.break_node(par,q,fit_class,break_type,current)
end
end
minimal_demerits[fit_class] = awful_badness
@@ -1850,7 +1860,7 @@ local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
end
end
if r == par.active then
- return
+ return r, r and r.next -- p_active, n_active
end
if line_number > par.easy_line then
old_line_number = max_halfword - 1
@@ -1859,12 +1869,16 @@ local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
old_line_number = line_number
if line_number > par.last_special_line then
line_width = par.second_width
- elseif par.par_shape_ptr then
- line_width = par.par_shape_ptr[line_number][2]
+ elseif parshape then
+ line_width = parshape[line_number][2]
else
line_width = par.first_width
end
end
+ if adapt_width then
+ local l, r = adapt_width(par,line_number)
+ line_width = line_width - l - r
+ end
end
local artificial_demerits = false -- has d been forced to zero
local shortfall = line_width - cur_active_width.size - par.internal_right_box_width -- used in badness calculations
@@ -1878,17 +1892,17 @@ local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
-- this is quite time consuming
local b = r.break_node
local l = b and b.cur_break or first_p
- local o = cur_p and cur_p.prev
- if cur_p and cur_p.id == disc_code and cur_p.pre then
- o = slide_nodes(cur_p.pre)
+ local o = current and getprev(current)
+ if current and getid(current) == disc_code and getfield(current,"pre") then
+ o = find_tail(getfield(current,"pre"))
else
o = find_protchar_right(l,o)
end
- if o and o.id == glyph_code then
+ if o and getid(o) == glyph_code then
pw, rp = right_pw(o)
shortfall = shortfall + pw
end
- local id = l.id
+ local id = getid(l)
if id == glyph_code then
-- ok ?
elseif id == disc_code and l.post then
@@ -1896,7 +1910,7 @@ local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
else
l = find_protchar_left(l)
end
- if l and l.id == glyph_code then
+ if l and getid(l) == glyph_code then
pw, lp = left_pw(l)
shortfall = shortfall + pw
end
@@ -1906,27 +1920,23 @@ local function try_break(pi, break_type, par, first_p, cur_p, checked_expansion)
local margin_kern_shrink = 0
if protrude_chars > 1 then
if lp then
--- margin_kern_stretch, margin_kern_shrink = cal_margin_kern_var(lp)
-local data = expansions[lp.font][lp.char]
-if data then
- margin_kern_stretch, margin_kern_shrink = data.glyphstretch, data.glyphshrink
-end
+ local data = expansions[getfont(lp)][getchar(lp)]
+ if data then
+ margin_kern_stretch, margin_kern_shrink = data.glyphstretch, data.glyphshrink
+ end
end
if rp then
--- local mka, mkb = cal_margin_kern_var(rp)
--- margin_kern_stretch = margin_kern_stretch + mka
--- margin_kern_shrink = margin_kern_shrink + mkb
-local data = expansions[lp.font][lp.char]
-if data then
- margin_kern_stretch = margin_kern_stretch + data.glyphstretch
- margin_kern_shrink = margin_kern_shrink + data.glyphshrink
-end
+                local data = expansions[getfont(rp)][getchar(rp)]
+ if data then
+ margin_kern_stretch = margin_kern_stretch + data.glyphstretch
+ margin_kern_shrink = margin_kern_shrink + data.glyphshrink
+ end
end
end
local total = cur_active_width.adjust_stretch + margin_kern_stretch
if shortfall > 0 and total > 0 then
if total > shortfall then
- shortfall = total / (par.max_stretch_ratio / par.cur_font_step) / 2 -- to be adapted
+ shortfall = total / (par.max_stretch_ratio / par.cur_font_step) / 2
else
shortfall = shortfall - total
end
@@ -1934,7 +1944,7 @@ end
total = cur_active_width.adjust_shrink + margin_kern_shrink
if shortfall < 0 and total > 0 then
if total > - shortfall then
- shortfall = - total / (par.max_shrink_ratio / par.cur_font_step) / 2 -- to be adapted
+ shortfall = - total / (par.max_shrink_ratio / par.cur_font_step) / 2
else
shortfall = shortfall + total
end
@@ -1949,7 +1959,7 @@ end
if cur_active_width.fi ~= 0 or cur_active_width.fil ~= 0 or cur_active_width.fill ~= 0 or cur_active_width.filll ~= 0 then
if not do_last_line_fit then
-- okay
- elseif not cur_p then
+ elseif not current then
found, shortfall, fit_class, g, b = lastlinecrap(shortfall,r.active_short,r.active_glue,cur_active_width,par.fill_width,par.last_line_fit)
else
shortfall = 0
@@ -1984,7 +1994,7 @@ end
end
end
if do_last_line_fit and not found then
- if not cur_p then
+ if not current then
-- g = 0
shortfall = 0
elseif shortfall > 0 then
@@ -2032,7 +2042,7 @@ end
d = d - pi * pi
end
if break_type == hyphenated_code and r.id == hyphenated_code then
- if cur_p then
+ if current then
d = d + par.double_hyphen_demerits
else
d = d + par.final_hyphen_demerits
@@ -2044,9 +2054,9 @@ end
end
end
if tracing_paragraphs then
- diagnostics.feasible_break(par,cur_p,r,b,pi,d,artificial_demerits)
+ diagnostics.feasible_break(par,current,r,b,pi,d,artificial_demerits)
end
- d = d + r.total_demerits -- this is the minimum total demerits from the beginning to cur_p via r
+ d = d + r.total_demerits -- this is the minimum total demerits from the beginning to current via r
if d <= minimal_demerits[fit_class] then
minimal_demerits[fit_class] = d
best_place [fit_class] = r.break_node
@@ -2070,25 +2080,16 @@ end
end
end
-local function kern_break(par, cur_p, first_p, checked_expansion) -- move inline if needed
- local v = cur_p.next
- if par.auto_breaking and v.id == glue_code then
- try_break(0, unhyphenated_code, par, first_p, cur_p, checked_expansion)
- end
- local active_width = par.active_width
- if cur_p.id ~= math_code then
- active_width.size = active_width.size + cur_p.kern
- else
- active_width.size = active_width.size + cur_p.surround
- end
-end
-
-- we can call the normal one for simple box building in the otr so we need
-- frequent enabling/disabling
+local dcolor = { [0] = "red", "green", "blue", "magenta", "cyan", "gray" }
+
local temp_head = new_temp()
function constructors.methods.basic(head,d)
+ head = tonut(head)
+
if trace_basic then
report_parbuilders("starting at %a",head)
end
@@ -2140,24 +2141,27 @@ function constructors.methods.basic(head,d)
par.passive = nil -- = 0
par.printed_node = temp_head -- only when tracing, shared
- par.printed_node.next = head
par.pass_number = 0
- par.auto_breaking = true
+-- par.auto_breaking = true
- local cur_p = head
- local first_p = cur_p
+ setfield(temp_head,"next",head)
+
+ local current = head
+ local first_p = current
+
+ local auto_breaking = true
par.font_in_short_display = 0
- if cur_p and cur_p.id == whatsit_code and cur_p.subtype == localpar_code then
- par.init_internal_left_box = cur_p.box_left
- par.init_internal_left_box_width = cur_p.box_left_width
- par.internal_pen_inter = cur_p.pen_inter
- par.internal_pen_broken = cur_p.pen_broken
+ if current and getid(current) == whatsit_code and getsubtype(current) == localpar_code then
+ par.init_internal_left_box = getfield(current,"box_left")
+ par.init_internal_left_box_width = getfield(current,"box_left_width")
+ par.internal_pen_inter = getfield(current,"pen_inter")
+ par.internal_pen_broken = getfield(current,"pen_broken")
par.internal_left_box = par.init_internal_left_box
par.internal_left_box_width = par.init_internal_left_box_width
- par.internal_right_box = cur_p.box_right
- par.internal_right_box_width = cur_p.box_right_width
+ par.internal_right_box = getfield(current,"box_right")
+ par.internal_right_box_width = getfield(current,"box_right_width")
end
-- all passes are combined in this loop so maybe we should split this into
@@ -2169,23 +2173,34 @@ function constructors.methods.basic(head,d)
local fontexp, lastfont -- we can pass fontexp to calculate width if needed
- while cur_p and par.active.next ~= par.active do
- while cur_p and cur_p.id == glyph_code do
+    -- I flattened the inner loop over glyphs .. it looks nicer and the extra p_active ~= n_active
+    -- test is fast enough (and try_break now returns the updated values); the kern helper has been
+    -- inlined as it did a double check on id, so in fact we had hardly any code to share
+
+ local p_active = par.active
+ local n_active = p_active and p_active.next
+ local second_pass = par.second_pass
+
+ trialcount = 0
+
+ while current and p_active ~= n_active do
+ local id = getid(current)
+ if id == glyph_code then
if is_rotated[par.line_break_dir] then
- active_width.size = active_width.size + cur_p.height + cur_p.depth
+ active_width.size = active_width.size + getfield(current,"height") + getfield(current,"depth")
else
- active_width.size = active_width.size + cur_p.width
+ active_width.size = active_width.size + getfield(current,"width")
end
if checked_expansion then
- local data= checked_expansion[cur_p.font]
+ local currentfont = getfont(current)
+                local data = checked_expansion[currentfont]
if data then
- local currentfont = cur_p.font
if currentfont ~= lastfont then
fontexps = checked_expansion[currentfont] -- a bit redundant for the par line packer
lastfont = currentfont
end
if fontexps then
- local expansion = fontexps[cur_p.char]
+ local expansion = fontexps[getchar(current)]
if expansion then
active_width.adjust_stretch = active_width.adjust_stretch + expansion.glyphstretch
active_width.adjust_shrink = active_width.adjust_shrink + expansion.glyphshrink
@@ -2193,51 +2208,45 @@ function constructors.methods.basic(head,d)
end
end
end
- cur_p = cur_p.next
- end
- if not cur_p then -- TODO
- report_parbuilders("problems with linebreak_tail")
- os.exit()
- end
- local id = cur_p.id
- if id == hlist_code or id == vlist_code then
- if is_parallel[cur_p.dir][par.line_break_dir] then
- active_width.size = active_width.size + cur_p.width
+ elseif id == hlist_code or id == vlist_code then
+ if is_parallel[getfield(current,"dir")][par.line_break_dir] then
+ active_width.size = active_width.size + getfield(current,"width")
else
- active_width.size = active_width.size + cur_p.depth + cur_p.height
+ active_width.size = active_width.size + getfield(current,"depth") + getfield(current,"height")
end
elseif id == glue_code then
- if par.auto_breaking then
- local prev_p = cur_p.prev
+-- if par.auto_breaking then
+ if auto_breaking then
+ local prev_p = getprev(current)
if prev_p and prev_p ~= temp_head then
- local id = prev_p.id
+ local id = getid(prev_p)
if id == glyph_code or
- (id < math_code and (id ~= whatsit_code or prev_p.subtype ~= dir_code)) or -- was: precedes_break(prev_p)
- (id == kern_code and prev_p.subtype ~= userkern_code) then
- try_break(0, unhyphenated_code, par, first_p, cur_p, checked_expansion)
+ (id < math_code and (id ~= whatsit_code or getsubtype(prev_p) ~= dir_code)) or -- was: precedes_break(prev_p)
+ (id == kern_code and getsubtype(prev_p) ~= userkern_code) then
+ p_active, n_active = try_break(0, unhyphenated_code, par, first_p, current, checked_expansion)
end
end
end
- local spec = check_shrinkage(par,cur_p.spec)
- local order = stretch_orders[spec.stretch_order]
- cur_p.spec = spec
- active_width.size = active_width.size + spec.width
- active_width[order] = active_width[order] + spec.stretch
- active_width.shrink = active_width.shrink + spec.shrink
+ local spec = check_shrinkage(par,getfield(current,"spec"))
+ local order = stretch_orders[getfield(spec,"stretch_order")]
+ setfield(current,"spec",spec)
+ active_width.size = active_width.size + getfield(spec,"width")
+ active_width[order] = active_width[order] + getfield(spec,"stretch")
+ active_width.shrink = active_width.shrink + getfield(spec,"shrink")
elseif id == disc_code then
- local subtype = cur_p.subtype
- if subtype ~= second_disc_code then -- are there still second_disc_code in luatex
+ local subtype = getsubtype(current)
+ if subtype ~= second_disc_code then
local line_break_dir = par.line_break_dir
- if par.second_pass then -- todo: make second pass local
+ if second_pass or subtype <= automatic_disc_code then
local actual_pen = subtype == automatic_disc_code and par.ex_hyphen_penalty or par.hyphen_penalty
- local pre = cur_p.pre
+ local pre = getfield(current,"pre")
if not pre then -- trivial pre-break
disc_width.size = 0
if checked_expansion then
disc_width.adjust_stretch = 0
disc_width.adjust_shrink = 0
end
- try_break(actual_pen, hyphenated_code, par, first_p, cur_p, checked_expansion)
+ p_active, n_active = try_break(actual_pen, hyphenated_code, par, first_p, current, checked_expansion)
else
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,pre)
disc_width.size = size
@@ -2251,13 +2260,13 @@ function constructors.methods.basic(head,d)
-- disc_width.adjust_stretch = 0
-- disc_width.adjust_shrink = 0
end
- try_break(actual_pen, hyphenated_code, par, first_p, cur_p, checked_expansion)
+ p_active, n_active = try_break(actual_pen, hyphenated_code, par, first_p, current, checked_expansion)
if subtype == first_disc_code then
- local cur_p_next = cur_p.next
- if cur_p_next.id ~= disc_code or cur_p_next.subtype ~= second_disc_code then
+ local cur_p_next = getnext(current)
+ if getid(cur_p_next) ~= disc_code or getsubtype(cur_p_next) ~= second_disc_code then
report_parbuilders("unsupported disc at location %a",1)
else
- local pre = cur_p_next.pre
+ local pre = getfield(cur_p_next,"pre")
if pre then
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,pre)
disc_width.size = disc_width.size + size
@@ -2265,16 +2274,16 @@ function constructors.methods.basic(head,d)
disc_width.adjust_stretch = disc_width.adjust_stretch + adjust_stretch
disc_width.adjust_shrink = disc_width.adjust_shrink + adjust_shrink
end
- try_break(actual_pen, hyphenated_code, par, first_p, cur_p_next, checked_expansion)
+ p_active, n_active = try_break(actual_pen, hyphenated_code, par, first_p, cur_p_next, checked_expansion)
--
-- I will look into this some day ... comment in linebreak.w says that this fails,
-- maybe this is what Taco means with his comment in the luatex manual.
--
-- do_one_seven_eight(sub_disc_width_from_active_width);
-- do_one_seven_eight(reset_disc_width);
- -- s = vlink_no_break(vlink(cur_p));
+ -- s = vlink_no_break(vlink(current));
-- add_to_widths(s, line_break_dir, pdf_adjust_spacing,disc_width);
- -- ext_try_break(...,first_p,vlink(cur_p));
+ -- ext_try_break(...,first_p,vlink(current));
--
else
report_parbuilders("unsupported disc at location %a",2)
@@ -2289,7 +2298,7 @@ function constructors.methods.basic(head,d)
end
end
end
- local replace = cur_p.replace
+ local replace = getfield(current,"replace")
if replace then
local size, adjust_stretch, adjust_shrink = add_to_width(line_break_dir,checked_expansion,replace)
active_width.size = active_width.size + size
@@ -2300,14 +2309,20 @@ function constructors.methods.basic(head,d)
end
end
elseif id == kern_code then
- if cur_p.subtype == userkern_code then
- kern_break(par,cur_p,first_p, checked_expansion)
+ if getsubtype(current) == userkern_code then
+ local v = getnext(current)
+-- if par.auto_breaking and getid(v) == glue_code then
+ if auto_breaking and getid(v) == glue_code then
+ p_active, n_active = try_break(0, unhyphenated_code, par, first_p, current, checked_expansion)
+ end
+ local active_width = par.active_width
+ active_width.size = active_width.size + getfield(current,"kern")
else
- local d = cur_p.kern
-                if d ~= 0 then
- active_width.size = active_width.size + d
- if checked_expansion and expand_kerns and (cur_p.subtype == kerning_code or cur_p[a_fontkern]) then
- local stretch, shrink = kern_stretch_shrink(cur_p,d)
+ local kern = getfield(current,"kern")
+ if kern ~= 0 then
+ active_width.size = active_width.size + kern
+ if checked_expansion and expand_kerns and (getsubtype(current) == kerning_code or getattr(current,a_fontkern)) then
+ local stretch, shrink = kern_stretch_shrink(current,kern)
if expand_kerns == "stretch" then
active_width.adjust_stretch = active_width.adjust_stretch + stretch
elseif expand_kerns == "shrink" then
@@ -2320,40 +2335,47 @@ function constructors.methods.basic(head,d)
end
end
elseif id == math_code then
- par.auto_breaking = cur_p.subtype == endmath_code
- kern_break(par,cur_p, first_p, checked_expansion)
+-- par.auto_breaking = getsubtype(current) == endmath_code
+ auto_breaking = getsubtype(current) == endmath_code
+ local v = getnext(current)
+-- if par.auto_breaking and getid(v) == glue_code then
+ if auto_breaking and getid(v) == glue_code then
+ p_active, n_active = try_break(0, unhyphenated_code, par, first_p, current, checked_expansion)
+ end
+ local active_width = par.active_width
+ active_width.size = active_width.size + getfield(current,"surround")
elseif id == rule_code then
- active_width.size = active_width.size + cur_p.width
+ active_width.size = active_width.size + getfield(current,"width")
elseif id == penalty_code then
- try_break(cur_p.penalty, unhyphenated_code, par, first_p, cur_p, checked_expansion)
+ p_active, n_active = try_break(getfield(current,"penalty"), unhyphenated_code, par, first_p, current, checked_expansion)
elseif id == whatsit_code then
- local subtype = cur_p.subtype
+ local subtype = getsubtype(current)
if subtype == localpar_code then
- par.internal_pen_inter = cur_p.pen_inter
- par.internal_pen_broken = cur_p.pen_broken
- par.internal_left_box = cur_p.box_left
- par.internal_left_box_width = cur_p.box_left_width
- par.internal_right_box = cur_p.box_right
- par.internal_right_box_width = cur_p.box_right_width
+ par.internal_pen_inter = getfield(current,"pen_inter")
+ par.internal_pen_broken = getfield(current,"pen_broken")
+ par.internal_left_box = getfield(current,"box_left")
+ par.internal_left_box_width = getfield(current,"box_left_width")
+ par.internal_right_box = getfield(current,"box_right")
+ par.internal_right_box_width = getfield(current,"box_right_width")
elseif subtype == dir_code then
par.line_break_dir = checked_line_dir(dirstack) or par.line_break_dir
else
local get_width = get_whatsit_width[subtype]
if get_width then
- active_width.size = active_width.size + get_width(cur_p)
+ active_width.size = active_width.size + get_width(current,par.line_break_dir)
end
end
- elseif id == mark_code or id == ins_code or id == adjust_code then
- -- skip
- else
- report_parbuilders("node of type %a found in paragraph",type(id))
+ elseif trace_unsupported then
+ if id == mark_code or id == ins_code or id == adjust_code then
+ -- skip
+ else
+ report_parbuilders("node of type %a found in paragraph",type(id))
+ end
end
- cur_p = cur_p.next
+ current = getnext(current)
end
- if not cur_p then
- try_break(eject_penalty, hyphenated_code, par, first_p, cur_p, checked_expansion)
- local p_active = par.active
- local n_active = p_active.next
+ if not current then
+ local p_active, n_active = try_break(eject_penalty, hyphenated_code, par, first_p, current, checked_expansion)
if n_active ~= p_active then
local r = n_active
par.fewest_demerits = awful_badness
@@ -2367,7 +2389,7 @@ function constructors.methods.basic(head,d)
par.best_line = par.best_bet.line_number
local asked_looseness = par.looseness
if asked_looseness == 0 then
- return wrap_up(par)
+ return tonode(wrap_up(par))
end
local r = n_active
local actual_looseness = 0
@@ -2387,30 +2409,30 @@ function constructors.methods.basic(head,d)
end
end
r = r.next
- until r == p_active -- weird, loop list?
+ until r == p_active
par.best_line = par.best_bet.line_number
if actual_looseness == asked_looseness or par.final_pass then
- return wrap_up(par)
+ return tonode(wrap_up(par))
end
end
end
reset_meta(par) -- clean up the memory by removing the break nodes
- if not par.second_pass then
+ if not second_pass then
if tracing_paragraphs then
diagnostics.current_pass(par,"secondpass")
end
- par.threshold = par.tolerance
+ par.threshold = par.tolerance
par.second_pass = true
- par.final_pass = par.emergency_stretch <= 0
+ par.final_pass = par.emergency_stretch <= 0
else
if tracing_paragraphs then
diagnostics.current_pass(par,"emergencypass")
end
par.background.stretch = par.background.stretch + par.emergency_stretch
- par.final_pass = true
+ par.final_pass = true
end
end
- return wrap_up(par)
+ return tonode(wrap_up(par))
end
-- standard tex logging .. will be adapted ..
@@ -2435,48 +2457,58 @@ function diagnostics.current_pass(par,what)
write_nl("log",format("@%s",what))
end
-local function short_display(a,font_in_short_display)
+local verbose = false -- true
+
+local function short_display(target,a,font_in_short_display)
while a do
- local id = a.id
+ local id = getid(a)
if id == glyph_code then
- local font = a.font
+ local font = getfont(a)
if font ~= font_in_short_display then
- write("log",tex.fontidentifier(font) .. ' ')
+ write(target,tex.fontidentifier(font) .. ' ')
font_in_short_display = font
end
- if a.subtype == ligature_code then
- font_in_short_display = short_display(a.components,font_in_short_display)
+ if getsubtype(a) == ligature_code then
+ font_in_short_display = short_display(target,getfield(a,"components"),font_in_short_display)
else
- write("log",utfchar(a.char))
+ write(target,utfchar(getchar(a)))
end
--- elseif id == rule_code then
--- write("log","|")
--- elseif id == glue_code then
--- if a.spec.writable then
--- write("log"," ")
--- end
--- elseif id == math_code then
--- write("log","$")
elseif id == disc_code then
- font_in_short_display = short_display(a.pre,font_in_short_display)
- font_in_short_display = short_display(a.post,font_in_short_display)
- else -- no explicit checking
- write("log",format("[%s]",nodecodes[id]))
+ font_in_short_display = short_display(target,getfield(a,"pre"),font_in_short_display)
+ font_in_short_display = short_display(target,getfield(a,"post"),font_in_short_display)
+ elseif verbose then
+ write(target,format("[%s]",nodecodes[id]))
+ elseif id == rule_code then
+ write(target,"|")
+ elseif id == glue_code then
+ if getfield(getfield(a,"spec"),"writable") then
+ write(target," ")
+ end
+ elseif id == kern_code and (getsubtype(a) == userkern_code or getattr(a,a_fontkern)) then
+ if verbose then
+ write(target,"[|]")
+ else
+ write(target,"")
+ end
+ elseif id == math_code then
+ write(target,"$")
+ else
+ write(target,"[]")
end
- a = a.next
+ a = getnext(a)
end
return font_in_short_display
end
diagnostics.short_display = short_display
-function diagnostics.break_node(par, q, fit_class, break_type, cur_p) -- %d ?
+function diagnostics.break_node(par, q, fit_class, break_type, current) -- %d ?
local passive = par.passive
local typ_ind = break_type == hyphenated_code and '-' or ""
if par.do_last_line_fit then
local s = number.toscaled(q.active_short)
local g = number.toscaled(q.active_glue)
- if cur_p then
+ if current then
write_nl("log",format("@@%d: line %d.%d%s t=%s s=%s g=%s",
passive.serial or 0,q.line_number-1,fit_class,typ_ind,q.total_demerits,s,g))
else
@@ -2494,26 +2526,26 @@ function diagnostics.break_node(par, q, fit_class, break_type, cur_p) -- %d ?
end
end
-function diagnostics.feasible_break(par, cur_p, r, b, pi, d, artificial_demerits)
+function diagnostics.feasible_break(par, current, r, b, pi, d, artificial_demerits)
local printed_node = par.printed_node
- if printed_node ~= cur_p then
+ if printed_node ~= current then
write_nl("log","")
- if not cur_p then
- par.font_in_short_display = short_display(printed_node.next,par.font_in_short_display)
+ if not current then
+ par.font_in_short_display = short_display("log",getnext(printed_node),par.font_in_short_display)
else
- local save_link = cur_p.next
- cur_p.next = nil
+ local save_link = getnext(current)
+ setfield(current,"next",nil)
write_nl("log","")
- par.font_in_short_display = short_display(printed_node.next,par.font_in_short_display)
- cur_p.next = save_link
+ par.font_in_short_display = short_display("log",getnext(printed_node),par.font_in_short_display)
+ setfield(current,"next",save_link)
end
- par.printed_node = cur_p
+ par.printed_node = current
end
write_nl("log","@")
- if not cur_p then
+ if not current then
write_esc("par")
else
- local id = cur_p.id
+ local id = getid(current)
if id == glue_code then
-- print nothing
elseif id == penalty_code then
@@ -2562,49 +2594,54 @@ end)
-- with the glyph.
local function glyph_width_height_depth(curdir,pdir,p)
+ local wd = getfield(p,"width")
+ local ht = getfield(p,"height")
+ local dp = getfield(p,"depth")
if is_rotated[curdir] then
if is_parallel[curdir][pdir] then
- local half = (p.height + p.depth) / 2
- return p.width, half, half
+ local half = (ht + dp) / 2
+ return wd, half, half
else
- local half = p.width / 2
- return p.height + p.depth, half, half
+ local half = wd / 2
+ return ht + dp, half, half
end
elseif is_rotated[pdir] then
if is_parallel[curdir][pdir] then
- local half = (p.height + p.depth) / 2
- return p.width, half, half
+ local half = (ht + dp) / 2
+ return wd, half, half
else
- return p.height + p.depth, p.width, 0 -- weird
+ return ht + dp, wd, 0 -- weird
end
else
if glyphdir_is_equal[curdir][pdir] then
- return p.width, p.height, p.depth
+ return wd, ht, dp
elseif is_opposite[curdir][pdir] then
- return p.width, p.depth, p.height
+ return wd, dp, ht
else -- can this happen?
- return p.height + p.depth, p.width, 0 -- weird
+ return ht + dp, wd, 0
end
end
end
local function pack_width_height_depth(curdir,pdir,p)
+ local wd = getfield(p,"width")
+ local ht = getfield(p,"height")
+ local dp = getfield(p,"depth")
if is_rotated[curdir] then
if is_parallel[curdir][pdir] then
- local half = (p.height + p.depth) / 2
- return p.width, half, half
+ local half = (ht + dp) / 2
+ return wd, half, half
else -- can this happen?
- local half = p.width / 2
- return p.height + p.depth, half, half
+ local half = wd / 2
+ return ht + dp, half, half
end
else
if pardir_is_equal[curdir][pdir] then
- return p.width, p.height, p.depth
+ return wd, ht, dp
elseif is_opposite[curdir][pdir] then
- return p.width, p.depth, p.height
+ return wd, dp, ht
else -- weird dimensions, can this happen?
- -- return p.width, p.depth, p.height
- return p.height + p.depth, p.width, 0
+ return ht + dp, wd, 0
end
end
end
@@ -2622,17 +2659,17 @@ end
--
-- local hlist = new_node("hlist")
--
--- hlist.list = head
--- hlist.dir = direction or tex.textdir
--- hlist.width = width
--- hlist.height = height
--- hlist.depth = depth
+-- setfield(hlist,"list",head)
+-- setfield(hlist,"dir",direction or tex.textdir)
+-- setfield(hlist,"width",width)
+-- setfield(hlist,"height",height)
+-- setfield(hlist,"depth",depth)
--
-- if delta == 0 then
--
--- hlist.glue_sign = 0
--- hlist.glue_order = 0
--- hlist.glue_set = 0
+-- setfield(hlist,"glue_sign",0)
+-- setfield(hlist,"glue_order",0)
+-- setfield(hlist,"glue_set",0)
--
-- else
--
@@ -2648,16 +2685,15 @@ end
-- else
-- local stretch = analysis.stretch
-- if stretch ~= 0 then
--- hlist.glue_sign = 1 -- stretch
--- hlist.glue_order = order
--- hlist.glue_set = delta/stretch
+-- setfield(hlist,"glue_sign",1) -- stretch
+-- setfield(hlist,"glue_order",order)
+-- setfield(hlist,"glue_set",delta/stretch)
-- else
--- hlist.glue_sign = 0 -- nothing
--- hlist.glue_order = order
--- hlist.glue_set = 0
+-- setfield(hlist,"glue_sign",0) -- nothing
+-- setfield(hlist,"glue_order",order)
+-- setfield(hlist,"glue_set",0)
-- end
-- end
--- print("stretch",hlist.glue_sign,hlist.glue_order,hlist.glue_set)
--
-- else
--
@@ -2666,16 +2702,15 @@ end
-- else
-- local shrink = analysis.shrink
-- if shrink ~= 0 then
--- hlist.glue_sign = 2 -- shrink
--- hlist.glue_order = order
--- hlist.glue_set = - delta/shrink
+-- setfield(hlist,"glue_sign",2) -- shrink
+-- setfield(hlist,"glue_order",order)
+--                     setfield(hlist,"glue_set",-delta/shrink)
-- else
--- hlist.glue_sign = 0 -- nothing
--- hlist.glue_order = order
--- hlist.glue_set = 0
+-- setfield(hlist,"glue_sign",0) -- nothing
+-- setfield(hlist,"glue_order",order)
+-- setfield(hlist,"glue_set",0)
-- end
-- end
--- print("shrink",hlist.glue_sign,hlist.glue_order,hlist.glue_set)
--
-- end
--
@@ -2689,7 +2724,7 @@ end
-- end
-- local current = head
-- while current do
--- local id = current.id
+-- local id = getid(current)
-- if id == glyph_code then
-- local stretch, shrink = char_stretch_shrink(current) -- get only one
-- if stretch then
@@ -2699,12 +2734,12 @@ end
-- current.expansion_factor = font_expand_ratio * stretch
-- end
-- elseif id == kern_code then
--- local kern = current.kern
--- if kern ~= 0 and current.subtype == kerning_code then
--- current.kern = font_expand_ratio * current.kern
+-- local kern = getfield(current,"kern")
+-- if kern ~= 0 and getsubtype(current) == kerning_code then
+-- setfield(current,"kern",font_expand_ratio * kern)
-- end
-- end
--- current = current.next
+-- current = getnext(current)
-- end
-- elseif font_expand_ratio < 0 then
-- if font_expand_ratio < -1000 then
@@ -2712,7 +2747,7 @@ end
-- end
-- local current = head
-- while current do
--- local id = current.id
+-- local id = getid(current)
-- if id == glyph_code then
-- local stretch, shrink = char_stretch_shrink(current) -- get only one
-- if shrink then
@@ -2722,26 +2757,31 @@ end
-- current.expansion_factor = font_expand_ratio * shrink
-- end
-- elseif id == kern_code then
--- local kern = current.kern
--- if kern ~= 0 and current.subtype == kerning_code then
--- current.kern = font_expand_ratio * current.kern
+-- local kern = getfield(current,"kern")
+-- if kern ~= 0 and getsubtype(current) == kerning_code then
+-- setfield(current,"kern",font_expand_ratio * kern)
-- end
-- end
--- current = current.next
+-- current = getnext(current)
-- end
-- end
-- return hlist, 0
-- end
-local function hpack(head,width,method,direction) -- fast version when head = nil
+local function hpack(head,width,method,direction,firstline,line) -- fast version when head = nil
-- we can pass the adjust_width and adjust_height so that we don't need to recalculate them but
- -- with the glue mess it's less trivial as we lack detail
+ -- with the glue mess it's less trivial as we lack detail .. challenge
local hlist = new_node("hlist")
+ setfield(hlist,"dir",direction)
+
if head == nil then
+ setfield(hlist,"width",width)
return hlist, 0
+ else
+ setfield(hlist,"list",head)
end
local cal_expand_ratio = method == "cal_expand_ratio" or method == "subst_ex_font"
@@ -2757,8 +2797,6 @@ local function hpack(head,width,method,direction) -- fast version when head = ni
local font_shrink = 0
local font_expand_ratio = 0
local last_badness = 0
- local disc_stack = { }
- local disc_level = 0
local expansion_stack = cal_expand_ratio and { } -- todo: optionally pass this
local expansion_index = 0
local total_stretch = { [0] = 0, 0, 0, 0, 0 }
@@ -2768,11 +2806,8 @@ local function hpack(head,width,method,direction) -- fast version when head = ni
local adjust_head = texlists.adjust_head
local pre_adjust_head = texlists.pre_adjust_head
- local adjust_tail = adjust_head and slide_nodes(adjust_head)
- local pre_adjust_tail = pre_adjust_head and slide_nodes(pre_adjust_head)
-
- hlist.list = head
- hlist.dir = hpack_dir
+ local adjust_tail = adjust_head and slide_nodelist(adjust_head) -- todo: find_tail
+ local pre_adjust_tail = pre_adjust_head and slide_nodelist(pre_adjust_head) -- todo: find_tail
new_dir_stack(hpack_dir)
@@ -2787,173 +2822,72 @@ local function hpack(head,width,method,direction) -- fast version when head = ni
local fontexps, lastfont
- local current = head
+ local function process(current) -- called nested in disc replace
- while current do
- local id = current.id
- if id == glyph_code then
- if cal_expand_ratio then
- local currentfont = current.font
- if currentfont ~= lastfont then
- fontexps = checked_expansion[currentfont] -- a bit redundant for the par line packer
- lastfont = currentfont
- end
- if fontexps then
- local expansion = fontexps[current.char]
- if expansion then
- font_stretch = font_stretch + expansion.glyphstretch
- font_shrink = font_shrink + expansion.glyphshrink
- expansion_index = expansion_index + 1
- expansion_stack[expansion_index] = current
+ while current do
+ local id = getid(current)
+ if id == glyph_code then
+ if cal_expand_ratio then
+ local currentfont = getfont(current)
+ if currentfont ~= lastfont then
+ fontexps = checked_expansion[currentfont] -- a bit redundant for the par line packer
+ lastfont = currentfont
end
- end
- end
- -- use inline if no expansion
- local wd, ht, dp = glyph_width_height_depth(hpack_dir,"TLT",current) -- was TRT ?
- natural = natural + wd
- if ht > height then
- height = ht
- end
- if dp > depth then
- depth = dp
- end
- current = current.next
- elseif id == kern_code then
- local kern = current.kern
- if kern == 0 then
- -- no kern
- else
- if cal_expand_ratio and expand_kerns and current.subtype == kerning_code or current[a_fontkern] then -- check p.kern
- local stretch, shrink = kern_stretch_shrink(current,kern)
- if expand_kerns == "stretch" then
- font_stretch = font_stretch + stretch
- elseif expand_kerns == "shrink" then
- font_shrink = font_shrink + shrink
- else
- font_stretch = font_stretch + stretch
- font_shrink = font_shrink + shrink
+ if fontexps then
+ local expansion = fontexps[getchar(current)]
+ if expansion then
+ font_stretch = font_stretch + expansion.glyphstretch
+ font_shrink = font_shrink + expansion.glyphshrink
+ expansion_index = expansion_index + 1
+ expansion_stack[expansion_index] = current
+ end
end
- expansion_index = expansion_index + 1
- expansion_stack[expansion_index] = current
end
- natural = natural + kern
- end
- current = current.next
- elseif id == disc_code then
- if current.subtype ~= second_disc_code then
- -- we follow the end of line disc chain
- local replace = current.replace
- if replace then
- disc_level = disc_level + 1
- disc_stack[disc_level] = current.next
- current = replace
- else
- current = current.next
- end
- else
- current = current.next
- end
- elseif id == glue_code then
- local spec = current.spec
- natural = natural + spec.width
- local op = spec.stretch_order
- local om = spec.shrink_order
- total_stretch[op] = total_stretch[op] + spec.stretch
- total_shrink [om] = total_shrink [om] + spec.shrink
- if current.subtype >= leaders_code then
- local leader = current.leader
- local ht = leader.height
- local dp = leader.depth
+ -- use inline
+ local wd, ht, dp = glyph_width_height_depth(hpack_dir,"TLT",current) -- was TRT ?
+ natural = natural + wd
if ht > height then
height = ht
end
if dp > depth then
depth = dp
end
- end
- current = current.next
- elseif id == hlist_code or id == vlist_code then
- local sh = current.shift
- local wd, ht, dp = pack_width_height_depth(hpack_dir,current.dir or hpack_dir,current) -- added: or pack_dir
- local hs, ds = ht - sh, dp + sh
- natural = natural + wd
- if hs > height then
- height = hs
- end
- if ds > depth then
- depth = ds
- end
- current = current.next
- elseif id == rule_code then
- local wd = current.width
- local ht = current.height
- local dp = current.depth
- natural = natural + wd
- if ht > height then
- height = ht
- end
- if dp > depth then
- depth = dp
- end
- current = current.next
- elseif id == math_code then
- natural = natural + current.surround
- current = current.next
- elseif id == unset_code then
- local wd = current.width
- local ht = current.height
- local dp = current.depth
- local sh = current.shift
- local hs = ht - sh
- local ds = dp + sh
- natural = natural + wd
- if hs > height then
- height = hs
- end
- if ds > depth then
- depth = ds
- end
- current = current.next
- elseif id == ins_code or id == mark_code then
- local prev = current.prev
- local next = current.next
- if adjust_tail then -- todo
- if next then
- next.prev = prev
+ elseif id == kern_code then
+ local kern = getfield(current,"kern")
+ if kern == 0 then
+ -- no kern
+ elseif getsubtype(current) == kerning_code then -- check getfield(p,"kern")
+ if cal_expand_ratio then
+ local stretch, shrink = kern_stretch_shrink(current,kern)
+ font_stretch = font_stretch + stretch
+ font_shrink = font_shrink + shrink
+ expansion_index = expansion_index + 1
+ expansion_stack[expansion_index] = current
+ end
+ natural = natural + kern
+ else
+ natural = natural + kern
end
- if prev then
- prev.next = next
+ elseif id == disc_code then
+ local subtype = getsubtype(current)
+ if subtype ~= second_disc_code then
+ -- todo : local stretch, shrink = char_stretch_shrink(s)
+ local replace = getfield(current,"replace")
+ if replace then
+ process(replace)
+ end
end
- current.prev = adjust_tail
- current.next = nil
- adjust_tail.next = current
- adjust_tail = current
- else
- adjust_head = current
- adjust_tail = current
- current.prev = nil
- current.next = nil
- end
- current = next
- elseif id == adjust_code then
- local list = current.list
- if adjust_tail then
- adjust_tail.next = list
- adjust_tail = slide_nodes(list)
- else
- adjust_head = list
- adjust_tail = slide_nodes(list)
- end
- current = current.next
- elseif id == whatsit_code then
- local subtype = current.subtype
- if subtype == dir_code then
- hpack_dir = checked_line_dir(stack,current) or hpack_dir
- else
- local get_dimensions = get_whatsit_dimensions[subtype]
- if get_dimensions then
- local wd, ht, dp = get_dimensions(current)
- natural = natural + wd
+ elseif id == glue_code then
+ local spec = getfield(current,"spec")
+ natural = natural + getfield(spec,"width")
+ local op = getfield(spec,"stretch_order")
+ local om = getfield(spec,"shrink_order")
+ total_stretch[op] = total_stretch[op] + getfield(spec,"stretch")
+ total_shrink [om] = total_shrink [om] + getfield(spec,"shrink")
+ if getsubtype(current) >= leaders_code then
+ local leader = getleader(current)
+ local ht = getfield(leader,"height")
+ local dp = getfield(leader,"depth")
if ht > height then
height = ht
end
@@ -2961,51 +2895,132 @@ local function hpack(head,width,method,direction) -- fast version when head = ni
depth = dp
end
end
+ elseif id == hlist_code or id == vlist_code then
+ local sh = getfield(current,"shift")
+ local wd, ht, dp = pack_width_height_depth(hpack_dir,getfield(current,"dir") or hpack_dir,current) -- added: or pack_dir
+ local hs, ds = ht - sh, dp + sh
+ natural = natural + wd
+ if hs > height then
+ height = hs
+ end
+ if ds > depth then
+ depth = ds
+ end
+ elseif id == rule_code then
+ local wd = getfield(current,"width")
+ local ht = getfield(current,"height")
+ local dp = getfield(current,"depth")
+ natural = natural + wd
+ if ht > height then
+ height = ht
+ end
+ if dp > depth then
+ depth = dp
+ end
+ elseif id == math_code then
+ natural = natural + getfield(current,"surround")
+ elseif id == unset_code then
+ local wd = getfield(current,"width")
+ local ht = getfield(current,"height")
+ local dp = getfield(current,"depth")
+ local sh = getfield(current,"shift")
+ local hs = ht - sh
+ local ds = dp + sh
+ natural = natural + wd
+ if hs > height then
+ height = hs
+ end
+ if ds > depth then
+ depth = ds
+ end
+ elseif id == ins_code or id == mark_code then
+ local prev = getprev(current)
+ local next = getnext(current)
+ if adjust_tail then -- todo
+ if next then
+ setfield(next,"prev",prev)
+ end
+ if prev then
+ setfield(prev,"next",next)
+ end
+ setfield(current,"prev",adjust_tail)
+ setfield(current,"next",nil)
+                    setfield(adjust_tail,"next",current)
+ adjust_tail = current
+ else
+ adjust_head = current
+ adjust_tail = current
+ setfield(current,"prev",nil)
+ setfield(current,"next",nil)
+ end
+ elseif id == adjust_code then
+ local list = getlist(current)
+ if adjust_tail then
+                        setfield(adjust_tail,"next",list)
+ else
+ adjust_head = list
+ end
+ adjust_tail = slide_nodelist(list) -- find_tail(list)
+ elseif id == whatsit_code then
+ local subtype = getsubtype(current)
+ if subtype == dir_code then
+ hpack_dir = checked_line_dir(stack,current) or hpack_dir
+ else
+ local get_dimensions = get_whatsit_dimensions[subtype]
+ if get_dimensions then
+ local wd, ht, dp = get_dimensions(current,hpack_dir)
+ natural = natural + wd
+ if ht > height then
+ height = ht
+ end
+ if dp > depth then
+ depth = dp
+ end
+ end
+ end
+ elseif id == marginkern_code then
+ local width = getfield(current,"width")
+ if cal_expand_ratio then
+ -- is this ok?
+ local glyph = getfield(current,"glyph")
+ local char_pw = getsubtype(current) == leftmargin_code and left_pw or right_pw
+ font_stretch = font_stretch - width - char_pw(glyph)
+ font_shrink = font_shrink - width - char_pw(glyph)
+ expansion_index = expansion_index + 1
+ expansion_stack[expansion_index] = glyph
+ end
+ natural = natural + width
end
- current = current.next
- elseif id == marginkern_code then
- if cal_expand_ratio then
- local glyph = current.glyph
- local char_pw = current.subtype == leftmargin_code and left_pw or right_pw
- font_stretch = font_stretch - current.width - char_pw(glyph)
- font_shrink = font_shrink - current.width - char_pw(glyph)
- expansion_index = expansion_index + 1
- expansion_stack[expansion_index] = glyph
- end
- natural = natural + current.width
- current = current.next
- else
- current = current.next
- end
- if not current and disc_level > 0 then
- current = disc_stack[disc_level]
- disc_level = disc_level - 1
+ current = getnext(current)
end
+
end
+
+ process(head)
+
if adjust_tail then
adjust_tail.next = nil -- todo
end
if pre_adjust_tail then
pre_adjust_tail.next = nil -- todo
end
- if mode == "additional" then
+ if method == "additional" then
width = width + natural
end
- hlist.width = width
- hlist.height = height
- hlist.depth = depth
+ setfield(hlist,"width",width)
+ setfield(hlist,"height",height)
+ setfield(hlist,"depth",depth)
local delta = width - natural
if delta == 0 then
- hlist.glue_sign = 0
- hlist.glue_order = 0
- hlist.glue_set = 0
+ setfield(hlist,"glue_sign",0)
+ setfield(hlist,"glue_order",0)
+ setfield(hlist,"glue_set",0)
elseif delta > 0 then
-- natural width smaller than requested width
local order = (total_stretch[4] ~= 0 and 4 or total_stretch[3] ~= 0 and 3) or
(total_stretch[2] ~= 0 and 2 or total_stretch[1] ~= 0 and 1) or 0
--- local correction = 0
if cal_expand_ratio and order == 0 and font_stretch > 0 then -- check sign of font_stretch
font_expand_ratio = delta/font_stretch
@@ -3017,41 +3032,38 @@ local function hpack(head,width,method,direction) -- fast version when head = ni
for i=1,expansion_index do
local g = expansion_stack[i]
local e
- if g.id == glyph_code then
- local currentfont = g.font
+ if getid(g) == glyph_code then
+ local currentfont = getfont(g)
if currentfont ~= lastfont then
fontexps = expansions[currentfont]
lastfont = currentfont
end
- local data = fontexps[g.char]
+ local data = fontexps[getchar(g)]
if trace_expansion then
setnodecolor(g,"hz:positive")
end
e = font_expand_ratio * data.glyphstretch / 1000
--- correction = correction + (e / 1000) * g.width
else
- local kern = g.kern
+ local kern = getfield(g,"kern")
local stretch, shrink = kern_stretch_shrink(g,kern)
e = font_expand_ratio * stretch / 1000
--- correction = correction + (e / 1000) * kern
end
- g.expansion_factor = e
+ setfield(g,"expansion_factor",e)
end
end
--- delta = delta - correction
local tso = total_stretch[order]
if tso ~= 0 then
- hlist.glue_sign = 1
- hlist.glue_order = order
- hlist.glue_set = delta/tso
+ setfield(hlist,"glue_sign",1)
+ setfield(hlist,"glue_order",order)
+ setfield(hlist,"glue_set",delta/tso)
else
- hlist.glue_sign = 0
- hlist.glue_order = order
- hlist.glue_set = 0
+ setfield(hlist,"glue_sign",0)
+ setfield(hlist,"glue_order",order)
+ setfield(hlist,"glue_set",0)
end
if font_expand_ratio ~= 0 then
-- todo
- elseif order == 0 then -- and hlist.list then
+ elseif order == 0 then -- and getlist(hlist) then
last_badness = calculate_badness(delta,total_stretch[0])
if last_badness > tex.hbadness then
if last_badness > 100 then
@@ -3065,7 +3077,6 @@ local function hpack(head,width,method,direction) -- fast version when head = ni
-- natural width larger than requested width
local order = total_shrink[4] ~= 0 and 4 or total_shrink[3] ~= 0 and 3
or total_shrink[2] ~= 0 and 2 or total_shrink[1] ~= 0 and 1 or 0
--- local correction = 0
if cal_expand_ratio and order == 0 and font_shrink > 0 then -- check sign of font_shrink
font_expand_ratio = delta/font_shrink
@@ -3077,65 +3088,60 @@ local function hpack(head,width,method,direction) -- fast version when head = ni
for i=1,expansion_index do
local g = expansion_stack[i]
local e
- if g.id == glyph_code then
- local currentfont = g.font
+ if getid(g) == glyph_code then
+ local currentfont = getfont(g)
if currentfont ~= lastfont then
fontexps = expansions[currentfont]
lastfont = currentfont
end
- local data = fontexps[g.char]
+ local data = fontexps[getchar(g)]
if trace_expansion then
setnodecolor(g,"hz:negative")
end
e = font_expand_ratio * data.glyphshrink / 1000
- -- local d = (e / 1000) * 1000
- -- local eps = g.width - (1 + d / 1000000) * g.width
- -- correction = correction + eps
- -- e = d
--- correction = correction + (e / 1000) * g.width
else
- local kern = g.kern
+ local kern = getfield(g,"kern")
local stretch, shrink = kern_stretch_shrink(g,kern)
e = font_expand_ratio * shrink / 1000
--- correction = correction + (e / 1000) * kern
end
- g.expansion_factor = e
+ setfield(g,"expansion_factor",e)
end
end
--- delta = delta - correction
local tso = total_shrink[order]
if tso ~= 0 then
- hlist.glue_sign = 2
- hlist.glue_order = order
- hlist.glue_set = -delta/tso
+ setfield(hlist,"glue_sign",2)
+ setfield(hlist,"glue_order",order)
+ setfield(hlist,"glue_set",-delta/tso)
else
- hlist.glue_sign = 0
- hlist.glue_order = order
- hlist.glue_set = 0
+ setfield(hlist,"glue_sign",0)
+ setfield(hlist,"glue_order",order)
+ setfield(hlist,"glue_set",0)
end
if font_expand_ratio ~= 0 then
-- todo
- elseif tso < -delta and order == 0 then -- and hlist.list then
+ elseif tso < -delta and order == 0 then -- and getlist(hlist) then
last_badness = 1000000
- hlist.glue_set = 1
+ setfield(hlist,"glue_set",1)
local fuzz = - delta - total_shrink[0]
local hfuzz = tex.hfuzz
if fuzz > hfuzz or tex.hbadness < 100 then
local overfullrule = tex.overfullrule
if fuzz > hfuzz and overfullrule > 0 then
-- weird, is always called and no rules shows up
- slide_nodes(list).next = new_rule(overfullrule,nil,nil,hlist.dir)
+ setfield(slide_nodelist(list),"next",new_rule(overfullrule,nil,nil,hlist.dir)) -- todo: find_tail
end
diagnostics.overfull_hbox(hlist,line,-delta)
end
- elseif order == 0 and hlist.list and last_badness > tex.hbadness then
+ elseif order == 0 and getlist(hlist) and last_badness > tex.hbadness then
diagnostics.bad_hbox(hlist,line,last_badness)
end
end
return hlist, last_badness
end
-xpack_nodes = hpack -- comment this for old fashioned expansion
+xpack_nodes = hpack -- comment this for old fashioned expansion (we need to fix float mess)
+
+constructors.methods.hpack = hpack
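+
+-- As a reminder of what the packer computes above (a sketch with made up numbers):
+-- with a requested width of 100pt, a natural width of 90pt and 20pt of finite (order
+-- zero) stretch, delta is 10pt, order is 0 and glue_set becomes delta/tso = 10/20 = 0.5,
+-- so every glue gets half of its stretch added.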
local function common_message(hlist,line,str)
write_nl("")
@@ -3173,20 +3179,3 @@ end
function diagnostics.loose_hbox(hlist,line,b)
common_message(hlist,line,format("Loose \\hbox (badness %i)",b))
end
-
--- e = font_expand_ratio * data.glyphstretch / 1000
--- local stretch = data.stretch
--- if e >= stretch then
--- e = stretch
--- else
--- local step = 5
--- e = math.round(e/step) * step
--- end
-
--- local shrink = - data.shrink
--- if e <= shrink then
--- e = shrink
--- else
--- local step = 5
--- e = math.round(e/step) * step
--- end
diff --git a/tex/context/base/node-met.lua b/tex/context/base/node-met.lua
index c85a53c8e..335ce2a98 100644
--- a/tex/context/base/node-met.lua
+++ b/tex/context/base/node-met.lua
@@ -68,7 +68,7 @@ local nodes = nodes
nodes.gonuts = gonuts
-local nodecodes = nodes.codes
+local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
@@ -332,6 +332,28 @@ function nodes.writable_spec(n) -- not pool
return spec
end
+function nodes.copy_spec(old,free) -- also frees
+ if not old then
+ return n_new_node("glue_spec")
+ else
+ local new = n_copy_node(old)
+ if free and old.writable then
+ free_node(old)
+ end
+ return new
+ end
+end
+
+function nodes.free_spec(old)
+ if not old then
+ -- skip
+ elseif old.writable then
+ free_node(old)
+ else
+ -- skip
+ end
+end
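+
+-- for instance (a sketch, assuming some glue node g with a possibly shared spec):
+--
+-- local spec = nodes.copy_spec(g.spec,true) -- frees the old spec when it was writable
+-- spec.width = 2*65536                      -- 2pt in scaled points
+-- g.spec     = spec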
+
if gonuts then
function nodes.reference(n)
@@ -668,3 +690,34 @@ end
nodes.keys = keys -- [id][subtype]
nodes.fields = nodefields -- (n)
+
+-- one issue solved in flush_node:
+--
+-- case glue_spec_node:
+-- if (glue_ref_count(p)!=null) {
+-- decr(glue_ref_count(p));
+-- return ;
+-- /*
+-- } else if (! valid_node(p)) {
+-- return ;
+-- */
+-- /*
+-- } else {
+-- free_node(p, get_node_size(type(p), subtype(p)));
+-- return ;
+-- */
+-- }
+-- break ;
+--
+-- or:
+--
+-- case glue_spec_node:
+-- if (glue_ref_count(p)!=null) {
+-- decr(glue_ref_count(p));
+-- return ;
+-- } else if (valid_node(p)) {
+-- free_node(p, get_node_size(type(p), subtype(p)));
+-- return ;
+-- } else {
+-- break ;
+-- }
diff --git a/tex/context/base/node-mig.lua b/tex/context/base/node-mig.lua
index 9fc35a048..41f95be45 100644
--- a/tex/context/base/node-mig.lua
+++ b/tex/context/base/node-mig.lua
@@ -6,15 +6,32 @@ if not modules then modules = { } end modules ['node-mig'] = {
license = "see context related readme files"
}
+-- todo: insert_after
+
local format = string.format
-local attributes, nodes, node = attributes, nodes, node
+local trace_migrations = false trackers.register("nodes.migrations", function(v) trace_migrations = v end)
-local remove_nodes = nodes.remove
+local report_nodes = logs.reporter("nodes","migrations")
-local nodecodes = nodes.nodecodes
+local attributes = attributes
+local nodes = nodes
local tasks = nodes.tasks
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getnext = nuts.getnext
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+
+local setfield = nuts.setfield
+local setattr = nuts.setattr
+
+local remove_node = nuts.remove
+
+local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local insert_code = nodecodes.ins
@@ -22,10 +39,6 @@ local mark_code = nodecodes.mark
local a_migrated = attributes.private("migrated")
-local trace_migrations = false trackers.register("nodes.migrations", function(v) trace_migrations = v end)
-
-local report_nodes = logs.reporter("nodes","migrations")
-
local migrate_inserts, migrate_marks, inserts_too
local t_inserts, t_marks, t_sweeps = 0, 0, 0
@@ -33,32 +46,42 @@ local t_inserts, t_marks, t_sweeps = 0, 0, 0
local function locate(head,first,last,ni,nm)
local current = head
while current do
- local id = current.id
+ local id = getid(current)
if id == vlist_code or id == hlist_code then
- current.list, first, last, ni, nm = locate(current.list,first,last,ni,nm)
- current = current.next
+ local list = getlist(current)
+ if list then
+ list, first, last, ni, nm = locate(list,first,last,ni,nm)
+ setfield(current,"list",list)
+ end
+ current = getnext(current)
elseif migrate_inserts and id == insert_code then
local insert
- head, current, insert = remove_nodes(head,current)
- insert.next = nil
+ head, current, insert = remove_node(head,current)
+ setfield(insert,"next",nil)
if first then
- insert.prev, last.next = last, insert
+ setfield(insert,"prev",last)
+ setfield(last,"next",insert)
else
- insert.prev, first = nil, insert
+ setfield(insert,"prev",nil)
+ first = insert
end
- last, ni = insert, ni + 1
+ last = insert
+ ni = ni + 1
elseif migrate_marks and id == mark_code then
local mark
- head, current, mark = remove_nodes(head,current)
- mark.next = nil
+ head, current, mark = remove_node(head,current)
+ setfield(mark,"next",nil)
if first then
- mark.prev, last.next = last, mark
+ setfield(mark,"prev",last)
+ setfield(last,"next",mark)
else
- mark.prev, first = nil, mark
+ setfield(mark,"prev",nil)
+ first = mark
end
- last, nm = mark, nm + 1
+ last = mark
+ nm = nm + 1
else
- current= current.next
+ current = getnext(current)
end
end
return head, first, last, ni, nm
@@ -70,39 +93,43 @@ function nodes.handlers.migrate(head,where)
if trace_migrations then
report_nodes("migration sweep %a",where)
end
- local current = head
+ local current = tonut(head)
while current do
- local id = current.id
+ local id = getid(current)
-- inserts_too is a temp hack, we should only do them when it concerns
-- newly placed (flushed) inserts
- if id == vlist_code or id == hlist_code or (inserts_too and id == insert_code) and not current[a_migrated] then
- current[a_migrated] = 1
+ if id == vlist_code or id == hlist_code or (inserts_too and id == insert_code) and not getattr(current,a_migrated) then
+ setattr(current,a_migrated,1)
t_sweeps = t_sweeps + 1
- local h = current.list
+ local h = getlist(current)
local first, last, ni, nm
while h do
- local id = h.id
+ local id = getid(h)
if id == vlist_code or id == hlist_code then
h, first, last, ni, nm = locate(h,first,last,0,0)
end
- h = h.next
+ h = getnext(h)
end
if first then
- t_inserts, t_marks = t_inserts + ni, t_marks + nm
+ t_inserts = t_inserts + ni
+ t_marks = t_marks + nm
if trace_migrations and (ni > 0 or nm > 0) then
report_nodes("sweep %a, container %a, %s inserts and %s marks migrated outwards during %a",
t_sweeps,nodecodes[id],ni,nm,where)
end
- -- inserts after head
- local n = current.next
+ -- inserts after head, use insert_after
+ local n = getnext(current)
if n then
- last.next, n.prev = n, last
+ setfield(last,"next",n)
+ setfield(n,"prev",last)
end
- current.next, first.prev = first, current
- done, current = true, last
+ setfield(current,"next",first)
+ setfield(first,"prev",current)
+ done = true
+ current = last
end
end
- current = current.next
+        current = getnext(current)
end
return head, done
end
diff --git a/tex/context/base/node-nut.lua b/tex/context/base/node-nut.lua
new file mode 100644
index 000000000..b133c4e74
--- /dev/null
+++ b/tex/context/base/node-nut.lua
@@ -0,0 +1,790 @@
+if not modules then modules = { } end modules ['node-met'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Here starts some more experimental code that Luigi and I use in the next stage of
+-- exploring and testing potential speedups in the engines. This code is not meant
+-- for users and can change (or be removed) at any moment. During the experiments I'll
+-- do my best to keep the code as fast as possible by using two codebases. See
+-- about-fast.pdf for some more info about the impact. Although key based access has
+-- more charm, function based access is somewhat faster and has more potential for
+-- future speedups.
+
+-- This next iteration is flagged direct because we avoid user data which has a price
+-- in allocation and metatable tagging. Although in this stage we pass numbers around,
+-- future versions might use light user data, so never depend on what direct functions
+-- return. Using the direct approach has some speed advantages but you lose the key
+-- based access. The speed gain is only measurable in cases with lots of access. For
+-- instance when typesetting Arabic with advanced fonts, we're talking about many millions
+-- of function calls and there we can get a 30\% or more speedup. On average complex
+-- \CONTEXT\ runs the gain can be 10\% to 15\%. Because mixing the two models (here we
+-- call them nodes and nuts) is not possible you need to cast either way, which has a
+-- penalty. Also, error messages in nuts mode are less clear and \LUATEX\ will often
+-- simply abort when you make mistakes or mix the models. So, development (at least in
+-- \CONTEXT) can be done in node mode and not in nuts mode. Only robust code will be
+-- turned into nuts afterwards, and quite likely not all code. The official \LUATEX\ api
+-- to nodes is userdata!
+--
+-- Listening to 'lunatic soul' at the same time helped me wrap my mind around the mixed
+-- usage of both models. Just for the record: the potential of the direct approach only
+-- became clear after experimenting for weeks and partly adapting code. It is one of those
+-- (sub)projects where you afterwards wonder if it was worth the trouble, but users that
+-- rely on lots of complex functionality and font support will probably notice the speedup.
+--
+-- luatex luajittex
+-- ------------- ----- -------------------- ---------------------------------
+-- name pages old new pct old new pct
+-- ------------- ----- -------------------- ---------------------------------
+-- fonts-mkiv 166 9.3 7.7/7.4 17.2 7.4 (37.5) 5.9/5.7 (55.6) 20.3
+-- about 60 3.3 2.7/2.6 20.4 2.5 (39.5) 2.1 (57.0) 23.4
+-- arabic-001 61 25.3 15.8 18.2 15.3 (46.7) 6.8 (54.7) 16.0
+-- torture-001 300 21.4 11.4 24.2 13.9 (35.0) 6.3 (44.7) 22.2
+--
+-- so:
+--
+-- - we run around 20% faster on documents of average complexity and gain more when
+-- dealing with scripts like arabic and such
+-- - luajittex benefits a bit more so a luajittex job can (in principle) now be much
+-- faster
+-- - if we reason backwards, and take luajittex as norm we get 1:2:3 on some jobs for
+-- luajittex direct:luatex direct:luatex normal i.e. we can be 3 times faster
+-- - keep in mind that these are tex/lua runs so the real gain at the lua end is much
+-- larger
+--
+-- Because we can fake direct mode a little bit by using the fast getfield and setfield
+-- at the cost of wrapped getid and the like, we are still running quite ok. As we could
+-- gain some 5% with fast mode, we can sacrifice some on wrappers when we use a few fast
+-- core functions. This means that simulated direct mode runs font-mkiv in 9.1 seconds
+-- (we could get down to 8.7 seconds in fast mode) and that we can migrate slowly to
+-- direct mode.
+--
+-- The following measurements are from 2013-07-05 after adapting some 47 files to nuts. Keep
+-- in mind that the old binary can fake a fast getfield and setfield but that the other
+-- getters are wrapped functions. The more we have, the slower it gets.
+--
+-- fonts about arabic
+-- old mingw, indexed plus some functions : 8.9 3.2 20.3
+-- old mingw, fake functions : 9.9 3.5 27.4
+-- new mingw, node functions : 9.0 3.1 20.8
+-- new mingw, indexed plus some functions : 8.6 3.1 19.6
+-- new mingw, direct functions : 7.5 2.6 14.4
+--
+-- \starttext \dorecurse{1000}{test\page} \stoptext :
+--
+-- luatex 560 pps
+-- luajittex 600 pps
+--
+-- \setupbodyfont[pagella]
+--
+-- \edef\zapf{\cldcontext{context(io.loaddata(resolvers.findfile("zapf.tex")))}}
+--
+-- \starttext \dorecurse{1000}{\zapf\par} \stoptext
+--
+-- luatex 3.9 sec / 54 pps
+-- luajittex 2.3 sec / 93 pps
+
+local type, rawget = type, rawget
+
+local nodes = nodes
+local gonuts = nodes.gonuts
+local direct = node.direct
+
+local fastcopy = table.fastcopy
+
+if type(direct) ~= "table" then
+ return
+elseif gonuts then
+ statistics.register("running in nuts mode", function() return "yes" end)
+else
+ statistics.register("running in nuts mode", function() return "no" end)
+ return
+end
+
+local texget = tex.get
+
+local nodecodes = nodes.nodecodes
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+
+local nuts = nodes.nuts or { }
+nodes.nuts = nuts
+
+nodes.is_node = direct.is_node or function() return true end
+nodes.is_direct = direct.is_direct or function() return false end
+nodes.is_nut = nodes.is_direct
+
+-- casters
+
+local tonode = direct.tonode or function(n) return n end
+local tonut = direct.todirect or function(n) return n end
+
+nuts.tonode = tonode
+nuts.tonut = tonut
+
+nodes.tonode = tonode
+nodes.tonut = tonut
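+
+-- A minimal sketch of the intended usage pattern (the handler name is only
+-- illustrative): cast to nuts once at the start, work with the fast accessors, and
+-- cast back when handing the list over to \TEX:
+--
+-- local function somehandler(head)
+--     local h = tonut(head)
+--     for n in nuts.traverse_id(nodecodes.glyph,h) do
+--         local c = nuts.getchar(n)
+--         -- inspect or tweak the glyph here
+--     end
+--     return tonode(h), true
+-- end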
+
+-- getters
+
+nuts.getfield = direct.getfield
+nuts.getnext = direct.getnext
+nuts.getprev = direct.getprev
+nuts.getid = direct.getid
+nuts.getattr = direct.has_attribute or direct.getfield
+nuts.getchar = direct.getchar
+nuts.getfont = direct.getfont
+nuts.getsubtype = direct.getsubtype
+nuts.getlist = direct.getlist -- only hlist and vlist !
+nuts.getleader = direct.getleader
+
+-- local function track(name)
+-- local n = 0
+-- local f = nuts[name]
+-- function nuts[name](...)
+-- n = n + 1
+-- if n % 1000 == 0 then
+-- print(name,n)
+-- end
+-- return f(...)
+-- end
+-- end
+
+-- track("getsubtype")
+
+-- local dgf = direct.getfield function nuts.getlist(n) return dgf(n,"list") end
+
+-- setters
+
+nuts.setfield = direct.setfield
+nuts.setattr          = direct.set_attribute or direct.setfield
+
+nuts.getbox = direct.getbox
+nuts.setbox = direct.setbox
+nuts.getskip = direct.getskip or function(s) return tonut(texget(s)) end
+
+-- helpers
+
+nuts.tostring = direct.tostring
+nuts.copy = direct.copy
+nuts.copy_list = direct.copy_list
+nuts.delete = direct.delete
+nuts.dimensions = direct.dimensions
+nuts.end_of_math = direct.end_of_math
+nuts.flush_list = direct.flush_list
+nuts.flush_node = direct.flush_node
+nuts.free = direct.free
+nuts.insert_after = direct.insert_after
+nuts.insert_before = direct.insert_before
+nuts.hpack = direct.hpack
+nuts.new = direct.new
+nuts.tail = direct.tail
+nuts.traverse = direct.traverse
+nuts.traverse_id = direct.traverse_id
+nuts.slide = direct.slide
+nuts.writable_spec = direct.writable_spec
+nuts.vpack = direct.vpack
+nuts.is_node = direct.is_node
+nuts.is_direct = direct.is_direct
+nuts.is_nut = direct.is_direct
+nuts.first_glyph = direct.first_glyph
+nuts.first_character = direct.first_character
+nuts.has_glyph = direct.has_glyph or direct.first_glyph
+
+nuts.current_attr = direct.current_attr
+nuts.do_ligature_n = direct.do_ligature_n
+nuts.has_field = direct.has_field
+nuts.last_node = direct.last_node
+nuts.usedlist = direct.usedlist
+nuts.protrusion_skippable = direct.protrusion_skippable
+nuts.write = direct.write
+
+nuts.has_attribute = direct.has_attribute
+nuts.set_attribute = direct.set_attribute
+nuts.unset_attribute = direct.unset_attribute
+
+nuts.protect_glyphs = direct.protect_glyphs
+nuts.unprotect_glyphs = direct.unprotect_glyphs
+
+-- placeholders
+
+if not direct.kerning then
+
+ local n_kerning = node.kerning
+
+ function nuts.kerning(head)
+ return tonode(n_kerning(tonut(head)))
+ end
+
+end
+
+if not direct.ligaturing then
+
+ local n_ligaturing = node.ligaturing
+
+ function nuts.ligaturing(head)
+ return tonode(n_ligaturing(tonut(head)))
+ end
+
+end
+
+if not direct.mlist_to_hlist then
+
+ local n_mlist_to_hlist = node.mlist_to_hlist
+
+ function nuts.mlist_to_hlist(head)
+ return tonode(n_mlist_to_hlist(tonut(head)))
+ end
+
+end
+
+--
+
+local d_remove_node = direct.remove
+local d_free_node = direct.free
+local d_getfield = direct.getfield
+local d_setfield = direct.setfield
+local d_getnext = direct.getnext
+local d_getprev = direct.getprev
+local d_getid = direct.getid
+local d_getlist = direct.getlist
+local d_find_tail = direct.tail
+local d_insert_after = direct.insert_after
+local d_insert_before = direct.insert_before
+local d_slide = direct.slide
+local d_copy_node = direct.copy
+local d_traverse = direct.traverse
+
+local function remove(head,current,free_too)
+ local t = current
+ head, current = d_remove_node(head,current)
+ if not t then
+ -- forget about it
+ elseif free_too then
+ d_free_node(t)
+ t = nil
+ else
+ d_setfield(t,"next",nil) -- not that much needed (slows down unless we check the source on this)
+ d_setfield(t,"prev",nil) -- not that much needed (slows down unless we check the source on this)
+ end
+ return head, current, t
+end
+
+-- bad: we can have prev's being glue_spec
+
+-- local function remove(head,current,free_too) -- d_remove_node does a slide which can fail
+-- local prev = d_getprev(current) -- weird
+-- local next = d_getnext(current)
+-- if next then
+-- -- print("!!!!!!!! prev is gluespec",
+-- -- nodes.nodecodes[d_getid(current)],
+-- -- nodes.nodecodes[d_getid(next)],
+-- -- nodes.nodecodes[d_getid(prev)])
+-- d_setfield(prev,"next",next)
+-- d_setfield(next,"prev",prev)
+-- else
+-- d_setfield(prev,"next",nil)
+-- end
+-- if free_too then
+-- d_free_node(current)
+-- current = nil
+-- else
+-- d_setfield(current,"next",nil) -- use this fact !
+-- d_setfield(current,"prev",nil) -- use this fact !
+-- end
+-- if head == current then
+-- return next, next, current
+-- else
+-- return head, next, current
+-- end
+-- end
+
+nuts.remove = remove
+
+function nuts.delete(head,current)
+ return remove(head,current,true)
+end
+
+function nuts.replace(head,current,new) -- no head returned if false
+ if not new then
+ head, current, new = false, head, current
+ end
+ local prev = d_getprev(current)
+ local next = d_getnext(current)
+ if next then
+ d_setfield(new,"next",next)
+ d_setfield(next,"prev",new)
+ end
+ if prev then
+ d_setfield(new,"prev",prev)
+ d_setfield(prev,"next",new)
+ end
+ if head then
+ if head == current then
+ head = new
+ end
+ d_free_node(current)
+ return head, new
+ else
+ d_free_node(current)
+ return new
+ end
+end
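+
+-- for instance (a sketch, the names are only illustrative):
+--
+-- head, current = remove(head,current)           -- unlink current, keep the node
+-- head, current = nuts.delete(head,current)      -- unlink current and free it
+-- head, new     = nuts.replace(head,current,new) -- free current, link new in its place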
+
+local function count(stack,flat)
+ local n = 0
+ while stack do
+ local id = d_getid(stack)
+        if not flat and (id == hlist_code or id == vlist_code) then
+ local list = d_getlist(stack)
+ if list then
+ n = n + 1 + count(list) -- self counts too
+ else
+ n = n + 1
+ end
+ else
+ n = n + 1
+ end
+ stack = d_getnext(stack)
+ end
+ return n
+end
+
+nuts.count = count
+
+function nuts.append(head,current,...)
+ for i=1,select("#",...) do
+ head, current = d_insert_after(head,current,(select(i,...)))
+ end
+ return head, current
+end
+
+function nuts.prepend(head,current,...)
+ for i=1,select("#",...) do
+ head, current = d_insert_before(head,current,(select(i,...)))
+ end
+ return head, current
+end
+
+function nuts.linked(...)
+ local head, last
+ for i=1,select("#",...) do
+ local next = select(i,...)
+ if next then
+ if head then
+ d_setfield(last,"next",next)
+ d_setfield(next,"prev",last)
+ else
+ head = next
+ end
+ last = d_find_tail(next) -- we could skip the last one
+ end
+ end
+ return head
+end
+
+function nuts.concat(list) -- consider tail instead of slide
+ local head, tail
+ for i=1,#list do
+ local li = list[i]
+ if li then
+ if head then
+ d_setfield(tail,"next",li)
+ d_setfield(li,"prev",tail)
+ else
+ head = li
+ end
+ tail = d_slide(li)
+ end
+ end
+ return head, tail
+end
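+
+-- for instance (a sketch, a, b and c being nuts or sublists): both build one list,
+-- linked takes the parts as arguments, concat takes them in a table:
+--
+-- local head       = nuts.linked(a,b,c)
+-- local head, tail = nuts.concat { a, b, c }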
+
+function nuts.writable_spec(n) -- not pool
+ local spec = d_getfield(n,"spec")
+ if not spec then
+ spec = d_copy_node(glue_spec)
+ d_setfield(n,"spec",spec)
+ elseif not d_getfield(spec,"writable") then
+ spec = d_copy_node(spec)
+ d_setfield(n,"spec",spec)
+ end
+ return spec
+end
+
+function nuts.reference(n)
+ return n or ""
+end
+
+-- quick and dirty tracing of nuts
+
+-- for k, v in next, nuts do
+-- if string.find(k,"box") then
+-- nuts[k] = function(...) print(k,...) return v(...) end
+-- end
+-- end
+
+function nodes.vianuts (f) return function(n,...) return tonode(f(tonut (n),...)) end end
+function nodes.vianodes(f) return function(n,...) return tonut (f(tonode(n),...)) end end
+
+nuts.vianuts = nodes.vianuts
+nuts.vianodes = nodes.vianodes
+
+-- for k, v in next, nuts do
+-- if type(v) == "function" then
+-- if not string.find(k,"^[sg]et") and not string.find(k,"^to") then
+-- local f = v
+-- nuts[k] = function(...) print("d",k,...) return f(...) end
+-- end
+-- end
+-- end
+
+-- for k, v in next, nodes do
+-- if type(v) == "function" then
+-- if not string.find(k,"^[sg]et") and not string.find(k,"^to") then
+-- local f = v
+-- nodes[k] = function(...) print("n",k,...) return f(...) end
+-- end
+-- end
+-- end
+
+-- function nodes.insert_before(h,c,n)
+-- if c then
+-- if c == h then
+-- n_setfield(n,"next",h)
+-- n_setfield(n,"prev",nil)
+-- n_setfield(h,"prev",n)
+-- else
+-- local cp = n_getprev(c)
+-- n_setfield(n,"next",c)
+-- n_setfield(n,"prev",cp)
+-- if cp then
+-- n_setfield(cp,"next",n)
+-- end
+-- n_setfield(c,"prev",n)
+-- return h, n
+-- end
+-- end
+-- return n, n
+-- end
+
+-- function nodes.insert_after(h,c,n)
+-- if c then
+-- local cn = n_getnext(c)
+-- if cn then
+-- n_setfield(n,"next",cn)
+-- n_setfield(cn,"prev",n)
+-- else
+-- n_setfield(n,"next",nil)
+-- end
+-- n_setfield(c,"next",n)
+-- n_setfield(n,"prev",c)
+-- return h, n
+-- end
+-- return n, n
+-- end
+
+function nodes.insert_list_after(h,c,n)
+ local t = n_tail(n)
+ if c then
+ local cn = n_getnext(c)
+ if cn then
+ n_setfield(t,"next",cn)
+ n_setfield(cn,"prev",t)
+ else
+ n_setfield(t,"next",nil)
+ end
+ n_setfield(c,"next",n)
+ n_setfield(n,"prev",c)
+ return h, n
+ end
+ return n, t
+end
+
+-- function nuts.insert_before(h,c,n)
+-- if c then
+-- if c == h then
+-- d_setfield(n,"next",h)
+-- d_setfield(n,"prev",nil)
+-- d_setfield(h,"prev",n)
+-- else
+-- local cp = d_getprev(c)
+-- d_setfield(n,"next",c)
+-- d_setfield(n,"prev",cp)
+-- if cp then
+-- d_setfield(cp,"next",n)
+-- end
+-- d_setfield(c,"prev",n)
+-- return h, n
+-- end
+-- end
+-- return n, n
+-- end
+
+-- function nuts.insert_after(h,c,n)
+-- if c then
+-- local cn = d_getnext(c)
+-- if cn then
+-- d_setfield(n,"next",cn)
+-- d_setfield(cn,"prev",n)
+-- else
+-- d_setfield(n,"next",nil)
+-- end
+-- d_setfield(c,"next",n)
+-- d_setfield(n,"prev",c)
+-- return h, n
+-- end
+-- return n, n
+-- end
+
+function nuts.insert_list_after(h,c,n)
+ local t = d_tail(n)
+ if c then
+ local cn = d_getnext(c)
+ if cn then
+ d_setfield(t,"next",cn)
+ d_setfield(cn,"prev",t)
+ else
+ d_setfield(t,"next",nil)
+ end
+ d_setfield(c,"next",n)
+ d_setfield(n,"prev",c)
+ return h, n
+ end
+ return n, t
+end
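+
+-- for instance (a sketch, names only illustrative): insert a whole linked list after
+-- an anchor node; contrary to insert_after, the tail of the inserted list is relinked:
+--
+-- head = nuts.insert_list_after(head,anchor,first_of_list)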
+
+-- test code only
+
+-- collectranges and mix
+
+local report = logs.reporter("sliding")
+
+local function message(detail,head,current,previous)
+ report("error: %s, current: %s:%s, previous: %s:%s, list: %s, text: %s",
+ detail,
+ nodecodes[d_getid(current)],
+ current,
+ nodecodes[d_getid(previous)],
+ previous,
+ nodes.idstostring(head),
+ nodes.listtoutf(head)
+ )
+ utilities.debugger.showtraceback(report)
+end
+
+local function warn()
+ report()
+ report("warning: the slide tracer is enabled")
+ report()
+ warn = false
+end
+
+local function tracedslide(head)
+ if head then
+ if warn then
+ warn()
+ end
+ local next = d_getnext(head)
+ if next then
+ local prev = head
+ for n in d_traverse(next) do
+ local p = d_getprev(n)
+ if not p then
+ message("unset",head,n,prev)
+ -- break
+ elseif p ~= prev then
+ message("wrong",head,n,prev)
+ -- break
+ end
+ prev = n
+ end
+ end
+ return d_slide(head)
+ end
+end
+
+local function nestedtracedslide(head,level) -- no sliding !
+ if head then
+ if warn then
+ warn()
+ end
+ local id = d_getid(head)
+ local next = d_getnext(head)
+ if next then
+ report("%whead:%s",level or 0,nodecodes[id])
+ local prev = head
+ for n in d_traverse(next) do
+ local p = d_getprev(n)
+ if not p then
+ message("unset",head,n,prev)
+ -- break
+ elseif p ~= prev then
+ message("wrong",head,n,prev)
+ -- break
+ end
+ prev = n
+ local id = d_getid(n)
+ if id == hlist_code or id == vlist_code then
+ nestedtracedslide(d_getlist(n),(level or 0) + 1)
+ end
+ end
+ elseif id == hlist_code or id == vlist_code then
+ report("%wlist:%s",level or 0,nodecodes[id])
+ nestedtracedslide(d_getlist(head),(level or 0) + 1)
+ end
+ -- return d_slide(head)
+ end
+end
+
+local function untracedslide(head)
+ if head then
+ if warn then
+ warn()
+ end
+ local next = d_getnext(head)
+ if next then
+ local prev = head
+ for n in d_traverse(next) do
+ local p = d_getprev(n)
+ if not p then
+ return "unset", d_getid(n)
+ elseif p ~= prev then
+ return "wrong", d_getid(n)
+ end
+ prev = n
+ end
+ end
+ return d_slide(head)
+ end
+end
+
+nuts.tracedslide = tracedslide
+nuts.untracedslide = untracedslide
+nuts.nestedtracedslide = nestedtracedslide
+
+-- nuts.slide = tracedslide
+
+-- this might move
+
+local propertydata = direct.get_properties_table and direct.get_properties_table()
+
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+if propertydata then
+
+ nodes.properties = {
+ data = propertydata,
+ }
+
+ -- direct.set_properties_mode(true,false) -- shallow copy ... problem: in fonts we then affect the originals too
+ direct.set_properties_mode(true,true) -- create metatable, slower but needed for font-inj.lua (unless we use an intermediate table)
+
+ -- todo:
+ --
+ -- function direct.set_properties_mode()
+ -- -- we really need the set modes
+ -- end
+
+ -- experimental code with respect to copying attributes has been removed
+ -- as it doesn't pay of (most attributes are only accessed once anyway)
+
+ nuts.getprop = function(n,k)
+ local p = propertydata[n]
+ if p then
+ return p[k]
+ end
+ end
+
+ nuts.setprop = function(n,k,v)
+ if v then
+ local p = propertydata[n]
+ if p then
+ p[k] = v
+ else
+ propertydata[n] = { [k] = v }
+ end
+ end
+ end
+
+ nodes.setprop = nodes.setproperty
+ nodes.getprop = nodes.getproperty
+
+else
+
+ -- for testing and simple cases
+
+ nuts.getprop = getattr
+ nuts.setprop = setattr
+
+    nodes.setprop = setattr
+    nodes.getprop = getattr
+
+end
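+
+-- A small usage sketch (the key and value are only illustrative) for the case where
+-- the engine provides a properties table; unlike attributes, properties can carry
+-- arbitrary Lua values:
+--
+-- local g = nuts.new("glyph")
+-- nuts.setprop(g,"state",{ level = 2, tag = "demo" })
+-- local s = nuts.getprop(g,"state")
+-- if s then print(s.level,s.tag) end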
+
+function nuts.copy_properties(source,target,what)
+ local newprops = propertydata[source]
+ if not newprops then
+ -- nothing to copy
+ return
+ end
+ if what then
+ -- copy one category
+        newprops = rawget(newprops,what)
+ if newprops then
+ newprops = fastcopy(newprops)
+ local p = rawget(propertydata,target)
+ if p then
+ p[what] = newprops
+ else
+ propertydata[target] = {
+ [what] = newprops,
+ }
+ end
+ end
+ else
+ -- copy all properties
+ newprops = fastcopy(newprops)
+ propertydata[target] = newprops
+ end
+ return newprops -- for checking
+end
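+
+-- for instance (a sketch, assuming two nuts source and target and an illustrative
+-- category name):
+--
+-- nuts.setprop(source,"injections",{ leftkern = 1000 })
+-- nuts.copy_properties(source,target)              -- all categories
+-- nuts.copy_properties(source,target,"injections") -- just this category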
+
+-- a bit special
+
+local getwidth = { }
+local setwidth = { }
+local getdimensions = { }
+local setdimensions = { }
+
+nodes.whatsitters = {
+ getters = { width = getwidth, dimensions = getdimensions },
+ setters = { width = setwidth, dimensions = setdimensions },
+}
+
+-- this might move (in fact forms and images will become nodes)
+
+local function get_width(n,dir)
+ n = tonut(n)
+    return d_getfield(n,"width")
+end
+
+local function get_dimensions(n,dir)
+ n = tonut(n)
+    return d_getfield(n,"width"), d_getfield(n,"height"), d_getfield(n,"depth")
+end
+
+local whatcodes = nodes.whatcodes
+local pdfrefximage_code = whatcodes.pdfrefximage
+local pdfrefxform_code = whatcodes.pdfrefxform
+
+getwidth [pdfrefximage_code] = get_width
+getwidth [pdfrefxform_code ] = get_width
+
+getdimensions[pdfrefximage_code] = get_dimensions
+getdimensions[pdfrefxform_code ] = get_dimensions
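+
+-- A hypothetical example of plugging in a getter for another whatsit subtype (the
+-- subtype chosen here is just an illustration):
+--
+-- getdimensions[whatcodes.special] = function(n,dir)
+--     return 0, 0, 0 -- specials take up no space
+-- end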
+
+
diff --git a/tex/context/base/node-ppt.lua b/tex/context/base/node-ppt.lua
new file mode 100644
index 000000000..5e7abeaae
--- /dev/null
+++ b/tex/context/base/node-ppt.lua
@@ -0,0 +1,476 @@
+if not modules then modules = { } end modules ['node-ppt'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is all very experimental and likely to change.
+
+local next, type, unpack, load = next, type, table.unpack, load
+
+local serialize = table.serialize
+local formatters = string.formatters
+
+local report = logs.reporter("properties")
+local report_setting = logs.reporter("properties","setting")
+local trace_setting = false trackers.register("properties.setting", function(v) trace_setting = v end)
+
+-- report("using experimental properties")
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+local getid = nuts.getid
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getsubtype = nuts.getsubtype
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getlist = nuts.getlist
+local flushnode = nuts.flush
+local removenode = nuts.remove
+local traverse = nuts.traverse
+local traverse_id = nuts.traverse_id
+
+local nodecodes = nodes.nodecodes
+local whatsitcodes = nodes.whatsitcodes
+
+local whatsit_code = nodecodes.whatsit
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local userdefined_code = whatsitcodes.userdefined
+local localpar_code = whatsitcodes.localpar
+
+local nodepool = nodes.pool
+local new_usernumber = nodepool.usernumber
+
+local nutpool = nuts.pool
+local nut_usernumber = nutpool.usernumber
+
+local variables = interfaces.variables
+local v_before = variables.before
+local v_after = variables.after
+local v_here = variables.here
+
+local cache = { }
+local nofslots = 0
+local property_id = nodepool.userids["property"]
+
+local properties = nodes.properties
+local propertydata = properties.data
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+
+if not propertydata then
+ return
+end
+
+-- management
+
+local function register(where,data,...)
+ if not data then
+ data = where
+ where = v_after
+ end
+ if data then
+ local data = { where, data, ... }
+ nofslots = nofslots + 1
+ if nofslots > 1 then
+ cache[nofslots] = data
+ else
+ -- report("restarting attacher")
+ cache = { data } -- also forces collection
+ end
+ return new_usernumber(property_id,nofslots)
+ end
+end
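+
+-- A usage sketch (the arguments are only illustrative): the returned user whatsit
+-- carries a slot number into the cache and is resolved later in properties.attach.
+--
+-- local n = register(v_after, { snippet = "demo" })
+-- if n then
+--     context.flushnode(n) -- or node.write(n) for the immediate variant
+-- end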
+
+local writenode = node.write
+local flushnode = context.flushnode
+
+function commands.deferredproperty(...)
+-- context(register(...))
+ flushnode(register(...))
+end
+
+
+function commands.immediateproperty(...)
+ writenode(register(...))
+end
+
+commands.attachproperty = commands.deferredproperty
+
+local actions = { } properties.actions = actions
+
+table.setmetatableindex(actions,function(t,k)
+ report("unknown property action %a",k)
+ local v = function() end
+ return v
+end)
+
+local f_delayed = formatters["return function(target,head,where,propdata,parent) %s end"]
+local f_immediate = formatters["return function(target,head,where,propdata) %s end"]
+
+local nofdelayed = 0 -- better is to keep track of it per page ... we can have deleted nodes with properties
+
+function actions.delayed(target,head,where,propdata,code,...) -- this one is used at the tex end
+-- local kind = type(code)
+-- if kind == "string" then
+-- code, err = load(f_delayed(code))
+-- if code then
+-- code = code()
+-- end
+-- elseif kind ~= "function" then
+-- code = nil
+-- end
+ if code then
+ local delayed = propdata.delayed
+ if delayed then
+ delayed[#delayed+1] = { where, code, ... }
+ else
+ propdata.delayed = { { where, code, ... } }
+ nofdelayed = nofdelayed + 1
+ end
+ end
+end
+
+function actions.fdelayed(target,head,where,propdata,code,...) -- this one is used at the tex end
+-- local kind = type(code)
+-- if kind == "string" then
+-- code, err = load(f_delayed(code))
+-- if code then
+-- code = code()
+-- end
+-- elseif kind ~= "function" then
+-- code = nil
+-- end
+ if code then
+ local delayed = propdata.delayed
+ if delayed then
+ delayed[#delayed+1] = { false, code, ... }
+ else
+ propdata.delayed = { { false, code, ... } }
+ nofdelayed = nofdelayed + 1
+ end
+ end
+end
+
+function actions.immediate(target,head,where,propdata,code,...) -- this one is used at the tex end
+ local kind = type(code)
+ if kind == "string" then
+ local f = f_immediate(code)
+ local okay, err = load(f)
+ if okay then
+ local h = okay()(target,head,where,propdata,...)
+ if h and h ~= head then
+ return h
+ end
+ end
+ elseif kind == "function" then
+ local h = code()(target,head,where,propdata,...)
+ if h and h ~= head then
+ return h
+ end
+ end
+end
+
+-- another experiment (a table or a function closure is equally efficient); a function
+-- is easier when we want to experiment with different (compatible) implementations
+
+-- function nodes.nuts.pool.deferredfunction(...)
+-- nofdelayed = nofdelayed + 1
+-- local n = nut_usernumber(property_id,0)
+-- propertydata[n] = { deferred = { ... } }
+-- return n
+-- end
+
+-- function nodes.nuts.pool.deferredfunction(f)
+-- nofdelayed = nofdelayed + 1
+-- local n = nut_usernumber(property_id,0)
+-- propertydata[n] = { deferred = f }
+-- return n
+-- end
+
+-- maybe actions will get parent too
+
+local function delayed(head,parent) -- direct based
+ for target in traverse(head) do
+ local p = propertydata[target]
+ if p then
+ -- local deferred = p.deferred -- kind of late lua (but too soon as we have no access to pdf.h/v)
+ -- if deferred then
+ -- -- if #deferred > 0 then
+ -- -- deferred[1](unpack(deferred,2))
+ -- -- else
+ -- -- deferred[1]()
+ -- -- end
+ -- deferred()
+ -- p.deferred = false
+ -- if nofdelayed == 1 then
+ -- nofdelayed = 0
+ -- return head
+ -- else
+ -- nofdelayed = nofdelayed - 1
+ -- end
+ -- else
+ local delayed = p.delayed
+ if delayed then
+ for i=1,#delayed do
+ local d = delayed[i]
+ local code = d[2]
+ local kind = type(code)
+ if kind == "string" then
+ code, err = load(f_delayed(code))
+ if code then
+ code = code()
+ end
+ end
+ local where = d[1]
+ if where then
+ local h = code(target,where,head,p,parent,unpack(d,3)) -- target where propdata head parent
+ if h and h ~= head then
+ head = h
+ end
+ else
+ code(unpack(d,3))
+ end
+ end
+ p.delayed = nil
+ if nofdelayed == 1 then
+ nofdelayed = 0
+ return head
+ else
+ nofdelayed = nofdelayed - 1
+ end
+ end
+ -- end
+ end
+ local id = getid(target)
+ if id == hlist_code or id == vlist_code then
+ local list = getlist(target)
+ if list then
+ local done = delayed(list,parent)
+ if done then
+ setfield(target,"list",done)
+ end
+ if nofdelayed == 0 then
+ return head
+ end
+ end
+ else
+ -- maybe also some more lists? but we will only use this for some
+ -- special cases .. who knows
+ end
+ end
+ return head
+end
+
+function properties.delayed(head) --
+ if nofdelayed > 0 then
+ -- if next(propertydata) then
+ starttiming(properties)
+ head = delayed(tonut(head))
+ stoptiming(properties)
+ return tonode(head), true -- done in shipout anyway
+ -- else
+ -- delayed = 0
+ -- end
+ end
+ return head, false
+end
+
+-- more explicit ones too
+
+local anchored = {
+ [v_before] = function(n)
+ while n do
+ n = getprev(n)
+            if getid(n) == whatsit_code and getsubtype(n) == userdefined_code and getfield(n,"user_id") == property_id then
+ -- continue
+ else
+ return n
+ end
+ end
+ end,
+ [v_after] = function(n)
+ while n do
+ n = getnext(n)
+ if getid(n) == whatsit_code then
+ local subtype = getsubtype(n)
+ if (subtype == userdefined_code and getfield(n,"user_id") == property_id) then
+ -- continue
+ elseif subtype == localpar_code then
+ -- continue .. can't happen anyway as we cannot write
+ else
+ return n
+ end
+ else
+ return n
+ end
+ end
+ end,
+ [v_here] = function(n)
+ -- todo
+ end,
+}
+
+table.setmetatableindex(anchored,function(t,k)
+    local v = anchored[v_after]
+ t[k] = v
+ return v
+end)
+
+function properties.attach(head)
+
+ if nofslots <= 0 then
+ return head, false
+ end
+
+ local done = false
+ local last = nil
+ local head = tonut(head)
+
+ starttiming(properties)
+
+ for source in traverse_id(whatsit_code,head) do
+ if getsubtype(source) == userdefined_code then
+ if last then
+ removenode(head,last,true)
+ last = nil
+ end
+ if getfield(source,"user_id") == property_id then
+ local slot = getfield(source,"value")
+ local data = cache[slot]
+ if data then
+ cache[slot] = nil
+ local where = data[1]
+ local target = anchored[where](source)
+ if target then
+ local first = data[2]
+ local method = type(first)
+ local p_target = propertydata[target]
+ local p_source = propertydata[source]
+ if p_target then
+ if p_source then
+ for k, v in next, p_source do
+ p_target[k] = v
+ end
+ end
+ if method == "table" then
+ for k, v in next, first do
+ p_target[k] = v
+ end
+ elseif method == "function" then
+ first(target,head,where,p_target,unpack(data,3))
+ elseif method == "string" then
+ actions[first](target,head,where,p_target,unpack(data,3))
+ end
+ elseif p_source then
+ if method == "table" then
+ propertydata[target] = p_source
+ for k, v in next, first do
+ p_source[k] = v
+ end
+ elseif method == "function" then
+ propertydata[target] = p_source
+ first(target,head,where,p_source,unpack(data,3))
+ elseif method == "string" then
+ propertydata[target] = p_source
+ actions[first](target,head,where,p_source,unpack(data,3))
+ end
+ else
+ if method == "table" then
+ propertydata[target] = first
+ elseif method == "function" then
+ local t = { }
+ propertydata[target] = t
+ first(target,head,where,t,unpack(data,3))
+ elseif method == "string" then
+ local t = { }
+ propertydata[target] = t
+ actions[first](target,head,where,t,unpack(data,3))
+ end
+ end
+ if trace_setting then
+ report_setting("node %i, id %s, data %s",
+ target,nodecodes[getid(target)],serialize(propertydata[target],false))
+ end
+ end
+ if nofslots == 1 then
+ nofslots = 0
+ last = source
+ break
+ else
+ nofslots = nofslots - 1
+ end
+ end
+ last = source
+ end
+ end
+ end
+
+ if last then
+ removenode(head,last,true)
+ end
+
+ stoptiming(properties)
+
+ return head, done
+
+end
+
+local tasks = nodes.tasks
+
+-- maybe better hard coded in-place
+
+-- tasks.prependaction("processors","before","nodes.properties.attach")
+-- tasks.appendaction("shipouts","normalizers","nodes.properties.delayed")
+
+statistics.register("properties processing time", function()
+ return statistics.elapsedseconds(properties)
+end)
+
+-- only for development
+
+-- local function show(head,level,report)
+-- for target in traverse(head) do
+-- local p = propertydata[target]
+-- if p then
+-- report("level %i, node %i, id %s, data %s",
+-- level,target,nodecodes[getid(target)],serialize(propertydata[target],false))
+-- end
+-- local id = getid(target)
+-- if id == hlist_code or id == vlist_code then
+-- local list = getlist(target)
+-- if list then
+-- show(list,level+1,report)
+-- end
+-- else
+-- -- maybe more lists
+-- end
+-- end
+-- return head, false
+-- end
+--
+-- local report_shipout = logs.reporter("properties","shipout")
+-- local report_processors = logs.reporter("properties","processors")
+--
+-- function properties.showshipout (head) return tonode(show(tonut(head),1,report_shipout )), true end
+-- function properties.showprocessors(head) return tonode(show(tonut(head),1,report_processors)), true end
+--
+-- tasks.prependaction("shipouts","before","nodes.properties.showshipout")
+-- tasks.disableaction("shipouts","nodes.properties.showshipout")
+--
+-- trackers.register("properties.shipout",function(v)
+-- tasks.setaction("shipouts","nodes.properties.showshipout",v)
+-- end)
+--
+-- tasks.appendaction ("processors","after","nodes.properties.showprocessors")
+-- tasks.disableaction("processors","nodes.properties.showprocessors")
+--
+-- trackers.register("properties.processors",function(v)
+-- tasks.setaction("processors","nodes.properties.showprocessors",v)
+-- end)
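
The attach handler above accepts three shapes of payload for a cached slot: a table that is merged into the anchored node's property table, a function that is called with the target, and a string that names an entry in the actions table. Reduced to the bare merge logic, without the whatsit scanning and node removal, the idea is roughly this (a simplified stand-in, not part of the patch; the example action is assumed):

-- simplified stand-in for the three payload methods handled in attach;
-- the real code also passes the list head and any extra slot data
local actions = {
    mark = function(target,where,p) p.marked = where end, -- assumed example action
}

local function applypayload(payload,target,where,p)
    local method = type(payload)
    if method == "table" then
        for k, v in next, payload do
            p[k] = v
        end
    elseif method == "function" then
        payload(target,where,p)
    elseif method == "string" then
        actions[payload](target,where,p)
    end
    return p
end
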
diff --git a/tex/context/base/node-pro.lua b/tex/context/base/node-pro.lua
index aa6692d7b..27e349893 100644
--- a/tex/context/base/node-pro.lua
+++ b/tex/context/base/node-pro.lua
@@ -13,15 +13,15 @@ local trace_callbacks = false trackers.register("nodes.callbacks", function(v)
local report_nodes = logs.reporter("nodes","processors")
-local nodes, node = nodes, node
+local nodes = nodes
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local tasks = nodes.tasks
+local nuts = nodes.nuts
-local free_node = node.free
-local first_glyph = node.first_glyph or node.first_character
-local has_attribute = node.has_attribute
+local first_glyph = nodes.first_glyph
+local has_glyph = nodes.has_glyph
nodes.processors = nodes.processors or { }
local processors = nodes.processors
@@ -31,43 +31,53 @@ local processors = nodes.processors
local actions = tasks.actions("processors")
-local n = 0
+do
-local function reconstruct(head) -- we probably have a better one
- local t, n, h = { }, 0, head
- while h do
+ local tonut = nuts.tonut
+ local getid = nuts.getid
+ local getchar = nuts.getchar
+ local getnext = nuts.getnext
+
+ local n = 0
+
+ local function reconstruct(head) -- we probably have a better one
+ local t, n, h = { }, 0, tonut(head)
+ while h do
+ n = n + 1
+ local id = getid(h)
+ if id == glyph_code then -- todo: disc etc
+ t[n] = utfchar(getchar(h))
+ else
+ t[n] = "[]"
+ end
+ h = getnext(h)
+ end
+ return concat(t)
+ end
+
+ function processors.tracer(what,state,head,groupcode,before,after,show)
+ if not groupcode then
+ groupcode = "unknown"
+ elseif groupcode == "" then
+ groupcode = "mvl"
+ end
n = n + 1
- local id = h.id
- if id == glyph_code then -- todo: disc etc
- t[n] = utfchar(h.char)
+ if show then
+ report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s, stream: %s",what,n,state,groupcode,before,after,reconstruct(head))
else
- t[n] = "[]"
+ report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s",what,n,state,groupcode,before,after)
end
- h = h.next
end
- return concat(t)
-end
-local function tracer(what,state,head,groupcode,before,after,show)
- if not groupcode then
- groupcode = "unknown"
- elseif groupcode == "" then
- groupcode = "mvl"
- end
- n = n + 1
- if show then
- report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s, stream: %s",what,n,state,groupcode,before,after,reconstruct(head))
- else
- report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s",what,n,state,groupcode,before,after)
- end
end
-processors.tracer = tracer
+local tracer = processors.tracer
processors.enabled = true -- this will become a proper state (like trackers)
function processors.pre_linebreak_filter(head,groupcode) -- ,size,packtype,direction
- local first, found = first_glyph(head) -- they really need to be glyphs
+ -- local first, found = first_glyph(head) -- they really need to be glyphs
+ local found = has_glyph(head)
if found then
if trace_callbacks then
local before = nodes.count(head,true)
@@ -94,10 +104,8 @@ local enabled = true
function processors.hpack_filter(head,groupcode,size,packtype,direction)
if enabled then
- -- if not head.next and head.id ~= glyph_code then -- happens often but not faster
- -- return true
- -- end
- local first, found = first_glyph(head) -- they really need to be glyphs
+ -- local first, found = first_glyph(head) -- they really need to be glyphs
+ local found = has_glyph(head)
if found then
if trace_callbacks then
local before = nodes.count(head,true)
@@ -121,15 +129,36 @@ function processors.hpack_filter(head,groupcode,size,packtype,direction)
return true
end
-local hpack = node.hpack
+do
+
+ local setfield = nodes.setfield
+ local hpack = nodes.hpack
+
+ function nodes.fasthpack(...) -- todo: pass explicit arguments
+ enabled = false
+ local hp, b = hpack(...)
+ setfield(hp,"prev",nil)
+ setfield(hp,"next",nil)
+ enabled = true
+ return hp, b
+ end
+
+end
+
+do
+
+ local setfield = nuts.setfield
+ local hpack = nuts.hpack
+
+ function nuts.fasthpack(...) -- todo: pass explicit arguments
+ enabled = false
+ local hp, b = hpack(...)
+ setfield(hp,"prev",nil)
+ setfield(hp,"next",nil)
+ enabled = true
+ return hp, b
+ end
-function nodes.fasthpack(...) -- todo: pass explicit arguments
- enabled = false
- local hp, b = hpack(...)
- hp.prev = nil
- hp.next = nil
- enabled = true
- return hp, b
end
callbacks.register('pre_linebreak_filter', processors.pre_linebreak_filter, "all kind of horizontal manipulations (before par break)")
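
The fasthpack variants exist so that packing done by the macro package itself does not trigger the hpack_filter processors again: the flag that hpack_filter checks is dropped around the pack call and restored afterwards. The pattern in isolation (a sketch, not the installed code):

-- the disable/pack/enable pattern behind fasthpack, in isolation
local enabled = true

local function guardedpack(pack,...)
    enabled = false                      -- hpack_filter sees this and returns early
    local box, badness = pack(...)
    enabled = true
    return box, badness
end
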
diff --git a/tex/context/base/node-ref.lua b/tex/context/base/node-ref.lua
index aa864fb1c..97c37c74e 100644
--- a/tex/context/base/node-ref.lua
+++ b/tex/context/base/node-ref.lua
@@ -16,12 +16,13 @@ if not modules then modules = { } end modules ['node-ref'] = {
-- is grouplevel still used?
+local concat = table.concat
+
local attributes, nodes, node = attributes, nodes, node
local allocate = utilities.storage.allocate, utilities.storage.mark
local mark = utilities.storage.allocate, utilities.storage.mark
-
local nodeinjections = backends.nodeinjections
local codeinjections = backends.codeinjections
@@ -33,17 +34,38 @@ local colors = attributes.colors
local references = structures.references
local tasks = nodes.tasks
-local hpack_list = node.hpack
-local list_dimensions = node.dimensions
-
-local trace_backend = false trackers.register("nodes.backend", function(v) trace_backend = v end)
-local trace_references = false trackers.register("nodes.references", function(v) trace_references = v end)
-local trace_destinations = false trackers.register("nodes.destinations", function(v) trace_destinations = v end)
+local trace_references = false trackers.register("nodes.references", function(v) trace_references = v end)
+local trace_destinations = false trackers.register("nodes.destinations", function(v) trace_destinations = v end)
+local trace_areas = false trackers.register("nodes.areas", function(v) trace_areas = v end)
+local show_references = false trackers.register("nodes.references.show", function(v) show_references = tonumber(v) or (v and 2.25 or false) end)
+local show_destinations = false trackers.register("nodes.destinations.show", function(v) show_destinations = tonumber(v) or (v and 2.00 or false) end)
local report_reference = logs.reporter("backend","references")
local report_destination = logs.reporter("backend","destinations")
local report_area = logs.reporter("backend","areas")
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getsubtype = nuts.getsubtype
+
+local hpack_list = nuts.hpack
+local vpack_list = nuts.vpack
+local list_dimensions = nuts.dimensions
+local traverse = nuts.traverse
+local find_node_tail = nuts.tail
+
local nodecodes = nodes.nodecodes
local skipcodes = nodes.skipcodes
local whatcodes = nodes.whatcodes
@@ -52,6 +74,8 @@ local listcodes = nodes.listcodes
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local glue_code = nodecodes.glue
+local glyph_code = nodecodes.glyph
+local rule_code = nodecodes.rule
local whatsit_code = nodecodes.whatsit
local leftskip_code = skipcodes.leftskip
@@ -63,75 +87,150 @@ local dir_code = whatcodes.dir
local line_code = listcodes.line
-local nodepool = nodes.pool
-
+local new_rule = nodepool.rule
local new_kern = nodepool.kern
-local traverse = node.traverse
-local find_node_tail = node.tail or node.slide
+local free_node = nuts.free
+
local tosequence = nodes.tosequence
--- local function dimensions(parent,start,stop)
--- stop = stop and stop.next
--- if parent then
--- if stop then
--- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop)
--- else
--- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start)
--- end
--- else
--- if stop then
--- return list_dimensions(start,stop)
--- else
--- return list_dimensions(start)
--- end
--- end
--- end
---
--- -- more compact
+local implement = interfaces.implement
+
+-- Normally a (destination) area is a box or a simple stretch of nodes, but when it is
+-- a paragraph we have a problem: we cannot calculate the height well. This happens
+-- with footnotes or content broken across a page.
-local function dimensions(parent,start,stop)
+local function vlist_dimensions(start,stop)
+ local temp
+ if stop then
+ temp = getnext(stop)
+ setfield(stop,"next",nil)
+ end
+ local v = vpack_list(start)
+ local w = getfield(v,"width")
+ local h = getfield(v,"height")
+ local d = getfield(v,"depth")
+ setfield(v,"list",nil)
+ free_node(v)
+ if temp then
+ setfield(stop,"next",temp)
+ end
+ return w, h, d
+end
+
+local function hlist_dimensions(start,stop,parent)
+ local last = stop and getnext(stop)
if parent then
- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop and stop.next)
+ return list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign"),getfield(parent,"glue_order"),start,last)
else
- return list_dimensions(start,stop and stop.next)
+ return list_dimensions(start,last)
+ end
+end
+
+local function dimensions(parent,start,stop) -- in principle we could move some to the caller
+ local id = getid(start)
+ if start == stop then
+ if id == hlist_code or id == vlist_code or id == glyph_code or id == rule_code then -- or image
+ if trace_areas then
+ report_area("dimensions taken of %a",nodecodes[id])
+ end
+ return getfield(start,"width"), getfield(parent,"height"), getfield(parent,"depth")
+ else
+ if trace_areas then
+ report_area("dimensions calculated of %a",nodecodes[id])
+ end
+ return hlist_dimensions(start,stop) -- one node only so simple
+ end
+ end
+ local last = stop and getnext(stop)
+ if parent then
+ -- todo: if no prev and no next and parent
+        -- todo: we need a list_dimensions for a vlist
+ if getid(parent) == vlist_code then
+ local l = getlist(parent)
+ local c = l
+ local ok = false
+ while c do
+ if c == start then
+ ok = true
+ end
+ if ok and getid(c) == hlist_code then
+ break
+ else
+ c = getnext(c)
+ end
+ end
+ if ok and c then
+ if trace_areas then
+ report_area("dimensions taken of first line in vlist")
+ end
+ return getfield(c,"width"), getfield(c,"height"), getfield(c,"depth"), c
+ else
+ if trace_areas then
+ report_area("dimensions taken of vlist (probably wrong)")
+ end
+ return hlist_dimensions(start,stop,parent)
+ end
+ else
+ if trace_areas then
+ report_area("dimensions taken of range starting with %a using parent",nodecodes[id])
+ end
+ return hlist_dimensions(start,stop,parent)
+ end
+ else
+ if trace_areas then
+ report_area("dimensions taken of range starting with %a",nodecodes[id])
+ end
+ return hlist_dimensions(start,stop)
end
end
-- is pardir important at all?
local function inject_range(head,first,last,reference,make,stack,parent,pardir,txtdir)
- local width, height, depth = dimensions(parent,first,last)
+ local width, height, depth, line = dimensions(parent,first,last)
if txtdir == "+TRT" or (txtdir == "===" and pardir == "TRT") then -- KH: textdir == "===" test added
width = - width
end
local result, resolved = make(width,height,depth,reference)
if result and resolved then
- if head == first then
- if trace_backend then
- report_area("head: %04i %s %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
+ if line then
+ -- special case, we only treat the first line in a vlist
+ local l = getlist(line)
+ if trace_areas then
+ report_area("%s: %04i %s %s %s => w=%p, h=%p, d=%p, c=%S","line",
+ reference,pardir or "---",txtdir or "---",tosequence(l,nil,true),width,height,depth,resolved)
end
- result.next = first
- first.prev = result
- return result, last
+ setfield(line,"list",result)
+ setfield(result,"next",l)
+ setfield(l,"prev",result)
+ return head, last
else
- if trace_backend then
- report_area("middle: %04i %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
- end
- local prev = first.prev
- if prev then
- result.next = first
- result.prev = prev
- prev.next = result
- first.prev = result
+ if head == first then
+ if trace_areas then
+ report_area("%s: %04i %s %s %s => w=%p, h=%p, d=%p, c=%S","head",
+ reference,pardir or "---",txtdir or "---",tosequence(first,last,true),width,height,depth,resolved)
+ end
+ setfield(result,"next",first)
+ setfield(first,"prev",result)
+ return result, last
else
- result.next = first
- first.prev = result
- end
- if first == head.next then
- head.next = result -- hm, weird
+ if trace_areas then
+ report_area("%s: %04i %s %s %s => w=%p, h=%p, d=%p, c=%S","middle",
+ reference,pardir or "---",txtdir or "---",tosequence(first,last,true),width,height,depth,resolved)
+ end
+ local prev = getprev(first)
+ if prev then
+ setfield(prev,"next",result)
+ setfield(result,"prev",prev)
+ end
+ setfield(result,"next",first)
+ setfield(first,"prev",result)
+ -- if first == getnext(head) then
+ -- setfield(head,"next",result) -- hm, weird
+ -- end
+ return head, last
end
- return head, last
end
else
return head, last
@@ -139,9 +238,12 @@ local function inject_range(head,first,last,reference,make,stack,parent,pardir,t
end
local function inject_list(id,current,reference,make,stack,pardir,txtdir)
- local width, height, depth, correction = current.width, current.height, current.depth, 0
- local moveright = false
- local first = current.list
+ local width = getfield(current,"width")
+ local height = getfield(current,"height")
+ local depth = getfield(current,"depth")
+ local correction = 0
+ local moveright = false
+ local first = getlist(current)
if id == hlist_code then -- box_code line_code
-- can be either an explicit hbox or a line and there is no way
-- to recognize this; anyway only if ht/dp (then inline)
@@ -149,17 +251,17 @@ local function inject_list(id,current,reference,make,stack,pardir,txtdir)
if first then
if sr and sr[2] then
local last = find_node_tail(first)
- if last.id == glue_code and last.subtype == rightskip_code then
- local prev = last.prev
- moveright = first.id == glue_code and first.subtype == leftskip_code
- if prev and prev.id == glue_code and prev.subtype == parfillskip_code then
- width = dimensions(current,first,prev.prev) -- maybe not current as we already take care of it
+ if getid(last) == glue_code and getsubtype(last) == rightskip_code then
+ local prev = getprev(last)
+ moveright = getid(first) == glue_code and getsubtype(first) == leftskip_code
+ if prev and getid(prev) == glue_code and getsubtype(prev) == parfillskip_code then
+ width = dimensions(current,first,getprev(prev)) -- maybe not current as we already take care of it
else
- if moveright and first.writable then
- width = width - first.spec.stretch*current.glue_set * current.glue_sign
+ if moveright and getfield(first,"writable") then
+ width = width - getfield(getfield(first,"spec"),"stretch") * getfield(current,"glue_set") * getfield(current,"glue_sign")
end
- if last.writable then
- width = width - last.spec.stretch*current.glue_set * current.glue_sign
+ if getfield(last,"writable") then
+ width = width - getfield(getfield(last,"spec"),"stretch") * getfield(current,"glue_set") * getfield(current,"glue_sign")
end
end
end
@@ -180,23 +282,26 @@ local function inject_list(id,current,reference,make,stack,pardir,txtdir)
local result, resolved = make(width,height,depth,reference)
-- todo: only when width is ok
if result and resolved then
- if trace_backend then
- report_area("box: %04i %s %s: w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",width,height,depth,resolved)
+ if trace_areas then
+ report_area("%s: %04i %s %s %s: w=%p, h=%p, d=%p, c=%S","box",
+ reference,pardir or "---",txtdir or "----","[]",width,height,depth,resolved)
end
if not first then
- current.list = result
+ setfield(current,"list",result)
elseif moveright then -- brr no prevs done
-- result after first
- local n = first.next
- result.next = n
- first.next = result
- result.prev = first
- if n then n.prev = result end
+ local n = getnext(first)
+ setfield(result,"next",n)
+ setfield(first,"next",result)
+ setfield(result,"prev",first)
+ if n then
+ setfield(n,"prev",result)
+ end
else
-- first after result
- result.next = first
- first.prev = result
- current.list = result
+ setfield(result,"next",first)
+ setfield(first,"prev",result)
+ setfield(current,"list",result)
end
end
end
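
The width correction in the hlist branch above removes the amount that a stretched left or right skip contributed when the line was packed. With assumed numbers the arithmetic looks like this (stretching case, glue_sign 1, matching what the code subtracts):

-- assumed numbers, only to make the width correction concrete
local stretch   = 10 * 65536   -- stretch component of the skip: 10pt in scaled points
local glue_set  = 0.5          -- ratio the packer used for this line
local glue_sign = 1            -- 1 means the line was stretched
local extra     = stretch * glue_set * glue_sign
-- extra is 327680sp (5pt): the amount the skip grew, subtracted from the reported width
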
@@ -209,45 +314,61 @@ local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,tx
pardir = pardir or "==="
txtdir = txtdir or "==="
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code or id == vlist_code then
- local r = current[attribute]
- -- somehow reference is true so the following fails (second one not done) in
- -- test \goto{test}[page(2)] test \gotobox{test}[page(2)]
- -- so let's wait till this fails again
- -- if not reference and r and (not skip or r > skip) then -- > or ~=
- if r and (not skip or r > skip) then -- > or ~=
- inject_list(id,current,r,make,stack,pardir,txtdir)
- end
+
+ -- see dimensions: this is tricky with split off boxes like inserts
+ -- where we can end up with a first and last spanning lines
+
+ local r = getattr(current,attribute)
+ -- test \goto{test}[page(2)] test \gotobox{test}[page(2)]
+ -- test \goto{\TeX}[page(2)] test \gotobox{\hbox {x} \hbox {x}}[page(2)]
+        -- if r and (not skip or r > skip) then -- maybe no > test
+ -- inject_list(id,current,r,make,stack,pardir,txtdir)
+ -- end
if r then
+ if not reference then
+ reference, first, last, firstdir = r, current, current, txtdir
+ elseif r == reference then
+ -- same link
+ last = current
+ elseif (done[reference] or 0) == 0 then
+ if not skip or r > skip then -- maybe no > test
+ head, current = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
+ reference, first, last, firstdir = nil, nil, nil, nil
+ end
+ else
+ reference, first, last, firstdir = r, current, current, txtdir
+ end
done[r] = (done[r] or 0) + 1
end
- local list = current.list
+ local list = getlist(current)
if list then
- local _
- current.list, _, pardir, txtdir = inject_areas(list,attribute,make,stack,done,r or skip or 0,current,pardir,txtdir)
+ local h, ok
+ h, ok , pardir, txtdir = inject_areas(list,attribute,make,stack,done,r or skip or 0,current,pardir,txtdir)
+ setfield(current,"list",h)
end
if r then
done[r] = done[r] - 1
end
elseif id == whatsit_code then
- local subtype = current.subtype
+ local subtype = getsubtype(current)
if subtype == localpar_code then
- pardir = current.dir
+ pardir = getfield(current,"dir")
elseif subtype == dir_code then
- txtdir = current.dir
+ txtdir = getfield(current,"dir")
end
- elseif id == glue_code and current.subtype == leftskip_code then -- any glue at the left?
+ elseif id == glue_code and getsubtype(current) == leftskip_code then -- any glue at the left?
--
else
- local r = current[attribute]
+ local r = getattr(current,attribute)
if not r then
-- just go on, can be kerns
elseif not reference then
reference, first, last, firstdir = r, current, current, txtdir
elseif r == reference then
last = current
- elseif (done[reference] or 0) == 0 then -- or id == glue_code and current.subtype == right_skip_code
+ elseif (done[reference] or 0) == 0 then -- or id == glue_code and getsubtype(current) == right_skip_code
if not skip or r > skip then -- maybe no > test
head, current = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
reference, first, last, firstdir = nil, nil, nil, nil
@@ -256,7 +377,7 @@ local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,tx
reference, first, last, firstdir = r, current, current, txtdir
end
end
- current = current.next
+ current = getnext(current)
end
if reference and (done[reference] or 0) == 0 then
head = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
@@ -271,45 +392,39 @@ local function inject_area(head,attribute,make,stack,done,parent,pardir,txtdir)
txtdir = txtdir or "==="
local current = head
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code or id == vlist_code then
- local r = current[attribute]
+ local r = getattr(current,attribute)
if r and not done[r] then
done[r] = true
inject_list(id,current,r,make,stack,pardir,txtdir)
end
- local list = current.list
+ local list = getlist(current)
if list then
- current.list = inject_area(list,attribute,make,stack,done,current,pardir,txtdir)
+ setfield(current,"list",(inject_area(list,attribute,make,stack,done,current,pardir,txtdir)))
end
elseif id == whatsit_code then
- local subtype = current.subtype
+ local subtype = getsubtype(current)
if subtype == localpar_code then
- pardir = current.dir
+ pardir = getfield(current,"dir")
elseif subtype == dir_code then
- txtdir = current.dir
+ txtdir = getfield(current,"dir")
end
else
- local r = current[attribute]
+ local r = getattr(current,attribute)
if r and not done[r] then
done[r] = true
head, current = inject_range(head,current,current,r,make,stack,parent,pardir,txtdir)
end
end
- current = current.next
+ current = getnext(current)
end
end
return head, true
end
--- tracing
+-- tracing: todo: use predefined colors
-local nodepool = nodes.pool
-
-local new_rule = nodepool.rule
-local new_kern = nodepool.kern
-
-local set_attribute = node.set_attribute
local register_color = colors.register
local a_color = attributes.private('color')
@@ -319,7 +434,32 @@ local u_transparency = nil
local u_colors = { }
local force_gray = true
-local function colorize(width,height,depth,n,reference,what)
+local function addstring(what,str,shift) -- todo: make a pluggable helper (in font-ctx)
+ if str then
+ local typesetters = nuts.typesetters
+ if typesetters then
+ local hashes = fonts.hashes
+ local infofont = fonts.infofont()
+ local emwidth = hashes.emwidths [infofont]
+ local exheight = hashes.exheights[infofont]
+ if what == "reference" then
+ str = str .. " "
+ shift = - (shift or 2.25) * exheight
+ else
+ str = str .. " "
+ shift = (shift or 2) * exheight
+ end
+ local text = typesetters.fast_hpack(str,infofont)
+ local rule = new_rule(emwidth/5,4*exheight,3*exheight)
+ setfield(text,"shift",shift)
+ return nuts.fasthpack(nuts.linked(text,rule))
+ -- local text = typesetters.fast_hpack(str,fonts.infofont())
+ -- return text
+ end
+ end
+end
+
+local function colorize(width,height,depth,n,reference,what,sr,offset)
if force_gray then n = 0 end
u_transparency = u_transparency or transparencies.register(nil,2,.65)
local ucolor = u_colors[n]
@@ -346,25 +486,49 @@ local function colorize(width,height,depth,n,reference,what)
height = 65536/2
depth = height
end
- local rule = new_rule(width,height,depth)
- rule[a_colormodel] = 1 -- gray color model
- rule[a_color] = u_color
- rule[a_transparency] = u_transparency
+ local rule = new_rule(width,height,depth) -- todo: use tracer rule
+ setattr(rule,a_colormodel,1) -- gray color model
+ setattr(rule,a_color,u_color)
+ setattr(rule,a_transparency,u_transparency)
if width < 0 then
local kern = new_kern(width)
- rule.width = -width
- kern.next = rule
- rule.prev = kern
+ setfield(rule,"width",-width)
+ setfield(kern,"next",rule)
+ setfield(rule,"prev",kern)
return kern
else
+
+if sr and sr ~= "" then
+ local text = addstring(what,sr,shift)
+ if text then
+ local kern = new_kern(-getfield(text,"width"))
+ setfield(kern,"next",text)
+ setfield(text,"prev",kern)
+ setfield(text,"next",rule)
+ setfield(rule,"prev",text)
+ return kern
+ end
+end
+
return rule
end
end
--- references:
+local function justadd(what,sr,shift)
+ if sr and sr ~= "" then
+ local text = addstring(what,sr,shift)
+ if text then
+ local kern = new_kern(-getfield(text,"width"))
+ setfield(kern,"next",text)
+ setfield(text,"prev",kern)
+ return kern
+ end
+ end
+end
-local nodepool = nodes.pool
-local new_kern = nodepool.kern
+-- references:
local texsetattribute = tex.setattribute
local texsetcount = tex.setcount
@@ -397,35 +561,64 @@ function references.get(n) -- not public so functionality can change
return sn and sn[1]
end
-local function makereference(width,height,depth,reference)
+local function makereference(width,height,depth,reference) -- height and depth are of parent
local sr = stack[reference]
if sr then
if trace_references then
report_reference("resolving attribute %a",reference)
end
local resolved, ht, dp, set, n = sr[1], sr[2], sr[3], sr[4], sr[5]
+ -- logs.report("temp","child: ht=%p dp=%p, parent: ht=%p dp=%p",ht,dp,height,depth)
if ht then
if height < ht then height = ht end
if depth < dp then depth = dp end
end
+ -- logs.report("temp","used: ht=%p dp=%p",height,depth)
local annot = nodeinjections.reference(width,height,depth,set)
if annot then
+ annot = tonut(annot) -- todo
nofreferences = nofreferences + 1
- local result, current
+ local result, current, texts
+ if show_references then
+ local d = sr[1]
+ if d then
+ local r = d.reference
+ local p = d.prefix
+ if r then
+ if p then
+ texts = p .. "|" .. r
+ else
+ texts = r
+ end
+ else
+ -- t[#t+1] = d.internal or "?"
+ end
+ end
+ end
if trace_references then
local step = 65536
- result = hpack_list(colorize(width,height-step,depth-step,2,reference,"reference")) -- step subtracted so that we can see seperate links
- result.width = 0
+            result = hpack_list(colorize(width,height-step,depth-step,2,reference,"reference",texts,show_references)) -- step subtracted so that we can see separate links
+ setfield(result,"width",0)
current = result
+ elseif texts then
+ texts = justadd("reference",texts,show_references)
+ if texts then
+ result = hpack_list(texts)
+ setfield(result,"width",0)
+ current = result
+ end
end
if current then
- current.next = annot
+ setfield(current,"next",annot)
+ setfield(annot,"prev",current)
else
result = annot
end
references.registerpage(n)
result = hpack_list(result,0)
- result.width, result.height, result.depth = 0, 0, 0
+ setfield(result,"width",0)
+ setfield(result,"height",0)
+ setfield(result,"depth",0)
if cleanupreferences then stack[reference] = nil end
return result, resolved
elseif trace_references then
@@ -436,9 +629,19 @@ local function makereference(width,height,depth,reference)
end
end
+-- function nodes.references.handler(head)
+-- if topofstack > 0 then
+-- return inject_areas(head,attribute,makereference,stack,done)
+-- else
+-- return head, false
+-- end
+-- end
+
function nodes.references.handler(head)
if topofstack > 0 then
- return inject_areas(head,attribute,makereference,stack,done)
+ head = tonut(head)
+ local head, done = inject_areas(head,attribute,makereference,stack,done)
+ return tonode(head), done
else
return head, false
end
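
The handler now follows the usual nut wrapping shape: convert the incoming head to its direct (nut) form at the boundary, do all the work on nuts, and convert back before returning to the callback machinery; the destinations handler further down does the same. Stripped of specifics (a sketch; process stands for any nut based worker):

-- generic shape of a nut based callback handler (process is an assumed worker)
local function nuthandler(process)
    return function(head,...)
        local h, done = process(tonut(head),...)
        return tonode(h), done
    end
end
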
@@ -470,49 +673,86 @@ local function makedestination(width,height,depth,reference)
if trace_destinations then
report_destination("resolving attribute %a",reference)
end
- local resolved, ht, dp, name, view = sr[1], sr[2], sr[3], sr[4], sr[5]
+ local resolved, ht, dp, name, view = sr[1], sr[2], sr[3], sr[4], sr[5] -- sr[4] will change to just internal
if ht then
if height < ht then height = ht end
if depth < dp then depth = dp end
end
- local result, current
+ local result, current, texts
+ if show_destinations then
+ if name and #name > 0 then
+ local t = { }
+ for i=1,#name do
+ local s = name[i]
+ if type(s) == "number" then
+ local d = references.internals[s]
+ if d then
+ d = d.references
+ local r = d.reference
+ local p = d.usedprefix
+ if r then
+ if p then
+ t[#t+1] = p .. "|" .. r
+ else
+ t[#t+1] = r
+ end
+ else
+ -- t[#t+1] = d.internal or "?"
+ end
+ end
+ else
+ -- in fact we have a prefix:name here
+ end
+ end
+ if #t > 0 then
+ texts = concat(t," & ")
+ end
+ end
+ end
if trace_destinations then
local step = 0
if width == 0 then
step = 4*65536
width, height, depth = 5*step, 5*step, 0
end
- for n=1,#name do
- local rule = hpack_list(colorize(width,height,depth,3,reference,"destination"))
- rule.width = 0
- if not result then
- result, current = rule, rule
- else
- current.next = rule
- rule.prev = current
- current = rule
+ local rule = hpack_list(colorize(width,height,depth,3,reference,"destination",texts,show_destinations))
+ setfield(rule,"width",0)
+ if not result then
+ result, current = rule, rule
+ else
+ setfield(current,"next",rule)
+ setfield(rule,"prev",current)
+ current = rule
+ end
+ width, height = width - step, height - step
+ elseif texts then
+ texts = justadd("destination",texts,show_destinations)
+ if texts then
+ result = hpack_list(texts)
+ if result then
+ setfield(result,"width",0)
+ current = result
end
- width, height = width - step, height - step
end
end
nofdestinations = nofdestinations + 1
- for n=1,#name do
- local annot = nodeinjections.destination(width,height,depth,name[n],view)
- if annot then
- -- probably duplicate
- if not result then
- result = annot
- else
- current.next = annot
- annot.prev = current
- end
- current = find_node_tail(annot)
+ local annot = nodeinjections.destination(width,height,depth,name,view)
+ if annot then
+ annot = tonut(annot) -- obsolete soon
+ if result then
+ setfield(current,"next",annot)
+ setfield(annot,"prev",current)
+ else
+ result = annot
end
+ current = find_node_tail(annot)
end
if result then
-- some internal error
result = hpack_list(result,0)
- result.width, result.height, result.depth = 0, 0, 0
+ setfield(result,"width",0)
+ setfield(result,"height",0)
+ setfield(result,"depth",0)
end
if cleanupdestinations then stack[reference] = nil end
return result, resolved
@@ -521,28 +761,42 @@ local function makedestination(width,height,depth,reference)
end
end
+-- function nodes.destinations.handler(head)
+-- if topofstack > 0 then
+-- return inject_area(head,attribute,makedestination,stack,done) -- singular
+-- else
+-- return head, false
+-- end
+-- end
+
function nodes.destinations.handler(head)
if topofstack > 0 then
- return inject_area(head,attribute,makedestination,stack,done) -- singular
+ head = tonut(head)
+ local head, done = inject_areas(head,attribute,makedestination,stack,done)
+ return tonode(head), done
else
return head, false
end
end
+
-- will move
function references.mark(reference,h,d,view)
return setdestination(tex.currentgrouplevel,h,d,reference,view)
end
-function references.inject(prefix,reference,h,d,highlight,newwindow,layer) -- todo: use currentreference is possible
+function references.inject(prefix,reference,specification) -- todo: use currentreference is possible
+-- print(prefix,reference,h,d,highlight,newwindow,layer)
local set, bug = references.identify(prefix,reference)
if bug or #set == 0 then
-- unknown ref, just don't set it and issue an error
else
-- check
- set.highlight, set.newwindow, set.layer = highlight, newwindow, layer
- setreference(h,d,set) -- sets attribute / todo: for set[*].error
+ set.highlight = specification.highlight
+ set.newwindow = specification.newwindow
+ set.layer = specification.layer
+ setreference(specification.height,specification.depth,set) -- sets attribute / todo: for set[*].error
end
end
@@ -553,8 +807,32 @@ function references.injectcurrentset(h,d) -- used inside doifelse
end
end
-commands.injectreference = references.inject
-commands.injectcurrentreference = references.injectcurrentset
+implement {
+ name = "injectreference",
+ actions = references.inject,
+ arguments = {
+ "string",
+ "string",
+ {
+ { "highlight", "boolean" },
+ { "newwindow", "boolean" },
+ { "layer" },
+ { "height", "dimen" },
+ { "depth", "dimen" },
+ }
+ }
+}
+
+implement {
+ name = "injectcurrentreference",
+ actions = references.injectcurrentset,
+}
+
+implement {
+ name = "injectcurrentreferencehtdp",
+ actions = references.injectcurrentset,
+ arguments = { "dimen", "dimen" },
+}
--
@@ -583,4 +861,11 @@ end)
function references.enableinteraction()
tasks.enableaction("shipouts","nodes.references.handler")
tasks.enableaction("shipouts","nodes.destinations.handler")
+ function references.enableinteraction() end
end
+
+implement {
+ name = "enableinteraction",
+ actions = references.enableinteraction,
+ onlyonce = true
+}
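
The commands.* assignments are replaced by interfaces.implement declarations; the arguments table tells the TeX end what to scan before the action is called. The same pattern with an assumed name and action, just as illustration:

-- illustration of the implement pattern used above (name and action are assumed)
local report_demo = logs.reporter("backend","demo")

interfaces.implement {
    name      = "showinjected",
    arguments = { "string", "dimen" },
    actions   = function(tag,amount)
        report_demo("%s injected at %p",tag,amount)
    end,
}
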
diff --git a/tex/context/base/node-res.lua b/tex/context/base/node-res.lua
index ca9d67f91..43dd3895e 100644
--- a/tex/context/base/node-res.lua
+++ b/tex/context/base/node-res.lua
@@ -18,13 +18,8 @@ local report_nodes = logs.reporter("nodes","housekeeping")
local nodes, node = nodes, node
-local copy_node = node.copy
-local free_node = node.free
-local free_list = node.flush_list
-local new_node = node.new
-
nodes.pool = nodes.pool or { }
-local pool = nodes.pool
+local nodepool = nodes.pool
local whatsitcodes = nodes.whatsitcodes
local skipcodes = nodes.skipcodes
@@ -35,400 +30,549 @@ local glyph_code = nodecodes.glyph
local allocate = utilities.storage.allocate
-local texgetbox = tex.getbox
local texgetcount = tex.getcount
local reserved, nofreserved = { }, 0
-local function register_node(n)
- nofreserved = nofreserved + 1
- reserved[nofreserved] = n
- return n
-end
+-- user nodes
-pool.register = register_node
+local userids = allocate()
+local lastid = 0
-function pool.cleanup(nofboxes) -- todo
- if nodes.tracers.steppers then -- to be resolved
- nodes.tracers.steppers.reset() -- todo: make a registration subsystem
- end
- local nl, nr = 0, nofreserved
- for i=1,nofreserved do
- local ri = reserved[i]
- -- if not (ri.id == glue_spec and not ri.is_writable) then
- free_node(reserved[i])
- -- end
+setmetatable(userids, {
+ __index = function(t,k)
+ if type(k) == "string" then
+ lastid = lastid + 1
+ rawset(userids,lastid,k)
+ rawset(userids,k,lastid)
+ return lastid
+ else
+ rawset(userids,k,k)
+ return k
+ end
+ end,
+ __call = function(t,k)
+ return t[k]
end
- if nofboxes then
- for i=0,nofboxes do
- local l = texgetbox(i)
- if l then
- free_node(l) -- also list ?
- nl = nl + 1
- end
+} )
+
+-- nuts overload
+
+local nuts = nodes.nuts
+local nutpool = { }
+nuts.pool = nutpool
+
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getbox = nuts.getbox
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getid = nuts.getid
+local getlist = nuts.getlist
+
+local copy_nut = nuts.copy
+local new_nut = nuts.new
+local free_nut = nuts.free
+
+local copy_node = nodes.copy
+local new_node = nodes.new
+
+-- at some point we could have a dual set (the overhead of tonut is not much larger than
+-- metatable associations at the lua/c end, especially if we also take assignments into
+-- account)
+
+-- table.setmetatableindex(nodepool,function(t,k,v)
+-- -- report_nodes("defining nodepool[%s] instance",k)
+-- local f = nutpool[k]
+-- local v = function(...)
+-- return tonode(f(...))
+-- end
+-- t[k] = v
+-- return v
+-- end)
+--
+-- -- we delay one step because that permits us a forward reference
+-- -- e.g. in pdfsetmatrix
+
+table.setmetatableindex(nodepool,function(t,k,v)
+ -- report_nodes("defining nodepool[%s] instance",k)
+ local v = function(...)
+ local f = nutpool[k]
+ local v = function(...)
+ return tonode(f(...))
end
+ t[k] = v
+ return v(...)
end
- reserved = { }
- nofreserved = 0
- return nr, nl, nofboxes -- can be nil
+ t[k] = v
+ return v
+end)
+
+local function register_nut(n)
+ nofreserved = nofreserved + 1
+ reserved[nofreserved] = n
+ return n
end
-function pool.usage()
- local t = { }
- for n, tag in gmatch(status.node_mem_usage,"(%d+) ([a-z_]+)") do
- t[tag] = n
+local function register_node(n)
+ nofreserved = nofreserved + 1
+ if type(n) == "number" then -- isnut(n)
+ reserved[nofreserved] = n
+ else
+ reserved[nofreserved] = tonut(n)
end
- return t
+ return n
end
-local disc = register_node(new_node("disc"))
-local kern = register_node(new_node("kern",kerncodes.userkern))
-local fontkern = register_node(new_node("kern",kerncodes.fontkern))
-local penalty = register_node(new_node("penalty"))
-local glue = register_node(new_node("glue")) -- glue.spec = nil
-local glue_spec = register_node(new_node("glue_spec"))
-local glyph = register_node(new_node("glyph",0))
-local textdir = register_node(new_node("whatsit",whatsitcodes.dir))
-local latelua = register_node(new_node("whatsit",whatsitcodes.latelua))
-local special = register_node(new_node("whatsit",whatsitcodes.special))
-local user_n = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_n.type = 100 -- 44
-local user_l = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_l.type = 110 -- 44
-local user_s = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_s.type = 115 -- 44
-local user_t = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_t.type = 116 -- 44
-local left_margin_kern = register_node(new_node("margin_kern",0))
-local right_margin_kern = register_node(new_node("margin_kern",1))
-local lineskip = register_node(new_node("glue",skipcodes.lineskip))
-local baselineskip = register_node(new_node("glue",skipcodes.baselineskip))
-local leftskip = register_node(new_node("glue",skipcodes.leftskip))
-local rightskip = register_node(new_node("glue",skipcodes.rightskip))
-local temp = register_node(new_node("temp",0))
-local noad = register_node(new_node("noad"))
+nodepool.userids = userids
+nodepool.register = register_node
+
+nutpool.userids = userids
+nutpool.register = register_node -- could be register_nut
+
+-- so far
+
+local disc = register_nut(new_nut("disc"))
+local kern = register_nut(new_nut("kern",kerncodes.userkern))
+local fontkern = register_nut(new_nut("kern",kerncodes.fontkern))
+local penalty = register_nut(new_nut("penalty"))
+local glue = register_nut(new_nut("glue")) -- glue.spec = nil
+local glue_spec = register_nut(new_nut("glue_spec"))
+local glyph = register_nut(new_nut("glyph",0))
+local textdir = register_nut(new_nut("whatsit",whatsitcodes.dir))
+local latelua = register_nut(new_nut("whatsit",whatsitcodes.latelua))
+local special = register_nut(new_nut("whatsit",whatsitcodes.special))
+local user_n = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_n,"type",100) -- 44
+local user_l = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_l,"type",110) -- 44
+local user_s = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_s,"type",115) -- 44
+local user_t = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_t,"type",116) -- 44
+----- user_c = register_nut(new_nut("whatsit",whatsitcodes.userdefined)) setfield(user_c,"type",108) -- 44
+local left_margin_kern = register_nut(new_nut("margin_kern",0))
+local right_margin_kern = register_nut(new_nut("margin_kern",1))
+local lineskip = register_nut(new_nut("glue",skipcodes.lineskip))
+local baselineskip = register_nut(new_nut("glue",skipcodes.baselineskip))
+local leftskip = register_nut(new_nut("glue",skipcodes.leftskip))
+local rightskip = register_nut(new_nut("glue",skipcodes.rightskip))
+local temp = register_nut(new_nut("temp",0))
+local noad = register_nut(new_nut("noad"))
-- the dir field needs to be set otherwise crash:
-local rule = register_node(new_node("rule")) rule .dir = "TLT"
-local hlist = register_node(new_node("hlist")) hlist.dir = "TLT"
-local vlist = register_node(new_node("vlist")) vlist.dir = "TLT"
-
-function pool.zeroglue(n)
- local s = n.spec
- return not writable or (
- s.width == 0
- and s.stretch == 0
- and s.shrink == 0
- and s.stretch_order == 0
- and s.shrink_order == 0
- )
-end
-
-function pool.glyph(fnt,chr)
- local n = copy_node(glyph)
- if fnt then n.font = fnt end
- if chr then n.char = chr end
+local rule = register_nut(new_nut("rule")) setfield(rule, "dir","TLT")
+local hlist = register_nut(new_nut("hlist")) setfield(hlist,"dir","TLT")
+local vlist = register_nut(new_nut("vlist")) setfield(vlist,"dir","TLT")
+
+function nutpool.zeroglue(n)
+ local s = getfield(n,"spec")
+ return
+ getfield(s,"width") == 0 and
+ getfield(s,"stretch") == 0 and
+ getfield(s,"shrink") == 0 and
+ getfield(s,"stretch_order") == 0 and
+ getfield(s,"shrink_order") == 0
+end
+
+function nutpool.glyph(fnt,chr)
+ local n = copy_nut(glyph)
+ if fnt then setfield(n,"font",fnt) end
+ if chr then setfield(n,"char",chr) end
return n
end
-function pool.penalty(p)
- local n = copy_node(penalty)
- n.penalty = p
+function nutpool.penalty(p)
+ local n = copy_nut(penalty)
+ setfield(n,"penalty",p)
return n
end
-function pool.kern(k)
- local n = copy_node(kern)
- n.kern = k
+function nutpool.kern(k)
+ local n = copy_nut(kern)
+ setfield(n,"kern",k)
return n
end
-function pool.fontkern(k)
- local n = copy_node(fontkern)
- n.kern = k
+function nutpool.fontkern(k)
+ local n = copy_nut(fontkern)
+ setfield(n,"kern",k)
return n
end
-function pool.gluespec(width,stretch,shrink,stretch_order,shrink_order)
- local s = copy_node(glue_spec)
- if width then s.width = width end
- if stretch then s.stretch = stretch end
- if shrink then s.shrink = shrink end
- if stretch_order then s.stretch_order = stretch_order end
- if shrink_order then s.shrink_order = shrink_order end
+function nutpool.gluespec(width,stretch,shrink,stretch_order,shrink_order)
+ local s = copy_nut(glue_spec)
+ if width then setfield(s,"width",width) end
+ if stretch then setfield(s,"stretch",stretch) end
+ if shrink then setfield(s,"shrink",shrink) end
+ if stretch_order then setfield(s,"stretch_order",stretch_order) end
+ if shrink_order then setfield(s,"shrink_order",shrink_order) end
return s
end
local function someskip(skip,width,stretch,shrink,stretch_order,shrink_order)
- local n = copy_node(skip)
+ local n = copy_nut(skip)
if not width then
-- no spec
elseif width == false or tonumber(width) then
- local s = copy_node(glue_spec)
- if width then s.width = width end
- if stretch then s.stretch = stretch end
- if shrink then s.shrink = shrink end
- if stretch_order then s.stretch_order = stretch_order end
- if shrink_order then s.shrink_order = shrink_order end
- n.spec = s
+ local s = copy_nut(glue_spec)
+ if width then setfield(s,"width",width) end
+ if stretch then setfield(s,"stretch",stretch) end
+ if shrink then setfield(s,"shrink",shrink) end
+ if stretch_order then setfield(s,"stretch_order",stretch_order) end
+ if shrink_order then setfield(s,"shrink_order",shrink_order) end
+ setfield(n,"spec",s)
else
-- shared
- n.spec = copy_node(width)
+ setfield(n,"spec",copy_nut(width))
end
return n
end
-function pool.stretch(a,b)
- local n = copy_node(glue)
- local s = copy_node(glue_spec)
+function nutpool.stretch(a,b)
+ local n = copy_nut(glue)
+ local s = copy_nut(glue_spec)
if b then
- s.stretch = a
- s.stretch_order = b
+ setfield(s,"stretch",a)
+ setfield(s,"stretch_order",b)
else
- s.stretch = 1
- s.stretch_order = a or 1
+ setfield(s,"stretch",1)
+ setfield(s,"stretch_order",a or 1)
end
- n.spec = s
+ setfield(n,"spec",s)
return n
end
-function pool.shrink(a,b)
- local n = copy_node(glue)
- local s = copy_node(glue_spec)
+function nutpool.shrink(a,b)
+ local n = copy_nut(glue)
+ local s = copy_nut(glue_spec)
if b then
- s.shrink = a
- s.shrink_order = b
+ setfield(s,"shrink",a)
+ setfield(s,"shrink_order",b)
else
- s.shrink = 1
- s.shrink_order = a or 1
+ setfield(s,"shrink",1)
+ setfield(s,"shrink_order",a or 1)
end
- n.spec = s
+ setfield(n,"spec",s)
return n
end
-
-function pool.glue(width,stretch,shrink,stretch_order,shrink_order)
+function nutpool.glue(width,stretch,shrink,stretch_order,shrink_order)
return someskip(glue,width,stretch,shrink,stretch_order,shrink_order)
end
-function pool.leftskip(width,stretch,shrink,stretch_order,shrink_order)
+function nutpool.negatedglue(glue)
+ local n = copy_nut(glue)
+ local s = copy_nut(getfield(n,"spec"))
+ local width = getfield(s,"width")
+ local stretch = getfield(s,"stretch")
+ local shrink = getfield(s,"shrink")
+ if width then setfield(s,"width", -width) end
+ if stretch then setfield(s,"stretch",-stretch) end
+ if shrink then setfield(s,"shrink", -shrink) end
+ setfield(n,"spec",s)
+ return n
+end
+
+function nutpool.leftskip(width,stretch,shrink,stretch_order,shrink_order)
return someskip(leftskip,width,stretch,shrink,stretch_order,shrink_order)
end
-function pool.rightskip(width,stretch,shrink,stretch_order,shrink_order)
+function nutpool.rightskip(width,stretch,shrink,stretch_order,shrink_order)
return someskip(rightskip,width,stretch,shrink,stretch_order,shrink_order)
end
-function pool.lineskip(width,stretch,shrink,stretch_order,shrink_order)
+function nutpool.lineskip(width,stretch,shrink,stretch_order,shrink_order)
return someskip(lineskip,width,stretch,shrink,stretch_order,shrink_order)
end
-function pool.baselineskip(width,stretch,shrink)
+function nutpool.baselineskip(width,stretch,shrink)
return someskip(baselineskip,width,stretch,shrink)
end
-function pool.disc()
- return copy_node(disc)
+function nutpool.disc()
+ return copy_nut(disc)
end
-function pool.textdir(dir)
- local t = copy_node(textdir)
- t.dir = dir
+function nutpool.textdir(dir)
+ local t = copy_nut(textdir)
+ setfield(t,"dir",dir)
return t
end
-function pool.rule(width,height,depth,dir) -- w/h/d == nil will let them adapt
- local n = copy_node(rule)
- if width then n.width = width end
- if height then n.height = height end
- if depth then n.depth = depth end
- if dir then n.dir = dir end
+function nutpool.rule(width,height,depth,dir) -- w/h/d == nil will let them adapt
+ local n = copy_nut(rule)
+ if width then setfield(n,"width",width) end
+ if height then setfield(n,"height",height) end
+ if depth then setfield(n,"depth",depth) end
+ if dir then setfield(n,"dir",dir) end
+ return n
+end
+
+function nutpool.latelua(code)
+ local n = copy_nut(latelua)
+ setfield(n,"string",code)
return n
end
-if node.has_field(latelua,'string') then
- function pool.latelua(code)
- local n = copy_node(latelua)
- n.string = code
+if context and _cldo_ then
+
+ -- a typical case where we have more nodes than nuts
+
+ local context = context
+
+ local f_cldo = string.formatters["_cldo_(%i)"]
+ local register = context.registerfunction
+
+ local latelua_node = register_node(new_node("whatsit",whatsitcodes.latelua))
+ local latelua_nut = register_nut (new_nut ("whatsit",whatsitcodes.latelua))
+
+ local setfield_node = nodes.setfield
+ local setfield_nut = nuts .setfield
+
+ function nodepool.lateluafunction(f)
+ local n = copy_node(latelua_node)
+ setfield_node(n,"string",f_cldo(register(f)))
return n
end
-else
- function pool.latelua(code)
- local n = copy_node(latelua)
- n.data = code
+ function nutpool.lateluafunction(f)
+ local n = copy_nut(latelua_nut)
+ setfield_nut(n,"string",f_cldo(register(f)))
return n
end
+
+ -- when function in latelua:
+
+ -- function nodepool.lateluafunction(f)
+ -- local n = copy_node(latelua_node)
+ -- setfield_node(n,"string",f)
+ -- return n
+ -- end
+ -- function nutpool.lateluafunction(f)
+ -- local n = copy_nut(latelua_nut)
+ -- setfield_nut(n,"string",f)
+ -- return n
+ -- end
+
+ local latefunction = nodepool.lateluafunction
+ local flushnode = context.flushnode
+
+ function context.lateluafunction(f)
+ flushnode(latefunction(f)) -- hm, quite some indirect calls
+ end
+
+ -- when function in latelua:
+
+ -- function context.lateluafunction(f)
+ -- local n = copy_node(latelua_node)
+ -- setfield_node(n,"string",f)
+ -- flushnode(n)
+ -- end
+
+ -- local contextsprint = context.sprint
+ -- local ctxcatcodes = tex.ctxcatcodes
+ -- local storenode = context.storenode
+
+ -- when 0.79 is out:
+
+ -- function context.lateluafunction(f)
+ -- contextsprint(ctxcatcodes,"\\cldl",storenode(latefunction(f))," ")
+ -- end
+
+ -- when function in latelua:
+
+ -- function context.lateluafunction(f)
+ -- local n = copy_node(latelua_node)
+ -- setfield_node(n,"string",f)
+ -- contextsprint(ctxcatcodes,"\\cldl",storenode(n)," ")
+ -- end
+
end
-function pool.leftmarginkern(glyph,width)
- local n = copy_node(left_margin_kern)
+function nutpool.leftmarginkern(glyph,width)
+ local n = copy_nut(left_margin_kern)
if not glyph then
report_nodes("invalid pointer to left margin glyph node")
- elseif glyph.id ~= glyph_code then
+ elseif getid(glyph) ~= glyph_code then
report_nodes("invalid node type %a for %s margin glyph node",nodecodes[glyph],"left")
else
- n.glyph = glyph
+ setfield(n,"glyph",glyph)
end
if width then
- n.width = width
+ setfield(n,"width",width)
end
return n
end
-function pool.rightmarginkern(glyph,width)
- local n = copy_node(right_margin_kern)
+function nutpool.rightmarginkern(glyph,width)
+ local n = copy_nut(right_margin_kern)
if not glyph then
report_nodes("invalid pointer to right margin glyph node")
- elseif glyph.id ~= glyph_code then
+ elseif getid(glyph) ~= glyph_code then
report_nodes("invalid node type %a for %s margin glyph node",nodecodes[p],"right")
else
- n.glyph = glyph
+ setfield(n,"glyph",glyph)
end
if width then
- n.width = width
+ setfield(n,"width",width)
end
return n
end
-function pool.temp()
- return copy_node(temp)
+function nutpool.temp()
+ return copy_nut(temp)
end
-function pool.noad()
- return copy_node(noad)
+function nutpool.noad()
+ return copy_nut(noad)
end
-function pool.hlist(list,width,height,depth)
- local n = copy_node(hlist)
+function nutpool.hlist(list,width,height,depth)
+ local n = copy_nut(hlist)
if list then
- n.list = list
+ setfield(n,"list",list)
end
if width then
- n.width = width
+ setfield(n,"width",width)
end
if height then
- n.height = height
+ setfield(n,"height",height)
end
if depth then
- n.depth = depth
+ setfield(n,"depth",depth)
end
return n
end
-function pool.vlist(list,width,height,depth)
- local n = copy_node(vlist)
+function nutpool.vlist(list,width,height,depth)
+ local n = copy_nut(vlist)
if list then
- n.list = list
+ setfield(n,"list",list)
end
if width then
- n.width = width
+ setfield(n,"width",width)
end
if height then
- n.height = height
+ setfield(n,"height",height)
end
if depth then
- n.depth = depth
+ setfield(n,"depth",depth)
end
return n
end
---[[
-At some point we ran into a problem that the glue specification
-of the zeropoint dimension was overwritten when adapting a glue spec
-node. This is a side effect of glue specs being shared. After a
-couple of hours tracing and debugging Taco and I came to the
-conclusion that it made no sense to complicate the spec allocator
-and settled on a writable flag. This all is a side effect of the
-fact that some glues use reserved memory slots (with the zeropoint
-glue being a noticeable one). So, next we wrap this into a function
-and hide it for the user. And yes, LuaTeX now gives a warning as
-well.
-]]--
-
-function nodes.writable_spec(n) -- not pool
- local spec = n.spec
- if not spec then
- spec = copy_node(glue_spec)
- n.spec = spec
- elseif not spec.writable then
- spec = copy_node(spec)
- n.spec = spec
- end
- return spec
-end
-
-- local num = userids["my id"]
-- local str = userids[num]
-local userids = allocate() pool.userids = userids
-local lastid = 0
-
-setmetatable(userids, {
- __index = function(t,k)
- if type(k) == "string" then
- lastid = lastid + 1
- rawset(userids,lastid,k)
- rawset(userids,k,lastid)
- return lastid
- else
- rawset(userids,k,k)
- return k
- end
- end,
- __call = function(t,k)
- return t[k]
- end
-} )
-
-function pool.usernumber(id,num)
- local n = copy_node(user_n)
+function nutpool.usernumber(id,num)
+ local n = copy_nut(user_n)
if num then
- n.user_id, n.value = id, num
+ setfield(n,"user_id",id)
+ setfield(n,"value",num)
elseif id then
- n.value = id
+ setfield(n,"value",id)
end
return n
end
-function pool.userlist(id,list)
- local n = copy_node(user_l)
+function nutpool.userlist(id,list)
+ local n = copy_nut(user_l)
if list then
- n.user_id, n.value = id, list
+ setfield(n,"user_id",id)
+ setfield(n,"value",list)
else
- n.value = id
+ setfield(n,"value",id)
end
return n
end
-function pool.userstring(id,str)
- local n = copy_node(user_s)
+function nutpool.userstring(id,str)
+ local n = copy_nut(user_s)
if str then
- n.user_id, n.value = id, str
+ setfield(n,"user_id",id)
+ setfield(n,"value",str)
else
- n.value = id
+ setfield(n,"value",id)
end
return n
end
-function pool.usertokens(id,tokens)
- local n = copy_node(user_t)
+function nutpool.usertokens(id,tokens)
+ local n = copy_nut(user_t)
if tokens then
- n.user_id, n.value = id, tokens
+ setfield(n,"user_id",id)
+ setfield(n,"value",tokens)
else
- n.value = id
+ setfield(n,"value",id)
end
return n
end
-function pool.special(str)
- local n = copy_node(special)
- n.data = str
+-- function nutpool.usercode(id,code)
+-- local n = copy_nut(user_c)
+-- if code then
+-- setfield(n,"user_id",id)
+-- setfield(n,"value",code)
+-- else
+-- setfield(n,"value",id)
+-- end
+-- return n
+-- end
+
+function nutpool.special(str)
+ local n = copy_nut(special)
+ setfield(n,"data",str)
return n
end
+-- housekeeping
+
+local function cleanup(nofboxes) -- todo
+ if nodes.tracers.steppers then -- to be resolved
+ nodes.tracers.steppers.reset() -- todo: make a registration subsystem
+ end
+ local nl, nr = 0, nofreserved
+ for i=1,nofreserved do
+ local ri = reserved[i]
+ -- if not (getid(ri) == glue_spec and not getfield(ri,"is_writable")) then
+ free_nut(reserved[i])
+ -- end
+ end
+ if nofboxes then
+ for i=0,nofboxes do
+ local l = getbox(i)
+ if l then
+-- print(nodes.listtoutf(getlist(l)))
+ free_nut(l) -- also list ?
+ nl = nl + 1
+ end
+ end
+ end
+ reserved = { }
+ nofreserved = 0
+ return nr, nl, nofboxes -- can be nil
+end
+
+
+local function usage()
+ local t = { }
+ for n, tag in gmatch(status.node_mem_usage,"(%d+) ([a-z_]+)") do
+ t[tag] = n
+ end
+ return t
+end
+
+nutpool .cleanup = cleanup
+nodepool.cleanup = cleanup
+
+nutpool .usage = usage
+nodepool.usage = usage
+
+-- end
+
statistics.register("cleaned up reserved nodes", function()
- return format("%s nodes, %s lists of %s", pool.cleanup(texgetcount("c_syst_last_allocated_box")))
+ return format("%s nodes, %s lists of %s", cleanup(texgetcount("c_syst_last_allocated_box")))
end) -- \topofboxstack
statistics.register("node memory usage", function() -- comes after cleanup !
return status.node_mem_usage
end)
-lua.registerfinalizer(pool.cleanup, "cleanup reserved nodes")
+lua.registerfinalizer(cleanup, "cleanup reserved nodes")
diff --git a/tex/context/base/node-rul.lua b/tex/context/base/node-rul.lua
index 96d6bdf41..36d56a16c 100644
--- a/tex/context/base/node-rul.lua
+++ b/tex/context/base/node-rul.lua
@@ -13,12 +13,28 @@ if not modules then modules = { } end modules ['node-rul'] = {
local attributes, nodes, node = attributes, nodes, node
-local nodecodes = nodes.nodecodes
-local tasks = nodes.tasks
-
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local rule_code = nodecodes.rule
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+local getlist = nuts.getlist
+
+local nodecodes = nodes.nodecodes
+local tasks = nodes.tasks
+
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local rule_code = nodecodes.rule
function nodes.striprange(first,last) -- todo: dir
if first and last then -- just to be sure
@@ -26,11 +42,11 @@ function nodes.striprange(first,last) -- todo: dir
return first, last
end
while first and first ~= last do
- local id = first.id
+ local id = getid(first)
if id == glyph_code or id == disc_code then -- or id == rule_code
break
else
- first = first.next
+ first = getnext(first)
end
end
if not first then
@@ -39,13 +55,13 @@ function nodes.striprange(first,last) -- todo: dir
return first, last
end
while last and last ~= first do
- local id = last.id
+ local id = getid(last)
if id == glyph_code or id == disc_code then -- or id == rule_code
break
else
- local prev = last.prev -- luatex < 0.70 has italic correction kern not prev'd
+ local prev = getprev(last) -- luatex < 0.70 has italic correction kern not prev'd
if prev then
- last = last.prev
+ last = prev
else
break
end
@@ -73,12 +89,12 @@ local a_color = attributes.private('color')
local a_transparency = attributes.private('transparency')
local a_colorspace = attributes.private('colormodel')
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local striprange = nodes.striprange
-local list_dimensions = node.dimensions
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local list_dimensions = nuts.dimensions
+local hpack_nodes = nuts.hpack
-local hpack_nodes = node.hpack
+local striprange = nodes.striprange
local fontdata = fonts.hashes.identifiers
local variables = interfaces.variables
@@ -111,7 +127,7 @@ local dir_code = whatcodes.dir
local kerning_code = kerncodes.kern
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local new_rule = nodepool.rule
local new_kern = nodepool.kern
@@ -141,9 +157,9 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
local f, l, a, d, i, class
local continue, done, strip, level = false, false, true, -1
while n do
- local id = n.id
+ local id = getid(n)
if id == glyph_code or id == rule_code then
- local aa = n[attribute]
+ local aa = getattr(n,attribute)
if aa then
if aa == a then
if not f then -- ?
@@ -172,13 +188,13 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
end
f, l, a = nil, nil, nil
end
--- elseif f and (id == disc_code or (id == kern_code and n.subtype == kerning_code)) then
+-- elseif f and (id == disc_code or (id == kern_code and getsubtype(n) == kerning_code)) then
-- l = n
elseif id == disc_code then
if f then
l = n
end
- elseif id == kern_code and n.subtype == kerning_code then
+ elseif id == kern_code and getsubtype(n) == kerning_code then
if f then
l = n
end
@@ -187,11 +203,11 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
head, done = flush(head,f,l,d,level,parent,strip), true
f, l, a = nil, nil, nil
end
- local list = n.list
+ local list = getlist(n)
if list then
- n.list = processwords(attribute,data,flush,list,n)
+ setfield(n,"list",(processwords(attribute,data,flush,list,n))) -- watch ()
end
- elseif checkdir and id == whatsit_code and n.subtype == dir_code then -- only changes in dir, we assume proper boundaries
+ elseif checkdir and id == whatsit_code and getsubtype(n) == dir_code then -- only changes in dir, we assume proper boundaries
if f and a then
l = n
end
@@ -203,8 +219,8 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
-- l = n
elseif id == glue_code then
-- catch \underbar{a} \underbar{a} (subtype test is needed)
- local subtype = n.subtype
- if n[attribute] and (subtype == userskip_code or subtype == spaceskip_code or subtype == xspaceskip_code) then
+ local subtype = getsubtype(n)
+ if getattr(n,attribute) and (subtype == userskip_code or subtype == spaceskip_code or subtype == xspaceskip_code) then
l = n
else
head, done = flush(head,f,l,d,level,parent,strip), true
@@ -216,7 +232,7 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
f, l, a = nil, nil, nil
end
end
- n = n.next
+ n = getnext(n)
end
if f then
head, done = flush(head,f,l,d,level,parent,strip), true
@@ -227,7 +243,16 @@ local function processwords(attribute,data,flush,head,parent) -- we have hlistdi
end
end
-nodes.processwords = processwords
+-- nodes.processwords = processwords
+
+nodes.processwords = function(attribute,data,flush,head,parent) -- we have hlistdir and local dir
+ head = tonut(head)
+ if parent then
+ parent = tonut(parent)
+ end
+ local head, done = processwords(attribute,data,flush,head,parent)
+ return tonode(head), done
+end
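+
+-- The same boundary pattern works for user code that wants to process nuts but
+-- gets called with regular nodes; a minimal sketch (not part of the interface):
+--
+-- local function myhandler(head)
+--     local h = tonut(head)
+--     -- ... walk h with getid, getnext and friends ...
+--     return tonode(h), true
+-- end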
--
@@ -246,7 +271,7 @@ end
local a_viewerlayer = attributes.private("viewerlayer")
local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but acceptable for this purpose
- if f.id ~= glyph_code then
+ if getid(f) ~= glyph_code then
 -- safeguard ... we need to deal with rules and so on (math)
return head
end
@@ -264,16 +289,16 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
if not f then
return head
end
- local w = list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,f,l.next)
+ local w = list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign"),getfield(parent,"glue_order"),f,getnext(l))
local method, offset, continue, dy, order, max = d.method, d.offset, d.continue, d.dy, d.order, d.max
local rulethickness, unit = d.rulethickness, d.unit
local ma, ca, ta = d.ma, d.ca, d.ta
- local colorspace = ma > 0 and ma or f[a_colorspace] or 1
- local color = ca > 0 and ca or f[a_color]
- local transparency = ta > 0 and ta or f[a_transparency]
+ local colorspace = ma > 0 and ma or getattr(f,a_colorspace) or 1
+ local color = ca > 0 and ca or getattr(f,a_color)
+ local transparency = ta > 0 and ta or getattr(f,a_transparency)
local foreground = order == v_foreground
- local e = dimenfactor(unit,f.font) -- what if no glyph node
+ local e = dimenfactor(unit,getfont(f)) -- what if no glyph node
local rt = tonumber(rulethickness)
if rt then
@@ -281,7 +306,7 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
else
local n, u = splitdimen(rulethickness)
if n and u then -- we need to intercept ex and em and % and ...
- rulethickness = n * dimenfactor(u,fontdata[f.font]) / 2
+ rulethickness = n * dimenfactor(u,fontdata[getfont(f)]) / 2
else
rulethickness = 1/5
end
@@ -300,18 +325,18 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
local ht = (offset+(i-1)*dy)*e + rulethickness - m
local dp = -(offset+(i-1)*dy)*e + rulethickness + m
local r = new_rule(w,ht,dp)
- local v = f[a_viewerlayer]
+ local v = getattr(f,a_viewerlayer)
-- quick hack
if v then
- r[a_viewerlayer] = v
+ setattr(r,a_viewerlayer,v)
end
--
if color then
- r[a_colorspace] = colorspace
- r[a_color] = color
+ setattr(r,a_colorspace,colorspace)
+ setattr(r,a_color,color)
end
if transparency then
- r[a_transparency] = transparency
+ setattr(r,a_transparency,transparency)
end
local k = new_kern(-w)
if foreground then
@@ -365,21 +390,27 @@ local function flush_shifted(head,first,last,data,level,parent,strip) -- not tha
if true then
first, last = striprange(first,last)
end
- local prev, next = first.prev, last.next
- first.prev, last.next = nil, nil
- local width, height, depth = list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,first,next)
+ local prev = getprev(first)
+ local next = getnext(last)
+ setfield(first,"prev",nil)
+ setfield(last,"next",nil)
+ local width, height, depth = list_dimensions(getfield(parent,"glue_set"),getfield(parent,"glue_sign"),getfield(parent,"glue_order"),first,next)
local list = hpack_nodes(first,width,"exactly")
if first == head then
head = list
end
if prev then
- prev.next, list.prev = list, prev
+ setfield(prev,"next",list)
+ setfield(list,"prev",prev)
end
if next then
- next.prev, list.next = list, next
+ setfield(next,"prev",list)
+ setfield(list,"next",next)
end
- local raise = data.dy * dimenfactor(data.unit,fontdata[first.font])
- list.shift, list.height, list.depth = raise, height, depth
+ local raise = data.dy * dimenfactor(data.unit,fontdata[getfont(first)])
+ setfield(list,"shift",raise)
+ setfield(list,"height",height)
+ setfield(list,"depth",depth)
if trace_shifted then
report_shifted("width %p, nodes %a, text %a",width,n_tostring(first,last),n_tosequence(first,last,true))
end
@@ -393,3 +424,52 @@ nodes.shifts.handler = function(head) return process(a_shifted,data,flush_shifte
function nodes.shifts.enable()
tasks.enableaction("shipouts","nodes.shifts.handler")
end
+
+-- interface
+
+local implement = interfaces.implement
+
+implement {
+ name = "definerule",
+ actions = { nodes.rules.define, context },
+ arguments = {
+ {
+ { "continue" },
+ { "unit" },
+ { "order" },
+ { "method", "integer" },
+ { "offset", "number" },
+ { "rulethickness", "string" },
+ { "dy", "number" },
+ { "max", "number" },
+ { "ma", "integer" },
+ { "ca", "integer" },
+ { "ta", "integer" },
+ }
+ }
+}
+
+implement {
+ name = "enablerules",
+ onlyonce = true,
+ actions = nodes.rules.enable
+}
+
+implement {
+ name = "defineshift",
+ actions = { nodes.shifts.define, context },
+ arguments = {
+ {
+ { "continue" },
+ { "unit" },
+ { "method", "integer" },
+ { "dy", "number" },
+ }
+ }
+}
+
+implement {
+ name = "enableshifts",
+ onlyonce = true,
+ actions = nodes.shifts.enable
+}
diff --git a/tex/context/base/node-rul.mkiv b/tex/context/base/node-rul.mkiv
index 2d2e61134..7fa0473a5 100644
--- a/tex/context/base/node-rul.mkiv
+++ b/tex/context/base/node-rul.mkiv
@@ -112,19 +112,20 @@
\unexpanded\def\node_rules_define
{\edef\p_node_rules_color{\barparameter\c!color}%
- \setevalue{\??barattribute\currentbar}{\number\ctxlua{nodes.rules.define {
- method = \barparameter\c!method,
- offset = \barparameter\c!offset,
- continue = "\barparameter\c!continue",
- dy = \barparameter\c!dy,
- rulethickness = "\barparameter\c!rulethickness",
- unit = "\barparameter\c!unit",
- order = "\barparameter\c!order",
- max = \barparameter\c!max,
- ma = \thecolormodelattribute,
- ca = \thecolorattribute\p_node_rules_color,
- ta = \thetransparencyattribute\p_node_rules_color
- }}}}
+ \setevalue{\??barattribute\currentbar}{\number
+ \clf_definerule
+ continue {\barparameter\c!continue}%
+ unit {\barparameter\c!unit}%
+ order {\barparameter\c!order}%
+ rulethickness {\barparameter\c!rulethickness}%
+ method \barparameter\c!method
+ ma \thecolormodelattribute
+ ca \thecolorattribute\p_node_rules_color
+ ta \thetransparencyattribute\p_node_rules_color
+ offset \barparameter\c!offset\space % number
+ dy \barparameter\c!dy\space % number
+ max \barparameter\c!max
+ \relax}}
\unexpanded\def\node_rules_redefine#1%
{\def\currentbar{#1}\node_rules_define}
@@ -132,13 +133,16 @@
\unexpanded\def\node_rules_direct#1%
{\groupedcommand{\node_rules_set{#1}}\relax}
-\def\node_rules_set
- {\ctxlua{nodes.rules.enable()}% will be moved to lua
- \glet\node_rules_set\node_rules_set_indeed
- \node_rules_set}
+% \unexpanded\def\node_rules_set
+% {\clf_enablerules % will be moved to lua
+% \glet\node_rules_set\node_rules_set_indeed
+% \node_rules_set}
+%
+%\unexpanded\def\node_rules_set_indeed#1% maybe reverse the 1000 (also maybe use more attributes instead of settings)
-\def\node_rules_set_indeed#1% maybe reverse the 1000 (also maybe use more attributes instead of settings)
- {\edef\currentbar{#1}%
+\unexpanded\def\node_rules_set#1% maybe reverse the 1000 (also maybe use more attributes instead of settings)
+ {\clf_enablerules % will be relaxed
+ \edef\currentbar{#1}%
\expandafter\let\expandafter\c_node_rules_index\csname\??barindex#1\endcsname
\advance\c_node_rules_index\plusone
\usebarstyleandcolor\c!foregroundstyle\c!foregroundcolor
@@ -157,6 +161,8 @@
\unexpanded\def\setbar[#1]%
{\node_rules_set{#1}}
+\let\directsetbar\node_rules_set
+
% ungrouped
\newcount\c_node_rules_nesting % todo: same as colors
@@ -279,31 +285,27 @@
\to \everydefineshift
\unexpanded\def\node_shifts_define
- {\setevalue{\??shiftattribute\currentshift}{\number\ctxlua{nodes.shifts.define {
- method = \shiftparameter\c!method,
- continue = "\shiftparameter\c!continue",
- dy = \shiftparameter\c!dy,
- unit = "\shiftparameter\c!unit",
- }}}}
+ {\setevalue{\??shiftattribute\currentshift}{\number
+ \clf_defineshift
+ continue {\shiftparameter\c!continue}%
+ unit {\shiftparameter\c!unit}%
+ method \shiftparameter\c!method
+ dy \shiftparameter\c!dy % number
+ \relax}}
\unexpanded\def\node_shifts_redefine#1%
{\def\currentshift{#1}\node_shifts_define}
-\unexpanded\def\node_shifts_set
- {\ctxlua{nodes.shifts.enable()}%
- \glet\node_shifts_set\node_shifts_set_indeed
- \node_shifts_set}
-
-% \unexpanded\def\node_shifts_direct#1%
-% {\doisolatedgroupedalign{\node_shifts_set{#1}}\donothing}
-
-\unexpanded\def\node_shifts_direct#1%
- {\groupedcommand
- {\begingroup\dostartisolation\begingroup\node_shifts_set{#1}}
- {\endgroup\dostopisolation\endgroup}}
+% \unexpanded\def\node_shifts_set
+% {\clf_enableshifts
+% \glet\node_shifts_set\node_shifts_set_indeed
+% \node_shifts_set}
+%
+% \def\node_shifts_set_indeed#1% todo: check parent !
-\def\node_shifts_set_indeed#1% todo: check parent !
- {\def\currentshift{#1}%
+\unexpanded\def\node_shifts_set#1% todo: check parent !
+ {\clf_enableshifts
+ \def\currentshift{#1}%
\expandafter\let\expandafter\c_node_shifts_index\csname\??shiftindex#1\endcsname
\advance\c_node_shifts_index\plusone
\attribute\shiftedattribute\numexpr
@@ -320,6 +322,14 @@
\unexpanded\def\stopshift
{\endgroup}
+% \unexpanded\def\node_shifts_direct#1%
+% {\doisolatedgroupedalign{\node_shifts_set{#1}}\donothing}
+
+\unexpanded\def\node_shifts_direct#1%
+ {\groupedcommand
+ {\begingroup\dostartisolation\begingroup\node_shifts_set{#1}}
+ {\endgroup\dostopisolation\endgroup}}
+
\setupshifts
[\c!method=0,
\c!continue=\v!no,
diff --git a/tex/context/base/node-shp.lua b/tex/context/base/node-shp.lua
index 6ebfd767f..42cc83b8f 100644
--- a/tex/context/base/node-shp.lua
+++ b/tex/context/base/node-shp.lua
@@ -15,6 +15,8 @@ local setmetatableindex = table.setmetatableindex
local nodecodes = nodes.nodecodes
local whatsitcodes = nodes.whatsitcodes
+local disccodes = nodes.disccodes
+
local tasks = nodes.tasks
local handlers = nodes.handlers
@@ -26,11 +28,27 @@ local kern_code = nodecodes.kern
local glue_code = nodecodes.glue
local whatsit_code = nodecodes.whatsit
+local fulldisc_code = disccodes.discretionary
+
local texgetbox = tex.getbox
-local free_node = node.free
-local remove_node = node.remove
-local traverse_nodes = node.traverse
+local implement = interfaces.implement
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+local free_node = nuts.free
+local remove_node = nuts.remove
+local traverse_nodes = nuts.traverse
+local find_tail = nuts.tail
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getid = nuts.getid
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getlist = nuts.getlist
+local getsubtype = nuts.getsubtype
local removables = {
[whatsitcodes.open] = true,
@@ -41,38 +59,63 @@ local removables = {
[whatsitcodes.latelua] = true,
}
-local function cleanup_redundant(head)
+-- About 10% of the nodes make no sense for the backend. By (at least) flattening
+-- disc nodes into their replace text, we can omit extensive checking in the
+-- finalizer code (e.g. colors in disc nodes). Removing more nodes (like marks)
+-- does not save much, and removing empty boxes is even dangerous because we may
+-- rely on their dimensions (e.g. in references).
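+--
+-- Flattening means that a list like
+--
+--   A [pre|post|replace] B
+--
+-- ends up as A <replace> B in the shipped out result; the pre and post parts are
+-- discarded because at this stage the break is never taken.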
+
+local wipedisc = false -- we can use them in the export ... wiping could be an option
+
+local function cleanup_redundant(head) -- better name is: flatten_page
local start = head
while start do
- local id = start.id
+ local id = getid(start)
if id == disc_code then
- head, start = remove_node(head,start,true)
- -- elseif id == glue_code then
- -- if start.writable then
- -- start = start.next
- -- elseif some_complex_check_on_glue_spec then
- -- head, start = remove_node(head,start,true)
- -- else
- -- start = start.next
- -- end
- elseif id == kern_code then
- if start.kern == 0 then
- head, start = remove_node(head,start,true)
+ if getsubtype(start) == fulldisc_code then
+ local replace = getfield(start,"replace")
+ if replace then
+ local prev = getprev(start)
+ local next = getnext(start)
+ local tail = find_tail(replace)
+ setfield(start,"replace",nil)
+ if start == head then
+ remove_node(head,start,true)
+ head = replace
+ else
+ remove_node(head,start,true)
+ end
+ if next then
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ end
+ if prev then
+ setfield(prev,"next",replace)
+ setfield(replace,"prev",prev)
+ else
+ setfield(replace,"prev",nil) -- to be sure
+ end
+ start = next
+ elseif wipedisc then
+ -- pre and post can have values
+ head, start = remove_node(head,start,true)
+ else
+ start = getnext(start)
+ end
else
- start = start.next
+ start = getnext(start)
end
- elseif id == mark_code then
- head, start = remove_node(head,start,true)
elseif id == hlist_code or id == vlist_code then
- local sl = start.list
+ local sl = getlist(start)
if sl then
- start.list = cleanup_redundant(sl)
- start = start.next
- else
- head, start = remove_node(head,start,true)
+ local rl = cleanup_redundant(sl)
+ if rl ~= sl then
+ setfield(start,"list",rl)
+ end
end
+ start = getnext(start)
else
- start = start.next
+ start = getnext(start)
end
end
return head
@@ -81,54 +124,49 @@ end
local function cleanup_flushed(head) -- rough
local start = head
while start do
- local id = start.id
- if id == whatsit_code and removables[start.subtype] then
- head, start = remove_node(head,start,true)
+ local id = getid(start)
+ if id == whatsit_code then
+ if removables[getsubtype(start)] then
+ head, start = remove_node(head,start,true)
+ else
+ start = getnext(start)
+ end
elseif id == hlist_code or id == vlist_code then
- local sl = start.list
+ local sl = getlist(start)
if sl then
- start.list = cleanup_flushed(sl)
- start = start.next
- else
- head, start = remove_node(head,start,true)
+ local rl = cleanup_flushed(sl)
+ if rl ~= sl then
+ setfield(start,"list",rl)
+ end
end
+ start = getnext(start)
else
- start = start.next
+ start = getnext(start)
end
end
return head
end
function handlers.cleanuppage(head)
- -- about 10% of the nodes make no sense for the backend
- return cleanup_redundant(head), true
+ return tonode(cleanup_redundant(tonut(head))), true
end
function handlers.cleanupbox(head)
- return cleanup_flushed(head), true
+ return tonode(cleanup_flushed(tonut(head))), true
end
-directives.register("backend.cleanup", function()
- tasks.enableaction("shipouts","nodes.handlers.cleanuppage")
-end)
-
local actions = tasks.actions("shipouts") -- no extra arguments
function handlers.finalize(head) -- problem, attr loaded before node, todo ...
return actions(head)
end
-function commands.cleanupbox(n)
- cleanup_flushed(texgetbox(n))
-end
-
-- handlers.finalize = actions
-- interface
-function commands.finalizebox(n)
- actions(texgetbox(n))
-end
+implement { name = "cleanupbox", actions = { texgetbox, cleanup_flushed }, arguments = "integer" }
+implement { name = "finalizebox", actions = { texgetbox, actions }, arguments = "integer" }
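+
+-- At the TeX end these show up as \clf_cleanupbox and \clf_finalizebox, each
+-- scanning one (box) integer, following the same \clf_ naming convention as
+-- definerule and enablerules.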
-- just in case we want to optimize lookups:
@@ -158,12 +196,12 @@ local function count(head,data,subcategory)
 -- no components, pre, post, replace .. could maybe be an option .. but
 -- we use this for optimization so it makes sense to look at the
 -- main node only
- for n in traverse_nodes(head) do
- local id = n.id
- local dn = data[nodecodes[n.id]]
+ for n in traverse_nodes(tonut(head)) do
+ local id = getid(n)
+ local dn = data[nodecodes[id]] -- we could use id and then later convert to nodecodes
dn[subcategory] = dn[subcategory] + 1
if id == hlist_code or id == vlist_code then
- count(n.list,data,subcategory)
+ count(getfield(n,"list"),data,subcategory)
end
end
end
diff --git a/tex/context/base/node-tex.lua b/tex/context/base/node-tex.lua
index 2170e0603..c9d3091df 100644
--- a/tex/context/base/node-tex.lua
+++ b/tex/context/base/node-tex.lua
@@ -6,33 +6,32 @@ if not modules then modules = { } end modules ['node-tex'] = {
license = "see context related readme files"
}
-local format = string.format
+builders = builders or { }
+local kernel = builders.kernel or { }
+builders.kernel = kernel
-builders = builders or { }
-builders.kernel = builders.kernel or { }
-local kernel = builders.kernel
+local hyphenate = lang.hyphenate
+local ligaturing = node.ligaturing
+local kerning = node.kerning
-local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
-local hyphenate, ligaturing, kerning = lang.hyphenate, node.ligaturing, node.kerning
+kernel.originals = {
+ hyphenate = hyphenate,
+ ligaturing = ligaturing,
+ kerning = kerning,
+}
function kernel.hyphenation(head)
- -- starttiming(kernel)
local done = hyphenate(head)
- -- stoptiming(kernel)
return head, done
end
function kernel.ligaturing(head)
- -- starttiming(kernel)
- local head, tail, done = ligaturing(head) -- todo: check what is returned
- -- stoptiming(kernel)
+ local head, tail, done = ligaturing(head) -- we return 3 values indeed
return head, done
end
function kernel.kerning(head)
- -- starttiming(kernel)
- local head, tail, done = kerning(head) -- todo: check what is returned
- -- stoptiming(kernel)
+ local head, tail, done = kerning(head) -- we return 3 values indeed
return head, done
end
diff --git a/tex/context/base/node-tra.lua b/tex/context/base/node-tra.lua
index 9617f7476..a7ab7f77f 100644
--- a/tex/context/base/node-tra.lua
+++ b/tex/context/base/node-tra.lua
@@ -34,9 +34,30 @@ nodes.handlers = handlers
local injections = nodes.injections or { }
nodes.injections = injections
-local traverse_nodes = node.traverse
-local traverse_by_id = node.traverse_id
-local count_nodes = nodes.count
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getchar = nuts.getchar
+local getsubtype = nuts.getsubtype
+local getlist = nuts.getlist
+
+local setattr = nuts.setattr
+
+local flush_list = nuts.flush_list
+local count_nodes = nuts.count
+local used_nodes = nuts.usedlist
+
+local traverse_by_id = nuts.traverse_id
+local traverse_nodes = nuts.traverse
+local d_tostring = nuts.tostring
+
+local nutpool = nuts.pool
+local new_rule = nutpool.rule
local nodecodes = nodes.nodecodes
local whatcodes = nodes.whatcodes
@@ -56,10 +77,8 @@ local gluespec_code = nodecodes.gluespec
local localpar_code = whatcodes.localpar
local dir_code = whatcodes.dir
-local nodepool = nodes.pool
-local new_rule = nodepool.rule
-
local dimenfactors = number.dimenfactors
+local fillorders = nodes.fillcodes
local formatters = string.formatters
-- this will be reorganized:
@@ -68,15 +87,16 @@ function nodes.showlist(head, message)
if message then
report_nodes(message)
end
- for n in traverse_nodes(head) do
- report_nodes(tostring(n))
+ for n in traverse_nodes(tonut(head)) do
+ report_nodes(d_tostring(n))
end
end
function nodes.handlers.checkglyphs(head,message)
+ local h = tonut(head)
local t = { }
- for g in traverse_by_id(glyph_code,head) do
- t[#t+1] = formatters["%U:%s"](g.char,g.subtype)
+ for g in traverse_by_id(glyph_code,h) do
+ t[#t+1] = formatters["%U:%s"](getchar(g),getsubtype(g))
end
if #t > 0 then
if message and message ~= "" then
@@ -90,12 +110,12 @@ end
function nodes.handlers.checkforleaks(sparse)
local l = { }
- local q = node.usedlist()
- for p in traverse(q) do
- local s = table.serialize(nodes.astable(p,sparse),nodecodes[p.id])
+ local q = used_nodes()
+ for p in traverse_nodes(q) do
+ local s = table.serialize(nodes.astable(p,sparse),nodecodes[getid(p)])
l[s] = (l[s] or 0) + 1
end
- node.flush_list(q)
+ flush_list(q)
for k, v in next, l do
report_nodes("%s * %s",v,k)
end
@@ -105,39 +125,40 @@ local f_sequence = formatters["U+%04X:%s"]
local function tosequence(start,stop,compact)
if start then
+ start = tonut(start)
+ stop = stop and tonut(stop)
local t = { }
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
- local c = start.char
+ local c = getchar(start)
if compact then
- if start.components then
- t[#t+1] = tosequence(start.components,nil,compact)
+ local components = getfield(start,"components")
+ if components then
+ t[#t+1] = tosequence(components,nil,compact)
else
t[#t+1] = utfchar(c)
end
else
t[#t+1] = f_sequence(c,utfchar(c))
end
- elseif id == whatsit_code and start.subtype == localpar_code or start.subtype == dir_code then
- t[#t+1] = "[" .. start.dir .. "]"
elseif id == rule_code then
if compact then
t[#t+1] = "|"
else
t[#t+1] = nodecodes[id]
end
+ elseif id == whatsit_code and getsubtype(start) == localpar_code or getsubtype(start) == dir_code then
+ t[#t+1] = "[" .. getfield(start,"dir") .. "]"
+ elseif compact then
+ t[#t+1] = "[]"
else
- if compact then
- t[#t+1] = "[]"
- else
- t[#t+1] = nodecodes[id]
- end
+ t[#t+1] = nodecodes[id]
end
if start == stop then
break
else
- start = start.next
+ start = getnext(start)
end
end
if compact then
@@ -151,23 +172,26 @@ local function tosequence(start,stop,compact)
end
nodes.tosequence = tosequence
+nuts .tosequence = tosequence
function nodes.report(t,done)
- report_nodes("output %a, %changed %a, %s nodes",status.output_active,done,count_nodes(t))
+ report_nodes("output %a, %changed %a, %s nodes",status.output_active,done,count_nodes(tonut(t)))
end
function nodes.packlist(head)
local t = { }
- for n in traverse(head) do
- t[#t+1] = tostring(n)
+ for n in traverse_nodes(tonut(head)) do
+ t[#t+1] = d_tostring(n)
end
return t
end
function nodes.idstostring(head,tail)
+ head = tonut(head)
+ tail = tail and tonut(tail)
local t, last_id, last_n = { }, nil, 0
for n in traverse_nodes(head,tail) do -- hm, does not stop at tail
- local id = n.id
+ local id = getid(n)
if not last_id then
last_id, last_n = id, 1
elseif last_id == id then
@@ -195,6 +219,8 @@ function nodes.idstostring(head,tail)
end
-- function nodes.xidstostring(head,tail) -- only for special tracing of backlinks
+-- head = tonut(head)
+-- tail = tonut(tail)
-- local n = head
-- while n.next do
-- n = n.next
@@ -217,7 +243,7 @@ end
-- if n == head then
-- break
-- end
--- n = n.prev
+-- n = getprev(n)
-- end
-- if not last_id then
-- t[#t+1] = "no nodes"
@@ -230,51 +256,56 @@ end
-- end
local function showsimplelist(h,depth,n)
+ h = h and tonut(h)
while h do
report_nodes("% w%s",n,d_tostring(h))
if not depth or n < depth then
- local id = h.id
+ local id = getid(h)
if id == hlist_code or id == vlist_code then
- showsimplelist(h.list,depth,n+1)
+ showsimplelist(getlist(h),depth,n+1)
end
end
- h = h.next
+ h = getnext(h)
end
end
---~ \startluacode
---~ callback.register('buildpage_filter',function() nodes.show_simple_list(tex.lists.contrib_head) end)
---~ \stopluacode
---~ \vbox{b\footnote{n}a}
---~ \startluacode
---~ callback.register('buildpage_filter',nil)
---~ \stopluacode
+-- \startluacode
+-- callback.register('buildpage_filter',function() nodes.show_simple_list(tex.lists.contrib_head) end)
+-- \stopluacode
+-- \vbox{b\footnote{n}a}
+-- \startluacode
+-- callback.register('buildpage_filter',nil)
+-- \stopluacode
nodes.showsimplelist = function(h,depth) showsimplelist(h,depth,0) end
local function listtoutf(h,joiner,textonly,last)
- local joiner = (joiner == true and utfchar(0x200C)) or joiner -- zwnj
local w = { }
while h do
- local id = h.id
+ local id = getid(h)
if id == glyph_code then -- always true
- local c = h.char
+ local c = getchar(h)
w[#w+1] = c >= 0 and utfchar(c) or formatters["<%i>"](c)
if joiner then
w[#w+1] = joiner
end
elseif id == disc_code then
- local pre = h.pre
- local pos = h.post
- local rep = h.replace
+ local pre = getfield(h,"pre")
+ local pos = getfield(h,"post")
+ local rep = getfield(h,"replace")
w[#w+1] = formatters["[%s|%s|%s]"] (
pre and listtoutf(pre,joiner,textonly) or "",
pos and listtoutf(pos,joiner,textonly) or "",
rep and listtoutf(rep,joiner,textonly) or ""
)
elseif textonly then
- if id == glue_code and h.spec and h.spec.width > 0 then
- w[#w+1] = " "
+ if id == glue_code then
+ local spec = getfield(h,"spec")
+ if spec and getfield(spec,"width") > 0 then
+ w[#w+1] = " "
+ end
+ elseif id == hlist_code or id == vlist_code then
+ w[#w+1] = "[]"
end
else
w[#w+1] = "[-]"
@@ -282,24 +313,32 @@ local function listtoutf(h,joiner,textonly,last)
if h == last then
break
else
- h = h.next
+ h = getnext(h)
end
end
return concat(w)
end
-nodes.listtoutf = listtoutf
+function nodes.listtoutf(h,joiner,textonly,last)
+ if h then
+ local joiner = joiner == true and utfchar(0x200C) or joiner -- zwnj
+ return listtoutf(tonut(h),joiner,textonly,last and tonut(last))
+ else
+ return ""
+ end
+end
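+
+-- A quick tracing sketch (box 0 is only an example, any hlist will do):
+--
+-- local b = tex.getbox(0)
+-- if b then
+--     print(nodes.listtoutf(b.list,true,true))
+-- end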
local what = { [0] = "unknown", "line", "box", "indent", "row", "cell" }
local function showboxes(n,symbol,depth)
- depth, symbol = depth or 0, symbol or "."
- for n in traverse_nodes(n) do
- local id = n.id
+ depth = depth or 0
+ symbol = symbol or "."
+ for n in traverse_nodes(tonut(n)) do
+ local id = getid(n)
if id == hlist_code or id == vlist_code then
- local s = n.subtype
+ local s = getsubtype(n)
report_nodes(rep(symbol,depth) .. what[s] or s)
- showboxes(n.list,symbol,depth+1)
+ showboxes(getlist(n),symbol,depth+1)
end
end
end
@@ -320,70 +359,180 @@ local stripper = lpeg.patterns.stripzeros
--
-- redefined:
-local dimenfactors = number.dimenfactors
+-- local function nodetodimen(d,unit,fmt,strip)
+-- d = tonut(d) -- tricky: direct nuts are an issue
+-- if unit == true then
+-- unit = "pt"
+-- fmt = "%0.5f%s"
+-- else
+-- unit = unit or 'pt'
+-- if not fmt then
+-- fmt = "%s%s"
+-- elseif fmt == true then
+-- fmt = "%0.5f%s"
+-- end
+-- end
+-- local id = getid(d)
+-- if id == kern_code then
+-- local str = formatters[fmt](getfield(d,"width")*dimenfactors[unit],unit)
+-- return strip and lpegmatch(stripper,str) or str
+-- end
+-- if id == glue_code then
+-- d = getfield(d,"spec")
+-- end
+-- if not d or not getid(d) == gluespec_code then
+-- local str = formatters[fmt](0,unit)
+-- return strip and lpegmatch(stripper,str) or str
+-- end
+-- local width = getfield(d,"width")
+-- local plus = getfield(d,"stretch_order")
+-- local minus = getfield(d,"shrink_order")
+-- local stretch = getfield(d,"stretch")
+-- local shrink = getfield(d,"shrink")
+-- if plus ~= 0 then
+-- plus = " plus " .. stretch/65536 .. fillcodes[plus]
+-- elseif stretch ~= 0 then
+-- plus = formatters[fmt](stretch*dimenfactors[unit],unit)
+-- plus = " plus " .. (strip and lpegmatch(stripper,plus) or plus)
+-- else
+-- plus = ""
+-- end
+-- if minus ~= 0 then
+-- minus = " minus " .. shrink/65536 .. fillcodes[minus]
+-- elseif shrink ~= 0 then
+-- minus = formatters[fmt](shrink*dimenfactors[unit],unit)
+-- minus = " minus " .. (strip and lpegmatch(stripper,minus) or minus)
+-- else
+-- minus = ""
+-- end
+-- local str = formatters[fmt](getfield(d,"width")*dimenfactors[unit],unit)
+-- return (strip and lpegmatch(stripper,str) or str) .. plus .. minus
+-- end
+--
+-- local function numbertodimen(d,unit,fmt,strip)
+-- if not d then
+-- local str = formatters[fmt](0,unit)
+-- return strip and lpegmatch(stripper,str) or str
+-- end
+-- local t = type(d)
+-- if t == 'string' then
+-- return d
+-- elseif t == "number" then
+-- if unit == true then
+-- unit = "pt"
+-- fmt = "%0.5f%s"
+-- else
+-- unit = unit or 'pt'
+-- if not fmt then
+-- fmt = "%s%s"
+-- elseif fmt == true then
+-- fmt = "%0.5f%s"
+-- end
+-- end
+-- local str = formatters[fmt](d*dimenfactors[unit],unit)
+-- return strip and lpegmatch(stripper,str) or str
+-- else
+-- return nodetodimen(d,unit,fmt,strip) -- real node
+-- end
+-- end
-local function numbertodimen(d,unit,fmt,strip)
- if not d then
- local str = formatters[fmt](0,unit)
- return strip and lpegmatch(stripper,str) or str
- end
- local t = type(d)
- if t == 'string' then
- return d
- end
- if unit == true then
- unit = "pt"
- fmt = "%0.5f%s"
- else
- unit = unit or 'pt'
- if not fmt then
- fmt = "%s%s"
- elseif fmt == true then
- fmt = "%0.5f%s"
- end
- end
- if t == "number" then
- local str = formatters[fmt](d*dimenfactors[unit],unit)
- return strip and lpegmatch(stripper,str) or str
- end
- local id = d.id
+local f_f_f = formatters["%0.5Fpt plus %0.5F%s minus %0.5F%s"]
+local f_f_m = formatters["%0.5Fpt plus %0.5F%s minus %0.5Fpt"]
+local f_p_f = formatters["%0.5Fpt plus %0.5Fpt minus %0.5F%s"]
+local f_p_m = formatters["%0.5Fpt plus %0.5Fpt minus %0.5Fpt"]
+local f_f_z = formatters["%0.5Fpt plus %0.5F%s"]
+local f_p_z = formatters["%0.5Fpt plus %0.5Fpt"]
+local f_z_f = formatters["%0.5Fpt minus %0.5F%s"]
+local f_z_m = formatters["%0.5Fpt minus %0.5Fpt"]
+local f_z_z = formatters["%0.5Fpt"]
+
+local tonut = nodes.tonut
+local getfield = nodes.nuts.getfield
+
+local function nodetodimen(n)
+ n = tonut(n)
+ local id = getid(n)
if id == kern_code then
- local str = formatters[fmt](d.width*dimenfactors[unit],unit)
- return strip and lpegmatch(stripper,str) or str
+ local width = getfield(n,"width")
+ if width == 0 then
+ return "0pt"
+ else
+ return f_z_z(width)
+ end
end
if id == glue_code then
- d = d.spec
+ n = getfield(n,"spec")
end
- if not d or not d.id == gluespec_code then
- local str = formatters[fmt](0,unit)
- return strip and lpegmatch(stripper,str) or str
+ if not n or not getid(n) == gluespec_code then
+ return "0pt"
end
- local width = d.width
- local plus = d.stretch_order
- local minus = d.shrink_order
- local stretch = d.stretch
- local shrink = d.shrink
- if plus ~= 0 then
- plus = " plus " .. stretch/65536 .. fillcodes[plus]
+ local stretch_order = getfield(n,"stretch_order")
+ local shrink_order = getfield(n,"shrink_order")
+ local stretch = getfield(n,"stretch") / 65536
+ local shrink = getfield(n,"shrink") / 65536
+ local width = getfield(n,"width") / 65536
+ if stretch_order ~= 0 then
+ if shrink_order ~= 0 then
+ return f_f_f(width,stretch,fillorders[stretch_order],shrink,fillorders[shrink_order])
+ elseif shrink ~= 0 then
+ return f_f_m(width,stretch,fillorders[stretch_order],shrink)
+ else
+ return f_f_z(width,stretch,fillorders[stretch_order])
+ end
+ elseif shrink_order ~= 0 then
+ if stretch ~= 0 then
+ return f_p_f(width,stretch,shrink,fillorders[shrink_order])
+ else
+ return f_z_f(width,shrink,fillorders[shrink_order])
+ end
elseif stretch ~= 0 then
- plus = formatters[fmt](stretch*dimenfactors[unit],unit)
- plus = " plus " .. (strip and lpegmatch(stripper,plus) or plus)
+ if shrink ~= 0 then
+ return f_p_m(width,stretch,shrink)
+ else
+ return f_p_z(width,stretch)
+ end
+ elseif shrink ~= 0 then
+ return f_z_m(width,shrink)
+ elseif width == 0 then
+ return "0pt"
else
- plus = ""
+ return f_z_z(width)
end
- if minus ~= 0 then
- minus = " minus " .. shrink/65536 .. fillcodes[minus]
- elseif shrink ~= 0 then
- minus = formatters[fmt](shrink*dimenfactors[unit],unit)
- minus = " minus " .. (strip and lpegmatch(stripper,minus) or minus)
+end
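+
+-- For a typical interword space this gives something like
+-- "3.33333pt plus 1.66666pt minus 1.11111pt" and for fill glue something like
+-- "0.00000pt plus 1.00000fil".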
+
+
+-- number.todimen(123)
+-- number.todimen(123,"cm")
+-- number.todimen(123,false,"%F")
+
+local f_pt = formatters["%p"]
+local f_un = formatters["%F%s"]
+
+dimenfactors[""] = dimenfactors.pt
+
+local function numbertodimen(d,unit,fmt)
+ if not d or d == 0 then
+ if not unit or unit == "pt" then
+ return "0pt"
+ elseif fmt then
+ return formatters[fmt](0,unit)
+ else
+ return 0 .. unit
+ end
+ elseif fmt then
+ if not unit then
+ unit = "pt"
+ end
+ return formatters[fmt](d*dimenfactors[unit],unit)
+ elseif not unit or unit == "pt" then
+ return f_pt(d)
else
- minus = ""
+ return f_un(d*dimenfactors[unit],unit)
end
- local str = formatters[fmt](d.width*dimenfactors[unit],unit)
- return (strip and lpegmatch(stripper,str) or str) .. plus .. minus
end
number.todimen = numbertodimen
+nodes .todimen = nodetodimen
function number.topoints (n,fmt) return numbertodimen(n,"pt",fmt) end
function number.toinches (n,fmt) return numbertodimen(n,"in",fmt) end
@@ -398,6 +547,19 @@ function number.tociceros (n,fmt) return numbertodimen(n,"cc",fmt) end
function number.tonewdidots (n,fmt) return numbertodimen(n,"nd",fmt) end
function number.tonewciceros (n,fmt) return numbertodimen(n,"nc",fmt) end
+function nodes.topoints (n,fmt) return nodetodimen(n,"pt",fmt) end
+function nodes.toinches (n,fmt) return nodetodimen(n,"in",fmt) end
+function nodes.tocentimeters (n,fmt) return nodetodimen(n,"cm",fmt) end
+function nodes.tomillimeters (n,fmt) return nodetodimen(n,"mm",fmt) end
+function nodes.toscaledpoints(n,fmt) return nodetodimen(n,"sp",fmt) end
+function nodes.toscaledpoints(n) return n .. "sp" end
+function nodes.tobasepoints (n,fmt) return nodetodimen(n,"bp",fmt) end
+function nodes.topicas (n,fmt) return nodetodimen(n,"pc",fmt) end
+function nodes.todidots (n,fmt) return nodetodimen(n,"dd",fmt) end
+function nodes.tociceros (n,fmt) return nodetodimen(n,"cc",fmt) end
+function nodes.tonewdidots (n,fmt) return nodetodimen(n,"nd",fmt) end
+function nodes.tonewciceros (n,fmt) return nodetodimen(n,"nc",fmt) end
+
-- stop redefinition
local points = function(n)
@@ -406,7 +568,7 @@ local points = function(n)
elseif type(n) == "number" then
return lpegmatch(stripper,format("%.5fpt",n*ptfactor)) -- faster than formatter
else
- return numbertodimen(n,"pt",true,true) -- also deals with nodes
+ return numbertodimen(n,"pt") -- also deals with nodes
end
end
@@ -416,7 +578,7 @@ local basepoints = function(n)
elseif type(n) == "number" then
return lpegmatch(stripper,format("%.5fbp",n*bpfactor)) -- faster than formatter
else
- return numbertodimen(n,"bp",true,true) -- also deals with nodes
+ return numbertodimen(n,"bp") -- also deals with nodes
end
end
@@ -426,7 +588,7 @@ local pts = function(n)
elseif type(n) == "number" then
return format("%.5fpt",n*ptfactor) -- faster than formatter
else
- return numbertodimen(n,"pt",true) -- also deals with nodes
+ return numbertodimen(n,"pt") -- also deals with nodes
end
end
@@ -443,8 +605,13 @@ number.basepoints = basepoints
number.pts = pts
number.nopts = nopts
-local colors = { }
-tracers.colors = colors
+nodes.points = function(n) return numbertodimen(n,"pt") end
+nodes.basepoints = function(n) return numbertodimen(n,"bp") end
+nodes.pts = function(n) return numbertodimen(n,"pt") end
+nodes.nopts = function(n) return format("%.5f",n*ptfactor) end
+
+local colors = { }
+tracers.colors = colors
local unsetvalue = attributes.unsetvalue
@@ -454,36 +621,34 @@ local m_color = attributes.list[a_color] or { }
function colors.set(n,c,s)
local mc = m_color[c]
- if not mc then
- n[a_color] = unsetvalue
+ local nn = tonut(n)
+ if mc then
+ local mm = s or texgetattribute(a_colormodel)
+ setattr(nn,a_colormodel,mm <= 0 and mm or 1)
+ setattr(nn,a_color,mc)
else
- if not n[a_colormodel] then
- n[a_colormodel] = s or 1
- end
- n[a_color] = mc
+ setattr(nn,a_color,unsetvalue)
end
return n
end
function colors.setlist(n,c,s)
- local f = n
- while n do
- local mc = m_color[c]
- if not mc then
- n[a_color] = unsetvalue
- else
- if not n[a_colormodel] then
- n[a_colormodel] = s or 1
- end
- n[a_color] = mc
- end
- n = n.next
+ local nn = tonut(n)
+ local mc = m_color[c] or unsetvalue
+ local mm = s or texgetattribute(a_colormodel)
+ if mm <= 0 then
+ mm = 1
end
- return f
+ while nn do
+ setattr(nn,a_colormodel,mm)
+ setattr(nn,a_color,mc)
+ nn = getnext(nn)
+ end
+ return n
end
function colors.reset(n)
- n[a_color] = unsetvalue
+ setattr(tonut(n),a_color,unsetvalue)
return n
end
@@ -496,31 +661,22 @@ local a_transparency = attributes.private('transparency')
local m_transparency = attributes.list[a_transparency] or { }
function transparencies.set(n,t)
- local mt = m_transparency[t]
- if not mt then
- n[a_transparency] = unsetvalue
- else
- n[a_transparency] = mt
- end
+ setattr(tonut(n),a_transparency,m_transparency[t] or unsetvalue)
return n
end
function transparencies.setlist(n,c,s)
- local f = n
- while n do
- local mt = m_transparency[c]
- if not mt then
- n[a_transparency] = unsetvalue
- else
- n[a_transparency] = mt
- end
- n = n.next
+ local nn = tonut(n)
+ local mt = m_transparency[c] or unsetvalue
+ while nn do
+ setattr(nn,a_transparency,mt)
+ nn = getnext(nn)
end
- return f
+ return n
end
function transparencies.reset(n)
- n[a_transparency] = unsetvalue
+ setattr(tonut(n),a_transparency,unsetvalue)
return n
end
@@ -537,52 +693,76 @@ end
-- although tracers are used seldom
local function setproperties(n,c,s)
+ local nn = tonut(n)
local mm = texgetattribute(a_colormodel)
- n[a_colormodel] = mm > 0 and mm or 1
- n[a_color] = m_color[c]
- n[a_transparency] = m_transparency[c]
+ setattr(nn,a_colormodel,mm > 0 and mm or 1)
+ setattr(nn,a_color,m_color[c])
+ setattr(nn,a_transparency,m_transparency[c])
return n
end
tracers.setproperties = setproperties
-function tracers.setlistv(n,c,s)
- local f = n
+function tracers.setlist(n,c,s)
+ local nn = tonut(n)
local mc = m_color[c]
local mt = m_transparency[c]
local mm = texgetattribute(a_colormodel)
if mm <= 0 then
mm = 1
end
- while n do
- n[a_colormodel] = mm
- n[a_color] = mc
- n[a_transparency] = mt
- n = n.next
+ while nn do
+ setattr(nn,a_colormodel,mm)
+ setattr(nn,a_color,mc)
+ setattr(nn,a_transparency,mt)
+ nn = getnext(nn)
end
- return f
+ return n
end
function tracers.resetproperties(n)
- n[a_color] = unsetvalue
- n[a_transparency] = unsetvalue
+ local nn = tonut(n)
+ setattr(nn,a_color,unsetvalue)
+ setattr(nn,a_transparency,unsetvalue)
return n
end
-function tracers.rule(w,h,d,c,s) -- so some day we can consider using literals (speedup)
- return setproperties(new_rule(w,h,d),c,s)
-end
-
--- only nodes
+-- this one returns a nut
local nodestracerpool = { }
+local nutstracerpool = { }
tracers.pool = {
nodes = nodestracerpool,
+ nuts = nutstracerpool,
}
-function nodestracerpool.rule(w,h,d,c,s) -- so some day we can consider using literals (speedup)
+table.setmetatableindex(nodestracerpool,function(t,k,v)
+ local f = nutstracerpool[k]
+ local v = function(...)
+ return tonode(f(...))
+ end
+ t[k] = v
+ return v
+end)
+
+function nutstracerpool.rule(w,h,d,c,s) -- so some day we can consider using literals (speedup)
return setproperties(new_rule(w,h,d),c,s)
end
tracers.rule = nodestracerpool.rule -- for a while
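+
+-- Usage sketch: the nuts variant returns a nut, the nodes variant (generated by
+-- the metatable above) a regular node; the color name is only an example and
+-- has to be known in attributes.list:
+--
+-- local r = tracers.pool.nodes.rule(65536,65536,0,"trace:r")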
+
+-- local function show(head,n,message)
+-- print("START",message or "")
+-- local i = 0
+-- for current in traverse(head) do
+-- local prev = getprev(current)
+-- local next = getnext(current)
+-- i = i + 1
+-- print(i, prev and nodecodes[getid(prev)],nodecodes[getid(current)],next and nodecodes[getid(next)])
+-- if i == n then
+-- break
+-- end
+-- end
+-- print("STOP", message or "")
+-- end
diff --git a/tex/context/base/node-tsk.lua b/tex/context/base/node-tsk.lua
index dfa570b24..56a4b18ef 100644
--- a/tex/context/base/node-tsk.lua
+++ b/tex/context/base/node-tsk.lua
@@ -117,6 +117,14 @@ function tasks.disableaction(name,action)
end
end
+function tasks.replaceaction(name,group,oldaction,newaction)
+ local data = valid(name)
+ if data then
+ sequencers.replaceaction(data.list,group,oldaction,newaction)
+ data.runner = false
+ end
+end
+
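+-- A usage sketch; the group and action names are placeholders for whatever is
+-- actually registered in the given task list:
+--
+-- nodes.tasks.replaceaction("processors","words",
+--     "languages.words.check","mymodule.mycheck")
+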
function tasks.setaction(name,action,value)
if value then
tasks.enableaction(name,action)
diff --git a/tex/context/base/node-tst.lua b/tex/context/base/node-tst.lua
index bfe0051bd..7f5102d5f 100644
--- a/tex/context/base/node-tst.lua
+++ b/tex/context/base/node-tst.lua
@@ -24,17 +24,26 @@ local rightskip_code = skipcodes.rightskip
local abovedisplayshortskip_code = skipcodes.abovedisplayshortskip
local belowdisplayshortskip_code = skipcodes.belowdisplayshortskip
-local find_node_tail = node.tail or node.slide
+local nuts = nodes.nuts
-function nodes.leftmarginwidth(n) -- todo: three values
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getchar = nuts.getchar
+local getsubtype = nuts.getsubtype
+
+local find_node_tail = nuts.tail
+
+function nuts.leftmarginwidth(n) -- todo: three values
while n do
- local id = n.id
+ local id = getid(n)
if id == glue_code then
- return n.subtype == leftskip_code and n.spec.width or 0
+ return getsubtype(n) == leftskip_code and getfield(getfield(n,"spec"),"width") or 0
elseif id == whatsit_code then
- n = n.next
+ n = getnext(n)
elseif id == hlist_code then
- return n.width
+ return getfield(n,"width")
else
break
end
@@ -42,15 +51,15 @@ function nodes.leftmarginwidth(n) -- todo: three values
return 0
end
-function nodes.rightmarginwidth(n)
+function nuts.rightmarginwidth(n)
if n then
n = find_node_tail(n)
while n do
- local id = n.id
+ local id = getid(n)
if id == glue_code then
- return n.subtype == rightskip_code and n.spec.width or 0
+ return getsubtype(n) == rightskip_code and getfield(getfield(n,"spec"),"width") or 0
elseif id == whatsit_code then
- n = n.prev
+ n = getprev(n)
else
break
end
@@ -59,15 +68,15 @@ function nodes.rightmarginwidth(n)
return false
end
-function nodes.somespace(n,all)
+function nuts.somespace(n,all)
if n then
- local id = n.id
+ local id = getid(n)
if id == glue_code then
- return (all or (n.spec.width ~= 0)) and glue_code
+ return (all or (getfield(getfield(n,"spec"),"width") ~= 0)) and glue_code
elseif id == kern_code then
- return (all or (n.kern ~= 0)) and kern
+ return (all or (getfield(n,"kern") ~= 0)) and kern_code
elseif id == glyph_code then
- local category = chardata[n.char].category
+ local category = chardata[getchar(n)].category
-- maybe more category checks are needed
return (category == "zs") and glyph_code
end
@@ -75,12 +84,12 @@ function nodes.somespace(n,all)
return false
end
-function nodes.somepenalty(n,value)
+function nuts.somepenalty(n,value)
if n then
- local id = n.id
+ local id = getid(n)
if id == penalty_code then
if value then
- return n.penalty == value
+ return getfield(n,"penalty") == value
else
return true
end
@@ -89,32 +98,38 @@ function nodes.somepenalty(n,value)
return false
end
-function nodes.is_display_math(head)
- local n = head.prev
+function nuts.is_display_math(head)
+ local n = getprev(head)
while n do
- local id = n.id
+ local id = getid(n)
if id == penalty_code then
elseif id == glue_code then
- if n.subtype == abovedisplayshortskip_code then
+ if getsubtype(n) == abovedisplayshortskip_code then
return true
end
else
break
end
- n = n.prev
+ n = getprev(n)
end
- n = head.next
+ n = getnext(head)
while n do
- local id = n.id
+ local id = getid(n)
if id == penalty_code then
elseif id == glue_code then
- if n.subtype == belowdisplayshortskip_code then
+ if getsubtype(n) == belowdisplayshortskip_code then
return true
end
else
break
end
- n = n.next
+ n = getnext(n)
end
return false
end
+
+nodes.leftmarginwidth = nodes.vianuts(nuts.leftmarginwidth)
+nodes.rightmarginwidth = nodes.vianuts(nuts.rightmarginwidth)
+nodes.somespace = nodes.vianuts(nuts.somespace)
+nodes.somepenalty = nodes.vianuts(nuts.somepenalty)
+nodes.is_display_math = nodes.vianuts(nuts.is_display_math)
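+
+-- nodes.vianuts is defined elsewhere in the node code; it presumably just wraps
+-- a nut based function so that it can be fed regular nodes, along these lines:
+--
+-- local function vianuts(f)
+--     return function(n,...)
+--         return f(nuts.tonut(n),...)
+--     end
+-- end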
diff --git a/tex/context/base/node-typ.lua b/tex/context/base/node-typ.lua
index 4a2ef8d49..f1aacf25a 100644
--- a/tex/context/base/node-typ.lua
+++ b/tex/context/base/node-typ.lua
@@ -8,33 +8,45 @@ if not modules then modules = { } end modules ['node-typ'] = {
-- code has been moved to blob-ini.lua
-local typesetters = nodes.typesetters or { }
-nodes.typesetters = typesetters
+local typesetters = nodes.typesetters or { }
+nodes.typesetters = typesetters
-local hpack_node_list = nodes.hpack
-local vpack_node_list = nodes.vpack
-local fast_hpack_list = nodes.fasthpack
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
-local nodepool = nodes.pool
+local setfield = nuts.setfield
+local getfont = nuts.getfont
+
+local hpack_node_list = nuts.hpack
+local vpack_node_list = nuts.vpack
+local fast_hpack_list = nuts.fasthpack
+local copy_node = nuts.copy
+
+local nodepool = nuts.pool
local new_glyph = nodepool.glyph
local new_glue = nodepool.glue
local utfvalues = utf.values
-local currentfont = font.current
-local fontparameters = fonts.hashes.parameters
+local currentfont = font.current
+local fontparameters = fonts.hashes.parameters
-local function tonodes(str,fontid,spacing) -- quick and dirty
+local function tonodes(str,fontid,spacing,templateglyph) -- quick and dirty
local head, prev = nil, nil
if not fontid then
- fontid = currentfont()
+ if templateglyph then
+ fontid = getfont(templateglyph)
+ else
+ fontid = currentfont()
+ end
end
local fp = fontparameters[fontid]
local s, p, m
if spacing then
s, p, m = spacing, 0, 0
else
- s, p, m = fp.space, fp.space_stretch, fp,space_shrink
+ s, p, m = fp.space, fp.space_stretch, fp.space_shrink
end
local spacedone = false
for c in utfvalues(str) do
@@ -44,6 +56,10 @@ local function tonodes(str,fontid,spacing) -- quick and dirty
next = new_glue(s,p,m)
spacedone = true
end
+ elseif templateglyph then
+ next = copy_node(templateglyph)
+ setfield(next,"char",c)
+ spacedone = false
else
next = new_glyph(fontid or 1,c)
spacedone = false
@@ -53,8 +69,8 @@ local function tonodes(str,fontid,spacing) -- quick and dirty
elseif not head then
head = next
else
- prev.next = next
- next.prev = prev
+ setfield(prev,"next",next)
+ setfield(next,"prev",prev)
end
prev = next
end
@@ -77,17 +93,30 @@ end
local tovpackfast = tovpack
-typesetters.tonodes = tonodes
-typesetters.tohpack = tohpack
-typesetters.tohpackfast = tohpackfast
-typesetters.tovpack = tovpack
-typesetters.tovpackfast = tovpackfast
+local tnuts = { }
+nuts.typesetters = tnuts
+
+tnuts.tonodes = tonodes
+tnuts.tohpack = tohpack
+tnuts.tohpackfast = tohpackfast
+tnuts.tovpack = tovpack
+tnuts.tovpackfast = tovpackfast
+
+tnuts.hpack = tohpack -- obsolete
+tnuts.fast_hpack = tohpackfast -- obsolete
+tnuts.vpack = tovpack -- obsolete
+
+typesetters.tonodes = function(...) local h, b = tonodes (...) return tonode(h), b end
+typesetters.tohpack = function(...) local h, b = tohpack (...) return tonode(h), b end
+typesetters.tohpackfast = function(...) local h, b = tohpackfast(...) return tonode(h), b end
+typesetters.tovpack = function(...) local h, b = tovpack (...) return tonode(h), b end
+typesetters.tovpackfast = function(...) local h, b = tovpackfast(...) return tonode(h), b end
-typesetters.hpack = tohpack
-typesetters.fast_hpack = tohpackfast
-typesetters.vpack = tovpack
+typesetters.hpack = typesetters.tohpack -- obsolete
+typesetters.fast_hpack = typesetters.tohpackfast -- obsolete
+typesetters.vpack = typesetters.tovpack -- obsolete
-- node.write(nodes.typestters.hpack("Hello World!"))
-- node.write(nodes.typestters.hpack("Hello World!",1,100*1024*10))
-string.tonodes = tonodes -- quite convenient
+string.tonodes = function(...) return tonode(tonodes(...)) end -- quite convenient
diff --git a/tex/context/base/pack-bck.mkvi b/tex/context/base/pack-bck.mkvi
index 72eafd282..bb4b72252 100644
--- a/tex/context/base/pack-bck.mkvi
+++ b/tex/context/base/pack-bck.mkvi
@@ -32,7 +32,7 @@
%D \starttyping
%D \setupbackground
%D [backgroundoffset=4pt,
-%D background=screen,
+%D background=color,
%D frame=on,
%D framecolor=red,
%D leftoffset=2pt]
@@ -206,14 +206,9 @@
\c!corner=\v!rectangular,
\c!frame=\v!off,
\c!depth=\zeropoint,
-% \c!color=,
-% \c!background=\v!screen,
-% \c!backgroundcolor=\backgroundparameter\c!color,
-% \c!screen=\defaultbackgroundscreen,
-%
+ % \c!color=,
\c!background=\v!color,
\c!backgroundcolor=lightgray,
-%
\c!before=,
\c!after=]
diff --git a/tex/context/base/pack-box.mkiv b/tex/context/base/pack-box.mkiv
index f8b36691c..6e3bab6f4 100644
--- a/tex/context/base/pack-box.mkiv
+++ b/tex/context/base/pack-box.mkiv
@@ -24,10 +24,10 @@
%D which in itself is ok, but can lead to loops due to rounding errors (happened
%D in demo-obv).
-\definelayer[\v!text-2][\c!position=\v!yes,\c!region=,\c!width=\overlaywidth,\c!height=\overlayheight]
-\definelayer[\v!text-1][\c!position=\v!yes,\c!region=,\c!width=\overlaywidth,\c!height=\overlayheight]
-\definelayer[\v!text+1][\c!position=\v!yes,\c!region=,\c!width=\overlaywidth,\c!height=\overlayheight]
-\definelayer[\v!text+2][\c!position=\v!yes,\c!region=,\c!width=\overlaywidth,\c!height=\overlayheight]
+\definelayer[\v!text-2][\c!position=\v!yes,\c!region=,\c!width=\d_overlay_width,\c!height=\d_overlay_height]
+\definelayer[\v!text-1][\c!position=\v!yes,\c!region=,\c!width=\d_overlay_width,\c!height=\d_overlay_height]
+\definelayer[\v!text+1][\c!position=\v!yes,\c!region=,\c!width=\d_overlay_width,\c!height=\d_overlay_height]
+\definelayer[\v!text+2][\c!position=\v!yes,\c!region=,\c!width=\d_overlay_width,\c!height=\d_overlay_height]
\unexpanded\def\internaltextoverlay#1% will become more generic and installable
{\startoverlay % i.e. probably an overlay by itself
@@ -436,7 +436,7 @@
\def\pack_ornament_text[#1][#2]%
{\bgroup
- \doifassignmentelse{#1}
+ \doifelseassignment{#1}
{\letdummyparameter\c!alternative\v!a
\getdummyparameters[#1]%
\doifelse{\directdummyparameter\c!alternative}\v!a
@@ -552,7 +552,7 @@
%
\doifelse{\bleedingparameter\c!stretch}\v!yes\donetrue\donefalse
%
- \xdef\bleedwidth{\dimexpr
+ \xdef\bleedwidth{\the\dimexpr
\ifdone
\ifconditional\c_pack_boxes_l
\scratchwidth+\MPx\currentbgposition-\MPx\currentpageposition
@@ -563,8 +563,9 @@
\fi\fi
\else
\scratchwidth
- \fi+\scratchhoffset}%
- \xdef\bleedheight{\dimexpr
+ \fi+\scratchhoffset
+ \relax}%
+ \xdef\bleedheight{\the\dimexpr
\ifdone
\ifconditional\c_pack_boxes_t
\paperheight -\MPy\currentbgposition+\MPy\currentpageposition % not checked
@@ -575,7 +576,8 @@
\fi\fi
\else
\scratchheight
- \fi+\scratchvoffset}%
+ \fi+\scratchvoffset
+ \relax}%
\dowithnextboxcontentcs\pack_boxes_bleed_settings\pack_boxes_bleed_finish\hbox}
\def\pack_boxes_bleed_settings
@@ -678,7 +680,7 @@
\dowithnextboxcontent
{\forgetall
\hsize\directdummyparameter\c!width
- \normalexpanded{\setupalign[\directdummyparameter\c!align]}%
+ \usealignparameter\directdummyparameter
\dousestyleparameter{\directdummyparameter\c!style}}
{\setlayer[#1][#2]{\strut\dousecolorparameter{\directdummyparameter\c!color}\flushnextbox}% maybe expand the color
\egroup}%
@@ -977,22 +979,43 @@
\box\scratchbox
\egroup}
-% \backgroundimage{1}{\hsize}{\vsize}{\externalfigure[cow][\c!width=3cm]}
+% \backgroundimage{1}{\hsize}{\vsize}{\externalfigure[cow][\c!width=3cm]}
+
+% \framed[offset=overlay,width=6cm,height=3cm]{\backgroundimage {1}{\hsize}{\vsize}{\externalfigure[cow][width=1cm]}}
+% \framed[offset=overlay,width=6cm,height=3cm]{\backgroundimage {2}{\hsize}{\vsize}{\externalfigure[cow][width=1cm]}}
+% \framed[offset=overlay,width=6cm,height=3cm]{\backgroundimage {3}{\hsize}{\vsize}{\externalfigure[cow][width=1cm]}}
+
+% \framed[offset=overlay,width=4cm,height=2cm]{\backgroundimagefill{1}{\hsize}{\vsize}{\externalfigure[cow][width=1cm]}}
+% \framed[offset=overlay,width=4cm,height=2cm]{\backgroundimagefill{2}{\hsize}{\vsize}{\externalfigure[cow][width=1cm]}}
+% \framed[offset=overlay,width=4cm,height=2cm]{\backgroundimagefill{3}{\hsize}{\vsize}{\externalfigure[cow][width=1cm]}}
+% \framed[offset=overlay,width=2cm,height=4cm]{\backgroundimagefill{1}{\hsize}{\vsize}{\externalfigure[cow][width=1cm]}}
+% \framed[offset=overlay,width=2cm,height=4cm]{\backgroundimagefill{2}{\hsize}{\vsize}{\externalfigure[cow][width=1cm]}}
+% \framed[offset=overlay,width=2cm,height=4cm]{\backgroundimagefill{3}{\hsize}{\vsize}{\externalfigure[cow][width=1cm]}}
+%
+% \framed[offset=overlay,width=4cm,height=2cm]{\backgroundimagefill{1}{\hsize}{\vsize}{\externalfigure[mill]}}
+% \framed[offset=overlay,width=4cm,height=2cm]{\backgroundimagefill{2}{\hsize}{\vsize}{\externalfigure[mill]}}
+% \framed[offset=overlay,width=4cm,height=2cm]{\backgroundimagefill{3}{\hsize}{\vsize}{\externalfigure[mill]}}
+% \framed[offset=overlay,width=2cm,height=4cm]{\backgroundimagefill{1}{\hsize}{\vsize}{\externalfigure[mill]}}
+% \framed[offset=overlay,width=2cm,height=4cm]{\backgroundimagefill{2}{\hsize}{\vsize}{\externalfigure[mill]}}
+% \framed[offset=overlay,width=2cm,height=4cm]{\backgroundimagefill{3}{\hsize}{\vsize}{\externalfigure[mill]}}
\unexpanded\def\backgroundimage#1#2#3% repeat hsize vsize
{\bgroup
\forgetall
- \dowithnextbox{\pack_boxes_background_image{#1}{#2}{#3}}\hbox}
+ \scratchcounter#1\relax
+ \scratchwidth #2\relax
+ \scratchheight #3\relax
+ \dowithnextboxcs\pack_boxes_background_image\hbox}
-\def\pack_boxes_background_image#1#2#3%
+\def\pack_boxes_background_image
{\offinterlineskip
- \ifcase#1\relax
+ \ifcase\scratchcounter
% just one
\else
- \scratchdimen#2\divide\scratchdimen\wd\nextbox\scratchnx\scratchdimen\advance\scratchnx\plusone\relax
- \scratchdimen#3\divide\scratchdimen\ht\nextbox\scratchny\scratchdimen\advance\scratchny\plusone\relax
+ \scratchdimen\scratchwidth \divide\scratchdimen\wd\nextbox\scratchnx\scratchdimen\advance\scratchnx\plusone\relax
+ \scratchdimen\scratchheight\divide\scratchdimen\ht\nextbox\scratchny\scratchdimen\advance\scratchny\plusone\relax
% to be considered: methods
- \ifcase#1%
+ \ifcase\scratchcounter
\or % x and y
\setbox\nextbox\hbox{\dorecurse\scratchnx{\copy\nextbox}}%
\setbox\nextbox\vbox{\dorecurse\scratchny{\copy\nextbox\endgraf}}%
@@ -1002,13 +1025,59 @@
\setbox\nextbox\vbox{\dorecurse\scratchny{\copy\nextbox\endgraf}}%
\fi
\fi
- \ifdim\wd\nextbox>#2\relax
- \setbox\nextbox\hbox to #2{\hss\box\nextbox\hss}%
- \setbox\nextbox\hbox{\normalexpanded{\clip[\c!width=#2,\c!height=\the\ht\nextbox]{\box\nextbox}}}%
+ \ifdim\wd\nextbox>\scratchwidth
+ \setbox\nextbox\hbox to \scratchwidth{\hss\box\nextbox\hss}%
+ \setbox\nextbox\hbox{\normalexpanded{\clip[\c!width=\the\scratchwidth,\c!height=\the\ht\nextbox]{\box\nextbox}}}%
+ \fi
+ \ifdim\ht\nextbox>\scratchheight
+ \setbox\nextbox\vbox to \scratchheight{\vss\box\nextbox\vss}%
+ \setbox\nextbox\hbox{\normalexpanded{\clip[\c!width=\the\wd\nextbox,\c!height=\the\scratchheight]{\box\nextbox}}}%
+ \fi
+ \box\nextbox
+ \egroup}
+
+\unexpanded\def\backgroundimagefill#1#2#3% repeat hsize vsize
+ {\bgroup
+ \forgetall
+ \scratchcounter#1\relax
+ \scratchwidth #2\relax
+ \scratchheight #3\relax
+ \dowithnextboxcs\pack_boxes_background_image_fill\hbox}
+
+\def\pack_boxes_background_image_fill
+ {\offinterlineskip
+ \setbox\nextbox\hbox\bgroup
+ \ifdim\scratchwidth>\scratchheight
+ \scale[\c!width=\the\scratchwidth]{\box\nextbox}%
+ \else
+ \scale[\c!height=\the\scratchheight]{\box\nextbox}%
+ \fi
+ \egroup
+ \ifdim\wd\nextbox>\scratchwidth
+ \setbox\nextbox\hbox to \scratchwidth
+ {\ifcase\scratchcounter
+ \hss\box\nextbox\hss
+ \or
+ \box\nextbox\hss
+ \or
+ \hss\box\nextbox
+ \else
+ \hss\box\nextbox\hss
+ \fi}%
+ \setbox\nextbox\hbox{\normalexpanded{\clip[\c!width=\the\scratchwidth,\c!height=\the\ht\nextbox]{\box\nextbox}}}%
\fi
- \ifdim\ht\nextbox>#3\relax
- \setbox\nextbox\vbox to #3{\vss\box\nextbox\vss}%
- \setbox\nextbox\hbox{\normalexpanded{\clip[\c!width=\the\wd\nextbox,\c!height=#3]{\box\nextbox}}}%
+ \ifdim\ht\nextbox>\scratchheight
+ \setbox\nextbox\vbox to \scratchheight
+ {\ifcase\scratchcounter
+ \vss\box\nextbox\vss
+ \or
+ \box\nextbox\vss
+ \or
+ \vss\box\nextbox
+ \else
+ \vss\box\nextbox\vss
+ \fi}%
+ \setbox\nextbox\hbox{\normalexpanded{\clip[\c!width=\the\wd\nextbox,\c!height=\the\scratchheight]{\box\nextbox}}}%
\fi
\box\nextbox
\egroup}
diff --git a/tex/context/base/pack-com.mkiv b/tex/context/base/pack-com.mkiv
index 2c28d6b20..b734d6028 100644
--- a/tex/context/base/pack-com.mkiv
+++ b/tex/context/base/pack-com.mkiv
@@ -178,7 +178,7 @@
\unexpanded\def\pack_common_caption_stop {\removeunwantedspaces\egroup}
\unexpanded\def\stopcombination
- {\bgroup\normalexpanded{\egroup{}\ctxcommand{ntimes("{}{}",\number\c_pack_combinations_n)}}% brr
+ {\bgroup\normalexpanded{\egroup{}\ntimes{{}{}}\c_pack_combinations_n}% brr
\dostoptagged
\egroup
\egroup}
@@ -191,19 +191,19 @@
\edef\currentcombination{#1}%
\edef\currentcombinationspec{#2}%
\ifx\currentcombinationspec\empty
- \doifassignmentelse{#1}%
+ \doifelseassignment{#1}%
{\let\currentcombination\empty
\setupcurrentcombination[#1]%
\edef\currentcombinationspec{\combinationparameter\c!nx*\combinationparameter\c!ny*}}
- {\doifinstringelse{*}\currentcombination
+ {\doifelseinstring{*}\currentcombination
{\edef\currentcombinationspec{\currentcombination*\plusone*}%
\let\currentcombination\empty}
- {\doifnumberelse\currentcombination
+ {\doifelsenumber\currentcombination
{\edef\currentcombinationspec{\currentcombination*\plusone*}%
\let\currentcombination\empty}
{\edef\currentcombinationspec{\combinationparameter\c!nx*\combinationparameter\c!ny*}}}}%
\else
- \doifassignmentelse{#2}%
+ \doifelseassignment{#2}%
{\setupcurrentcombination[#2]%
\edef\currentcombinationspec{\combinationparameter\c!nx*\combinationparameter\c!ny*}}
{\edef\currentcombinationspec{\currentcombinationspec*\plusone*}}%
@@ -221,13 +221,12 @@
\edef\p_height {\combinationparameter\c!height}%
\edef\p_width {\combinationparameter\c!width}%
\edef\p_location{\combinationparameter\c!location}%
- \edef\p_align {\combinationparameter\c!align}%
\edef\p_distance{\combinationparameter\c!distance}%
%
\pack_combinations_location_reset
\rawprocesscommacommand[\p_location]\pack_combinations_location_step
%
- \dostarttagged\t!combination\currentcombination
+ \dostarttaggedchained\t!combination\currentcombination\??combination
\vbox \ifx\p_height\v!fit\else to \p_height \fi \bgroup
\let\combination\empty % permits \combination{}{} handy for cld
\normalexpanded{\pack_combinations_start_indeed[\currentcombinationspec]}}
@@ -295,7 +294,7 @@
\def\pack_combinations_alternative_label_indeed
{\dowithnextboxcs\pack_combinations_pickup_caption\vtop\bgroup
\hsize\wd\b_pack_combinations_content
- \ifx\p_align\empty\else\setupalign[\p_align]\fi
+ \usealignparameter\combinationparameter
\usecombinationstyleandcolor\c!style\c!color
\begstrut
\normalexpanded{\strc_labels_command{\v!combination\ifx\currentcombination\empty\else:\currentcombination\fi}}%
@@ -324,15 +323,18 @@
\def\pack_combinations_caption_second
{\ifx\nexttoken\egroup
% the caption is empty
+ \else\ifx\nexttoken\stopcaption
+ % the caption is empty (new per 2014-05-24)
\else
+ % todo: \p_pack_combinations_alternative\v!none: no style, strut etc
\hsize\wd\b_pack_combinations_content
- \ifx\p_align\empty\else\setupalign[\p_align]\fi
+ \usealignparameter\combinationparameter
\usecombinationstyleandcolor\c!style\c!color
\bgroup
\aftergroup\endstrut
\aftergroup\egroup
\begstrut
- \fi}
+ \fi\fi}
\def\pack_combinations_pickup_package_pair % we need to store the caption row
{\vbox
@@ -626,12 +628,12 @@
\unexpanded\def\placepairedbox[#1]%
{\bgroup
\edef\currentpairedbox{#1}%
- \doifnextoptionalelse\pack_pairedboxes_place\pack_pairedboxes_place_indeed}
+ \doifelsenextoptionalcs\pack_pairedboxes_place\pack_pairedboxes_place_indeed}
\unexpanded\def\startplacepairedbox[#1]%
{\bgroup
\edef\currentpairedbox{#1}%
- \doifnextoptionalelse\pack_pairedboxes_place\pack_pairedboxes_place_indeed}
+ \doifelsenextoptionalcs\pack_pairedboxes_place\pack_pairedboxes_place_indeed}
\unexpanded\def\stopplacepairedbox
{}
@@ -780,7 +782,7 @@
\fi}
\def\pack_pairedboxes_between
- {\switchtobodyfont[\pairedboxparameter\c!bodyfont]% split under same regime
+ {\usebodyfontparameter\pairedboxparameter
\setbox\b_pack_pairedboxes_first\box\nextbox
\ifconditional\c_pack_pairedboxes_horizontal
\pack_pairedboxes_between_horizontal
diff --git a/tex/context/base/pack-fen.mkiv b/tex/context/base/pack-fen.mkiv
index 4253eeaa7..04a36fa46 100644
--- a/tex/context/base/pack-fen.mkiv
+++ b/tex/context/base/pack-fen.mkiv
@@ -50,12 +50,12 @@
\else
\def\setinstalledframedimensions
- {\edef\overlaywidth {\the\frameddimenwd\space}%
- \edef\overlayheight {\the\dimexpr\frameddimenht+\frameddimendp\relax\space}%
- \edef\overlaydepth {\the\frameddimendp\space}%
+ {\d_overlay_width \frameddimenwd
+ \d_overlay_height \dimexpr\frameddimenht+\frameddimendp\relax
+ \d_overlay_depth \frameddimendp
+ \d_overlay_linewidth \ruledlinewidth
\edef\overlaycolor {\framedparameter\c!backgroundcolor}%
- \edef\overlaylinecolor{\framedparameter\c!framecolor}%
- \edef\overlaylinewidth{\the\ruledlinewidth}}
+ \edef\overlaylinecolor{\framedparameter\c!framecolor}}
\fi
@@ -63,7 +63,7 @@
\def\whateverleftframe#1%
{\setinstalledframedimensions
- \setbox\b_framed_rendered\vbox to \overlayheight{\vss#1\vss}%
+ \setbox\b_framed_rendered\vbox to \d_overlay_height{\vss#1\vss}%
\setbox\b_framed_rendered\hbox to \zeropoint{\box\b_framed_rendered\hss}%
\ht\b_framed_rendered\zeropoint
\dp\b_framed_rendered\zeropoint
@@ -71,7 +71,7 @@
\def\whateverrightframe#1%
{\setinstalledframedimensions
- \setbox\b_framed_rendered\vbox to \overlayheight{\vss#1\vss}%
+ \setbox\b_framed_rendered\vbox to \d_overlay_height{\vss#1\vss}%
\setbox\b_framed_rendered\hbox to \zeropoint{\hss\box\b_framed_rendered}%
\ht\b_framed_rendered\zeropoint
\dp\b_framed_rendered\zeropoint
@@ -79,7 +79,7 @@
\def\whatevertopframe#1%
{\setinstalledframedimensions
- \setbox\b_framed_rendered\hbox to \overlaywidth{\hss#1\hss}%
+ \setbox\b_framed_rendered\hbox to \d_overlay_width{\hss#1\hss}%
\setbox\b_framed_rendered\vbox to \zeropoint{\box\b_framed_rendered\vss}%
\ht\b_framed_rendered\zeropoint
\dp\b_framed_rendered\zeropoint
@@ -88,7 +88,7 @@
\def\whateverbottomframe#1%
{\setinstalledframedimensions
- \setbox\b_framed_rendered\hbox to \overlaywidth{\hss#1\hss}%
+ \setbox\b_framed_rendered\hbox to \d_overlay_width{\hss#1\hss}%
\setbox\b_framed_rendered\vbox to \zeropoint{\vss\box\b_framed_rendered}%
\ht\b_framed_rendered\zeropoint
\dp\b_framed_rendered\zeropoint
diff --git a/tex/context/base/pack-lyr.mkiv b/tex/context/base/pack-lyr.mkiv
index a891c998d..a847dec67 100644
--- a/tex/context/base/pack-lyr.mkiv
+++ b/tex/context/base/pack-lyr.mkiv
@@ -101,9 +101,6 @@
\def\layeranchor{\currentlayer:\the\realpageno}
-\unexpanded\def\anch_mark_anchor_box#1%
- {\ctxcommand{markregionbox(\number#1,"\layeranchor")}} % needs an hbox
-
\let\p_pack_layers_doublesided\empty
\let\p_pack_layers_state \empty
\let\p_pack_layers_option \empty
@@ -229,7 +226,7 @@
\else\ifthirdargument
\pack_layers_set_indeed[#1][#2][#3]%
\else
- \doifassignmentelse{#2}
+ \doifelseassignment{#2}
{\pack_layers_set_indeed[#1][][#2]}%
{\pack_layers_set_indeed[#1][#2][]}%
\fi\fi}
@@ -323,7 +320,7 @@
{\dodoubleargument\pack_layers_define_preset}
\def\pack_layers_define_preset[#1][#2]%
- {\doifassignmentelse{#2}
+ {\doifelseassignment{#2}
{\setvalue{\??layerpreset#1}{\setupcurrentlayer[#2]}}
{\setvalue{\??layerpreset#1}{\csname\??layerpreset#2\endcsname}}}
@@ -510,6 +507,8 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doiflayerdataelse\doifelselayerdata
+
%D \macros
%D {flushlayer}
%D
@@ -610,7 +609,7 @@
% {\setlayoutcomponentattribute{\v!layer:#2}}%
% \resetlayoutcomponentattribute
% \ifx\p_pack_layers_option\v!test \ruledvbox \else \vbox \fi \ifx\p_pack_layers_method\v!overlay to \overlayheight \fi \layoutcomponentboxattribute
-% {\hbox \ifx\p_pack_layers_method\v!overlay to \overlaywidth \fi
+% {\hbox \ifx\p_pack_layers_method\v!overlay to \d_overlay_width \fi
% {\edef\currentlayer{#2\the\realpageno}% local
% \edef\p_pack_layers_position{\layerparameter\c!position}% local
% \ifx\p_pack_layers_position\v!yes
@@ -669,15 +668,15 @@
\pack_layers_positioned_box_nop
\fi
% todo: method=offset => overlayoffset right/down (handy for backgrounds with offset)
- \doifoverlayelse{#2}%
+ \doifelseoverlay{#2}%
{\setlayoutcomponentattribute{\v!layer:#2}}%
\resetlayoutcomponentattribute
% we have conflicting demands: some mechanisms want ll anchoring .. I need to figure this out
% and maybe we will have 'origin=bottom' or so
\setbox\nextbox
- \ifx\p_pack_layers_option\v!test \ruledvbox \else \vbox \fi \ifx\p_pack_layers_method\v!overlay to \overlayheight \fi \layoutcomponentboxattribute
+ \ifx\p_pack_layers_option\v!test \ruledvbox \else \vbox \fi \ifx\p_pack_layers_method\v!overlay to \d_overlay_height \fi \layoutcomponentboxattribute
{\pack_layers_top_fill
- \hbox \ifx\p_pack_layers_method\v!overlay to \overlaywidth \fi
+ \hbox \ifx\p_pack_layers_method\v!overlay to \d_overlay_width \fi
{\box\nextbox
\hss}%
\pack_layers_bottom_fill}%
@@ -687,7 +686,7 @@
\ifx\p_pack_layers_position\v!yes
\edef\p_pack_layers_region{\layerparameter\c!region}%
\ifx\p_pack_layers_region\empty \else
- \anch_mark_anchor_box\nextbox
+ \anch_mark_tagged_box\nextbox\layeranchor
\fi
\fi
\box\nextbox
@@ -724,12 +723,20 @@
\unexpanded\def\composedlayer#1{\flushlayer[#1]}
+% \unexpanded\def\tightlayer[#1]%
+% {\hbox
+% {\def\currentlayer{#1}% todo: left/right
+% \setbox\nextbox\emptybox
+% \hsize\layerparameter\c!width
+% \vsize\layerparameter\c!height
+% \composedlayer{#1}}}
+
\unexpanded\def\tightlayer[#1]%
{\hbox
{\def\currentlayer{#1}% todo: left/right
- \setbox\nextbox\emptybox % hoogte/breedte are \wd\nextbox/\ht\nextbox
- \hsize\layerparameter\c!width % \overlaywidth = \hsize
- \vsize\layerparameter\c!height % \overlaywheight = \vsize
+ \setbox\nextbox\emptybox
+ \d_overlay_width \layerparameter\c!width
+ \d_overlay_height\layerparameter\c!height
\composedlayer{#1}}}
\let\placelayer\flushlayer
diff --git a/tex/context/base/pack-mis.mkvi b/tex/context/base/pack-mis.mkvi
index 978cc120c..420f9440a 100644
--- a/tex/context/base/pack-mis.mkvi
+++ b/tex/context/base/pack-mis.mkvi
@@ -46,7 +46,7 @@
\unexpanded\def\pack_placement#tag%
{\bgroup
\edef\currentplacement{#tag}%
- \doifnextoptionalelse\pack_placement_yes\pack_placement_nop}
+ \doifelsenextoptionalcs\pack_placement_yes\pack_placement_nop}
\def\pack_placement_yes[#settings]%
{\setupcurrentplacement[#settings]%
@@ -67,6 +67,7 @@
%\ifinsidefloat \else
% \page_backgrounds_add_local_to_box\nextbox
%\fi
+\flushnotes % new per 2014-05-29 : todo: move them up in the mvl
\ifgridsnapping
\pack_placement_flush_grid_yes
\else
diff --git a/tex/context/base/pack-mrl.mkiv b/tex/context/base/pack-mrl.mkiv
index 7c3f08825..a97c9e6f8 100644
--- a/tex/context/base/pack-mrl.mkiv
+++ b/tex/context/base/pack-mrl.mkiv
@@ -40,7 +40,7 @@
\unexpanded\def\blackrule
{\hbox\bgroup
- \doifnextoptionalelse\pack_black_rule_pickup\pack_black_rule_indeed}
+ \doifelsenextoptionalcs\pack_black_rule_pickup\pack_black_rule_indeed}
\def\pack_black_rule_pickup[#1]%
{\setupcurrentblackrules[#1]%
@@ -96,7 +96,7 @@
\unexpanded\def\blackrules % probably never used
{\hbox\bgroup
- \doifnextoptionalelse\pack_black_rules_pickup\pack_black_rules_indeed}
+ \doifelsenextoptionalcs\pack_black_rules_pickup\pack_black_rules_indeed}
\def\pack_black_rules_pickup[#1]%
{\setupcurrentblackrules[#1]%
@@ -145,7 +145,8 @@
%D \showsetup{hl}
\unexpanded\def\pack_rule_vl_indeed#1#2#3%
- {\bgroup
+ {\dontleavehmode
+ \begingroup
\setbox\scratchbox\hbox
{\vrule
\s!width #1\linewidth
@@ -154,13 +155,14 @@
\dp\scratchbox\strutdp
\ht\scratchbox\strutht
\box\scratchbox
- \egroup}
+ \endgroup}
\def\pack_rule_vl[#1]%
{\pack_rule_vl_indeed{#1}{#1}{#1}}
\def\pack_rule_hl[#1]%
- {\hbox
+ {\dontleavehmode
+ \hbox
{\vrule
\s!width #1\emwidth
\s!height\linewidth
@@ -614,7 +616,7 @@
\fi
\doifelse{\directtextrulesparameter\c!depthcorrection}\v!on\pack_textrule_correct_depth_yes\pack_textrule_correct_depth_nop
\nointerlineskip
- \dontleavehmode\vbox
+ \noindent\vbox % was \dontleavehmode
{\color[\directtextrulesparameter\c!rulecolor]
{\hrule\s!depth\directtextrulesparameter\c!rulethickness\s!height\zeropoint\s!width\availablehsize}}}
{\pack_textrule_with_text{#1}}%
@@ -629,7 +631,7 @@
\def\pack_textrule_nop_indeed{\csname\??textrulealternatives\v!middle\endcsname}%
\csname\??textrulealternatives\v!top\endcsname{#1}%
\bgroup
- \doifsomething{\directtextrulesparameter\c!bodyfont}{\switchtobodyfont[\directtextrulesparameter\c!bodyfont]}}
+ \usebodyfontparameter\directtextrulesparameter}
\unexpanded\def\stoptextrule
{\par
diff --git a/tex/context/base/pack-obj.lua b/tex/context/base/pack-obj.lua
index 70876a346..d1cc5bafc 100644
--- a/tex/context/base/pack-obj.lua
+++ b/tex/context/base/pack-obj.lua
@@ -11,19 +11,21 @@ if not modules then modules = { } end modules ['pack-obj'] = {
reusable components.
--ldx]]--
-local commands, context = commands, context
+local context = context
-local allocate = utilities.storage.allocate
+local implement = interfaces.implement
-local collected = allocate()
-local tobesaved = allocate()
+local allocate = utilities.storage.allocate
-local jobobjects = {
+local collected = allocate()
+local tobesaved = allocate()
+
+local jobobjects = {
collected = collected,
tobesaved = tobesaved,
}
-job.objects = jobobjects
+job.objects = jobobjects
local function initializer()
collected = jobobjects.collected
@@ -32,45 +34,60 @@ end
job.register('job.objects.collected', tobesaved, initializer, nil)
-function jobobjects.save(tag,number,page)
+local function saveobject(tag,number,page)
local t = { number, page }
tobesaved[tag], collected[tag] = t, t
end
-function jobobjects.set(tag,number,page)
+local function setobject(tag,number,page)
collected[tag] = { number, page }
end
-function jobobjects.get(tag)
+local function getobject(tag)
return collected[tag] or tobesaved[tag]
end
-function jobobjects.number(tag,default)
+local function getobjectnumber(tag,default)
local o = collected[tag] or tobesaved[tag]
return o and o[1] or default
end
-function jobobjects.page(tag,default)
+local function getobjectpage(tag,default)
local o = collected[tag] or tobesaved[tag]
return o and o[2] or default
end
--- interface
+jobobjects.save = saveobject
+jobobjects.set = setobject
+jobobjects.get = getobject
+jobobjects.number = getobjectnumber
+jobobjects.page = getobjectpage
-commands.saveobject = jobobjects.save
-commands.setobject = jobobjects.set
+implement {
+ name = "saveobject",
+ actions = saveobject
+}
-function commands.objectnumber(tag,default)
- local o = collected[tag] or tobesaved[tag]
- context(o and o[1] or default)
-end
+implement {
+ name = "setobject",
+ actions = setobject,
+ arguments = { "string", "integer", "integer" }
+}
-function commands.objectpage(tag,default)
- local o = collected[tag] or tobesaved[tag]
- context(o and o[2] or default)
-end
+implement {
+ name = "objectnumber",
+ actions = { getobjectnumber, context },
+ arguments = { "string", "string" },
+}
-function commands.doifobjectreferencefoundelse(tag)
- commands.doifelse(collected[tag] or tobesaved[tag])
-end
+implement {
+ name = "objectpage",
+ actions = { getobjectpage, context },
+ arguments = { "string", "string" },
+}
+implement {
+ name = "doifelseobjectreferencefound",
+ actions = { jobobjects.get, commands.doifelse },
+ arguments = "string"
+}
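+
+-- A quick illustration (the tag is hypothetical, not from this patch): at the Lua
+-- end one can now say
+--
+--   local p = jobobjects.page("graphic::demo",0) -- 0 when the object is not (yet) known
+--
+-- while the implement blocks above expose these helpers at the TeX end as
+-- \clf_setobject, \clf_objectnumber, \clf_objectpage and \clf_doifelseobjectreferencefound.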
diff --git a/tex/context/base/pack-obj.mkiv b/tex/context/base/pack-obj.mkiv
index 356a0b7eb..605dd3b9e 100644
--- a/tex/context/base/pack-obj.mkiv
+++ b/tex/context/base/pack-obj.mkiv
@@ -366,7 +366,7 @@
% no undefined test ! ! ! ! (pdftex fails on undefined objects)
\unexpanded\def\pack_objects_register_reference#1#2#3{\normalexpanded{\noexpand\ctxlatecommand{saveobject("#1::#2",#3,\noexpand\the\realpageno)}}}
-\unexpanded\def\pack_objects_overload_reference#1#2#3{\ctxcommand{setobject("#1::#2",#3,\the\realpageno)}}
+\unexpanded\def\pack_objects_overload_reference#1#2#3{\clf_setobject{#1::#2}#3 \realpageno\relax}
\unexpanded\def\dosetobjectreference
{\ifcase\crossreferenceobject
@@ -382,8 +382,8 @@
\def\defaultobjectreference#1#2{0} % driver dependent
\def\defaultobjectpage #1#2{\realfolio}
-\unexpanded\def\dogetobjectreference #1#2#3{\xdef#3{\ctxcommand{objectnumber("#1::#2","\defaultobjectreference{#1}{#2}")}}}
-\unexpanded\def\dogetobjectreferencepage#1#2#3{\xdef#3{\ctxcommand{objectpage("#1::#2","\defaultobjectpage{#1}{#2}")}}}
+\unexpanded\def\dogetobjectreference #1#2#3{\xdef#3{\clf_objectnumber{#1::#2}{\defaultobjectreference{#1}{#2}}}}
+\unexpanded\def\dogetobjectreferencepage#1#2#3{\xdef#3{\clf_objectpage {#1::#2}{\defaultobjectpage {#1}{#2}}}}
\unexpanded\def\setobject {\driverreferenced\pack_objects_set1}
\unexpanded\def\settightobject{\driverreferenced\pack_objects_set0}
@@ -399,14 +399,17 @@
%D \doifobjectreferencefoundelse{class}{object}{do then}{do else}
%D \stoptyping
-\unexpanded\def\doifobjectfoundelse#1#2%
+\unexpanded\def\doifelseobjectfound#1#2%
{\ifcsname\??objects#1::#2\endcsname
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
-\unexpanded\def\doifobjectreferencefoundelse#1#2%
- {\ctxcommand{doifobjectreferencefoundelse("#1::#2")}}
+\unexpanded\def\doifelseobjectreferencefound#1#2%
+ {\clf_doifelseobjectreferencefound{#1::#2}}
+
+\let\doifobjectfoundelse \doifelseobjectfound
+\let\doifobjectreferencefoundelse\doifelseobjectreferencefound
\protect \endinput
diff --git a/tex/context/base/pack-pos.mkiv b/tex/context/base/pack-pos.mkiv
index f92ceb78a..c52c01ca4 100644
--- a/tex/context/base/pack-pos.mkiv
+++ b/tex/context/base/pack-pos.mkiv
@@ -62,7 +62,7 @@
\edef\currentpositioning{#1}%
\setupcurrentpositioning[#2]%
\else\iffirstargument
- \doifassignmentelse{#1}
+ \doifelseassignment{#1}
{\let\currentpositioning\empty
\setupcurrentpositioning[#1]}%
{\edef\currentpositioning{#1}}%
diff --git a/tex/context/base/pack-rul.lua b/tex/context/base/pack-rul.lua
index 329ea63b8..151642c3a 100644
--- a/tex/context/base/pack-rul.lua
+++ b/tex/context/base/pack-rul.lua
@@ -14,6 +14,14 @@ if not modules then modules = { } end modules ['pack-rul'] = {
-- challenge: adapt glue_set
-- setfield(h,"glue_set", getfield(h,"glue_set") * getfield(h,"width")/maxwidth -- interesting ... doesn't matter much
+-- \framed[align={lohi,middle}]{$x$}
+-- \framed[align={lohi,middle}]{$ $}
+-- \framed[align={lohi,middle}]{\hbox{ }}
+-- \framed[align={lohi,middle}]{\hbox{}}
+-- \framed[align={lohi,middle}]{$\hskip2pt$}
+
+local type = type
+
local hlist_code = nodes.nodecodes.hlist
local vlist_code = nodes.nodecodes.vlist
local box_code = nodes.listcodes.box
@@ -21,15 +29,26 @@ local line_code = nodes.listcodes.line
local texsetdimen = tex.setdimen
local texsetcount = tex.setcount
-local texgetbox = tex.getbox
-local hpack = nodes.hpack
-local free = nodes.free
-local copy = nodes.copy_list
-local traverse_id = nodes.traverse_id
-local node_dimensions = nodes.dimensions
-function commands.doreshapeframedbox(n)
- local box = texgetbox(n)
+local implement = interfaces.implement
+
+local nuts = nodes.nuts
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getlist = nuts.getlist
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getbox = nuts.getbox
+
+local hpack = nuts.hpack
+local traverse_id = nuts.traverse_id
+local node_dimensions = nuts.dimensions
+
+local function doreshapeframedbox(n)
+ local box = getbox(n)
local noflines = 0
local firstheight = nil
local lastdepth = nil
@@ -38,27 +57,27 @@ function commands.doreshapeframedbox(n)
local maxwidth = 0
local totalwidth = 0
local averagewidth = 0
- local boxwidth = box.width
+ local boxwidth = getfield(box,"width")
if boxwidth ~= 0 then -- and h.subtype == vlist_code
- local list = box.list
+ local list = getlist(box)
if list then
local function check(n,repack)
if not firstheight then
- firstheight = n.height
+ firstheight = getfield(n,"height")
end
- lastdepth = n.depth
+ lastdepth = getfield(n,"depth")
noflines = noflines + 1
- local l = n.list
+ local l = getlist(n)
if l then
if repack then
- local subtype = n.subtype
+ local subtype = getsubtype(n)
if subtype == box_code or subtype == line_code then
- lastlinelength = node_dimensions(l,n.dir) -- used to be: hpack(copy(l)).width
+ lastlinelength = node_dimensions(l,getfield(n,"dir")) -- used to be: hpack(copy(l)).width
else
- lastlinelength = n.width
+ lastlinelength = getfield(n,"width")
end
else
- lastlinelength = n.width
+ lastlinelength = getfield(n,"width")
end
if lastlinelength > maxwidth then
maxwidth = lastlinelength
@@ -80,33 +99,34 @@ function commands.doreshapeframedbox(n)
-- vdone = true
end
if not firstheight then
- -- done
+ -- done)
elseif maxwidth ~= 0 then
if hdone then
for h in traverse_id(hlist_code,list) do
- local l = h.list
+ local l = getlist(h)
if l then
- local subtype = h.subtype
+ local subtype = getsubtype(h)
if subtype == box_code or subtype == line_code then
- h.list = hpack(l,maxwidth,'exactly',h.dir)
- h.shift = 0 -- needed for display math
+ l = hpack(l,maxwidth,'exactly',getfield(h,"dir")) -- multiple return values
+ setfield(h,"list",l)
+ setfield(h,"shift",0) -- needed for display math, so no width check possible
end
- h.width = maxwidth
+ setfield(h,"width",maxwidth)
end
end
- box.width = maxwidth -- moved
- averagewidth = noflines > 0 and totalwidth/noflines or 0
end
-- if vdone then
-- for v in traverse_id(vlist_code,list) do
- -- local width = n.width
+ -- local width = getfield(n,"width")
-- if width > maxwidth then
- -- v.width = maxwidth
+ -- setfield(v,"width",maxwidth)
-- end
-- end
-- end
- box.width = maxwidth
+ setfield(box,"width",maxwidth)
averagewidth = noflines > 0 and totalwidth/noflines or 0
+ else -- e.g. empty math {$ $} or \hbox{} or ...
+                setfield(box,"width",0)
end
end
end
@@ -118,19 +138,19 @@ function commands.doreshapeframedbox(n)
texsetdimen("global","framedaveragewidth",averagewidth)
end
-function commands.doanalyzeframedbox(n)
- local box = texgetbox(n)
+local function doanalyzeframedbox(n)
+ local box = getbox(n)
local noflines = 0
local firstheight = nil
local lastdepth = nil
- if box.width ~= 0 then
- local list = box.list
+ if getfield(box,"width") ~= 0 then
+ local list = getlist(box)
if list then
local function check(n)
if not firstheight then
- firstheight = n.height
+ firstheight = getfield(n,"height")
end
- lastdepth = n.depth
+ lastdepth = getfield(n,"depth")
noflines = noflines + 1
end
for h in traverse_id(hlist_code,list) do
@@ -145,3 +165,46 @@ function commands.doanalyzeframedbox(n)
texsetdimen("global","framedfirstheight",firstheight or 0)
texsetdimen("global","framedlastdepth",lastdepth or 0)
end
+
+implement { name = "doreshapeframedbox", actions = doreshapeframedbox, arguments = "integer" }
+implement { name = "doanalyzeframedbox", actions = doanalyzeframedbox, arguments = "integer" }
+
+function nodes.maxboxwidth(box)
+ local boxwidth = getfield(box,"width")
+ if boxwidth == 0 then
+ return 0
+ end
+ local list = getlist(box)
+ if not list then
+ return 0
+ end
+ if getid(box) == hlist_code then
+ return boxwidth
+ end
+ local lastlinelength = 0
+ local maxwidth = 0
+ local function check(n,repack)
+ local l = getlist(n)
+ if l then
+ if repack then
+ local subtype = getsubtype(n)
+ if subtype == box_code or subtype == line_code then
+ lastlinelength = node_dimensions(l,getfield(n,"dir"))
+ else
+ lastlinelength = getfield(n,"width")
+ end
+ else
+ lastlinelength = getfield(n,"width")
+ end
+ if lastlinelength > maxwidth then
+ maxwidth = lastlinelength
+ end
+ end
+ end
+ for h in traverse_id(hlist_code,list) do -- no dir etc needed
+ check(h,true)
+ end
+ for v in traverse_id(vlist_code,list) do -- no dir etc needed
+ check(v,false)
+ end
+    return maxwidth
+end
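+
+-- A possible usage sketch (not part of this patch; box register 0 is arbitrary):
+--
+--   local b = nodes.nuts.getbox(0)     -- some already filled vbox register (a nut)
+--   if b then
+--       local w = nodes.maxboxwidth(b) -- widest (re)packed line inside it
+--   end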
diff --git a/tex/context/base/pack-rul.mkiv b/tex/context/base/pack-rul.mkiv
index 377d39499..5f72a1113 100644
--- a/tex/context/base/pack-rul.mkiv
+++ b/tex/context/base/pack-rul.mkiv
@@ -43,23 +43,23 @@
\def\pack_framed_setup_line_width[#1]%
{\assigndimension{#1}\linewidth{.2\points}{.4\points}{.6\points}}
-%D \macros
-%D {setupscreens}
-%D
-%D Sort of obsolete:
-%D
-%D \showsetup{setupscreens}
-
-\installcorenamespace{screens}
-
-\installsetuponlycommandhandler \??screens {screens}
-
-\appendtoks
- \edef\defaultbackgroundscreen{\directscreensparameter\c!screen}
-\to \everysetupscreens
-
-\setupscreens
- [\c!screen=.90] % was .95 but that's hardly visible
+% %D \macros
+% %D {setupscreens}
+% %D
+% %D Sort of obsolete:
+% %D
+% %D \showsetup{setupscreens}
+%
+% \installcorenamespace{screens}
+%
+% \installsetuponlycommandhandler \??screens {screens}
+%
+% \appendtoks
+% \edef\defaultbackgroundscreen{\directscreensparameter\c!screen}
+% \to \everysetupscreens
+%
+% \setupscreens
+% [\c!screen=.90] % was .95 but that's hardly visible
%D The parameter handler:
@@ -155,8 +155,6 @@
%\c!foregroundcolor=,
%\c!foregroundstyle=,
%\c!background=,
- %\c!backgroundscreen=,
- \c!backgroundscreen=\defaultbackgroundscreen,
%\c!backgroundcolor=,
\c!backgroundoffset=\zeropoint,
%\c!framecolor=,
@@ -238,7 +236,6 @@
\let\p_framed_lines \empty
\let\p_framed_empty \empty
\let\p_framed_backgroundcolor \empty
-\let\p_framed_backgroundscreen\empty
\let\p_framed_framecolor \empty
\let\p_framed_component \empty
\let\p_framed_region \empty
@@ -352,14 +349,14 @@
%D The oval box is drawn using a special macro, depending on
%D the driver in use.
-\def\pack_framed_background_box_gray % avoid black rules when no gray
- {\edef\p_framed_backgroundscreen{\framedparameter\c!backgroundscreen}%
- \ifx\p_framed_backgroundscreen\empty \else
- \pack_framed_background_box_gray_indeed
- \fi}
-
-\def\pack_framed_background_box_gray_indeed % can be more direct but who cares, just compatibility
- {\colored[s=\p_framed_backgroundscreen]{\pack_framed_filled_box}}
+% \def\pack_framed_background_box_gray % avoid black rules when no gray
+% {\edef\p_framed_backgroundscreen{\framedparameter\c!backgroundscreen}%
+% \ifx\p_framed_backgroundscreen\empty \else
+% \pack_framed_background_box_gray_indeed
+% \fi}
+%
+% \def\pack_framed_background_box_gray_indeed % can be more direct but who cares, just compatibility
+% {\colored[s=\p_framed_backgroundscreen]{\pack_framed_filled_box}}
%D It won't be a surprise that we not only provide gray boxes, but also colored
%D ones. Here it is:
@@ -429,14 +426,36 @@
%D
%D The resulting box is lowered to the right depth.
-\def\overlaywidth {\the\hsize\space} % We preset the variables
-\def\overlayheight {\the\vsize\space} % to some reasonable default
-\def\overlaydepth {0pt } % values. The attributes
-\let\overlayoffset \overlaydepth % of the frame can be (are)
-\let\overlaylinewidth \overlaydepth % set somewhere else.
+%def\overlaywidth {\the\hsize\space} % We preset the variables
+%def\overlayheight {\the\vsize\space} % to some reasonable default
+%def\overlaydepth {0pt } % values. The attributes
+%let\overlayoffset \overlaydepth % of the frame can be (are)
+%let\overlaylinewidth \overlaydepth % set somewhere else.
\let\overlaycolor \empty
\let\overlaylinecolor \empty
+\newdimen\d_overlay_width
+\newdimen\d_overlay_height
+\newdimen\d_overlay_depth
+\newdimen\d_overlay_offset
+\newdimen\d_overlay_linewidth
+
+% expandable ... in a future version the space will go (in mp one can use Overlay*)
+
+\def\overlaywidth {\the\d_overlay_width \space} % We preset the variables
+\def\overlayheight {\the\d_overlay_height \space} % to some reasonable default
+\def\overlaydepth      {\the\d_overlay_depth    \space} % values. The attributes
+\def\overlayoffset {\the\d_overlay_offset \space} % of the frame can be (are)
+\def\overlaylinewidth {\the\d_overlay_linewidth\space} % set somewhere else.
+
+% public but kind of protected
+
+\def\usedoverlaywidth {\dimexpr\d_overlay_width \relax}
+\def\usedoverlayheight {\dimexpr\d_overlay_height \relax}
+\def\usedoverlaydepth {\dimexpr\d_overlay_depth \relax}
+\def\usedoverlayoffset {\dimexpr\d_overlay_offset \relax}
+\def\usedoverlaylinewidth{\dimexpr\d_overlay_linewidth\relax}
+
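+% For instance (an illustrative sketch, not part of this patch), inside an overlay
+% one can now say
+%
+%   \hbox to \usedoverlaywidth{\hss whatever\hss}
+%   \scratchdimen\dimexpr\usedoverlayheight+\usedoverlaydepth\relax
+%
+% without having to worry about the trailing space in \overlaywidth and friends.
+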
%D The next register is used to initialize overlays.
\newtoks\everyoverlay
@@ -452,8 +471,8 @@
\to \everyoverlay
\prependtoks
- \hsize\overlaywidth
- \vsize\overlayheight
+ \hsize\d_overlay_width
+ \vsize\d_overlay_height
\to \everyoverlay
\unexpanded\def\defineoverlay
@@ -475,8 +494,8 @@
\egroup
\setlayoutcomponentattribute{\v!overlay:#1}%
\setbox\scratchbox\hbox \layoutcomponentboxattribute
- {\kern -.5\dimexpr\wd\scratchbox-\d_framed_target_wd\relax % was \overlaywidth
- \raise-.5\dimexpr\ht\scratchbox-\d_framed_target_ht\relax % not \overlayheight !
+ {\kern -.5\dimexpr\wd\scratchbox-\d_framed_target_wd\relax % was \d_overlay_width
+ \raise-.5\dimexpr\ht\scratchbox-\d_framed_target_ht\relax % not \d_overlay_height !
\box\scratchbox}%
\wd\scratchbox\d_framed_target_wd
\ht\scratchbox\d_framed_target_ht
@@ -490,19 +509,21 @@
\unexpanded\def\overlayfakebox
{\hbox
{\setbox\scratchbox\emptyhbox
- \wd\scratchbox\overlaywidth
- \ht\scratchbox\overlayheight
+ \wd\scratchbox\d_overlay_width
+ \ht\scratchbox\d_overlay_height
\box\scratchbox}}
%D For testing we provide:
-\def\doifoverlayelse#1% only tests external overlays
+\def\doifelseoverlay#1% only tests external overlays
{\ifcsname\??overlay#1\endcsname
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
+\let\doifoverlayelse\doifelseoverlay
+
%D The content of the box will be (temporary) saved in a box. We also have an
%D extra box for backgrounds.
@@ -599,16 +620,28 @@
\hss
\egroup}}
+% \def\pack_framed_overlay_initialize_indeed
+% {\edef\overlaywidth {\the\d_framed_target_wd\space}%
+% \edef\overlayheight {\the\dimexpr\d_framed_target_ht+\d_framed_target_dp\relax\space}%
+% \edef\overlaydepth {\the\d_framed_target_dp\space}%
+% \edef\overlaycolor {\framedparameter\c!backgroundcolor}% let ?
+% \edef\overlaylinecolor{\framedparameter\c!framecolor}% only needed for layers
+% \edef\overlaylinewidth{\the\d_framed_linewidth\space}%
+% %\edef\overlaycorner {\framedparameter\c!backgroundcorner}%
+% %\edef\overlayradius {\framedparameter\c!backgroundradius}%
+% \edef\overlayoffset {\the\framedbackgroundoffset\space}% \backgroundoffset % we steal this one
+% \let\pack_framed_overlay_initialize\relax}
+
\def\pack_framed_overlay_initialize_indeed
- {\edef\overlaywidth {\the\d_framed_target_wd\space}%
- \edef\overlayheight {\the\dimexpr\d_framed_target_ht+\d_framed_target_dp\relax\space}%
- \edef\overlaydepth {\the\d_framed_target_dp\space}%
+ {\d_overlay_width \d_framed_target_wd
+ \d_overlay_height \dimexpr\d_framed_target_ht+\d_framed_target_dp\relax
+ \d_overlay_depth \d_framed_target_dp
+ \d_overlay_linewidth \d_framed_linewidth
+ \d_overlay_offset \framedbackgroundoffset\relax
\edef\overlaycolor {\framedparameter\c!backgroundcolor}% let ?
\edef\overlaylinecolor{\framedparameter\c!framecolor}% only needed for layers
- \edef\overlaylinewidth{\the\d_framed_linewidth\space}%
%\edef\overlaycorner {\framedparameter\c!backgroundcorner}%
%\edef\overlayradius {\framedparameter\c!backgroundradius}%
- \edef\overlayoffset {\the\framedbackgroundoffset\space}% \backgroundoffset % we steal this one
\let\pack_framed_overlay_initialize\relax}
%D One can explictly insert the foreground box. For that purpose we introduce the
@@ -778,6 +811,35 @@
\newcount\c_pack_framed_nesting
+% to be tested (slightly more efficient):
+%
+% \unexpanded\def\pack_frame_common % #1 #2
+% {\bgroup
+% \advance\c_pack_framed_nesting\plusone
+% \expandafter\let\csname\??framed>\the\c_pack_framed_nesting:\s!parent\endcsname\??framed
+% \edef\currentframed{>\the\c_pack_framed_nesting}%
+% \pack_framed_initialize
+% \bgroup
+% \doifnextoptionalcselse} % #1 #2
+%
+% \unexpanded\def\framed {\pack_frame_common\pack_framed_process_framed_pickup\pack_framed_process_indeed}
+% \unexpanded\def\startframed{\pack_frame_common\pack_framed_start_framed_pickup \pack_framed_start_indeed }
+%
+% \def\pack_framed_process_framed_pickup[#1]%
+% {\setupcurrentframed[#1]%
+% \pack_framed_process_indeed}
+%
+% \def\pack_framed_start_framed_pickup[#1]%
+% {\setupcurrentframed[#1]% here !
+% \secondargumenttrue % dirty trick
+% \pack_framed_start_framed_indeed}
+%
+% \def\pack_framed_start_framed_indeed
+% {\pack_framed_process_indeed
+% \bgroup}
+%
+% no longer .. we also accept \startframed[tag]
+
\unexpanded\def\pack_framed_process_framed[#1]%
{\bgroup
\iffirstargument % faster
@@ -793,20 +855,63 @@
\pack_framed_initialize
\dosingleempty\pack_framed_process_framed}
+% \unexpanded\def\startframed
+% {\dosingleempty\pack_framed_start_framed}
+%
+% \def\pack_framed_start_framed[#1]%
+% {\bgroup
+% \advance\c_pack_framed_nesting\plusone
+% \expandafter\let\csname\??framed>\the\c_pack_framed_nesting:\s!parent\endcsname\??framed
+% \edef\currentframed{>\the\c_pack_framed_nesting}%
+% \pack_framed_initialize
+% \bgroup
+% \iffirstargument
+% \secondargumenttrue % dirty trick
+% \setupcurrentframed[#1]% here !
+% \fi
+% \pack_framed_process_indeed
+% \bgroup
+% \ignorespaces}
+
\unexpanded\def\startframed
{\dosingleempty\pack_framed_start_framed}
\def\pack_framed_start_framed[#1]%
{\bgroup
- \advance\c_pack_framed_nesting\plusone
+ \doifelseassignment{#1}\pack_framed_start_framed_yes\pack_framed_start_framed_nop{#1}}
+
+\def\pack_framed_start_framed_yes#1%
+ {\advance\c_pack_framed_nesting\plusone
\expandafter\let\csname\??framed>\the\c_pack_framed_nesting:\s!parent\endcsname\??framed
\iffirstargument\secondargumenttrue\fi % dirty trick
\edef\currentframed{>\the\c_pack_framed_nesting}%
\pack_framed_initialize
- \pack_framed_process_framed[#1]% can be inlined
- \bgroup}
+ \bgroup
+ \iffirstargument
+ \secondargumenttrue % dirty trick
+ \setupcurrentframed[#1]% here !
+ \fi
+ \pack_framed_process_indeed
+ \bgroup
+ \ignorespaces}
+
+\def\pack_framed_start_framed_nop#1%
+ {\edef\currentframed{#1}%
+ \dosingleempty\pack_framed_start_framed_nop_indeed}
-\let\stopframed\egroup
+\def\pack_framed_start_framed_nop_indeed[#1]%
+ {\pack_framed_initialize
+ \bgroup
+ \setupcurrentframed[#1]% here !
+ \pack_framed_process_indeed
+ \bgroup
+ \ignorespaces}
+
+% till here
+
+\unexpanded\def\stopframed
+ {\removeunwantedspaces
+ \egroup}
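+
+% So, assuming a framed instance defined elsewhere (the instance name is only an
+% illustration, not part of this patch):
+%
+%   \defineframed[notabene][width=3cm,frame=on]
+%
+%   \startframed[width=3cm,frame=on]   some text \stopframed
+%   \startframed[notabene]             some text \stopframed
+%   \startframed[notabene][offset=2pt] some text \stopframed
+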
\unexpanded\def\normalframedwithsettings[#1]%
{\bgroup
@@ -1304,10 +1409,19 @@
\def\pack_framed_restart
{\aftergroup\pack_framed_finish}
-\def\pack_framed_do_top {\raggedtopcommand\framedparameter\c!top}
-\def\pack_framed_do_bottom{\framedparameter\c!bottom\raggedbottomcommand}
+\def\pack_framed_do_top
+ {\raggedtopcommand
+ \framedparameter\c!top
+ \edef\p_blank{\framedparameter\c!blank}%
+ \ifx\p_blank\v!yes\else % auto or no
+ \doinhibitblank
+ \fi}
-%D Carefull analysis of this macro will learn us that not all branches in the last
+\def\pack_framed_do_bottom
+ {\framedparameter\c!bottom
+ \raggedbottomcommand}
+
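+% The blank check in \pack_framed_do_top means that one can for instance say
+% (illustrative, not from this patch):
+%
+%   \framed[align=normal,width=4cm,blank=yes]{line one\blank line two}
+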
+%D Careful analysis of this macro will teach us that not all branches in the last
%D conditionals can be encountered, that is, some assignments to \type{\next} will
%D never occur. Nevertheless we implement the whole scheme, if not for future
%D extensions.
@@ -1384,10 +1498,20 @@
\pack_framed_reshape_reset
\fi}
+\def\pack_framed_profile_box
+ {\profilegivenbox\p_profile\b_framed_normal
+ \setbox\b_framed_normal\vbox{\unvbox\b_framed_normal}}
+
\unexpanded\def\pack_framed_finish
- {\pack_framed_stop_orientation % hm, wrong place ! should rotate the result (after reshape)
+ {%\pack_framed_stop_orientation % hm, wrong place ! should rotate the result (after reshape) .. moved down
\pack_framed_locator_before\p_framed_location
\ifconditional\c_framed_has_format
+ \ifconditional\c_framed_has_height \else
+ \edef\p_profile{\framedparameter\c!profile}%
+ \ifx\p_profile\empty\else
+ \pack_framed_profile_box
+ \fi
+ \fi
\ifx\p_framed_autowidth\v!force
\pack_framed_finish_a
\else\ifx\localwidth\v!fit
@@ -1417,6 +1541,7 @@
\ifx\p_framed_empty\v!yes
\pack_framed_fake_box
\fi
+ \pack_framed_stop_orientation % moved here at 2014-05-25
\iftrialtypesetting \else
\edef\p_framed_region{\framedparameter\c!region}%
\ifx\p_framed_region\v!yes % maybe later named
@@ -1910,10 +2035,10 @@
%D \stoplinecorrection
%D
%D \startbuffer
-%D \framed[strut=nee,offset=.5cm] {rule based learning}
-%D \framed[strut=nee,offset=0cm] {rule based learning}
-%D \framed[strut=nee,offset=none] {rule based learning}
-%D \framed[strut=nee,offset=overlay]{rule based learning}
+%D \framed[strut=no,offset=.5cm] {rule based learning}
+%D \framed[strut=no,offset=0cm] {rule based learning}
+%D \framed[strut=no,offset=none] {rule based learning}
+%D \framed[strut=no,offset=overlay]{rule based learning}
%D \stopbuffer
%D
%D \typebuffer
@@ -1923,9 +2048,9 @@
%D \stoplinecorrection
%D
%D \startbuffer
-%D \framed[width=3cm,align=left] {rule\\based\\learning}
-%D \framed[width=3cm,align=middle] {rule\\based\\learning}
-%D \framed[width=3cm,align=right] {rule\\based\\learning}
+%D \framed[width=3cm,align=left] {rule\\based\\learning}
+%D \framed[width=3cm,align=middle] {rule\\based\\learning}
+%D \framed[width=3cm,align=right] {rule\\based\\learning}
%D \framed[width=fit,align=middle] {rule\\based\\learning}
%D \stopbuffer
%D
@@ -1992,7 +2117,6 @@
% \vbox{\hbox{x}}
% \stopTEXpage
-
% \def\pack_framed_forgetall{\forgetall}
\def\pack_framed_set_foregroundcolor
@@ -2001,14 +2125,15 @@
\def\pack_framed_do_setups
{\ifx\p_framed_setups\empty \else
- \setups[\p_framed_setups]% \texsetup
+ \setups[\p_framed_setups]% \texsetup (or only one!)
+ % \fastsetup\p_framed_setup % singular would have been better
\fi}
\def\pack_framed_format_format_yes
{\vbox to \d_framed_height
\bgroup
\let\postprocessframebox\relax
-% \pack_framed_forgetall
+ % \pack_framed_forgetall
\iftrialtypesetting \else
\pack_framed_set_foregroundcolor
\fi
@@ -2019,7 +2144,7 @@
\raggedcommand
\pack_framed_do_top
\bgroup
-\synchronizeinlinedirection
+ \synchronizeinlinedirection
\localbegstrut
\aftergroup\localendstrut
\aftergroup\pack_framed_do_bottom
@@ -2030,7 +2155,7 @@
{\vbox to \d_framed_height
\bgroup
\let\postprocessframebox\relax
-% \pack_framed_forgetall
+ % \pack_framed_forgetall
\iftrialtypesetting \else
\pack_framed_set_foregroundcolor
\fi
@@ -2041,7 +2166,7 @@
\raggedcenter
\vss
\bgroup
-\synchronizeinlinedirection
+ \synchronizeinlinedirection
\localbegstrut
\aftergroup\localendstrut
\aftergroup\vss
@@ -2052,7 +2177,7 @@
{\vbox to \d_framed_height
\bgroup
\let\postprocessframebox\relax
-% \pack_framed_forgetall
+ % \pack_framed_forgetall
\iftrialtypesetting \else
\pack_framed_set_foregroundcolor
\fi
@@ -2064,7 +2189,7 @@
\aftergroup\localendstrut
\aftergroup\vss
\aftergroup\egroup
-\synchronizeinlinedirection
+ \synchronizeinlinedirection
\localbegstrut
\doformatonelinerbox}
@@ -2072,7 +2197,7 @@
{\vbox
\bgroup
\let\postprocessframebox\relax
-% \pack_framed_forgetall
+ % \pack_framed_forgetall
\iftrialtypesetting \else
\pack_framed_set_foregroundcolor
\fi
@@ -2082,7 +2207,7 @@
\raggedcommand
\pack_framed_do_top
\bgroup
-\synchronizeinlinedirection
+ \synchronizeinlinedirection
\localbegstrut
\aftergroup\localendstrut
\aftergroup\pack_framed_do_bottom
@@ -2093,7 +2218,7 @@
{\vbox to \d_framed_height
\bgroup
\let\postprocessframebox\relax
-% \pack_framed_forgetall
+ % \pack_framed_forgetall
\iftrialtypesetting \else
\pack_framed_set_foregroundcolor
\fi
@@ -2106,7 +2231,7 @@
\hbox
\bgroup
\aftergroup\egroup
-\synchronizeinlinedirection
+ \synchronizeinlinedirection
\localstrut
\doformatonelinerbox}
@@ -2114,13 +2239,13 @@
{\hbox to \d_framed_width
\bgroup
\let\postprocessframebox\relax
-% \pack_framed_forgetall
+ % \pack_framed_forgetall
\iftrialtypesetting \else
\pack_framed_set_foregroundcolor
\fi
\pack_framed_do_setups
\hss
-\synchronizeinlinedirection
+ \synchronizeinlinedirection
\localstrut
\bgroup
\aftergroup\hss
@@ -2135,7 +2260,7 @@
\fi
\let\postprocessframebox\relax
\pack_framed_do_setups
-\synchronizeinlinedirection
+ \synchronizeinlinedirection
\localstrut
\doformatonelinerbox}
@@ -2215,8 +2340,8 @@
\framedmaxwidth \zeropoint
\framedaveragewidth\zeropoint}
-\def\pack_framed_reshape_process{\ifvbox\b_framed_normal\ctxcommand{doreshapeframedbox(\number\b_framed_normal)}\fi}
-\def\pack_framed_reshape_analyze{\ifvbox\b_framed_normal\ctxcommand{doanalyzeframedbox(\number\b_framed_normal)}\fi}
+\def\pack_framed_reshape_process{\ifvbox\b_framed_normal\clf_doreshapeframedbox\b_framed_normal\relax\fi}
+\def\pack_framed_reshape_analyze{\ifvbox\b_framed_normal\clf_doanalyzeframedbox\b_framed_normal\relax\fi}
% torture test / strange case (much depth) / method 2 needed
%
@@ -2514,7 +2639,6 @@
%\c!foregroundstyle=,
%\c!background=,
%\c!backgroundcolor=,
- \c!backgroundscreen=\defaultbackgroundscreen,
\c!linecorrection=\v!on,
\c!depthcorrection=\v!on,
\c!margin=\v!standard]
@@ -2544,7 +2668,7 @@
\dodoubleempty\pack_framed_text_start_indeed}
\def\pack_framed_text_start_indeed[#1][#2]%
- {\doifassignmentelse{#1}
+ {\doifelseassignment{#1}
{\pack_framed_text_start_continue\empty{#1}}
{\pack_framed_text_start_continue{#1}{#2}}}
@@ -2559,15 +2683,30 @@
\startboxedcontent
\hsize\localhsize
% \insidefloattrue % ? better
- \normalexpanded{\switchtobodyfont[\framedtextparameter\c!bodyfont]}%
+ \usebodyfontparameter\framedtextparameter
\letframedtextparameter\c!strut\v!no
\inheritedframedtextframed\bgroup
\let\\=\endgraf
\framedtextparameter\c!inner % oud spul
- \doif{\framedtextparameter\c!depthcorrection}\v!on\pack_framed_text_start_depth_correction
+ \edef\p_framed_text_depthcorrection{\framedtextparameter\c!depthcorrection}%
+ \ifx\p_framed_text_depthcorrection\v!on
+ \pack_framed_text_start_depth_correction
+ \else
+ \bgroup
+ \fi
+ \vskip-\strutdp % brrr why is this needed ... needs to be sorted out, see testcase 1
\doinhibitblank
- \setupindenting[\framedtextparameter\c!indenting]%
- \useframedtextstyleandcolor\c!style\c!color}
+ \useindentingparameter\framedtextparameter
+ \useframedtextstyleandcolor\c!style\c!color
+ \ignorespaces}
+
+% testcase 1:
+%
+% \showstruts
+% \startframedtext[align={normal,tolerant},offset=0pt] \input tufte \stopframedtext
+% \startframedtext[align={normal,tolerant},offset=0pt,depthcorrection=off] \input tufte \stopframedtext
+% \startframedtext[align={normal,tolerant},offset=0pt,depthcorrection=off] \inframed{x} \stopframedtext
+% \framed[align={normal,tolerant},offset=0pt]{\input tufte }
%D The \type {none} option is handy for nested usage, as in the presentation
%D styles, where we don't want interference.
@@ -2577,7 +2716,11 @@
\unexpanded\def\pack_framed_text_stop % no \baselinecorrection, see faq docs
{\endgraf
\removelastskip
- \doif{\framedtextparameter\c!depthcorrection}\v!on\pack_framed_text_stop_depth_correction
+ \ifx\p_framed_text_depthcorrection\v!on
+ \pack_framed_text_stop_depth_correction
+ \else
+ \egroup
+ \fi
\stopboxedcontent
\ifconditional\c_framed_text_location_none
\egroup
@@ -2656,7 +2799,7 @@
\dosingleempty\pack_framed_text_start_direct}
\def\pack_framed_text_start_direct[#1]%
- {\normalexpanded{\switchtobodyfont[\framedtextparameter\c!bodyfont]}%
+ {\usebodyfontparameter\framedtextparameter
\letframedtextparameter\c!strut\v!no
\iffirstargument
\setupcurrentframedtext[#1]%
diff --git a/tex/context/base/page-app.mkiv b/tex/context/base/page-app.mkiv
index e4858d48f..2e81f7537 100644
--- a/tex/context/base/page-app.mkiv
+++ b/tex/context/base/page-app.mkiv
@@ -139,6 +139,9 @@
[TEXpage]
[\c!align=\v!normal] % needed, else problems !
+\unexpanded\def\setupTEXpage
+ {\setupfittingpage[TEXpage]}
+
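+% e.g. (illustrative, not part of this patch):
+%
+%   \setupTEXpage[offset=2pt]
+%   \startTEXpage \framed{whatever} \stopTEXpage
+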
%D For Mojca:
%D
%D \starttyping
diff --git a/tex/context/base/page-bck.mkiv b/tex/context/base/page-bck.mkiv
index 0246e8eb6..01de48e8c 100644
--- a/tex/context/base/page-bck.mkiv
+++ b/tex/context/base/page-bck.mkiv
@@ -55,7 +55,7 @@
%D This is the only spot where we have a low level dependency on the way
%D parent chains are defined but we want the speed.
-\def\page_backgrounds_check_background
+\unexpanded\def\page_backgrounds_check_background
{\ifcsname\??framed\currentotrbackground:\c!background\endcsname
\edef\page_background_temp{\csname\??framed\currentotrbackground:\c!background\endcsname}%
\ifx\page_background_temp\empty
@@ -140,13 +140,15 @@
{\edef\currentotrbackground{\??layoutbackgrounds#1}%
\page_backgrounds_check_background}
-\def\doifsomebackgroundelse#1%
+\def\doifelsesomebackground#1%
{\ifcsname\??layoutbackgrounds#1\endcsname
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
+\let\doifsomebackgroundelse\doifelsesomebackground
+
\def\doifsomebackground#1%
{\ifcsname\??layoutbackgrounds#1\endcsname
\expandafter\firstofoneargument
@@ -277,7 +279,7 @@
\page_backgrounds_set_boxes
\setbox#1\vbox
{\offinterlineskip
- \doifmarginswapelse{\copy\leftbackground}{\copy\rightbackground}%
+ \doifelsemarginswap{\copy\leftbackground}{\copy\rightbackground}%
\box#1}%
\fi}
@@ -343,7 +345,7 @@
{\dontcomplain
\swapmargins
\ifconditional\swapbackgroundmargins
- \doifmarginswapelse \donothing
+ \doifelsemarginswap \donothing
{\swapmacros\v!rightmargin\v!leftmargin
\swapmacros\v!rightedge \v!leftedge}%
\fi
@@ -501,7 +503,7 @@
\unexpanded\def\page_backgrounds_setup_double[#1][#2][#3]% if needed we can speed this up
{\global\settrue\c_page_backgrounds_some
\def\page_backgrounds_setup_step##1%
- {\doifinsetelse{##1}\v_page_backgrounds_double_set
+ {\doifelseinset{##1}\v_page_backgrounds_double_set
{\page_backgrounds_setup_and_check{##1}{#3}}
{\def\page_backgrounds_setup_step_nested####1{\page_backgrounds_setup_and_check{##1####1}{#3}}%
\processcommacommand[#2]\page_backgrounds_setup_step_nested}}%
@@ -510,7 +512,7 @@
\unexpanded\def\page_backgrounds_setup_single[#1][#2][#3]%
{\global\settrue\c_page_backgrounds_some
- \doifcommonelse{#1}\v_page_backgrounds_single_set
+ \doifelsecommon{#1}\v_page_backgrounds_single_set
{\def\page_backgrounds_setup_step##1{\page_backgrounds_setup_and_check{##1}{#2}}%
\processcommacommand[#1]\page_backgrounds_setup_step
\the\everybackgroundssetup}%
@@ -607,6 +609,48 @@
\setfalse\c_page_backgrounds_some
+%D Sometimes you have a document-wide (page) background but need to overload it
+%D locally. In such a case (at least in my experience) the only values that get set
+%D are the background and backgroundcolor (if set at all). A full inheritance chain
+%D would complicate things because then we would need named backgrounds, which in
+%D turn would make this mechanism slower. I considered independent local backgrounds,
+%D but that also complicates the code (not that much), and isolation means that we
+%D need to set more parameters each time. The following simple approach probably
+%D suits most usage.
+%D
+%D \starttyping
+%D \starttext
+%D \setupbackgrounds[page][background=color,backgroundcolor=red]
+%D \input tufte \page
+%D \setupbackgrounds[page][background=,backgroundcolor=]
+%D \input tufte \page
+%D \setupbackgrounds[page][background=color,backgroundcolor=red]
+%D \input tufte \page
+%D \pushbackground[page]
+%D \setupbackgrounds[page][background=color,backgroundcolor=green]
+%D \input tufte \page
+%D \popbackground
+%D \input tufte \page
+%D \stoptext
+%D \stoptyping
+
+\unexpanded\def\pushbackground[#1]%
+ {\pushmacro\popbackground
+ \edef\currentotrbackground{\??layoutbackgrounds#1}%
+ \unexpanded\edef\popbackground
+ {\setupframed
+ [\currentotrbackground]
+ [\c!background=\namedframedparameter{\currentotrbackground}\c!background,
+ \c!backgroundcolor=\namedframedparameter{\currentotrbackground}\c!backgroundcolor]%
+ \page_backgrounds_check_background
+ \popmacro\popbackground}%
+ \setupframed
+ [\currentotrbackground]
+ [\c!background=,\c!backgroundcolor=]%
+ \page_backgrounds_check_background}
+
+\let\popbackground\relax
+
\protect \endinput
% %D The next series is used in local (for instance floating) backgrounds.
diff --git a/tex/context/base/page-brk.mkiv b/tex/context/base/page-brk.mkiv
index cc9a9b4d2..f9c933052 100644
--- a/tex/context/base/page-brk.mkiv
+++ b/tex/context/base/page-brk.mkiv
@@ -112,13 +112,13 @@
\endgroup
\fi}
-\def\resetpagebreak % used elsewhere too
+\unexpanded\def\resetpagebreak % used elsewhere too
{\global\settrue\c_page_breaks_enabled}
-\def\simplifypagebreak % to be used grouped !
+\unexpanded\def\simplifypagebreak % to be used grouped !
{\def\page_breaks_process[##1]{\goodbreak}}
-\def\disablepagebreaks % to be used grouped !
+\unexpanded\def\disablepagebreaks % to be used grouped !
{\def\page_breaks_process[##1]{}}
\installpagebreakmethod \s!dummy
@@ -134,11 +134,11 @@
\endgroup}
\installpagebreakmethod \s!unknown
- {\doifinstringelse{+}\page_breaks_current_option
+ {\doifelseinstring{+}\page_breaks_current_option
{\page_otr_flush_all_floats
\page_otr_command_next_page
\dorecurse\page_breaks_current_option\page_otr_insert_dummy_page}
- {\doifnumberelse\page_breaks_current_option
+ {\doifelsenumber\page_breaks_current_option
{\page_otr_flush_all_floats
\page_otr_command_next_page
\doloop
@@ -178,6 +178,8 @@
\global\pageornamentstate\plusone
\fi}
+% also needed: \page \doifoddpageelse\relax{\page[\v!blank,\v!right]}
+
\installpagebreakmethod \v!no
{\ifconditional\c_page_breaks_enabled
\dosomebreak\nobreak
@@ -220,11 +222,11 @@
\installpagebreakmethod \v!even
{\page
- \doifoddpageelse\page_reset_marks_and_insert_dummy\donothing}
+ \doifelseoddpage\page_reset_marks_and_insert_dummy\donothing}
\installpagebreakmethod \v!odd
{\page
- \doifoddpageelse\donothing\page_reset_marks_and_insert_dummy}
+ \doifelseoddpage\donothing\page_reset_marks_and_insert_dummy}
\installpagebreakmethod \v!quadruple % not yet ok inside columnsets
{\ifdoublesided
@@ -316,75 +318,204 @@
%D Test page breaks.
-\newdimen \d_page_tests_test
-\newconstant\c_page_tests_mode
-
-\newconstant\testpagemethod % todo: \testnewpage[method=,lines=,voffset=]
-\newconstant\testpagetrigger
+% \newdimen \d_page_tests_test
+% \newconstant\c_page_tests_mode
-\unexpanded\def\testpage {\c_page_tests_mode\plusone \dodoubleempty\page_tests_test} %
-\unexpanded\def\testpageonly{\c_page_tests_mode\plustwo \dodoubleempty\page_tests_test} % no penalties added to the mvl
-\unexpanded\def\testpagesync{\c_page_tests_mode\plusthree\dodoubleempty\page_tests_test} % force sync
+\newconstant\testpagemethod % old
+\newconstant\testpagetrigger % old
-\def\page_tests_test[#1][#2]% don't change, only add more methods
+% \unexpanded\def\testpage {\c_page_tests_mode\plusone \dodoubleempty\page_tests_test} %
+% \unexpanded\def\testpageonly{\c_page_tests_mode\plustwo \dodoubleempty\page_tests_test} % no penalties added to the mvl
+% \unexpanded\def\testpagesync{\c_page_tests_mode\plusthree\dodoubleempty\page_tests_test} % force sync
+%
+% \def\page_tests_test[#1][#2]% don't change, only add more methods
+% {\relax % needed before \if
+% \ifconditional\c_page_breaks_enabled
+% % new from here
+% \ifcase\testpagetrigger
+% \endgraf
+% \or\ifvmode
+% \dosomebreak\allowbreak
+% \else % indeed?
+% \vadjust{\allowbreak}%
+% \endgraf
+% \fi\fi
+% % till here
+% \ifdim\pagegoal<\maxdimen \relax
+% \ifdim\pagetotal<\pagegoal \relax
+% \d_page_tests_test\dimexpr
+% #1\lineheight
+% +\pagetotal
+% \ifdim\lastskip<\parskip+\parskip\fi
+% \ifsecondargument+#2\fi
+% \relax
+% \ifcase\testpagemethod
+% \ifdim\d_page_tests_test>.99\pagegoal
+% \penalty-\plustenthousand
+% \fi
+% \or
+% \ifdim\dimexpr\d_page_tests_test-\pagegoal\relax>-\lineheight
+% \penalty-\plustenthousand
+% \fi
+% \or
+% \getnoflines\pagegoal
+% \ifdim\dimexpr\d_page_tests_test-\noflines\lineheight\relax>-\lineheight
+% \penalty-\plustenthousand
+% \fi
+% \or % same as 0 but more accurate
+% \ifdim\dimexpr\d_page_tests_test-10\scaledpoint\relax>\pagegoal
+% \penalty-\plustenthousand
+% \fi
+% \fi
+% \else\ifnum\c_page_tests_mode=\plusthree
+% \page_tests_flush_so_far
+% \fi\fi
+% \else\ifnum\c_page_tests_mode=\plusone
+% \goodbreak
+% \fi\fi
+% \else
+% \endgraf
+% \fi}
+%
+% \def\page_tests_flush_so_far
+% {\endgraf
+% \ifdim\pagetotal>\pagegoal
+% \ifdim\dimexpr\pagetotal-\pageshrink\relax>\pagegoal
+% \goodbreak
+% \else
+% \page
+% \fi
+% \fi}
+
+\installcorenamespace {pagechecker}
+\installcorenamespace {pagecheckermethod}
+
+\installcommandhandler \??pagechecker {pagechecker} \??pagechecker
+
+\setuppagechecker
+ [\c!method=1,
+ \c!before=,
+ \c!after=,
+ \c!inbetween=,
+ \c!lines=\plusthree,
+ \c!offset=\zeropoint]
+
+\def\page_check_amount
+ {\dimexpr
+ \pagecheckerparameter\c!lines\lineheight
+ +\pagetotal
+ \ifdim\lastskip<\parskip+\parskip\fi
+ +\pagecheckerparameter\c!offset
+ \relax}
+
+\unexpanded\def\checkpage
+ {\dodoubleempty\page_check}
+
+\def\page_check[#1][#2]%
{\relax % needed before \if
+ \endgraf
\ifconditional\c_page_breaks_enabled
- % new from here
- \ifcase\testpagetrigger
- \endgraf
- \or\ifvmode
- \dosomebreak\allowbreak
- \else % indeed?
- \vadjust{\allowbreak}%
- \endgraf
- \fi\fi
- % till here
- \ifdim\pagegoal<\maxdimen \relax
- \ifdim\pagetotal<\pagegoal \relax
- \d_page_tests_test\dimexpr
- #1\lineheight
- +\pagetotal
- \ifdim\lastskip<\parskip+\parskip\fi
- \ifsecondargument+#2\fi
- \relax
- \ifcase\testpagemethod
- \ifdim\d_page_tests_test>.99\pagegoal
- \penalty-\plustenthousand
- \fi
- \or
- \ifdim\dimexpr\d_page_tests_test-\pagegoal\relax>-\lineheight
- \penalty-\plustenthousand
- \fi
- \or
- \getnoflines\pagegoal
- \ifdim\dimexpr\d_page_tests_test-\noflines\lineheight\relax>-\lineheight
- \penalty-\plustenthousand
- \fi
- \or % same as 0 but more accurate
- \ifdim\dimexpr\d_page_tests_test-10\scaledpoint\relax>\pagegoal
- \penalty-\plustenthousand
- \fi
- \fi
- \else\ifnum\c_page_tests_mode=\plusthree
- \page_tests_flush_so_far
- \fi\fi
- \else\ifnum\c_page_tests_mode=\plusone
- \goodbreak
- \fi\fi
+ \begingroup
+ \edef\currentpagechecker{#1}%
+ \ifsecondargument\setupcurrentpagechecker[#2]\fi
+ \csname\??pagecheckermethod\pagecheckerparameter\c!method\endcsname
+ \endgroup
+ \fi}
+
+\setvalue{\??pagecheckermethod 0}%
+ {\ifdim\pagegoal<\maxdimen \relax
+ \ifdim\pagetotal<\pagegoal \relax
+ \ifdim\page_check_amount>.99\pagegoal
+ \pagecheckerparameter\c!before
+ \penalty-\plustenthousand
+ \pagecheckerparameter\c!after
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi}
+
+\setvalue{\??pagecheckermethod 1}%
+ {\ifdim\pagegoal<\maxdimen \relax
+ \ifdim\pagetotal<\pagegoal \relax
+ \ifdim\dimexpr\page_check_amount-\pagegoal\relax>-\lineheight
+ \pagecheckerparameter\c!before
+ \penalty-\plustenthousand
+ \pagecheckerparameter\c!after
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
\else
- \endgraf
+ \goodbreak
+ \pagecheckerparameter\c!inbetween
\fi}
-\def\page_tests_flush_so_far
- {\endgraf
- \ifdim\pagetotal>\pagegoal
- \ifdim\dimexpr\pagetotal-\pageshrink\relax>\pagegoal
- \goodbreak
+\setvalue{\??pagecheckermethod 2}%
+ {\ifdim\pagegoal<\maxdimen \relax
+ \ifdim\pagetotal<\pagegoal \relax
+ \getnoflines\pagegoal
+ \ifdim\dimexpr\page_check_amount-\noflines\lineheight\relax>-\lineheight
+         \pagecheckerparameter\c!before
+ \penalty-\plustenthousand
+ \pagecheckerparameter\c!after
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
\else
- \page
+ \pagecheckerparameter\c!inbetween
\fi
+ \else
+ \pagecheckerparameter\c!inbetween
\fi}
+\setvalue{\??pagecheckermethod 3}%
+ {\ifdim\pagegoal<\maxdimen \relax
+ \ifdim\pagetotal<\pagegoal \relax
+ \ifdim\dimexpr\page_check_amount-10\scaledpoint\relax>\pagegoal
+ \pagecheckerparameter\c!before
+ \penalty-\plustenthousand
+ \pagecheckerparameter\c!after
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
+ \else
+ \ifdim\pagetotal>\pagegoal
+ \ifdim\dimexpr\pagetotal-\pageshrink\relax>\pagegoal
+ \goodbreak
+ \pagecheckerparameter\c!inbetween
+ \else
+ \pagecheckerparameter\c!before
+ \page
+ \pagecheckerparameter\c!after
+ \fi
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi
+ \fi
+ \else
+ \pagecheckerparameter\c!inbetween
+ \fi}
+
+\definepagechecker[\s!unknown:0] [\c!method=0,\c!before=,\c!after=,\c!inbetween=]
+\definepagechecker[\s!unknown:1][\s!unknown:0][\c!method=1]
+\definepagechecker[\s!unknown:2][\s!unknown:0][\c!method=2]
+\definepagechecker[\s!unknown:3][\s!unknown:0][\c!method=3]
+
+\def\page_tests_test_a[#1][#2]{\normalexpanded{\checkpage[\s!unknown:1][\c!lines=#1,\c!offset=\ifsecondargument#2\else\zeropoint\fi]}}
+\def\page_tests_test_b[#1][#2]{\normalexpanded{\checkpage[\s!unknown:2][\c!lines=#1,\c!offset=\ifsecondargument#2\else\zeropoint\fi]}}
+\def\page_tests_test_c[#1][#2]{\normalexpanded{\checkpage[\s!unknown:3][\c!lines=#1,\c!offset=\ifsecondargument#2\else\zeropoint\fi]}}
+
+\unexpanded\def\testpage {\dodoubleempty\page_tests_test_a} %
+\unexpanded\def\testpageonly{\dodoubleempty\page_tests_test_b} % no penalties added to the mvl
+\unexpanded\def\testpagesync{\dodoubleempty\page_tests_test_c} % force sync
+
%D Test column breaks.
\unexpanded\def\testcolumn
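The page-brk changes above fold the old \testpage/\testpageonly/\testpagesync tests into a configurable pagechecker command handler. A minimal usage sketch, not part of the patch (the checker name "demo" is made up; the keys mirror the \setuppagechecker defaults shown above):

\definepagechecker
  [demo]
  [method=1,        % the method the new \testpage wrapper uses
   lines=4,         % require room for at least four more lines
   before=,
   after=,
   inbetween=\blank]

\starttext
    ... some running text ...
    \checkpage[demo][lines=6]% locally require more room
    ... more running text ...
\stoptext

The wrappers \page_tests_test_a/b/c show how the old \testpage[n][offset] calls now map onto \checkpage with a predefined unknown:N checker.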
diff --git a/tex/context/base/page-flt.lua b/tex/context/base/page-flt.lua
index 11aa2be21..e91285d0a 100644
--- a/tex/context/base/page-flt.lua
+++ b/tex/context/base/page-flt.lua
@@ -21,20 +21,23 @@ local C, S, P, lpegmatch = lpeg.C, lpeg.S, lpeg.P, lpeg.match
-- we use floatbox, floatwidth, floatheight
-- text page leftpage rightpage (todo: top, bottom, margin, order)
-local copy_node_list = node.copy_list
+local copy_node_list = node.copy_list
+local flush_node_list = node.flush_list
+local copy_node = node.copy
-local setdimen = tex.setdimen
-local setcount = tex.setcount
-local texgetbox = tex.getbox
-local texsetbox = tex.setbox
+local setdimen = tex.setdimen
+local setcount = tex.setcount
+local texgetbox = tex.getbox
+local texsetbox = tex.setbox
+local textakebox = nodes.takebox
-floats = floats or { }
-local floats = floats
+floats = floats or { }
+local floats = floats
-local noffloats = 0
-local last = nil
-local default = "text"
-local pushed = { }
+local noffloats = 0
+local last = nil
+local default = "text"
+local pushed = { }
local function initialize()
return {
@@ -105,21 +108,20 @@ end
function floats.save(which,data)
which = which or default
- local b = texgetbox("floatbox")
+ local b = textakebox("floatbox")
if b then
local stack = stacks[which]
noffloats = noffloats + 1
- local w, h, d = b.width, b.height, b.depth
local t = {
n = noffloats,
data = data or { },
- box = copy_node_list(b),
+ box = b,
}
- texsetbox("floatbox",nil)
insert(stack,t)
setcount("global","savednoffloats",#stacks[default])
if trace_floats then
- report_floats("%s, category %a, number %a, slot %a, width %p, height %p, depth %p","saving",which,noffloats,#stack,w,h,d)
+ report_floats("%s, category %a, number %a, slot %a, width %p, height %p, depth %p","saving",
+ which,noffloats,#stack,b.width,b.height,b.depth)
else
interfaces.showmessage("floatblocks",2,noffloats)
end
@@ -132,14 +134,13 @@ function floats.resave(which)
if last then
which = which or default
local stack = stacks[which]
- local b = texgetbox("floatbox")
- local w, h, d = b.width, b.height, b.depth
- last.box = copy_node_list(b)
- texsetbox("floatbox",nil)
+ local b = textakebox("floatbox")
+ last.box = b
insert(stack,1,last)
setcount("global","savednoffloats",#stacks[default])
if trace_floats then
- report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","resaving",which,noffloats,#stack,w,h,d)
+ report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","resaving",
+ which,noffloats,#stack,b.width,b.height,b.depth)
else
interfaces.showmessage("floatblocks",2,noffloats)
end
@@ -153,9 +154,10 @@ function floats.flush(which,n,bylabel)
local stack = stacks[which]
local t, b, n = get(stack,n or 1,bylabel)
if t then
- local w, h, d = setdimensions(b)
if trace_floats then
- report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","flushing",which,t.n,n,w,h,d)
+ local w, h, d = setdimensions(b) -- ?
+ report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","flushing",
+ which,t.n,n,w,h,d)
else
interfaces.showmessage("floatblocks",3,t.n)
end
@@ -173,9 +175,10 @@ function floats.consult(which,n)
local stack = stacks[which]
local t, b, n = get(stack,n)
if t then
- local w, h, d = setdimensions(b)
if trace_floats then
- report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","consulting",which,t.n,n,w,h,d)
+ local w, h, d = setdimensions(b)
+ report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","consulting",
+ which,t.n,n,w,h,d)
end
return t, b, n
else
@@ -239,10 +242,14 @@ function floats.checkedpagefloat(packed)
end
end
-function floats.nofstacked()
+function floats.nofstacked(which)
return #stacks[which or default] or 0
end
+function floats.hasstacked(which)
+ return (#stacks[which or default] or 0) > 0
+end
+
-- todo: check for digits !
local method = C((1-S(", :"))^1)
@@ -270,27 +277,101 @@ end
-- interface
-local context = context
-local setvalue = context.setvalue
-
-commands.flushfloat = floats.flush
-commands.savefloat = floats.save
-commands.resavefloat = floats.resave
-commands.pushfloat = floats.push
-commands.popfloat = floats.pop
-commands.consultfloat = floats.consult
-commands.collectfloat = floats.collect
-
-function commands.getfloatvariable (...) local v = floats.getvariable(...) if v then context(v) end end
-function commands.checkedpagefloat (...) local v = floats.checkedpagefloat(...) if v then context(v) end end
-
-function commands.nofstackedfloats (...) context(floats.nofstacked(...)) end
-function commands.doifelsesavedfloat(...) commands.doifelse(floats.nofstacked(...)>0) end
-
-function commands.analysefloatmethod(str) -- currently only one method
- local method, label, row, column = floats.analysemethod(str)
- setvalue("floatmethod",method or "")
- setvalue("floatlabel", label or "")
- setvalue("floatrow", row or "")
- setvalue("floatcolumn",column or "")
-end
+local context = context
+local commands = commands
+local implement = interfaces.implement
+local setmacro = interfaces.setmacro
+
+implement {
+ name = "flushfloat",
+ actions = floats.flush,
+ arguments = { "string", "integer" },
+}
+
+implement {
+ name = "flushlabeledfloat",
+ actions = floats.flush,
+ arguments = { "string", "string", true },
+}
+
+implement {
+ name = "savefloat",
+ actions = floats.save,
+ arguments = "string"
+}
+
+implement {
+ name = "savespecificfloat",
+ actions = floats.save,
+ arguments = {
+ "string",
+ {
+ { "specification" },
+ { "label" },
+ }
+ }
+}
+
+implement {
+ name = "resavefloat",
+ actions = floats.resave,
+ arguments = "string"
+}
+
+implement {
+ name = "pushfloat",
+ actions = floats.push
+}
+
+implement {
+ name = "popfloat",
+ actions = floats.pop
+}
+
+implement {
+ name = "consultfloat",
+ actions = floats.consult,
+ arguments = "string",
+}
+
+implement {
+ name = "collectfloat",
+ actions = floats.collect,
+ arguments = { "string", "dimen", "dimen" }
+}
+
+implement {
+ name = "getfloatvariable",
+ actions = { floats.getvariable, context },
+ arguments = "string"
+}
+
+implement {
+ name = "checkedpagefloat",
+ actions = { floats.checkedpagefloat, context },
+ arguments = "string"
+}
+
+implement {
+ name = "nofstackedfloats",
+ actions = { floats.nofstacked, context },
+ arguments = "string"
+}
+
+implement {
+ name = "doifelsestackedfloats",
+ actions = { floats.hasstacked, commands.doifelse },
+ arguments = "string"
+}
+
+implement {
+ name = "analysefloatmethod",
+ actions = function(str)
+ local method, label, row, column = floats.analysemethod(str)
+ setmacro("floatmethod",method or "")
+ setmacro("floatlabel", label or "")
+ setmacro("floatrow", row or "")
+ setmacro("floatcolumn",column or "")
+ end,
+ arguments = "string"
+}
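The float interface above is migrated from the commands table to interfaces.implement, which registers an argument scanner that becomes available at the TeX end under a \clf_ prefix (see the matching page-flt.mkiv changes below). A minimal sketch of the pattern with a hypothetical name, not part of the patch:

local context   = context
local implement = interfaces.implement

implement {
    name      = "demoreport",            -- surfaces as \clf_demoreport at the TeX end
    arguments = { "string", "integer" }, -- scanned straight from the input stream
    actions   = function(category,n)
        context(n)                       -- pipe a result back into the input
    end,
}

-- TeX side, analogous to \clf_flushfloat{#1}#2\relax below:
--   \clf_demoreport{text}\plustwo\relax

Keyword tables in the arguments field, as used for savespecificfloat, scan a braced key/value group such as {specification{here}label{there}} into a Lua table.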
diff --git a/tex/context/base/page-flt.mkiv b/tex/context/base/page-flt.mkiv
index d641e1c7d..16c427139 100644
--- a/tex/context/base/page-flt.mkiv
+++ b/tex/context/base/page-flt.mkiv
@@ -83,49 +83,49 @@
\to \everyfloatscheck
\unexpanded\def\page_floats_flush#1#2%
- {\ctxcommand{flushfloat("#1",\number#2)}%
+ {\clf_flushfloat{#1}#2\relax
\the\everyfloatscheck}
\unexpanded\def\page_floats_flush_by_label#1#2%
- {\ctxcommand{flushfloat("#1","#2",true)}%
+ {\clf_flushlabeledfloat{#1}{#2}\relax
\the\everyfloatscheck}
\unexpanded\def\page_floats_save#1%
- {\ctxcommand{savefloat("#1")}%
+ {\clf_savefloat{#1}\relax
\the\everyfloatscheck}
\unexpanded\def\page_floats_resave#1%
- {\ctxcommand{resavefloat("#1")}%
+ {\clf_resavefloat{#1}\relax
\the\everyfloatscheck}
\unexpanded\def\page_floats_push_saved
- {\ctxcommand{pushfloat()}%
+ {\clf_pushfloat
\the\everyfloatscheck}
\unexpanded\def\page_floats_pop_saved
- {\ctxcommand{popfloat()}%
+ {\clf_popfloat
\the\everyfloatscheck}
\unexpanded\def\page_floats_get_info#1%
- {\ctxcommand{consultfloat("#1")}}
+ {\clf_consultfloat{#1}}
\unexpanded\def\page_floats_if_else#1%
- {\ctxcommand{doifelsesavedfloat("#1")}}
+ {\clf_doifelsestackedfloats{#1}}
\unexpanded\def\page_floats_collect#1#2#3%
- {\ctxcommand{collectfloat("#1",\number\dimexpr#2,\number\dimexpr#3)}}
+ {\clf_collectfloat{#1}\dimexpr#2\relax\dimexpr#3\relax}
-\unexpanded\def\nofstackedfloatincategory#1%
- {\ctxcommand{nofstackedfloats("#1")}}
+\def\nofstackedfloatincategory#1%
+ {\clf_nofstackedfloats{#1}}
\let\page_floats_column_push_saved\page_floats_push_saved % overloaded in page-mul
\let\page_floats_column_pop_saved \page_floats_pop_saved % overloaded in page-mul
\unexpanded\def\page_floats_save_page_float#1#2%
- {\ctxcommand{savefloat("#1", { specification = "#2" })}}
+ {\clf_savespecificfloat{#1}{specification{#2}}\relax}
\unexpanded\def\page_floats_save_somewhere_float#1#2% #1=method
- {\ctxcommand{savefloat("#1", { specification = "#2", label = "\floatlabel" })}}
+ {\clf_savespecificfloat{#1}{specification{#2}label{\floatlabel}}\relax}
%D This is an experimental new feature (for Alan Braslau), a prelude to more:
%D
@@ -256,7 +256,7 @@
\def\page_floats_flush_page_floats_indeed#1% future releases can do more clever things
{\page_floats_flush{#1}\plusone
- \edef\floatspecification{\ctxcommand{getfloatvariable("specification")}}% Is this okay?
+ \edef\floatspecification{\clf_getfloatvariable{specification}}% Is this okay?
\the\everybeforeflushedpagefloat
\vbox to \textheight
{\doifnotinset\v!high\floatspecification\vfill
@@ -265,7 +265,7 @@
\page_otr_fill_and_eject_page}
\unexpanded\def\page_floats_flush_page_floats % used in postpone
- {\edef\m_page_otf_checked_page_float{\ctxcommand{checkedpagefloat()}}% (true) for packed
+ {\edef\m_page_otf_checked_page_float{\clf_checkedpagefloat}%
\ifx\m_page_otf_checked_page_float\empty
% nothing
\else\ifx\m_page_otf_checked_page_float\v!empty
diff --git a/tex/context/base/page-flw.mkiv b/tex/context/base/page-flw.mkiv
index ec1fa636d..56fe32e5b 100644
--- a/tex/context/base/page-flw.mkiv
+++ b/tex/context/base/page-flw.mkiv
@@ -80,14 +80,14 @@
\def\textflowcollector#1%
{\csname\??textflowbox#1\endcsname}
-\unexpanded\def\doiftextflowcollectorelse#1%
+\unexpanded\def\doifelsetextflowcollector#1%
{\ifcsname\??textflowbox#1\endcsname
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
-\unexpanded\def\doiftextflowelse#1%
+\unexpanded\def\doifelsetextflow#1%
{\ifcsname\??textflowbox#1\endcsname
\ifvoid\csname\??textflowbox#1\endcsname
\doubleexpandafter\secondoftwoarguments
@@ -98,6 +98,9 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doiftextflowcollectorelse\doifelsetextflowcollector
+\let\doiftextflowelse \doifelsetextflow
+
% \unexpanded\def\doiftextflow#1%
% {\doiftextflowelse{#1}\firstofoneargument\gobbleoneargument}
diff --git a/tex/context/base/page-grd.mkiv b/tex/context/base/page-grd.mkiv
index 281d0bfbe..e70414b66 100644
--- a/tex/context/base/page-grd.mkiv
+++ b/tex/context/base/page-grd.mkiv
@@ -15,22 +15,19 @@
\unprotect
-\definepalet
- [layout]
- [grid=red,
- page=green]
-
\newconstant\c_page_grids_location
\newconstant\c_page_grids_line_mode
\newconstant\c_page_grids_lineno_mode
+\newconstant\c_page_grids_columns_mode
\unexpanded\def\showgrid
{\dosingleempty\page_grids_show}
\def\page_grids_show[#1]%
- {\c_page_grids_location \plusone % downward compatible default
- \c_page_grids_line_mode \plusone
- \c_page_grids_lineno_mode\plusone
+ {\c_page_grids_location \plusone % downward compatible default
+ \c_page_grids_line_mode \plusone
+ \c_page_grids_lineno_mode \plusone
+ \c_page_grids_columns_mode\plusone
\processallactionsinset
[#1]%
[ \v!reset=>\c_page_grids_location \zerocount,
@@ -48,14 +45,24 @@
\let\page_grids_add_to_box\gobbleoneargument
\else % 1=bottom 2=top
\let\page_grids_add_to_box\page_grids_add_to_box_indeed
+ \fi
+ \ifcase\c_page_grids_columns_mode
+ \let\page_grids_add_to_one\gobbleoneargument
+ \let\page_grids_add_to_mix\gobbleoneargument
+ \else
+ \let\page_grids_add_to_one\page_grids_add_to_one_indeed
+ \let\page_grids_add_to_mix\page_grids_add_to_mix_indeed
\fi}
% if really needed for speed we can cache the grid
\let\page_grids_add_to_box\gobbleoneargument
+\let\page_grids_add_to_one\gobbleoneargument
+\let\page_grids_add_to_mix\gobbleoneargument
\def\page_grids_add_to_box_indeed#1% to be checked for color and layer ..... use mp
{\startcolor[layout:grid]%
+ \resetvisualizers
\gridboxlinemode \c_page_grids_line_mode
\gridboxlinenomode\c_page_grids_lineno_mode
\setgridbox\scratchbox\makeupwidth\textheight % todo: check color
@@ -82,4 +89,16 @@
\ifcase\c_page_grids_location\or\hskip-\makeupwidth\box#1\fi}%
\stopcolor}
+\def\page_grids_add_to_one_indeed#1%
+ {\begingroup
+ \resetvisualizers
+ \global\setbox#1\vbox{\backgroundline[layout:one]{\box#1}}%
+ \endgroup}
+
+\def\page_grids_add_to_mix_indeed#1%
+ {\begingroup
+ \resetvisualizers
+ \global\setbox#1\vbox{\backgroundline[layout:mix]{\box#1}}%
+ \endgroup}
+
\protect \endinput
diff --git a/tex/context/base/page-imp.mkiv b/tex/context/base/page-imp.mkiv
index cfa535ab2..198a98229 100644
--- a/tex/context/base/page-imp.mkiv
+++ b/tex/context/base/page-imp.mkiv
@@ -41,7 +41,7 @@
\prependtoks
\page_shipouts_flush_text_data
\to \everylastshipout
-
+
% Problem: we need to apply the finalizers to a to be shipped out page (as
% we can have positioning involved). However, we can also add stuff in the
% imposition, like cropmarks. Fortunately we do that with metapost so
@@ -163,7 +163,7 @@
\donetrue
\fi
\else % testen, aangepast / expanded nodig ?
- \normalexpanded{\doifinsetelse{\the\shippedoutpages}{\pagestoshipout}}\donetrue\donefalse
+ \normalexpanded{\doifelseinset{\the\shippedoutpages}{\pagestoshipout}}\donetrue\donefalse
\fi
\ifdone
\setbox\shipoutscratchbox\hbox{#1}%
@@ -288,7 +288,7 @@
\fi
\setuppapersize
\ifarrangingpages
- \ctxlua{job.disablesave()}%
+ \clf_disablejobsave
%\disabledirective[job.save]%
\fi
\fi}
@@ -355,14 +355,14 @@
{\dosetuparrangement{2}{2}{4}{3}{3}%
\pusharrangedpageSIXTEENTWO\poparrangedpagesAtoD\relax}
-\installpagearrangement 2*2*4 % onother one of Willy Egger
+\installpagearrangement 2*2*4 % another one of Willy Egger
{\dosetuparrangement{2}{1}{8}{3}{2}%
\pusharrangedpageSIXTEENFOUR\poparrangedpagesAtoH\relax}
\installpagearrangement 2TOPSIDE
{\dosetuparrangement{1}{2}{4}{2}{3}%
\pusharrangedpageTWOTOPSIDE\poparrangedpagesTWOTOPSIDE\handlearrangedpageTOP}
-
+
\def\filluparrangedpages % beware: \realpageno is 1 ahead
{\ifarrangingpages
\scratchcounter\numexpr\realpageno-\plusone\relax
@@ -748,7 +748,7 @@
\poparrangedpages
\fi}
-%D Might be used if a printer is printing from a rol or creating mini-books from A4:
+%D Might be used if a printer is printing from a roll or creating mini-books from A4:
%D This section has 16 pages. The folding scheme is first a Z-fold and at the end
%D a final fold in the spine.
%D Coding: [2*8*Z]
@@ -1045,7 +1045,7 @@
%D There should be arrangements for sections made of heavy and thick paper. i.e. the heavier the paper
%D the fewer pages per section:
-%D Section with 8 pages put on to sheets of paper. Each sheet carries recto 2 and verso 2 pages.
+%D Section with 8 pages put on two sheets of paper. Each sheet carries recto 2 and verso 2 pages.
%D Coding: [2*2*2]
\installpagearrangement 2*2*2
@@ -1351,7 +1351,7 @@
\doifelse{#1}\v!page {\let\page_boxes_apply_shift_print\page_boxes_apply_shift}{\let\page_boxes_apply_shift_print\gobbleoneargument}%
\doifelse{#1}\v!paper{\let\page_boxes_apply_shift_paper\page_boxes_apply_shift}{\let\page_boxes_apply_shift_paper\gobbleoneargument}%
\else\ifsecondargument
- \doifinsetelse{#1}{\v!page,\v!paper}
+ \doifelseinset{#1}{\v!page,\v!paper}
{\setuppageshift[#1][#2][#2]}
{\setuppageshift[\v!page][#1][#2]}%
\else\iffirstargument
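The page-imp changes are mostly renames (\doifelseinset, \clf_disablejobsave) and comment fixes; the arrangement schemes themselves are unchanged and are still selected through the normal arranging interface. A minimal usage sketch, not part of the patch (assuming the standard \setuparranging command):

% eight pages per section, printed on two sheets (2 recto + 2 verso per sheet)
\setuparranging[2*2*2]

\starttext
    \dorecurse{8}{page \recurselevel\page}
\stoptext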
diff --git a/tex/context/base/page-ini.mkiv b/tex/context/base/page-ini.mkiv
index fdffa552d..15783a99b 100644
--- a/tex/context/base/page-ini.mkiv
+++ b/tex/context/base/page-ini.mkiv
@@ -102,6 +102,7 @@
\page_otr_check_for_pending_inserts
% but does not hurt either (we're still in the otr!)
\inpagebodytrue % needed for enabling \blank ! brrr
+ \pagebodymode\plusone % todo: \plustwo when spread
\page_otr_command_flush_saved_floats
\page_otr_command_set_vsize % this is needed for interacting components, like floats and multicolumns
\strc_pagenumbers_increment_counters % should hook into an every
@@ -143,9 +144,15 @@
\newconstant\pageornamentstate % 0=on 1=one-off 2=always-off
+% \appendtoks
+% \ifcase\pageornamentstate \or
+% \pageornamentstate\zerocount
+% \fi
+% \to \everyaftershipout
+
\appendtoks
\ifcase\pageornamentstate \or
- \pageornamentstate\zerocount
+ \global\pageornamentstate\zerocount
\fi
\to \everyaftershipout
@@ -229,7 +236,7 @@
\the\everyafterpagebody
\egroup}
-\def\doiftopofpageelse
+\def\doifelsetopofpage
{\ifdim\pagegoal=\maxdimen
\expandafter\firstoftwoarguments
\else\ifdim\pagegoal=\vsize
@@ -238,6 +245,8 @@
\doubleexpandafter\secondoftwoarguments
\fi\fi}
+\let\doiftopofpageelse\doifelsetopofpage
+
% %D Idea:
%
% \newinsert\thispageinsert % <- installinsertion
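As with the other conditionals in this patch, \doiftopofpageelse is renamed to \doifelsetopofpage with the old name kept as a synonym. A minimal usage sketch, not part of the patch:

\doifelsetopofpage
  {\donothing}    % pagegoal still unset (or equal to \vsize): we are at the top
  {\blank[big]}   % already some content on the page: add some space first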
diff --git a/tex/context/base/page-inj.lua b/tex/context/base/page-inj.lua
index 56e5a234e..fd66ead08 100644
--- a/tex/context/base/page-inj.lua
+++ b/tex/context/base/page-inj.lua
@@ -16,10 +16,9 @@ pagebuilders.injections = injections
local report = logs.reporter("pagebuilder","injections")
local trace = false trackers.register("pagebuilder.injections",function(v) trace = v end)
-local variables = interfaces.variables
-
local context = context
-local commands = commands
+local implement = interfaces.implement
+local variables = interfaces.variables
local texsetcount = tex.setcount
@@ -103,6 +102,24 @@ function injections.flushafter() -- maybe not public, just commands.*
end
end
-commands.page_injections_save = injections.save
-commands.page_injections_flush_after = injections.flushafter
-commands.page_injections_flush_before = injections.flushbefore
+implement {
+ name = "savepageinjections",
+ actions = injections.save,
+ arguments = {
+ {
+ { "name" },
+ { "state" },
+ { "userdata" }
+ }
+ }
+}
+
+implement {
+ name = "flushpageinjectionsbefore",
+ actions = injections.flushbefore
+}
+
+implement {
+ name = "flushpageinjectionsafter",
+ actions = injections.flushafter
+}
diff --git a/tex/context/base/page-inj.mkvi b/tex/context/base/page-inj.mkvi
index bee564683..03472fe55 100644
--- a/tex/context/base/page-inj.mkvi
+++ b/tex/context/base/page-inj.mkvi
@@ -29,8 +29,8 @@
\installframedcommandhandler \??pageinjection {pageinjection} \??pageinjection
\installcommandhandler \??pageinjectionalternative {pageinjectionalternative} \??pageinjectionalternative
-\def\page_boxes_flush_before{\ctxcommand{page_injections_flush_before()}}
-\def\page_boxes_flush_after {\ctxcommand{page_injections_flush_after ()}}
+\let\page_boxes_flush_before\clf_flushpageinjectionsbefore
+\let\page_boxes_flush_after \clf_flushpageinjectionsafter
\def\page_injections_flush_saved#name#parameters%
{\begingroup
@@ -47,11 +47,11 @@
\dodoubleempty\page_injections_direct}
\def\page_injections_direct[#1][#2]% name parameters | settings parameters | name | parameters
- {\doifassignmentelse{#1}
- {\doifassignmentelse{#2}
+ {\doifelseassignment{#1}
+ {\doifelseassignment{#2}
{\page_injections_direct_settings_parameters{#1}{#2}}
{\page_injections_direct_parameters {#1}}}
- {\doifassignmentelse{#2}
+ {\doifelseassignment{#2}
{\page_injections_direct_name_parameters {#1}{#2}}
{\page_injections_direct_name {#1}}}}
@@ -87,11 +87,11 @@
\fi}
\def\page_injections_direct_indeed_yes#parameters%
- {\ctxcommand{page_injections_save{
- name = "\currentpageinjection",
- state = "\p_page_injections_state",
- userdata = \!!bs\normalunexpanded{#parameters}\!!es
- }}%
+ {\clf_savepageinjections
+ name {\currentpageinjection}%
+ state {\p_page_injections_state}%
+ userdata {\normalunexpanded{#parameters}}%
+ \relax
\endgroup}
\def\page_injections_direct_indeed_nop#parameters%
diff --git a/tex/context/base/page-ins.lua b/tex/context/base/page-ins.lua
index 7f870735d..235f586c6 100644
--- a/tex/context/base/page-ins.lua
+++ b/tex/context/base/page-ins.lua
@@ -4,19 +4,9 @@ if not modules then modules = { } end modules ['page-ins'] = {
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files",
- -- public = {
- -- functions = {
- -- "inserts.define",
- -- "inserts.getdata",
- -- },
- -- commands = {
- -- "defineinsertion",
- -- "inserttionnumber",
- -- }
- -- }
}
--- Maybe we should only register in lua and forget about the tex end.
+local next = next
structures = structures or { }
structures.inserts = structures.inserts or { }
@@ -36,6 +26,9 @@ local v_firstcolumn = variables.firstcolumn
local v_lastcolumn = variables.lastcolumn
local v_text = variables.text
+local context = context
+local implement = interfaces.implement
+
storage.register("structures/inserts/stored", inserts.stored, "structures.inserts.stored")
local data = inserts.data
@@ -49,7 +42,7 @@ end
function inserts.define(name,specification)
specification.name= name
local number = specification.number or 0
- data[name] = specification
+ data[name] = specification
data[number] = specification
-- only needed at runtime as this get stored in a bytecode register
stored[name] = specification
@@ -90,8 +83,37 @@ end
-- interface
-commands.defineinsertion = inserts.define
-commands.setupinsertion = inserts.setup
-commands.setinsertionlocation = inserts.setlocation
-commands.insertionnumber = function(name) context(data[name].number or 0) end
+implement {
+ name = "defineinsertion",
+ actions = inserts.define,
+ arguments = {
+ "string",
+ {
+ { "number", "integer" }
+ }
+ }
+}
+
+implement {
+ name = "setupinsertion",
+ actions = inserts.setup,
+ arguments = {
+ "string",
+ {
+ { "location" }
+ }
+ }
+}
+
+implement {
+ name = "setinsertionlocation",
+ actions = inserts.setlocation,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "insertionnumber",
+ actions = function(name) context(data[name].number or 0) end,
+ arguments = "string"
+}
diff --git a/tex/context/base/page-ins.mkiv b/tex/context/base/page-ins.mkiv
index a63de0b26..c91073a14 100644
--- a/tex/context/base/page-ins.mkiv
+++ b/tex/context/base/page-ins.mkiv
@@ -82,13 +82,15 @@
\insert#1{\unvbox#1}%
\fi}
-\unexpanded\def\doifinsertionelse#1%
+\unexpanded\def\doifelseinsertion#1%
{\ifcsname\??insertionnumber#1\endcsname
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
+\let\doifinsertionelse\doifelseinsertion
+
% \unexpanded\def\startinsertion[#1]%
% {\insert\csname\??insertionnumber#1\endcsname\bgroup}
%
@@ -108,7 +110,12 @@
\else
\expandafter\newinsert\csname\??insertionnumber\currentinsertion\endcsname
\page_inserts_synchronize_registers
- \ctxcommand{defineinsertion("\currentinsertion",{ number = \number\currentinsertionnumber })}%
+ \clf_defineinsertion
+ {\currentinsertion}%
+ {%
+ number \currentinsertionnumber
+ }%
+ \relax
\t_page_inserts_list\expandafter\expandafter\expandafter
{\expandafter\the\expandafter\t_page_inserts_list
\expandafter\page_inserts_process\csname\??insertionnumber\currentinsertion\endcsname}%
@@ -123,13 +130,16 @@
\to \everydefineinsertion
\appendtoks
- \ctxcommand{setupinsertion("\currentinsertion",{
- location = "\insertionparameter\c!location",
- })}%
+ \clf_setupinsertion
+    {\currentinsertion}%
+ {%
+ location {\insertionparameter\c!location}%
+ }%
+ \relax
\to \everysetupinsertion
\unexpanded\def\page_inserts_set_location#1#2% fast one
- {\ctxcommand{setinsertionlocation("#1","#2")}}
+ {\clf_setinsertionlocation{#1}{#2}}
%D Auxiliary macros:
diff --git a/tex/context/base/page-lay.mkiv b/tex/context/base/page-lay.mkiv
index 81eb0423c..d1328bb6b 100644
--- a/tex/context/base/page-lay.mkiv
+++ b/tex/context/base/page-lay.mkiv
@@ -30,6 +30,8 @@
\newdimen\paperheight \paperheight = 297mm
\newdimen\paperwidth \paperwidth = 210mm
\newdimen\paperoffset \paperoffset = \zeropoint
+\newdimen\paperbleed \paperbleed = \zeropoint
+\newdimen\spinewidth \spinewidth = \zeropoint
\newdimen\printpaperheight \printpaperheight = \paperheight
\newdimen\printpaperwidth \printpaperwidth = \paperwidth
@@ -154,13 +156,15 @@
\fi
\to \everysetuplayout
-\def\doiflayoutdefinedelse#1%
+\def\doifelselayoutdefined#1%
{\ifcsname\namedlayouthash{#1}\c!state\endcsname % maybe a helper
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
+\let\doiflayoutdefinedelse\doifelselayoutdefined
+
\def\layoutdistance#1#2{\ifdim\zeropoint<#1#2\else\zeropoint\fi}
\def\page_layouts_set_dimensions
@@ -365,7 +369,7 @@
\ifx\currentlayouttarget\empty
% invalid target
\else
- \doifassignmentelse{#2}
+ \doifelseassignment{#2}
{\definelayouttarget[#1][#2]}
{\setevalue{\??layoutpaper#1}{#2}%
\setevalue{\??layoutprint#1}{#3}}%
@@ -390,9 +394,9 @@
\unexpanded\def\page_paper_setup_size[#1][#2]%
{\iffirstargument
- \doifassignmentelse{#1}
+ \doifelseassignment{#1}
{\page_paper_setup_size_settings[#1]}
- {\doifassignmentelse{#2}
+ {\doifelseassignment{#2}
{\page_paper_setup_size_settings_by_name[#1][#2]}
{\page_paper_setup_size_change_size[#1][#2]}}%
\else
@@ -1026,12 +1030,12 @@
\unexpanded\def\startlayout[#1]%
{\page
- \pushmacro\currentlayout
- \doiflayoutdefinedelse{#1}{\setuplayout[#1]}\donothing} % {\setuplayout[\currentlayout]}}
+ \globalpushmacro\currentlayout
+ \doifelselayoutdefined{#1}{\setuplayout[#1]}\donothing} % {\setuplayout[\currentlayout]}}
\unexpanded\def\stoplayout
{\page
- \popmacro\currentlayout
+ \globalpopmacro\currentlayout
\setuplayout[\currentlayout]}
% NOG EENS NAGAAN WANNEER NU GLOBAL EN WANNEER NIET
@@ -1121,13 +1125,15 @@
% #single #left #right
-\def\doifoddpageelse
+\def\doifelseoddpage
{\ifodd\pagenoshift
\expandafter\page_layouts_if_odd_else_yes
\else
\expandafter\page_layouts_if_odd_else_nop
\fi}
+\let\doifoddpageelse\doifelseoddpage
+
\def\page_layouts_if_odd_else_yes
{\ifodd\realpageno
\expandafter\secondoftwoarguments
@@ -1142,9 +1148,7 @@
\expandafter\secondoftwoarguments
\fi}
-\let\doifonevenpaginaelse\doifoddpageelse
-
-\def\page_layouts_if_odd_else_again#1{\doifoddpageelse}
+\def\page_layouts_if_odd_else_again#1{\doifelseoddpage}
\def\doifbothsidesoverruled
{\ifdoublesided
@@ -1171,7 +1175,7 @@
\def\settexthoffset % name will change
{\texthoffset\doifbothsides\backspace\backspace{\dimexpr\paperwidth-\backspace-\makeupwidth\relax}}
-
+
% The next hack is too tricky as we may shipout more pages:
%
% \def\freezepagestatechecks
@@ -1199,11 +1203,13 @@
\def\goleftonpage % name will change (we could cache)
{\hskip-\dimexpr\leftmargindistance+\leftmarginwidth+\leftedgedistance+\leftedgewidth\relax}
-\def\doifmarginswapelse#1#2%
+\def\doifelsemarginswap#1#2%
{\doifbothsides{#1}{#1}{#2}}
+\let\doifmarginswapelse\doifelsemarginswap
+
\def\swapmargins % name will change
- {\doifmarginswapelse\relax\doswapmargins}
+ {\doifelsemarginswap\relax\doswapmargins}
\def\doswapmargins % name will change
{\let\swapmargins \relax % to prevent local swapping
@@ -1221,7 +1227,7 @@
{\ifsinglesided
\expandafter\firstoftwoarguments
\else
- \expandafter\doifoddpageelse
+ \expandafter\doifelseoddpage
\fi}
\def\outermarginwidth {\rightorleftpageaction\rightmarginwidth \leftmarginwidth }
@@ -1275,7 +1281,7 @@
{\globalpopmacro\currentlayout
\globalpopmacro\page_paper_restore
\page_paper_restore
- \setuplayout\relax}
+ \setuplayout[\currentlayout]\relax} % explicit !
%D \macros
%D {showprint, showframe, showlayout, showsetups}
@@ -1377,6 +1383,9 @@
\definepapersize [A9] [\c!width=37mm,\c!height=52mm]
\definepapersize [A10] [\c!width=26mm,\c!height=37mm]
+\definepapersize [A4/2][\c!width=\dimexpr297mm/2\relax,\c!height=210mm] % 148.5mm
+%definepapersize [2A5] [\c!width=296mm,\c!height=210mm] % doublewide
+
\definepapersize [B0] [\c!width=1000mm,\c!height=1414mm]
\definepapersize [B1] [\c!width=707mm,\c!height=1000mm]
\definepapersize [B2] [\c!width=500mm,\c!height=707mm]
@@ -1424,6 +1433,7 @@
\definepapersize [SW] [\c!width=800pt,\c!height=450pt] % wide
\definepapersize [HD] [\c!width=1920pt,\c!height=1080pt]
\definepapersize [HD+] [\c!width=1920pt,\c!height=1200pt]
+\definepapersize [HD-] [\c!width=960pt,\c!height=540pt]
%D These are handy too:
@@ -1491,6 +1501,11 @@
\definepapersize [A3plus] [\c!width=329mm,\c!height=483mm]
+%D For Alan:
+
+\definepapersize [business] [\c!width=85mm,\c!height=55mm]
+\definepapersize [businessUS] [\c!width=3.5in,\c!height=2in]
+
%D We can now default to a reasonable size. We match the print
%D paper size with the typeset paper size. This setting should
%D come after the first layout specification (already done).
@@ -1505,15 +1520,18 @@
% [ \c!width=\paperwidth,
% \c!height=\paperheight]
+\setuppapersize
+ [\c!distance=1.5cm] % offset is already taken
+
\definepapersize
[oversized]
- [ \c!width=\dimexpr\paperwidth +1.5cm\relax,
- \c!height=\dimexpr\paperheight+1.5cm\relax]
+ [ \c!width=\dimexpr\paperwidth +\layouttargetparameter\c!distance\relax,
+ \c!height=\dimexpr\paperheight+\layouttargetparameter\c!distance\relax]
\definepapersize
[undersized]
- [ \c!width=\dimexpr\paperwidth -1.5cm\relax,
- \c!height=\dimexpr\paperheight-1.5cm\relax]
+ [ \c!width=\dimexpr\paperwidth -\layouttargetparameter\c!distance\relax,
+ \c!height=\dimexpr\paperheight-\layouttargetparameter\c!distance\relax]
\definepapersize
[doublesized]
@@ -1522,8 +1540,13 @@
\definepapersize
[doubleoversized]
- [ \c!width=\dimexpr \paperheight+1.5cm\relax,
- \c!height=\dimexpr2\paperwidth +1.5cm\relax]
+ [ \c!width=\dimexpr \paperheight+\layouttargetparameter\c!distance\relax,
+ \c!height=\dimexpr2\paperwidth +\layouttargetparameter\c!distance\relax]
+
+\definepapersize
+ [doublewide]
+ [ \c!width=\dimexpr2\paperwidth \relax,
+ \c!height=\dimexpr \paperheight\relax]
% \setuppapersize
% [A4][A4]
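The derived paper sizes (oversized, undersized, doubleoversized) now take their slack from a distance parameter instead of a hard-coded 1.5cm, and doublewide, A4/2, HD- and two business card sizes are added. A minimal usage sketch, not part of the patch (that the distance can be changed with a plain \setuppapersize assignment is an assumption based on the default set above):

\setuppapersize[distance=2cm]  % assumption: widens the default 1.5cm slack
\setuppapersize[A4][oversized] % typeset at A4, print on a sheet with room for marks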
diff --git a/tex/context/base/page-lin.lua b/tex/context/base/page-lin.lua
index 7e8e9ad8a..5a447c458 100644
--- a/tex/context/base/page-lin.lua
+++ b/tex/context/base/page-lin.lua
@@ -8,37 +8,49 @@ if not modules then modules = { } end modules ['page-lin'] = {
-- experimental -> will become builders
-local trace_numbers = false trackers.register("lines.numbers", function(v) trace_numbers = v end)
+-- if there is demand for it, we can support multiple numbering streams
+-- and use more than one attribute
-local report_lines = logs.reporter("lines")
+local next, tonumber = next, tonumber
-local attributes, nodes, node, context = attributes, nodes, node, context
+local trace_numbers = false trackers.register("lines.numbers", function(v) trace_numbers = v end)
-nodes.lines = nodes.lines or { }
-local lines = nodes.lines
+local report_lines = logs.reporter("lines")
-lines.data = lines.data or { } -- start step tag
-local data = lines.data
-local last = #data
+local attributes = attributes
+local nodes = nodes
+local context = context
-local texgetbox = tex.getbox
+local implement = interfaces.implement
-lines.scratchbox = lines.scratchbox or 0
+nodes.lines = nodes.lines or { }
+local lines = nodes.lines
-local leftmarginwidth = nodes.leftmarginwidth
+lines.data = lines.data or { } -- start step tag
+local data = lines.data
+local last = #data
-storage.register("lines/data", lines.data, "nodes.lines.data")
+lines.scratchbox = lines.scratchbox or 0
--- if there is demand for it, we can support multiple numbering streams
--- and use more than one attibute
+storage.register("lines/data", data, "nodes.lines.data")
local variables = interfaces.variables
+local v_next = variables.next
+local v_page = variables.page
+local v_no = variables.no
+
local nodecodes = nodes.nodecodes
+local skipcodes = nodes.skipcodes
+local whatcodes = nodes.whatcodes
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local whatsit_code = nodecodes.whatsit
+local glue_code = nodecodes.glue
+local glyph_code = nodecodes.glyph
+local leftskip_code = skipcodes.leftskip
+local textdir_code = whatcodes.dir
local a_displaymath = attributes.private('displaymath')
local a_linenumber = attributes.private('linenumber')
@@ -49,12 +61,32 @@ local current_list = { }
local cross_references = { }
local chunksize = 250 -- not used in boxed
-local traverse_id = node.traverse_id
-local traverse = node.traverse
-local copy_node = node.copy
-local hpack_node = node.hpack
-local insert_node_after = node.insert_after
-local insert_node_before = node.insert_before
+local nuts = nodes.nuts
+
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getnext = nuts.getnext
+local getattr = nuts.getattr
+local getlist = nuts.getlist
+local getbox = nuts.getbox
+local getfield = nuts.getfield
+
+local setfield = nuts.setfield
+
+local traverse_id = nuts.traverse_id
+local traverse = nuts.traverse
+local copy_node = nuts.copy
+local hpack_node = nuts.hpack
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+local is_display_math = nuts.is_display_math
+local leftmarginwidth = nuts.leftmarginwidth
+
+local negated_glue = nuts.pool.negatedglue
+local new_hlist = nuts.pool.hlist
+
+local ctx_convertnumber = context.convertnumber
+local ctx_makelinenumber = context.makelinenumber
-- cross referencing
@@ -67,16 +99,16 @@ end
local function resolve(n,m) -- we can now check the 'line' flag (todo)
while n do
- local id = n.id
+ local id = getid(n)
if id == whatsit_code then -- why whatsit
- local a = n[a_linereference]
+ local a = getattr(n,a_linereference)
if a then
cross_references[a] = m
end
elseif id == hlist_code or id == vlist_code then
- resolve(n.list,m)
+ resolve(getlist(n),m)
end
- n = n.next
+ n = getnext(n)
end
end
@@ -105,7 +137,7 @@ filters.line = filters.line or { }
function filters.line.default(data)
-- helpers.title(data.entries.linenumber or "?",data.metadata)
- context.convertnumber(data.entries.conversion or "numbers",data.entries.linenumber or "0")
+ ctx_convertnumber(data.entries.conversion or "numbers",data.entries.linenumber or "0")
end
function filters.line.page(data,prefixspec,pagespec) -- redundant
@@ -133,9 +165,19 @@ function boxed.register(configuration)
return last
end
-function commands.registerlinenumbering(configuration)
- context(boxed.register(configuration))
-end
+implement {
+ name = "registerlinenumbering",
+ actions = { boxed.register, context },
+ arguments = {
+ {
+ { "continue" },
+ { "start", "integer" },
+ { "step", "integer" },
+ { "method" },
+ { "tag" },
+ }
+ }
+}
function boxed.setup(n,configuration)
local d = data[n]
@@ -155,7 +197,20 @@ function boxed.setup(n,configuration)
return n
end
-commands.setuplinenumbering = boxed.setup
+implement {
+ name = "setuplinenumbering",
+ actions = boxed.setup,
+ arguments = {
+ "integer",
+ {
+ { "continue" },
+ { "start", "integer" },
+ { "step", "integer" },
+ { "method" },
+ { "tag" },
+ }
+ }
+}
local function check_number(n,a,skip,sameline)
local d = data[a]
@@ -165,20 +220,20 @@ local function check_number(n,a,skip,sameline)
if sameline then
skipflag = 0
if trace_numbers then
- report_lines("skipping broken line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
+ report_lines("skipping broken line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or v_no)
end
elseif not skip and s % d.step == 0 then
skipflag, d.start = 1, s + 1 -- (d.step or 1)
if trace_numbers then
- report_lines("making number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
+ report_lines("making number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or v_no)
end
else
skipflag, d.start = 0, s + 1 -- (d.step or 1)
if trace_numbers then
- report_lines("skipping line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
+ report_lines("skipping line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or v_no)
end
end
- context.makelinenumber(tag,skipflag,s,n.shift,n.width,leftmarginwidth(n.list),n.dir)
+ ctx_makelinenumber(tag,skipflag,s,getfield(n,"shift"),getfield(n,"width"),leftmarginwidth(getlist(n)),getfield(n,"dir"))
end
end
@@ -189,26 +244,27 @@ end
local function identify(list)
if list then
for n in traverse_id(hlist_code,list) do
- if n[a_linenumber] then
- return list
+ local a = getattr(n,a_linenumber)
+ if a then
+ return list, a
end
end
local n = list
while n do
- local id = n.id
+ local id = getid(n)
if id == hlist_code or id == vlist_code then
- local ok = identify(n.list)
+ local ok, a = identify(getlist(n))
if ok then
- return ok
+ return ok, a
end
end
- n = n.next
+ n = getnext(n)
end
end
end
function boxed.stage_zero(n)
- return identify(texgetbox(n).list)
+ return identify(getlist(getbox(n)))
end
-- reset ranges per page
@@ -217,66 +273,143 @@ end
function boxed.stage_one(n,nested)
current_list = { }
- local box = texgetbox(n)
+ local box = getbox(n)
if box then
- local list = box.list
- if nested then
- list = identify(list)
+ local found = nil
+ local list = getlist(box)
+ if list and nested then
+ list, found = identify(list)
end
- local last_a, last_v, skip = nil, -1, false
- for n in traverse_id(hlist_code,list) do -- attr test here and quit as soon as zero found
- if n.height == 0 and n.depth == 0 then
- -- skip funny hlists -- todo: check line subtype
- else
- local list = n.list
- local a = list[a_linenumber]
- if a and a > 0 then
- if last_a ~= a then
- local da = data[a]
- local ma = da.method
- if ma == variables.next then
- skip = true
- elseif ma == variables.page then
- da.start = 1 -- eventually we will have a normal counter
- end
- last_a = a
- if trace_numbers then
- report_lines("starting line number range %s: start %s, continue",a,da.start,da.continue or "no")
+ if list then
+ local last_a, last_v, skip = nil, -1, false
+ for n in traverse_id(hlist_code,list) do -- attr test here and quit as soon as zero found
+ if getfield(n,"height") == 0 and getfield(n,"depth") == 0 then
+ -- skip funny hlists -- todo: check line subtype
+ else
+ local list = getlist(n)
+ local a = getattr(list,a_linenumber)
+ if not a or a == 0 then
+ local n = getnext(list)
+ while n do
+ local id = getid(n)
+ if id == whatsit_code and getsubtype(n) == textdir_code then
+ n = getnext(n)
+ elseif id == glue_code and getsubtype(n) == leftskip_code then
+ n = getnext(n)
+                        else
+                            if id == glyph_code then
+                                break
+                            else
+                                -- can be hlist or skip (e.g. footnote line)
+                                n = getnext(n)
+                            end
+                        end
end
+ a = n and getattr(n,a_linenumber)
end
- if n[a_displaymath] then
- if nodes.is_display_math(n) then
- check_number(n,a,skip)
+ if a and a > 0 then
+ if last_a ~= a then
+ local da = data[a]
+ local ma = da.method
+ if ma == v_next then
+ skip = true
+ elseif ma == v_page then
+ da.start = 1 -- eventually we will have a normal counter
+ end
+ last_a = a
+ if trace_numbers then
+ report_lines("starting line number range %s: start %s, continue %s",a,da.start,da.continue or v_no)
+ end
end
- else
- local v = list[a_verbatimline]
- if not v or v ~= last_v then
- last_v = v
- check_number(n,a,skip)
+ if getattr(n,a_displaymath) then
+ if is_display_math(n) then
+ check_number(n,a,skip)
+ end
else
- check_number(n,a,skip,true)
+ local v = getattr(list,a_verbatimline)
+ if not v or v ~= last_v then
+ last_v = v
+ check_number(n,a,skip)
+ else
+ check_number(n,a,skip,true)
+ end
end
+ skip = false
end
- skip = false
end
end
end
end
end
+-- [dir][leftskip][content]
+
+function boxed.stage_two(n,m)
+ if #current_list > 0 then
+ m = m or lines.scratchbox
+ local t, tn = { }, 0
+ for l in traverse_id(hlist_code,getlist(getbox(m))) do
+ tn = tn + 1
+ t[tn] = copy_node(l) -- use take_box instead
+ end
+ for i=1,#current_list do
+ local li = current_list[i]
+ local n, m, ti = li[1], li[2], t[i]
+ if ti then
+ local l = getlist(n)
+ -- we want to keep leftskip at the start
+-- local id = getid(l)
+-- if id == whatsit_code and getsubtype(l) == textdir_code then
+-- l = getnext(l)
+-- id = getid(l)
+-- end
+-- if getid(l) == glue_code and getsubtype(l) == leftskip_code then
+-- -- [leftskip] [number] [rest]
+-- local forward = copy_node(l)
+-- local backward = negated_glue(l)
+-- local next = getnext(l)
+-- setfield(l,"next",backward)
+-- setfield(backward,"prev",l)
+-- setfield(backward,"next",ti)
+-- setfield(ti,"prev",backward)
+-- setfield(ti,"next",forward)
+-- setfield(forward,"prev",ti)
+-- setfield(forward,"next",next)
+-- setfield(next,"prev",forward)
+-- else
+ -- [number] [rest]
+ setfield(ti,"next",l)
+ setfield(l,"prev",ti)
+ setfield(n,"list",ti)
+-- end
+ resolve(n,m)
+ else
+ report_lines("error in linenumbering (1)")
+ return
+ end
+ end
+ end
+end
+
function boxed.stage_two(n,m)
if #current_list > 0 then
m = m or lines.scratchbox
local t, tn = { }, 0
- for l in traverse_id(hlist_code,texgetbox(m).list) do
+ for l in traverse_id(hlist_code,getlist(getbox(m))) do
tn = tn + 1
- t[tn] = copy_node(l)
+ t[tn] = copy_node(l) -- use take_box instead
end
for i=1,#current_list do
local li = current_list[i]
local n, m, ti = li[1], li[2], t[i]
if ti then
- ti.next, n.list = n.list, ti
+ local l = getlist(n)
+ setfield(ti,"next",l)
+ setfield(l,"prev",ti)
+ local h = copy_node(n)
+ setfield(h,"dir","TLT")
+ setfield(h,"list",ti)
+ setfield(n,"list",h)
resolve(n,m)
else
report_lines("error in linenumbering (1)")
@@ -286,5 +419,14 @@ function boxed.stage_two(n,m)
end
end
-commands.linenumbersstageone = boxed.stage_one
-commands.linenumbersstagetwo = boxed.stage_two
+implement {
+ name = "linenumbersstageone",
+ actions = boxed.stage_one,
+ arguments = { "integer", "boolean" }
+}
+
+implement {
+ name = "linenumbersstagetwo",
+ actions = boxed.stage_two,
+ arguments = { "integer", "integer" }
+}
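The new stage_two attaches the prepared line number box to each line by relinking the nut lists directly rather than going through the node wrappers. The relinking idiom, isolated as a minimal sketch (not part of the patch):

local nuts     = nodes.nuts
local getlist  = nuts.getlist
local setfield = nuts.setfield

-- prepend a nut (here: a line number hlist) to the content list of an hlist nut
local function prepend(hlist,prefix)
    local head = getlist(hlist)
    setfield(prefix,"next",head)      -- prefix -> old head
    if head then
        setfield(head,"prev",prefix)  -- keep the prev link consistent
    end
    setfield(hlist,"list",prefix)     -- the prefix becomes the new head
end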
diff --git a/tex/context/base/page-lin.mkiv b/tex/context/base/page-lin.mkiv
deleted file mode 100644
index ae293091c..000000000
--- a/tex/context/base/page-lin.mkiv
+++ /dev/null
@@ -1,573 +0,0 @@
-%D \module
-%D [ file=page-lin,
-%D version=2007.11.29,
-%D title=\CONTEXT\ Core Macros,
-%D subtitle=Line Numbering,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-% generic or not ... maybe not bother too much and simplify to mkiv only
-% get rid of \mk* (left over from experimental times)
-%
-% to be redone (was experiment) .. can be hooked into margin code
-
-\writestatus{loading}{ConTeXt Core Macros / Line Numbering}
-
-\unprotect
-
-% todo: save settings
-%
-% low level interface
-%
-% we should use normal counters but then we need to sync settings
-
-% some line
-%
-% \startlocallinenumbering
-% some source code 1\par
-% some source code 2\par
-% some source code 3\par
-% \stoplocallinenumbering
-%
-% some line
-
-\registerctxluafile{page-lin}{1.001}
-
-\definesystemattribute[linenumber] [public]
-\definesystemattribute[linereference][public]
-
-\appendtoksonce
- \attribute\linenumberattribute\attributeunsetvalue
-\to \everyforgetall
-
-\newcount \linenumber % not used
-\newbox \b_page_lines_scratch
-\newcount \c_page_lines_reference
-\newconstant\c_page_lines_nesting
-
-\newconditional\tracelinenumbering % we keep this for old times sake
-
-\installtextracker
- {lines.numbers.show}
- {\settrue \tracelinenumbering}
- {\setfalse\tracelinenumbering}
-
-% id nr shift width leftskip dir
-
-\installcorenamespace{linenumberinginstance}
-
-\let\makelinenumber\gobblesevenarguments % used at lua end
-
-\newconditional\page_postprocessors_needed_box
-
-\unexpanded\def\page_postprocessors_linenumbers_page #1{\page_lines_add_numbers_to_box{#1}\plusone \plusone \zerocount}
-\unexpanded\def\page_postprocessors_linenumbers_box #1{\page_lines_add_numbers_to_box{#1}\plusone \plusone \zerocount}
-\unexpanded\def\page_postprocessors_linenumbers_deepbox#1{\page_lines_add_numbers_to_box{#1}\plusone \plusone \plusone }
-\unexpanded\def\page_postprocessors_linenumbers_column #1{\page_lines_add_numbers_to_box{#1}\currentcolumn\nofcolumns\zerocount}
-
-\def\page_lines_parameters_regular
- {continue = "\ifnum\c_page_lines_mode=\zerocount\v!yes\else\v!no\fi",
- start = \number\linenumberingparameter\c!start,
- step = \number\linenumberingparameter\c!step,
- method = "\linenumberingparameter\c!method",
- tag = "\currentlinenumbering"}
-
-\def\page_lines_parameters_update
- {continue = "\ifnum\c_page_lines_mode=\zerocount\v!yes\else\v!no\fi"}
-
-\def\page_lines_start_define
- {\setxvalue{\??linenumberinginstance\currentlinenumbering}{\ctxcommand{registerlinenumbering({\page_lines_parameters_regular})}}}
-
-\def\page_lines_start_update
- {\ctxcommand{setuplinenumbering(\csname\??linenumberinginstance\currentlinenumbering\endcsname,{\page_lines_parameters_update})}}
-
-\def\page_lines_setup
- {\ifcsname \??linenumberinginstance\currentlinenumbering\endcsname
- \ctxcommand{setuplinenumbering(\csname\??linenumberinginstance\currentlinenumbering\endcsname,{\page_lines_parameters_regular})}%
- \fi}
-
-% we could make this a bit more efficient by putting the end reference
-% in the same table as the start one but why make things complex ...
-
-\let\dofinishlinereference\dofinishfullreference % at lua end
-
-\unexpanded\def\page_lines_some_reference#1#2#3%
- {\dontleavehmode\begingroup
- \global\advance\c_page_lines_reference\plusone
- \attribute\linereferenceattribute\c_page_lines_reference
- #3%
- % for the moment we use a simple system i.e. no prefixes etc .. todo: store as number
- \normalexpanded{\strc_references_set_named_reference{line}{#2}{conversion=\linenumberingparameter\c!conversion}{\the\c_page_lines_reference}}% kind labels userdata text
- \endgroup}
-
-% \def\page_lines_reference_start#1{\page_lines_some_reference{#1}{lr:b:#1}{}} % reimplemented later
-% \def\page_lines_reference_stop #1{\page_lines_some_reference{#1}{lr:e:#1}{}} % reimplemented later
-
-% \def\mklinestartreference#1[#2]{\in{#1}[lr:b:#2]} % not interfaced/ not used
-% \def\mklinestopreference #1[#2]{\in{#1}[lr:e:#2]} % not interfaced/ not used
-
-\newif\ifnumberinglines % will change
-\newif\iftypesettinglines % will change
-
-\installcorenamespace{linenumbering}
-
-\installcommandhandler \??linenumbering {linenumbering} \??linenumbering
-
-\setnewconstant\c_page_lines_mode \plusone % 0=continue, 1=restart
-\setnewconstant\c_page_lines_location \plusone % 0=middle, 1=left, 2=right, 3=inner, 4=outer, 5=text, 6=begin, 7=end
-\setnewconstant\c_page_lines_alignment\plusfive % 0=middle, 1=left, 2=right, 5=auto
-
-\newdimen\d_page_lines_width
-\newdimen\d_page_lines_distance
-
-\newevery \beforeeverylinenumbering \relax
-\newevery \aftereverylinenumbering \relax
-\newevery \everylinenumber \relax
-
-\appendtoks
- \page_lines_setup
-\to \everysetuplinenumbering
-
-\appendtoks
- \page_lines_start_define
-\to \everydefinelinenumbering
-
-\setuplinenumbering
- [\c!conversion=\v!numbers,
- \c!start=1,
- \c!step=1,
- \c!method=\v!first,
- \c!continue=\v!no,
- \c!location=\v!left,
- \c!style=,
- \c!color=,
- \c!width=2\emwidth,
- \c!left=,
- \c!right=,
- \c!command=,
- \c!distance=\zeropoint,
- \c!align=\v!auto]
-
-\definelinenumbering
- []
-
-% no intermediate changes in values, define a class, otherwise each range
-% would need a number
-
-% todo: text
-
-\installcorenamespace{linenumberinglocation}
-\installcorenamespace{linenumberingalternative}
-
-\expandafter\let\csname\??linenumberinglocation\v!middle \endcsname \zerocount
-\expandafter\let\csname\??linenumberinglocation\v!left \endcsname \plusone
-\expandafter\let\csname\??linenumberinglocation\v!margin \endcsname \plusone
-\expandafter\let\csname\??linenumberinglocation\v!inmargin \endcsname \plusone
-\expandafter\let\csname\??linenumberinglocation\v!inleft \endcsname \plusone
-\expandafter\let\csname\??linenumberinglocation\v!right \endcsname \plustwo
-\expandafter\let\csname\??linenumberinglocation\v!inright \endcsname \plustwo
-\expandafter\let\csname\??linenumberinglocation\v!inner \endcsname \plusthree
-\expandafter\let\csname\??linenumberinglocation\v!outer \endcsname \plusfour
-\expandafter\let\csname\??linenumberinglocation\v!text \endcsname \plusfive
-\expandafter\let\csname\??linenumberinglocation\v!begin \endcsname \plussix
-\expandafter\let\csname\??linenumberinglocation\v!end \endcsname \plusseven
-
-\expandafter\let\csname\??linenumberingalternative\v!middle \endcsname \zerocount
-\expandafter\let\csname\??linenumberingalternative\v!right \endcsname \plusone
-\expandafter\let\csname\??linenumberingalternative\v!flushleft \endcsname \plusone
-\expandafter\let\csname\??linenumberingalternative\v!left \endcsname \plustwo
-\expandafter\let\csname\??linenumberingalternative\v!flushright\endcsname \plustwo
-\expandafter\let\csname\??linenumberingalternative\v!auto \endcsname \plusfive
-
-% \startlinenumbering[|continue|settings|name]
-% \startlinenumbering[name][|continue|settings]
-
-\unexpanded\def\startlinenumbering
- {\dodoubleempty\page_lines_start}
-
-\def\page_lines_start % we stay downward compatible
- {\begingroup
- \ifsecondargument
- \expandafter\page_lines_start_two
- \else\iffirstargument
- \doubleexpandafter\page_lines_start_one
- \else
- \doubleexpandafter\page_lines_start_zero
- \fi\fi}
-
-\def\page_lines_start_zero[#1][#2]%
- {\edef\m_argument{\linenumberingparameter\c!continue}%
- \ifx\m_argument\v!continue
- \c_page_lines_mode\zerocount
- \else
- \c_page_lines_mode\plusone
- \fi
- \page_lines_start_followup}
-
-\def\page_lines_start_one[#1][#2]% [continue||settings] % historic
- {\edef\m_argument{#1}%
- \ifx\m_argument\v!continue
- \c_page_lines_mode\zerocount
- \let\currentlinenumbering\empty
- \else
- \c_page_lines_mode\plusone
- \ifx\m_argument\v!empty
- \let\currentlinenumbering\empty
- \else
- \doifassignmentelse{#1}
- {\let\currentlinenumbering\empty
- \setupcurrentlinenumbering[#1]}
- {\doifnumberelse\m_argument
- {\let\currentlinenumbering\empty
- \letlinenumberingparameter\c!start\m_argument}
- {\let\currentlinenumbering\m_argument}}%
- \fi
- \edef\p_continue{\linenumberingparameter\c!continue}%
- \ifx\p_continue\v!yes
- \c_page_lines_mode\zerocount
- \fi
- \fi
- \page_lines_start_followup}
-
-\def\page_lines_start_two[#1][#2]% [tag][continue||settings]
- {\edef\currentlinenumbering{#1}%
- \edef\m_argument{#2}%
- \ifx\m_argument\v!continue
- \c_page_lines_mode\zerocount
- \else
- \c_page_lines_mode\plusone
- \ifx\m_argument\v!empty \else
- \doifassignmentelse{#2}
- {\setupcurrentlinenumbering[#2]}
- {\doifnumber\m_argument
- {\letlinenumberingparameter\c!start\m_argument}}%
- \fi
- \edef\p_continue{\linenumberingparameter\c!continue}%
- \ifx\p_continue\v!yes
- \c_page_lines_mode\zerocount
- \fi
- \fi
- \page_lines_start_followup}
-
-\def\page_lines_start_followup
- {\numberinglinestrue
- \the\beforeeverylinenumbering
- \globallet\page_postprocessors_page \page_postprocessors_linenumbers_page
- \globallet\page_postprocessors_column\page_postprocessors_linenumbers_column
- \global\settrue\page_postprocessors_needed_box % see core-rul.mkiv
- \ifcase\c_page_lines_mode\relax
- \page_lines_start_update % continue
- \or
- \page_lines_start_define % only when assignment
- \fi
- \attribute\linenumberattribute\getvalue{\??linenumberinginstance\currentlinenumbering}\relax}
-
-\unexpanded\def\stoplinenumbering
- {\attribute\linenumberattribute\attributeunsetvalue
- \the\aftereverylinenumbering
- \endgroup}
-
-% number placement .. will change into (the new) margin code
-
-\def\page_lines_number_inner_indeed{\doifoddpageelse\page_lines_number_left_indeed\page_lines_number_right_indeed}
-\def\page_lines_number_outer_indeed{\doifoddpageelse\page_lines_number_right_indeed\page_lines_number_left_indeed}
-
-\def\page_lines_number_left
- {\ifcase\c_page_lines_location
- \expandafter\page_lines_number_left_indeed
- \or
- \expandafter\page_lines_number_left_indeed
- \or
- \expandafter\page_lines_number_left_indeed
- \or
- \expandafter\page_lines_number_inner_indeed
- \or
- \expandafter\page_lines_number_outer_indeed
- \or
- \expandafter\page_lines_number_text_indeed
- \or
- \expandafter\page_lines_number_begin_indeed
- \or
- \expandafter\page_lines_number_end_indeed
- \fi}
-
-\def\page_lines_number_right
- {\ifcase\c_page_lines_location
- \expandafter\page_lines_number_right_indeed
- \or
- \expandafter\page_lines_number_right_indeed
- \or
- \expandafter\page_lines_number_right_indeed
- \or
- \expandafter\page_lines_number_outer_indeed
- \or
- \expandafter\page_lines_number_inner_indeed
- \or
- \expandafter\page_lines_number_text_indeed
- \or
- \expandafter\page_lines_number_end_indeed
- \or
- \expandafter\page_lines_number_begin_indeed
- \fi}
-
-\newconditional\c_page_lines_fake_number
-\newconstant \b_page_lines_number
-\newconstant \c_page_lines_column
-\newconstant \c_page_lines_last_column
-
-\def\page_lines_add_numbers_to_box#1#2#3#4% box col max nesting
- {\bgroup
- \b_page_lines_number #1\relax
- \c_page_lines_column #2\relax
- \c_page_lines_last_column#3\relax
- \c_page_lines_nesting #4\relax
- \fullrestoreglobalbodyfont
- \let\makelinenumber\page_lines_make_number % used at lua end
- \setbox\b_page_lines_scratch\vbox
- {\forgetall
- \offinterlineskip
- \ctxcommand{linenumbersstageone(\number\b_page_lines_number,\ifcase\c_page_lines_nesting false\else true\fi)}}%
- \ctxcommand{linenumbersstagetwo(\number\b_page_lines_number,\number\b_page_lines_scratch)}% can move to lua code
- \egroup}
-
-\let\page_lines_make_number_indeed\relax
-
-\def\page_lines_make_number#1#2%
- {\edef\currentlinenumbering{#1}%
- \ifcase#2\relax
- \settrue \c_page_lines_fake_number
- \else
- \setfalse\c_page_lines_fake_number
- \fi
- \c_page_lines_location \executeifdefined{\??linenumberinglocation \linenumberingparameter\c!location}\plusone \relax % left
- \c_page_lines_alignment\executeifdefined{\??linenumberingalternative\linenumberingparameter\c!align }\plusfive\relax % auto
- \ifcase\c_page_lines_last_column\relax
- \settrue \c_page_lines_fake_number
- \or
- % one column
- \ifcase\c_page_lines_location
- \settrue \c_page_lines_fake_number
- \let\page_lines_make_number_indeed\page_lines_number_fake_indeed
- \or
- \let\page_lines_make_number_indeed\page_lines_number_left
- \or
- \let\page_lines_make_number_indeed\page_lines_number_right
- \or % inner
- \let\page_lines_make_number_indeed\page_lines_number_inner_indeed
- \or % outer
- \let\page_lines_make_number_indeed\page_lines_number_outer_indeed
- \or % text
- \let\page_lines_make_number_indeed\page_lines_number_text_indeed
- \or
- \let\page_lines_make_number_indeed\page_lines_number_begin_indeed
- \or
- \let\page_lines_make_number_indeed\page_lines_number_end_indeed
- \fi
- \else\ifcase\c_page_lines_column\relax
- \settrue \c_page_lines_fake_number
- \or
- \let\page_lines_make_number_indeed\page_lines_number_left
- \ifcase\c_page_lines_location\or
- \c_page_lines_location\plusone
- \or
- \c_page_lines_location\plustwo
- \else
- \c_page_lines_location\plusone
- \or
- \c_page_lines_location\plusone
- \or
- \c_page_lines_location\plusone
- \or
- \c_page_lines_location\plusone % todo
- \or
- \c_page_lines_location\plusone % todo
- \fi
- \else
- \let\page_lines_make_number_indeed\page_lines_number_right
- \ifcase\c_page_lines_location\or
- \c_page_lines_location\plustwo
- \or
- \c_page_lines_location\plusone
- \or
- \c_page_lines_location\plustwo
- \or
- \c_page_lines_location\plustwo
- \or
- \c_page_lines_location\plustwo % todo
- \or
- \c_page_lines_location\plustwo % todo
- \fi
- \fi\fi
- \page_lines_make_number_indeed{#1}}
-
-\let\page_lines_number_fake_indeed\gobblesixarguments % needs checking
-
-\def\page_lines_number_text_indeed#1#2#3#4#5#6% beware, one needs so compensate for this in the \hsize
- {\hbox{\page_lines_number_construct{#1}{2}{#2}{#5}\hskip#3\scaledpoint}}
-
-\def\page_lines_number_left_indeed#1#2#3#4#5#6%
- {\naturalhbox to \zeropoint
- {\ifcase\istltdir#6\else \hskip-#4\scaledpoint \fi
- \llap{\page_lines_number_construct{#1}{2}{#2}{#5}\kern#3\scaledpoint}}}
-
-\def\page_lines_number_right_indeed#1#2#3#4#5#6%
- {\naturalhbox to \zeropoint
- {\ifcase\istltdir#6\else \hskip-#4\scaledpoint \fi
- \rlap{\hskip\dimexpr#4\scaledpoint+#3\scaledpoint\relax\page_lines_number_construct{#1}{1}{#2}{#5}}}}
-
-\def\page_lines_number_begin_indeed#1#2#3#4#5#6%
- {\ifcase\istltdir#6\relax
- \c_page_lines_location\plusone
- \expandafter\page_lines_number_left_indeed
- \else
- \c_page_lines_location\plustwo
- \expandafter\page_lines_number_left_indeed
- \fi{#1}{#2}{#3}{#4}{#5}{#6}}
-
-\def\page_lines_number_end_indeed#1#2#3#4#5#6%
- {\ifcase\istltdir#6\relax
- \c_page_lines_location\plustwo
- \expandafter\page_lines_number_left_indeed
- \else
- \c_page_lines_location\plusone
- \expandafter\page_lines_number_left_indeed
- \fi{#1}{#2}{#3}{#4}{#5}{#6}}
-
-\def\page_lines_number_construct#1#2#3#4% tag 1=left|2=right linenumber leftskip
- {\begingroup
- \def\currentlinenumbering{#1}%
- \def\linenumber{#3}% unsafe
- \doifelse{\linenumberingparameter\c!width}\v!margin
- {\d_page_lines_width\leftmarginwidth}
- {\d_page_lines_width\linenumberingparameter\c!width}%
- \d_page_lines_distance\linenumberingparameter\c!distance\relax
- \ifcase#2\relax\or\hskip\d_page_lines_distance\fi\relax
- \ifnum\c_page_lines_location=\plusfive
- \scratchdimen\dimexpr#4\scaledpoint-\d_page_lines_distance\relax
- \c_page_lines_location\plusone
- \else
- \scratchdimen\zeropoint
- \fi
- \ifcase\c_page_lines_alignment
- \c_page_lines_location\zerocount % middle
- \or
- \c_page_lines_location\plusone % left
- \or
- \c_page_lines_location\plustwo % right
- \fi
- \ifconditional\tracelinenumbering\ruledhbox\else\hbox\fi to \d_page_lines_width
- {\ifcase\c_page_lines_location
- \hss % middle
- \or
- % left
- \or
- \hss % right
- \or
- \doifoddpageelse\relax\hss % inner
- \or
- \doifoddpageelse\hss\relax % outer
- \fi
- \ifconditional\c_page_lines_fake_number
- % we need to reserve space
- \else
- \uselinenumberingstyleandcolor\c!style\c!color
- \linenumberingparameter\c!command
- {\linenumberingparameter\c!left
- \convertnumber{\linenumberingparameter\c!conversion}{#3}%
- \linenumberingparameter\c!right}%
- \fi
- \ifcase\c_page_lines_location
- \hss % middle
- \or
- \hss % left
- \or
- % right
- \or
- \doifoddpageelse\hss\relax % inner
- \or
- \doifoddpageelse\relax\hss % outer
- \fi}%
- \ifcase#2\relax
- \hskip-\scratchdimen
- \or
- \hskip-\scratchdimen
- \or
- \hskip\dimexpr\d_page_lines_distance-\scratchdimen\relax
- \fi
- \relax
- \the\everylinenumber
- \endgroup}
-
-% referencing: \permithyphenation, also removes leading spaces (new per 29-11-2013)
-
-\unexpanded\def\someline [#1]{\page_lines_reference_start{#1}\page_lines_reference_stop{#1}} % was just a def
-\unexpanded\def\startline[#1]{\page_lines_reference_start{#1}\ignorespaces}
-\unexpanded\def\stopline [#1]{\removeunwantedspaces\permithyphenation\page_lines_reference_stop{#1}}
-
-\def\page_lines_reference_show_start
- {\ifconditional\tracelinenumbering
- \expandafter\page_lines_reference_show_start_indeed
- \else
- \expandafter\gobbleoneargument
- \fi}
-
-\def\page_lines_reference_show_stop
- {\ifconditional\tracelinenumbering
- \expandafter\page_lines_reference_show_stop_indeed
- \else
- \expandafter\gobbleoneargument
- \fi}
-
-
-\def\page_lines_reference_show_start_indeed#1%
- {\setbox\scratchbox\hbox{\llap
- {\vrule\s!width\onepoint\s!depth\strutdp\s!height.8\strutht\raise.85\strutht\hbox{\llap{\tt\txx#1}}}}%
- \smashbox\scratchbox
- \box\scratchbox}
-
-\def\page_lines_reference_show_stop_indeed#1%
- {\setbox\scratchbox\hbox{\rlap
- {\raise.85\strutht\hbox{\rlap{\tt\txx#1}}\vrule\s!width\onepoint\s!depth\strutdp\s!height.8\strutht}}%
- \smashbox\scratchbox
- \box\scratchbox}
-
-\def\page_lines_reference_start#1{\page_lines_some_reference{#1}{lr:b:#1}{\page_lines_reference_show_start{#1}}}
-\def\page_lines_reference_stop #1{\page_lines_some_reference{#1}{lr:e:#1}{\page_lines_reference_show_stop {#1}}}
-
-% eventually we will do this in lua
-
-\def\currentreferencelinenumber{\ctxcommand{filterreference("linenumber")}}
-
-\let\m_page_lines_from\empty
-\let\m_page_lines_to \empty
-
-\unexpanded\def\doifelsesamelinereference#1#2#3%
- {\doifreferencefoundelse{lr:b:#1}
- {\edef\m_page_lines_from{\currentreferencelinenumber}%
- \doifreferencefoundelse{lr:e:#1}
- {\edef\m_page_lines_to{\currentreferencelinenumber}%
- %[\m_page_lines_from,\m_page_lines_to]
- \ifx\m_page_lines_from\m_page_lines_to#2\else#3\fi}
- {#2}}
- {#2}}
-
-\unexpanded\def\inline#1[#2]%
- {\doifelsenothing{#1}
- {\doifelsesamelinereference{#2}
- {\in{\leftlabeltext\v!line}{\rightlabeltext\v!line}[lr:b:#2]}
- {\in{\leftlabeltext\v!lines}{}[lr:b:#2]--\in{}{\rightlabeltext\v!lines}[lr:e:#2]}}
- {\doifelsesamelinereference{#2}
- {\in{#1}[lr:b:#2]}
- {\in{#1}[lr:b:#2]--\in[lr:e:#2]}}}
-
-\unexpanded\def\inlinerange[#1]%
- {\doifelsesamelinereference{#1}
- {\in[lr:b:#1]}
- {\in[lr:b:#1]\endash\in[lr:e:#1]}}
-
-\protect \endinput
diff --git a/tex/context/base/page-lin.mkvi b/tex/context/base/page-lin.mkvi
new file mode 100644
index 000000000..dd13a98c3
--- /dev/null
+++ b/tex/context/base/page-lin.mkvi
@@ -0,0 +1,590 @@
+%D \module
+%D [ file=page-lin,
+%D version=2007.11.29,
+%D title=\CONTEXT\ Core Macros,
+%D subtitle=Line Numbering,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% generic or not ... maybe not bother too much and simplify to mkiv only
+% get rid of \mk* (left over from experimental times)
+%
+% to be redone (was experiment) .. can be hooked into margin code
+% reshuffle arguments
+
+\writestatus{loading}{ConTeXt Core Macros / Line Numbering}
+
+\unprotect
+
+% todo: save settings
+%
+% low level interface
+%
+% we should use normal counters but then we need to sync settings
+
+% some line
+%
+% \startlocallinenumbering
+% some source code 1\par
+% some source code 2\par
+% some source code 3\par
+% \stoplocallinenumbering
+%
+% some line
+
+\registerctxluafile{page-lin}{1.001}
+
+\definesystemattribute[linenumber] [public]
+\definesystemattribute[linereference][public]
+
+\appendtoksonce
+ \attribute\linenumberattribute\attributeunsetvalue
+\to \everyforgetall
+
+\newcount \linenumber % not used
+\newbox \b_page_lines_scratch
+\newcount \c_page_lines_reference
+\newconstant\c_page_lines_nesting
+
+\newconditional\tracelinenumbering % we keep this for old times' sake
+
+\installtextracker
+ {lines.numbers.show}
+ {\settrue \tracelinenumbering}
+ {\setfalse\tracelinenumbering}
+
+% id nr shift width leftskip dir
+
+\installcorenamespace{linenumberinginstance}
+
+% tag skipflag s getfield(n,"shift") getfield(n,"width") leftmarginwidth(getlist(n)) getfield(n,"dir"))
+
+\let\makelinenumber\gobblesevenarguments % used at lua end
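+%
+% the seven arguments are: tag, mode (skip flag), linenumber, shift, width, leftskip
+% and dir; they show up as the parameters of \page_lines_make_number below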
+
+\newconditional\page_postprocessors_needed_box
+
+\unexpanded\def\page_postprocessors_linenumbers_page #tag{\page_lines_add_numbers_to_box{#tag}\plusone \plusone \zerocount}
+\unexpanded\def\page_postprocessors_linenumbers_box #tag{\page_lines_add_numbers_to_box{#tag}\plusone \plusone \zerocount}
+\unexpanded\def\page_postprocessors_linenumbers_deepbox#tag{\page_lines_add_numbers_to_box{#tag}\plusone \plusone \plusone }
+\unexpanded\def\page_postprocessors_linenumbers_column #tag{\page_lines_add_numbers_to_box{#tag}\currentcolumn\nofcolumns\zerocount}
+
+\def\page_lines_start_define
+ {\setxvalue{\??linenumberinginstance\currentlinenumbering}%
+ {\clf_registerlinenumbering
+ continue {\ifnum\c_page_lines_mode=\zerocount\v!yes\else\v!no\fi}%
+ start \linenumberingparameter\c!start
+ step \linenumberingparameter\c!step
+ method {\linenumberingparameter\c!method}%
+ tag {\currentlinenumbering}%
+ }}
+
+\def\page_lines_start_update
+ {\clf_setuplinenumbering
+ \csname\??linenumberinginstance\currentlinenumbering\endcsname
+ {%
+ continue {\ifnum\c_page_lines_mode=\zerocount\v!yes\else\v!no\fi}%
+ }%
+ \relax}
+
+\def\page_lines_setup
+ {\ifcsname \??linenumberinginstance\currentlinenumbering\endcsname
+ \clf_setuplinenumbering
+ \csname\??linenumberinginstance\currentlinenumbering\endcsname
+ {%
+ continue {\ifnum\c_page_lines_mode=\zerocount\v!yes\else\v!no\fi}%
+ start \linenumberingparameter\c!start
+ step \linenumberingparameter\c!step
+ method {\linenumberingparameter\c!method}%
+ tag {\currentlinenumbering}%
+ }%
+ \relax
+ \fi}
+
+% we could make this a bit more efficient by putting the end reference
+% in the same table as the start one but why make things complex ...
+
+\unexpanded\def\page_lines_some_reference#1#2#3%
+ {\dontleavehmode\begingroup
+ \global\advance\c_page_lines_reference\plusone
+ \attribute\linereferenceattribute\c_page_lines_reference
+ #3% todo: #3{#1} as there is no need to pass #1 as part of #3
+ % for the moment we use a simple system i.e. no prefixes etc .. todo: store as number
+ \c_strc_references_bind_state\zerocount % we don't want the prewordbreak and manage it here
+ \normalexpanded{\strc_references_set_named_reference{line}{#2}{conversion=\linenumberingparameter\c!conversion}{\the\c_page_lines_reference}}% kind labels userdata text
+ \endgroup}
+
+% \def\page_lines_reference_start#1{\page_lines_some_reference{#1}{lr:b:#1}{}} % reimplemented later
+% \def\page_lines_reference_stop #1{\page_lines_some_reference{#1}{lr:e:#1}{}} % reimplemented later
+
+% \def\mklinestartreference#1[#2]{\in{#1}[lr:b:#2]} % not interfaced/ not used
+% \def\mklinestopreference #1[#2]{\in{#1}[lr:e:#2]} % not interfaced/ not used
+
+\newif\ifnumberinglines % will change
+\newif\iftypesettinglines % will change
+
+\installcorenamespace{linenumbering}
+
+\installcommandhandler \??linenumbering {linenumbering} \??linenumbering
+
+\setnewconstant\c_page_lines_mode \plusone % 0=continue, 1=restart
+\setnewconstant\c_page_lines_location \plusone % 0=middle, 1=left, 2=right, 3=inner, 4=outer, 5=text, 6=begin, 7=end
+\setnewconstant\c_page_lines_alignment\plusfive % 0=middle, 1=left, 2=right, 5=auto
+
+\newdimen\d_page_lines_width
+\newdimen\d_page_lines_distance
+
+\newevery \beforeeverylinenumbering \relax
+\newevery \aftereverylinenumbering \relax
+\newevery \everylinenumber \relax
+
+\appendtoks
+ \page_lines_setup
+\to \everysetuplinenumbering
+
+\appendtoks
+ \page_lines_start_define
+\to \everydefinelinenumbering
+
+\setuplinenumbering
+ [\c!conversion=\v!numbers,
+ \c!start=1,
+ \c!step=1,
+ \c!method=\v!first,
+ \c!continue=\v!no,
+ \c!style=,
+ \c!color=,
+ \c!width=2\emwidth,
+ \c!left=,
+ \c!right=,
+ \c!command=,
+ \c!margin=2.5\emwidth,
+ \c!distance=\zeropoint,
+ \c!location=\v!default, % depends on direction, columns etc
+ \c!align=\v!auto]
+
+\definelinenumbering
+ []
+
+% \startlinenumbering[|continue|settings|name]
+% \startlinenumbering[name][|continue|settings]
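+%
+% for instance (just an illustrative sketch, the instance name and the values are
+% arbitrary):
+%
+% \startlinenumbering[step=5,location=inmargin]
+%     ... numbered paragraphs ...
+% \stoplinenumbering
+%
+% \definelinenumbering[verse][conversion=romannumerals]
+%
+% \startlinenumbering[verse][continue]
+%     ... more numbered paragraphs ...
+% \stoplinenumbering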
+
+\unexpanded\def\startlinenumbering
+ {\dodoubleempty\page_lines_start}
+
+\def\page_lines_start % we stay downward compatible
+ {\begingroup
+ \ifsecondargument
+ \expandafter\page_lines_start_two
+ \else\iffirstargument
+ \doubleexpandafter\page_lines_start_one
+ \else
+ \doubleexpandafter\page_lines_start_zero
+ \fi\fi}
+
+\def\page_lines_start_zero[#1][#2]%
+ {\edef\m_argument{\linenumberingparameter\c!continue}%
+ \ifx\m_argument\v!continue
+ \c_page_lines_mode\zerocount
+ \else
+ \c_page_lines_mode\plusone
+ \fi
+ \page_lines_start_followup}
+
+\def\page_lines_start_one[#1][#2]% [continue||settings] % historic
+ {\edef\m_argument{#1}%
+ \ifx\m_argument\v!continue
+ \c_page_lines_mode\zerocount
+ \let\currentlinenumbering\empty
+ \else
+ \c_page_lines_mode\plusone
+ \ifx\m_argument\v!empty
+ \let\currentlinenumbering\empty
+ \else
+ \doifelseassignment{#1}
+ {\let\currentlinenumbering\empty
+ \setupcurrentlinenumbering[#1]}
+ {\doifelsenumber\m_argument
+ {\let\currentlinenumbering\empty
+ \letlinenumberingparameter\c!start\m_argument}
+ {\let\currentlinenumbering\m_argument}}%
+ \fi
+ \edef\p_continue{\linenumberingparameter\c!continue}%
+ \ifx\p_continue\v!yes
+ \c_page_lines_mode\zerocount
+ \fi
+ \fi
+ \page_lines_start_followup}
+
+\def\page_lines_start_two[#1][#2]% [tag][continue||settings]
+ {\edef\currentlinenumbering{#1}%
+ \edef\m_argument{#2}%
+ \ifx\m_argument\v!continue
+ \c_page_lines_mode\zerocount
+ \else
+ \c_page_lines_mode\plusone
+ \ifx\m_argument\v!empty \else
+ \doifelseassignment{#2}
+ {\setupcurrentlinenumbering[#2]}
+ {\doifnumber\m_argument
+ {\letlinenumberingparameter\c!start\m_argument}}%
+ \fi
+ \edef\p_continue{\linenumberingparameter\c!continue}%
+ \ifx\p_continue\v!yes
+ \c_page_lines_mode\zerocount
+ \fi
+ \fi
+ \page_lines_start_followup}
+
+\newconditional\c_page_lines_auto_narrow
+
+\def\page_lines_start_followup
+ {\numberinglinestrue
+ \edef\p_location{\linenumberingparameter\c!location}%
+ \setfalse\c_page_lines_auto_narrow
+ \ifhmode \else
+ \ifx\p_location\v!text
+ \ifdim\leftskip>\zeropoint \else
+ \advance\leftskip\linenumberingparameter\c!margin
+ \settrue\c_page_lines_auto_narrow
+ \fi
+ \else\ifx\p_location\v!begin
+ \ifdim\leftskip>\zeropoint \else
+ \advance\leftskip\linenumberingparameter\c!margin
+ \settrue\c_page_lines_auto_narrow
+ \fi
+ \else\ifx\p_location\v!end
+ \ifdim\leftskip>\zeropoint \else
+ \advance\rightskip\linenumberingparameter\c!margin
+ \settrue\c_page_lines_auto_narrow
+ \fi
+ \fi\fi\fi
+ \fi
+ \the\beforeeverylinenumbering
+ \globallet\page_postprocessors_page \page_postprocessors_linenumbers_page
+ \globallet\page_postprocessors_column\page_postprocessors_linenumbers_column
+ \global\settrue\page_postprocessors_needed_box % see core-rul.mkiv
+ \ifcase\c_page_lines_mode\relax
+ \page_lines_start_update % continue
+ \or
+ \page_lines_start_define % only when assignment
+ \fi
+ \attribute\linenumberattribute\csname\??linenumberinginstance\currentlinenumbering\endcsname\relax}
+
+\unexpanded\def\stoplinenumbering
+ {\attribute\linenumberattribute\attributeunsetvalue
+ \the\aftereverylinenumbering
+ \ifconditional\c_page_lines_auto_narrow\par\fi
+ \endgroup}
+
+% number placement .. will change into (the new) margin code
+
+\newconditional\c_page_lines_fake_number
+\newconstant \b_page_lines_number
+\newconstant \c_page_lines_column
+\newconstant \c_page_lines_last_column
+\newdimen \d_page_lines_line_width
+\settrue \c_page_lines_dir_left_to_right
+
+\installcorenamespace{linenumberinghandler}
+
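+% swap the alignment keyword for the mirrored situation (second column, right to
+% left direction)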
+\def\page_line_swap_align % can become a helper
+ {\ifx\p_align\v!inner \let\p_align\v!outer \else
+ \ifx\p_align\v!outer \let\p_align\v!inner \else
+ \ifx\p_align\v!flushleft \let\p_align\v!flushright\else
+ \ifx\p_align\v!flushright\let\p_align\v!flushleft \else
+ \ifx\p_align\v!left \let\p_align\v!right \else
+ \ifx\p_align\v!right \let\p_align\v!left \fi\fi\fi\fi\fi\fi}
+
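+% adding the numbers is a two stage job: stage one (at the lua end) typesets them via
+% \makelinenumber into the scratch box, stage two then moves them from that box onto
+% the lines of the given box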
+\def\page_lines_add_numbers_to_box#box#column#max#nesting%
+ {\bgroup
+ \b_page_lines_number #box\relax
+ \c_page_lines_column #column\relax
+ \c_page_lines_last_column#max\relax
+ \c_page_lines_nesting #nesting\relax
+ \fullrestoreglobalbodyfont
+ \let\makelinenumber\page_lines_make_number % used at lua end
+ \setbox\b_page_lines_scratch\vbox
+ {\forgetall
+ \offinterlineskip
+ \clf_linenumbersstageone
+ \b_page_lines_number
+ \ifcase\c_page_lines_nesting false\else true\fi
+ \relax}%
+ \clf_linenumbersstagetwo
+ \b_page_lines_number
+ \b_page_lines_scratch
+   \relax
+ \egroup}
+
+\let\page_lines_make_number_indeed\relax
+
+% \def\page_lines_rlap{\ifconditional\c_page_lines_dir_left_to_right\expandafter\rlap\else\expandafter\llap\fi}
+% \def\page_lines_llap{\ifconditional\c_page_lines_dir_left_to_right\expandafter\llap\else\expandafter\rlap\fi}
+
+\def\page_lines_add_numbers_to_box#box#column#max#nesting%
+ {\bgroup
+ \b_page_lines_number #box\relax
+ \c_page_lines_column #column\relax
+ \c_page_lines_last_column#max\relax
+ \c_page_lines_nesting #nesting\relax
+ \fullrestoreglobalbodyfont
+ \let\makelinenumber\page_lines_make_number % used at lua end
+ \setbox\b_page_lines_scratch\vbox
+ {\forgetall
+ \offinterlineskip
+ \clf_linenumbersstageone
+ \b_page_lines_number
+ \ifcase\c_page_lines_nesting false\else true\fi
+ \relax}%
+ \clf_linenumbersstagetwo
+ \b_page_lines_number
+ \b_page_lines_scratch
+ \relax
+ \egroup}
+
+\def\page_lines_make_number#tag#mode#linenumber#shift#width#leftskip#dir% beware, one needs to compensate for this in the \hsize
+ {\naturalhbox to \zeropoint \bgroup
+ \ifcase#mode\relax
+ % \settrue \c_page_lines_fake_number
+ \else
+ % \setfalse\c_page_lines_fake_number
+ \edef\currentlinenumbering{#tag}%
+ \def\linenumber{#linenumber}% unsafe
+ \d_page_lines_line_width#width\scaledpoint\relax
+ \d_page_lines_distance\linenumberingparameter\c!distance\relax
+ \edef\p_align{\linenumberingparameter\c!align}%
+ \edef\p_location{\linenumberingparameter\c!location}%
+ \ifcase\istltdir#dir\relax
+ \settrue \c_page_lines_dir_left_to_right
+ \else
+ \setfalse\c_page_lines_dir_left_to_right
+ \fi
+ %
+ % maybe we also need an option to ignore columns, so that we renumber
+ % once but on the other hand this assumes aligned lines
+ %
+ \ifcase\c_page_lines_last_column\relax
+ \settrue \c_page_lines_fake_number % why
+ \or
+ % one column
+ \or
+ % two columns
+ \ifx\p_location\v!default % or just margin
+ \ifcase\c_page_lines_column\relax
+ \settrue \c_page_lines_fake_number % why
+ \or
+ % one
+ \let\p_location\v!left
+ \else
+ % two
+ \let\p_location\v!right
+ % can become a helper
+ \page_line_swap_align
+ \fi
+ \fi
+ \else
+ % too fuzzy
+ \fi
+ \ifx\p_location\v!default
+ \ifconditional\c_page_lines_dir_left_to_right
+ \let\p_location\v!left
+ \else
+ \let\p_location\v!right
+ \page_line_swap_align % yes or no
+ \fi
+ \fi
+ %
+ \executeifdefined{\??linenumberinghandler\p_location}\relax
+ \fi
+ \egroup}
+
+\def\page_lines_number_inject#align#width%
+ {\edef\p_width{\linenumberingparameter\c!width}%
+ \ifx\p_width\v!margin
+ \d_page_lines_width#width%
+ \else
+ \d_page_lines_width\p_width
+ \fi
+ \relax
+ \ifdim\d_page_lines_width>\zeropoint
+% \ifconditional\c_page_lines_dir_left_to_right\else
+% \let\simplealignedbox\simplereversealignedbox
+% \fi
+ \ifconditional\tracelinenumbering
+ \ruledhbox{\simplealignedbox\d_page_lines_width#align{\page_lines_number_inject_indeed}}%
+ \else
+ \simplealignedbox\d_page_lines_width#align{\page_lines_number_inject_indeed}%
+ \fi
+ \else
+ \ifconditional\tracelinenumbering
+ \ruledhbox
+ \else
+ % \hbox
+ \fi
+ {\page_lines_number_inject_indeed}%
+ \fi}
+
+\def\page_lines_number_inject_indeed
+ {\uselinenumberingstyleandcolor\c!style\c!color
+ \linenumberingparameter\c!command
+ {\linenumberingparameter\c!left
+ \convertnumber{\linenumberingparameter\c!conversion}\linenumber
+ \linenumberingparameter\c!right}}
+
+% \def\dodorlap{\hbox to \zeropoint{\box\nextbox\normalhss}\endgroup}
+% \def\dodollap{\hbox to \zeropoint{\normalhss\box\nextbox}\endgroup}
+
+\def\page_line_handle_left#align#width#distance%
+ {\llap
+ {\page_lines_number_inject#align#width%
+ \kern\dimexpr#distance+\d_page_lines_distance\relax
+ \the\everylinenumber
+ \hss}}
+
+\def\page_line_handle_right#align#width#distance%
+ {\rlap
+ {\kern\dimexpr#distance+\d_page_lines_distance+\d_page_lines_line_width\relax
+ \page_lines_number_inject#align#width%
+ \the\everylinenumber}}
+
+\setuvalue{\??linenumberinghandler\v!left}%
+ {\page_line_handle_left\p_align\leftmarginwidth\leftmargindistance}
+
+\setuvalue{\??linenumberinghandler\v!right}%
+ {\page_line_handle_right\p_align\rightmarginwidth\rightmargindistance}
+
+\setuvalue{\??linenumberinghandler\v!inner}%
+ {\ifodd\realpageno
+ \ifx\p_align\v!inner
+ \page_line_handle_left\v!flushleft\leftmarginwidth\leftmargindistance
+ \else\ifx\p_align\v!outer
+ \page_line_handle_left\v!flushright\leftmarginwidth\leftmargindistance
+ \else
+ \page_line_handle_left\p_align\leftmarginwidth\leftmargindistance
+ \fi\fi
+ \else
+ \ifx\p_align\v!inner
+ \page_line_handle_right\v!flushright\rightmarginwidth\rightmargindistance
+ \else\ifx\p_align\v!outer
+ \page_line_handle_right\v!flushleft\rightmarginwidth\rightmargindistance
+ \else
+ \page_line_handle_right\p_align\rightmarginwidth\rightmargindistance
+ \fi\fi
+ \fi}
+
+\setuvalue{\??linenumberinghandler\v!outer}%
+ {\ifodd\realpageno
+ \ifx\p_align\v!inner
+ \page_line_handle_right\v!flushleft\leftmarginwidth\leftmargindistance
+ \else\ifx\p_align\v!outer
+ \page_line_handle_right\v!flushright\leftmarginwidth\leftmargindistance
+ \else
+ \page_line_handle_right\p_align\leftmarginwidth\leftmargindistance
+ \fi\fi
+ \else
+ \ifx\p_align\v!inner
+ \page_line_handle_left\v!flushright\rightmarginwidth\rightmargindistance
+ \else\ifx\p_align\v!outer
+ \page_line_handle_left\v!flushleft\rightmarginwidth\rightmargindistance
+ \else
+ \page_line_handle_left\p_align\rightmarginwidth\rightmargindistance
+ \fi\fi
+ \fi}
+
+\def\page_line_handle_begin#align%
+ {\rlap
+ {\kern\d_page_lines_distance
+ \page_lines_number_inject#align\zeropoint
+ \the\everylinenumber}}
+
+\def\page_line_handle_end#align%
+ {\rlap
+ {\kern\d_page_lines_line_width\relax
+ \llap
+ {\page_lines_number_inject#align\zeropoint
+ \kern\d_page_lines_distance
+ \the\everylinenumber}}}
+
+\setuvalue{\??linenumberinghandler\v!begin}{\page_line_handle_begin\p_align}
+\setuvalue{\??linenumberinghandler\v!end }{\page_line_handle_end \p_align}
+\setuvalue{\??linenumberinghandler\v!text }{\page_line_handle_begin\p_align}
+
+\setuevalue{\??linenumberinghandler\v!inleft }{\getvalue{\??linenumberinghandler\v!left }}
+\setuevalue{\??linenumberinghandler\v!inmargin}{\getvalue{\??linenumberinghandler\v!left }}
+\setuevalue{\??linenumberinghandler\v!margin }{\getvalue{\??linenumberinghandler\v!left }}
+\setuevalue{\??linenumberinghandler\v!inright }{\getvalue{\??linenumberinghandler\v!right}}
+
+% referencing: \permithyphenation, also removes leading spaces (new per 29-11-2013)
+
+\unexpanded\def\someline [#1]{\page_lines_reference_start{#1}\page_lines_reference_stop{#1}} % was just a def
+\unexpanded\def\startline[#1]{\page_lines_reference_start{#1}\ignorespaces}
+\unexpanded\def\stopline [#1]{\removeunwantedspaces\permithyphenation\page_lines_reference_stop{#1}}
+
+\def\page_lines_reference_show_start
+ {\ifconditional\tracelinenumbering
+ \expandafter\page_lines_reference_show_start_indeed
+ \else
+ \expandafter\gobbleoneargument
+ \fi}
+
+\def\page_lines_reference_show_stop
+ {\ifconditional\tracelinenumbering
+ \expandafter\page_lines_reference_show_stop_indeed
+ \else
+ \expandafter\gobbleoneargument
+ \fi}
+
+\def\page_lines_reference_show_start_indeed#1%
+ {\setbox\scratchbox\hbox{\llap
+ {\vrule\s!width\onepoint\s!depth\strutdp\s!height.8\strutht\raise.85\strutht\hbox{\llap{\tt\txx#1}}}}%
+ \smashbox\scratchbox
+ \box\scratchbox}
+
+\def\page_lines_reference_show_stop_indeed#1%
+ {\setbox\scratchbox\hbox{\rlap
+ {\raise.85\strutht\hbox{\rlap{\tt\txx#1}}\vrule\s!width\onepoint\s!depth\strutdp\s!height.8\strutht}}%
+ \smashbox\scratchbox
+ \box\scratchbox}
+
+\def\page_lines_reference_start#1{\page_lines_some_reference{#1}{lr:b:#1}{\page_lines_reference_show_start{#1}}}
+\def\page_lines_reference_stop #1{\page_lines_some_reference{#1}{lr:e:#1}{\page_lines_reference_show_stop {#1}}}
+
+% eventually we will do this in lua
+
+\def\currentreferencelinenumber{\clf_filterreference{linenumber}}
+
+\let\m_page_lines_from\empty
+\let\m_page_lines_to \empty
+
+\unexpanded\def\doifelsesamelinereference#1#2#3%
+ {\doifelsereferencefound{lr:b:#1}
+ {\edef\m_page_lines_from{\currentreferencelinenumber}%
+ \doifelsereferencefound{lr:e:#1}
+ {\edef\m_page_lines_to{\currentreferencelinenumber}%
+ %[\m_page_lines_from,\m_page_lines_to]
+ \ifx\m_page_lines_from\m_page_lines_to#2\else#3\fi}
+ {#2}}
+ {#2}}
+
+\let\doifsamelinereferenceelse\doifelsesamelinereference
+
+\unexpanded\def\inline#1[#2]%
+ {\doifelsenothing{#1}
+ {\doifelsesamelinereference{#2}
+ {\in{\leftlabeltext\v!line}{\rightlabeltext\v!line}[lr:b:#2]}
+ {\in{\leftlabeltext\v!lines}{}[lr:b:#2]--\in{}{\rightlabeltext\v!lines}[lr:e:#2]}}
+ {\doifelsesamelinereference{#2}
+ {\in{#1}[lr:b:#2]}
+ {\in{#1}[lr:b:#2]--\in[lr:e:#2]}}}
+
+\unexpanded\def\inlinerange[#1]%
+ {\doifelsesamelinereference{#1}
+ {\in[lr:b:#1]}
+ {\in[lr:b:#1]\endash\in[lr:e:#1]}}
+
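+% usage sketch (the reference name is of course arbitrary):
+%
+% \startline[intro]Some marked text.\stopline[intro]
+%
+% ... as shown in \inlinerange[intro] ...
+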
+\protect \endinput
diff --git a/tex/context/base/page-mak.mkvi b/tex/context/base/page-mak.mkvi
index 71af520a1..ee144f20a 100644
--- a/tex/context/base/page-mak.mkvi
+++ b/tex/context/base/page-mak.mkvi
@@ -45,7 +45,7 @@
\appendtoks
\setuevalue{\e!start\currentmakeup\e!makeup}{\startmakeup[\currentmakeup]}%
\setuevalue{\e!stop \currentmakeup\e!makeup}{\stopmakeup}%
- \doiflayoutdefinedelse\currentmakeup\donothing{\definelayout[\currentmakeup]}% new
+ \doifelselayoutdefined\currentmakeup\donothing{\definelayout[\currentmakeup]}% new
\to \everydefinemakeup
%D The \type{\start}||\type{\stop} macros are used for both
@@ -91,13 +91,59 @@
\def\page_makeup_start_yes[#name]% [#settings]%
{\doifelsecommandhandler\??makeup{#name}\page_makeup_start_indeed\page_makeup_start_nop[#name]}%
+% case 1:
+%
+% \setuplayout[height=5cm]
+%
+% case 2:
+%
+% \definelayout[crap][height=10cm]
+% \definelayout[standard][crap]
+%
+% case 3:
+%
+% \setuplayout[standard][height=15cm]
+%
+% case 4:
+%
+% \definelayout[whatever][height=2cm]
+% \setuplayout[whatever]
+
\def\page_makeup_start_indeed[#name][#settings]%
- {\doifelsenothing{\namedmakeupparameter{#name}\c!page}
- {\page}% new, so best not have dangling mess here like references (we could capture then and flush embedded)
- {\page[\namedmakeupparameter{#name}\c!page]}%
+ {% the next grouping hack is somewhat messy:
+ \begingroup
+ % we need to figure out the current layout
+ \xdef\m_page_makeup_name{#name}%
+ \let\currentmakeup\m_page_makeup_name
+ \let\currentlayout\m_page_makeup_name
+ \xdef\m_page_makeup_layout_parent{\layoutparameter\s!parent}%
+ \setupcurrentmakeup[#settings]%
+ \edef\p_page{\makeupparameter\c!page}%
+ \ifx\p_page\empty
+ \endgroup
+      \page % new, so best not have dangling mess here like references (we could capture them and flush embedded)
+ \else\ifx\p_page\v!no
+ % nothing
+ \endgroup
+ \else
+ \normalexpanded{\endgroup\page[\p_page]}%
+ \fi\fi
+    % some dirty trickery (sorry) for determining whether
+    % - we have a layout definition at all
+    % - we inherit from the parent of that definition
+    % - we inherit from the current layout otherwise
+ \ifx\m_page_makeup_name\currentlayout
+ % we already use the layout
+ \else\ifx\m_page_makeup_layout_parent\??layout
+ % we inherit from the current layout
+ \normalexpanded{\setuplayout[#name][\s!parent=\??layout\currentlayout]}% is remembered but checked later anyway
+ % \else
+ % we have an inherited layout
+ \fi\fi
\startlayout[#name]% includes \page
\bgroup
- \edef\currentmakeup{#name}%
+ %\edef\currentmakeup{#name}%
+ \let\currentmakeup\m_page_makeup_name
\setupcurrentmakeup[#settings]%
\setsystemmode\v!makeup
\the\t_page_makeup_every_setup
@@ -137,7 +183,12 @@
\fi \fi
\strc_pagenumbers_page_state_pop % new
\egroup
- \stoplayout} % includes \page
+ \stoplayout % includes \page
+ \ifx\m_page_makeup_name\currentlayout
+ \else\ifx\m_page_makeup_layout_parent\??layout
+ \normalexpanded{\setuplayout[\m_page_makeup_name][\s!parent=\??layout]}% is remembered but checked later anyway
+ % \else
+ \fi\fi}
\setvalue{\??makeupdoublesided\v!yes}%
{\emptyhbox
@@ -184,6 +235,7 @@
\c!headerstate=\v!stop,
\c!footerstate=\v!stop,
\c!pagestate=\v!stop] % in manual ! ! !
+% \c!pagestate=\v!start]
\definemakeup
[\v!standard]
diff --git a/tex/context/base/page-mbk.mkvi b/tex/context/base/page-mbk.mkvi
index 9e3e57193..8038771d9 100644
--- a/tex/context/base/page-mbk.mkvi
+++ b/tex/context/base/page-mbk.mkvi
@@ -98,7 +98,7 @@
\unvbox\b_page_margin_blocks
\marginblockparameter\c!inbetween
\fi
- \setupalign[\marginblockparameter\c!align]%
+ \usealignparameter\marginblockparameter
\usemarginblockstyleandcolor\c!style\c!color
\begstrut
\ignorespaces}
diff --git a/tex/context/base/page-mix.lua b/tex/context/base/page-mix.lua
index 7d13d9e4e..61a4f944d 100644
--- a/tex/context/base/page-mix.lua
+++ b/tex/context/base/page-mix.lua
@@ -13,48 +13,81 @@ if not modules then modules = { } end modules ["page-mix"] = {
-- local trackers, logs, storage = trackers, logs, storage
-- local number, table = number, table
+local next, type = next, type
local concat = table.concat
-
-local nodecodes = nodes.nodecodes
-local gluecodes = nodes.gluecodes
-local nodepool = nodes.pool
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local kern_code = nodecodes.kern
-local glue_code = nodecodes.glue
-local penalty_code = nodecodes.penalty
-local insert_code = nodecodes.ins
-local mark_code = nodecodes.mark
-
-local new_hlist = nodepool.hlist
-local new_vlist = nodepool.vlist
-local new_glue = nodepool.glue
-
-local hpack = node.hpack
-local vpack = node.vpack
-local freenode = node.free
-local concatnodes = nodes.concat
-
-local texgetbox = tex.getbox
-local texsetbox = tex.setbox
-local texgetskip = tex.getskip
-
-local points = number.points
-
-local settings_to_hash = utilities.parsers.settings_to_hash
-
-local variables = interfaces.variables
-local v_yes = variables.yes
-local v_global = variables["global"]
-local v_local = variables["local"]
-local v_columns = variables.columns
+local ceil, floor = math.ceil, math.floor
local trace_state = false trackers.register("mixedcolumns.trace", function(v) trace_state = v end)
local trace_detail = false trackers.register("mixedcolumns.detail", function(v) trace_detail = v end)
local report_state = logs.reporter("mixed columns")
+local nodecodes = nodes.nodecodes
+local gluecodes = nodes.gluecodes
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local kern_code = nodecodes.kern
+local glue_code = nodecodes.glue
+local penalty_code = nodecodes.penalty
+local insert_code = nodecodes.ins
+local mark_code = nodecodes.mark
+local rule_code = nodecodes.rule
+
+local topskip_code = gluecodes.topskip
+local lineskip_code = gluecodes.lineskip
+local baselineskip_code = gluecodes.baselineskip
+local userskip_code = gluecodes.userskip
+
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local nodetostring = nuts.tostring
+local listtoutf = nodes.listtoutf
+
+local hpack = nuts.hpack
+local vpack = nuts.vpack
+local freenode = nuts.free
+local concatnodes = nuts.concat
+local slidenodes = nuts.slide -- ok here as we mess with prev links intermediately
+local traversenodes = nuts.traverse
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getsubtype = nuts.getsubtype
+local getbox = nuts.getbox
+local setbox = nuts.setbox
+local getskip = nuts.getskip
+local getattribute = nuts.getattribute
+
+local nodepool = nuts.pool
+
+local new_hlist = nodepool.hlist
+local new_vlist = nodepool.vlist
+local new_glue = nodepool.glue
+
+local points = number.points
+
+local settings_to_hash = utilities.parsers.settings_to_hash
+
+local variables = interfaces.variables
+local v_yes = variables.yes
+local v_global = variables["global"]
+local v_local = variables["local"]
+local v_columns = variables.columns
+local v_fixed = variables.fixed
+local v_auto = variables.auto
+local v_none = variables.none
+local v_more = variables.more
+local v_less = variables.less
+local v_halfline = variables.halfline
+
+local context = context
+local implement = interfaces.implement
+
pagebuilders = pagebuilders or { }
pagebuilders.mixedcolumns = pagebuilders.mixedcolumns or { }
local mixedcolumns = pagebuilders.mixedcolumns
@@ -77,13 +110,13 @@ local function collectinserts(result,nxt,nxtid)
local inserts, currentskips, nextskips, inserttotal = { }, 0, 0, 0
while nxt do
if nxtid == insert_code then
- inserttotal = inserttotal + nxt.height + nxt.depth
- local s = nxt.subtype
+ inserttotal = inserttotal + getfield(nxt,"height") + getfield(nxt,"depth")
+ local s = getsubtype(nxt)
local c = inserts[s]
if not c then
c = { }
inserts[s] = c
- local width = texgetskip(s).width
+ local width = getfield(getskip(s),"width")
if not result.inserts[s] then
currentskips = currentskips + width
end
@@ -100,9 +133,9 @@ local function collectinserts(result,nxt,nxtid)
else
break
end
- nxt = nxt.next
+ nxt = getnext(nxt)
if nxt then
- nxtid = nxt.id
+ nxtid = getid(nxt)
else
break
end
@@ -128,30 +161,30 @@ end
local function discardtopglue(current,discarded)
local size = 0
while current do
- local id = current.id
+ local id = getid(current)
if id == glue_code then
- size = size + current.spec.width
+ size = size + getfield(getfield(current,"spec"),"width")
discarded[#discarded+1] = current
- current = current.next
+ current = getnext(current)
elseif id == penalty_code then
- if current.penalty == forcedbreak then
+ if getfield(current,"penalty") == forcedbreak then
discarded[#discarded+1] = current
- current = current.next
- while current and current.id == glue_code do
- size = size + current.spec.width
+ current = getnext(current)
+ while current and getid(current) == glue_code do
+ size = size + getfield(getfield(current,"spec"),"width")
discarded[#discarded+1] = current
- current = current.next
+ current = getnext(current)
end
else
discarded[#discarded+1] = current
- current = current.next
+ current = getnext(current)
end
else
break
end
end
if current then
- current.prev = nil
+ setfield(current,"prev",nil) -- prevent look back
end
return current, size
end
@@ -162,13 +195,13 @@ local function stripbottomglue(results,discarded)
local r = results[i]
local t = r.tail
while t and t ~= r.head do
- local prev = t.prev
+ local prev = getprev(t)
if not prev then
break
end
- local id = t.id
+ local id = getid(t)
if id == penalty_code then
- if t.penalty == forcedbreak then
+ if getfield(t,"penalty") == forcedbreak then
break
else
discarded[#discarded+1] = t
@@ -177,7 +210,7 @@ local function stripbottomglue(results,discarded)
end
elseif id == glue_code then
discarded[#discarded+1] = t
- local width = t.spec.width
+ local width = getfield(getfield(t,"spec"),"width")
if trace_state then
report_state("columns %s, discarded bottom glue %p",i,width)
end
@@ -195,51 +228,52 @@ local function stripbottomglue(results,discarded)
return height
end
-local function setsplit(specification) -- a rather large function
+local function preparesplit(specification) -- a rather large function
local box = specification.box
if not box then
report_state("fatal error, no box")
return
end
- local list = texgetbox(box)
+ local list = getbox(box)
if not list then
report_state("fatal error, no list")
return
end
- local head = list.head or specification.originalhead
+ local head = getlist(list) or specification.originalhead
if not head then
report_state("fatal error, no head")
return
end
- local discarded = { }
- local originalhead = head
- local originalwidth = specification.originalwidth or list.width
- local originalheight = specification.originalheight or list.height
- local current = head
- local skipped = 0
- local height = 0
- local depth = 0
- local skip = 0
- local splitmethod = specification.splitmethod or false
+ slidenodes(head) -- we can have set prev's to nil to prevent backtracking
+ local discarded = { }
+ local originalhead = head
+ local originalwidth = specification.originalwidth or getfield(list,"width")
+ local originalheight = specification.originalheight or getfield(list,"height")
+ local current = head
+ local skipped = 0
+ local height = 0
+ local depth = 0
+ local skip = 0
+ local splitmethod = specification.splitmethod or false
if splitmethod == v_none then
splitmethod = false
end
- local options = settings_to_hash(specification.option or "")
+ local options = settings_to_hash(specification.option or "")
local stripbottom = specification.alternative == v_local
- local cycle = specification.cycle or 1
- local nofcolumns = specification.nofcolumns or 1
+ local cycle = specification.cycle or 1
+ local nofcolumns = specification.nofcolumns or 1
if nofcolumns == 0 then
nofcolumns = 1
end
local preheight = specification.preheight or 0
- local extra = specification.extra or 0
+ local extra = specification.extra or 0
local maxheight = specification.maxheight
- local optimal = originalheight/nofcolumns
+ local optimal = originalheight/nofcolumns
if specification.balance ~= v_yes then
optimal = maxheight
end
- local target = optimal + extra
- local overflow = target > maxheight - preheight
+ local target = optimal + extra
+ local overflow = target > maxheight - preheight
local threshold = specification.threshold or 0
if overflow then
target = maxheight - preheight
@@ -267,33 +301,73 @@ local function setsplit(specification) -- a rather large function
local rest = nil
local lastlocked = nil
local lastcurrent = nil
+ local lastcontent = nil
local backtracked = false
if trace_state then
report_state("setting collector to column %s",column)
end
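+    -- lock and unlock administer the last position locked by a nobreak penalty so
+    -- that, when a column is closed, we can backtrack to it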
+ local function unlock(penalty)
+ if lastlocked then
+ if trace_state then
+ report_state("penalty %s, unlocking in column %s",penalty or "-",column)
+ end
+ lastlocked = nil
+ end
+ lastcurrent = nil
+ lastcontent = nil
+ end
+
+ local function lock(penalty,current)
+ if trace_state then
+ report_state("penalty %s, locking in column %s",penalty,column)
+ end
+ lastlocked = penalty
+ lastcurrent = current or lastcurrent
+ lastcontent = nil
+ end
+
local function backtrack(start)
local current = start
-- first skip over glue and penalty
while current do
- local id = current.id
- if id == glue_code or id == penalty_code then
- current = current.prev
+ local id = getid(current)
+ if id == glue_code then
+ if trace_state then
+ report_state("backtracking over %s in column %s","glue",column)
+ end
+ current = getprev(current)
+ elseif id == penalty_code then
+ if trace_state then
+ report_state("backtracking over %s in column %s","penalty",column)
+ end
+ current = getprev(current)
else
break
end
end
-- then skip over content
while current do
- local id = current.id
- if id == glue_code or id == penalty_code then
+ local id = getid(current)
+ if id == glue_code then
+ if trace_state then
+ report_state("quitting at %s in column %s","glue",column)
+ end
+ break
+ elseif id == penalty_code then
+ if trace_state then
+ report_state("quitting at %s in column %s","penalty",column)
+ end
break
else
- current = current.prev
+ current = getprev(current)
end
end
if not current then
+ if trace_state then
+ report_state("no effective backtracking in column %s",column)
+ end
current = start
end
return current
@@ -310,7 +384,12 @@ local function setsplit(specification) -- a rather large function
backtracked = true
end
lastcurrent = nil
- lastlocked = nil
+ if lastlocked then
+ if trace_state then
+ report_state("unlocking in column %s",column)
+ end
+ lastlocked = nil
+ end
end
if head == lasthead then
if trace_state then
@@ -324,7 +403,7 @@ local function setsplit(specification) -- a rather large function
if current == head then
result.tail = head
else
- result.tail = current.prev
+ result.tail = getprev(current)
end
result.height = height
result.depth = depth
@@ -344,6 +423,9 @@ local function setsplit(specification) -- a rather large function
report_state("setting collector to column %s",column)
end
current, skipped = discardtopglue(current,discarded)
+ if trace_detail and skipped ~= 0 then
+ report_state("check > column 1, discarded %p",skipped)
+ end
head = current
return true, skipped
end
@@ -352,6 +434,7 @@ local function setsplit(specification) -- a rather large function
local function checked(advance,where,locked)
local total = skip + height + depth + advance
local delta = total - target
+-- - 65536*3
local state = "same"
local okay = false
local skipped = 0
@@ -366,7 +449,7 @@ local function setsplit(specification) -- a rather large function
end
end
if trace_detail then
- report_state("%-7s > column %s, delta %p, threshold %p, advance %p, total %p, target %p, discarded %p => %a (height %p, depth %p, skip %p)",
+ report_state("%-7s > column %s, delta %p, threshold %p, advance %p, total %p, target %p => %a (height %p, depth %p, skip %p)",
where,curcol,delta,threshold,advance,total,target,state,skipped,height,depth,skip)
end
return state, skipped
@@ -387,7 +470,7 @@ local function setsplit(specification) -- a rather large function
head = current
local function process_skip(current,nxt)
- local advance = current.spec.width
+ local advance = getfield(getfield(current,"spec"),"width")
if advance ~= 0 then
local state, skipped = checked(advance,"glue")
if trace_state then
@@ -401,17 +484,28 @@ local function setsplit(specification) -- a rather large function
end
height = height + depth + skip
depth = 0
+if advance < 0 then
+ height = height + advance
+ skip = 0
+ if height < 0 then
+ height = 0
+ end
+else
skip = height > 0 and advance or 0
+end
if trace_state then
report_state("%-7s > column %s, height %p, depth %p, skip %p","glue",column,height,depth,skip)
end
else
-- what else? ignore? treat as valid as usual?
end
+ if lastcontent then
+ unlock()
+ end
end
local function process_kern(current,nxt)
- local advance = current.kern
+ local advance = getfield(current,"kern")
if advance ~= 0 then
local state, skipped = checked(advance,"kern")
if trace_state then
@@ -434,25 +528,28 @@ local function setsplit(specification) -- a rather large function
local function process_rule(current,nxt)
-- simple variant of h|vlist
- local advance = current.height -- + current.depth
- local state, skipped = checked(advance+currentskips,"rule")
- if trace_state then
- report_state("%-7s > column %s, state %a, rule, advance %p, height %p","line",column,state,advance,inserttotal,height)
- if skipped ~= 0 then
- report_state("%-7s > column %s, discarded %p","rule",column,skipped)
+ local advance = getfield(current,"height") -- + getfield(current,"depth")
+ if advance ~= 0 then
+ local state, skipped = checked(advance,"rule")
+ if trace_state then
+ report_state("%-7s > column %s, state %a, rule, advance %p, height %p","rule",column,state,advance,inserttotal,height)
+ if skipped ~= 0 then
+ report_state("%-7s > column %s, discarded %p","rule",column,skipped)
+ end
end
+ if state == "quit" then
+ return true
+ end
+ height = height + depth + skip + advance
+ -- if state == "next" then
+ -- height = height + nextskips
+ -- else
+ -- height = height + currentskips
+ -- end
+ depth = getfield(current,"depth")
+ skip = 0
end
- if state == "quit" then
- return true
- end
- height = height + depth + skip + advance
- if state == "next" then
- height = height + nextskips
- else
- height = height + currentskips
- end
- depth = current.depth
- skip = 0
+ lastcontent = current
end
-- okay, here we could do some badness like magic but we want something
@@ -462,12 +559,11 @@ local function setsplit(specification) -- a rather large function
-- [chapter] [penalty] [section] [penalty] [first line]
local function process_penalty(current,nxt)
- local penalty = current.penalty
+ local penalty = getfield(current,"penalty")
if penalty == 0 then
- lastlocked = nil
- lastcurrent = nil
+ unlock(penalty)
elseif penalty == forcedbreak then
- local needed = current[a_checkedbreak]
+ local needed = getattribute(current,a_checkedbreak)
local proceed = not needed or needed == 0
if not proceed then
local available = target - height
@@ -477,8 +573,7 @@ local function setsplit(specification) -- a rather large function
end
end
if proceed then
- lastlocked = nil
- lastcurrent = nil
+ unlock(penalty)
local okay, skipped = gotonext()
if okay then
if trace_state then
@@ -499,28 +594,26 @@ local function setsplit(specification) -- a rather large function
end
elseif penalty < 0 then
-- we don't care too much
- lastlocked = nil
- lastcurrent = nil
+ unlock(penalty)
elseif penalty >= 10000 then
if not lastcurrent then
- lastcurrent = current
- lastlocked = penalty
+ lock(penalty,current)
elseif penalty > lastlocked then
- lastlocked = penalty
+ lock(penalty)
end
else
- lastlocked = nil
- lastcurrent = nil
+ unlock(penalty)
end
end
local function process_list(current,nxt)
- local nxtid = nxt and nxt.id
+ local nxtid = nxt and getid(nxt)
line = line + 1
local inserts, currentskips, nextskips, inserttotal = nil, 0, 0, 0
- local advance = current.height -- + current.depth
+ local advance = getfield(current,"height")
+-- + getfield(current,"depth") -- when > strutdp
if trace_state then
- report_state("%-7s > column %s, content: %s","line",column,listtoutf(current.list,true,true))
+ report_state("%-7s > column %s, content: %s","line",column,listtoutf(getlist(current),true,true))
end
if nxt and (nxtid == insert_code or nxtid == mark_code) then
nxt, inserts, localskips, insertskips, inserttotal = collectinserts(result,nxt,nxtid)
@@ -541,7 +634,7 @@ local function setsplit(specification) -- a rather large function
else
height = height + currentskips
end
- depth = current.depth
+ depth = getfield(current,"depth")
skip = 0
if inserts then
-- so we already collect them ... makes backtracking tricky ... alternatively
@@ -551,12 +644,15 @@ local function setsplit(specification) -- a rather large function
if trace_state then
report_state("%-7s > column %s, height %p, depth %p, skip %p","line",column,height,depth,skip)
end
+ lastcontent = current
end
+local kept = head
+
while current do
- local id = current.id
- local nxt = current.next
+ local id = getid(current)
+ local nxt = getnext(current)
backtracked = false
@@ -602,14 +698,16 @@ local function setsplit(specification) -- a rather large function
if not current then
if trace_state then
- report_state("nilling rest")
+ report_state("nothing left")
end
- rest = nil
- elseif rest == lasthead then
+ -- needs well defined case
+ -- rest = nil
+ elseif rest == lasthead then
if trace_state then
- report_state("nilling rest as rest is lasthead")
+ report_state("rest equals lasthead")
end
- rest = nil
+ -- test case: x\index{AB} \index{AA}x \blank \placeindex
+ -- makes line disappear: rest = nil
end
if stripbottom then
@@ -629,24 +727,26 @@ local function setsplit(specification) -- a rather large function
specification.overflow = overflow
specification.discarded = discarded
- texgetbox(specification.box).list = nil
+ setfield(getbox(specification.box),"list",nil)
return specification
end
-function mixedcolumns.finalize(result)
+local function finalize(result)
if result then
- local results = result.results
- for i=1,result.nofcolumns do
+ local results = result.results
+ local columns = result.nofcolumns
+ local maxtotal = 0
+ for i=1,columns do
local r = results[i]
local h = r.head
if h then
- h.prev = nil
+ setfield(h,"prev",nil)
local t = r.tail
if t then
- t.next = nil
+ setfield(t,"next",nil)
else
- h.next = nil
+ setfield(h,"next",nil)
r.tail = h
end
for c, list in next, r.inserts do
@@ -655,16 +755,26 @@ function mixedcolumns.finalize(result)
local l = list[i]
local h = new_hlist()
t[i] = h
- h.head = l.head
- h.height = l.height
- h.depth = l.depth
- l.head = nil
+ setfield(h,"list",getfield(l,"head"))
+ setfield(h,"height",getfield(l,"height"))
+ setfield(h,"depth",getfield(l,"depth"))
+ setfield(l,"head",nil)
end
- t[1].prev = nil -- needs checking
- t[#t].next = nil -- needs checking
+ setfield(t[1],"prev",nil) -- needs checking
+ setfield(t[#t],"next",nil) -- needs checking
r.inserts[c] = t
end
end
+ local total = r.height + r.depth
+ if total > maxtotal then
+ maxtotal = total
+ end
+ r.total = total
+ end
+ result.maxtotal = maxtotal
+ for i=1,columns do
+ local r = results[i]
+ r.extra = maxtotal - r.total
end
end
end
@@ -679,12 +789,12 @@ local function report_deltas(result,str)
report_state("%s, cycles %s, deltas % | t",str,result.cycle or 1,t)
end
-function mixedcolumns.setsplit(specification)
+local function setsplit(specification)
splitruns = splitruns + 1
if trace_state then
report_state("split run %s",splitruns)
end
- local result = setsplit(specification)
+ local result = preparesplit(specification)
if result then
if result.overflow then
if trace_state then
@@ -697,7 +807,7 @@ function mixedcolumns.setsplit(specification)
local cycles = specification.cycles or 100
while result.rest and cycle <= cycles do
specification.extra = cycle * step
- result = setsplit(specification) or result
+ result = preparesplit(specification) or result
if trace_state then
report_state("cycle: %s.%s, original height %p, total height %p",
splitruns,cycle,result.originalheight,result.nofcolumns*result.targetheight)
@@ -719,7 +829,7 @@ function mixedcolumns.setsplit(specification)
end
end
-function mixedcolumns.getsplit(result,n)
+local function getsplit(result,n)
if not result then
report_state("flush, column %s, no result",n)
return
@@ -733,17 +843,18 @@ function mixedcolumns.getsplit(result,n)
return new_glue(result.originalwidth)
end
- h.prev = nil -- move up
+ setfield(h,"prev",nil) -- move up
local strutht = result.strutht
local strutdp = result.strutdp
local lineheight = strutht + strutdp
+ local isglobal = result.alternative == v_global
local v = new_vlist()
- v.head = h
+ setfield(v,"list",h)
-- local v = vpack(h,"exactly",height)
- if result.alternative == v_global then -- option
+ if isglobal then -- option
result.height = result.maxheight
end
@@ -751,24 +862,69 @@ function mixedcolumns.getsplit(result,n)
local dp = 0
local wd = result.originalwidth
- local grid = result.grid
+ local grid = result.grid
+ local internalgrid = result.internalgrid
+ local httolerance = .25
+ local dptolerance = .50
+ local lineheight = internalgrid == v_halfline and lineheight/2 or lineheight
+
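+    -- amount: snap (r-t) up to a whole number of lines and subtract s (strut depth
+    -- resp. height); when the snapped value does not exceed s we just return s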
+ local function amount(r,s,t)
+ local l = ceil((r-t)/lineheight)
+ local a = lineheight * l
+ if a > s then
+ return a - s
+ else
+ return s
+ end
+ end
if grid then
- ht = lineheight * math.ceil(result.height/lineheight) - strutdp
- dp = strutdp
+ -- print(n,result.maxtotal,r.total,r.extra)
+ if isglobal then
+ local rh = r.height
+ -- ht = (lineheight * ceil(result.height/lineheight) - strutdp
+ ht = amount(rh,strutdp,0)
+ dp = strutdp
+ else
+ -- natural dimensions
+ local rh = r.height
+ local rd = r.depth
+ if rh > ht then
+ ht = amount(rh,strutdp,httolerance*strutht)
+ end
+ if rd > dp then
+ dp = amount(rd,strutht,dptolerance*strutdp)
+ end
+ -- forced dimensions
+ local rh = result.height or 0
+ local rd = result.depth or 0
+ if rh > ht then
+ ht = amount(rh,strutdp,httolerance*strutht)
+ end
+ if rd > dp then
+ dp = amount(rd,strutht,dptolerance*strutdp)
+ end
+ -- always one line at least
+ if ht < strutht then
+ ht = strutht
+ end
+ if dp < strutdp then
+ dp = strutdp
+ end
+ end
else
ht = result.height
dp = result.depth
end
- v.width = wd
- v.height = ht
- v.depth = dp
+ setfield(v,"width",wd)
+ setfield(v,"height",ht)
+ setfield(v,"depth",dp)
if trace_state then
- local id = h.id
+ local id = getid(h)
if id == hlist_code then
- report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"top line",nodes.toutf(h.list))
+ report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"top line",listtoutf(getlist(h)))
else
report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"head node",nodecodes[id])
end
@@ -777,27 +933,27 @@ function mixedcolumns.getsplit(result,n)
for c, list in next, r.inserts do
local l = concatnodes(list)
local b = vpack(l) -- multiple arguments, todo: fastvpack
- -- texsetbox("global",c,b)
- texsetbox(c,b)
+ -- setbox("global",c,b)
+ setbox(c,b)
r.inserts[c] = nil
end
return v
end
-function mixedcolumns.getrest(result)
+local function getrest(result)
local rest = result and result.rest
result.rest = nil -- to be sure
return rest
end
-function mixedcolumns.getlist(result)
+local function getlist(result)
local originalhead = result and result.originalhead
result.originalhead = nil -- to be sure
return originalhead
end
-function mixedcolumns.cleanup(result)
+local function cleanup(result)
local discarded = result.discarded
for i=1,#discarded do
freenode(discarded[i])
@@ -805,52 +961,100 @@ function mixedcolumns.cleanup(result)
result.discarded = { }
end
+mixedcolumns.setsplit = setsplit
+mixedcolumns.getsplit = getsplit
+mixedcolumns.finalize = finalize
+mixedcolumns.getrest = getrest
+mixedcolumns.getlist = getlist
+mixedcolumns.cleanup = cleanup
+
-- interface --
local result
-function commands.mixsetsplit(specification)
- if result then
- for k, v in next, specification do
- result[k] = v
+implement {
+ name = "mixsetsplit",
+ actions = function(specification)
+ if result then
+ for k, v in next, specification do
+ result[k] = v
+ end
+ result = setsplit(result)
+ else
+ result = setsplit(specification)
end
- result = mixedcolumns.setsplit(result)
- else
- result = mixedcolumns.setsplit(specification)
- end
-end
+ end,
+ arguments = {
+ {
+ { "box", "integer" },
+ { "nofcolumns", "integer" },
+ { "maxheight", "dimen" },
+ { "step", "dimen" },
+ { "cycles", "integer" },
+ { "preheight", "dimen" },
+ { "prebox", "integer" },
+ { "strutht", "dimen" },
+ { "strutdp", "dimen" },
+ { "threshold", "dimen" },
+ { "splitmethod" },
+ { "balance" },
+ { "alternative" },
+ { "internalgrid" },
+ { "grid", "boolean" },
+ }
+ }
+}
-function commands.mixgetsplit(n)
- if result then
- context(mixedcolumns.getsplit(result,n))
- end
-end
+implement {
+ name = "mixgetsplit",
+ arguments = "integer",
+ actions = function(n)
+ if result then
+ context(tonode(getsplit(result,n)))
+ end
+ end,
+}
-function commands.mixfinalize()
- if result then
- mixedcolumns.finalize(result)
+implement {
+ name = "mixfinalize",
+ actions = function()
+ if result then
+ finalize(result)
+ end
end
-end
+}
-function commands.mixflushrest()
- if result then
- context(mixedcolumns.getrest(result))
+implement {
+ name = "mixflushrest",
+ actions = function()
+ if result then
+ context(tonode(getrest(result)))
+ end
end
-end
+}
-function commands.mixflushlist()
- if result then
- context(mixedcolumns.getlist(result))
+implement {
+ name = "mixflushlist",
+ actions = function()
+ if result then
+ context(tonode(getlist(result)))
+ end
end
-end
+}
-function commands.mixstate()
- context(result and result.rest and 1 or 0)
-end
+implement {
+ name = "mixstate",
+ actions = function()
+ context(result and result.rest and 1 or 0)
+ end
+}
-function commands.mixcleanup()
- if result then
- mixedcolumns.cleanup(result)
- result = nil
+implement {
+ name = "mixcleanup",
+ actions = function()
+ if result then
+ cleanup(result)
+ result = nil
+ end
end
-end
+}
diff --git a/tex/context/base/page-mix.mkiv b/tex/context/base/page-mix.mkiv
index 5d1c54a71..6d7f144a6 100644
--- a/tex/context/base/page-mix.mkiv
+++ b/tex/context/base/page-mix.mkiv
@@ -29,6 +29,7 @@
% wide floats
% move floats
% offsets (inner ones, so we change the hsize ... needed with backgrounds
+% when no content we currently loose the page
% luatex buglet:
%
@@ -68,6 +69,7 @@
\c!maxheight=\textheight,
\c!maxwidth=\makeupwidth,
\c!grid=\v!tolerant,
+ \c!internalgrid=\v!line,
\c!step=.25\lineheight, % needs some experimenting
%\c!splitmethod=\v!fixed, % will be default
\c!method=\ifinner\s!box\else\s!otr\fi] % automatic as suggested by WS
@@ -75,7 +77,7 @@
\let\startmixedcolumns\relax % defined later
\let\stopmixedcolumns \relax % defined later
-\appendtoks
+\appendtoks % could become an option
\setuevalue{\e!start\currentmixedcolumns}{\startmixedcolumns[\currentmixedcolumns]}%
\setuevalue{\e!stop \currentmixedcolumns}{\stopmixedcolumns}%
\to \everydefinemixedcolumns
@@ -166,8 +168,18 @@
[\c!n=\itemgroupparameter\c!n,
\c!separator=\v!none,
\c!splitmethod=\v!none,
+ \c!grid=\v!tolerant,
+ \c!internalgrid=\v!halfline, % new, we may still revert to \v!line
\c!balance=\v!yes]
+% better
+
+\setupmixedcolumns
+ [\s!itemgroupcolumns]
+ [\c!splitmethod=\v!fixed,
+ \c!grid=\v!yes,
+ \c!internalgrid=\v!line]
+
\unexpanded\def\strc_itemgroups_start_columns
{\startmixedcolumns[\s!itemgroupcolumns]} % we could have a fast one
@@ -195,11 +207,6 @@
%D The interceptor is quite simple, at least for the moment.
-% \def\page_mix_routine_intercept
-% {\global\setbox\b_page_mix_preceding\vbox
-% {\page_otr_command_flush_top_insertions
-% \unvbox\normalpagebox}}
-
\def\page_mix_routine_intercept
{\ifdim\pagetotal>\pagegoal
% testcase: preceding-001 ... if we don't do this, text can disappear as
@@ -208,7 +215,7 @@
\fi
\global\setbox\b_page_mix_preceding\vbox
{\page_otr_command_flush_top_insertions
- \ifdim\ht\b_page_mix_preceding=\zeropoint \else
+ \ifdim\htdp\b_page_mix_preceding=\zeropoint \else
\writestatus\m!columns{preceding error}%
\unvbox\b_page_mix_preceding
\fi
@@ -288,18 +295,32 @@
\installcorenamespace{mixedcolumnsseparator}
-\setvalue{\??mixedcolumnsseparator\v!rule}%
- {\starttextproperties
- \usemixedcolumnscolorparameter\c!rulecolor
- \vrule\s!width\mixedcolumnsparameter\c!rulethickness
- \stoptextproperties}
+\unexpanded\def\installmixedcolumnseparator#1#2%
+ {\setvalue{\??mixedcolumnsseparator#1}{#2}}
+
+\installmixedcolumnseparator\v!rule
+ {\vrule
+ \s!width \mixedcolumnsparameter\c!rulethickness
+ \s!height\mixedcolumnseparatorheight
+ \s!depth \mixedcolumnseparatordepth
+ \relax}
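+
+% So more separators can be hooked in; a hypothetical, untested sketch:
+%
+% \installmixedcolumnseparator{fat}
+%   {\vrule
+%      \s!width .15\emwidth
+%      \s!height\mixedcolumnseparatorheight
+%      \s!depth \mixedcolumnseparatordepth
+%      \relax}
+%
+% after which separator=fat selects it in \setupmixedcolumns.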
\unexpanded\def\page_mix_command_inject_separator
- {\bgroup
+ {\begingroup
+ \setbox\scratchbox\hbox to \zeropoint \bgroup
+ \hss
+ \starttextproperties
+ \usemixedcolumnscolorparameter\c!rulecolor
+ \csname\??mixedcolumnsseparator\p_separator\endcsname % was \c!rule
+ \stoptextproperties
+ \hss
+ \egroup
+ \ht\scratchbox\zeropoint
+ \dp\scratchbox\zeropoint
\hss
- \csname\??mixedcolumnsseparator\mixedcolumnsparameter\c!separator\endcsname % was \c!rule
+ \box\scratchbox
\hss
- \egroup}
+ \endgroup}
%D We've now arrived at the real code. The start command mostly sets up the
%D environment and variables that are used in the splitter. One of the last
@@ -347,13 +368,13 @@
\csname\??mixedcolumnsstart\currentmixedcolumnsmethod\endcsname}
\def\page_mix_start_columns_b[#1][#2]%
- {\doifassignmentelse{#1}%
+ {\doifelseassignment{#1}%
{\let\currentmixedcolumns\empty
\page_mix_error_b}
{\edef\currentmixedcolumns{#1}%
\firstargumentfalse}%
\edef\currentmixedcolumnsmethod{\mixedcolumnsparameter\c!method}%
- \mixedcolumnsparameter\c!before\relax % so, it doesn't list to local settings !
+ \mixedcolumnsparameter\c!before\relax % so, it doesn't listen to local settings !
\csname\??mixedcolumnsbefore\currentmixedcolumnsmethod\endcsname\relax
\begingroup
\iffirstargument
@@ -425,8 +446,7 @@
[\s!itemgroupcolumns]
[\c!grid=\itemgroupparameter\c!grid]
-\setupitemgroups
- [\c!grid=\v!tolerant]
+% better
%D The common initialization:
@@ -451,6 +471,10 @@
%
\usemixedcolumnscolorparameter\c!color
%
+ \insidecolumnstrue % new
+ %
+ \useprofileparameter\mixedcolumnsparameter % new
+ %
\nofcolumns\c_page_mix_n_of_columns} % public
%D The otr method related hooks are defined next:
@@ -464,23 +488,44 @@
\newcount\c_page_mix_otr_nesting
+% \setvalue{\??mixedcolumnsbefore\s!otr}%
+% {\par
+% \global\advance\c_page_mix_otr_nesting\plusone
+% \ifcase\c_page_mix_otr_nesting\or
+% \ifdim\pagetotal=\zeropoint \else
+% \obeydepth % we could handle this in pre material
+% \fi
+% \fi}
+
\setvalue{\??mixedcolumnsbefore\s!otr}%
{\par
\global\advance\c_page_mix_otr_nesting\plusone
\ifcase\c_page_mix_otr_nesting\or
\ifdim\pagetotal=\zeropoint \else
- \obeydepth % we could handle this in pre material
+ % make sure that whitespace and blanks are done
+ \strut
+ \vskip-\lineheight
+ % no, bad spacing: \obeydepth % we could handle this in pre material
\fi
\fi}
\setvalue{\??mixedcolumnsstart\s!otr}%
{\ifcase\c_page_mix_otr_nesting\or
+ \scratchwidth\textwidth
\setupoutputroutine[\s!mixedcolumn]%
\c_page_mix_routine\c_page_mix_routine_intercept
\page_otr_trigger_output_routine
%
\holdinginserts\maxdimen
%
+ \ifvoid\b_page_mix_preceding \else
+ % moved here, before the packaging
+ \page_postprocessors_linenumbers_deepbox\b_page_mix_preceding
+ % we need to avoid unvboxing with successive balanced sets on one page
+ \global\setbox\b_page_mix_preceding\vbox{\box\b_page_mix_preceding}%
+ \wd\b_page_mix_preceding\scratchwidth % \makeupwidth
+ \page_grids_add_to_one\b_page_mix_preceding
+ \fi
\global\d_page_mix_preceding_height\ht\b_page_mix_preceding
\c_page_mix_routine\c_page_mix_routine_continue
%
@@ -500,8 +545,14 @@
\setvalue{\??mixedcolumnsstop\s!otr}%
{\par
\ifcase\c_page_mix_otr_nesting\or
- \doif{\mixedcolumnsparameter\c!balance}\v!yes{\c_page_mix_routine\c_page_mix_routine_balance}%
+ \doifelse{\mixedcolumnsparameter\c!balance}\v!yes
+ {\c_page_mix_routine\c_page_mix_routine_balance}%
+ {\penalty-\plustenthousand}% weird hack, we need to trigger the otr sometimes (new per 20140306, see balancing-001.tex)
\page_otr_trigger_output_routine
+ \ifvoid\b_page_mix_preceding \else
+ % empty columns so we need to make sure pending content is flushed
+ \unvbox\b_page_mix_preceding % new per 2014.10.25
+ \fi
\fi}
\setvalue{\??mixedcolumnsafter\s!otr}%
@@ -517,54 +568,76 @@
%D footnotes. Eventually we will have multiple strategies available.
\unexpanded\def\page_mix_routine_construct#1%
- {\ctxcommand{mixsetsplit {
- box = \number\b_page_mix_collected,
- nofcolumns = \number\c_page_mix_n_of_columns,
- maxheight = \number\d_page_mix_max_height,
- step = \number\d_page_mix_balance_step,
- cycles = \number\c_page_mix_balance_cycles,
- preheight = \number\d_page_mix_preceding_height,
- prebox = \number\b_page_mix_preceding,
- strutht = \number\strutht,
- strutdp = \number\strutdp,
- threshold = \number\d_page_mix_threshold,
- splitmethod = "\mixedcolumnsparameter\c!splitmethod",
- balance = "#1",
- alternative = "\mixedcolumnsparameter\c!alternative",
- grid = \ifgridsnapping true\else false\fi,
- }}%
+ {\d_page_mix_max_height\mixedcolumnsparameter\c!maxheight % can have changed due to header=high
+ \clf_mixsetsplit
+ box \b_page_mix_collected
+ nofcolumns \c_page_mix_n_of_columns
+ maxheight \d_page_mix_max_height
+ step \d_page_mix_balance_step
+ cycles \c_page_mix_balance_cycles
+ preheight \d_page_mix_preceding_height
+ prebox \b_page_mix_preceding
+ strutht \strutht
+ strutdp \strutdp
+ threshold \d_page_mix_threshold
+ splitmethod {\mixedcolumnsparameter\c!splitmethod}%
+ balance {#1}%
+ alternative {\mixedcolumnsparameter\c!alternative}%
+ internalgrid {\mixedcolumnsparameter\c!internalgrid}%
+ grid \ifgridsnapping true\else false\fi
+ \relax
\deadcycles\zerocount}
+\newdimen\mixedcolumnseparatorheight
+\newdimen\mixedcolumnseparatordepth
+\newdimen\mixedcolumnseparatorwidth
+
+\def\page_mix_routine_package_step
+ {% needs packaging anyway
+ \setbox\scratchbox\page_mix_command_package_column
+ \page_marks_synchronize_column\plusone\c_page_mix_n_of_columns\recurselevel\scratchbox
+ % backgrounds
+ \anch_mark_column_box\scratchbox
+ % for the moment a quick and dirty patch .. we need to go into the box (hence the \plusone) .. a slowdowner
+ % moved to start: \page_lines_add_numbers_to_box\scratchbox\recurselevel\c_page_mix_n_of_columns\plusone
+ % the framed needs a reset of strut, align, setups etc
+ \mixedcolumnseparatorheight\ht\scratchbox
+ \mixedcolumnseparatordepth \dp\scratchbox
+ \inheritedmixedcolumnsframedbox\currentmixedcolumns\scratchbox
+ % optional
+ \ifnum\recurselevel<\c_page_mix_n_of_columns
+ \ifcsname\??mixedcolumnsseparator\p_separator\endcsname
+ \page_mix_command_inject_separator
+ \else
+ \hss
+ \fi
+ \fi}
+
\unexpanded\def\page_mix_routine_package
- {\ctxcommand{mixfinalize()}%
+ {\clf_mixfinalize
\setbox\b_page_mix_collected\vbox \bgroup
\ifvoid\b_page_mix_preceding \else
- \box\b_page_mix_preceding
+ \page_postprocessors_linenumbers_deepbox\b_page_mix_preceding
+ \vbox\bgroup
+ \box\b_page_mix_preceding
+ \egroup
\global\d_page_mix_preceding_height\zeropoint
\nointerlineskip
+ % no no:
+ % \prevdepth\strutdepth
\fi
\hskip\d_page_mix_leftskip
\page_mix_hbox to \d_page_mix_max_width \bgroup
- \dorecurse\c_page_mix_n_of_columns{%
- % needs packaging anyway
- \setbox\scratchbox\page_mix_command_package_column
- \page_marks_synchronize_column\plusone\c_page_mix_n_of_columns\recurselevel\scratchbox
- % for the moment a quick and dirty patch .. we need to go into the box (hence the \plusone) .. a slowdowner
- \page_lines_add_numbers_to_box\scratchbox\recurselevel\c_page_mix_n_of_columns\plusone
- % the framed needs a reset of strut, align, setups etc
- \inheritedmixedcolumnsframedbox\currentmixedcolumns\scratchbox
- % optional
- \ifnum\recurselevel<\c_page_mix_n_of_columns
- \page_mix_command_inject_separator
- \fi
- }%
+ \edef\p_separator{\mixedcolumnsparameter\c!separator}%
+ \mixedcolumnseparatorwidth\d_page_mix_distance % \mixedcolumnsparameter\c!rulethickness\relax
+ \dorecurse\c_page_mix_n_of_columns\page_mix_routine_package_step
\egroup
\egroup}
\unexpanded\def\page_mix_command_package_column
{\page_mix_hbox to \d_page_mix_column_width \bgroup
% maybe intercept empty
- \ctxcommand{mixgetsplit(\recurselevel)}%
+ \clf_mixgetsplit\recurselevel\relax
\hskip-\d_page_mix_column_width
\page_mix_hbox to \d_page_mix_column_width \bgroup
\placenoteinserts
@@ -580,8 +653,8 @@
\page_mix_routine_construct\v!no
\page_mix_routine_package
\page_otr_construct_and_shipout\box\b_page_mix_collected
- \ctxcommand{mixflushrest()}%
- \ctxcommand{mixcleanup()}%
+ \clf_mixflushrest
+ \clf_mixcleanup
\egroup}
\unexpanded\def\page_mix_routine_balance
@@ -592,9 +665,9 @@
\doloop
{%writestatus\m!columns{construct continue (\the\htdp\b_page_mix_collected)}%
\page_mix_routine_construct\v!no
- \ifcase\ctxcommand{mixstate()}\relax
+ \ifcase\clf_mixstate\relax
% 0 = okay, we can balance
- \setbox\b_page_mix_collected\vbox{\ctxcommand{mixflushlist()}}% we could avoid this
+ \setbox\b_page_mix_collected\vbox{\clf_mixflushlist}% we could avoid this
%writestatus\m!columns{construct balance}%
\page_mix_routine_construct\v!yes
\page_mix_routine_package
@@ -604,21 +677,22 @@
\page_otr_command_set_hsize
\par
%writestatus\m!columns{flush balance}%
+ \page_grids_add_to_mix\b_page_mix_collected % no linenumbers here
\box\b_page_mix_collected
\vskip\zeropoint % triggers recalculation of page stuff (weird that this is needed but it *is* needed, see mixed-001.tex)
\par
\nointerlineskip
\prevdepth\strutdp
- \ctxcommand{mixflushrest()}% rubish
- \ctxcommand{mixcleanup()}% rubish
+ \clf_mixflushrest% rubbish
+ \clf_mixcleanup % rubbish
\exitloop
\or
% 1 = we have stuff left, so flush and rebalance
%writestatus\m!columns{flush continue}%
\page_mix_routine_package
\page_otr_construct_and_shipout\box\b_page_mix_collected
- \setbox\b_page_mix_collected\vbox{\ctxcommand{mixflushrest()}}% we could avoid this
- \ctxcommand{mixcleanup()}%
+ \setbox\b_page_mix_collected\vbox{\clf_mixflushrest}% we could avoid this
+ \clf_mixcleanup
\ifdim\ht\b_page_mix_collected=\zeropoint
\exitloop
\fi
@@ -686,11 +760,32 @@
\letvalue{\??mixedcolumnsbefore\s!box}\donothing
\letvalue{\??mixedcolumnsafter \s!box}\donothing
+% \setvalue{\??mixedcolumnsstart\s!box}%
+% {\edef\p_page_mix_strut{\mixedcolumnsparameter\c!strut}%
+% \setbox\b_page_mix_collected\vbox\bgroup
+% \let\currentoutputroutine\s!mixedcolumn % makes \column work
+% \forgetall
+% \page_mix_command_set_hsize
+% \ifx\p_page_mix_strut\v!yes
+% \begstrut
+% \ignorespaces
+% \fi}
+%
+% \setvalue{\??mixedcolumnsstop\s!box}%
+% {\ifx\p_page_mix_strut\v!yes
+% \removeunwantedspaces
+% \endstrut
+% \fi
+% \egroup
+% \page_mix_box_balance}
+
\setvalue{\??mixedcolumnsstart\s!box}%
{\edef\p_page_mix_strut{\mixedcolumnsparameter\c!strut}%
- \setbox\b_page_mix_collected\vbox\bgroup
+ \setbox\b_page_mix_collected\vbox \bgroup
\let\currentoutputroutine\s!mixedcolumn % makes \column work
\forgetall
+ \usegridparameter\mixedcolumnsparameter
+ % \useprofileparameter\mixedcolumnsparameter
\page_mix_command_set_hsize
\ifx\p_page_mix_strut\v!yes
\begstrut
@@ -703,6 +798,16 @@
\endstrut
\fi
\egroup
+ \edef\p_profile{\mixedcolumnsparameter\c!profile}%
+ \ifx\p_profile\empty \else
+ % this can never be completely ok because we cheat with depth and height
+ % and the glue in between, and when we're too large we run into issues,
+ % so maybe it's best to limit the correction to one line
+ \profilegivenbox\p_profile\b_page_mix_collected
+ \setbox\b_page_mix_collected\vbox{\unvbox\b_page_mix_collected}%
+ % tracing
+ % \addprofiletobox\b_page_mix_collected
+ \fi
\page_mix_box_balance}
%D The related balancer is only a few lines:
@@ -713,8 +818,8 @@
\page_mix_routine_construct\v!yes
\page_mix_routine_package
\dontleavehmode\box\b_page_mix_collected
- \ctxcommand{mixflushrest()}%
- \ctxcommand{mixcleanup()}%
+ \clf_mixflushrest
+ \clf_mixcleanup
\egroup}
%D As usual, floats complicates matters and this is where experimental code
@@ -755,11 +860,11 @@
% \unexpanded\def\page_mix_command_flush_top_insertions
% {\page_one_command_flush_top_insertions}
-% \unexpanded\def\page_mix_place_float_top
-% {\showmessage\m!columns4\empty\page_one_place_float_here}
+\unexpanded\def\page_mix_place_float_top
+ {\showmessage\m!columns4\empty\page_one_place_float_here}
-% \unexpanded\def\page_mix_place_float_bottom
-% {\showmessage\m!columns5\empty\page_one_place_float_here}
+\unexpanded\def\page_mix_place_float_bottom
+ {\showmessage\m!columns5\empty\page_one_place_float_here}
\unexpanded\def\page_mix_place_float_here
{\page_one_place_float_here}
diff --git a/tex/context/base/page-mul.mkiv b/tex/context/base/page-mul.mkiv
index a874cd116..8db5a4773 100644
--- a/tex/context/base/page-mul.mkiv
+++ b/tex/context/base/page-mul.mkiv
@@ -960,7 +960,7 @@
\ifnum\c_page_mul_balance_tries>\c_page_mul_balance_tries_max\relax
\showmessage\m!columns7\empty
\else
- \showmessage\m!columns8{\the\c_page_mul_balance_tries\space}%
+ \showmessage\m!columns8{\the\c_page_mul_balance_tries}%
\fi
\egroup}
@@ -1430,7 +1430,7 @@
% \stopcolumns
% \def\backgroundfinishcolumnbox
- % {\doifinsetelse\@@kloffset{\v!none,\v!overlay}
+ % {\doifelseinset\@@kloffset{\v!none,\v!overlay}
% {\let\@@kloffset\!!zeropoint}
% {\scratchdimen\@@kloffset
% \advance\scratchdimen -\@@klrulethickness
@@ -1605,9 +1605,11 @@
\else
\balancecolumnsfalse
\fi
- \installalign\v!yes {\page_columns_align_option_yes }%
- \installalign\v!no {\page_columns_align_option_no }%
- \installalign\v!text{\page_columns_align_option_text}%
+ % % this won't work (blocked by check for overloading; too fuzzy anyway)
+ % \installalign\v!yes {\page_columns_align_option_yes }% \stretchcolumnstrue \inheritcolumnsfalse
+ % \installalign\v!no {\page_columns_align_option_no }% \stretchcolumnsfalse\inheritcolumnsfalse
+ % \installalign\v!text{\page_columns_align_option_text}% \stretchcolumnsfalse\inheritcolumnstrue
+ % %
\stretchcolumnsfalse
\inheritcolumnstrue
\edef\p_align{\columnsparameter\c!align}%
diff --git a/tex/context/base/page-one.mkiv b/tex/context/base/page-one.mkiv
index 3f9dcd7c6..4cf59da2d 100644
--- a/tex/context/base/page-one.mkiv
+++ b/tex/context/base/page-one.mkiv
@@ -454,10 +454,13 @@
\fi\fi
\fi}
+
\def\page_one_place_float_here_indeed
- {%\ifgridsnapping \else
- \baselinecorrection
- %\fi
+ {\ifgridsnapping
+ % otherwise a really bad outcome
+ \else
+ \baselinecorrection % this has to be done better (and definitely not in column mode)
+ \fi
\doplacefloatbox
\page_floats_report_total
\dohandlenextfloatindent}
@@ -496,12 +499,12 @@
\def\page_one_place_float_bottom {\page_one_place_float_otherwise}
\def\page_one_place_float_otherwise
- {\doifinsetelse\v!here\floatlocationmethod
+ {\doifelseinset\v!here\floatlocationmethod
\page_one_place_float_otherwise_here
\page_one_place_float_otherwise_else}
\def\page_one_place_float_otherwise_here
- {\doifinsetelse\v!always\floatlocationmethod
+ {\doifelseinset\v!always\floatlocationmethod
{\page[\v!preference]%
\page_otr_command_check_if_float_fits
\ifconditional\c_page_floats_room
@@ -525,7 +528,7 @@
\fi}}
\def\page_one_place_float_otherwise_else
- {\doifinsetelse\v!always\floatlocationmethod
+ {\doifelseinset\v!always\floatlocationmethod
{\page_otr_command_check_if_float_fits
\ifconditional\c_page_floats_room
\page_one_place_float_auto_top_bottom
diff --git a/tex/context/base/page-pst.lua b/tex/context/base/page-pst.lua
index 50580ae33..472bdbabe 100644
--- a/tex/context/base/page-pst.lua
+++ b/tex/context/base/page-pst.lua
@@ -8,16 +8,18 @@ if not modules then modules = { } end modules ['page-pst'] = {
-- todo: adapt message
-local tonumber, next = tonumber, next
-local format, validstring = string.format, string.valid
-local sortedkeys = table.sortedkeys
+local tonumber, next, type = tonumber, next, type
+local find, validstring = string.find, string.valid
-local context = context
-local commands = commands
+local context = context
+local implement = interfaces.implement
local texgetcount = tex.getcount
local texsetcount = tex.setcount
+local sortedkeys = table.sortedkeys
+local formatters = string.formatters
+
local cache = { }
local function flush(page)
@@ -25,7 +27,7 @@ local function flush(page)
if c then
for i=1,#c do
-- characters.showstring(c[i])
- context.viafile(c[i],format("page.%s",validstring(page,"nopage")))
+ context.viafile(c[i],formatters["page.%s"](validstring(page,"nopage")))
end
cache[page] = nil
end
@@ -43,11 +45,11 @@ local function setnextpage()
texsetcount("global","c_page_postponed_blocks_next_page",n)
end
-function commands.flushpostponedblocks(page)
+local function flushpostponedblocks(specification)
-- we need to flush previously pending pages as well and the zero
-- slot is the generic one so that one is always flushed
local t = sortedkeys(cache)
- local p = tonumber(page) or texgetcount("realpageno") or 0
+ local p = tonumber(specification.page) or texgetcount("realpageno") or 0
for i=1,#t do
local ti = t[i]
if ti <= p then
@@ -59,9 +61,19 @@ function commands.flushpostponedblocks(page)
setnextpage()
end
-function commands.registerpostponedblock(page)
+implement {
+ name = "flushpostponedblocks",
+ actions = flushpostponedblocks,
+ arguments = {
+ {
+ { "page" }
+ }
+ }
+}
+
+local function registerpostponedblock(page)
if type(page) == "string" then
- if string.find(page,"^+") then
+ if find(page,"^+") then
page = texgetcount("realpageno") + (tonumber(page) or 1) -- future delta page
else
page = tonumber(page) or 0 -- preferred page or otherwise first possible occasion
@@ -80,7 +92,14 @@ function commands.registerpostponedblock(page)
if page == 0 then
interfaces.showmessage("layouts",3,#c)
else
- interfaces.showmessage("layouts",3,string.format("%s (realpage: %s)",#c,page))
+ interfaces.showmessage("layouts",3,formatters["%s (realpage: %s)"](#c,page))
end
setnextpage()
end
+
+implement {
+ name = "registerpostponedblock",
+ actions = registerpostponedblock,
+ arguments = "string"
+}
+
diff --git a/tex/context/base/page-pst.mkiv b/tex/context/base/page-pst.mkiv
index 704289246..f12663f66 100644
--- a/tex/context/base/page-pst.mkiv
+++ b/tex/context/base/page-pst.mkiv
@@ -57,10 +57,10 @@
\unexpanded\setvalue{\e!start\v!postponing}%
{\bgroup
\obeylines
- \doifnextoptionalelse{\egroup\page_postponed_blocks_start}{\egroup\page_postponed_blocks_start[0]}}
+ \doifelsenextoptional{\egroup\page_postponed_blocks_start}{\egroup\page_postponed_blocks_start[0]}}
\unexpanded\setvalue{\e!stop\v!postponing}%
- {\ctxcommand{registerpostponedblock("\currentpostponedpage")}\relax}
+ {\clf_registerpostponedblock{\currentpostponedpage}\relax}
\def\page_postponed_blocks_start[#1]%
{\edef\currentpostponedpage{#1}%
@@ -80,7 +80,8 @@
\setnormalcatcodes % postponing in verbatim
\uncatcodespacetokens % postponing in startlines
\restoreglobalbodyfont % otherwise problems inside split verbatim
- \ctxcommand{flushpostponedblocks()}%
+ \clf_flushpostponedblocks
+ % page {123}
\relax
\page_otr_command_flush_floats % new but potentially dangerous, maybe we need a classification
\endgroup} % of blocks: with and without flush
diff --git a/tex/context/base/page-run.mkiv b/tex/context/base/page-run.mkiv
index dabf37252..9adcb23c7 100644
--- a/tex/context/base/page-run.mkiv
+++ b/tex/context/base/page-run.mkiv
@@ -74,18 +74,34 @@
\startluacode
local format, concat = string.format, table.concat
+local todimen = number.todimen
+local texdimen = tex.dimen
-local function todimen(name,unit,fmt)
- return number.todimen(tex.dimen[name],unit,fmt)
+local function asdimen(name,unit)
+ return todimen(texdimen[name],unit,"%0.4f") -- 4 is more than enough, even 3 would be okay
end
-function commands.showlayoutvariables(options)
-
- if options == "" then
+local function checkedoptions(options)
+ if type(options) == "table" then
+ return options
+ elseif not options or options == "" then
options = "pt,cm"
end
+ options = utilities.parsers.settings_to_hash(options)
+ local n = 4
+ for k, v in table.sortedhash(options) do
+ local m = tonumber(k)
+ if m then
+ n = m
+ end
+ end
+ options.n = n
+ return options
+end
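+
+-- so, assuming the usual behaviour of settings_to_hash, something like
+-- checkedoptions("pt,cm,6") gives a hash with the pt and cm keys set and n == 6,
+-- where n ends up as the number of demo pages flushed by showlayout below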
+
+function commands.showlayoutvariables(options)
- local options = utilities.parsers.settings_to_hash(options)
+ options = checkedoptions(options)
local dimensions = { "pt", "bp", "cm", "mm", "dd", "cc", "pc", "nd", "nc", "sp", "in" }
@@ -108,7 +124,7 @@ function commands.showlayoutvariables(options)
for i=1,#dimensions do
local d = dimensions[i]
if options[d] then
- context("%s%s",todimen(name,d,"%0.4f"),d)
+ context("%s%s",asdimen(name,d),d)
context.NC()
end
end
@@ -129,7 +145,7 @@ function commands.showlayoutvariables(options)
for i=1,#dimensions do
local d = dimensions[i]
if options[d] then
- result[#result+1] = format("%12s%s",todimen(name,d,"%0.4f"),d)
+ result[#result+1] = format("%12s%s",asdimen(name,d),d)
end
end
commands.writestatus("layout",format("%-24s %s",interfaces.interfacedcommand(name),concat(result," ")))
@@ -215,6 +231,8 @@ end
function commands.showlayout(options)
+ options = checkedoptions(options)
+
if tex.count.textlevel == 0 then
commands.showlayoutvariables(options)
@@ -225,7 +243,7 @@ function commands.showlayout(options)
context.bgroup()
context.showframe()
context.setuplayout { marking = interfaces.variables.on }
- for i=1,4 do
+ for i=1,(options.n or 4) do
commands.showlayoutvariables(options)
context.page()
end
@@ -281,8 +299,8 @@ end
\unexpanded\gdef\showmargins
{\starttabulate
- \NC asynchrone \NC \doifoddpageelse {odd} {even} \NC \NR
- \NC synchrone \NC \doifrightpageelse {right} {left} \NC \NR
+ \NC asynchrone \NC \doifelseoddpage {odd} {even} \NC \NR
+ \NC synchrone \NC \doifelserightpage {right} {left} \NC \NR
\NC right margin \NC \the\rightmarginwidth \NC \NR
\NC left margin \NC \the\leftmarginwidth \NC \NR
\NC outer margin \NC \the\outermarginwidth \NC \NR
diff --git a/tex/context/base/page-sel.mkvi b/tex/context/base/page-sel.mkvi
index cb9bcb509..ee25a37db 100644
--- a/tex/context/base/page-sel.mkvi
+++ b/tex/context/base/page-sel.mkvi
@@ -56,7 +56,7 @@
{\dotripleempty\page_selectors_insert}
\def\page_selectors_insert[#filename][#emptylist][#settings]%
- {\doifassignmentelse{#emptylist}
+ {\doifelseassignment{#emptylist}
{\page_selectors_insert_indeed[#filename][][#emptylist]}
{\page_selectors_insert_indeed[#filename][#emptylist][#settings]}}
diff --git a/tex/context/base/page-set.mkiv b/tex/context/base/page-set.mkiv
index 9c232f535..a5afb92e9 100644
--- a/tex/context/base/page-set.mkiv
+++ b/tex/context/base/page-set.mkiv
@@ -387,7 +387,7 @@
{\advance\scratchcounter\plusone}}%
\popmacro\columnmaxcells}
-\long\def\OTRSETrecurseRL#1%
+\def\OTRSETrecurseRL#1%
{\dostepwiserecurse\nofcolumns\plusone\minusone
{#1\hskip\namedcolumnsetparameter{\currentcolumnset:\recurselevel}\c!distance}}
@@ -2184,11 +2184,11 @@
\def\dodefinecolumntextarea[#1][#2][#3]% y=0 is possible and handy!
{\ifthirdargument
- \doifinsetelse{#2}{\v!both,\v!fixed}
+ \doifelseinset{#2}{\v!both,\v!fixed}
{\definecolumntextarea[#1][\v!left ][\c!type=#2,#3]%
\definecolumntextarea[#1][\v!right][\c!type=#2,#3]}
{\doifelse{#2}\v!next
- {\doifoddpageelse
+ {\doifelseoddpage
{\definecolumntextarea[#1][\v!right][\c!type=#2,#3]}
{\definecolumntextarea[#1][\v!left ][\c!type=#2,#3]}}
{\presetlocalframed
@@ -2214,7 +2214,7 @@
{\setupcolumntextarea[#1][\v!left ][#3]%
\setupcolumntextarea[#1][\v!right][#3]}
{\doifelse{#2}\v!next
- {\doifoddpageelse
+ {\doifelseoddpage
{\setupcolumntextarea[#1][\v!right][#3]}
{\setupcolumntextarea[#1][\v!left][#3]}}
{\getparameters[\??mt#1#2][#3]}}%
@@ -2379,13 +2379,13 @@
\unexpanded\def\setupcolumntextareatext
{\dotripleempty\dosetupcolumntextareatext}
-\long\def\dosetupcolumntextareatext[#1][#2][#3]%
+\def\dosetupcolumntextareatext[#1][#2][#3]%
{\ifthirdargument
\doifelse{#2}\v!both
{\setvalue{\??mt#1\v!left }{#3}%
\setvalue{\??mt#1\v!right}{#3}}
{\doifelse{#2}\v!next
- {\doifoddpageelse
+ {\doifelseoddpage
{\setvalue{\??mt#1\v!right}{#3}}%
{\setvalue{\??mt#1\v!left }{#3}}}%
{\setvalue{\??mt#1#2}{#3}}}%
@@ -2645,8 +2645,6 @@
% \chapter{thuan} \dorecurse{25}{\input thuan \endgraf\placefigure{}{}}
% \stopcolumnset
-\unprotect
-
% only in columnsets
% \def\cornerfigure
diff --git a/tex/context/base/page-sid.mkiv b/tex/context/base/page-sid.mkiv
index f7a2357bf..cbee4da20 100644
--- a/tex/context/base/page-sid.mkiv
+++ b/tex/context/base/page-sid.mkiv
@@ -15,21 +15,18 @@
\unprotect
-% These macro deal with side floats. We started with Daniel
-% Comenetz macros as published in TUGBoat Volume 14 (1993),
-% No.\ 1: Anchored Figures at Either Margin. I extended and
-% patched the macros to suite our needs which results in a
-% messy module. Therefore, this module badly needs an update
-% because it's now a mixture of old and new macros.
+% These macros deal with side floats. We started with Daniel Comenetz's macros as published
+% in TUGBoat Volume 14 (1993), No.\ 1: Anchored Figures at Either Margin. I extended and
+% patched the macros to suit our needs, which resulted in a messy module. Therefore, this
+% module badly needs an update because it's now a mixture of old and new macros.
% Interesting cases where it goes wrong:
%
% \placefigure[left]{}{} \dorecurse{3}{\input ward } {\par} \input ward
%
-% Here we get an unwanted carried over hangindent and parindent. A
-% solution is to associate it with the local par node instead. This
-% is something to discuss with Taco as it could be a new luatex
-% feature: explicitly set par properties.
+% Here we get an unwanted carried-over hangindent and parindent. A solution is to associate
+% them with the local par node instead. This is something to discuss with Taco as it could be
+% a new luatex/mkiv feature: explicitly set par properties.
% Maybe I should just rewrite the lot.
@@ -202,17 +199,72 @@
\global\setfalse\c_page_sides_short
\global\setfalse\c_page_sides_flag}
+\unexpanded\def\doifelsesidefloat
+ {\par
+ \ifnum\dimexpr\d_page_sides_vsize-\pagetotal\relax>\zeropoint
+ \expandafter\firstoftwoarguments
+ \else
+ \expandafter\secondoftwoarguments
+ \fi}
+
+\let\doifsidefloatelse\doifelsesidefloat
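+
+% a usage sketch: \doifelsesidefloat {still alongside the float} {past the float}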
+
+% \def\page_sides_flush_floats_indeed
+% {\global\advance\d_page_sides_vsize-\d_page_sides_bottomskip
+% \begingroup
+% \let\page_sides_flush_floats\relax
+% \forgetall
+% \doloop
+% {\strut
+% \iftracesidefloats
+% \color[darkgray]{\ruledhbox{\strut\kern\d_page_sides_width}}%
+% \fi
+% \par
+% % \ifdim\dimexpr\d_page_sides_vsize-\pagetotal\relax>\zeropoint
+% \ifdim\dimexpr\d_page_sides_vsize-\d_page_sides_bottomskip-\pagetotal\relax>\zeropoint
+% \ifnum\recurselevel>\plushundred % safeguard, sort of deadcycles
+% \exitloop
+% \fi
+% \else
+% \exitloop
+% \fi}%
+% \endgroup
+% \ifdim\parskip>\zeropoint % why this test ?
+% \ifdim\d_page_sides_bottomskip>\parskip
+% % \nowhitespace
+% % \vskip\d_page_sides_bottomskip
+% \blank[\v!nowhite,\the\dimexpr\d_page_sides_bottomskip]
+% \fi
+% \else
+% \blank[\the\d_page_sides_bottomskip]% new, so needs checking
+% \fi}
+
+\installcorenamespace{sidefloatsteps}
+
+\setvalue{\??sidefloatsteps\v!line }{\strut}
+\setvalue{\??sidefloatsteps\v!big }{\strut}
+\setvalue{\??sidefloatsteps\v!medium}{\halfstrut}
+\setvalue{\??sidefloatsteps\v!small }{\quarterstrut}
+\setvalue{\??sidefloatsteps\v!depth }{\depthstrut}
+
+% we don't officially know what kind of float we flush
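+% so a setup along the lines of \setupfloats[step=medium] (assuming the step key is
+% indeed meant to be set at that level) makes the flush loop below advance in
+% half-line steps instead of full struts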
+
\def\page_sides_flush_floats_indeed
{\global\advance\d_page_sides_vsize-\d_page_sides_bottomskip
\begingroup
\let\page_sides_flush_floats\relax
+ \edef\m_pages_strut{\executeifdefined{\??sidefloatsteps\rootfloatparameter\c!step}\strut}%
\forgetall
\doloop
- {\strut
- \iftracesidefloats
- \color[darkgray]{\ruledhbox{\strut\kern\d_page_sides_width}}%
+ {\iftracesidefloats
+ \dontleavehmode
+ \ruledhbox{\m_pages_strut\kern\d_page_sides_width}%
+ \else
+ \m_pages_strut
\fi
\par
+ \nointerlineskip
+ % \ifdim\dimexpr\d_page_sides_vsize-\d_page_sides_bottomskip-\pagetotal\relax>\zeropoint
\ifdim\dimexpr\d_page_sides_vsize-\pagetotal\relax>\zeropoint
\ifnum\recurselevel>\plushundred % safeguard, sort of deadcycles
\exitloop
@@ -223,11 +275,28 @@
\endgroup
\ifdim\parskip>\zeropoint % why this test ?
\ifdim\d_page_sides_bottomskip>\parskip
- \nowhitespace
- \vskip\d_page_sides_bottomskip
+ % \nowhitespace
+ % \vskip\d_page_sides_bottomskip
+% \blank[\v!nowhite,\the\dimexpr\d_page_sides_bottomskip]
+ \blank[\v!nowhite,\rootfloatparameter\c!sidespaceafter]
\fi
+ \else
+% \blank[\the\d_page_sides_bottomskip]% new, so needs checking
+ \blank[\rootfloatparameter\c!sidespaceafter]% new, so needs checking
\fi}
+% alternative method (unsnapped)
+%
+% \def\page_sides_flush_floats_indeed
+% {\scratchdimen\dimexpr\d_page_sides_vsize-\d_page_sides_bottomskip-\pagetotal\relax
+% \ifdim\parskip>\zeropoint % why this test ?
+% \ifdim\scratchdimen>\parskip
+% \blank[\v!nowhite,\the\scratchdimen] % better in stages
+% \fi
+% \else
+% \blank[\the\scratchdimen]
+% \fi}
+
\def\page_sides_check_floats_after_par
{\page_sides_check_floats_indeed
\ifdim\oldpagetotal=\pagetotal \else
@@ -294,10 +363,10 @@
\page_otr_sides_pop_penalties}
\def\page_sides_output_routine_yes % we need to rework this ... add pagediscards and such
- {\unvbox\normalpagebox
+ {\unvbox\normalpagebox % bah, and the discards?
\setbox\b_page_sides_bottom\lastbox
\ifdim\wd\b_page_sides_bottom>\d_page_sides_hsize
- \penalty-201
+ \penalty-201 % hm, I really need to write this from scratch
\box\b_page_sides_bottom
\else\ifvoid\b_page_sides_bottom
\else
@@ -592,14 +661,39 @@
\global\settrue\c_page_floats_room
\fi}
+% \def\page_sides_prepare_space
+% {\par
+% % no longer needed \whitespace
+% \begingroup
+% \forgetall
+% \reseteverypar
+% \verticalstrut
+% \vskip-\struttotal
+% \endgroup}
+
+\installtextracker
+ {sidefloats.anchor}
+ {\let\page_sides_anchor\page_sides_anchor_yes}
+ {\let\page_sides_anchor\page_sides_anchor_nop}
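+% (the anchor can be made visible with \enabletrackers[sidefloats.anchor])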
+
+\def\page_sides_anchor_yes
+ {\darkred
+ \hskip-5\emwidth
+ \vrule\s!height.05\exheight\s!depth.05\exheight\s!width10\emwidth}
+
+\def\page_sides_anchor_nop
+ {\strut}
+
+\let\page_sides_anchor\page_sides_anchor_nop
+
\def\page_sides_prepare_space
{\par
- \whitespace
\begingroup
- \forgetall
\reseteverypar
- \verticalstrut
+ \dontleavehmode\hbox to \zeropoint{\page_sides_anchor\hss\strut}%
+ \vskip-\parskip
\vskip-\struttotal
+ \inhibitblank
\endgroup}
\def\page_sides_handle_float#1% grid (4) is rather experimental
@@ -702,13 +796,15 @@
\fi}
\def\page_sides_inject_dummy_lines
- {\scratchcounter\pageshrink
+ {\begingroup
+ \scratchcounter\pageshrink
\divide\scratchcounter \baselineskip
\advance\scratchcounter \plusone
\parskip\zeropoint
\dorecurse\scratchcounter{\hbox to \hsize{}}%
\kern-\scratchcounter\baselineskip
- \penalty\zerocount}
+ \penalty\zerocount
+ \endgroup}
% Display math
%
@@ -764,7 +860,7 @@
\def\checksidefloat {\page_sides_check_floats}
\def\flushsidefloats {\page_sides_flush_floats}
\def\flushsidefloatsafterpar{\page_sides_flush_floats_after_par}
-%def\forgetsidefloats {\page_sides_forget_floats}
+\def\forgetsidefloats {\page_sides_forget_floats}
%def\synchronizesidefloats {\page_sides_synchronize_floats}
\protect \endinput
diff --git a/tex/context/base/page-str.lua b/tex/context/base/page-str.lua
index 35ce85609..56c6167aa 100644
--- a/tex/context/base/page-str.lua
+++ b/tex/context/base/page-str.lua
@@ -17,10 +17,12 @@ local nodes, node = nodes, node
local nodepool = nodes.pool
local tasks = nodes.tasks
+local implement = interfaces.implement
+
local new_kern = nodepool.kern
local new_glyph = nodepool.glyph
-local find_tail = node.slide
+local slide_nodelist = node.slide
local write_node = node.write
local free_node = node.free
local copy_nodelist = node.copy_list
@@ -73,7 +75,7 @@ function streams.collect(head,where)
end
local last = dana[#dana]
if last then
- local tail = find_tail(last)
+ local tail = slide_nodelist(last)
tail.next, head.prev = head, tail
elseif last == false then
dana[#dana] = head
@@ -202,7 +204,7 @@ function streams.synchronize(list) -- this is an experiment !
else
-- this is not yet ok as we also need to keep an eye on vertical spacing
-- so we might need to do some splitting or whatever
- local tail = vbox.list and find_tail(vbox.list)
+ local tail = vbox.list and slide_nodelist(vbox.list)
local n, delta = 0, delta_height -- for tracing
while delta > 0 do
-- we need to add some interline penalties
@@ -235,6 +237,60 @@ tasks.disableaction("mvlbuilders", "streams.collect")
function streams.initialize()
tasks.enableaction ("mvlbuilders", "streams.collect")
+ function streams.initialize() end
end
-- todo: remove empty last { }'s
+-- todo: better names, enable etc
+
+implement {
+ name = "initializestream",
+ actions = streams.initialize,
+ onlyonce = true,
+}
+
+implement {
+ name = "enablestream",
+ actions = streams.enable,
+ arguments = "string"
+}
+
+implement {
+ name = "disablestream",
+ actions = streams.disable
+}
+
+implement {
+ name = "startstream",
+ actions = streams.start,
+ arguments = "string"
+}
+
+implement {
+ name = "stopstream",
+ actions = streams.stop
+}
+
+implement {
+ name = "flushstream",
+ actions = streams.flush,
+ arguments = "string"
+}
+
+implement {
+ name = "flushstreamcopy",
+ actions = streams.flush,
+ arguments = { "string", true }
+}
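+
+-- the literal true above is, as far as I can tell, passed on as a constant second
+-- argument, so streams.flush runs in copy mode here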
+
+implement {
+ name = "synchronizestream",
+ actions = streams.synchronize,
+ arguments = "string"
+}
+
+implement {
+ name = "pushstream",
+ actions = streams.push,
+ arguments = "string"
+}
diff --git a/tex/context/base/page-str.mkiv b/tex/context/base/page-str.mkiv
index 200a71377..e4b2fa229 100644
--- a/tex/context/base/page-str.mkiv
+++ b/tex/context/base/page-str.mkiv
@@ -29,14 +29,12 @@
%D
%D Remark: marknotes are gone, at least for a while.
-\writestatus{loading}{ConTeXt Page Macros / Page Streams}
-
\registerctxluafile{page-str}{1.001}
\unprotect
\let \currentoutputstream \empty
-\newif \ifinoutputstream % will becoem a conditional or mode
+\newif \ifinoutputstream % will become a conditional or mode
\newtoks \everyenableoutputstream
\appendtoks
@@ -44,7 +42,7 @@
\to \everyenableoutputstream
\unexpanded\def\initializeoutputstreams
- {\ctxlua{streams.initialize()}%
+ {\clf_initializestream
\glet\initializeoutputstreams\relax}
\unexpanded\def\enableoutputstream[#1]% could be \startoutputsubstream
@@ -52,12 +50,12 @@
\the\everyenableoutputstream
\inoutputstreamtrue
\xdef\currentoutputstream{#1}%
- \ctxlua{streams.enable("#1")}}
+ \clf_enablestream{#1}}
\unexpanded\def\disableoutputstream
{\inoutputstreamfalse
\global\let\currentoutputstream\s!default
- \ctxlua{streams.disable()}}
+ \clf_disablestream}
\unexpanded\def\startoutputstream[#1]%
{\begingroup
@@ -65,10 +63,10 @@
\the\everyenableoutputstream
\inoutputstreamtrue
\xdef\currentoutputstream{#1}%
- \ctxlua{streams.start("#1")}}
+ \clf_startstream{#1}}
\unexpanded\def\stopoutputstream
- {\ctxlua{streams.stop()}%
+ {\clf_stopstream
\endgroup}
\unexpanded\def\startoutputsubstream[#1]% just push/pop instead
@@ -79,13 +77,13 @@
{\globalpopmacro\currentoutputstream
\enableoutputstream[\currentoutputstream]}
-\def\flushoutputstream [#1]{\ctxlua{streams.flush("#1")}}
-\def\outputstreamcopy [#1]{\vbox{\ctxlua{streams.flush("#1",true)}}}
-\def\outputstreambox [#1]{\vbox{\ctxlua{streams.flush("#1")}}}
-\def\outputstreamunvcopy[#1]{\ctxlua{streams.flush("#1",true)}}
-\def\outputstreamunvbox [#1]{\ctxlua{streams.flush("#1")}}
-\def\synchronizestreams [#1]{\ctxlua{streams.synchronize("#1")}}
-\def\dopushoutputstream [#1]{\ctxlua{streams.push("#1")}}
+\def\flushoutputstream [#1]{\clf_flushstream{#1}}
+\def\outputstreambox [#1]{\vbox{\clf_flushstream{#1}}}
+\def\outputstreamcopy [#1]{\vbox{\clf_flushstreamcopy{#1}}}
+\def\outputstreamunvbox [#1]{\clf_flushstream{#1}}
+\def\outputstreamunvcopy[#1]{\clf_flushstreamcopy{#1}}
+\def\synchronizestreams [#1]{\clf_synchronizestream{#1}}
+\def\dopushoutputstream [#1]{\clf_pushstream{#1}}
\unexpanded\def\pushoutputstream
{\dosingleempty\dopushoutputstream}
diff --git a/tex/context/base/page-txt.mkvi b/tex/context/base/page-txt.mkvi
index 240f0e00b..76143a018 100644
--- a/tex/context/base/page-txt.mkvi
+++ b/tex/context/base/page-txt.mkvi
@@ -142,11 +142,11 @@
%D \showsetup{noheaderandfooterlines}
%D \showsetup{notopandbottomlines}
-\def\noheaderandfooterlines
+\unexpanded\def\noheaderandfooterlines
{\setuplayoutelement[\v!header][\c!state=\v!empty]%
\setuplayoutelement[\v!footer][\c!state=\v!empty]}
-\def\notopandbottomlines
+\unexpanded\def\notopandbottomlines
{\setuplayoutelement[\v!top ][\c!state=\v!empty]%
\setuplayoutelement[\v!bottom][\c!state=\v!empty]}
@@ -253,7 +253,7 @@
%D only when double sided typesetting is enabled.
\unexpanded\def\page_layouts_process_element_double
- {\doifoddpageelse
+ {\doifelseoddpage
\page_layouts_process_element_double_odd
\page_layouts_process_element_double_even}
@@ -378,6 +378,9 @@
\doubleexpandafter\firstoftwoarguments
\fi\fi}
+\let\doiflayouttextlineelse\doifelselayouttextline
+\let\doiflayoutsomelineelse\doifelselayoutsomeline
+
\newconditional\resyncaftertextline
\setvalue{\??layouttextsline\v!normal}{\page_layouts_place_text_line_indeed}
@@ -418,7 +421,7 @@
%D The following macro has to be called after a page
%D is flushed.
-\def\resetlayouttextlines % public
+\unexpanded\def\resetlayouttextlines % public
{\csname\??layouttextsreset\v!top \endcsname
\csname\??layouttextsreset\v!header\endcsname
\csname\??layouttextsreset\v!text \endcsname
@@ -435,17 +438,17 @@
% \settext[header][text][middle][xxx][yyy]
-\def\settextcontent
+\unexpanded\def\settextcontent
{\doquintupleempty\page_layouts_set_text_content}
\def\page_layouts_set_text_content[#vertical][#horizontal][#one][#two][#three]% header text middle text/text
{\iffifthargument
- \setvalue{\namedlayoutelementhash{#vertical:#horizontal}\executeifdefined{\??layouttextcontent\c!text:#one}\c!middletext}%
+ \setvalue{\namedlayoutelementhash{#vertical:#horizontal}\executeifdefined{\??layouttextcontent\v!text:#one}\c!middletext}%
{\page_layouts_process_element_double
\c!leftstyle \c!leftcolor \c!leftwidth {#two}%
\c!rightstyle\c!rightcolor\c!rightwidth{#three}}%
\else\iffourthargument
- \setvalue{\namedlayoutelementhash{#vertical:#horizontal}\executeifdefined{\??layouttextcontent\c!text:#one}\c!middletext}%
+ \setvalue{\namedlayoutelementhash{#vertical:#horizontal}\executeifdefined{\??layouttextcontent\v!text:#one}\c!middletext}%
{\page_layouts_process_element_double
\c!leftstyle \c!leftcolor \c!leftwidth {#two}%
\c!rightstyle\c!rightcolor\c!rightwidth{#two}}%
@@ -456,22 +459,22 @@
\c!rightstyle\c!rightcolor\c!rightwidth{#one}}%
\fi\fi\fi}
-\def\resettextcontent
+\unexpanded\def\resettextcontent
{\dotripleempty\page_layouts_reset_text_content}
\def\page_layouts_reset_text_content[#vertical][#horizontal][#tag]% header text middle
{\edef\currentlayoutelement{#vertical:#horizontal}%
\ifthirdargument
- \letvalueempty{\layoutelementhash\executeifdefined{\??layouttextcontent\c!text:#tag}\c!middletext}%
+ \letvalueempty{\layoutelementhash\executeifdefined{\??layouttextcontent\v!text:#tag}\c!middletext}%
\else\ifsecondargument
\resetlayoutelementparameter\c!lefttext
\resetlayoutelementparameter\c!middletext
\resetlayoutelementparameter\c!righttext
\fi\fi}
-\letvalue{\??layouttextcontent\c!middle:\c!text}\c!middletext
-\letvalue{\??layouttextcontent\c!left :\c!text}\c!lefttext
-\letvalue{\??layouttextcontent\c!right :\c!text}\c!righttext
+\letvalue{\??layouttextcontent\c!middle:\v!text}\c!middletext
+\letvalue{\??layouttextcontent\c!left :\v!text}\c!lefttext
+\letvalue{\??layouttextcontent\c!right :\v!text}\c!righttext
%D The placement of a whole line is handled by the next two
%D macros. These are hooked into the general purpose token
@@ -756,10 +759,12 @@
\page_layouts_reset_page_number_location
\ifx\p_strc_pagenumbers_location\empty
% set otherwise
+ \else\ifx\p_strc_pagenumbers_location\v!none
+ % set otherwise
\else
\page_layouts_identify_page_number_location
\page_layouts_set_page_number_location
- \fi
+ \fi\fi
\fi}
\def\page_layouts_place_page_number_left % historic
diff --git a/tex/context/base/pdfr-def.mkii b/tex/context/base/pdfr-def.mkii
index 7554bda9e..b3f67b93f 100644
--- a/tex/context/base/pdfr-def.mkii
+++ b/tex/context/base/pdfr-def.mkii
@@ -1,4 +1,4 @@
-% filename : pdfr-def.tex
+% filename : pdfr-def.mkii
% comment : generated by mtxrun --script chars --pdf
% author : Hans Hagen, PRAGMA-ADE, Hasselt NL
% copyright: PRAGMA ADE / ConTeXt Development Team
diff --git a/tex/context/base/phys-dim.lua b/tex/context/base/phys-dim.lua
index e40d1eabb..7430b62d7 100644
--- a/tex/context/base/phys-dim.lua
+++ b/tex/context/base/phys-dim.lua
@@ -39,6 +39,7 @@ if not modules then modules = { } end modules ['phys-dim'] = {
-- RevPerSec = [[RPS]],
-- RevPerMin = [[RPM]],
+local rawset, next = rawset, next
local V, P, S, R, C, Cc, Cs, matchlpeg = lpeg.V, lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.match
local format, lower = string.format, string.lower
local appendlpeg = lpeg.append
@@ -49,12 +50,11 @@ local utfchar = utf.char
physics = physics or { }
physics.units = physics.units or { }
-local variables = interfaces.variables
-local v_reverse = variables.reverse
local allocate = utilities.storage.allocate
local context = context
local commands = commands
+local implement = interfaces.implement
local trace_units = false
local report_units = logs.reporter("units")
@@ -172,8 +172,8 @@ local p_c = (ddigitspace^1 * dskipperiod)^0 -- ___.
local p_c_dparser = math_one + math_two + dleader * p_c * dtrailer * dfinal
local c_p_dparser = math_one + math_two + dleader * c_p * dtrailer * dfinal
-function commands.digits(str,p_c)
- if p_c == v_reverse then
+local function makedigits(str,reverse)
+ if reverse then
matchlpeg(p_c_dparser,str)
else
matchlpeg(c_p_dparser,str)
@@ -286,18 +286,29 @@ local long_units = {
-- synonyms
- ["Metric Ton"] = "tonne",
+ MetricTon = "tonne",
Litre = "liter",
+ ["Metric Ton"] = "tonne",
+
-- non-SI units whose values must be obtained experimentally (Table 7)
- ["Electron Volt"] = "electronvolt",
+ AtomicMassUnit = "atomicmassunit",
+ AstronomicalUnit = "astronomicalunit",
+ ElectronVolt = "electronvolt",
Dalton = "dalton",
+
["Atomic Mass Unit"] = "atomicmassunit",
["Astronomical Unit"] = "astronomicalunit",
+ ["Electron Volt"] = "electronvolt",
-- special cases (catch doubles, okay, a bit over the top)
+ DegreesCelsius = "celsius",
+ DegreesFahrenheit = "fahrenheit",
+ DegreeCelsius = "celsius",
+ DegreeFahrenheit = "fahrenheit",
+
["Degrees Celsius"] = "celsius",
["Degrees Fahrenheit"] = "fahrenheit",
["Degree Celsius"] = "celsius",
@@ -322,12 +333,14 @@ local long_units = {
Hg = "mercury",
-- ["Millimetre Of Mercury"] = [[mmHg]],
Angstrom = "angstrom", -- strictly Ångström
- ["Nautical Mile"] = "nauticalmile",
+ NauticalMile = "nauticalmile",
Barn = "barn",
Knot = "knot",
Neper = "neper",
Bel = "bel", -- in practice only decibel used
+ ["Nautical Mile"] = "nauticalmile",
+
-- other non-SI units from CGS system (Table 9)
Erg = "erg",
@@ -506,20 +519,20 @@ local packaged_units = {
-- rendering:
-local unitsPUS = context.unitsPUS
-local unitsPU = context.unitsPU
-local unitsPS = context.unitsPS
-local unitsP = context.unitsP
-local unitsUS = context.unitsUS
-local unitsU = context.unitsU
-local unitsS = context.unitsS
-local unitsO = context.unitsO
-local unitsN = context.unitsN
-local unitsC = context.unitsC
-local unitsQ = context.unitsQ
-local unitsNstart = context.unitsNstart
-local unitsNstop = context.unitsNstop
-local unitsNspace = context.unitsNspace
+local ctx_unitsPUS = context.unitsPUS
+local ctx_unitsPU = context.unitsPU
+local ctx_unitsPS = context.unitsPS
+local ctx_unitsP = context.unitsP
+local ctx_unitsUS = context.unitsUS
+local ctx_unitsU = context.unitsU
+local ctx_unitsS = context.unitsS
+local ctx_unitsO = context.unitsO
+local ctx_unitsN = context.unitsN
+local ctx_unitsC = context.unitsC
+local ctx_unitsQ = context.unitsQ
+local ctx_unitsNstart = context.unitsNstart
+local ctx_unitsNstop = context.unitsNstop
+local ctx_unitsNspace = context.unitsNspace
local labels = languages.data.labels
@@ -600,7 +613,7 @@ labels.units = allocate {
electronvolt = { labels = { en = [[eV]] } },
dalton = { labels = { en = [[Da]] } },
atomicmassunit = { labels = { en = [[u]] } },
- astronomicalunit = { labels = { en = [[ua]] } },
+ astronomicalunit = { labels = { en = [[au]] } },
bar = { labels = { en = [[bar]] } },
angstrom = { labels = { en = [[Å]] } }, -- strictly Ångström
nauticalmile = { labels = { en = [[M]] } },
@@ -664,28 +677,28 @@ local function dimpus(p,u,s)
if p ~= "" then
if u ~= "" then
if s ~= "" then
- unitsPUS(p,u,s)
+ ctx_unitsPUS(p,u,s)
else
- unitsPU(p,u)
+ ctx_unitsPU(p,u)
end
elseif s ~= "" then
- unitsPS(p,s)
+ ctx_unitsPS(p,s)
else
- unitsP(p)
+ ctx_unitsP(p)
end
else
if u ~= "" then
if s ~= "" then
- unitsUS(u,s)
+ ctx_unitsUS(u,s)
-- elseif c then
- -- unitsC(u)
+ -- ctx_unitsC(u)
else
- unitsU(u)
+ ctx_unitsU(u)
end
elseif s ~= "" then
- unitsS(s)
+ ctx_unitsS(s)
else
- unitsP(p)
+ ctx_unitsP(p)
end
end
end
@@ -699,7 +712,7 @@ local function dimop(o)
report_units("operator %a",o)
end
if o then
- unitsO(o)
+ ctx_unitsO(o)
end
end
@@ -709,7 +722,7 @@ local function dimsym(s)
end
s = symbol_units[s] or s
if s then
- unitsC(s)
+ ctx_unitsC(s)
end
end
@@ -719,7 +732,7 @@ local function dimpre(p)
end
p = packaged_units[p] or p
if p then
- unitsU(p)
+ ctx_unitsU(p)
end
end
@@ -789,7 +802,7 @@ local function update_parsers() -- todo: don't remap utf sequences
* (V("packaged") / dimpre)
* V("somespace"),
-- someunknown = V("somespace")
- -- * (V("nospace")/unitsU)
+ -- * (V("nospace")/ctx_unitsU)
-- * V("somespace"),
--
combination = V("longprefix") * V("longunit") -- centi meter
@@ -798,22 +811,32 @@ local function update_parsers() -- todo: don't remap utf sequences
+ V("nothing") * V("shortunit")
+ V("longprefix") * V("shortunit") -- centi m
+ V("shortprefix") * V("longunit"), -- c meter
+
+-- combination = ( V("longprefix") -- centi meter
+-- + V("nothing")
+-- ) * V("longunit")
+-- + ( V("shortprefix") -- c m
+-- + V("nothing")
+-- + V("longprefix")
+-- ) * V("shortunit") -- centi m
+-- + ( V("shortprefix") -- c meter
+-- ) * V("longunit"),
+
+
dimension = V("somespace")
* (
V("packaged") / dimpre
+ (V("longsuffix") * V("combination")) / dimspu
+ (V("combination") * (V("shortsuffix") + V("nothing"))) / dimpus
)
- * (V("qualifier") / unitsQ)^-1
+ * (V("qualifier") / ctx_unitsQ)^-1
* V("somespace"),
operator = V("somespace")
* ((V("longoperator") + V("shortoperator")) / dimop)
* V("somespace"),
snippet = V("dimension")
+ V("somesymbol"),
- unit = (
- V("snippet")
- * (V("operator") * V("snippet"))^0
+ unit = ( V("snippet") * (V("operator") * V("snippet"))^0
+ V("somepackaged")
)^1,
}
@@ -824,13 +847,13 @@ local function update_parsers() -- todo: don't remap utf sequences
local number = Cs( P("$") * (1-P("$"))^1 * P("$")
+ P([[\m{]]) * (1-P("}"))^1 * P("}")
+ (1-R("az","AZ")-P(" "))^1 -- todo: catch { } -- not ok
- ) / unitsN
+ ) / ctx_unitsN
- local start = Cc(nil) / unitsNstart
- local stop = Cc(nil) / unitsNstop
- local space = Cc(nil) / unitsNspace
+ local start = Cc(nil) / ctx_unitsNstart
+ local stop = Cc(nil) / ctx_unitsNstop
+ local space = Cc(nil) / ctx_unitsNspace
- -- todo: avoid \unitsNstart\unitsNstop (weird that it can happen .. now catched at tex end)
+ -- todo: avoid \ctx_unitsNstart\ctx_unitsNstop (weird that it can happen .. now caught at the tex end)
local p_c_combinedparser = P { "start",
number = start * dleader * (p_c_dparser + number) * stop,
@@ -853,7 +876,7 @@ local p_c_parser = nil
local c_p_parser = nil
local dirty = true
-function commands.unit(str,p_c)
+local function makeunit(str,reverse)
if dirty then
if trace_units then
report_units("initializing parser")
@@ -862,7 +885,7 @@ function commands.unit(str,p_c)
dirty = false
end
local ok
- if p_c == v_reverse then
+ if reverse then
ok = matchlpeg(p_c_parser,str)
else
ok = matchlpeg(c_p_parser,str)
@@ -908,7 +931,7 @@ local mapping = {
packaged = "packaged",
}
-function commands.registerunit(category,list)
+local function registerunit(category,list)
if not list or list == "" then
list = category
category = "unit"
@@ -921,3 +944,11 @@ function commands.registerunit(category,list)
end
-- inspect(tables)
end
+
+physics.units.registerunit = registerunit
+
+implement { name = "digits_normal", actions = makedigits, arguments = "string" }
+implement { name = "digits_reverse", actions = makedigits, arguments = { "string", true } }
+implement { name = "unit_normal", actions = makeunit, arguments = "string" }
+implement { name = "unit_reverse", actions = makeunit, arguments = { "string", true } }
+implement { name = "registerunit", actions = registerunit, arguments = { "string", "string" } }
diff --git a/tex/context/base/phys-dim.mkiv b/tex/context/base/phys-dim.mkiv
index 3de6b2344..232edc2fc 100644
--- a/tex/context/base/phys-dim.mkiv
+++ b/tex/context/base/phys-dim.mkiv
@@ -246,12 +246,12 @@
\unexpanded\def\phys_digits_indeed#1%
{\dontleavehmode
\begingroup
- \ctxcommand{digits(\!!bs\detokenize{#1}\!!es,"\ifcase\c_phys_digits_order \v!normal\else\v!reverse\fi")}%
+ \ifcase\c_phys_digits_order\expandafter\clf_digits_normal\else\expandafter\clf_digits_reverse\fi{\detokenize{#1}}%
\endgroup
\settrue\c_phys_units_dospace}
\unexpanded\def\digits
- {\doifnextbgroupelse\phys_digits_argument\phys_digits_spaced}
+ {\doifelsenextbgroup\phys_digits_argument\phys_digits_spaced}
\def\phys_digits_argument#1%
{\phys_digits_indeed{#1}}
@@ -500,7 +500,8 @@
\to \everyunits
\unexpanded\def\phys_units_indeed#1%
- {\ctxcommand{unit(\!!bs\detokenize{#1}\!!es,"\unitparameter\c!order")}}
+ {\edef\p_order{\unitparameter\c!order}%
+ \ifx\p_order\v!reverse\expandafter\clf_unit_reverse\else\expandafter\clf_unit_normal\fi{\detokenize{#1}}}
\unexpanded\def\unitsPUS#1#2#3{\phys_units_next\prefixtext{#1}\unittext{#2}\unitsraise{\suffixtext{#3}}\c_phys_units_state\plusone} % suffix
\unexpanded\def\unitsPU #1#2{\phys_units_next\prefixtext{#1}\unittext{#2}\c_phys_units_state\plusthree} % unit
@@ -578,7 +579,7 @@
\let\unitsNstartindeed\unitsNstart
\unexpanded\def\unitsNstart
- {\doifnextcharelse\unitsNstop\gobbleoneargument\unitsNstartindeed}
+ {\doifelsenextchar\unitsNstop\gobbleoneargument\unitsNstartindeed}
% End of hack.
@@ -682,10 +683,10 @@
\definelabelclass [prefix] [2]
\definelabelclass [suffix] [2] % This is only a label because we want to show them in a table.
-\ctxcommand{definelabels("prefix", "prefixes" )}
-\ctxcommand{definelabels("unit", "units" )}
-\ctxcommand{definelabels("operator","operators")}
-\ctxcommand{definelabels("suffix", "suffixes" )}
+\clf_definelabels{prefix}{prefixes}\s!false\relax
+\clf_definelabels{unit}{units}\s!false\relax
+\clf_definelabels{operator}{operators}\s!false\relax
+\clf_definelabels{suffix}{suffixes}\s!false\relax
%D You can define additional units:
%D
@@ -716,7 +717,7 @@
{\dodoubleempty\phys_units_register}
\def\phys_units_register[#1][#2]%
- {\ctxcommand{registerunit(\!!bs#1\!!es,\!!bs#2\!!es)}}
+ {\clf_registerunit{#1}{#2}}
%D You can generate a list as follows:
%D
diff --git a/tex/context/base/ppchtex.mkiv b/tex/context/base/ppchtex.mkiv
index 0f42f91ce..d1167d414 100644
--- a/tex/context/base/ppchtex.mkiv
+++ b/tex/context/base/ppchtex.mkiv
@@ -100,11 +100,11 @@
\newconstant\chemicaldrawingmode
-\doifdefinedelse{beginpicture} % PiCTeX
- {\doifdefinedelse{startMPdrawing}
+\doifelsedefined{beginpicture} % PiCTeX
+ {\doifelsedefined{startMPdrawing}
{\chemicaldrawingmode\plustwo } % MetaPost
{\chemicaldrawingmode\zerocount}} % raw
- {\doifdefinedelse{psaxes}
+ {\doifelsedefined{psaxes}
{\chemicaldrawingmode\plusone } % PSTricks
{\chemicaldrawingmode\plusthree}} % unknown
@@ -387,7 +387,7 @@
{\def\maxchemical{#1}}
\def\doifchemicalnumber#1#2#3%
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\ifnum#1>\maxchemical\relax
\writestatus{ppchtex}{number #1 is skipped}%
\else
@@ -682,7 +682,7 @@
\edef\@@chemicaltop {\the\!!countc}%
\edef\@@chemicalbottom{\the\!!countd}%
%
- \doifinsetelse\v!on{\@@chemicalframe,\@@chemicalaxis}
+ \doifelseinset\v!on{\@@chemicalframe,\@@chemicalaxis}
{\def\@@chemicalborder{\chemicalframe}}
{\def\@@chemicalborder{\normalchemicalframe}}%
%
@@ -867,14 +867,14 @@
\def\chemicalrepeat {1}
\def\redoprocesschemical[#1#2]%
- {\doifinstringelse{#1}{0123456789.}
+ {\doifelseinstring{#1}{0123456789.}
{\edef\chemicalrepeat{\chemicalrepeat#1}%
\redoprocesschemical[#2]}
{\processchemical[#1#2]%
\def\chemicalrepeat{1}}}
\def\doprocesschemical[#1#2]#3%
- {\doifinstringelse{#1}{0123456789.}
+ {\doifelseinstring{#1}{0123456789.}
{\def\chemicalrepeat{#1}%
\redoprocesschemical[#2]}
{#3}}
@@ -891,9 +891,9 @@
\divide\dimen0 by \@@localchemicalscale
\!!counta=\dimen0
\def\doprocess[##1##2]%
- {\doifinstringelse{##1}{128}
+ {\doifelseinstring{##1}{128}
{\edef\chemicaloffset{\the\!!counta}}
- {\doifinstringelse{##1}{456}
+ {\doifelseinstring{##1}{456}
{\edef\chemicaloffset{-\the\!!counta}}
{\doifelse{##1}{0}
{\edef\chemicaloffset{0}}
@@ -917,9 +917,9 @@
\dimen0=.25\wd0
\divide\dimen0 by \@@localchemicalscale
\!!counta=\dimen0
- \doifinstringelse{#1}{128}
+ \doifelseinstring{#1}{128}
{\edef\chemicaloffset{\the\!!counta}}
- {\doifinstringelse{#1}{456}
+ {\doifelseinstring{#1}{456}
{\edef\chemicaloffset{-\the\!!counta}}
{\doifelse{#1}{0}
{\edef\chemicaloffset{0}}
@@ -959,7 +959,7 @@
\setvalue{\s!angle4.#1}{\dosetchemicalangle{#5}}}
\def\chemicalrotate[#1]%
- {\doifdefinedelse{\s!mirror#1}
+ {\doifelsedefined{\s!mirror#1}
{\getvalue{\s!rotate\chemicalrotation.#1\getvalue{\s!mirror#1}}%
\getvalue{\s!angle\chemicalrotation.#1\getvalue{\s!mirror#1}}}
{\getvalue{\s!rotate\chemicalrotation.#1}%
@@ -984,7 +984,7 @@
\def\processchemicalrotation#1%
{\def\doprocess[##1##2]%
- {\doifnumberelse{##1}
+ {\doifelsenumber{##1}
{\def\chemicalrotation{##1}}
{\unknownchemical{ROT#1}}}%
\doprocess[#1]}
@@ -1067,9 +1067,11 @@
\def\dodoifsinglelocation#1#2\\#3%
{\ifx#2\relax#3\fi}
-\def\doifsinglelocationelse#1%
+\def\doifelsesinglelocation#1%
{\expandafter\dodoifsinglelocationelse#1\relax\\}
+\let\doifsinglelocationelse\doifelsesinglelocation
+
\def\putchemicaltext#1#2%
{\enablechemicalspecials
\ifchemicalpicture
@@ -1706,7 +1708,7 @@
\newif\ifinnerchemical
\def\dosimplechemical#1#2#3%
- {\doifdefinedelse{\??chemical\c!location}
+ {\doifelsedefined{\??chemical\c!location}
{\writestatus{ppchtex}{the {}{}-alternative is not permitted here}}
{\ifinnerchemical
\let\chemicalsign = \chemicalinnersign
@@ -2065,7 +2067,7 @@
HIGH=>\sethighsubscripts,
LOW=>\setlowsubscripts,
\s!default=>,
- \s!unknown=>\doifdefinedelse{\s!executechemical#1}
+ \s!unknown=>\doifelsedefined{\s!executechemical#1}
{\def\chemicalrotation{1}%
\def\chemicaloffset{0}%
\doifdefined{\s!executechemical#1}
@@ -2260,9 +2262,9 @@
{\dosingleargument\dodefinechemical}
\def\getpredefinedchemical#1%
- {\doifdefinedelse{\??chemical#1}
+ {\doifelsedefined{\??chemical#1}
{\getvalue{\??chemical#1}}
- {\doifdefinedelse{#1}
+ {\doifelsedefined{#1}
{\getvalue{#1}}
{\writestatus{ppchtex}{unknown chemical definition #1}}}}
diff --git a/tex/context/base/publ-aut.lua b/tex/context/base/publ-aut.lua
new file mode 100644
index 000000000..5a9d48551
--- /dev/null
+++ b/tex/context/base/publ-aut.lua
@@ -0,0 +1,876 @@
+if not modules then modules = { } end modules ['publ-aut'] = {
+ version = 1.001,
+ comment = "this module is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if not characters then
+ dofile(resolvers.findfile("char-def.lua"))
+ dofile(resolvers.findfile("char-ini.lua"))
+end
+
+local lpeg = lpeg
+
+local type, next, tostring = type, next, tostring
+local concat = table.concat
+local utfchar = utf.char
+local utfsub = utf.sub
+local formatters = string.formatters
+
+local P, S, C, V, Cs, Ct, Cg, Cf, Cc = lpeg.P, lpeg.S, lpeg.C, lpeg.V, lpeg.Cs, lpeg.Ct, lpeg.Cg, lpeg.Cf, lpeg.Cc
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local settings_to_hash = utilities.parsers.settings_to_hash
+
+local context = context
+----- commands = commands
+
+local implement = interfaces.implement
+local ctx_setmacro = interfaces.setmacro
+
+local publications = publications
+
+local datasets = publications.datasets
+local getcasted = publications.getcasted
+
+local allocate = utilities.storage.allocate
+
+local chardata = characters.data
+
+local trace_hashing = false trackers.register("publications.authorhash", function(v) trace_hashing = v end)
+
+local report = logs.reporter("publications","authors")
+local report_cite = logs.reporter("publications","cite")
+
+local v_last = interfaces.variables.last
+
+-- local function makesplitter(separator)
+-- return Ct { "start",
+-- start = (Cs((V("outer") + (1-separator))^1) + separator^1)^1,
+-- start = Cs(V("outer")) + (Cs((V("inner") + (1-separator))^1) + separator^1)^1,
+-- outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^0) * (P("}")/""),
+-- inner = P("{") * ((V("inner") + P(1-P("}")))^0) * P("}"),
+-- }
+-- end
+
+-- authorlist = { authorspec and authorspec and authorspec }
+-- authorspec = composedname
+-- authorspec = surnames, firstnames
+-- authorspec = von, surnames, firstnames
+-- authorspec = von, surnames, jr, firstnames
+-- authorspec = von, surnames, jr, firstnames, initials
+
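+-- For illustration (invented sample strings): "Hans Hagen" is a composedname,
+-- "Hagen, Hans" and "van der Hagen, Hans" are the comma separated variants, and
+-- something like "van der, Hagen, jr, Hans, H." spells out all five parts.
+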
+local space = lpegpatterns.whitespace
+local comma = P(",")
+local period = P(".")
+local dash = P("-")
+local firstcharacter = lpegpatterns.utf8byte
+local utf8character = lpegpatterns.utf8character
+local p_and = space^1 * (P("and") + P("&&") + P("++")) * space^1
+local p_comma = space^0 * comma * space^0
+local p_space = space^1
+local p_shortone = C((utf8character -dash-period)^1)
+local p_longone = C( utf8character) * (1-dash-period)^0
+
+local p_empty = P("{}")/"" * #(p_space^0 * (P(-1) + P(",")))
+
+local andsplitter = Ct { "start",
+ start = (Cs((V("inner") + (1-p_and))^1) + p_and)^1,
+ inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
+}
+
+local commasplitter = Ct { "start",
+ start = Cs(V("outer")) + (p_empty + Cs((V("inner") + (1-p_comma))^1) + p_comma)^1,
+ outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^1) * (P("}")/""),
+ inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
+}
+
+local spacesplitter = Ct { "start",
+ start = Cs(V("outer")) + (Cs((V("inner") + (1-p_space))^1) + p_space)^1,
+ outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^1) * (P("}")/""),
+ inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
+}
+
+local p_initial = p_shortone * period * dash^0
+ + p_longone * (period + dash + P(-1))
+local initialsplitter = p_initial * P(-1) + Ct((p_initial)^1)
+
+local optionsplitter = Cf(Ct("") * Cg(C((1-space)^1) * space^0 * Cc(true))^1,rawset)
+
+local function is_upper(str)
+ local first = lpegmatch(firstcharacter,str)
+ local okay = chardata[first]
+ return okay and okay.category == "lu"
+end
+
+-- local cleaner = Cs( ( P("{}")/"" + P(1) )^1 )
+
+local cache = allocate() -- 33% reuse on tugboat.bib
+local nofhits = 0
+local nofused = 0
+
+publications.authorcache = cache
+
+local function makeinitials(firstnames)
+ if firstnames and #firstnames > 0 then
+ local initials = { }
+ for i=1,#firstnames do
+ initials[i] = lpegmatch(initialsplitter,firstnames[i])
+ end
+ return initials
+ end
+end
+
+local authormap = allocate()
+publications.authormap = authormap
+
+local function splitauthor(author)
+ local detail = cache[author]
+ if detail then
+ return detail
+ end
+ local remapped = authormap[author]
+ if remapped then
+ report("remapping %a to %a",author,remapped)
+ local detail = cache[remapped]
+ if detail then
+ cache[author] = detail
+ return detail
+ end
+ end
+ local author = remapped or author
+ local firstnames, vons, surnames, initials, juniors, options
+ local split = lpegmatch(commasplitter,author)
+ local n = #split
+ detail = {
+ original = author,
+ snippets = n,
+ }
+ if n == 1 then
+ -- {First Middle von Last}
+ local words = lpegmatch(spacesplitter,author)
+ firstnames, vons, surnames = { }, { }, { }
+ local i, n = 1, #words
+ while i <= n do
+ local w = words[i]
+ if is_upper(w) then
+ firstnames[#firstnames+1], i = w, i + 1
+ else
+ break
+ end
+ end
+ while i <= n do
+ local w = words[i]
+ if is_upper(w) then
+ break
+ else
+ vons[#vons+1], i = w, i + 1
+ end
+ end
+ if i <= n then
+ while i <= n do
+ surnames[#surnames+1], i = words[i], i + 1
+ end
+ elseif #vons == 0 then
+ surnames[1] = firstnames[#firstnames]
+ firstnames[#firstnames] = nil
+ else
+ -- mess
+ end
+ if #surnames == 0 then
+ -- safeguard
+ firstnames = { }
+ vons = { }
+ surnames = { author }
+ else
+ initials = makeinitials(firstnames)
+ end
+ elseif n == 2 then
+ -- {Last, First}
+ -- {von Last, First}
+ firstnames, vons, surnames = { }, { }, { }
+ local words = lpegmatch(spacesplitter,split[1])
+ local i, n = 1, #words
+ while i <= n do
+ local w = words[i]
+ if is_upper(w) then
+ break
+ else
+ vons[#vons+1], i = w, i + 1
+ end
+ end
+ while i <= n do
+ surnames[#surnames+1], i = words[i], i + 1
+ end
+ --
+ local words = lpegmatch(spacesplitter,split[2])
+ local i, n = 1, #words
+ while i <= n do
+ local w = words[i]
+ if is_upper(w) then
+ firstnames[#firstnames+1], i = w, i + 1
+ else
+ break
+ end
+ end
+ while i <= n do
+ vons[#vons+1], i = words[i], i + 1
+ end
+ if surnames and firstnames and #surnames == 0 then
+ -- safeguard
+ surnames[1] = firstnames[#firstnames]
+ firstnames[#firstnames] = nil
+ end
+ initials = makeinitials(firstnames)
+ elseif n == 3 then
+        -- {von Last, Jr, First}
+ surnames = lpegmatch(spacesplitter,split[1])
+ juniors = lpegmatch(spacesplitter,split[2])
+ firstnames = lpegmatch(spacesplitter,split[3])
+ initials = makeinitials(firstnames)
+ elseif n == 4 then
+        -- {Von, Last, Jr, First}
+ vons = lpegmatch(spacesplitter,split[1])
+ surnames = lpegmatch(spacesplitter,split[2])
+ juniors = lpegmatch(spacesplitter,split[3])
+ firstnames = lpegmatch(spacesplitter,split[4])
+ initials = makeinitials(firstnames)
+ elseif n >= 5 then
+        -- {Von, Last, Jr, First, F.}
+        -- {Von, Last, Jr, First, F., options}
+ vons = lpegmatch(spacesplitter,split[1])
+ surnames = lpegmatch(spacesplitter,split[2])
+ juniors = lpegmatch(spacesplitter,split[3])
+ firstnames = lpegmatch(spacesplitter,split[4])
+ initials = lpegmatch(spacesplitter,split[5])
+ options = split[6]
+ if options then
+ options = lpegmatch(optionsplitter,options)
+ end
+ end
+ if firstnames and #firstnames > 0 then detail.firstnames = firstnames end
+ if vons and #vons > 0 then detail.vons = vons end
+ if surnames and #surnames > 0 then detail.surnames = surnames end
+ if initials and #initials > 0 then detail.initials = initials end
+ if juniors and #juniors > 0 then detail.juniors = juniors end
+ if options and next(options) then detail.options = options end
+ cache[author] = detail
+ nofhits = nofhits + 1
+ return detail
+end
+
+local function splitauthorstring(str)
+ if not str or str == "" then
+ return
+ end
+ nofused = nofused + 1
+
+ local remapped = authormap[str]
+ if remapped then
+ local detail = cache[remapped]
+ if detail then
+ cache[str] = detail
+ return { detail }
+ end
+ end
+
+ local authors = cache[str]
+ if authors then
+ return { authors } -- we assume one author
+ end
+
+ -- we could cache these too but it can become messy .. leave that for later
+
+ local authors = lpegmatch(andsplitter,str) or { } -- maybe fake an author
+ local nofauthors = #authors
+ for i=1,nofauthors do
+ authors[i] = splitauthor(authors[i])
+ end
+ if nofauthors > 1 and authors[nofauthors].original == "others" then
+ -- only the last one is looked at
+ authors[nofauthors] = nil
+ authors.others = true
+ end
+ return authors
+end
+
+publications.splitoneauthor = splitauthor
+publications.splitauthor = splitauthorstring
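+
+-- A rough usage sketch (invented input):
+--
+-- local detail = publications.splitoneauthor("van der Hagen, Hans")
+-- -- detail.vons       : { "van", "der" }
+-- -- detail.surnames   : { "Hagen" }
+-- -- detail.firstnames : { "Hans" }
+-- -- detail.initials   : { "H" }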
+
+local function the_initials(initials,symbol,connector)
+ if not symbol then
+ symbol = "."
+ end
+ if not connector then
+ connector = "-"
+ end
+ local result, r = { }, 0
+ for i=1,#initials do
+ local initial = initials[i]
+ if type(initial) == "table" then
+ local set, s = { }, 0
+ for i=1,#initial do
+ if i > 1 then
+ s = s + 1 ; set[s] = connector
+ end
+ s = s + 1 ; set[s] = initial[i]
+ s = s + 1 ; set[s] = symbol
+ end
+ r = r + 1 ; result[r] = concat(set)
+ else
+ r = r + 1 ; result[r] = initial .. symbol
+ end
+ end
+ return result
+end
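+
+-- A small sketch of the intended behaviour: the_initials({ "H", { "J", "P" } })
+-- should give { "H.", "J.-P." }, i.e. plain initials get the symbol appended and
+-- grouped ones are joined with the connector.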
+
+local ctx_btxsetconcat = context.btxsetconcat
+local ctx_btxsetoverflow = context.btxsetoverflow
+local ctx_btxsetinitials = context.btxsetinitials
+local ctx_btxsetfirstnames = context.btxsetfirstnames
+local ctx_btxsetvons = context.btxsetvons
+local ctx_btxsetsurnames = context.btxsetsurnames
+local ctx_btxsetjuniors = context.btxsetjuniors
+local ctx_btxciteauthorsetup = context.btxciteauthorsetup
+local ctx_btxlistauthorsetup = context.btxlistauthorsetup
+local ctx_btxsetauthorvariant = context.btxsetauthorvariant
+local ctx_btxstartauthor = context.btxstartauthor
+local ctx_btxstopauthor = context.btxstopauthor
+
+local concatstate = publications.concatstate
+local f_invalid = formatters[""]
+
+local currentauthordata = nil
+local currentauthorsymbol = nil
+
+local manipulators = typesetters.manipulators
+local splitmanipulation = manipulators.splitspecification
+local applymanipulation = manipulators.applyspecification
+local manipulatormethods = manipulators.methods
+
+local function value(i,field)
+ if currentauthordata then
+ local entry = currentauthordata[i]
+ if entry then
+ local value = entry[field]
+ if value and #value > 0 then
+ return value
+ end
+ end
+ end
+end
+
+implement { name = "btxcurrentfirstnames", arguments = "integer", actions = function(i) local v = value(i,"firstnames") if v then context(concat(v," ")) end end }
+implement { name = "btxcurrentinitials", arguments = "integer", actions = function(i) local v = value(i,"initials") if v then context(concat(the_initials(v,currentauthorsymbol))) end end }
+implement { name = "btxcurrentjuniors", arguments = "integer", actions = function(i) local v = value(i,"juniors") if v then context(concat(v," ")) end end }
+implement { name = "btxcurrentsurnames", arguments = "integer", actions = function(i) local v = value(i,"surnames") if v then context(concat(v," ")) end end }
+implement { name = "btxcurrentvons", arguments = "integer", actions = function(i) local v = value(i,"vons") if v then context(concat(v," ")) end end }
+
+local function btxauthorfield(i,field)
+ if currentauthordata then
+ local entry = currentauthordata[i]
+ if entry then
+ local manipulator, field = splitmanipulation(field)
+ local value = entry[field]
+ if not value or #value == 0 then
+                -- no value, no need for message
+ elseif manipulator then
+ for i=1,#value do
+ if i > 1 then
+ context(" ")
+ end
+ context(applymanipulation(manipulator,value) or value)
+ end
+ elseif field == "initials" then
+ context(concat(the_initials(value,currentauthorsymbol)))
+ else
+ context(concat(value," "))
+ end
+ end
+ end
+end
+
+-- This is somewhat tricky: an author is not always an author but
+-- can also be a title or key, depending on the (optional) set it's
+-- in. Also, authors can be combined with years and so on, and they
+-- might be called upon intermixed with other calls.
+
+local function btxauthor(dataset,tag,field,settings)
+ local split, usedfield, kind = getcasted(dataset,tag,field)
+ if kind == "author" then
+ local max = split and #split or 0
+ if max == 0 then
+ return
+ -- error
+ end
+ local absmax = max
+ local etallimit = tonumber(settings.etallimit) or 1000
+ local etaldisplay = tonumber(settings.etaldisplay) or etallimit
+ local etaloption = settings_to_hash(settings.etaloption or "")
+ local etallast = etaloption[v_last]
+ local combiner = settings.combiner
+ local symbol = settings.symbol
+ local index = settings.index
+ if not combiner or combiner == "" then
+ combiner = "normal"
+ end
+ if not symbol then
+ symbol = "."
+ end
+ local ctx_btxsetup = settings.kind == "cite" and ctx_btxciteauthorsetup or ctx_btxlistauthorsetup
+ if max > etallimit and (etaldisplay+(etallast and 1 or 0)) < max then
+ max = etaldisplay
+ else
+ etallast = false
+ end
+ currentauthordata = split
+ currentauthorsymbol = symbol
+
+ local function oneauthor(i,last,justone)
+ local author = split[i]
+ if index then
+ ctx_btxstartauthor(i,1,0)
+ elseif last then
+ ctx_btxstartauthor(i,1,0)
+ ctx_btxsetconcat(0)
+ ctx_btxsetauthorvariant(combiner)
+ else
+ local state = author.state or 0
+ ctx_btxstartauthor(i,max,state)
+ ctx_btxsetconcat(concatstate(i,max))
+ ctx_btxsetauthorvariant(combiner)
+ end
+ local initials = author.initials
+ if initials and #initials > 0 then
+ ctx_btxsetinitials() -- (concat(the_initials(initials,symbol)," "))
+ end
+ local firstnames = author.firstnames
+ if firstnames and #firstnames > 0 then
+ ctx_btxsetfirstnames() -- (concat(firstnames," "))
+ end
+ local vons = author.vons
+ if vons and #vons > 0 then
+ ctx_btxsetvons() -- (concat(vons," "))
+ end
+ local surnames = author.surnames
+ if surnames and #surnames > 0 then
+ ctx_btxsetsurnames() -- (concat(surnames," "))
+ end
+ local juniors = author.juniors
+ if juniors and #juniors > 0 then
+ ctx_btxsetjuniors() -- (concat(juniors," "))
+ end
+ if not index and i == max then
+ if split.others then
+ ctx_btxsetoverflow(1)
+ else
+ local overflow = #split - max
+ if overflow > 0 then
+ ctx_btxsetoverflow(overflow)
+ end
+ end
+ end
+ ctx_btxsetup(combiner)
+ ctx_btxstopauthor()
+ end
+ if index then
+ oneauthor(index)
+ elseif max == 1 then
+ oneauthor(1,false,true)
+ else
+ for i=1,max do
+ oneauthor(i)
+ end
+ if etallast then
+ oneauthor(absmax,true)
+ end
+ end
+ else
+        report("ignored field %a of tag %a, used field %a is not an author",field,tag,usedfield)
+ end
+end
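+
+-- A worked example of the et al logic above (invented numbers): with five authors,
+-- etallimit=3 and etaldisplay=2, max drops to 2, so only the first two authors
+-- reach the setups and the overflow becomes 5 - 2 = 3; adding "last" to the
+-- etaloption then also typesets the final author.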
+
+implement {
+ name = "btxauthorfield",
+ actions = btxauthorfield,
+ arguments = { "integer", "string" }
+}
+
+implement {
+ name = "btxauthor",
+ actions = btxauthor,
+ arguments = {
+ "string",
+ "string",
+ "string",
+ {
+ { "combiner" },
+ { "kind" },
+ { "etallimit" },
+ { "etaldisplay" },
+ { "etaloption" },
+ { "symbol" },
+ }
+ }
+}
+
+local function components(snippet,short)
+ local vons = snippet.vons
+ local surnames = snippet.surnames
+ local initials = snippet.initials
+ local firstnames = not short and snippet.firstnames
+ local juniors = snippet.juniors
+ return
+ vons and #vons > 0 and concat(vons," ") or "",
+ surnames and #surnames > 0 and concat(surnames," ") or "",
+ initials and #initials > 0 and concat(the_initials(initials)," ") or "",
+ firstnames and #firstnames > 0 and concat(firstnames," ") or "",
+ juniors and #juniors > 0 and concat(juniors, " ") or ""
+end
+
+local collapsers = allocate { }
+
+publications.authorcollapsers = collapsers
+
+local function default(author) -- one author
+ local hash = author.hash
+ if hash then
+ return hash
+ end
+ local original = author.original
+ local vons = author.vons
+ local surnames = author.surnames
+ local initials = author.initials
+ local firstnames = author.firstnames
+ local juniors = author.juniors
+ local result = { }
+ local nofresult = 0
+ if vons and #vons > 0 then
+ for j=1,#vons do
+ nofresult = nofresult + 1
+ result[nofresult] = vons[j]
+ end
+ end
+ if surnames and #surnames > 0 then
+ for j=1,#surnames do
+ nofresult = nofresult + 1
+ result[nofresult] = surnames[j]
+ end
+ end
+ if initials and #initials > 0 then
+ initials = the_initials(initials)
+ for j=1,#initials do
+ nofresult = nofresult + 1
+ result[nofresult] = initials[j]
+ end
+ end
+ if firstnames and #firstnames > 0 then
+ for j=1,#firstnames do
+ nofresult = nofresult + 1
+ result[nofresult] = firstnames[j]
+ end
+ end
+ if juniors and #juniors > 0 then
+ for j=1,#juniors do
+ nofresult = nofresult + 1
+ result[nofresult] = juniors[j]
+ end
+ end
+ local hash = concat(result," ")
+ if trace_hashing then
+ report("hash: %s -> %s",original,hash)
+ end
+ author.hash = hash
+ return hash
+end
+
+local authorhashers = { }
+publications.authorhashers = authorhashers
+
+-- todo: some hashing
+
+local function name(authors)
+ if type(authors) == "table" then
+ local n = #authors
+ if n == 0 then
+ return ""
+ end
+ local result = { }
+ local nofresult = 0
+ for i=1,n do
+ local author = authors[i]
+ local surnames = author.surnames
+ if surnames and #surnames > 0 then
+ for j=1,#surnames do
+ nofresult = nofresult + 1
+ result[nofresult] = surnames[j]
+ end
+ end
+ end
+ return concat(result," ")
+ else
+ return authors
+ end
+end
+
+table.setmetatableindex(authorhashers,function(t,k)
+ t[k] = name
+ return name
+end)
+
+authorhashers.normal = function(authors)
+ if type(authors) == "table" then
+ local n = #authors
+ if n == 0 then
+ return ""
+ end
+ local result = { }
+ local nofresult = 0
+ for i=1,n do
+ local author = authors[i]
+ local vons = author.vons
+ local surnames = author.surnames
+ local firstnames = author.firstnames
+ local juniors = author.juniors
+ if vons and #vons > 0 then
+ for j=1,#vons do
+ nofresult = nofresult + 1
+ result[nofresult] = vons[j]
+ end
+ end
+ if surnames and #surnames > 0 then
+ for j=1,#surnames do
+ nofresult = nofresult + 1
+ result[nofresult] = surnames[j]
+ end
+ end
+ if firstnames and #firstnames > 0 then
+ for j=1,#firstnames do
+ nofresult = nofresult + 1
+ result[nofresult] = firstnames[j]
+ end
+ end
+ if juniors and #juniors > 0 then
+ for j=1,#juniors do
+ nofresult = nofresult + 1
+ result[nofresult] = juniors[j]
+ end
+ end
+ end
+ return concat(result," ")
+ else
+ return authors
+ end
+end
+
+authorhashers.normalshort = function(authors)
+ if type(authors) == "table" then
+ local n = #authors
+ if n == 0 then
+ return ""
+ end
+ local result = { }
+ local nofresult = 0
+ for i=1,n do
+ local author = authors[i]
+ local vons = author.vons
+ local surnames = author.surnames
+ local initials = author.initials
+ local juniors = author.juniors
+ if vons and #vons > 0 then
+ for j=1,#vons do
+ nofresult = nofresult + 1
+ result[nofresult] = vons[j]
+ end
+ end
+ if surnames and #surnames > 0 then
+ for j=1,#surnames do
+ nofresult = nofresult + 1
+ result[nofresult] = surnames[j]
+ end
+ end
+ if initials and #initials > 0 then
+ initials = the_initials(initials)
+ for j=1,#initials do
+ nofresult = nofresult + 1
+ result[nofresult] = initials[j]
+ end
+ end
+ if juniors and #juniors > 0 then
+ for j=1,#juniors do
+ nofresult = nofresult + 1
+ result[nofresult] = juniors[j]
+ end
+ end
+ end
+ return concat(result," ")
+ else
+ return authors
+ end
+end
+
+authorhashers.normalinverted = authorhashers.normal
+authorhashers.invertedshort = authorhashers.normalshort
+
+local p_clean = Cs ( (
+ P("\\btxcmd") / "" -- better keep the argument
+ + S("`~!@#$%^&*()_-+={}[]:;\"\'<>,.?/|\\") / ""
+ + lpeg.patterns.utf8character
+ )^1)
+
+authorhashers.short = function(authors)
+    -- a short is a really dumb hardcoded kind of tag and we only support
+ -- this one because some users might expect it, not because it makes
+ -- sense
+ if type(authors) == "table" then
+ local n = #authors
+ if n == 0 then
+ return "unk"
+ elseif n == 1 then
+ local surnames = authors[1].surnames
+ if not surnames or #surnames == 0 then
+ return "err"
+ else
+ local s = surnames[1]
+ local c = lpegmatch(p_clean,s)
+ if s ~= c then
+ report_cite("name %a cleaned to %a for short construction",s,c)
+ end
+ return utfsub(c,1,3)
+ end
+ else
+ local t = { }
+ for i=1,n do
+ if i > 3 then
+ t[#t+1] = "+" -- indeed
+ break
+ end
+ local surnames = authors[i].surnames
+ if not surnames or #surnames == 0 then
+ t[#t+1] = "?"
+ else
+ local s = surnames[1]
+ local c = lpegmatch(p_clean,s)
+ if s ~= c then
+ report_cite("name %a cleaned to %a for short construction",s,c)
+ end
+ t[#t+1] = utfsub(c,1,1)
+ end
+ end
+ return concat(t)
+ end
+ else
+ return utfsub(authors,1,3)
+ end
+end
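+
+-- An informal sketch of these short tags (invented names): a single author "Knuth"
+-- gives "Knu", while four authors Knuth, Lamport, Hagen and Hoekwater give "KLH+",
+-- as at most three surname initials are used and a trailing "+" marks the rest.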
+
+collapsers.default = default
+
+local function authorwriter(key,index)
+ if not key then
+ return ""
+ end
+ if type(key) == "string" then
+ return key
+ end
+ local n = #key
+ if n == 0 then
+ return ""
+ end
+ if index then
+ if not key[index] then
+ return ""
+ end
+ elseif n == 1 then
+ index = 1
+ end
+ if index then
+ local author = key[index]
+ local options = author.options
+ if options then
+ for option in next, options do
+ local collapse = collapsers[option]
+ if collapse then
+ return collapse(author)
+ end
+ end
+ end
+ local hash = default(author)
+ -- if trace_hashing then
+ -- report("hash: %s",hash)
+ -- end
+ return hash
+ end
+ local t = { }
+ local s = 0
+ for i=1,n do
+ local author = key[i]
+ local options = author.options
+ s = s + 1
+ if options then
+ local done = false
+ for option in next, options do
+ local collapse = collapsers[option]
+ if collapse then
+ t[s] = collapse(author)
+ done = true
+ end
+ end
+ if not done then
+ t[s] = default(author)
+ end
+ else
+ t[s] = default(author)
+ end
+ end
+ local hash = concat(t," & ")
+ -- if trace_hashing then
+ -- report("hash: %s",hash)
+ -- end
+ return hash
+end
+
+local function writer(key)
+ return authorwriter(key) -- discard extra arguments in the caller
+end
+
+publications.writers .author = writer
+publications.casters .author = splitauthorstring
+publications.components.author = components
+
+-- sharedmethods.author = {
+-- { field = "key", default = "", unknown = "" },
+-- { field = "author", default = "", unknown = "" },
+-- { field = "title", default = "", unknown = "" },
+-- }
+
+-- Analysis of the APA by Alan:
+--
+-- first : key author editor publisher title journal volume number pages
+-- second: year suffix title month day journal volume number
+
+publications.sortmethods.authoryear = {
+ sequence = {
+ -- { field = "key", default = "ZZZZ", unknown = "ZZZZ" },
+ { field = "author", default = "", unknown = "" },
+ { field = "year", default = "9998", unknown = "9999" },
+ -- { field = "suffix", default = " ", unknown = " " },
+ { field = "month", default = "13", unknown = "14" },
+ { field = "day", default = "32", unknown = "33" },
+ { field = "journal", default = "", unknown = "" },
+ { field = "volume", default = "", unknown = "" },
+ -- { field = "number", default = "", unknown = "" },
+ { field = "pages", default = "", unknown = "" },
+ { field = "title", default = "", unknown = "" },
+ { field = "index", default = "", unknown = "" },
+ },
+}
+
+implement {
+ name = "btxremapauthor",
+ arguments = { "string", "string" },
+ actions = function(k,v)
+ publications.authormap[k] = v
+ end
+}
diff --git a/tex/context/base/publ-dat.lua b/tex/context/base/publ-dat.lua
new file mode 100644
index 000000000..36ba15000
--- /dev/null
+++ b/tex/context/base/publ-dat.lua
@@ -0,0 +1,1194 @@
+if not modules then modules = { } end modules ['publ-dat'] = {
+ version = 1.001,
+    comment   = "this module is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: strip the @ in the lpeg instead of on do_definition and do_shortcut
+-- todo: store bibroot and bibrootdt
+-- todo: dataset = datasets[dataset] => current = datasets[dataset]
+-- todo: maybe split this file
+
+--[[ldx--
+This is a prelude to integrated bibliography support. This file just loads
+bibtex files and converts them to xml so that we can access the content
+in a convenient way. Actually handling the data takes place elsewhere.
+--ldx]]--
+
+if not characters then
+ dofile(resolvers.findfile("char-utf.lua"))
+ dofile(resolvers.findfile("char-tex.lua"))
+end
+
+local chardata = characters.data
+local lowercase = characters.lower
+
+local lower, find, sub = string.lower, string.find, string.sub
+local concat, copy, tohash = table.concat, table.copy, table.tohash
+local next, type, rawget = next, type, rawget
+local utfchar = utf.char
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local textoutf = characters and characters.tex.toutf
+local settings_to_hash, settings_to_array = utilities.parsers.settings_to_hash, utilities.parsers.settings_to_array
+local formatters = string.formatters
+local sortedkeys, sortedhash, keys = table.sortedkeys, table.sortedhash, table.keys
+local xmlcollected, xmltext, xmlconvert = xml.collected, xml.text, xml.convert
+local setmetatableindex = table.setmetatableindex
+
+-- todo: more allocate
+
+local P, R, S, V, C, Cc, Cs, Ct, Carg, Cmt, Cp = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Carg, lpeg.Cmt, lpeg.Cp
+
+local p_whitespace = lpegpatterns.whitespace
+local p_utf8character = lpegpatterns.utf8character
+
+local trace = false trackers.register("publications", function(v) trace = v end)
+local trace_duplicates  = true  trackers.register("publications.duplicates", function(v) trace_duplicates = v end)
+
+local report = logs.reporter("publications")
+local report_duplicates = logs.reporter("publications","duplicates")
+
+local allocate = utilities.storage.allocate
+
+local commands = commands
+local implement = interfaces.implement
+
+publications = publications or { }
+local publications = publications
+
+local datasets = publications.datasets or { }
+publications.datasets = datasets
+
+local writers = publications.writers or { }
+publications.writers = writers
+
+local tables = publications.tables or { }
+publications.tables = tables
+
+publications.statistics = publications.statistics or { }
+local publicationsstats = publications.statistics
+
+local loaders = publications.loaders or { }
+publications.loaders = loaders
+
+local casters = { }
+publications.casters = casters
+
+-- local sorters = { }
+-- publications.sorters = sorters
+--
+-- local indexers = { }
+-- publications.indexers = indexers
+
+local components = { }
+publications.components = components -- register components
+
+local enhancers = publications.enhancers or { }
+publications.enhancers = enhancers
+
+local enhancer = publications.enhancer or utilities.sequencers.new { arguments = "dataset" }
+publications.enhancer = enhancer
+
+utilities.sequencers.appendgroup(enhancer,"system") -- private
+
+publicationsstats.nofbytes = 0
+publicationsstats.nofdefinitions = 0
+publicationsstats.nofshortcuts = 0
+publicationsstats.nofdatasets = 0
+
+local privates = allocate {
+ category = true,
+ tag = true,
+ index = true,
+ suffix = true,
+ specification = true,
+}
+
+local specials = allocate {
+ key = true,
+ crossref = true,
+ keywords = true,
+ language = true,
+ comment = true,
+}
+
+local implicits = allocate {
+ category = "implicit",
+ tag = "implicit",
+ key = "implicit",
+ keywords = "implicit",
+ language = "implicit",
+ crossref = "implicit",
+}
+
+local origins = allocate {
+ "optional",
+ "extra",
+ "required",
+ "virtual",
+}
+
+local virtuals = allocate {
+ "authoryear",
+ "authoryears",
+ "authornum",
+ "num",
+ "suffix",
+}
+
+local defaulttypes = allocate {
+ author = "author",
+ editor = "author",
+ publisher = "author",
+ page = "pagenumber",
+ pages = "pagenumber",
+ keywords = "keyword",
+ doi = "url",
+ url = "url",
+}
+
+local defaultsets = allocate {
+ page = { "page", "pages" },
+}
+
+tables.implicits = implicits
+tables.origins = origins
+tables.virtuals = virtuals
+tables.types = defaulttypes
+tables.sets = defaultsets
+tables.privates = privates
+tables.specials = specials
+
+local variables = interfaces and interfaces.variables or setmetatableindex("self")
+
+local v_all = variables.all
+local v_default = variables.default
+
+if not publications.usedentries then
+ function publications.usedentries()
+ return { }
+ end
+end
+
+local xmlplaceholder = "<?xml version='1.0' standalone='yes'?>\n<bibtex></bibtex>"
+
+local defaultshortcuts = allocate {
+ jan = "1",
+ feb = "2",
+ mar = "3",
+ apr = "4",
+ may = "5",
+ jun = "6",
+ jul = "7",
+ aug = "8",
+ sep = "9",
+ oct = "10",
+ nov = "11",
+ dec = "12",
+}
+
+local space = p_whitespace^0
+local separator = space * "+" * space
+local l_splitter = lpeg.tsplitat(separator)
+local d_splitter = lpeg.splitat (separator)
+
+local unknownfield = function(t,k)
+ local v = "extra"
+ t[k] = v
+ return v
+end
+
+local unknowncategory = function(t,k)
+ local v = {
+ required = false,
+ optional = false,
+ virtual = false,
+ fields = setmetatableindex(unknownfield), -- this will remember them
+ types = unknowntypes,
+ sets = setmetatableindex(defaultsets), -- new, but rather small
+ }
+ t[k] = v
+ return v
+end
+
+local unknowntype = function(t,k)
+ local v = "string"
+ t[k] = v
+ return v
+end
+
+local default = {
+ name = name,
+ version = "1.00",
+ comment = "unknown specification.",
+ author = "anonymous",
+ copyright = "no one",
+ categories = setmetatableindex(unknowncategory),
+ types = setmetatableindex(defaulttypes,unknowntype),
+}
+
+-- maybe at some point we can have a handlers table with, per field,
+-- a found, fetch, ... method
+
+local function checkfield(specification,category,data)
+ local list = setmetatableindex({},implicits)
+ data.fields = list
+ data.category = category
+ local sets = data.sets or { }
+ for i=1,#origins do
+ local t = origins[i]
+ local d = data[t]
+ if d then
+ for i=1,#d do
+ local di = d[i]
+ di = sets[di] or di
+ if type(di) == "table" then
+ for i=1,#di do
+ list[di[i]] = t
+ end
+ else
+ list[di] = t
+ end
+ end
+ else
+ data[t] = { }
+ end
+ end
+ return data
+end
+
+local specifications = setmetatableindex(function(t,name)
+ if not name then
+ return default -- initializer
+ end
+ local filename = formatters["publ-imp-%s.lua"](name)
+ local fullname = resolvers.findfile(filename) or ""
+ if fullname == "" then
+ report("no data definition file %a for %a",filename,name)
+ return default
+ end
+ local specification = table.load(fullname)
+ if not specification then
+ report("invalid data definition file %a for %a",fullname,name)
+ return default
+ end
+ --
+ local categories = specification.categories
+ if not categories then
+ categories = { }
+ specification.categories = categories
+ end
+ setmetatableindex(categories,unknowncategory)
+ --
+ local types = specification.types
+ if not types then
+ types = defaulttypes
+ specification.types = types
+ end
+ setmetatableindex(types,unknowntype)
+ --
+ local fields = setmetatableindex(unknownfield)
+ specification.fields = fields
+ --
+ local virtual = specification.virtual
+ if virtual == nil then -- so false is valid
+ virtual = { }
+ elseif virtual == false then
+ virtual = { }
+    elseif type(virtual) ~= "table" then
+ virtual = virtuals
+ end
+ specification.virtual = virtual
+ specification.virtualfields = tohash(virtual)
+ --
+ for category, data in next, categories do
+ categories[category] = checkfield(specification,category,copy(data)) -- we make sure we have no clones
+ end
+ --
+ t[name] = specification
+ --
+ return specification
+end)
+
+publications.specifications = specifications
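+
+-- A minimal sketch of such a definition file (a hypothetical publ-imp-test.lua,
+-- only meant to illustrate the structure that gets loaded):
+--
+-- return {
+--     name       = "test",
+--     version    = "1.00",
+--     categories = {
+--         article = {
+--             required = { "author", "title", "year" },
+--             optional = { "pages", "note" },
+--         },
+--     },
+-- }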
+
+function publications.setcategory(target,category,data)
+ local specification = specifications[target]
+ specification.categories[category] = checkfield(specification,category,data)
+end
+
+function publications.parenttag(dataset,tag)
+ if not dataset or not tag then
+ report("error in specification, dataset %a, tag %a",dataset,tag)
+ elseif find(tag,"%+") then
+ local tags = lpegmatch(l_splitter,tag)
+ local parent = tags[1]
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ local details = current.details
+ local first = luadata[parent]
+ if first then
+ local detail = details[parent]
+ local children = detail.children
+ if not children then
+ children = { }
+ detail.children = children
+ end
+ -- add new ones but only once
+ for i=2,#tags do
+ local tag = tags[i]
+ for j=1,#children do
+ if children[j] == tag then
+ tag = false
+ end
+ end
+ if tag then
+ local entry = luadata[tag]
+ if entry then
+ local detail = details[tag]
+ children[#children+1] = tag
+ if detail.parent then
+ report("error in combination, dataset %a, tag %a, parent %a, ignored %a",dataset,tag,detail.parent,parent)
+ else
+ report("combining, dataset %a, tag %a, parent %a",dataset,tag,parent)
+ detail.parent = parent
+ end
+ end
+ end
+ end
+ return parent
+ end
+ end
+ return tag or ""
+end
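+
+-- So, assuming both entries exist, a combined tag like "alpha+beta" (invented tags)
+-- registers "beta" as a child of "alpha", records "alpha" as its parent in the
+-- details, and returns "alpha" as the tag actually used.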
+
+function publications.new(name)
+ publicationsstats.nofdatasets = publicationsstats.nofdatasets + 1
+ local dataset = {
+ name = name or "dataset " .. publicationsstats.nofdatasets,
+ nofentries = 0,
+ shortcuts = { },
+ luadata = { },
+ suffixes = { },
+ xmldata = xmlconvert(xmlplaceholder),
+ details = { },
+ ordered = { },
+ nofbytes = 0,
+ entries = nil, -- empty == all
+ sources = { },
+ loaded = { },
+ fields = { },
+ userdata = { },
+ used = { },
+ commands = { }, -- for statistical purposes
+ status = {
+ resources = false,
+ userdata = false,
+ },
+ specifications = {
+ -- used specifications
+ },
+ suffixed = false,
+ }
+ -- we delay details till we need it (maybe we just delay the
+ -- individual fields but that is tricky as there can be some
+    -- dependencies)
+ return dataset
+end
+
+setmetatableindex(datasets,function(t,k)
+ if type(k) == "table" then
+ return k -- so we can use this accessor as checker
+ else
+ local v = publications.new(k)
+ datasets[k] = v
+ return v
+ end
+end)
+
+local function getindex(dataset,luadata,tag)
+ local found = luadata[tag]
+ if found then
+ local index = found.index or 0
+ dataset.ordered[tag] = index
+ return index
+ else
+ local index = dataset.nofentries + 1
+ dataset.nofentries = index
+ dataset.ordered[index] = tag
+ return index
+ end
+end
+
+publications.getindex = getindex
+
+do
+
+ -- we apply some normalization
+
+ local space = S(" \t\n\r\f") -- / " "
+ local collapsed = space^1/" "
+ ----- csletter = R("az","AZ")
+ local csletter = lpegpatterns.csletter
+
+ ----- command = P("\\") * Cc("btxcmd{") * (R("az","AZ")^1) * Cc("}")
+ ----- command = P("\\") * (Carg(1) * C(R("az","AZ")^1) / function(list,c) list[c] = (list[c] or 0) + 1 return "btxcmd{" .. c .. "}" end)
+ ----- command = P("\\") * (Carg(1) * C(R("az","AZ")^1) * space^0 / function(list,c) list[c] = (list[c] or 0) + 1 return "btxcmd{" .. c .. "}" end)
+ local command = P("\\") * (Carg(1) * C(csletter^1) * space^0 / function(list,c) list[c] = (list[c] or 0) + 1 return "btxcmd{" .. c .. "}" end)
+ local whatever = P("\\") * P(" ")^1 / " "
+ + P("\\") * ( P("hbox") + P("raise") ) -- bah
+ local somemath = P("$") * ((1-P("$"))^1) * P("$") -- let's not assume nested math
+ ----- character = lpegpatterns.utf8character
+ local any = P(1)
+ local done = P(-1)
+ -- local one_l = P("{") / ""
+ -- local one_r = P("}") / ""
+ -- local two_l = P("{{") / ""
+ -- local two_r = P("}}") / ""
+ local zero_l_r = P("{}") / "" * #P(1)
+ local special = P("#") / "\\letterhash "
+
+ local filter_0 = S('\\{}#')
+ local filter_1 = (1-filter_0)^0 * filter_0
+ local filter_2 = Cs(
+ -- {{...}} ... {{...}}
+ -- two_l * (command + special + any - two_r - done)^0 * two_r * done +
+ -- one_l * (command + special + any - one_r - done)^0 * one_r * done +
+ (
+ somemath +
+ whatever +
+ command +
+ special +
+ collapsed +
+ zero_l_r +
+ any
+ )^0
+ )
+
+ -- Currently we expand shortcuts and for large ones (like the acknowledgements
+ -- in tugboat.bib) this is not that efficient. However, eventually strings get
+ -- hashed again.
+
+ local function do_shortcut(key,value,dataset)
+ publicationsstats.nofshortcuts = publicationsstats.nofshortcuts + 1
+ dataset.shortcuts[key] = value
+ end
+
+ -- todo: categories : metatable that lowers and also counts
+ -- todo: fields : metatable that lowers
+
+ local tags = table.setmetatableindex("table")
+
+ local function do_definition(category,tag,tab,dataset)
+ publicationsstats.nofdefinitions = publicationsstats.nofdefinitions + 1
+ if tag == "" then
+ tag = "no-tag-set"
+ end
+ local fields = dataset.fields
+ local luadata = dataset.luadata
+ local hashtag = tag
+ if luadata[tag] then
+ local t = tags[tag]
+ local d = dataset.name
+ local n = (t[d] or 0) + 1
+ t[d] = n
+ hashtag = tag .. "-" .. n
+ if trace_duplicates then
+ local p = { }
+ for k, v in sortedhash(t) do
+ p[#p+1] = formatters["%s:%s"](k,v)
+ end
+ report_duplicates("tag %a is present multiple times: % t, assigning hashtag %a",tag,p,hashtag)
+ end
+ end
+ local index = getindex(dataset,luadata,hashtag)
+ local entries = {
+ category = lower(category),
+ tag = tag,
+ index = index,
+ }
+ for i=1,#tab,2 do
+ local original = tab[i]
+ local normalized = fields[original]
+ if not normalized then
+ normalized = lower(original) -- we assume ascii fields
+ fields[original] = normalized
+ end
+ -- if entries[normalized] then
+ if rawget(entries,normalized) then
+ if trace_duplicates then
+ report_duplicates("redundant field %a is ignored for tag %a in dataset %a",normalized,tag,dataset.name)
+ end
+ else
+ local value = tab[i+1]
+ value = textoutf(value)
+ if lpegmatch(filter_1,value) then
+ value = lpegmatch(filter_2,value,1,dataset.commands) -- we need to start at 1 for { }
+ end
+ if normalized == "crossref" then
+ local parent = luadata[value]
+ if parent then
+ setmetatableindex(entries,parent)
+ else
+ -- warning
+ end
+ end
+ entries[normalized] = value
+ end
+ end
+ luadata[hashtag] = entries
+ end
+
+ local function resolve(s,dataset)
+ return dataset.shortcuts[s] or defaultshortcuts[s] or s -- can be number
+ end
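+
+    -- So a field like "month = jun" (an unquoted reference) is first looked up in
+    -- the @string shortcuts of the dataset and then in the default shortcuts above,
+    -- ending up as "6" here.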
+
+ local pattern = p_whitespace^0
+ * C(P("message") + P("warning") + P("error") + P("comment")) * p_whitespace^0 * P(":")
+ * p_whitespace^0
+ * C(P(1)^1)
+
+ local function do_comment(s,dataset)
+ local how, what = lpegmatch(pattern,s)
+ if how and what then
+ local t = string.splitlines(utilities.strings.striplines(what))
+ local b = file.basename(dataset.fullname or dataset.name or "unset")
+ for i=1,#t do
+ report("%s > %s : %s",b,how,t[i])
+ end
+ end
+ end
+
+ local percent = P("%")
+ local start = P("@")
+ local comma = P(",")
+ local hash = P("#")
+ local escape = P("\\")
+ local single = P("'")
+ local double = P('"')
+ local left = P('{')
+ local right = P('}')
+ local both = left + right
+ local lineending = S("\n\r")
+ local space = S(" \t\n\r\f") -- / " "
+ local spacing = space^0
+ local equal = P("=")
+ ----- collapsed = (space^1)/ " "
+ local collapsed = p_whitespace^1/" "
+ local nospaces = p_whitespace^1/""
+
+ local p_left = (p_whitespace^0 * left) / ""
+ local p_right = (right * p_whitespace^0) / ""
+
+ local balanced = P {
+ [1] = ((escape * (left+right)) + (collapsed + 1 - (left+right))^1 + V(2))^0,
+ [2] = left * V(1) * right,
+ }
+
+ -- local unbalanced = P {
+ -- [1] = left * V(2) * right,
+ -- [2] = ((escape * (left+right)) + (collapsed + 1 - (left+right))^1 + V(1))^0,
+ -- }
+
+ local unbalanced = (left/"") * balanced * (right/"") * P(-1)
+
+ local keyword = C((R("az","AZ","09") + S("@_:-"))^1)
+ local key = C((1-space-equal)^1)
+ local tag = C((1-space-comma)^0)
+ local reference = keyword
+ local category = C((1-space-left)^1)
+ local s_quoted = ((escape*single) + collapsed + (1-single))^0
+ local d_quoted = ((escape*double) + collapsed + (1-double))^0
+
+ local b_value = p_left * balanced * p_right
+ -- local u_value = p_left * unbalanced * p_right -- get rid of outer { }
+ -- local s_value = (single/"") * (u_value + s_quoted) * (single/"")
+ -- local d_value = (double/"") * (u_value + d_quoted) * (double/"")
+ local s_value = (single/"") * (unbalanced + s_quoted) * (single/"")
+ local d_value = (double/"") * (unbalanced + d_quoted) * (double/"")
+ local r_value = reference * Carg(1) /resolve
+
+ local somevalue = d_value + b_value + s_value + r_value
+ local value = Cs((somevalue * ((spacing * hash * spacing)/"" * somevalue)^0))
+
+ value = value / function(s) return lpegmatch(lpegpatterns.stripper,s) end
+
+ local forget = percent^1 * (1-lineending)^0
+ local spacing = spacing * forget^0 * spacing
+ local assignment = spacing * key * spacing * equal * spacing * value * spacing
+ local definition = category * spacing * left * spacing * tag * spacing * comma * Ct((assignment * comma^0)^0) * spacing * right * Carg(1) / do_definition
+
+ local crapword = C((1-space-left)^1)
+ local shortcut = Cmt(crapword,function(_,p,s) return lower(s) == "string" and p end) * spacing * left * ((assignment * Carg(1))/do_shortcut * comma^0)^0 * spacing * right
+ local comment = Cmt(crapword,function(_,p,s) return lower(s) == "comment" and p end) * spacing * lpegpatterns.argument * Carg(1) / do_comment
+
+ local casecrap = #S("sScC") * (shortcut + comment)
+
+ local bibtotable = (space + forget + P("@") * (casecrap + definition) + 1)^0
+
+ -- todo \%
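+
+    -- A rough sketch of what the grammar produces for an invented entry like
+    -- @article{knuth1984, author = "Knuth, Donald", year = 1984}: do_definition
+    -- stores luadata["knuth1984"] = { category = "article", tag = "knuth1984",
+    -- index = <n>, author = "Knuth, Donald", year = "1984" }, with any \command
+    -- in a value rewritten to btxcmd{...}.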
+
+ -- loadbibdata -> dataset.luadata
+ -- loadtexdata -> dataset.luadata
+ -- loadluadata -> dataset.luadata
+
+ -- converttoxml -> dataset.xmldata from dataset.luadata
+
+ function publications.loadbibdata(dataset,content,source,kind)
+ if not source then
+ report("invalid source for dataset %a",dataset)
+ return
+ end
+ local current = datasets[dataset]
+ local size = #content
+ if size == 0 then
+ report("empty source %a for dataset %a",source,current.name)
+ else
+ report("adding bib data to set %a from source %a",current.name,source)
+ end
+ statistics.starttiming(publications)
+ publicationsstats.nofbytes = publicationsstats.nofbytes + size
+ current.nofbytes = current.nofbytes + size
+ if source then
+ table.insert(current.sources, { filename = source, checksum = md5.HEX(content) })
+ current.loaded[source] = kind or true
+ end
+ current.newtags = #current.luadata > 0 and { } or current.newtags
+ lpegmatch(bibtotable,content or "",1,current)
+ statistics.stoptiming(publications)
+ end
+
+end
+
+do
+
+ -- we could use xmlescape again
+
+ local cleaner_0 = S('<>&')
+ local cleaner_1 = (1-cleaner_0)^0 * cleaner_0
+ local cleaner_2 = Cs ( (
+        P("<") / "&lt;"  +
+        P(">") / "&gt;"  +
+        P("&") / "&amp;" +
+ P(1)
+ )^0)
+
+ local compact = false -- can be a directive but then we also need to deal with newlines ... not now
+
+ function publications.converttoxml(dataset,nice,dontstore,usedonly,subset) -- we have fields !
+ local current = datasets[dataset]
+ local luadata = subset or (current and current.luadata)
+ if luadata then
+ statistics.starttiming(publications)
+ --
+ local result, r, n = { }, 0, 0
+ local usedonly = usedonly and publications.usedentries()
+ --
+            r = r + 1 ; result[r] = "<?xml version='1.0' standalone='yes'?>"
+            r = r + 1 ; result[r] = "<bibtex>"
+ --
+ if nice then -- will be default
+                local f_entry_start = formatters[" <entry tag='%s' category='%s' index='%s'>"]
+                local s_entry_stop  = " </entry>"
+                local f_field       = formatters["  <field name='%s'>%s</field>"]
+ for tag, entry in sortedhash(luadata) do
+ if not usedonly or usedonly[tag] then
+ r = r + 1 ; result[r] = f_entry_start(tag,entry.category,entry.index)
+ for key, value in sortedhash(entry) do
+ if key ~= "tag" and key ~= "category" and key ~= "index" then
+ if lpegmatch(cleaner_1,value) then
+ value = lpegmatch(cleaner_2,value)
+ end
+ if value ~= "" then
+ r = r + 1 ; result[r] = f_field(key,value)
+ end
+ end
+ end
+ r = r + 1 ; result[r] = s_entry_stop
+ n = n + 1
+ end
+ end
+ else
+                local f_entry_start = formatters["<entry tag='%s' category='%s' index='%s'>"]
+                local s_entry_stop  = "</entry>"
+                local f_field       = formatters["<field name='%s'>%s</field>"]
+ for tag, entry in next, luadata do
+ if not usedonly or usedonly[tag] then
+ r = r + 1 ; result[r] = f_entry_start(entry.tag,entry.category,entry.index)
+ for key, value in next, entry do
+ if key ~= "tag" and key ~= "category" and key ~= "index" then
+ if lpegmatch(cleaner_1,value) then
+ value = lpegmatch(cleaner_2,value)
+ end
+ if value ~= "" then
+ r = r + 1 ; result[r] = f_field(key,value)
+ end
+ end
+ end
+ r = r + 1 ; result[r] = s_entry_stop
+ n = n + 1
+ end
+ end
+ end
+ --
+            r = r + 1 ; result[r] = "</bibtex>"
+ --
+ result = concat(result,nice and "\n" or nil)
+ --
+ if dontstore then
+ -- indeed
+ else
+ statistics.starttiming(xml)
+ current.xmldata = xmlconvert(result, {
+ resolve_entities = true,
+ resolve_predefined_entities = true, -- in case we have escaped entities
+             -- unify_predefined_entities   = true, -- &#38; -> &amp;
+ utfize_entities = true,
+ } )
+ statistics.stoptiming(xml)
+ if lxml then
+ lxml.register(formatters["btx:%s"](current.name),current.xmldata)
+ end
+ end
+ statistics.stoptiming(publications)
+ return result, n
+ end
+ end
+
+end
+
+do
+
+ local function resolvedname(dataset,filename)
+ local current = datasets[dataset]
+ if type(filename) ~= "string" then
+ report("invalid filename %a",tostring(filename))
+ end
+ local fullname = resolvers.findfile(filename,"bib")
+ if fullname == "" then
+ fullname = resolvers.findfile(filename) -- let's not be too picky
+ end
+ if not fullname or fullname == "" then
+ report("no file %a",filename)
+ current.fullname = filename
+ return current, false
+ else
+ current.fullname = fullname
+ return current, fullname
+ end
+ end
+
+ publications.resolvedname = resolvedname
+
+ local cleaner = false
+ local cleaned = false
+
+ function loaders.registercleaner(what,fullname)
+ if not fullname or fullname == "" then
+ report("no %s file %a",what,fullname)
+ return
+ end
+ local list = table.load(fullname)
+ if not list then
+ report("invalid %s file %a",what,fullname)
+ return
+ end
+ list = list.replacements
+ if not list then
+ report("no replacement table in %a",fullname)
+ return
+ end
+ if cleaned then
+ report("adding replacements from %a",fullname)
+ for k, v in next, list do
+ cleaned[k] = v
+ end
+ else
+ report("using replacements from %a",fullname)
+ cleaned = list
+ end
+ cleaner = true
+ end
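+
+    -- Such a file (hypothetical content) is expected to return something like
+    -- { replacements = { ["~"] = " " } }, i.e. a table mapping substrings in the
+    -- raw bib data onto their cleaned up form.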
+
+ function loaders.bib(dataset,filename,kind)
+ local dataset, fullname = resolvedname(dataset,filename)
+ if not fullname then
+ return
+ end
+ local data = io.loaddata(fullname) or ""
+ if data == "" then
+ report("empty file %a, nothing loaded",fullname)
+ return
+ end
+ if cleaner == true then
+ cleaner = Cs((lpeg.utfchartabletopattern(keys(cleaned)) / cleaned + p_utf8character)^1)
+ end
+ if cleaner ~= false then
+ data = lpegmatch(cleaner,data)
+ end
+ if trace then
+ report("loading file %a",fullname)
+ end
+ publications.loadbibdata(dataset,data,fullname,kind)
+ end
+
+ function loaders.lua(dataset,filename) -- if filename is a table we load that one
+ local current, data, fullname
+ if type(filename) == "table" then
+ current = datasets[dataset]
+ data = filename
+ else
+ dataset, fullname = resolvedname(dataset,filename)
+ if not fullname then
+ return
+ end
+ current = datasets[dataset]
+ data = table.load(fullname)
+ end
+ if data then
+ local luadata = current.luadata
+ -- we want the same index each run
+ for tag, entry in sortedhash(data) do
+ if type(entry) == "table" then
+ entry.index = getindex(current,luadata,tag)
+ entry.tag = tag
+ luadata[tag] = entry -- no cleaning yet
+ end
+ end
+ end
+ end
+
+    function loaders.buffer(dataset,name) -- name is a buffer name, not a filename
+ local current = datasets[dataset]
+ local barename = file.removesuffix(name)
+ local data = buffers.getcontent(barename) or ""
+ if data == "" then
+ report("empty buffer %a, nothing loaded",barename)
+ return
+ end
+ if trace then
+            report("loading buffer %a",barename)
+ end
+ publications.loadbibdata(current,data,barename,"bib")
+ end
+
+ function loaders.xml(dataset,filename)
+ local dataset, fullname = resolvedname(dataset,filename)
+ if not fullname then
+ return
+ end
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ local root = xml.load(fullname)
+ for bibentry in xmlcollected(root,"/bibtex/entry") do
+ local attributes = bibentry.at
+ local tag = attributes.tag
+ local entry = {
+ category = attributes.category,
+ tag = tag, -- afterwards also set, to prevent overload
+ index = 0, -- prelocated
+ }
+ for field in xmlcollected(bibentry,"/field") do
+ entry[field.at.name] = field.dt[1] -- no cleaning yet | xmltext(field)
+ end
+ entry.index = getindex(current,luadata,tag)
+ entry.tag = tag
+ luadata[tag] = entry
+ end
+ end
+
+ setmetatableindex(loaders,function(t,filetype)
+ local v = function(dataset,filename)
+ report("no loader for file %a with filetype %a",filename,filetype)
+ end
+ t[filetype] = v
+ return v
+ end)
+
+ function publications.load(specification)
+ local current = datasets[specification.dataset or v_default]
+ local files = settings_to_array(specification.filename)
+ local kind = specification.kind
+ local dataspec = specification.specification
+ statistics.starttiming(publications)
+ for i=1,#files do
+ local filetype, filename = string.splitup(files[i],"::")
+ if not filename then
+ filename = filetype
+ filetype = file.suffix(filename)
+ end
+ if filename then
+ if not filetype or filetype == "" then
+ filetype = "bib"
+ end
+ if file.suffix(filename) == "" then
+ file.addsuffix(filename,filetype)
+ end
+ loaders[filetype](current,filename)
+ if kind then
+ current.loaded[current.fullname or filename] = kind
+ end
+ if dataspec then
+ current.specifications[dataspec] = true
+ end
+ end
+ end
+ local runner = enhancer.runner
+ if runner then
+ runner(current)
+ end
+ statistics.stoptiming(publications)
+ return current
+ end
+
+end
+
+do
+
+ function enhancers.order(dataset)
+ local luadata = dataset.luadata
+ local ordered = dataset.ordered
+ for i=1,#ordered do
+ local tag = ordered[i]
+ if type(tag) == "string" then
+ ordered[i] = luadata[tag]
+ end
+ end
+ end
+
+ function enhancers.details(dataset)
+ local luadata = dataset.luadata
+ local details = dataset.details
+ for tag, entry in next, luadata do
+ if not details[tag] then
+ details[tag] = { }
+ end
+ end
+ end
+
+ utilities.sequencers.appendaction(enhancer,"system","publications.enhancers.order")
+ utilities.sequencers.appendaction(enhancer,"system","publications.enhancers.details")
+
+end
+
+do
+
+ local checked = function(s,d) d[s] = (d[s] or 0) + 1 end
+ local checktex = ( (1-P("\\"))^1 + P("\\") * ((C(R("az","AZ")^1) * Carg(1))/checked))^0
+
+ function publications.analyze(dataset)
+ local current = datasets[dataset]
+ local data = current.luadata
+ local categories = { }
+ local fields = { }
+ local commands = { }
+ for k, v in next, data do
+ categories[v.category] = (categories[v.category] or 0) + 1
+ for k, v in next, v do
+ fields[k] = (fields[k] or 0) + 1
+ lpegmatch(checktex,v,1,commands)
+ end
+ end
+ current.analysis = {
+ categories = categories,
+ fields = fields,
+ commands = commands,
+ }
+ end
+
+end
+
+function publications.tags(dataset)
+ return sortedkeys(datasets[dataset].luadata)
+end
+
+function publications.sortedentries(dataset)
+ return sortedhash(datasets[dataset].luadata)
+end
+
+-- a helper:
+
+function publications.concatstate(i,n)
+ if i == 0 then
+ return 0
+ elseif i == 1 then
+ return 1
+ elseif i == 2 and n == 2 then
+ return 4
+ elseif i == n then
+ return 3
+ else
+ return 2
+ end
+end
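+
+-- These states feed the concat handling: a list of three (invented case) yields
+-- 1, 2, 3 for first, middle and last, while exactly two entries yield 1 and 4,
+-- presumably so that the separator between a pair can differ from the final one.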
+
+-- savers
+
+do
+
+ local savers = { }
+
+ local s_preamble = [[
+% this is an export from context mkiv
+
+@preamble{
+ \ifdefined\btxcmd
+ % we're probably in context
+ \else
+ \def\btxcmd#1{\csname#1\endcsname}
+ \fi
+}
+
+]]
+
+ function savers.bib(dataset,filename,tobesaved)
+ local f_start = formatters["@%s{%s,\n"]
+ local f_field = formatters[" %s = {%s},\n"]
+ local s_stop = "}\n\n"
+ local result = { s_preamble }
+ local n, r = 0, 1
+ for tag, data in sortedhash(tobesaved) do
+ r = r + 1 ; result[r] = f_start(data.category or "article",tag)
+ for key, value in sortedhash(data) do
+ if not privates[key] then
+ r = r + 1 ; result[r] = f_field(key,value)
+ end
+ end
+ r = r + 1 ; result[r] = s_stop
+ n = n + 1
+ end
+ report("%s entries from dataset %a saved in %a",n,dataset,filename)
+ io.savedata(filename,concat(result))
+ end
+
+ function savers.lua(dataset,filename,tobesaved)
+ local list = { }
+ local n = 0
+ for tag, data in next, tobesaved do
+ local t = { }
+ for key, value in next, data do
+ if not privates[key] then
+                    t[key] = value
+ end
+ end
+ list[tag] = t
+ n = n + 1
+ end
+ report("%s entries from dataset %a saved in %a",n,dataset,filename)
+ table.save(filename,list)
+ end
+
+ function savers.xml(dataset,filename,tobesaved)
+ local result, n = publications.converttoxml(dataset,true,true,false,tobesaved)
+ report("%s entries from dataset %a saved in %a",n,dataset,filename)
+ io.savedata(filename,result)
+ end
+
+ function publications.save(specification)
+ local dataset = specification.dataset
+ local filename = specification.filename
+ local filetype = specification.filetype
+ local criterium = specification.criterium
+ statistics.starttiming(publications)
+ if not filename or filename == "" then
+ report("no filename for saving given")
+ return
+ end
+ if not filetype or filetype == "" then
+ filetype = file.suffix(filename)
+ end
+ if not criterium or criterium == "" then
+ criterium = v_all
+ end
+ local saver = savers[filetype]
+ if saver then
+ local current = datasets[dataset]
+ local luadata = current.luadata or { }
+ local tobesaved = { }
+ local result = structures.lists.filter({criterium = criterium, names = "btx"}) or { }
+ for i=1,#result do
+ local userdata = result[i].userdata
+ if userdata then
+ local set = userdata.btxset or v_default
+ if set == dataset then
+ local tag = userdata.btxref
+ if tag then
+ tobesaved[tag] = luadata[tag]
+ end
+ end
+ end
+ end
+ saver(dataset,filename,tobesaved)
+ else
+ report("unknown format %a for saving %a",filetype,dataset)
+ end
+ statistics.stoptiming(publications)
+ return dataset
+ end
+
+ implement {
+ name = "btxsavedataset",
+ actions = publications.save,
+ arguments = {
+ {
+ { "dataset" },
+ { "filename" },
+ { "filetype" },
+ { "criterium" },
+ }
+ }
+ }
+
+end
+
+-- casters
+
+do
+
+ publications.detailed = setmetatableindex(function(detailed,kind)
+ local values = setmetatableindex(function(values,value)
+ local caster = casters[kind]
+ local cast = caster and caster(value) or value
+ values[value] = cast
+ return cast
+ end)
+ detailed[kind] = values
+ return values
+ end)
+
+ local keywordsplitter = utilities.parsers.groupedsplitat(";,")
+
+ casters.keyword = function(str)
+ return lpegmatch(keywordsplitter,str)
+ end
+
+
+ writers.keyword = function(k)
+ if type(k) == "table" then
+            return concat(k,";")
+ else
+ return k
+ end
+ end
+
+ local pagessplitter = lpeg.splitat(P("-")^1)
+
+ casters.range = function(str)
+ local first, last = lpegmatch(pagessplitter,str)
+ return first and last and { first, last } or str
+ end
+
+ writers.range = function(p)
+ if type(p) == "table" then
+ return concat(p,"-")
+ else
+ return p
+ end
+ end
+
+ casters.pagenumber = casters.range
+ writers.pagenumber = writers.range
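+
+    -- For example, a pages field like "123--135" is cast to { "123", "135" } and
+    -- written back as "123-135"; values that don't look like a range are left
+    -- untouched.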
+
+end
diff --git a/tex/context/base/publ-fnd.lua b/tex/context/base/publ-fnd.lua
new file mode 100644
index 000000000..32d0c11be
--- /dev/null
+++ b/tex/context/base/publ-fnd.lua
@@ -0,0 +1,298 @@
+if not modules then modules = { } end modules ['publ-fnd'] = {
+ version = 1.001,
+    comment   = "this module is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if not characters then
+ dofile(resolvers.findfile("char-def.lua"))
+ dofile(resolvers.findfile("char-utf.lua"))
+end
+
+-- this tracker is only for real debugging and not for the average user
+
+local trace_match = false trackers.register("publications.match", function(v) trace_match = v end)
+
+local publications = publications
+
+local tonumber, next, type = tonumber, next, type
+local find = string.find
+local P, R, S, C, Cs, Cp, Cc, Carg, Ct, V = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Cp, lpeg.Cc, lpeg.Carg, lpeg.Ct, lpeg.V
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local concat = table.concat
+
+local formatters = string.formatters
+local lowercase = characters.lower
+local topattern = string.topattern
+
+publications = publications or { } -- for testing
+
+local report = logs.reporter("publications","match")
+
+local colon = P(":")
+local dash = P("-")
+local lparent = P("(")
+local rparent = P(")")
+local space = lpegpatterns.whitespace
+local utf8char = lpegpatterns.utf8character
+local valid = 1 - colon - space - lparent - rparent
+----- key = C(valid^1)
+local key = C(R("az","AZ")^1)
+local wildcard = C("*")
+local word = Cs(lpegpatterns.unquoted + lpegpatterns.argument + valid^1)
+local simple = C(valid^1)
+local number = C(valid^1)
+
+local key = C(R("az","AZ")^1)
+local contains = S(":~")
+local exact = P("=")
+local valid = (1 - space - lparent -rparent)^1
+local wildcard = P("*") / ".*"
+local single = P("?") / "."
+local dash = P("-") / "%."
+local percent = P("-") / "%%"
+local word = Cs(lpegpatterns.unquoted + lpegpatterns.argument + valid)
+local range = P("<") * space^0 * C((1-space)^1) * space^1 * C((1-space- P(">"))^1) * space^0 * P(">")
+
+local f_key_fld = formatters[" local kf_%s = get(entry,%q) \n if kf_%s then kf_%s = lower(kf_%s) end"]
+local f_key_set = formatters[" local ks_%s = get(entry,%q,categories)\n if ks_%s then ks_%s = lower(ks_%s) end"]
+local f_number_fld = formatters[" local nf_%s = tonumber(get(entry,%q))"]
+local f_number_set = formatters[" local ns_%s = tonumber(get(entry,%q,categories))"]
+
+local f_fld_exact = formatters["(kf_%s == %q)"]
+local f_set_exact = formatters["(ks_%s == %q)"]
+local f_fld_contains = formatters["(kf_%s and find(kf_%s,%q))"]
+local f_set_contains = formatters["(ks_%s and find(ks_%s,%q))"]
+local f_fld_between = formatters["(nf_%s and nf_%s >= %s and nf_%s <= %s)"]
+local f_set_between = formatters["(ns_%s and ns_%s >= %s and ns_%s <= %s)"]
+
+local f_all_match = formatters["anywhere(entry,%q)"]
+
+local function test_key_value(keys,where,key,first,last)
+ if not key or key == "" then
+ return "(false)"
+ elseif key == "*" then
+ last = "^.*" .. topattern(lowercase(last)) .. ".*$" -- todo: make an lpeg
+ return f_all_match(last)
+ elseif first == false then
+ -- exact
+ last = lowercase(last)
+ if where == "set" then
+ keys[key] = f_key_set(key,key,key,key,key)
+ return f_set_exact(key,last)
+ else
+ keys[key] = f_key_fld(key,key,key,key,key)
+ return f_fld_exact(key,last)
+ end
+ elseif first == true then
+ -- contains
+ last = "^.*" .. topattern(lowercase(last)) .. ".*$"
+ if where == "set" then
+ keys[key] = f_key_set(key,key,key,key,key)
+ return f_set_contains(key,key,last)
+ else
+ keys[key] = f_key_fld(key,key,key,key,key)
+ return f_fld_contains(key,key,last)
+ end
+ else
+ -- range
+ if where == "set" then
+ keys[key] = f_number_set(key,key)
+ return f_set_between(key,key,tonumber(first),key,tonumber(last))
+ else
+ keys[key] = f_number_fld(key,key)
+ return f_fld_between(key,key,tonumber(first),key,tonumber(last))
+ end
+ end
+end
+
+local p_compare = P { "all",
+ all = (V("one") + V("operator") + V("nested") + C(" "))^1,
+ nested = C("(") * V("all") * C(")"), -- C really needed?
+ operator = C("and")
+ + C("or")
+ + C("not"),
+ one = Carg(1)
+ * V("where")
+ * V("key")
+ * (V("how") * V("word") + V("range"))
+ / test_key_value,
+ key = key
+ + C("*"),
+ where = C("set") * P(":")
+ + Cc(""),
+ how = contains * Cc(true)
+ + exact * Cc(false),
+ word = word,
+ range = range,
+}
+
+-- local p_combine = space^0 * (P(",")/" or ") * space^0
+
+-- local pattern = Cs((P("match")/"" * space^0 * p_compare + p_combine)^1)
+
+local comma = P(",")
+local p_spaces = space^0
+local p_combine = p_spaces * comma * p_spaces / " or "
+local p_expression = P("match")/"" * Cs(p_compare)
+ + Carg(1)
+ * Cc("")
+ * Cc("tag")
+ * Cc(false)
+ * (
+ P("tag") * p_spaces * P("(") * Cs((1-S(")")-space)^1) * p_spaces * P(")")
+ + p_spaces * Cs((1-space-comma)^1) * p_spaces
+ ) / test_key_value
+
+local pattern = Cs {
+ [1] = V(2) * (p_combine * V(2))^0,
+ [2] = p_expression,
+}
+
+-- -- -- -- -- -- -- -- -- -- -- -- --
+-- -- -- -- -- -- -- -- -- -- -- -- --
+
+function publications.anywhere(entry,str) -- helpers
+ for k, v in next, entry do
+ if find(lowercase(v),str) then
+ return true
+ end
+ end
+end
+
+-- todo: use an environment instead of
+
+-- table={
+-- { "match", "((kf_editor and find(kf_editor,\"^.*braslau.*$\")))" },
+-- { "hash", "foo1234" },
+-- { "tag", "bar5678" },
+-- }
+
+local f_template = formatters[ [[
+local find = string.find
+local lower = characters.lower
+local anywhere = publications.anywhere
+local get = publications.getfuzzy
+local specification = publications.currentspecification
+local categories = specification and specification.categories
+return function(entry)
+%s
+ return %s and true or false
+end
+]] ]
+
+local function compile(dataset,expr)
+ local keys = { }
+ -- local expression = lpegmatch(pattern,expr,start,keys)
+ local expression = lpegmatch(pattern,expr,1,keys)
+ if trace_match then
+ report("compiling expression: %s",expr)
+ end
+ local definitions = { }
+ for k, v in next, keys do
+ definitions[#definitions+1] = v
+ end
+ if #definitions == 0 then
+ report("invalid expression: %s",expr)
+ elseif trace_match then
+ for i=1,#definitions do
+ report("% 3i : %s",i,definitions[i])
+ end
+ end
+ definitions = concat(definitions,"\n")
+ local code = f_template(definitions,expression)
+ if trace_match then
+ report("generated code: %s",code)
+ end
+ local finder = loadstring(code) -- use an environment
+ if type(finder) == "function" then
+ finder = finder()
+ if type(finder) == "function" then
+ return finder, code
+ end
+ end
+ report("invalid expression: %s",expr)
+ return false
+end
+
+-- local function test(str)
+-- local keys = { }
+-- local definitions = { }
+-- local expression = lpegmatch(pattern,str,1,keys)
+-- for k, v in next, keys do
+-- definitions[#definitions+1] = v
+-- end
+-- definitions = concat(definitions,"\n")
+-- print(f_template(definitions,expression))
+-- end
+
+-- test("match(foo:bar and (foo:bar or foo:bar))")
+-- test("match(foo=bar and (foo=bar or foo=bar))")
+-- test("match(set:foo:bar),match(set:foo:bar)")
+-- test("match(set:foo=bar)")
+-- test("match(foo:{bar bar})")
+-- test("match(foo={bar bar})")
+-- test("match(set:foo:'bar bar')")
+-- test("match(set:foo='bar bar')")
+-- test("match(set:foo<1000 2000>)")
+-- test("match(set:foo<1000 2000>)")
+-- test("match(*:foo)")
+-- test("match(*:*)")
+
+local trigger = (P("match") + P("tag")) * p_spaces * P("(")
+local check = (1-trigger)^0 * trigger
+
+local function finder(dataset,expression)
+ local found = lpegmatch(check,expression) and compile(dataset,expression) or false
+ if found then
+ local okay, message = pcall(found,{})
+ if not okay then
+ found = false
+ report("error in match: %s",message)
+ end
+ end
+ return found
+end
+
+-- finder("match(author:foo)")
+-- finder("match(author:foo and author:bar)")
+-- finder("match(author:foo or (author:bar and page:123))")
+-- finder("match(author:foo),match(author:foo)")
+
+publications.finder = finder
+
+function publications.search(dataset,expression)
+ local find = finder(dataset,expression)
+ if find then
+ local ordered = dataset.ordered
+ local target = { }
+ for i=1,#ordered do
+ local entry = ordered[i]
+ if find(entry) then
+ local tag = entry.tag
+ if not target[tag] then
+ -- we always take the first
+ target[tag] = entry
+ end
+ end
+ end
+ return target
+ else
+ return { } -- { dataset.luadata[expression] } -- ?
+ end
+end
+
+-- local d = publications.datasets.default
+--
+-- local d = publications.load {
+-- dataset = "default",
+-- filename = "t:/manuals/mkiv/hybrid/tugboat.bib"
+-- }
+--
+-- inspect(publications.search(d,[[match(author:hagen)]]))
+-- inspect(publications.search(d,[[match(author:hagen and author:hoekwater and year:1990-2010)]]))
+-- inspect(publications.search(d,[[match(author:"Bogusław Jackowski")]]))
+-- inspect(publications.search(d,[[match(author:"Bogusław Jackowski" and (tonumber(field:year) or 0) > 2000)]]))
+-- inspect(publications.search(d,[[Hagen:TB19-3-304]]))
diff --git a/tex/context/base/publ-imp-apa.lua b/tex/context/base/publ-imp-apa.lua
new file mode 100644
index 000000000..1d894f261
--- /dev/null
+++ b/tex/context/base/publ-imp-apa.lua
@@ -0,0 +1,523 @@
+local specification = {
+ --
+ -- metadata
+ --
+ name = "apa",
+ version = "1.00",
+ comment = "APA specification",
+ author = "Alan Braslau and Hans Hagen",
+ copyright = "ConTeXt development team",
+ --
+ -- derived (combinations of) fields (all share the same default set)
+ --
+ virtual = {
+ "authoryear",
+ "authoryears",
+ "authornum",
+ "num",
+ "suffix",
+ },
+ --
+ -- special datatypes
+ --
+ types = {
+ --
+ -- list of fields that are interpreted as names: "NAME [and NAME]" where
+ -- NAME is one of the following:
+ --
+ -- First vons Last
+ -- vons Last, First
+ -- vons Last, Jrs, First
+ -- Vons, Last, Jrs, First
+ --
+ author = "author", -- interpreted as name(s)
+ editor = "author",
+ artist = "author",
+ composer = "author",
+ producer = "author",
+ director = "author",
+ doi = "url", -- an external link
+ url = "url",
+ page = "pagenumber", -- number or range: f--t
+ pages = "pagenumber",
+ volume = "range",
+ number = "range",
+ keywords = "keyword", -- comma|-|separated list
+ year = "number",
+ },
+ --
+ -- categories with their specific fields
+ --
+ categories = {
+ --
+ -- categories are added below
+ --
+ },
+}
+
+local generic = {
+ --
+ -- A set returns the first field (in order of position below) that is found
+ -- present in an entry. A set having the same name as a field conditionally
+ -- allows the substitution of an alternate field.
+ --
+ -- note that anything can get assigned a doi or be available online.
+ doi = { "doi", "url" },
+ editionset = { "edition", "volume", "number", "pages" },
+}
+
+-- Definition of recognized categories and the fields that they contain.
+-- Required fields should be present; optional fields may also be rendered;
+-- all other fields will be ignored.
+
+-- Sets contain either/or in order of precedence.
+--
+-- For a category *not* defined here yet present in the dataset, *all* fields
+-- are taken as optional. This allows for flexibility in the addition of new
+-- categories.
+
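+-- As a rough illustration (a sketch, not code used by the machinery itself): for an
+-- entry that has an "editor" but no "author", a set like
+--
+--   author = { "author", "editor", "title" }
+--
+-- makes a request for "author" fall back to the editor field, conceptually like:
+--
+--   local function resolve(entry,set)
+--       for i=1,#set do
+--           local value = entry[set[i]]
+--           if value and value ~= "" then
+--               return value, set[i]
+--           end
+--       end
+--   end
+--
+-- The real lookup is done by the publication machinery, not by this file.
+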
+local categories = specification.categories
+
+-- an article from a journal
+
+categories.article = {
+ sets = {
+ author = { "author", "editor", "title" },
+ doi = generic.doi,
+ },
+ required = {
+ "author"
+ },
+ optional = {
+ "year",
+ "subtitle", "type", "file",
+ "journal", "volume", "number", "pages",
+ "doi", "note",
+ },
+}
+
+-- an article from a magazine
+
+categories.magazine = {
+ sets = categories.article.sets,
+ required = {
+ "author",
+ "year",
+ "journal",
+ },
+ optional = {
+ "subtitle", "type", "file",
+ "number",
+ "month", "day",
+ "doi", "note",
+ },
+}
+
+categories.newspaper = categories.magazine
+
+-- (from jabref) to be identified and setup ...
+
+categories.periodical = {
+ sets = {
+ author = { "editor", "publisher" },
+ doi = generic.doi,
+ },
+ required = {
+ "title",
+ "year",
+ },
+ optional = {
+ "author",
+ "subtitle", "file",
+ "series", "volume", "number", "month",
+ "organization",
+ "doi", "note",
+ },
+}
+
+-- (from jabref) to be identified and setup ...
+
+categories.standard = {
+ sets = {
+ author = { "author", "institution", "organization" },
+ doi = generic.doi,
+ },
+ required = {
+ "author",
+ "year",
+ "title", "subtitle",
+ "doi", "note",
+ },
+ optional = {
+ },
+}
+
+-- a book with an explicit publisher.
+
+categories.book = {
+ sets = {
+ author = { "author", "editor", "publisher", "title" },
+ editionset = generic.editionset,
+ doi = generic.doi,
+ },
+ required = { "author" },
+ optional = {
+ "year", "month", "day",
+ "subtitle", "type", "file",
+ "editionset", "series",
+ "address",
+ "doi", "note",
+ },
+}
+
+-- a part of a book, which may be a chapter (or section or whatever) and/or a range of pages.
+
+categories.inbook = {
+ sets = {
+ author = { "author", "editor", "publisher", "title", },
+ editionset = generic.editionset,
+ doi = generic.doi,
+ },
+ required = {
+ "author",
+ "year" ,
+ },
+ optional = {
+ "subtitle", "type", "file",
+ "booktitle",
+ -- APA ignores this: "chapter",
+ "editionset", "series",
+ "month",
+ "address",
+ "doi", "note",
+ },
+}
+
+-- a book having its own title as part of a collection.
+-- (like inbook, but we here make booktitle required)
+
+categories.incollection = {
+ sets = {
+ author = { "author", "editor", "publisher", "title", },
+ editionset = generic.editionset,
+ doi = generic.doi,
+ },
+ required = {
+ "author",
+ "booktitle",
+ "year",
+ },
+ optional = {
+ "subtitle", "type", "file",
+ "editionset", "series",
+ -- APA ignores this: "chapter",
+ "month",
+ "address",
+ "doi", "note",
+ },
+}
+
+-- a work that is printed and bound, but without a named publisher or sponsoring institution.
+
+categories.booklet = {
+ sets = {
+ author = { "author", "title", },
+ publisher = { "howpublished" }, -- no "publisher"!
+ doi = generic.doi,
+ },
+ required = {
+ "author"
+ },
+ optional = {
+ "publisher",
+ "year", "month",
+ "subtitle", "type", "file",
+ "address",
+ "doi", "note",
+ },
+}
+
+-- the proceedings of a conference.
+
+categories.proceedings = {
+ sets = {
+ author = { "editor", "organization", "publisher", "title" }, -- no "author"!
+ publisher = { "publisher", "organization" },
+ editionset = generic.editionset,
+ doi = generic.doi,
+ },
+ required = {
+ "author",
+ "year"
+ },
+ optional = {
+ "publisher",
+ "subtitle", "file",
+ "editionset", "series",
+ "month",
+ "address",
+ "doi", "note",
+ },
+}
+
+-- an article in a conference proceedings.
+
+categories.inproceedings = {
+ sets = categories.incollection.sets,
+ required = categories.incollection.required,
+ optional = {
+ "subtitle", "type", "file",
+ "month",
+ "edition", "series",
+ "address", "organization",
+ "doi", "note",
+ },
+}
+
+categories.conference = categories.inproceedings
+
+-- a thesis (of course).
+
+categories.thesis = {
+ sets = {
+ doi = generic.doi,
+ },
+ required = {
+ "author",
+ "title",
+ "school",
+ "year",
+ "type"
+ },
+ optional = {
+ "subtitle", "file",
+ "month",
+ "address",
+ "doi", "note",
+ },
+}
+
+categories.mastersthesis = {
+ sets = categories.thesis.sets,
+ required = {
+ "author",
+ "title",
+ "school",
+ "year"
+ },
+ optional = {
+ "type",
+ "subtitle", "file",
+ "month",
+ "address",
+ "doi", "note",
+ },
+}
+categories.phdthesis = categories.mastersthesis
+
+-- a report published by a school or other institution, usually numbered within a series.
+
+categories.techreport = {
+ sets = {
+ author = { "author", "institution", "publisher", "title" },
+ publisher = { "publisher", "institution", },
+ editionset = { "type", "volume", "number", "pages" }, -- no "edition"!
+ doi = generic.doi,
+ },
+ required = {
+ "author",
+ "title",
+ "institution",
+ "year"
+ },
+ optional = {
+ "publisher",
+ "address",
+ "subtitle", "file",
+ "editionset",
+ "month",
+ "doi", "note",
+ },
+}
+
+-- technical documentation.
+
+categories.manual = {
+ sets = {
+ author = { "author", "organization", "publisher", "title" },
+ publisher = { "publisher", "organization", },
+ editionset = generic.editionset,
+ doi = generic.doi,
+ },
+ required = {
+ "title"
+ },
+ optional = {
+ "author", "publisher",
+ "address",
+ "subtitle", "file",
+ "editionset", "month", "year",
+ "doi", "note",
+-- "abstract",
+ },
+}
+
+-- a patent (of course).
+
+categories.patent = {
+ sets = {
+ author = { "author", "assignee", },
+ publisher = { "publisher", "assignee", },
+ year = { "year", "yearfiled", },
+ month = { "month", "monthfiled", },
+ day = { "day", "dayfiled", },
+ doi = generic.doi,
+ },
+ required = {
+ "nationality",
+ "number",
+ "year",
+ },
+ optional = {
+ "type",
+ --check this: "language",
+ "author", "publisher",
+ "title", "subtitle", "file",
+ "address",
+ "day", "month",
+ "doi", "note"
+ },
+}
+
+-- a document having an author and title, but not formally published.
+
+categories.unpublished = {
+ sets = {
+ doi = generic.doi,
+ },
+ required = {
+ "author",
+ "title",
+ "note"
+ },
+ optional = {
+ "subtitle", "file",
+ "year", "month",
+ "doi"
+ },
+}
+
+-- like misc below but includes organization.
+
+categories.electronic = {
+ sets = {
+ doi = generic.doi,
+ },
+ required = {
+ "title"
+ },
+ optional = {
+ "subtitle", "type", "file",
+ "year", "month",
+ "author",
+ "address",
+ "organization",
+ "howpublished",
+ "doi", "note"
+ },
+}
+
+-- not bibtex categories...
+
+categories.film = {
+ sets = {
+ doi = generic.doi,
+ author = { "producer", "director", },
+ },
+ required = {
+ "author",
+ "title",
+ "year",
+ "address", "publisher", -- aka studio
+ },
+ optional = {
+ "type",
+ "note",
+ "doi",
+ },
+}
+
+categories.music = {
+ sets = {
+ doi = generic.doi,
+ author = { "composer", "artist", "title", "album" },
+ title = { "title", "album", },
+ },
+ required = {
+ "author",
+ "title",
+ "year",
+ "address", "publisher", -- aka label
+ },
+ optional = {
+ "type",
+ "note",
+ "doi",
+ },
+}
+
+-- use this type when nothing else fits.
+
+categories.misc = {
+ sets = {
+ doi = generic.doi,
+ },
+ required = {
+ -- nothing is really important here
+ },
+ optional = {
+ "author",
+ "title", "subtitle", "file",
+ "year", "month",
+ "howpublished",
+ "doi", "note",
+ },
+}
+
+-- other (whatever jabref does not know!)
+
+categories.other = {
+ sets = {
+ doi = generic.doi,
+ },
+ required = {
+ "author",
+ "title",
+ "year"
+ },
+ optional = {
+ "subtitle", "file",
+ "doi", "note",
+ },
+}
+
+-- if all else fails to match:
+
+categories.literal = {
+ sets = {
+ author = { "key" },
+ doi = generic.doi,
+ },
+ required = {
+ "author",
+ "text"
+ },
+ optional = {
+ "doi", "note"
+ },
+ virtual = false,
+}
+
+-- done
+
+return specification
diff --git a/tex/context/base/publ-imp-apa.mkvi b/tex/context/base/publ-imp-apa.mkvi
new file mode 100644
index 000000000..9ab624bf8
--- /dev/null
+++ b/tex/context/base/publ-imp-apa.mkvi
@@ -0,0 +1,1465 @@
+%D \module
+%D [ file=publ-imp-apa,
+%D version=2013.12.12,
+%D title=APA bibliography style,
+%D subtitle=Publications,
+%D author=Alan Braslau and Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is therefore copyrighted
+%C by \PRAGMA. See mreadme.pdf for details.
+
+\startbtxrenderingdefinitions[apa]
+
+\ifdefined\c!translate \else \def\c!translate{translate} \fi
+
+%D Reference:
+%D \startTEX
+%D @Book{APA2010,
+%D title ={Publication Manual of the American Psychological Association},
+%D year ={2010},
+%D edition ={Sixth},
+%D address ={Washington, DC},
+%D publisher={American Psychological Association},
+%D Xpages ={291},
+%D url ={http://www.apa.org/books/},
+%D }
+%D \stopTEX
+
+% set ALL specific APA compliant values
+
+\definebtx
+ [apa]
+ [\c!default=default,
+ \c!specification=apa,
+ \c!otherstext={\space\btxlabeltext{apa:others}},
+ %c!journalconversion=\v!normal,
+ \c!monthconversion=\v!month]
+
+% First, define list and rendering parameters
+
+% The APA style sorts the unnumbered rendered list by authoryear
+
+\definebtxrendering
+ [apa]
+ [\c!specification=apa,
+ \c!sorttype=authoryear,
+ \c!numbering=\v!no]
+
+\setupbtxlist
+ [apa]
+ [\c!alternative=\v!paragraph,
+ %\c!width=\v!fit,
+ %\c!distance=.5\emwidth,
+ \c!margin=3.5\emwidth]
+
+\definebtx
+ [apa:\s!list]
+ [apa]
+ [\c!otherstext={,\nobreakspace\textellipsis\space},
+ \c!etallimit=7,
+ \c!etaldisplay=6,
+ \c!etaloption=last,
+ \c!authorconversion=invertedshort,
+ \c!separator:names:2={,\space}, % aka namesep - in this namespace
+ \c!separator:names:3={,\nobreakspace\textampersand\space}, % comma separated list
+ \c!separator:names:4={\nobreakspace\textampersand\space}] % last of two, no comma!
+
+% First, we define a namespace for a few special fields
+
+\definebtx
+ [apa:\s!list:author]
+ [apa:\s!list]
+
+\definebtx
+ [apa:\s!list:editor]
+ [apa:\s!list:author]
+
+\definebtx
+ [apa:\s!list:suffix]
+ [apa:\s!list]
+
+\definebtx
+ [apa:\s!list:url]
+ [apa:\s!list]
+
+\definebtx
+ [apa:\s!list:doi]
+ [apa:\s!list]
+
+\definebtx
+ [apa:\s!list:\s!page]
+ [apa:\s!list]
+ [\c!separator:2={,\space},
+ \c!separator:3={,\space\btxlabeltext{apa:and}\space},
+ \c!separator:4={\space\btxlabeltext{apa:and}\space},
+ \c!left={\btxleftparenthesis},
+ \c!right={\btxrightparenthesis},
+ \c!command={\wordright}]
+
+\definebtx
+ [apa:\s!list:numbering]
+ [apa:\s!list]
+ [\c!right={\space}]
+
+\definebtx
+ [apa:\s!list:numbering:default]
+ [apa:\s!list:numbering]
+
+\definebtx
+ [apa:\s!list:numbering:num]
+ [apa:\s!list:numbering]
+ [\c!stopper={.}]
+
+\definebtx
+ [apa:\s!list:numbering:short]
+ [apa:\s!list:numbering]
+
+\definebtx
+ [apa:\s!list:numbering:tag]
+ [apa:\s!list:numbering]
+
+\definebtx
+ [apa:\s!list:numbering:index]
+ [apa:\s!list:numbering]
+
+% Next, we define a namespace for each category
+
+%D In order to be able to get journals expanded (or normalized or abbreviated) you need
+%D to load a list:
+%D
+%D \starttyping
+%D \btxloadjournallist[journals.txt] % the jabref list
+%D \stoptyping
+
+\definebtx
+ [apa:\s!list:journal]
+ [apa:\s!list]
+ [\c!style=\v!italic]
+ %command=\btxexpandedjournal] % btxabbreviatedjournal
+
+\definebtx
+ [apa:\s!list:volume]
+ [apa:\s!list]
+ [\c!style=\v!italic]
+
+\definebtx
+ [apa:\s!list:title]
+ [apa:\s!list]
+ [\c!style=\v!italic,
+ \c!command=\Word,
+ \c!translate=\v!yes]
+
+\definebtx
+ [apa:\s!list:title:article]
+ [apa:\s!list:title]
+ [\c!style=] % journal is set in italics
+
+\definebtx
+ [apa:\s!list:title:magazine]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:newspaper]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:periodical]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:standard]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:book]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:inbook]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:incollection]
+ [apa:\s!list:title]
+ [\c!style=] % booktitle is set in italics
+
+\definebtx
+ [apa:\s!list:title:proceedings]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:inproceedings]
+ [apa:\s!list:title]
+ [\c!style=] % booktitle is set in italics
+
+\definebtx
+ [apa:\s!list:title:conference]
+ [apa:\s!list:title]
+ [\c!style=] % booktitle is set in italics
+
+\definebtx
+ [apa:\s!list:title:thesis]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:phdthesis]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:mastersthesis]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:booklet]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:manual]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:techreport]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:unpublished]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:patent]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:electronic]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:music]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:film]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:other]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:misc]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:title:literal]
+ [apa:\s!list:title]
+
+\definebtx
+ [apa:\s!list:type]
+ [\c!command=\Word]
+
+% Then define and set cite parameters.
+
+\definebtx
+ [apa:\s!cite]
+ [apa]
+ [\c!alternative=authoryear,
+ \c!otherstext={,\space\btxlabeltext{apa:others}},
+ \c!etallimit=5,
+ \c!etaldisplay=1, % TODO: when 2-4, show all first time, etaldisplay subsequently...
+ \c!authorconversion=\v!name,
+ \c!sorttype=normal,
+ \c!compress=\v!yes, % note that cite sorts only work with compress=yes.
+ \c!separator:names:2={,\space},
+ \c!separator:names:3={,\space\btxlabeltext{apa:and}\space}, % not \textampersand
+ \c!separator:names:4={\space\btxlabeltext{apa:and}\space}] % not \textampersand
+
+\definebtx
+ [apa:\s!cite:author]
+ [apa:\s!cite]
+
+\definebtx
+ [apa:\s!cite:authoryear]
+ [apa:\s!cite:author]
+ [\c!left={(},
+ \c!right={)},
+ \c!inbetween={,\space}]
+
+\definebtx
+ [apa:\s!cite:default]
+ [apa:\s!cite:authoryear]
+
+\definebtx
+ [apa:\s!cite:authoryears]
+ [apa:\s!cite:authoryear]
+ [\c!left=,
+ \c!right=,
+ \c!inbetween={\space}]
+
+\definebtx
+ [apa:\s!cite:authornum]
+ [apa:\s!cite:author]
+ [\c!left={(},
+ \c!right={)}]
+
+\definebtx
+ [apa:\s!cite:author:num] % todo
+ [apa:\s!cite:authornum]
+ [\c!left={\space[},
+ \c!right={]}]
+
+\definebtx
+ [apa:\s!cite:author:year] % todo
+ [apa:\s!cite]
+
+\definebtx
+ [apa:\s!cite:author:years] % todo
+ [apa:\s!cite:authoryears]
+ [\c!inbetween=,
+ \c!left={\space(},
+ \c!right={)}]
+
+\definebtx
+ [apa:\s!cite:year]
+ [apa:\s!cite]
+ [\c!separator:2={,\space}, % :0 and :1 - between items of a list
+ \c!separator:3={,\space\btxlabeltext{apa:and}\space}, % not \textampersand
+ \c!separator:4={\space\btxlabeltext{apa:and}\space}] % not \textampersand
+
+\definebtx
+ [apa:\s!cite:title]
+ [apa:\s!cite]
+ [\c!separator:2={,\space}, % :0 and :1 - between items of a list
+ \c!separator:3={,\space\btxlabeltext{apa:and}\space}, % not \textampersand
+ \c!separator:4={\space\btxlabeltext{apa:and}\space}, % not \textampersand
+ \c!command={\language[\currentbtxlanguage]}, % BAH
+ \c!sorttype=none,
+ \c!style=\v!italic]
+
+\definebtx
+ [apa:\s!cite:booktitle]
+ [apa:\s!cite:title]
+
+\definebtx
+ [apa:\s!cite:tag]
+ [apa:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [apa:\s!cite:index]
+ [apa:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [apa:\s!cite:page]
+ [apa:\s!cite]
+ [\c!left=,
+ \c!right=,
+ \c!separator:2={,\space}, % :0 and :1 - between items of a list
+ \c!separator:3={,\space\btxlabeltext{apa:and}\space}, % not \textampersand
+ \c!separator:4={\space\btxlabeltext{apa:and}\space}] % not \textampersand
+
+\definebtx
+ [apa:\s!cite:pages]
+ [apa:\s!cite:page]
+
+\definebtx
+ [apa:\s!cite:keywords]
+ [apa:\s!cite]
+
+\definebtx
+ [apa:\s!cite:short]
+ [apa:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [apa:\s!cite:category]
+ [apa:\s!cite]
+
+\definebtx
+ [apa:\s!cite:url]
+ [apa:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [apa:\s!cite:doi]
+ [apa:\s!cite:url]
+
+\definebtx
+ [apa:\s!cite:num]
+ [apa:\s!cite]
+ [\c!left={[},
+ \c!right={]},
+ \c!separator:2={,}, % no space
+ \c!separator:3=\btxparameter{\c!separator:2},
+ \c!separator:4=\btxparameter{\c!separator:2}]
+
+\definebtx
+ [apa:\s!cite:textnum]
+ [apa:\s!cite:num]
+ [\c!left={Ref.\nbsp},
+ \c!right=,
+ \c!separator:2={,\space},
+ \c!separator:3={\space\btxlabeltext{apa:and}\space},
+ \c!separator:4={\space\btxlabeltext{apa:and}\space}]
+
+\definebtx
+ [apa:\s!cite:entry]
+ [apa:\s!cite]
+ [\c!left=,
+ \c!right=,
+ \c!inbetween={\space},
+ \c!separator:2={;\space},
+ \c!separator:3=\btxparameter{\c!separator:2},
+ \c!separator:4=\btxparameter{\c!separator:2}]
+
+% Now we setup for the details of the renderings
+
+%D Sometimes we have verbose injections in an entry and these can be language
+%D dependent, so we use labels.
+%D
+%D Because we want to mix rendering (in the manual) we need a namespace in label
+%D texts:
+
+\setupbtxlabeltext
+ [en]
+ [apa:and=and,
+ apa:number={no.},
+ apa:edition={ed.},
+ apa:Editor={Ed.},
+ apa:Editors={Eds.},
+ apa:Volume={Vol.},
+ apa:Volumes={Vols.},
+ apa:others={et al.},
+ apa:page={p.},
+ apa:pages={pp.},
+ apa:nd={n.d.}, % no date
+ apa:mastersthesis={Master's thesis},
+ apa:phdthesis={Doctoral dissertation},
+ apa:technicalreport={Tech. Rep.}, % Technical report
+ apa:supplement={Suppl.}, % Supplement (not used?)
+ apa:patent=Patent,
+ apa:MotionPicture={Motion picture},
+ apa:Producer=Producer,
+ apa:Producers=Producers,
+ apa:Director=Director,
+ apa:Directors=Directors,
+ apa:Recordedby={Recorded by},
+ apa:Author=Author,
+ apa:Translator={Trans.}, % Translator(s) (not used?)
+ apa:Advanced={Advanced online publication},
+ apa:Retrieved={Available from}, % {Retrieved from},
+ apa:In=In]
+
+\setupbtxlabeltext
+ [nl]
+ [apa:and=en,
+ apa:number={nr.},
+ apa:edition={ed.}, % editie
+ apa:Editor=Editor, % Ed./Eds.
+ apa:Editors=Editors,
+ apa:Volume={Vol.},
+ apa:Volumes={Vols.},
+ apa:others={et al.},
+ apa:page={p.},
+ apa:pages={pp.},
+   apa:nd={g.d.}, % geen datum
+ apa:mastersthesis=Masterproef,
+ apa:phdthesis=Proefschrift,
+ apa:technicalreport={Technisch rapport}, % Technical report
+ apa:supplement=Supplement,
+ apa:patent=Octrooi,
+ apa:MotionPicture=Film, % ?
+ apa:Producer=Producent, % ?
+ apa:Producers=Producents, % ?
+ apa:Director=Directeur,
+ apa:Directors=Directeurs,
+ apa:Recordedby={Opgenomen door}, % ?
+ apa:Author=Auteur,
+ apa:Translator=Vertaler,
+ apa:Advanced={Geavanceerde online publicatie},
+ apa:Retrieved={Beschikbaar vanaf}, % {Ontvangen van},
+ apa:In=In]
+
+\setupbtxlabeltext
+ [fr]
+ [apa:and=et,
+ apa:number={n\high{o}},
+ apa:edition={édition},
+ apa:Editor=Éditeur,
+ apa:Editors=Éditeurs,
+ apa:Volume=Volume,
+ apa:Volumes=Volumes,
+ apa:others={et al.},
+ apa:page={p.},
+ apa:pages={pp.},
+   apa:nd={s.d.}, % sans date
+ apa:mastersthesis={Thèse de master (DEA, DESS, master)},
+ apa:phdthesis={Thèse de doctorat},
+ apa:technicalreport={Rapport technique},
+ apa:supplement=Supplément,
+ apa:patent=Brevet,
+ apa:MotionPicture={Film cinématographique},
+ apa:Producer=Producteur,
+ apa:Producers=Producteurs,
+ apa:Director=Réalisateur,
+ apa:Directors=Réalisateurs,
+ apa:Recordedby={Enregistré par},
+ apa:Author=Auteur,
+ apa:Translator=Traducteur,
+ apa:Advanced={Publication en ligne anticipée},
+ apa:Retrieved={Disponible à}, % {Téléchargé de},
+ apa:In=Dans]
+
+\setupbtxlabeltext
+ [de]
+ [apa:and=und,
+ apa:number={nr.},
+ apa:edition=Auf\/lage,
+ apa:Editor=Herausgeber, % Hrsg./Hg.
+ apa:Editors=Herausgeber,
+ apa:Volume=Band, % Bd.
+ apa:Volumes={Bände},
+ apa:others={et al.},
+ apa:page={S.},
+ apa:pages={S.},
+ apa:nd={o.D.}, % ohne Datum (mostly: o.J. / ohne Jahr)
+ apa:mastersthesis={Masterarbeit},
+ apa:phdthesis={Dissertation},
+ apa:technicalreport={Technischer Bericht},
+ apa:supplement={Beilage}, % Supplement
+ apa:patent=Patent,
+ apa:MotionPicture=Kinofilm, % ?
+ apa:Producer=Producer, % ?
+ apa:Producers=Produzenten, % ?
+ apa:Director=Director, % ?
+ apa:Directors=Directors, % ?
+ apa:Recordedby={per Einschreiben}, % ?
+ apa:Author=Autor,
+ apa:Translator={Übersetzer}, % Übers.
+ apa:Advanced={Erweiterte Online-Publikation},
+ apa:Retrieved={heruntergeladen von},
+ apa:In=In]
+
+% thanks: Andrea Valle
+
+\setupbtxlabeltext
+ [it]
+ [apa:and=e,
+ apa:number={nº},
+ apa:edition={ed.}, % edizione
+ apa:Editor={A cura di},
+ apa:Editors={A cura di},
+ apa:Volume={Vol.}, % Volume
+ apa:Volumes={Vol.}, % Volumi
+ apa:others={et al.},
+ apa:page={p.},
+ apa:pages={pp.},
+ apa:nd={s.d.}, % senza data
+ apa:mastersthesis={Tesi di laurea},
+ apa:phdthesis={Tesi di dottorato},
+ apa:technicalreport={Relazione tecnica},
+ apa:supplement={Supplemento},
+ apa:patent=Brevetto,
+ apa:MotionPicture=Film, % ?
+ apa:Producer=Produttore,
+ apa:Producers=Produttori,
+ apa:Director=Direttore,
+ apa:Directors=Direttori,
+ apa:Recordedby={Registrato da},
+ apa:Author=Autore,
+ apa:Translator={Trad.}, % Translator(s)
+ apa:Advanced={Pre-pubblicazione on line},
+ apa:Retrieved={Accessible online},
+ apa:In=In]
+
+\setupbtxlabeltext
+ [es]
+ [apa:and=y,
+ apa:number={nº},
+ apa:edition={ed.}, % edición
+ apa:Editor=Editor, % Ed./Eds.
+ apa:Editors=Editores,
+ apa:Volume={Vol.}, % Volumen
+ apa:Volumes={Vols.}, % Volúmenes
+ apa:others={et al.},
+ apa:page={p.},
+ apa:pages={pp.},
+ apa:nd={s.f.}, % sin fecha
+ apa:mastersthesis={Tesis de maestría},
+ apa:phdthesis={Tesis doctoral},
+ apa:technicalreport={Informe técnico},
+ apa:supplement=Suplemento,
+ apa:patent=Patente,
+ apa:MotionPicture=Cinematográfica,
+ apa:Producer=Productor,
+ apa:Producers=Productores,
+ apa:Director=Director,
+ apa:Directors=Directores,
+ apa:Recordedby={Grabada por},
+ apa:Author=Autor,
+ apa:Translator=Traductor,
+ apa:Advanced={Publicación en línea avanzada},
+ apa:Retrieved={Disponible desde}, % {Obtenido de},
+ apa:In=En]
+
+% cite setups
+
+% The following differs from the default returning n.d. if year is empty
+
+\startsetups btx:apa:nd
+ \btxlabeltext{apa:nd}
+\stopsetups
+
+\startsetups btx:apa:cite:author:year
+ \texdefinition{\s!btx:\s!cite:concat}
+ %\btxparameter\c!left
+ \ifx\currentbtxfirst\empty
+ \fastsetup{btx:apa:nd}
+ \else
+ \texdefinition {\s!btx:\s!cite:inject} {
+ \btxcitereference
+ \currentbtxfirst
+ }
+ \ifx\currentbtxsecond\empty \else
+ \btxparameter\c!range
+ \texdefinition {\s!btx:\s!cite:inject} {
+ \currentbtxsecond
+ }
+ \fi
+ \btxflushsuffix
+ \fi
+ %\btxparameter\c!right
+\stopsetups
+
+\startsetups btx:apa:cite:author:years
+ \fastsetup{btx:apa:cite:author:year}
+\stopsetups
+
+% used in publ-imp-page.mkvi
+
+\startsetups [btx:apa:list:page-or-pages]
+ \ifx\currentbtxlastpage\empty
+ \btxlabeltext{apa:page}
+ \else
+ \btxlabeltext{apa:pages}
+ \fi
+ \btxnbsp
+\stopsetups
+
+% The sameauthor feature may not be APA compliant
+% (there is nothing in the manual cited above).
+% It can be removed using the command:
+% \resetsetups [apa:list:sameauthor]
+
+% :rule, :empty or :ditto ...
+
+\startsetups apa:list:sameauthor
+ \fastsetup{apa:list:sameauthor:rule}
+\stopsetups
+
+\startsetups apa:list:sameauthor:rule
+ \blackrule
+ [\c!width=\dimexpr\listparameter\c!margin-\interwordspace\relax,
+ \c!height=1.5\linewidth]% \linewidth is just too thin with respect to font strokes...
+\stopsetups
+
+\startsetups [apa:list:sameauthor:\v!empty]
+ \kern\dimexpr\listparameter\c!margin-\interwordspace\relax
+\stopsetups
+
+% horrible !
+
+\startsetups apa:list:sameauthor:ditto
+ \inframed
+ [\c!width=\dimexpr\listparameter\c!margin-\interwordspace\relax,
+ \c!frame=\v!off,
+ \c!align=\v!middle]
+ {\doubleprime}
+\stopsetups
+
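+% As a sketch of how a document could select one of the other variants (this is a
+% user level override, not something the style does by itself):
+%
+% \startsetups apa:list:sameauthor
+%     \fastsetup{apa:list:sameauthor:ditto}
+% \stopsetups
+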
+%D Instead of texdefinitions without arguments, we could have used setups but in my
+%D editor (hh, scite) the commands stand out better. It also saves an additional
+%D component in the name (e.g. common:) because commands and setups have a different
+%D namespace, so similar calls don't clash. Performance of definitions is somewhat
+%D better.
+
+%D We use "texdefinitions" (with eventual arguments) for helpers that are used
+%D in the rendering "setups" defined for each category below.
+
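+%D As a small illustration, the two call forms used below look like this (the names
+%D are just examples taken from this file):
+%D
+%D \starttyping
+%D \texdefinition{btx:apa:journal}   % helper, defined with \starttexdefinition
+%D \fastsetup{btx:apa:list:article}  % category rendering, defined with \startsetups
+%D \stoptyping
+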
+%D Note that \btxdoif... and \btxflush rely on the definitions in
+%D publ-imp-apa.lua: fields that are listed as neither required nor optional are
+%D IGNORED. We also make heavy use of the notion of sets - comma-separated lists
+%D of alternative fields to be used in hierarchical order. For example:
+%D author = { "author", "editor", "publisher", "title" }, will return the
+%D author field if it exists; if not, the editor field will be returned, if it
+%D exists; if not, the publisher field will be returned, if it exists; if not,
+%D the title field will be returned, if it exists; if not, nothing will be
+%D returned. In lua syntax, it can be understood as
+%D author or editor or publisher or title or ""
+
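+%D A minimal illustration of how the helpers below consume such a set (the branch
+%D bodies here are only placeholders):
+%D
+%D \starttyping
+%D \btxdoifelse {author} {
+%D     \btxflush{author} % flushes the first field of the set that is present
+%D } {
+%D     % none of the fields in the set is present
+%D }
+%D \stoptyping
+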
+% #title can be title or booktitle
+
+\starttexdefinition btx:apa:translated-title #title
+ \ifx\currentbtxlanguage\empty
+ % no need for an extra
+ \else\ifx\mainbtxlanguage\currentbtxlanguage
+ % no need for an extra
+ \else
+ \btxdoif {#title:\mainbtxlanguage} {
+ \begingroup
+ \language[\mainbtxlanguage]
+ \btxleftbracket
+ \btxusecommand [apa:\s!list:title:\currentbtxcategory] {
+ \btxflush{#title:\mainbtxlanguage}
+ }
+ \btxrightbracket
+ \endgroup
+ }
+ \fi\fi
+\stoptexdefinition
+
+\starttexdefinition btx:apa:composed-title #title
+ \btxstartstyleandcolor [apa:\s!list:title:\currentbtxcategory]
+ \begingroup
+ \language[\currentbtxlanguage]
+ \btxusecommand [apa:\s!list:title:\currentbtxcategory] {
+ \btxflush{#title}
+ \btxdoif {sub#title} {
+ \btxcolon
+ \btxflush{sub#title}
+ }
+ }
+ \endgroup
+ % which namespace?
+ %\doif{\btxparameter{translate}}\v!yes {
+ \texdefinition{btx:apa:translated-title}{#title}
+ %}
+ \btxstopstyleandcolor
+\stoptexdefinition
+
+\starttexdefinition btx:apa:title
+ \setmode{btx:apa:title-placed}
+ % we make the title active, opening "file"
+ \btxdoifelse {file} {
+ \texdefinition{btx:format:inject}
+ {url(file:\btxflush{file})}
+ {
+ \texdefinition{btx:apa:composed-title}{title}
+ }
+ } {
+ \texdefinition{btx:apa:composed-title}{title}
+ }
+ \btxdoif {title} {
+ \btxperiod
+ % TODO: this period may NOT be wanted, as in: Title (2nd ed.).
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:apa:title-if-not-placed
+ \doifelsemode {btx:apa:title-placed} {
+ \resetmode{btx:apa:title-placed}
+ } {
+ \btxdoif {title} {
+ \btxspace
+ \texdefinition {btx:apa:title}
+ }
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:apa:suffixedyear
+ \btxdoifelse {year} {
+ \btxflush{year}
+ \btxflushsuffix
+ } {
+ \btxlabeltext{apa:nd}
+ }
+\stoptexdefinition
+
+% #author may be author(set) or editor
+
+\starttexdefinition btx:apa:author-or-editor #author
+ \btxdoif {#author} {
+ \btxflush{#author}
+ \doifelse {\btxfoundname{#author}} {editor} {
+ \btxleftparenthesis
+ \btxsingularorplural {editor} {
+ \btxlabeltext{apa:Editor}
+ } {
+ \btxlabeltext{apa:Editors}
+ }
+ \btxrightparenthesisperiod
+ } {
+ \doifelse {\btxfoundname{#author}} {producer} {
+ \btxleftparenthesis
+ \btxsingularorplural {producer} {
+ \btxlabeltext{apa:Producer}
+ } {
+ \btxlabeltext{apa:Producers}
+ }
+ \btxrightparenthesis
+ \btxdoifelse {director} {
+ \removeunwantedspaces
+ \btxparameter{\c!separator:names:3}
+ \btxflush{director}
+ \btxleftparenthesis
+ \btxsingularorplural {director} {
+ \btxlabeltext{apa:Director}
+ } {
+ \btxlabeltext{apa:Directors}
+ }
+ \btxrightparenthesisperiod
+ } {
+ \btxperiod
+ }
+ } {
+ \doif {\btxfoundname{#author}} {director} {
+ \btxleftparenthesis
+ \btxsingularorplural {director} {
+ \btxlabeltext{apa:Director}
+ } {
+ \btxlabeltext{apa:Directors}
+ }
+ \btxrightparenthesisperiod
+ }
+ }
+ }
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:apa:authoryear
+ % we make the authoryear active, pointing to the citation
+ \texdefinition{btx:format:inject}
+ {internal(\currentbtxinternal)}
+ {
+ \doifelsesetups{apa:list:sameauthor} {
+ \btxdoifelsesameasprevious {author} {
+ \fastsetup{apa:list:sameauthor}
+ } {
+ \texdefinition{btx:apa:author-or-editor} {author}
+ }
+ } {
+ \texdefinition{btx:apa:author-or-editor} {author}
+ }
+ \btxleftparenthesis
+ \texdefinition{btx:apa:suffixedyear}
+ \btxrightparenthesis
+ }
+ % outside of interaction
+ \btxperiod
+ \doif {\btxfoundname{author}} {title} {
+ \setmode{btx:apa:title-placed}
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:apa:editor-in
+ \btxdoif {booktitle} {
+ \btxlabeltext{apa:In}
+ \doifnot {\btxfoundname{author}} {editor} {
+ \btxspace
+ \texdefinition{btx:apa:author-or-editor} {editor}
+ }
+ \btxspace
+ \texdefinition{btx:apa:composed-title} {booktitle}
+ \btxperiod
+ }
+\stoptexdefinition
+
+% TODO: The title is terminated with period. However,
+% we probably don't want this before the parenthesis.
+
+\starttexdefinition btx:apa:leftparenthesis-or-comma
+ \doifelsemode {btx:apa:editionset-is-empty} {
+ \btxleftparenthesis
+ \resetmode{btx:apa:editionset-is-empty}
+ } {
+ \btxcomma
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:apa:editionset
+ \setmode{btx:apa:editionset-is-empty}
+ \doif {\currentbtxcategory} {techreport} {
+ \texdefinition{btx:apa:leftparenthesis-or-comma}
+ \btxdoifelse {type} {
+ \btxusecommand [apa:\s!list:type] {
+ \btxflush{type}
+ }
+ } {
+ \btxlabeltext{apa:technicalreport}
+ }
+ }
+ \btxdoif {volume} {
+ \texdefinition{btx:apa:leftparenthesis-or-comma}
+ \btxoneorrange {volume} {
+ \btxlabeltext{apa:Volume}
+ } {
+ \btxlabeltext{apa:Volumes}
+ }
+ \btxspace
+ \btxflush{volume}
+ }
+ \btxdoif {number} {
+ \texdefinition{btx:apa:leftparenthesis-or-comma}
+ \btxlabeltext{apa:number}
+ \btxspace
+ \btxflush{number}
+ }
+ \btxdoif {edition} {
+ \texdefinition{btx:apa:leftparenthesis-or-comma}
+ \btxflush{edition}
+ \btxspace
+ \btxlabeltext{apa:edition}
+ }
+ \btxdoif {pages} {
+ \texdefinition{btx:apa:leftparenthesis-or-comma}
+ \btxoneorrange {pages} {
+ \btxlabeltext{apa:page}
+ } {
+ \btxlabeltext{apa:pages}
+ }
+ \btxnbsp
+ \btxflush{pages}
+ }
+ \doifnotmode {btx:apa:editionset-is-empty} {
+ \btxrightparenthesisperiod
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:apa:journal
+ \btxstartstyleandcolor[apa:\s!list:journal]
+ \btxusecommand [apa:\s!list:journal] {
+ \btxflush{journal}
+ }
+ \btxstopstyleandcolor
+\stoptexdefinition
+
+\starttexdefinition btx:apa:volume
+ \btxstartstyleandcolor[apa:\s!list:volume]
+ \btxflush{volume}
+ \btxstopstyleandcolor
+\stoptexdefinition
+
+ % this could be simplified!
+
+\starttexdefinition btx:apa:journal-volume-number-pages
+ \btxdoif {journal} {
+ \btxspace
+ \texdefinition{btx:apa:journal}
+ \btxdoifelse {volume} {
+ \btxcomma
+ \texdefinition{btx:apa:volume}
+ \btxdoif {number} {
+ %\btxleftparenthesis
+ (\btxflush{number}
+ \btxrightparenthesis
+ }
+ } {
+ \btxdoif {number} {
+ \btxcomma
+ \btxleftparenthesis
+ \btxflush{number}
+ \btxrightparenthesis
+ }
+ }
+ \btxdoif {pages} {
+ \btxcomma
+ \doif {\currentbtxcategory} {newspaper} {
+ \btxoneorrange {pages} {
+ \btxlabeltext{apa:page}
+ } {
+ \btxlabeltext{apa:pages}
+ }
+ \btxnbsp
+ }
+ \btxflush{pages}
+ }
+ \btxperiod
+ \doifnot {\currentbtxcategory} {newspaper} {
+ \btxdoifnot {volume} {
+ \btxdoifnot {number} {
+ \btxdoifnot {pages} {
+ \btxdoif {doi} {%set: doi or url
+ \btxspace
+ \btxlabeltext{apa:Advanced}
+ \btxperiod
+ }
+ }
+ }
+ }
+ }
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:apa:wherefrom-publisher
+ \btxdoifelse {address} {
+ \btxflush{address}
+ \btxdoif {country} {
+ \btxcomma
+ \btxflush{country}
+ }
+ \btxcolon
+ } {
+ \btxdoif {country} {
+ \btxflush{country}
+ \btxcolon
+ }
+ }
+ \doifelse {\btxfoundname{author}} {\btxfoundname{publisher}} {
+ \btxlabeltext{apa:Author}
+ } {
+ \btxdoifelse {publisher} {
+ \btxflush{publisher}
+ } {
+ \btxlabeltext{apa:Author}
+ }
+ }
+ \btxperiod
+\stoptexdefinition
+
+\definebreakpoints[doi]
+\definebreakpoint [doi][:][nleft=3,type=1]
+\definebreakpoint [doi][/][nleft=3,type=1]
+\definebreakpoint [doi][-][nleft=3,type=1]
+\definebreakpoint [doi][.][nleft=3,type=1]
+
+% use \btxentry here?
+
+\starttexdefinition btx:apa:url
+ \btxspace
+ \btxlabeltext{apa:Retrieved}
+ \btxspace
+ \begingroup
+ \setbreakpoints[doi]
+ \ifconditional\btxinteractive
+ \goto {
+ \hyphenatedurl{\btxflush{url}}
+ } [
+ url(\btxflush{url})
+ ]
+ \else
+ \hyphenatedurl{\btxflush{url}}
+ \fi
+ \endgroup
+\stoptexdefinition
+
+% use \btxentry here?
+
+\starttexdefinition btx:apa:doi
+ \btxspace
+ \begingroup
+ \setbreakpoints[doi]
+ \ifconditional\btxinteractive
+ \goto {
+ \hyphenatedurl{doi:\btxflush{doi}}
+ } [
+ url(http://dx.doi.org/\btxflush{doi})
+ ]
+ \else
+ \hyphenatedurl{doi:\btxflush{doi}}
+ \fi
+ \endgroup
+\stoptexdefinition
+
+\starttexdefinition btx:apa:note
+ \btxdoif {note} {
+ \btxleftparenthesis
+ \btxflush{note}
+ \btxrightparenthesis
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:apa:url-doi-note
+ \doif {\btxfoundname{doi}} {url} {
+ \texdefinition{btx:apa:url}
+ }
+ \doif {\btxfoundname{doi}} {doi} {
+ \texdefinition{btx:apa:doi}
+ }
+ \texdefinition{btx:apa:note}
+ \removeunwantedspaces
+\stoptexdefinition
+
+% Then setups, by category
+
+% An article from a journal
+% Required fields: author or editor or title, journal, (year).
+% Optional fields: volume, number, pages, type, doi, url, note.
+% Note that bibtex (and tools) do not include editor (e.g. special issue or section)
+
+\startsetups btx:apa:list:article
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \btxdoif {type} {
+ \btxleftbracket
+ \btxflush{type}
+ \btxrightbracketperiod
+ }
+ \texdefinition{btx:apa:journal-volume-number-pages}
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+% An article from a magazine.
+% Required fields: author or title, journal, (year).
+% Optional fields: number, pages, type, month, day, doi, url, note.
+
+\startsetups btx:apa:list:magazine
+ \fastsetup{btx:apa:list:article}
+\stopsetups
+
+% An article from a newspaper.
+% Required fields: author or title, journal, (year).
+% Optional fields: volume, number, pages, type, month, day, doi, url, note.
+
+\startsetups btx:apa:list:newspaper
+ \fastsetup{btx:apa:list:article}
+\stopsetups
+
+% A complete issue of a periodical, such as a special issue of a journal.
+% Required fields: title, year
+% Optional fields: editor, publisher, subtitle, series, volume, number, month, organization, doi, url, issn, note
+
+% needs to be tuned...
+
+\startsetups btx:apa:list:periodical
+ \fastsetup{btx:apa:list:article}
+\stopsetups
+
+% National and international standards issued by a standards body
+% Required fields: author, institution, or organization, year, title
+% Optional fields: subtitle, doi, url, note
+
+\startsetups btx:apa:list:standard
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+% A book with an explicit publisher.
+% Required fields: author or editor or publisher, title, (year).
+% Optional fields: volume or number, series, address, edition, month, day, note.
+% APA ignores: month, day
+
+% todo: series?
+
+\startsetups btx:apa:list:book
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \texdefinition{btx:apa:editionset}
+ \texdefinition{btx:apa:wherefrom-publisher}
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+% There is some debate about how inbook should differ from incollection
+
+% A part of a book, which may be a chapter (or section or whatever) and/or a range of pages.
+% (note that inbook is handled differently by bibtex and biblatex)
+% Required fields: author or editor, title, chapter and/or pages, publisher, year.
+% Optional fields: volume or number, series, type, address, edition, month, note.
+% We add optional: booktitle.
+% APA ignores: chapter, month
+
+\startsetups btx:apa:list:inbook
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \texdefinition{btx:apa:editor-in}
+ \texdefinition{btx:apa:editionset}
+ \texdefinition{btx:apa:wherefrom-publisher}
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+% A part of a book having its own title.
+% Required fields: author, title, booktitle, publisher, year.
+% Optional fields: editor, volume or number, series, type, chapter, pages, address, edition, month, note.
+% APA ignores: chapter, month
+
+\startsetups btx:apa:list:incollection
+ \fastsetup{btx:apa:list:inbook}
+\stopsetups
+
+% The proceedings of a conference.
+% Required fields: title, year.
+% Optional fields: editor, volume or number, series, address, month, organization, publisher, note.
+% todo: series?
+
+\startsetups btx:apa:list:proceedings
+ \fastsetup{btx:apa:list:book}
+\stopsetups
+
+% An article in a conference proceedings.
+% Required fields: author, title, booktitle, year.
+% Optional fields: editor, volume or number, series, pages, address, month, organization, publisher, note.
+
+\startsetups btx:apa:list:inproceedings
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \texdefinition{btx:apa:editor-in}
+ \texdefinition{btx:apa:editionset}
+ \btxdoif {organization} {
+ \btxspace
+ \btxflush{organization}
+ \btxcomma
+ }
+ \texdefinition{btx:apa:wherefrom-publisher}
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+\startsetups btx:apa:list:conference
+ \fastsetup{btx:apa:list:inproceedings}
+\stopsetups
+
+% A thesis.
+% Required fields: author, title, school, year.
+% Optional fields: type, address, month, note.
+
+\startsetups btx:apa:list:thesis
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \btxleftparenthesis
+ \btxdoifelse {type} {
+ \btxusecommand [apa:\s!list:type] {
+ \btxflush{type}
+ }
+ } {
+ \btxlabeltext{apa:\currentbtxcategory}
+ }
+ \btxrightparenthesis
+ \btxdoif {school} {
+ \btxperiod
+ \btxflush{school}
+ }
+ \btxdoif {address} {
+ \btxdoifelse {school} {
+ \btxcomma
+ } {
+ \btxperiod
+ }
+ \btxflush{address}
+ \btxdoif {country} {
+ \btxcomma
+ \btxflush{country}
+ }
+ }
+ \btxperiod
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+\startsetups btx:apa:list:phdthesis
+ \fastsetup{btx:apa:list:thesis}
+\stopsetups
+
+\startsetups btx:apa:list:mastersthesis
+ \fastsetup{btx:apa:list:thesis}
+\stopsetups
+
+% A work that is printed and bound, but without a named publisher or sponsoring institution.
+% Required field: title.
+% Optional fields: author, howpublished, address, month, year, note.
+
+\startsetups btx:apa:list:booklet
+ \fastsetup{btx:apa:list:book}
+\stopsetups
+
+% Technical documentation.
+% Required field: title.
+% Optional fields: author, organization, address, edition, month, year, note.
+
+\startsetups btx:apa:list:manual
+ \fastsetup{btx:apa:list:book}
+\stopsetups
+
+% A report published by a school or other institution, usually numbered within a series.
+% Required fields: author, title, institution, year.
+% Optional fields: type, number, address, month, note.
+
+\startsetups btx:apa:list:techreport
+ \fastsetup{btx:apa:list:book}
+\stopsetups
+
+% A document having an author and title, but not formally published.
+% Required fields: author, title, note.
+% Optional fields: month, year.
+
+\startsetups btx:apa:list:unpublished
+ \fastsetup{btx:apa:list:book}
+\stopsetups
+
+% A patent. Note that this category was not defined with BIBTEX. Below from JabRef:
+% Required fields: nationality, number, year, yearfiled
+% Optional fields: author, title, assignee, address, type, number, day, dayfiled, month, monthfiled, note, url
+% Also optional: publisher
+
+% todo: yearfiled, monthfiled, dayfiled
+
+\startsetups btx:apa:list:patent
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \begingroup
+ \it
+ \btxdoif {nationality} {
+ \btxspace
+ \btxflush{nationality}
+ }
+ \btxspace
+ \btxlabeltext{apa:patent}
+ \btxdoif {number} {
+ \btxspace
+ \btxlabeltext{apa:number}
+ \btxspace
+ \btxflush{number}
+ }
+ \btxperiod
+ \italiccorrection
+ \endgroup
+ \texdefinition{btx:apa:wherefrom-publisher}
+ \texdefinition{btx:apa:url}
+ \texdefinition{btx:apa:note}
+\stopsetups
+
+% Electronic. Note that this category was not defined with BIBTEX. Below from JabRef:
+% Required fields: title
+% Optional fields: address, author, howpublished, month, note, organization, url, year, doi
+% Also optional: type
+
+% Like Misc below but includes organization.
+
+\startsetups btx:apa:list:electronic
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \btxdoif {organization} {
+ \btxspace
+ \btxflush{organization}
+ \btxperiod
+ }
+ \btxdoif {howpublished} {
+ \btxspace
+ \btxflush{howpublished}
+ \btxperiod
+ }
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+% Film. Note that this category was not defined with BIBTEX.
+% Required fields: producer, director, title, year, address, publisher
+% Optional fields: subtitle, type, note, url, doi
+
+\startsetups btx:apa:list:film
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition {btx:apa:title}
+ \btxleftbracket
+ \btxdoifelse {type} {
+ \btxflush{type}
+ } {
+ \btxlabeltext{apa:MotionPicture}
+ }
+ \btxrightbracketperiod
+ \texdefinition{btx:apa:wherefrom-publisher}
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+% Music. Note that this category was not defined with BIBTEX.
+% Required fields: composer, artist, title, album, year, address, publisher
+% Optional fields: subtitle, type, note, url, doi
+
+\startsetups btx:apa:list:music
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \doifnot {\btxfoundname{author}} {artist} {
+ \btxdoif {artist} {
+ \btxleftbracket
+ \btxlabeltext{apa:Recordedby}
+ \btxspace
+ \btxflush{artist}
+ \btxrightbracketperiod
+ }
+ }
+ \doifnot {\btxfoundname{title}} {album} {
+ \btxdoif {album} {
+ \btxlabeltext{apa:In}
+ \btxspace
+ \btxflush{album}
+ \btxperiod
+ }
+ }
+ \btxdoif {type} {
+ \btxleftbracket
+ \btxflush{type}
+ \btxrightbracketperiod
+ }
+ \texdefinition{btx:apa:wherefrom-publisher}
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+% Other. Note that this category was not defined with BIBTEX. Below from JabRef:
+% Required fields: author or title, year
+% Optional fields: note, doi, url
+
+\startsetups btx:apa:list:other
+ \fastsetup{btx:apa:list:book}
+\stopsetups
+
+% Use this type when nothing else fits.
+% Required fields: none.
+% Optional fields: author, title, howpublished, month, year, note.
+
+\startsetups btx:apa:list:misc
+ \texdefinition{btx:apa:authoryear}
+ \texdefinition{btx:apa:title-if-not-placed}
+ \btxdoif {howpublished} {
+ \btxspace
+ \btxflush{howpublished}
+ \btxperiod
+ }
+ \texdefinition{btx:apa:url-doi-note}
+\stopsetups
+
+% If all else fails to match:
+
+\startsetups btx:apa:list:literal
+ %\btxleftparenthesis
+ \removeunwantedspaces(
+ \btxflush{tag}
+ \btxrightparenthesis
+ \btxdoif {text} {
+ \btxflush{text}
+ }
+\stopsetups
+
+% HH: an example of setting up translations using a sub rendering. Keep it here
+% till we find another spot as otherwise I forget about it and I don't want to
+% waste hours reinventing a wheel when something like this is needed.
+%
+% \definebtx
+% [apa:cite:title:translated]
+% [apa:cite:title]
+% [left=\btxleftbracket,
+% right=\btxrightbracket,
+% style=\v!bolditalic]
+%
+% \startsetups btx:apa:cite:title
+% % need to add concat, etc.
+% \btxcitereference
+% \currentbtxfirst
+% \doifmode {btx:apa:translatedtitles} {
+% \ifx\currentbtxlanguage\empty
+% % no need for an extra
+% \else\ifx\mainbtxlanguage\currentbtxlanguage
+% % no need for an extra
+% \else
+% \btxdoif {title:\mainbtxlanguage} {
+% \btxstartciterendering[title:translated]
+% \language[\mainbtxlanguage]
+% \btxflush{title:\mainbtxlanguage}
+% \btxstopciterendering
+% }
+% \fi\fi
+% }
+% \stopsetups
+
+\stopbtxrenderingdefinitions
diff --git a/tex/context/base/publ-imp-aps.lua b/tex/context/base/publ-imp-aps.lua
new file mode 100644
index 000000000..c15ffe918
--- /dev/null
+++ b/tex/context/base/publ-imp-aps.lua
@@ -0,0 +1,479 @@
+local specification = {
+ --
+ -- metadata
+ --
+ name = "aps",
+ version = "1.00",
+ comment = "APS specification",
+ author = "Alan Braslau and Hans Hagen",
+ copyright = "ConTeXt development team",
+ --
+ -- derived (combinations of) fields (all share the same default set)
+ --
+ virtual = {
+ "authoryear",
+ "authoryears",
+ "authornum",
+ "num",
+ "suffix",
+ },
+ --
+ -- special datatypes
+ --
+ types = {
+ --
+ -- list of fields that are interpreted as names: "NAME [and NAME]" where
+ -- NAME is one of the following:
+ --
+ -- First vons Last
+ -- vons Last, First
+ -- vons Last, Jrs, First
+ -- Vons, Last, Jrs, First
+ --
+ author = "author", -- interpreted as name(s)
+ editor = "author",
+ doi = "url", -- an external link
+ url = "url",
+ page = "pagenumber", -- number or range: f--t
+ pages = "pagenumber",
+ volume = "range",
+ number = "range",
+ keywords = "keyword", -- comma|-|separated list
+ year = "number",
+ },
+ --
+ -- categories with their specific fields
+ --
+ categories = {
+ --
+ -- categories are added below
+ --
+ },
+}
+
+local generic = {
+ --
+ -- A set returns the first field (in order of position below) that is found
+ -- present in an entry. A set having the same name as a field conditionally
+ -- allows the substitution of an alternate field.
+ --
+ -- note that anything can get assigned a doi or be available online.
+ editionset = { "edition", "volume", "number", "pages" },
+}
+
+-- Note that the APS specification allows an additional field "collaboration"
+-- to be rendered following the author list (if the collaboration name appears
+-- in the byline of the cited article).
+
+-- Definition of recognized categories and the fields that they contain.
+-- Required fields should be present; optional fields may also be rendered;
+-- all other fields will be ignored.
+
+-- Sets contain either/or in order of precedence.
+--
+-- For a category *not* defined here yet present in the dataset, *all* fields
+-- are taken as optional. This allows for flexibility in the addition of new
+-- categories.
+
+local categories = specification.categories
+
+-- an article from a journal
+
+categories.article = {
+ sets = {
+ author = { "author", "editor" },
+ },
+ required = {
+ "author",
+ },
+ optional = {
+ "collaboration",
+ "year",
+ "title", "subtitle", "type", "file",
+ "journal", "volume", "number", "pages",
+ "doi", "url", "note",
+ },
+}
+
+-- an article from a magazine
+
+categories.magazine = {
+ sets = categories.article.sets,
+ required = {
+ "author",
+ "year",
+ "journal",
+ },
+ optional = {
+ "collaboration",
+ "title", "subtitle", "type", "file",
+ "number", "pages",
+ "month", "day",
+ "doi", "url", "note",
+ },
+}
+
+categories.newspaper = categories.magazine
+
+-- (from jabref) to be identified and setup ...
+
+categories.periodical = {
+ sets = {
+ author = { "editor", "publisher" },
+ },
+ required = {
+ "title",
+ "year",
+ },
+ optional = {
+ "author",
+ "collaboration",
+ "subtitle", "file",
+ "series", "volume", "number", "month",
+ "organization",
+ "doi", "url", "note",
+ },
+}
+
+-- (from jabref) to be identified and setup ...
+
+categories.standard = {
+ sets = {
+ author = { "author", "institution", "organization" },
+ },
+ required = {
+ "author",
+ "year",
+ "title", "subtitle",
+ "doi", "note",
+ },
+ optional = {
+ "collaboration",
+ "url",
+ },
+}
+
+-- a book with an explicit publisher.
+
+categories.book = {
+ sets = {
+ author = { "author", "editor", "publisher" },
+ editionset = generic.editionset,
+ },
+ required = {
+ "author",
+ "title",
+ },
+ optional = {
+ "collaboration",
+ "year", "month", "day",
+ "title", "subtitle", "type", "file",
+ "editionset", "series",
+ "address",
+ "doi", "url", "note",
+ },
+}
+
+-- a part of a book, which may be a chapter (or section or whatever) and/or a range of pages.
+
+categories.inbook = {
+ sets = {
+ author = { "author", "editor", "publisher", },
+ editionset = generic.editionset,
+ },
+ required = {
+ "author",
+ "year" ,
+ "title",
+ },
+ optional = {
+ "collaboration",
+ "subtitle", "type", "file",
+ "booktitle",
+ -- "chapter",
+ "editionset", "series",
+ "month",
+ "address",
+ "doi", "url", "note",
+ },
+}
+
+-- a book having its own title as part of a collection.
+-- (like inbook, but we here make booktitle required)
+
+categories.incollection = {
+ sets = {
+ author = { "author", "editor", "publisher" },
+ editionset = generic.editionset,
+ },
+ required = {
+ "author",
+ "booktitle",
+ "year",
+ },
+ optional = {
+ "collaboration",
+ "title", "subtitle", "type", "file",
+ "editionset", "series",
+ "chapter",
+ "month",
+ "address",
+ "doi", "url", "note",
+ },
+}
+
+-- a work that is printed and bound, but without a named publisher or sponsoring institution.
+
+categories.booklet = {
+ sets = {
+ publisher = { "howpublished" }, -- no "publisher"!
+ },
+ required = {
+ "author",
+ "title",
+ },
+ optional = {
+ "publisher",
+ "collaboration",
+ "year", "month",
+ "subtitle", "type", "file",
+ "address",
+ "doi", "url", "note",
+ },
+}
+
+-- the proceedings of a conference.
+
+categories.proceedings = {
+ sets = {
+ author = { "editor", "organization", "publisher" }, -- no "author"!
+ publisher = { "publisher", "organization" },
+ editionset = generic.editionset,
+ },
+ required = {
+ "author",
+ "year"
+ },
+ optional = {
+ "collaboration",
+ "publisher",
+ "title", "subtitle", "file",
+ "editionset", "series",
+ "month",
+ "address",
+ "doi", "url", "note",
+ },
+}
+
+-- an article in a conference proceedings.
+
+categories.inproceedings = {
+ sets = categories.incollection.sets,
+ required = categories.incollection.required,
+ optional = {
+ "collaboration",
+ "title", "subtitle", "type", "file",
+ "month",
+ "edition", "series",
+ "address", "organization",
+ "doi", "url", "note",
+ },
+}
+
+categories.conference = categories.inproceedings
+
+-- a thesis (of course).
+
+categories.thesis = {
+ required = {
+ "author",
+ "title",
+ "school",
+ "year",
+ "type"
+ },
+ optional = {
+ "collaboration",
+ "subtitle", "file",
+ "month",
+ "address",
+ "doi", "url", "note",
+ },
+}
+
+categories.mastersthesis = {
+ sets = categories.thesis.sets,
+ required = {
+ "author",
+ "title",
+ "school",
+ "year"
+ },
+ optional = {
+ "collaboration",
+ "type",
+ "subtitle", "file",
+ "month",
+ "address",
+ "doi", "url", "note",
+ },
+}
+categories.phdthesis = categories.mastersthesis
+
+-- a report published by a school or other institution, usually numbered within a series.
+
+categories.techreport = {
+ sets = {
+ author = { "author", "institution", "publisher" },
+ publisher = { "publisher", "institution", },
+ editionset = { "type", "volume", "number", "pages" }, -- no "edition"!
+ },
+ required = {
+ "author",
+ "title",
+ "institution",
+ "year"
+ },
+ optional = {
+ "collaboration",
+ "publisher",
+ "address",
+ "subtitle", "file",
+ "editionset",
+ "month",
+ "doi", "url", "note",
+ },
+}
+
+-- technical documentation.
+
+categories.manual = {
+ sets = {
+ author = { "author", "organization", "publisher" },
+ publisher = { "publisher", "organization", },
+ editionset = generic.editionset,
+ },
+ required = {
+ "title"
+ },
+ optional = {
+ "author", "publisher",
+ "collaboration",
+ "address",
+ "subtitle", "file",
+ "editionset", "month", "year",
+ "doi", "url", "note",
+ },
+}
+
+-- a patent (of course).
+
+categories.patent = {
+ sets = {
+ author = { "author", "assignee", },
+ publisher = { "publisher", "assignee", },
+ year = { "year", "yearfiled", },
+ month = { "month", "monthfiled", },
+ day = { "day", "dayfiled", },
+ },
+ required = {
+ "nationality",
+ "number",
+ "year",
+ },
+ optional = {
+ "type",
+ --check this: "language",
+ "author", "publisher",
+ "collaboration",
+ "title", "subtitle", "file",
+ "address",
+ "day", "month",
+ "doi", "url", "note",
+ },
+}
+
+-- a document having an author and title, but not formally published.
+
+categories.unpublished = {
+ required = {
+ "author",
+ "title",
+ "note"
+ },
+ optional = {
+ "collaboration",
+ "subtitle", "file",
+ "year", "month",
+ "doi", "url",
+ },
+}
+
+-- like misc below but includes organization.
+
+categories.electronic = {
+ required = {
+ "title"
+ },
+ optional = {
+ "subtitle", "type", "file",
+ "year", "month",
+ "author",
+ "collaboration",
+ "address",
+ "organization",
+ "howpublished",
+ "doi", "url", "note",
+ },
+}
+
+-- use this type when nothing else fits.
+
+categories.misc = {
+ required = {
+ -- nothing is really important here
+ },
+ optional = {
+ "author",
+ "collaboration",
+ "title", "subtitle", "file",
+ "year", "month",
+ "howpublished",
+ "doi", "url", "note",
+ },
+}
+
+-- other (whatever jabref does not know!)
+
+categories.other = {
+ required = {
+ "author",
+ "title",
+ "year"
+ },
+ optional = {
+ "collaboration",
+ "subtitle", "file",
+ "doi", "url", "note",
+ },
+}
+
+-- if all else fails to match:
+
+categories.literal = {
+ sets = {
+ author = { "tag" }, -- need to check this!
+ },
+ required = {
+ "text"
+ },
+ optional = {
+ "author",
+ "doi", "url", "note"
+ },
+ virtual = false,
+}
+
+-- done
+
+return specification
diff --git a/tex/context/base/publ-imp-aps.mkvi b/tex/context/base/publ-imp-aps.mkvi
new file mode 100644
index 000000000..28af82e74
--- /dev/null
+++ b/tex/context/base/publ-imp-aps.mkvi
@@ -0,0 +1,1110 @@
+%D \module
+%D [ file=publ-imp-aps,
+%D version=2015.03.22,
+%D title=APS bibliography style,
+%D subtitle=Publications,
+%D author=Alan Braslau and Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is therefore copyrighted
+%C by \PRAGMA. See mreadme.pdf for details.
+
+\startbtxrenderingdefinitions[aps]
+
+%D Reference:
+%D \startTEX
+%D @Book{APS2011,
+%D title ={Physical Review Style and Notation Guide}
+%D year ={2011},
+%D month ={June}
+%D edition ={Revised},
+%D editor ={Waldron, A and Judd, P. and Miller, V.},
+%D address ={Ridge, NY},
+%D publisher={American Physical Society},
+%D Xpages ={26},
+%D url ={http://journals.aps.org/files/styleguide-pr.pdf}
+%D }
+%D \stopTEX
+
+% set ALL specific APS compliant values
+
+\definebtx
+ [aps]
+ [\c!default=default,
+ \c!specification=aps,
+ \c!otherstext={\space{\it\btxlabeltext{aps:others}}},
+ \c!etallimit=10,
+ \c!etaldisplay=\btxparameter\c!etallimit,
+ %c!journalconversion=\v!normal,
+ \c!monthconversion=\v!month,
+ \c!title=\v!yes,
+ \c!separator:names:2={,\space},
+ \c!separator:names:3={,\space\btxlabeltext{aps:and}\space}, % not \textampersand
+ \c!separator:names:4= {\space\btxlabeltext{aps:and}\space}] % not \textampersand
+
+% First, define and set list and rendering parameters
+
+\definebtxrendering
+ [aps]
+ [\c!specification=aps,
+ \c!sorttype=\v!default,
+ \c!numbering=num]
+
+\setupbtxlist
+ [aps]
+ [\c!alternative=b] % spaces
+
+\definebtx
+ [aps:\s!list]
+ [aps]
+ [\c!authorconversion=normalshort]
+
+\definebtx
+ [aps:\s!list:author]
+ [aps:\s!list]
+
+\definebtx
+ [aps:\s!list:editor]
+ [aps:\s!list:author]
+
+\definebtx
+ [aps:\s!list:suffix]
+ [aps:\s!list]
+
+\definebtx
+ [aps:\s!list:url]
+ [aps:\s!list]
+
+\definebtx
+ [aps:\s!list:doi]
+ [aps:\s!list]
+
+\definebtx
+ [aps:\s!list:\s!page]
+ [aps:\s!list]
+ [\c!separator:2={,\space},
+ \c!separator:3={,\space\btxlabeltext{aps:and}\space},
+ \c!separator:4={\space\btxlabeltext{aps:and}\space},
+ \c!left={\btxleftparenthesis},
+ \c!right={\btxrightparenthesis},
+ \c!command={\wordright}]
+
+\definebtx
+ [aps:\s!list:numbering]
+ [aps:\s!list]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [aps:\s!list:numbering:default]
+ [aps:\s!list:numbering]
+
+\definebtx
+ [aps:\s!list:numbering:num]
+ [aps:\s!list:numbering]
+
+\definebtx
+ [aps:\s!list:numbering:short]
+ [aps:\s!list:numbering]
+
+\definebtx
+ [aps:\s!list:numbering:tag]
+ [aps:\s!list:numbering]
+
+\definebtx
+ [aps:\s!list:numbering:index]
+ [aps:\s!list:numbering]
+
+%D In order to be able to get journals expanded (or normalized or abbreviated) you need
+%D to load a list:
+%D
+%D \starttyping
+%D \btxloadjournallist[journals.txt] % the jabref list
+%D \stoptyping
+
+% TODO
+
+\definebtx
+ [aps:\s!list:journal]
+ [aps:\s!list]
+ [\c!style=\v!italic]
+ %command=\btxexpandedjournal] % btxabbreviatedjournal
+
+\definebtx
+ [aps:\s!list:volume]
+ [aps:\s!list]
+ [\c!style=\v!bold]
+
+\definebtx
+ [aps:\s!list:title]
+ [aps:\s!list]
+ [\c!style=\v!italic,
+ \c!command=\Word]
+
+\definebtx
+ [aps:\s!list:title:article]
+ [aps:\s!list:title]
+ [\c!style=] % journal is set in italics
+
+\definebtx
+ [aps:\s!list:title:magazine]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:newspaper]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:periodical]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:standard]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:book]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:inbook]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:incollection]
+ [aps:\s!list:title]
+ [\c!style=] % booktitle is set in italics
+
+\definebtx
+ [aps:\s!list:title:proceedings]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:inproceedings]
+ [aps:\s!list:title]
+ [\c!style=] % booktitle is set in italics
+
+\definebtx
+ [aps:\s!list:title:conference]
+ [aps:\s!list:title]
+ [\c!style=] % booktitle is set in italics
+
+\definebtx
+ [aps:\s!list:title:thesis]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:phdthesis]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:mastersthesis]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:booklet]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:manual]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:techreport]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:unpublished]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:patent]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:electronic]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:other]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:misc]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:title:literal]
+ [aps:\s!list:title]
+
+\definebtx
+ [aps:\s!list:type]
+ [\c!command=\Word]
+
+% Then define and set all cite parameters
+
+\definebtx
+ [aps:\s!cite]
+ [aps]
+ [\c!authorconversion=\v!name,
+ \c!compress=\v!yes,
+ \c!sorttype=normal]
+
+\definebtx
+ [aps:\s!cite:author]
+ [aps:\s!cite]
+
+\definebtx
+ [aps:\s!cite:authoryear]
+ [aps:\s!cite:author]
+ [\c!left={(},
+ \c!right={)},
+ \c!inbetween={,\space}]
+
+\definebtx
+ [aps:\s!cite:authoryears]
+ [aps:\s!cite:authoryear]
+ [\c!left=,
+ \c!right=,
+ \c!inbetween={\space}]
+
+\definebtx
+ [aps:\s!cite:authornum]
+ [aps:\s!cite:author]
+ [\c!left={(},
+ \c!right={)}]
+
+\definebtx
+ [aps:\s!cite:author:num] % todo
+ [aps:\s!cite:authornum]
+ [\c!left={\space[},
+ \c!right={]}]
+
+\definebtx
+ [aps:\s!cite:author:year] % todo
+ [aps:\s!cite:authoryear]
+ [\c!left=,
+ \c!right=]
+
+\definebtx
+ [aps:\s!cite:author:years] % todo
+ [aps:\s!cite:authoryears]
+ [\c!inbetween=,
+ \c!left={\space(},
+ \c!right={)}]
+
+\definebtx
+ [aps:\s!cite:year]
+ [aps:\s!cite]
+
+\definebtx
+ [aps:\s!cite:title]
+ [aps:\s!cite]
+ [\c!command={\language[\currentbtxlanguage]}, % BAH
+ \c!sorttype=none,
+ \c!style=\v!italic]
+
+\definebtx
+ [aps:\s!cite:booktitle]
+ [aps:\s!cite:title]
+
+\definebtx
+ [aps:\s!cite:tag]
+ [aps:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [aps:\s!cite:index]
+ [aps:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [aps:\s!cite:page]
+ [aps:\s!cite]
+ [\c!left=,
+ \c!right=,
+ \c!separator:2={,\space}, % :0 and :1 - between items of a list
+ \c!separator:3={,\space\btxlabeltext{aps:and}\space}, % not \textampersand
+ \c!separator:4= {\space\btxlabeltext{aps:and}\space}] % not \textampersand
+
+\definebtx
+ [aps:\s!cite:pages]
+ [aps:\s!cite:page]
+
+\definebtx
+ [aps:\s!cite:keywords]
+ [aps:\s!cite]
+ [\c!left={(},
+ \c!right={)}]
+
+\definebtx
+ [aps:\s!cite:short]
+ [aps:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [aps:\s!cite:category]
+ [aps:\s!cite]
+
+\definebtx
+ [aps:\s!cite:url]
+ [aps:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [aps:\s!cite:doi]
+ [aps:\s!cite:url]
+
+\definebtx
+ [aps:\s!cite:num]
+ [aps:\s!cite]
+ [\c!left={[},
+ \c!right={]},
+ %\c!left=, % TODO: PRB sets citation numbers as superscripts, after punctuation;
+ %\c!right=, % PRA, C, D, E, and L place them before the punctuation!
+ %\c!command={\high},
+ \c!separator:2={\btxcommabreak},
+ \c!separator:3=\btxparameter{\c!separator:2},
+ \c!separator:4=\btxparameter{\c!separator:2}]
+
+\definebtx
+ [aps:\s!cite:default]
+ [aps:\s!cite:num]
+
+\definebtx
+ [aps:\s!cite:textnum]
+ [aps:\s!cite:num]
+ [\c!left={Ref.\nbsp},
+ \c!command=,
+ \c!separator:2={,\space},
+ \c!separator:3={\space\btxlabeltext{aps:and}\space},
+ \c!separator:4={\space\btxlabeltext{aps:and}\space}]
+
+\definebtx
+ [aps:\s!cite:entry]
+ [aps:\s!cite]
+ [\c!left=,
+ \c!right=,
+ \c!inbetween={\space},
+ \c!separator:2={;\space},
+ \c!separator:3=\btxparameter{\c!separator:2},
+ \c!separator:4=\btxparameter{\c!separator:2}]
+
+%D Sometimes we have verbose injections in an entry and these can be language
+%D dependent, so we use labels.
+%D
+%D Because we want to mix rendering (in the manual) we need a namespace in label
+%D texts:
+
+\setupbtxlabeltext
+ [en]
+ [aps:and=and,
+ aps:number={no.},
+ aps:edition={ed.},
+ aps:Editor={Ed.},
+ aps:Editors={Eds.},
+ aps:Volume={Vol.},
+ aps:Volumes={Vols.},
+ aps:others={et al.},
+ aps:page={p.},
+ aps:pages={pp.},
+ aps:mastersthesis={Master's thesis},
+ aps:phdthesis={Doctoral dissertation},
+ aps:technicalreport={Tech. Rep.}, % Technical report
+ aps:supplement={Suppl.}, % Supplement (not used?)
+ aps:patent=Patent,
+ aps:inpress={in press},
+ aps:tobe={to be published},
+ aps:unpublished={unpublished},
+ aps:In=In]
+
+% Check this (google translate!!):
+
+\setupbtxlabeltext
+ [nl]
+ [aps:and=en,
+ aps:number={nr.},
+ aps:edition={ed.}, % editie
+ aps:Editor=Editor, % Ed./Eds.
+ aps:Editors=Editors,
+ aps:Volume={Vol.},
+ aps:Volumes={Vols.},
+ aps:others={et al.},
+ aps:page={p.},
+ aps:pages={pp.},
+ aps:mastersthesis=Masterproef,
+ aps:phdthesis=Proefschrift,
+ aps:technicalreport={Technisch rapport}, % Technical report
+ aps:supplement=Supplement,
+ aps:patent=Octrooi,
+ aps:inpress={in press}, % CHECK THESE!
+ aps:tobe={worden gepubliceerd},
+ aps:unpublished={onuitgegeven},
+ aps:In=In]
+
+\setupbtxlabeltext
+ [fr]
+ [aps:and=et,
+ aps:number={n\high{o}},
+ aps:edition={édition},
+ aps:Editor=Éditeur,
+ aps:Editors=Éditeurs,
+ aps:Volume=Volume,
+ aps:Volumes=Volumes,
+ aps:others={et al.},
+ aps:page={p.},
+ aps:pages={pp.},
+ aps:mastersthesis={Thèse de master (DEA, DESS, master)},
+ aps:phdthesis={Thèse de doctorat},
+ aps:technicalreport={Rapport technique},
+ aps:supplement=Supplément,
+ aps:patent=Brevet,
+ aps:inpress={sous impression},
+ aps:tobe={à paraître},
+ aps:unpublished={inédit}, % for a book
+ aps:In=Dans]
+
+\setupbtxlabeltext
+ [de]
+ [aps:and=und,
+ aps:number={nr.},
+ aps:edition=Auf\/lage,
+ aps:Editor=Herausgeber, % Hrsg./Hg.
+ aps:Editors=Herausgeber,
+ aps:Volume=Band, % Bd.
+ aps:Volumes={Bände},
+ aps:others={et al.},
+ aps:page={S.},
+ aps:pages={S.},
+ aps:mastersthesis={Masterarbeit},
+ aps:phdthesis={Dissertation},
+ aps:technicalreport={Technischer Bericht},
+ aps:supplement={Beilage}, % Supplement
+ aps:patent=Patent,
+ aps:inpress={in der Presse}, % CHECK THESE!
+ aps:tobe={veröffentlicht werden},
+ aps:unpublished={unveröffentlicht},
+ aps:In=In]
+
+% thanks: Andrea Valle
+
+\setupbtxlabeltext
+ [it]
+ [aps:and=e,
+ aps:number={nº},
+ aps:edition={ed.}, % edizione
+ aps:Editor={A cura di},
+ aps:Editors={A cura di},
+ aps:Volume={Vol.}, % Volume
+ aps:Volumes={Vol.}, % Volumi
+ aps:others={et al.},
+ aps:page={p.},
+ aps:pages={pp.},
+ aps:mastersthesis={Tesi di laurea},
+ aps:phdthesis={Tesi di dottorato},
+ aps:technicalreport={Relazione tecnica},
+ aps:supplement={Supplemento},
+ aps:patent=Brevetto,
+ aps:inpress={in press}, % CHECK THESE!
+ aps:tobe={da pubblicare},
+ aps:unpublished={inedito},
+ aps:In=In]
+
+\setupbtxlabeltext
+ [es]
+ [aps:and=y,
+ aps:number={nº},
+ aps:edition={ed.}, % edición
+ aps:Editor=Editor, % Ed./Eds.
+ aps:Editors=Editores,
+ aps:Volume={Vol.}, % Volumen
+ aps:Volumes={Vols.}, % Volúmenes
+ aps:others={et al.},
+ aps:page={p.},
+ aps:pages={pp.},
+ aps:mastersthesis={Tesis de maestría},
+ aps:phdthesis={Tesis doctoral},
+ aps:technicalreport={Informe técnico},
+ aps:supplement=Suplemento,
+ aps:patent=Patente,
+ aps:inpress={en prensa}, % CHECK THESE!
+ aps:tobe={que se publicará},
+ aps:unpublished={inédito},
+ aps:In=En]
+
+% cite setups
+
+\startsetups btx:aps:nd
+ \doifelse {\currentbtxcategory} {article} {
+ \btxlabeltext{aps:tobe}
+ } {
+ \doifelse {\currentbtxcategory} {book} {
+ \btxlabeltext{aps:inpress}
+ } {
+ \btxlabeltext{aps:unpublished}
+ }
+ }
+\stopsetups
+
+\startsetups btx:aps:cite:author:year
+ \texdefinition{\s!btx:\s!cite:concat}
+ \ifx\currentbtxfirst\empty
+ \fastsetup{btx:aps:nd}
+ \else
+ \texdefinition {\s!btx:\s!cite:inject} {
+ \btxcitereference
+ \currentbtxfirst
+ }
+ \ifx\currentbtxsecond\empty \else
+ \btxparameter\c!inbetween
+ \texdefinition {\s!btx:\s!cite:inject} {
+ \currentbtxsecond
+ }
+ \fi
+ \btxflushsuffix
+ \fi
+\stopsetups
+
+\startsetups btx:aps:cite:author:years
+ \fastsetup{btx:aps:cite:author:year}
+\stopsetups
+
+% used in publ-imp-page.mkvi
+
+\startsetups [btx:aps:list:page-or-pages]
+ \ifx\currentbtxlastpage\empty
+ \btxlabeltext{aps:page}
+ \else
+ \btxlabeltext{aps:pages}
+ \fi
+ \btxnbsp
+\stopsetups
+
+%D Instead of texdefinitions without arguments, we could have used setups but in my
+%D editor (hh, scite) the commands stand out better. It also saves an additional
+%D component in the name (e.g. common:) because commands and setups have a different
+%D namespace, so similar calls don't clash. Performance of definitions is somewhat
+%D better.
+
+%D We use "texdefinitions" (with eventual arguments) for helpers that are used
+%D in the rendering "setups" defined for each category below.
+
+%D Note that \btxdoif... and \btxflush rely on the definitions in
+%D publ-imp-aps.lua: fields that are listed as neither required nor optional are
+%D IGNORED. We also make heavy use of the notion of sets: comma-separated lists
+%D of alternative fields to be used in hierarchical order. For example,
+%D author = { "author", "editor", "publisher", "title" } will return the
+%D author field if it exists; if not, the editor field; if not, the publisher
+%D field; if not, the title field; and if none of them exists, nothing will be
+%D returned. In Lua syntax it can be understood as
+%D
+%D author or editor or publisher or title or ""
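+%D
+%D A rough Lua sketch of that lookup (purely illustrative, not the actual
+%D resolver; the entry shown here is made up):
+%D
+%D \starttyping
+%D local set   = { "author", "editor", "publisher", "title" }
+%D local entry = { editor = "Knuth, Donald E.", title = "Some Title" }
+%D
+%D for i=1,#set do
+%D     local value = entry[set[i]]
+%D     if value then
+%D         -- set[i] ("editor" here) is roughly what \btxfoundname reports,
+%D         -- value is what then gets flushed
+%D         print(set[i],value)
+%D         break
+%D     end
+%D end
+%D \stoptyping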
+
+\starttexdefinition btx:aps:composed-title #title
+ \btxstartstyleandcolor [aps:\s!list:title:\currentbtxcategory]
+ \begingroup
+ \language[\currentbtxlanguage]
+ \btxusecommand [aps:\s!list:title:\currentbtxcategory] {
+ \btxflush{#title}
+ \btxdoif {sub#title} {
+ \btxcolon
+ \btxflush{sub#title}
+ }
+ }
+ \endgroup
+ \btxstopstyleandcolor
+\stoptexdefinition
+
+\starttexdefinition btx:aps:title
+ \btxdoif {title} {
+ % we make the title active, opening file
+ \btxdoifelse {file} {
+ \texdefinition{btx:format:inject}
+ {url(file:\btxflush{file})}
+ {
+ \texdefinition{btx:aps:composed-title}{title}
+ }
+ } {
+ \texdefinition{btx:aps:composed-title}{title}
+ }
+ \btxcomma
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:aps:optional-title
+ \doif{\btxparameter{\c!title}}\v!yes {
+ \texdefinition {btx:aps:title}
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:aps:year
+ \btxdoifelse {year} {
+ \btxflush{year}
+ } {
+ \fastsetup{btx:aps:nd}
+ }
+\stoptexdefinition
+
+% #author may be author(set) or editor
+
+\starttexdefinition btx:aps:author-or-editor #author
+ \btxdoif {#author} {
+ \btxflush{#author}
+ \doifelse {\btxfoundname{#author}} {editor} {
+ \btxleftparenthesis
+ \btxsingularorplural {editor} {
+ \btxlabeltext{aps:Editor}
+ } {
+ \btxlabeltext{aps:Editors}
+ }
+ \btxrightparenthesis
+ } {
+ \btxdoif {collaboration} {
+ \btxleftparenthesis
+ \btxflush{collaboration}
+ \btxrightparenthesis
+ }
+ }
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:aps:author
+ \btxflush{author}
+ \btxcomma
+\stoptexdefinition
+
+\starttexdefinition btx:aps:editor-in
+ \btxdoif {booktitle} {
+ \btxlabeltext{aps:In}
+ \doifnot {\btxfoundname{author}} {editor} {
+ \btxspace
+ \texdefinition{btx:aps:author-or-editor} {editor}
+ }
+ \btxspace
+ \texdefinition{btx:aps:composed-title} {booktitle}
+ \btxcomma
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:aps:editionset
+ \doif {\currentbtxcategory} {techreport} {
+ \btxdoifelse {type} {
+ \btxusecommand [\currentbtx:type] {
+ \btxflush{type}
+ }
+ } {
+ \btxlabeltext{aps:technicalreport}
+ }
+ \btxcomma
+ }
+ \btxdoif {volume} {
+ \btxoneorrange {volume} {
+ \btxlabeltext{aps:Volume}
+ } {
+ \btxlabeltext{aps:Volumes}
+ }
+ \btxspace
+ \btxflush{volume}
+ \btxcomma
+ }
+ \btxdoif {number} {
+ \btxlabeltext{aps:number}
+ \btxspace
+ \btxflush{number}
+ \btxcomma
+ }
+ \btxdoif {edition} {
+ \btxflush{edition}
+ \btxspace
+ \btxlabeltext{aps:edition}
+ \btxcomma
+ }
+ \btxdoif {pages} {
+ \btxoneorrange {pages} {
+ \btxlabeltext{aps:page}
+ } {
+ \btxlabeltext{aps:pages}
+ }
+ \btxnbsp
+ \btxflush{pages}
+ \btxcomma
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:aps:journal-volume-year
+ \btxdoif {journal} {
+ \btxstartstyleandcolor [aps:\s!list:journal]
+ % expandedjournal abbreviatedjournal
+ \btxflush{expandedjournal -> journal}
+ \btxstopstyleandcolor
+ \btxdoifelse {volume} {
+ \btxspace
+ \btxstartstyleandcolor [aps:\s!list:volume]
+ \btxflush{volume}
+ \btxstopstyleandcolor
+ \btxdoif {number} {
+ \removeunwantedspaces(
+ \btxflush{number}
+ \btxrightparenthesiscomma
+ }
+ } {
+ \btxdoif {number} {
+ \btxspace
+ \btxflush{number}
+ \btxcomma
+ }
+ }
+ \btxdoif {pages} {
+ \btxspace
+ \btxflush{pages}
+ }
+ }
+ \btxleftparenthesis
+ \texdefinition{btx:aps:year}
+ \btxrightparenthesis
+\stoptexdefinition
+
+\starttexdefinition btx:aps:publisher-wherefrom-year
+ \removeunwantedspaces
+ \removepunctuation
+ \btxleftparenthesis
+ \btxflush{publisher}
+ \btxdoifelse {address} {
+ \btxdoif {publisher} {
+ \btxcomma
+ }
+ \btxflush{address}
+ \btxdoif {country} {
+ \btxcomma
+ \btxflush{country}
+ }
+ \btxcomma
+ } {
+ \btxdoif {publisher} {
+ \btxcomma
+ }
+ }
+ \texdefinition{btx:aps:year}
+ \btxrightparenthesis
+\stoptexdefinition
+
+\starttexdefinition btx:aps:note
+ \btxperiod
+ \btxdoif {note} {
+ \btxleftparenthesis
+ \btxflush{note}
+ \btxrightparenthesis
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:aps:doi-url #text
+ \ifconditional\btxinteractive
+ \btxdoifelse {doi} {
+ \goto {#text} [url(http://dx.doi.org/\btxflush{doi})]
+ } {
+ \btxdoifelse {url} {
+ \goto {#text} [url(\btxflush{url})]
+ } {
+ #text
+ }
+ }
+ \else
+ #text
+ \fi
+
+\stoptexdefinition
+
+% Then setups, by category
+
+% An article from a journal
+% Required fields: author or editor or title, journal, (year).
+% Optional fields: volume, number, pages, type, doi, url, note.
+% Note that bibtex (and tools) do not include editor (e.g. special issue or section)
+
+\startsetups btx:aps:list:article
+ \texdefinition{btx:aps:author}
+ \texdefinition{btx:aps:optional-title}
+ \texdefinition{btx:aps:doi-url} {\texdefinition{btx:aps:journal-volume-year}}
+ \texdefinition{btx:aps:note}
+\stopsetups
+
+% An article from a magazine.
+% Required fields: author or title, journal, (year).
+% Optional fields: number, pages, type, month, day, doi, url, note.
+
+\startsetups btx:aps:list:magazine
+ \fastsetup{btx:aps:list:article}
+\stopsetups
+
+% An article from a newspaper.
+% Required fields: author or title, journal, (year).
+% Optional fields: number, pages, type, month, day, doi, url, note.
+
+\startsetups btx:aps:list:newspaper
+ \fastsetup{btx:aps:list:article}
+\stopsetups
+
+% A complete issue of a periodical, such as a special issue of a journal.
+% Required fields: title, year
+% Optional fields: editor, publisher, subtitle, series, volume, number, month, organization, doi, url, issn, note
+
+% needs to be tuned...
+\startsetups btx:aps:list:periodical
+ \fastsetup{btx:aps:list:article}
+\stopsetups
+
+% National and international standards issued by a standards body
+% Required fields: author, institution, or organization, year, title
+% Optional fields: subtitle, doi, url, note
+
+\startsetups btx:aps:list:standard
+ \texdefinition{btx:aps:author}
+ \texdefinition{btx:aps:title}
+ \texdefinition{btx:aps:note}
+\stopsetups
+% year?
+
+% A book with an explicit publisher.
+% Required fields: author or editor or publisher, title, (year).
+% Optional fields: volume or number, series, address, edition, month, day, note.
+% APS? ignores: month, day
+
+% todo: series?
+
+\startsetups btx:aps:list:book
+ \texdefinition{btx:aps:author}
+ \texdefinition{btx:aps:title}
+ \texdefinition{btx:aps:editionset}
+ \texdefinition{btx:aps:doi-url} {\texdefinition{btx:aps:publisher-wherefrom-year}}
+ \texdefinition{btx:aps:note}
+\stopsetups
+
+% There is some debate about how inbook should differ from incollection
+
+% A part of a book, which may be a chapter (or section or whatever) and/or a range of pages.
+% (note that inbook is handled differently by bibtex and biblatex)
+% Required fields: author or editor, title, chapter and/or pages, publisher, year.
+% Optional fields: volume or number, series, type, address, edition, month, note.
+% We add optional: booktitle.
+% APS? ignores: chapter, month
+
+\startsetups btx:aps:list:inbook
+ \texdefinition{btx:aps:author}
+ \texdefinition{btx:aps:title}
+ \texdefinition{btx:aps:editor-in}
+ \texdefinition{btx:aps:editionset}
+ \texdefinition{btx:aps:doi-url} {\texdefinition{btx:aps:publisher-wherefrom-year}}
+ \texdefinition{btx:aps:note}
+\stopsetups
+% chapter?
+
+% A part of a book having its own title.
+% Required fields: author, title, booktitle, publisher, year.
+% Optional fields: editor, volume or number, series, type, chapter, pages, address, edition, month, note.
+% APS? ignores: chapter, month
+
+\startsetups btx:aps:list:incollection
+ \fastsetup{btx:aps:list:inbook}
+\stopsetups
+
+% The proceedings of a conference.
+% Required fields: title, year.
+% Optional fields: editor, volume or number, series, address, month, organization, publisher, note.
+% todo: series?
+
+\startsetups btx:aps:list:proceedings
+ \fastsetup{btx:aps:list:book}
+\stopsetups
+
+% An article in a conference proceedings.
+% Required fields: author, title, booktitle, year.
+% Optional fields: editor, volume or number, series, pages, address, month, organization, publisher, note.
+
+\startsetups btx:aps:list:inproceedings
+ \texdefinition{btx:aps:author}
+ \texdefinition{btx:aps:title}
+ \texdefinition{btx:aps:editor-in}
+ \texdefinition{btx:aps:editionset}
+ \btxdoif {organization} {
+ \btxspace
+ \btxflush{organization}
+ \btxcomma
+ }
+ \texdefinition{btx:aps:doi-url} {\texdefinition{btx:aps:publisher-wherefrom-year}}
+ \texdefinition{btx:aps:note}
+\stopsetups
+
+\startsetups btx:aps:list:conference
+ \fastsetup{btx:aps:list:inproceedings}
+\stopsetups
+
+% A thesis.
+% Required fields: author, title, school, year.
+% Optional fields: type, address, month, note.
+
+\startsetups btx:aps:list:thesis
+ \texdefinition{btx:aps:author}
+ \texdefinition{btx:aps:title}
+ \btxleftparenthesis
+ \btxdoifelse {type} {
+ \btxusecommand [aps:list:type] {
+ \btxflush{type}
+ }
+ } {
+ \btxlabeltext{aps:\currentbtxcategory}
+ }
+ \btxrightparenthesis
+ \btxdoif {school} {
+ \btxperiod
+ \btxflush{school}
+ }
+ \btxdoif {address} {
+ \btxdoifelse {school} {
+ \btxcomma
+ } {
+ \btxperiod
+ }
+ \btxflush{address}
+ \btxdoif {country} {
+ \btxcomma
+ \btxflush{country}
+ }
+ }
+ \btxleftparenthesis
+ \texdefinition{btx:aps:year}
+ \btxrightparenthesis
+ \texdefinition{btx:aps:note}
+\stopsetups
+
+\startsetups btx:aps:list:phdthesis
+ \fastsetup{btx:aps:list:thesis}
+\stopsetups
+
+\startsetups btx:aps:list:mastersthesis
+ \fastsetup{btx:aps:list:thesis}
+\stopsetups
+
+% A work that is printed and bound, but without a named publisher or sponsoring institution.
+% Required field: title.
+% Optional fields: author, howpublished, address, month, year, note.
+
+\startsetups btx:aps:list:booklet
+ \fastsetup{btx:aps:list:book}
+\stopsetups
+
+% Technical documentation.
+% Required field: title.
+% Optional fields: author, organization, address, edition, month, year, note.
+
+\startsetups btx:aps:list:manual
+ \fastsetup{btx:aps:list:book}
+\stopsetups
+
+% A report published by a school or other institution, usually numbered within a series.
+% Required fields: author, title, institution, year.
+% Optional fields: type, number, address, month, note.
+
+\startsetups btx:aps:list:techreport
+ \fastsetup{btx:aps:list:book}
+\stopsetups
+
+% A document having an author and title, but not formally published.
+% Required fields: author, title, note.
+% Optional fields: month, year.
+
+\startsetups btx:aps:list:unpublished
+ \fastsetup{btx:aps:list:book}
+\stopsetups
+
+% A patent. Note that this category was not defined with BIBTEX. Below from JabRef:
+% Required fields: nationality, number, year, yearfiled
+% Optional fields: author, title, assignee, address, type, number, day, dayfiled, month, monthfiled, note, url
+% Also optional: publisher
+
+% todo: yearfiled, monthfiled, dayfiled
+
+\startsetups btx:aps:list:patent
+ \texdefinition{btx:aps:author}
+ \texdefinition{btx:aps:title}
+ \begingroup
+ \it
+ \btxdoif {nationality} {
+ \btxspace
+ \btxflush{nationality}
+ }
+ \btxspace
+ \btxlabeltext{aps:patent}
+ \btxdoif {number} {
+ \btxspace
+ \btxlabeltext{aps:number}
+ \btxspace
+ \btxflush{number}
+ }
+ \btxperiod
+ \italiccorrection
+ \endgroup
+ \texdefinition{btx:aps:doi-url} {\texdefinition{btx:aps:publisher-wherefrom-year}}
+ \texdefinition{btx:aps:url}
+ \texdefinition{btx:aps:note}
+\stopsetups
+
+% Electronic. Note that this category was not defined with BIBTEX. Below from JabRef:
+% Required fields: title
+% Optional fields: address, author, howpublished, month, note, organization, url, year, doi
+% Also optional: type
+
+% Like Misc below but includes organization.
+
+\startsetups btx:aps:list:electronic
+ \texdefinition{btx:aps:author}
+ \texdefinition{btx:aps:title}
+ \btxdoif {organization} {
+ \btxspace
+ \btxflush{organization}
+ \btxperiod
+ }
+ \btxdoif {howpublished} {
+ \btxspace
+ \btxflush{howpublished}
+ }
+ \btxleftparenthesis
+ \texdefinition{btx:aps:year}
+ \btxrightparenthesis
+ \texdefinition{btx:aps:note}
+\stopsetups
+
+% Other. Note that this category was not defined with BIBTEX. Below from JabRef:
+% Required fields: author or title, year
+% Optional fields: note, doi, url
+
+\startsetups btx:aps:list:other
+ \fastsetup{btx:aps:list:book}
+\stopsetups
+
+% Use this type when nothing else fits.
+% Required fields: none.
+% Optional fields: author, title, howpublished, month, year, note.
+
+\startsetups btx:aps:list:misc
+ \fastsetup{btx:aps:list:electronic}
+ % Note that organization is an "ignored" field.
+\stopsetups
+
+% If all else fails to match:
+
+\startsetups btx:aps:list:literal
+ %\btxleftparenthesis
+ \removeunwantedspaces(
+ \btxflush{tag}
+ \btxrightparenthesis
+ \btxdoif {text} {
+ \btxflush{text}
+ }
+\stopsetups
+
+\stopbtxrenderingdefinitions
diff --git a/tex/context/base/publ-imp-author.mkvi b/tex/context/base/publ-imp-author.mkvi
new file mode 100644
index 000000000..7529c7aa9
--- /dev/null
+++ b/tex/context/base/publ-imp-author.mkvi
@@ -0,0 +1,303 @@
+% TODO: MAKE default
+
+%D \module
+%D [ file=publ-imp-author,
+%D version=2014.06.23,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Authors,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+% these can be used instead of the macros and they accept manipulator prefixes
+%
+% \currentbtxinitials : \btxauthorfield{initials}
+% \currentbtxfirstnames : \btxauthorfield{firstnames}
+% \currentbtxvons : \btxauthorfield{vons}
+% \currentbtxsurnames : \btxauthorfield{surnames}
+% \currentbtxjuniors : \btxauthorfield{juniors}
+
+\starttexdefinition \s!btx:\s!cite:\s!author:\s!de
+ \ifx\currentbtxlanguage\s!de
+ \setmode{\s!btx:\s!de}
+ \fi
+\stoptexdefinition
+
+\startsetups \s!btx:\s!cite:\s!author:concat
+ \ifcase\currentbtxoverflow
+ \btxparameter{\c!separator:names:\number\currentbtxconcat}
+ \else\ifnum\currentbtxauthorindex>\plusone
+ \btxparameter{\c!separator:names:2}
+ \fi\fi
+\stopsetups
+
+\startsetups \s!btx:\s!cite:\s!author:others
+ \ifcase\currentbtxoverflow \else
+ \btxparameter\c!otherstext
+ \fi
+\stopsetups
+
+\startsetups \s!btx:\s!cite:\s!author:normal
+ \fastsetup{\s!btx:\s!cite:\s!author:concat}
+ \ifx\currentbtxfirstnames\empty \else
+ \currentbtxfirstnames
+ \btxparameter{\c!separator:firstnames}
+ \fi
+ \ifx\currentbtxvons\empty \else
+ \currentbtxvons
+ \ifx\currentbtxsurnames\empty \else
+ \btxparameter{\c!separator:vons}
+ \fi
+ \fi
+ \ifx\currentbtxsurnames\empty \else
+ \currentbtxsurnames
+ \ifx\currentbtxjuniors\empty \else
+ \btxparameter{\c!separator:juniors}
+ \currentbtxjuniors
+ \fi
+ \fi
+ \fastsetup{\s!btx:\s!cite:\s!author:others}
+\stopsetups
+
+\startsetups \s!btx:\s!cite:\s!author:normalshort
+ \fastsetup{\s!btx:\s!cite:\s!author:concat}
+ \ifx\currentbtxinitials\empty \else
+ \currentbtxinitials
+ \btxparameter{\c!separator:initials}
+ \fi
+ \ifx\currentbtxvons\empty \else
+ \currentbtxvons
+ \ifx\currentbtxsurnames\empty \else
+ \btxparameter{\c!separator:vons}
+ \fi
+ \fi
+ \ifx\currentbtxsurnames\empty \else
+ \currentbtxsurnames
+ \ifx\currentbtxjuniors\empty \else
+ \btxparameter{\c!separator:juniors}
+ \currentbtxjuniors
+ \fi
+ \fi
+ \fastsetup{\s!btx:\s!cite:\s!author:others}
+\stopsetups
+
+\startsetups \s!btx:\s!cite:\s!author:inverted
+ \fastsetup{\s!btx:\s!cite:\s!author:concat}
+ \ifx\currentbtxvons\empty \else
+ \texdefinition{\s!btx:\s!cite:\s!author:\s!de}
+ \doifnotmode {\s!btx:\s!de} {
+ \currentbtxvons
+ \btxparameter{\c!separator:vons}
+ }
+ \fi
+ \ifx\currentbtxsurnames\empty \else
+ \currentbtxsurnames
+ \ifx\currentbtxjuniors\empty \else
+ \btxparameter{\c!separator:juniors}
+ \currentbtxjuniors
+ \fi
+ \fi
+ \ifx\currentbtxfirstnames\empty
+ % firstnames are optional
+ \else
+ \btxparameter{\c!separator:invertedfirstnames}
+ \currentbtxfirstnames
+ \fi
+ \ifx\currentbtxvons\empty \else
+ \doifmode {\s!btx:\s!de} {
+ \btxparameter{\c!separator:vons}
+ \currentbtxvons
+ }
+ \fi
+ \fastsetup{\s!btx:\s!cite:\s!author:others}
+\stopsetups
+
+\startsetups \s!btx:\s!cite:\s!author:invertedshort
+ \fastsetup{\s!btx:\s!cite:\s!author:concat}
+ \ifx\currentbtxvons\empty \else
+ \texdefinition{\s!btx:\s!cite:\s!author:\s!de}
+ \doifnotmode {\s!btx:\s!de} {
+ \currentbtxvons
+ \btxparameter{\c!separator:vons}
+ }
+ \fi
+ \ifx\currentbtxsurnames\empty \else
+ \currentbtxsurnames
+ \ifx\currentbtxjuniors\empty \else
+ \btxparameter{\c!separator:juniors}
+ \currentbtxjuniors
+ \fi
+ \fi
+ \ifx\currentbtxinitials\empty
+ % initials are optional
+ \else
+ \btxparameter{\c!separator:invertedinitials}
+ \currentbtxinitials
+ \fi
+ \ifx\currentbtxvons\empty \else
+ \doifmode {\s!btx:\s!de} {
+ \btxparameter{\c!separator:vons}
+ \currentbtxvons
+ }
+ \fi
+ \fastsetup{\s!btx:\s!cite:\s!author:others}
+\stopsetups
+
+\startsetups \s!btx:\s!cite:\s!author:name
+ \fastsetup{\s!btx:\s!cite:\s!author:concat}
+ % is this treated differently in german?
+ \ifx\currentbtxvons\empty \else
+ \currentbtxvons
+ \btxparameter{\c!separator:vons}
+ \fi
+ \currentbtxsurnames
+ \ifcase\currentbtxauthorstate \else
+ % potential clash of names so we force initials
+ \ifx\currentbtxinitials\empty \else
+ \btxparameter{\c!separator:invertedinitials}
+ \currentbtxinitials
+ \fi
+ \fi
+ \fastsetup{\s!btx:\s!cite:\s!author:others}
+\stopsetups
+
+% list (mostly the same)
+
+\startsetups \s!btx:\s!list:\s!author:concat
+ \ifcase\currentbtxoverflow
+ \btxparameter{\c!separator:names:\number\currentbtxconcat}
+ \else\ifnum\currentbtxauthorindex>\plusone
+ \btxparameter{\c!separator:names:2}
+ \fi\fi
+\stopsetups
+
+\startsetups \s!btx:\s!list:\s!author:others
+ \ifcase\currentbtxoverflow \else
+ \btxparameter\c!otherstext
+ \fi
+\stopsetups
+
+\startsetups \s!btx:\s!list:\s!author:normal
+ \fastsetup{\s!btx:\s!list:\s!author:concat}
+ \ifx\currentbtxfirstnames\empty \else
+ \currentbtxfirstnames
+ \btxparameter{\c!separator:firstnames}
+ \fi
+ \ifx\currentbtxvons\empty \else
+ \currentbtxvons
+ \ifx\currentbtxsurnames\empty \else
+ \btxparameter{\c!separator:vons}
+ \fi
+ \fi
+ \ifx\currentbtxsurnames\empty \else
+ \currentbtxsurnames
+ \ifx\currentbtxjuniors\empty \else
+ \btxparameter{\c!separator:juniors}
+ \currentbtxjuniors
+ \fi
+ \fi
+ \fastsetup{\s!btx:\s!list:\s!author:others}
+\stopsetups
+
+\startsetups \s!btx:\s!list:\s!author:normalshort
+ \fastsetup{\s!btx:\s!list:\s!author:concat}
+ \ifx\currentbtxinitials\empty \else
+ \currentbtxinitials
+ \btxparameter{\c!separator:initials}
+ \fi
+ \ifx\currentbtxvons\empty \else
+ \currentbtxvons
+ \ifx\currentbtxsurnames\empty \else
+ \btxparameter{\c!separator:vons}
+ \fi
+ \fi
+ \ifx\currentbtxsurnames\empty \else
+ \currentbtxsurnames
+ \ifx\currentbtxjuniors\empty \else
+ \btxparameter{\c!separator:juniors}
+ \currentbtxjuniors
+ \fi
+ \fi
+ \fastsetup{\s!btx:\s!list:\s!author:others}
+\stopsetups
+
+\startsetups \s!btx:\s!list:\s!author:inverted
+ \fastsetup{\s!btx:\s!list:\s!author:concat}
+ \ifx\currentbtxvons\empty \else
+ \texdefinition{\s!btx:\s!cite:\s!author:\s!de}
+ \doifnotmode {\s!btx:\s!de} {
+ \currentbtxvons
+ \btxparameter{\c!separator:vons}
+ }
+ \fi
+ \ifx\currentbtxsurnames\empty \else
+ \currentbtxsurnames
+ \ifx\currentbtxjuniors\empty \else
+ \btxparameter{\c!separator:juniors}
+ \currentbtxjuniors
+ \fi
+ \fi
+ \ifx\currentbtxfirstnames\empty
+ % firstnames are optional
+ \else
+ \btxparameter{\c!separator:invertedfirstnames}
+ \currentbtxfirstnames
+ \fi
+ \ifx\currentbtxvons\empty \else
+ \doifmode {\s!btx:\s!de} {
+ \btxparameter{\c!separator:vons}
+ \currentbtxvons
+ }
+ \fi
+ \fastsetup{\s!btx:\s!list:\s!author:others}
+\stopsetups
+
+\startsetups \s!btx:\s!list:\s!author:invertedshort
+ \fastsetup{\s!btx:\s!list:\s!author:concat}
+ \ifx\currentbtxvons\empty \else
+ \texdefinition{\s!btx:\s!cite:\s!author:\s!de}
+ \doifnotmode {\s!btx:\s!de} {
+ \currentbtxvons
+ \btxparameter{\c!separator:vons}
+ }
+ \fi
+ \ifx\currentbtxsurnames\empty \else
+ \currentbtxsurnames
+ \ifx\currentbtxjuniors\empty \else
+ \btxparameter{\c!separator:juniors}
+ \currentbtxjuniors
+ \fi
+ \fi
+ \ifx\currentbtxinitials\empty
+ % initials are optional
+ \else
+ \btxparameter{\c!separator:invertedinitials}
+ \currentbtxinitials
+ \fi
+ \ifx\currentbtxvons\empty \else
+ \doifmode {\s!btx:\s!de} {
+ \btxparameter{\c!separator:vons}
+ \currentbtxvons
+ }
+ \fi
+ \fastsetup{\s!btx:\s!list:\s!author:others}
+\stopsetups
+
+\startsetups \s!btx:\s!list:\s!author:name
+ \fastsetup{\s!btx:\s!list:\s!author:concat}
+ % is this treated differently in german?
+ \ifx\currentbtxvons\empty \else
+ \currentbtxvons
+ \btxparameter{\c!separator:vons}
+ \fi
+ \currentbtxsurnames
+ \fastsetup{\s!btx:\s!list:\s!author:others}
+\stopsetups
+
+\protect
diff --git a/tex/context/base/publ-imp-cite.mkvi b/tex/context/base/publ-imp-cite.mkvi
new file mode 100644
index 000000000..7ce9ea0da
--- /dev/null
+++ b/tex/context/base/publ-imp-cite.mkvi
@@ -0,0 +1,281 @@
+%D \module
+%D [ file=publ-imp-cite,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Citations,
+%D author=Alan Braslau and Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+\starttexdefinition btx:cite:inject #content
+ \ifconditional\btxinteractive
+ \ifx\currentbtxinternal\empty
+ #content
+ \else
+ \goto {
+ #content
+ } [
+ \s!internal(\currentbtxinternal)
+ ]
+ \fi
+ \else
+ #content
+ \fi
+\stoptexdefinition
+
+\starttexdefinition btx:cite:checkconcat
+ \ifnum\currentbtxcount>\zerocount
+ \let\currentbtxinternal\empty
+ \let\currentbtxbacklink\empty
+ \fi
+\stoptexdefinition
+
+% The null case:
+
+\startsetups btx:cite:none
+ \btxcitereference
+ % dummy
+\stopsetups
+
+\startsetups btx:cite:nocite
+ \dontleavehmode
+ \btxcitereference
+\stopsetups
+
+\startsetups btx:cite:unknown
+ \begingroup
+ \tttf
+ \btxcitereference
+ \currentbtxfirst
+ \endgroup
+\stopsetups
+
+\startsetups btx:cite:empty
+ \btxcitereference
+
+\stopsetups
+
+\startsetups btx:cite:invalid
+ \btxcitereference
+ {\tt <\currentbtxreference>}
+\stopsetups
+
+\starttexdefinition btx:cite:concat
+ \btxparameter{\c!separator:\number\currentbtxconcat}
+\stoptexdefinition
+
+% when we have an author-year combination, the first and second are not plain
+% field data but something more complex (that itself calls for a setup)
+
+% \startsetups btx:cite:normal
+% \texdefinition{\s!btx:\s!cite:concat}
+% \currentbtxlefttext
+% \ifx\currentbtxfirst\empty
+% \fastsetup{\s!btx:\s!cite:\s!empty}
+% \else
+% \texdefinition {\s!btx:\s!cite:inject} {
+% \btxcitereference
+% \btxusecommand [\currentbtxspecification:cite:\currentbtxcitealternative] {
+% \currentbtxfirst
+% }
+% }
+% \ifx\currentbtxsecond\empty
+% \btxflushsuffix
+% \else
+% \btxparameter\c!inbetween
+% \texdefinition {\s!btx:\s!cite:inject} {
+% \btxusecommand [\currentbtxspecification:cite:\currentbtxcitealternative] {
+% \currentbtxsecond
+% }
+% }
+% % quite likely an error
+% \btxflushsuffix
+% \fi
+% \fi
+% \currentbtxrighttext
+% \stopsetups
+
+\startsetups btx:cite:normal
+ \texdefinition{\s!btx:\s!cite:concat}
+ \currentbtxlefttext
+ \ifx\currentbtxfirst\empty
+ \fastsetup{\s!btx:\s!cite:\s!empty}
+ \else\ifx\currentbtxsecond\empty
+ \texdefinition {\s!btx:\s!cite:inject} {
+ \btxcitereference
+ \btxusecommand [\currentbtxspecification:cite:\currentbtxcitealternative] {
+ \currentbtxfirst
+ \btxflushsuffix
+ }
+ }
+ \else
+ % \texdefinition {\s!btx:\s!cite:inject} {
+ % \btxcitereference
+ % \btxusecommand [\currentbtxspecification:cite:\currentbtxcitealternative] {
+ % \currentbtxfirst
+ % }
+ % }
+ % \btxparameter\c!inbetween
+ % \texdefinition {\s!btx:\s!cite:inject} {
+ % \btxusecommand [\currentbtxspecification:cite:\currentbtxcitealternative] {
+ % \currentbtxsecond
+ % }
+ % }
+ % \btxflushsuffix
+ %
+ % probably better:
+ \texdefinition {\s!btx:\s!cite:inject} {
+ \btxcitereference
+ \btxusecommand [\currentbtxspecification:cite:\currentbtxcitealternative] {
+ \currentbtxfirst
+ \btxparameter\c!inbetween
+ \currentbtxsecond
+ \btxflushsuffix
+ }
+ }
+ \fi\fi
+ \currentbtxrighttext
+\stopsetups
+
+
+\startsetups btx:cite:range
+ \texdefinition{\s!btx:\s!cite:concat}
+ \currentbtxlefttext
+ \ifx\currentbtxfirst\empty
+ \fastsetup{\s!btx:\s!cite:\s!empty}
+ \else
+ \texdefinition {\s!btx:\s!cite:inject} {
+ \btxcitereference
+ \btxusecommand [\currentbtxspecification:cite:\currentbtxcitealternative] {
+ \currentbtxfirst
+ \ifx\currentbtxsecond\empty \else
+ \btxparameter\c!range
+ \currentbtxsecond
+ \fi
+ \btxflushsuffix
+ }
+ }
+ \fi
+ \currentbtxrighttext
+\stopsetups
+
+% somehow related to keywords:
+
+\startsetups btx:cite:listelement
+ \texdefinition{\s!btx:\s!cite:concat}
+ \currentbtxlefttext
+ \ifx\currentbtxfirst\empty
+ \fastsetup{\s!btx:\s!cite:\s!empty}
+ \else
+ \texdefinition {\s!btx:\s!cite:inject} {
+ \btxcitereference
+ \currentbtxfirst
+ }
+ \fi
+ \currentbtxrighttext
+\stopsetups
+
+\startsetups \s!btx:\s!cite:entry
+ \texdefinition{\s!btx:\s!cite:concat}
+ \currentbtxlefttext
+ \btxhandleciteentry
+ \currentbtxrighttext
+\stopsetups
+
+% these three are goodies to get something rendered but they get no dedicated
+% setup, as it makes little sense to have a specific root setup for combinations
+% like this (especially since one gets the default anyway)
+
+% AB: not so sure about that. Why define them in default rather than here?
+
+\startsetups \s!btx:\s!cite:author
+ \fastsetup{\s!btx:\s!cite:normal}
+\stopsetups
+\startsetups \s!btx:\s!cite:authoryear
+ \fastsetup{\s!btx:\s!cite:normal}
+\stopsetups
+\startsetups \s!btx:\s!cite:authoryears
+ \fastsetup{\s!btx:\s!cite:normal}
+\stopsetups
+\startsetups \s!btx:\s!cite:authornum
+ \fastsetup{\s!btx:\s!cite:normal}
+\stopsetups
+\startsetups \s!btx:\s!cite:num
+ \fastsetup{\s!btx:\s!cite:range}
+\stopsetups
+\startsetups \s!btx:\s!cite:default
+ \fastsetup{\s!btx:\s!cite:num}
+\stopsetups
+\startsetups \s!btx:\s!cite:textnum
+ \fastsetup{\s!btx:\s!cite:num}
+\stopsetups
+\startsetups \s!btx:\s!cite:year
+ \fastsetup{\s!btx:\s!cite:range}
+\stopsetups
+\startsetups \s!btx:\s!cite:author:num
+ \fastsetup{\s!btx:\s!cite:range}
+\stopsetups
+\startsetups \s!btx:\s!cite:author:year
+ \fastsetup{\s!btx:\s!cite:range}
+\stopsetups
+\startsetups \s!btx:\s!cite:author:years
+ \fastsetup{\s!btx:\s!cite:range}
+\stopsetups
+
+\startsetups \s!btx:\s!cite:index
+ \fastsetup{\s!btx:\s!cite:range}
+\stopsetups
+\startsetups \s!btx:\s!cite:category
+ \fastsetup{\s!btx:\s!cite:normal}
+\stopsetups
+\startsetups \s!btx:\s!cite:short
+ \fastsetup{\s!btx:\s!cite:normal}
+\stopsetups
+\startsetups \s!btx:\s!cite:tag
+ \fastsetup{\s!btx:\s!cite:normal}
+\stopsetups
+
+% the following correspond to fields, but can be used in many renderings
+
+\startsetups \s!btx:\s!cite:keywords
+ \fastsetup{\s!btx:\s!cite:list}
+\stopsetups
+\startsetups \s!btx:\s!cite:title
+ \fastsetup{\s!btx:\s!cite:normal}
+\stopsetups
+\startsetups \s!btx:\s!cite:pages
+ \fastsetup{\s!btx:\s!cite:range}
+\stopsetups
+
+% is the next one used?
+% Yes: bibtex data is messy and an entry can have "pages" or sometimes "page"
+
+\startsetups \s!btx:\s!cite:page
+ \fastsetup{\s!btx:\s!cite:normal}
+\stopsetups
+
+% the following is kind of specific, but can be used in many renderings
+
+\startsetups btx:cite:url
+ \ifx\currentbtxfirst\empty
+ \fastsetup{\s!btx:\s!cite:\s!empty}
+ \else\ifconditional\btxinteractive
+ \goto {
+ \btxcitereference
+ \hyphenatedurl{\doif{\currentbtxcitealternative}{doi}{doi:}\currentbtxfirst}
+ } [
+ url(\doif{\currentbtxcitealternative}{doi}{http://dx.doi.org/}\currentbtxfirst)
+ ]
+ \else
+ \btxcitereference
+ \hyphenatedurl{\doif{\currentbtxcitealternative}{doi}{doi:}\currentbtxfirst}
+ \fi\fi
+\stopsetups
+
+\protect
diff --git a/tex/context/base/publ-imp-commands.mkvi b/tex/context/base/publ-imp-commands.mkvi
new file mode 100644
index 000000000..14e2dbae1
--- /dev/null
+++ b/tex/context/base/publ-imp-commands.mkvi
@@ -0,0 +1,15 @@
+\unprotect
+
+% for tugboat
+
+\definebtxcommand\hbox {\hbox}
+\definebtxcommand\vbox {\vbox}
+\definebtxcommand\llap {\llap}
+\definebtxcommand\rlap {\rlap}
+\definebtxcommand\url #1{\hyphenatedurl{#1}}
+\definebtxcommand\acro #1{\dontleavehmode{\smallcaps#1}}
+
+\let\<<
+\let\>>
+
+\protect \endinput
diff --git a/tex/context/base/publ-imp-default.lua b/tex/context/base/publ-imp-default.lua
new file mode 100644
index 000000000..61b08f30c
--- /dev/null
+++ b/tex/context/base/publ-imp-default.lua
@@ -0,0 +1,124 @@
+-- For the moment I put this here as an example. When writing the publication modules
+-- we explored several approaches: pure tex, pure lua, a mix with xml, etc. In the end
+-- each has advantages and drawbacks, so we ended up with readable tex plus helpers in
+-- lua. Anyway, here is a lua variant of a setup ... it doesn't look nicer. An
+-- alternative could be to build a table of characters, but then we would need to pass
+-- left, right and other separators, so again no real gain.
+
+-- function publications.maybe.default.journal(currentdataset,currenttag)
+-- if publications.okay(currentdataset,currenttag,"journal") then
+-- context.btxspace()
+-- context.startbtxstyle("italic")
+-- commands.btxflush(currentdataset,currenttag,"expandedjournal -> journal")
+-- context.stopbtxstyle()
+-- if publications.okay(currentdataset,currenttag,"volume") then
+-- context.btxspace()
+-- commands.btxflush(currentdataset,currenttag,"volume")
+-- if publications.okay(currentdataset,currenttag,"number") then
+-- context.ignorespaces()
+-- context.btxleftparenthesis()
+-- commands.btxflush(currentdataset,currenttag,"number")
+-- context.btxrightparenthesis()
+-- end
+-- elseif publications.okay(currentdataset,currenttag,"number") then
+-- context.btxlabeltext("default:number")
+-- context.btxspace()
+-- commands.btxflush(currentdataset,currenttag,"number")
+-- end
+-- if publications.okay(currentdataset,currenttag,"pages") then
+-- context.btxcomma()
+-- commands.btxflush(currentdataset,currenttag,"pages")
+-- end
+-- context.btxcomma()
+-- end
+-- end
+
+return {
+ --
+ -- metadata
+ --
+ name = "default",
+ version = "1.00",
+ comment = "DEFAULT specification",
+ author = "Alan Braslau and Hans Hagen",
+ copyright = "ConTeXt development team",
+ --
+ -- derived (combinations of) fields (all share the same default set)
+ --
+ virtual = {
+ "authoryear",
+ "authoryears",
+ "authornum",
+ "num",
+ "suffix",
+ },
+ --
+ -- special datatypes
+ --
+ types = {
+ author = "author", -- interpreted as name(s)
+ editor = "author", -- interpreted as name(s)
+ page = "pagenumber", -- number or range: f--t -- maybe just range
+ pages = "pagenumber", -- number or range: f--t -- maybe just range
+ volume = "range", -- number or range: f--t
+ number = "range", -- number or range: f--t
+ keywords = "keyword", -- comma|-|separated list
+ },
+ --
+ -- categories with their specific fields
+ --
+ categories = {
+ --
+ -- the following fields are for documentation and testing purposes
+ --
+ ["demo-a"] = {
+ sets = {
+ author = { "author", "institution", "organization" },
+ },
+ required = { "author", "title", "year" },
+ optional = { "subtitle" },
+ },
+ ["demo-b"] = {
+ sets = {
+ authors = { "author", "institution", "organization" },
+ },
+ required = { "authors", "title", "year" },
+ optional = { "subtitle" },
+ },
+ --
+ -- we only provide article and book (maybe a few more later) and we keep it
+ -- real simple. See the apa and aps definitions for more extensive examples
+ --
+ article = {
+ sets = {
+ author = { "author", "editor" },
+ },
+ required = {
+ "author", -- a set
+ "year",
+ },
+ optional = {
+ "title",
+ "keywords",
+ "journal", "volume", "number", "pages",
+ },
+ },
+ book = {
+ sets = {
+ author = { "author", "editor", },
+ editionset = { "edition", "volume", "number" },
+ },
+ required = {
+ "author", -- a set
+ "title",
+ "year",
+ },
+ optional = {
+ "subtitle",
+ "keywords",
+ "publisher", "address",
+ "editionset",
+ },
+ },
+ },
+}
diff --git a/tex/context/base/publ-imp-default.mkvi b/tex/context/base/publ-imp-default.mkvi
new file mode 100644
index 000000000..c7c88d74a
--- /dev/null
+++ b/tex/context/base/publ-imp-default.mkvi
@@ -0,0 +1,482 @@
+%D \module
+%D [ file=publ-imp-default,
+%D version=2014.02.06,
+%D title=Default bibliography style,
+%D subtitle=Publications,
+%D author=Alan Braslau and Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is therefore copyrighted
+%C by \PRAGMA. See mreadme.pdf for details.
+
+%D This default style defines only a few categories: book and article.
+%D If you want more, you need to load a more complete style such as \type {apa},
+%D \type {aps}, etc. The default style is used in the manuals that ship with
+%D \CONTEXT. This file is always loaded.
+
+\startbtxrenderingdefinitions[\s!default]
+
+\definebtxrendering
+ [\s!default]
+ [\c!specification=\s!default,
+ \c!sorttype=\v!default,
+ \c!numbering=num]
+
+\definebtx
+ [\s!default]
+ [\c!default=, % we do not want to fall|-|back on ourselves.
+ \c!otherstext={\space\btxlabeltext{default:others}},
+ %c!journalconversion=\v!normal,
+ \c!monthconversion=\v!number,
+ \c!separator:names:2={,\space},
+ \c!separator:names:3={\space\btxlabeltext{default:and}\space},
+ \c!separator:names:4={\space\btxlabeltext{default:and}\space}]
+
+\definebtx
+ [\s!default:\s!list]
+ [\s!default]
+ [\c!authorconversion=normalshort]
+
+\definebtx
+ [\s!default:\s!cite]
+ [\s!default]
+ [\c!alternative=num,
+ \c!compress=\v!yes,
+ \c!sorttype=normal,
+ \c!authorconversion=\v!name]
+
+% List variants, some having specific settings:
+
+\definebtx
+ [\s!default:\s!list:\s!page]
+ [\s!default:\s!list]
+ [\c!separator:2={,\space},
+ \c!separator:3={,\space\btxlabeltext{default:and}\space},
+ \c!separator:4={\space\btxlabeltext{default:and}\space},
+ \c!left={\btxleftparenthesis},
+ \c!right={\btxrightparenthesis}]
+
+\definebtx
+ [\s!default:\s!list:numbering]
+ [\s!default:\s!list]
+
+\definebtx
+ [\s!default:\s!list:numbering:default]
+ [\s!default:\s!list:numbering]
+
+\definebtx
+ [\s!default:\s!list:numbering:num]
+ [\s!default:\s!list:numbering]
+
+\definebtx
+ [\s!default:\s!list:numbering:short]
+ [\s!default:\s!list:numbering:num]
+
+\definebtx
+ [\s!default:\s!list:numbering:tag]
+ [\s!default:\s!list:numbering:num]
+
+\definebtx
+ [\s!default:\s!list:numbering:index]
+ [\s!default:\s!list:numbering:num]
+
+\definebtx
+ [\s!default:\s!list:author]
+ [\s!default:\s!list]
+
+\definebtx
+ [\s!default:\s!list:editor]
+ [\s!default:\s!list:author]
+
+\definebtx
+ [\s!default:\s!list:url]
+ [\s!default:\s!list]
+
+\definebtx
+ [\s!default:\s!list:doi]
+ [\s!default:\s!list]
+
+\definebtx
+ [\s!default:\s!list:short]
+ [\s!default:\s!list]
+
+\definebtx
+ [\s!default:\s!list:journal]
+ [\s!default:\s!list]
+ [\c!style=\v!italic]
+
+\definebtx
+ [\s!default:\s!list:title]
+ [\s!default:\s!list]
+ [\c!style=\v!italic,
+ \c!command=\Word]
+
+\definebtx
+ [\s!default:\s!list:title:article]
+ [\s!default:\s!list:title]
+ [\c!style=, % journal is set in italics
+ \c!command={\quotation\Word}]
+
+\definebtx
+ [\s!default:\s!list:title:book]
+ [\s!default:\s!list:title]
+
+% Citation variants, some having specific settings :
+
+\definebtx
+ [\s!default:\s!cite:author]
+ [\s!default:\s!cite]
+
+\definebtx
+ [\s!default:\s!cite:authornum]
+ [\s!default:\s!cite:author]
+ [\c!left={(},
+ \c!right={)}]
+
+\definebtx
+ [\s!default:\s!cite:authoryear]
+ [\s!default:\s!cite:author]
+ [\c!left={(},
+ \c!right={)},
+ \c!inbetween={,\space}]
+
+\definebtx
+ [\s!default:\s!cite:authoryears]
+ [\s!default:\s!cite:author]
+
+\definebtx
+ [\s!default:\s!cite:author:num] % todo
+ [\s!default:\s!cite:authornum]
+ [\c!left={\space[},
+ \c!right={]}]
+
+\definebtx
+ [\s!default:\s!cite:author:year] % todo
+ [\s!default:\s!cite:authoryear]
+ [\c!left=,
+ \c!right=]
+
+\definebtx
+ [\s!default:\s!cite:author:years] % todo
+ [\s!default:\s!cite:authoryears]
+ [\c!inbetween=,
+ \c!left={\space(},
+ \c!right={)}]
+
+\definebtx
+ [\s!default:\s!cite:year]
+ [\s!default:\s!cite]
+
+\definebtx
+ [\s!default:\s!cite:title]
+ [\s!default:\s!cite]
+ [\c!command={\language[\currentbtxlanguage]}, % BAH
+ \c!style=\v!italic]
+
+\definebtx
+ [\s!default:\s!cite:tag]
+ [\s!default:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [\s!default:\s!cite:index]
+ [\s!default:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [\s!default:\s!cite:page]
+ [\s!default:\s!cite]
+ [\c!left=,
+ \c!right=]
+
+\definebtx
+ [\s!default:\s!cite:pages]
+ [\s!default:\s!cite:page]
+
+\definebtx
+ [\s!default:\s!cite:keywords]
+ [\s!default:\s!cite]
+ [\c!left={(},
+ \c!right={)}]
+
+\definebtx
+ [\s!default:\s!cite:short]
+ [\s!default:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [\s!default:\s!cite:category]
+ [\s!default:\s!cite]
+
+\definebtx
+ [\s!default:\s!cite:url]
+ [\s!default:\s!cite]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtx
+ [\s!default:\s!cite:doi]
+ [\s!default:\s!cite:url]
+
+\definebtx
+ [\s!default:\s!cite:num]
+ [\s!default:\s!cite]
+ [\c!left={[},
+ \c!right={]},
+ \c!separator:2=\btxcommabreak,
+ \c!separator:3=\btxparameter{\c!separator:2},
+ \c!separator:4=\btxparameter{\c!separator:2}]
+
+\definebtx
+ [\s!default:\s!cite:default]
+ [\s!default:\s!cite:num]
+
+\definebtx
+ [\s!default:\s!cite:textnum]
+ [\s!default:\s!cite:num]
+ [\c!left=, % in apa: {Ref.\nbsp} or so
+ \c!right=,
+ \c!separator:2={,\space},
+ \c!separator:3={,\space\btxlabeltext{default:and}\space},
+ \c!separator:4= {\space\btxlabeltext{default:and}\space}]
+
+\definebtx
+ [\s!default:\s!cite:entry]
+ [\s!default:\s!cite]
+ [\c!left={(},
+ \c!right={)}]
+
+% Multilingual text strings
+
+\setupbtxlabeltext
+ [en]
+ [\s!default:and=and,
+ \s!default:number={no.},
+ \s!default:edition={ed.},
+ \s!default:Editor=Editor, % Ed./Eds.
+ \s!default:Editors=Editors,
+ \s!default:Volume={Vol.},
+ \s!default:Volumes={Vols.},
+ \s!default:others={et al.}]
+
+\setupbtxlabeltext
+ [nl]
+ [\s!default:and=en,
+ \s!default:number={nr.},
+ \s!default:edition={ed.}, % editie
+ \s!default:Editor=Editor, % Ed./Eds.
+ \s!default:Editors=Editors,
+ \s!default:Volume={Vol.},
+ \s!default:Volumes={Vols.},
+ \s!default:others={et al.}]
+
+\setupbtxlabeltext
+ [fr]
+ [\s!default:and=et,
+ \s!default:number={n\high{o}},
+ \s!default:edition={édition},
+ \s!default:Editor=Éditeur,
+ \s!default:Editors=Éditeurs,
+ \s!default:Volume=Volume,
+ \s!default:Volumes=Volumes,
+ \s!default:others={et al.}]
+
+\setupbtxlabeltext
+ [de]
+ [\s!default:and=und,
+ \s!default:number={nr.},
+ \s!default:edition=Auf\/lage,
+ \s!default:Editor=Herausgeber, % Hrsg./Hg.
+ \s!default:Editors=Herausgeber,
+ \s!default:Volume=Band, % Bd.
+ \s!default:Volumes={Bände},
+ \s!default:others={et al.}]
+
+\setupbtxlabeltext
+ [it]
+ [\s!default:and=e,
+ \s!default:number={nº},
+ \s!default:edition={ed.}, % edizione
+ \s!default:Editor={A cura di},
+ \s!default:Editors={A cura di},
+ \s!default:Volume={Vol.}, % Volume
+ \s!default:Volumes={Vol.}, % Volumi
+ \s!default:others={et al.}]
+
+\setupbtxlabeltext
+ [es]
+ [\s!default:and=y,
+ \s!default:number={nº},
+ \s!default:edition={ed.}, % edición
+ \s!default:Editor=Editor, % Ed./Eds.
+ \s!default:Editors=Editores,
+ \s!default:Volume={Vol.}, % Volumen
+ \s!default:Volumes={Vols.}, % Volúmenes
+ \s!default:others={et al.}]
+
+% First some helpers:
+
+\starttexdefinition btx:default:composed-title
+ \begingroup
+ \language[\currentbtxlanguage]
+ \btxflush{title}
+ \btxdoif {subtitle} {
+ \btxcolon
+ \btxflush{subtitle}
+ }
+ \endgroup
+\stoptexdefinition
+
+\starttexdefinition btx:default:title
+ \btxdoif {title} {
+ \btxspace
+ \btxstartstyleandcolor [default:list:title:\currentbtxcategory]
+ \btxusecommand [default:list:title:\currentbtxcategory] {
+ \texdefinition{btx:default:composed-title}
+ }
+ \btxstopstyleandcolor
+ \btxcomma
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:default:author
+ \btxdoif {author} {
+ \btxflush{author}
+ \doif {\btxfoundname{author}} {editor} {
+ \btxcomma
+ \btxsingularorplural {editor} {
+ \btxlabeltext{default:Editor}
+ } {
+ \btxlabeltext{default:Editors}
+ }
+ }
+ \btxcomma
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:default:year
+ \btxflush{year}
+ \btxflushsuffix
+\stoptexdefinition
+
+\starttexdefinition btx:default:journal
+ \btxdoif {journal} {
+ \btxspace
+ \btxstartstyleandcolor [default:list:journal]
+ \btxusecommand [default:list:journal] {
+ \btxflush{journal}
+ }
+ \btxstopstyleandcolor
+ \btxdoifelse {volume} {
+ \btxspace
+ \btxflush{volume}
+ \btxdoif {number} {
+ \ignorespaces % brrr
+ \btxleftparenthesis
+ \btxflush{number}
+ \btxrightparenthesis
+ }
+ } {
+ \btxdoif {number} {
+ \btxlabeltext{default:number}
+ \btxspace
+ \btxflush{number}
+ }
+ }
+ \btxdoif {pages} {
+ \btxcomma
+ \btxflush{pages}
+ }
+ \btxcomma
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:default:editionset
+ \btxdoif {editionset} {
+ \removeunwantedspaces
+ \removepunctuation
+ \btxleftparenthesis
+ \doif {\btxfoundname{editionset}} {edition} {
+ \btxflush{edition}
+ \btxspace
+ \btxlabeltext{default:edition}
+ \btxcomma
+ }
+ \btxdoif {volume} {
+ \btxoneorrange {volume} {
+ \btxlabeltext{default:Volume}
+ } {
+ \btxlabeltext{default:Volumes}
+ }
+ \btxspace
+ \btxflush{volume}
+ \btxcomma
+ }
+ \btxdoifelse {number} {
+ \btxlabeltext{default:number}
+ \btxspace
+ \btxflush{number}
+ } {
+ \removeunwantedspaces
+ \removepunctuation
+ }
+ \btxrightparenthesiscomma
+ }
+\stoptexdefinition
+
+\starttexdefinition btx:default:publisher
+ \btxdoif {publisher} {
+ \btxflush{publisher}
+ \btxcomma
+ }
+ \btxdoif {address} {
+ \btxflush{address}
+ \btxcomma
+ }
+\stoptexdefinition
+
+% Then a minimal number of setups:
+
+\startsetups btx:default:list:article
+ \texdefinition{btx:default:author}
+ \texdefinition{btx:default:title}
+ \texdefinition{btx:default:journal}
+ \texdefinition{btx:default:year}
+ \removeunwantedspaces
+ \removepunctuation
+ \btxperiod
+\stopsetups
+
+\startsetups btx:default:list:book
+ \texdefinition{btx:default:author}
+ \texdefinition{btx:default:title}
+ \texdefinition{btx:default:editionset}
+ \texdefinition{btx:default:publisher}
+ \texdefinition{btx:default:year}
+ \removeunwantedspaces
+ \removepunctuation
+ \btxperiod
+\stopsetups
+
+\startsetups btx:default:list:unknown
+ \currentbtxcategory\btxcolon
+ \btxshowentryinline
+\stopsetups
+
+%D Experiment:
+
+\startsetups btx:default:lefttext
+ \currentbtxlefttext
+\stopsetups
+
+\startsetups btx:default:righttext
+ \currentbtxrighttext
+\stopsetups
+
+\stopbtxrenderingdefinitions
diff --git a/tex/context/base/publ-imp-definitions.mkvi b/tex/context/base/publ-imp-definitions.mkvi
new file mode 100644
index 000000000..8dfa931b3
--- /dev/null
+++ b/tex/context/base/publ-imp-definitions.mkvi
@@ -0,0 +1,123 @@
+%D \module
+%D [ file=publ-imp-def,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Definitions,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D Here we collect some helper setups. We assume that checking of a field
+%D happens in the calling setup, if only because that is the place where
+%D fences are also dealt with.
+
+% These will become texdefinitions
+
+\unprotect
+
+\startxmlsetups btx:format:crossref
+ \cite[\btxfield{crossref}]
+\stopxmlsetups
+
+\startxmlsetups btx:format:key
+ \btxfield{short}
+\stopxmlsetups
+
+\starttexdefinition btx:format:inject #link #content
+ \ifx\currentbtxinternal\empty
+ #content
+ \else\ifconditional\btxinteractive
+ \goto {#content} [#link]
+ \else
+ #content
+ \fi\fi
+\stoptexdefinition
+
+\startxmlsetups btx:format:doi
+ \edef\currentbtxfielddoi{\btxfield{doi}}
+ \ifx\currentbtxfielddoi\empty
+ {\tttf no-doi}
+ \else\ifconditional\btxinteractive
+ \goto{\hyphenatedurl{\currentbtxfielddoi}}[url(http://dx.doi.org/\currentbtxfielddoi)]
+ \else
+ \hyphenatedurl{\currentbtxfielddoi}
+ \fi\fi
+\stopxmlsetups
+
+\startxmlsetups btx:format:url
+ \edef\currentbtxfieldurl{\btxfield{url}}
+ \ifx\currentbtxfieldurl\empty
+ {\tttf no-url}
+ \else\ifconditional\btxinteractive
+ \goto{\hyphenatedurl{\currentbtxfieldurl}}[url(\currentbtxfieldurl)]
+ \else
+ \hyphenatedurl{\currentbtxfieldurl}
+ \fi\fi
+\stopxmlsetups
+
+\startxmlsetups btx:format:year
+ \edef\currentbtxfieldyear{\btxfield{year}}
+ \ifx\currentbtxfieldyear\empty
+ \btxlabeltext{\currentbtxspecification:nd}
+ \fi
+\stopxmlsetups
+
+\startxmlsetups btx:format:month
+ \edef\currentbtxfieldmonth{\btxfield{month}}
+ \ifx\currentbtxfieldmonth\empty
+ {\tttf no-month}
+ \else
+ \edef\p_monthconversion{\btxparameter\c!monthconversion}
+ \ifx\p_monthconversion\empty % month month:mnem
+ \currentbtxfieldmonth
+ \else
+ \doifelsenumber \currentbtxfieldmonth {
+ \convertnumber\p_monthconversion\currentbtxfieldmonth
+ } {
+ \currentbtxfieldmonth
+ }
+ \fi
+ \fi
+\stopxmlsetups
+
+% macros:
+
+\starttexdefinition btx:style:italic #content
+ \dontleavehmode
+ \begingroup
+ \it
+ #content
+ \italiccorrection
+ \endgroup
+\stoptexdefinition
+
+\starttexdefinition btx:style:bold #content
+ \dontleavehmode
+ \begingroup
+ \bf
+ #content
+ \endgroup
+\stoptexdefinition
+
+\starttexdefinition btx:style:quote #content
+ \dontleavehmode
+ \startquote
+ #content
+ \stopquote
+\stoptexdefinition
+
+\starttexdefinition btx:style #style #content
+ \doifelsedefined {btx:style:#style} {
+ \texdefinition{btx:style:#style} {
+ #content
+ }
+ } {
+ #content
+ }
+\stoptexdefinition
+
+\protect
diff --git a/tex/context/base/publ-imp-list.mkvi b/tex/context/base/publ-imp-list.mkvi
new file mode 100644
index 000000000..23256de33
--- /dev/null
+++ b/tex/context/base/publ-imp-list.mkvi
@@ -0,0 +1,96 @@
+%D \module
+%D [ file=publ-imp-list,
+%D version=2014.06.16,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Lists,
+%D author=Alan Braslau and Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+\starttexdefinition btx:list:inject #content
+ \ifconditional\btxinteractive
+ \ifx\currentbtxinternal\empty
+ #content
+ \else
+ \goto {
+ #content
+ } [
+ \s!internal(\currentbtxinternal)
+ ]
+ \fi
+ \else
+ #content
+ \fi
+\stoptexdefinition
+
+\starttexdefinition btx:list:helpers:concat
+ \space
+\stoptexdefinition
+
+\startsetups[\s!btx:\s!list:\s!page]
+ \texdefinition{\s!btx:\s!list:concat}
+ \texdefinition{\s!btx:\s!list:inject} {
+ % real pagenumber: todo, userpage
+ \currentbtxfirst
+ % order
+ \high{\currentbtxsecond}
+ }
+\stopsetups
+
+\startsetups[\s!btx:\s!list:\s!numbering]
+ \texdefinition {\s!btx:\s!list:inject} {
+ \currentbtxfirst
+ \btxparameter\c!stopper
+ }
+\stopsetups
+
+% Hans: can the following setups be condensed to one using some variable?
+
+\startsetups[\s!btx:\s!list:\s!numbering:default]
+ \btxstartstyleandcolor [\currentbtxspecification:\s!list:\s!numbering:default]
+ \btxusecommand[\currentbtxspecification:\s!list:\s!numbering:default] {
+ \fastsetup{\s!btx:\s!list:\s!numbering}
+ }
+ \btxstopstyleandcolor
+\stopsetups
+
+\startsetups[\s!btx:\s!list:\s!numbering:num]
+ \btxstartstyleandcolor [\currentbtxspecification:\s!list:\s!numbering:num]
+ \btxusecommand[\currentbtxspecification:\s!list:\s!numbering:num] {
+ \fastsetup{\s!btx:\s!list:\s!numbering}
+ }
+ \btxstopstyleandcolor
+\stopsetups
+
+\startsetups[\s!btx:\s!list:\s!numbering:short]
+ \btxstartstyleandcolor [\currentbtxspecification:\s!list:\s!numbering:short]
+ \btxusecommand [\currentbtxspecification:\s!list:\s!numbering:short] {
+ \fastsetup{\s!btx:\s!list:\s!numbering}
+ \btxflushsuffix
+ }
+ \btxstopstyleandcolor
+\stopsetups
+
+\startsetups[\s!btx:\s!list:\s!numbering:tag]
+ \btxstartstyleandcolor [\currentbtxspecification:\s!list:\s!numbering:tag]
+ \btxusecommand [\currentbtxspecification:\s!list:\s!numbering:tag] {
+ \fastsetup{\s!btx:\s!list:\s!numbering}
+ }
+ \btxstopstyleandcolor
+\stopsetups
+
+\startsetups[\s!btx:\s!list:\s!numbering:index]
+ \btxstartstyleandcolor [\currentbtxspecification:\s!list:\s!numbering:index]
+ \btxusecommand [\currentbtxspecification:\s!list:\s!numbering:index] {
+ \fastsetup{\s!btx:\s!list:\s!numbering}
+ }
+ \btxstopstyleandcolor
+\stopsetups
+
+\protect
diff --git a/tex/context/base/publ-imp-page.mkvi b/tex/context/base/publ-imp-page.mkvi
new file mode 100644
index 000000000..2d9040b10
--- /dev/null
+++ b/tex/context/base/publ-imp-page.mkvi
@@ -0,0 +1,51 @@
+%D \module
+%D [ file=publ-imp-page,
+%D version=2014.11.05,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Page numbers,
+%D author=Alan Braslau and Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+\startsetups \s!btx:\s!list:\s!page:concat
+ \ifcase\currentbtxoverflow
+ \btxparameter{\c!separator:\number\currentbtxconcat}
+ \fi
+\stopsetups
+
+% for the moment we have only one variant
+
+\startsetups [\s!btx:\s!list:\s!page]
+ \fastsetup{\s!btx:\s!list:\s!page:concat}
+ \fastsetup{\s!btx:\currentbtxspecification:\s!list:page-or-pages}
+ \ifconditional\btxinteractive
+ \goto {
+ \currentbtxfirstpage
+ } [
+ internal(\currentbtxfirstinternal)
+ ]
+ \ifx\currentbtxlastpage\empty \else
+ %\btxparameter\c!pageconnector
+ \btxparameter\c!range
+ \goto {
+ \currentbtxlastpage
+ } [
+ internal(\currentbtxlastinternal)
+ ]
+ \fi
+ \else
+ \currentbtxfirstpage
+ \ifx\currentbtxlastpage\empty \else
+ \btxparameter\c!range
+ \currentbtxlastpage
+ \fi
+ \fi
+\stopsetups
+
+\protect
diff --git a/tex/context/base/publ-imp-replacements.lua b/tex/context/base/publ-imp-replacements.lua
new file mode 100644
index 000000000..e0ac249fc
--- /dev/null
+++ b/tex/context/base/publ-imp-replacements.lua
@@ -0,0 +1,23 @@
+-- Many bibtex databases are polluted. This is a side effect of 7 bit encoding on the
+-- one hand and of tweaking the outcome on the other. The worst examples are the use
+-- of \rlap on whole names. We found that trying to cope with it all can drive one
+-- insane, so we stopped at some point. Clean up your mess or pay the price. But you
+-- can load this file (and similar ones) to help you out. There is simply no reward
+-- in trying to deal with it ourselves.
+
+return {
+ name = "replacements",
+ version = "1.00",
+ comment = "Good riddance",
+ author = "Alan Braslau and Hans Hagen",
+ copyright = "ConTeXt development team",
+ replacements = {
+ [ [[\emdash]] ] = "—",
+ [ [[\endash]] ] = "–",
+ [ [[{\emdash}]] ] = "—",
+ [ [[{\endash}]] ] = "–",
+ [ [[Th\^e\llap{\raise 0.5ex\hbox{\'{\relax}}}]] ] = "Thánh",
+ [ [[Th{\^e}\llap{\raise0.5ex\hbox{\'{\relax}}}]] ] = "Thánh",
+ [ [[Th{\^e}\llap{\raise 0.5ex\hbox{\'{\relax}}}]] ] = "Thánh",
+ },
+}
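+
+-- A minimal sketch of such a companion file (the filename and the extra mappings
+-- below are made up; only the table layout matters): one can provide a similar
+-- table for whatever markup one's own databases happen to use.
+--
+-- return {
+--     name         = "my-replacements",
+--     version      = "1.00",
+--     comment      = "local cleanups",
+--     author       = "whoever maintains the database",
+--     copyright    = "whoever maintains the database",
+--     replacements = {
+--         [ [[\ldots]]   ] = "…",
+--         [ [[{\ldots}]] ] = "…",
+--     },
+-- }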
diff --git a/tex/context/base/publ-imp-test.bib b/tex/context/base/publ-imp-test.bib
new file mode 100644
index 000000000..0d81b49ff
--- /dev/null
+++ b/tex/context/base/publ-imp-test.bib
@@ -0,0 +1,294 @@
+% This is a test bibliography for developing publ-imp-XXX files. Although
+% meant for testing apa styles, it can also be used for other styles.
+%
+% All publication categories have an example entry here. Most have an entry
+% with a "-min" key, containing only the minimal set of fields.
+%
+% Maintained by: Alan Braslau, Hans Hagen, Robin Kirkham
+
+@article {test-article-min,
+ author = "An Author",
+ title = "Title-article",
+ journal = "Journal",
+ year = "Year"
+}
+
+@article {test-article,
+ author = "An Author",
+ title = "Title-article",
+ journal = "Journal",
+ year = "Year",
+ volume = "Volume",
+ number = "Number",
+ pages = "Pages",
+ month = "Month",
+ note = "Note"
+}
+
+% author, editor, or both
+
+@book {test-book-min,
+ author = "An Author",
+ title = "Title-book",
+ publisher = "Publisher",
+ year = "Year"
+}
+
+% author, editor, or both
+% volume, number, or both
+
+@book {test-book,
+ author = "An Author",
+ editor = "An Editor",
+ title = "Title-book",
+ publisher = "Publisher",
+ year = "Year",
+ volume = "Volume",
+ number = "Number",
+ series = "Series",
+ address = "Address",
+ edition = "Edition",
+ month = "Month",
+ note = "Note"
+}
+
+% no author
+
+@book {test-book-edited,
+ editor = "An Editor",
+ title = "Title-book-edited",
+ publisher = "Publisher",
+ year = "Year"
+}
+
+@booklet {test-booklet-min,
+ title = "Title-booklet"
+}
+
+@booklet {test-booklet,
+ title = "Title-booklet",
+ author = "An Author",
+ howpublished = "How-published",
+ address = "Address",
+ month = "Month",
+ year = "Year",
+ note = "Note"
+}
+
+% author, editor, or both
+% chapter, pages, or both
+
+@inbook {test-inbook-min,
+ author = "An Author",
+ editor = "An Editor",
+ title = "Title-inbook",
+ chapter = "Chapter",
+ pages = "Pages",
+ publisher = "Publisher",
+ year = "Year"
+}
+
+% author, editor, or both
+% chapter, pages, or both
+% volume, number, or both
+
+@inbook {test-inbook,
+ author = "An Author",
+ editor = "An Editor",
+ title = "Title-inbook",
+ chapter = "Chapter",
+ pages = "Pages",
+ publisher = "Publisher",
+ year = "Year",
+ volume = "Volume",
+ number = "Number",
+ series = "Series",
+ type = "Type",
+ address = "Address",
+ edition = "Edition",
+ month = "Month",
+ note = "Note"
+}
+
+@incollection {test-incollection-min,
+ author = "An Author",
+ title = "Title-incollection",
+ booktitle = "Booktitle",
+ publisher = "Publisher",
+ year = "Year"
+}
+
+% volume, number, or both
+
+@incollection {test-incollection,
+ author = "An Author",
+ title = "Title-incollection",
+ booktitle = "Booktitle",
+ publisher = "Publisher",
+ year = "Year",
+ editor = "An Editor",
+ volume = "Volume",
+ number = "Number",
+ series = "Series",
+ type = "Type",
+ chapter = "Chapter",
+ pages = "Pages",
+ address = "Address",
+ edition = "Edition",
+ month = "Month",
+ note = "Note"
+}
+
+@inproceedings {test-inproceedings-min,
+ author = "An Author",
+ title = "Title-inproceedings",
+ booktitle = "Booktitle",
+ year = "Year"
+}
+
+% volume, number, or both
+
+@inproceedings {test-inproceedings,
+ author = "An Author",
+ title = "Title-inproceedings",
+ booktitle = "Booktitle",
+ year = "Year",
+ editor = "An Editor",
+ volume = "Volume",
+ number = "Number",
+ series = "Series",
+ pages = "Pages",
+ address = "Address",
+ month = "Month",
+ organization = "Organization",
+ publisher = "Publisher",
+ note = "Note"
+}
+
+@manual {test-manual-min,
+ title = "Title-manual"
+}
+
+@manual {test-manual,
+ title = "Title-manual",
+ author = "An Author",
+ organization = "Organization",
+ address = "Address",
+ edition = "Edition",
+ month = "Month",
+ year = "Year"
+ note = "Note"
+}
+
+@mastersthesis {test-mastersthesis-min,
+ author = "An Author",
+ title = "Title-mastersthesis",
+ school = "School",
+ year = "Year",
+}
+
+@mastersthesis {test-mastersthesis,
+ author = "An Author",
+ title = "Title-mastersthesis",
+ school = "School",
+ year = "Year",
+ type = "Type",
+ address = "Address",
+ month = "Month",
+ note = "Note"
+}
+
+@proceedings {test-proceedings-min,
+ title = "Title-proceedings",
+ year = "Year",
+}
+
+% volume, number, or both
+
+@proceedings {test-proceedings,
+ title = "Title-proceedings",
+ year = "Year",
+ editor = "An Editor",
+ volume = "Volume",
+ number = "Number",
+ series = "Series",
+ address = "Address",
+ month = "Month",
+ organization = "Organization",
+ publisher = "Publisher",
+ note = "Note"
+}
+
+@phdthesis {test-phdthesis-min,
+ author = "An Author",
+ title = "Title-phdthesis",
+ school = "School",
+ year = "Year",
+}
+
+@phdthesis {test-phdthesis,
+ author = "An Author",
+ title = "Title-phdthesis",
+ school = "School",
+ year = "Year",
+ type = "Type",
+ address = "Address",
+ month = "Month",
+ note = "Note"
+}
+
+@techreport {test-techreport-min,
+ author = "An Author",
+ title = "Title-techreport",
+ institution = "Institution",
+ year = "Year",
+}
+
+@techreport {test-techreport,
+ author = "An Author",
+ title = "Title-techreport",
+ institution = "Institution",
+ year = "Year",
+ type = "Type",
+ number = "Number",
+ address = "Address",
+ month = "Month",
+ note = "Note"
+}
+
+@misc {test-misc,
+ author = "An Author",
+ title = "Title-misc",
+ howpublished = "How-published",
+ month = "Month",
+ year = "Year",
+ note = "Note"
+}
+
+@unpublished {test-unpublished-min,
+ author = "An Author",
+ title = "Title-unpublished",
+ note = "Note"
+}
+
+@unpublished {test-unpublished,
+ author = "An Author",
+ title = "Title-unpublished",
+ note = "Note",
+ month = "Month",
+ year = "Year"
+}
+
+% some other test entries
+
+@misc {test-manyauthor,
+ author = "A Author and B Author and C Author and D Author and
+ E Author and F Author and G Author and H Author and
+ I Author and J Author and K Author and L Author and
+ M Author and N Author and O Author and P Author and
+ Q Author and R Author and S Author and T Author and
+ U Author and V Author and W Author and X Author and
+ Y Author and Z Author",
+ title = "Title-Many Authors"
+}
diff --git a/tex/context/base/publ-ini.lua b/tex/context/base/publ-ini.lua
new file mode 100644
index 000000000..34eeaec18
--- /dev/null
+++ b/tex/context/base/publ-ini.lua
@@ -0,0 +1,3295 @@
+if not modules then modules = { } end modules ['publ-ini'] = {
+ version = 1.001,
+ comment = "this module part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- bah .. this 200 locals limit again ... so we need to split it up, as adding more
+-- do ... end blocks makes it messier
+
+-- plug the list sorter into the list mechanism (specification.sortorder)
+
+-- If we define two datasets with the same bib file we can consider sharing the
+-- data, but that means that we need to have a parent, which in turn makes things
+-- messy if we start manipulating entries in different ways (future). It is not
+-- worth the trouble, as we will seldom load big bib files many times, and even
+-- then ... fonts are larger.
+
+-- A potential optimization is to work with current_dataset and current_tag when
+-- fetching fields, but the code becomes really messy that way (many currents). The
+-- gain is not that large anyway because not much publication stuff is flushed.
+
+local next, rawget, type, tostring, tonumber = next, rawget, type, tostring, tonumber
+local match, find, gsub = string.match, string.find, string.gsub
+local concat, sort, tohash = table.concat, table.sort, table.tohash
+local utfsub = utf.sub
+local mod = math.mod
+local formatters = string.formatters
+local allocate = utilities.storage.allocate
+local settings_to_array, settings_to_set = utilities.parsers.settings_to_array, utilities.parsers.settings_to_set
+local sortedkeys, sortedhash = table.sortedkeys, table.sortedhash
+local setmetatableindex = table.setmetatableindex
+local lpegmatch = lpeg.match
+local P, S, C, Ct, Cs, R, Carg = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.R, lpeg.Carg
+local upper = utf.upper
+
+local report = logs.reporter("publications")
+local report_cite = logs.reporter("publications","cite")
+local report_list = logs.reporter("publications","list")
+local report_reference = logs.reporter("publications","reference")
+local report_suffix = logs.reporter("publications","suffix")
+
+local trace = false trackers.register("publications", function(v) trace = v end)
+local trace_cite = false trackers.register("publications.cite", function(v) trace_cite = v end)
+local trace_missing = false trackers.register("publications.cite.missing", function(v) trace_missing = v end)
+local trace_references = false trackers.register("publications.cite.references", function(v) trace_references = v end)
+local trace_detail = false trackers.register("publications.detail", function(v) trace_detail = v end)
+local trace_suffixes = false trackers.register("publications.suffixes", function(v) trace_suffixes = v end)
+
+publications = publications or { }
+local datasets = publications.datasets
+local writers = publications.writers
+local casters = publications.casters
+local detailed = publications.detailed
+local enhancer = publications.enhancer
+local enhancers = publications.enhancers
+
+local tracers = publications.tracers or { }
+publications.tracers = tracers
+
+local variables = interfaces.variables
+
+local v_local = variables["local"]
+local v_global = variables["global"]
+
+local v_force = variables.force
+local v_normal = variables.normal
+local v_reverse = variables.reverse
+local v_none = variables.none
+local v_yes = variables.yes
+local v_no = variables.no
+local v_all = variables.all
+local v_always = variables.always
+local v_doublesided = variables.doublesided
+local v_default = variables.default
+local v_dataset = variables.dataset
+
+local conditionals = tex.conditionals
+
+local numbertochar = converters.characters
+
+local logsnewline = logs.newline
+local logspushtarget = logs.pushtarget
+local logspoptarget = logs.poptarget
+
+local isdefined = tex.isdefined
+
+----- basicsorter = sorters.basicsorter -- (a,b)
+----- sortstripper = sorters.strip
+----- sortsplitter = sorters.splitters.utf
+
+local manipulators = typesetters.manipulators
+local splitmanipulation = manipulators.splitspecification
+local applymanipulation = manipulators.applyspecification
+local manipulatormethods = manipulators.methods
+
+-- this might move elsewhere
+
+manipulatormethods.Word = converters.Word
+manipulatormethods.WORD = converters.WORD
+manipulatormethods.Words = converters.Words
+manipulatormethods.WORDS = converters.WORDS
+
+local context = context
+local commands = commands
+local implement = interfaces.implement
+local ctx_setmacro = interfaces.setmacro
+
+local ctx_doifelse = commands.doifelse
+local ctx_doif = commands.doif
+local ctx_doifnot = commands.doifnot
+local ctx_gobbletwoarguments = context.gobbletwoarguments
+
+local ctx_btxdirectlink = context.btxdirectlink
+local ctx_btxhandlelistentry = context.btxhandlelistentry
+local ctx_btxhandlelisttextentry = context.btxhandlelisttextentry
+local ctx_btxhandlecombientry = context.btxhandlecombientry
+local ctx_btxchecklistentry = context.btxchecklistentry
+local ctx_btxchecklistcombi = context.btxchecklistcombi
+
+local ctx_btxsetdataset = context.btxsetdataset
+local ctx_btxsettag = context.btxsettag
+local ctx_btxsetnumber = context.btxsetnumber
+local ctx_btxsetlanguage = context.btxsetlanguage
+local ctx_btxsetcombis = context.btxsetcombis
+local ctx_btxsetcategory = context.btxsetcategory
+local ctx_btxcitesetup = context.btxcitesetup
+local ctx_btxsubcitesetup = context.btxsubcitesetup
+local ctx_btxnumberingsetup = context.btxnumberingsetup
+local ctx_btxpagesetup = context.btxpagesetup
+local ctx_btxsetfirst = context.btxsetfirst
+local ctx_btxsetsecond = context.btxsetsecond
+----- ctx_btxsetthird = context.btxsetthird
+local ctx_btxsetsuffix = context.btxsetsuffix
+local ctx_btxsetinternal = context.btxsetinternal
+local ctx_btxsetlefttext = context.btxsetlefttext
+local ctx_btxsetrighttext = context.btxsetrighttext
+local ctx_btxsetbefore = context.btxsetbefore
+local ctx_btxsetafter = context.btxsetafter
+local ctx_btxsetbacklink = context.btxsetbacklink
+local ctx_btxsetbacktrace = context.btxsetbacktrace
+local ctx_btxsetcount = context.btxsetcount
+local ctx_btxsetconcat = context.btxsetconcat
+local ctx_btxsetoveflow = context.btxsetoverflow
+local ctx_btxsetfirstpage = context.btxsetfirstpage
+local ctx_btxsetlastpage = context.btxsetlastpage
+local ctx_btxsetfirstinternal = context.btxsetfirstinternal
+local ctx_btxsetlastinternal = context.btxsetlastinternal
+local ctx_btxstartcite = context.btxstartcite
+local ctx_btxstopcite = context.btxstopcite
+local ctx_btxstartciteauthor = context.btxstartciteauthor
+local ctx_btxstopciteauthor = context.btxstopciteauthor
+local ctx_btxstartsubcite = context.btxstartsubcite
+local ctx_btxstopsubcite = context.btxstopsubcite
+local ctx_btxstartlistentry = context.btxstartlistentry
+local ctx_btxstoplistentry = context.btxstoplistentry
+local ctx_btxstartcombientry = context.btxstartcombientry
+local ctx_btxstopcombientry = context.btxstopcombientry
+local ctx_btxlistsetup = context.btxlistsetup
+local ctx_btxflushauthor = context.btxflushauthor
+local ctx_btxsetnoflistentries = context.btxsetnoflistentries
+local ctx_btxsetcurrentlistentry = context.btxsetcurrentlistentry
+local ctx_btxsetcurrentlistindex = context.btxsetcurrentlistindex
+
+languages.data = languages.data or { }
+local data = languages.data
+
+local specifications = publications.specifications
+local currentspecification = specifications[false]
+local ignoredfields = { }
+publications.currentspecification = currentspecification
+
+local function setspecification(name)
+ currentspecification = specifications[name]
+ if trace then
+ report("setting specification %a",type(name) == "string" and name or "anything")
+ end
+ publications.currentspecification = currentspecification
+end
+
+publications.setspecification = setspecification
+
+implement {
+ name = "btxsetspecification",
+ actions = setspecification,
+ arguments = "string",
+}
+
+local optionalspace = lpeg.patterns.whitespace^0
+local prefixsplitter = optionalspace * lpeg.splitat(optionalspace * P("::") * optionalspace)
+
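+-- An illustrative aside (not used by the code itself): the splitter above is what
+-- makes a "dataset :: tag" reference fall apart into its two components, while a
+-- plain tag comes out unchanged.
+--
+--   local prefix, rest = lpegmatch(prefixsplitter,"extra :: knuth84")
+--   -- prefix == "extra", rest == "knuth84" (surrounding spaces are gobbled)
+--
+--   local prefix, rest = lpegmatch(prefixsplitter,"knuth84")
+--   -- prefix == "knuth84", rest == nil (no dataset prefix given)
+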
+statistics.register("publications load time", function()
+ local publicationsstats = publications.statistics
+ local nofbytes = publicationsstats.nofbytes
+ if nofbytes > 0 then
+ return string.format("%s seconds, %s bytes, %s definitions, %s shortcuts",
+ statistics.elapsedtime(publications),
+ nofbytes,
+ publicationsstats.nofdefinitions or 0,
+ publicationsstats.nofshortcuts or 0
+ )
+ else
+ return nil
+ end
+end)
+
+luatex.registerstopactions(function()
+ local done = false
+ for name, dataset in sortedhash(datasets) do
+ for command, n in sortedhash(dataset.commands) do
+ if not done then
+ logspushtarget("logfile")
+ logsnewline()
+ report("start used btx commands")
+ logsnewline()
+ done = true
+ end
+ if isdefined[command] then
+ report("%-20s %-20s % 5i %s",name,command,n,"known")
+ elseif isdefined[upper(command)] then
+ report("%-20s %-20s % 5i %s",name,command,n,"KNOWN")
+ else
+ report("%-20s %-20s % 5i %s",name,command,n,"unknown")
+ end
+ end
+ end
+ if done then
+ logsnewline()
+ report("stop used btx commands")
+ logsnewline()
+ logspoptarget()
+ end
+end)
+
+-- multipass: we need to sort because hashing is random per run and not per
+-- version (not the best change in lua)
+
+local collected = allocate()
+local tobesaved = allocate()
+
+do
+
+ local function serialize(t)
+ local f_key_table = formatters[" [%q] = {"]
+ local f_key_string = formatters[" %s = %q,"]
+ local r = { "return {" }
+ local m = 1
+ for tag, entry in sortedhash(t) do
+ m = m + 1
+ r[m] = f_key_table(tag)
+ local s = sortedkeys(entry)
+ for i=1,#s do
+ local k = s[i]
+ m = m + 1
+ r[m] = f_key_string(k,entry[k])
+ end
+ m = m + 1
+ r[m] = " },"
+ end
+        r[m+1] = "}"
+ return concat(r,"\n")
+ end
+
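+    -- An informal sketch of what serialize is about (keys come out sorted, so the
+    -- written file and its checksum are stable across runs; the tag and userdata
+    -- values below are made up):
+    --
+    --   serialize { knuth84 = { btxref = "knuth84", btxset = "default" } }
+    --
+    -- returns a string along the lines of
+    --
+    --   return {
+    --    ["knuth84"] = {
+    --     btxref = "knuth84",
+    --     btxset = "default",
+    --    },
+    --   }
+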
+ local function finalizer()
+ local prefix = tex.jobname -- or environment.jobname
+ local setnames = sortedkeys(datasets)
+ for i=1,#setnames do
+ local name = setnames[i]
+ local dataset = datasets[name]
+ local userdata = dataset.userdata
+ local checksum = nil
+ local username = file.addsuffix(file.robustname(formatters["%s-btx-%s"](prefix,name)),"lua")
+ if userdata and next(userdata) then
+ if job.passes.first then
+ local newdata = serialize(userdata)
+ checksum = md5.HEX(newdata)
+ io.savedata(username,newdata)
+ end
+ else
+ os.remove(username)
+ username = nil
+ end
+ local loaded = dataset.loaded
+ local sources = dataset.sources
+ local used = { }
+ for i=1,#sources do
+ local source = sources[i]
+ -- if loaded[source.filename] ~= "previous" then -- needs checking
+ if loaded[source.filename] ~= "previous" or loaded[source.filename] == "current" then
+ used[#used+1] = source
+ end
+ end
+ tobesaved[name] = {
+ usersource = {
+ filename = username,
+ checksum = checksum,
+ },
+ datasources = used,
+ }
+ end
+ end
+
+ local function initializer()
+ statistics.starttiming(publications)
+ for name, state in sortedhash(collected) do
+ local dataset = datasets[name]
+ local datasources = state.datasources
+ local usersource = state.usersource
+ if datasources then
+ for i=1,#datasources do
+ local filename = datasources[i].filename
+ publications.load {
+ dataset = dataset,
+ filename = filename,
+ kind = "previous"
+ }
+ end
+ end
+ if usersource then
+ dataset.userdata = table.load(usersource.filename) or { }
+ end
+ end
+ statistics.stoptiming(publications)
+ function initializer() end -- will go, for now, runtime loaded
+ end
+
+ job.register('publications.collected',tobesaved,initializer,finalizer)
+
+end
+
+-- we want to minimize references as there can be many (at least
+-- when testing)
+
+local nofcitations = 0
+local usedentries = nil
+local citetolist = nil
+local listtocite = nil
+local listtolist = nil
+
+do
+
+ local initialize = nil
+
+ initialize = function(t)
+ usedentries = allocate { }
+ citetolist = allocate { }
+ listtocite = allocate { }
+ listtolist = allocate { }
+ local names = { }
+ local internals = structures.references.internals
+ local p_collect = (C(R("09")^1) * Carg(1) / function(s,entry) listtocite[tonumber(s)] = entry end + P(1))^0
+ local nofunique = 0
+ local nofreused = 0
+ for i=1,#internals do
+ local entry = internals[i]
+ if entry then
+ local metadata = entry.metadata
+ if metadata then
+ local kind = metadata.kind
+ if kind == "full" then
+ -- reference (in list)
+ local userdata = entry.userdata
+ if userdata then
+ local tag = userdata.btxref
+ if tag then
+ local set = userdata.btxset or v_default
+ local s = usedentries[set]
+ if s then
+ local u = s[tag]
+ if u then
+ u[#u+1] = entry
+ else
+ s[tag] = { entry }
+ end
+ nofreused = nofreused + 1
+ else
+ usedentries[set] = { [tag] = { entry } }
+ nofunique = nofunique + 1
+ end
+ -- alternative: collect prev in group
+ local bck = userdata.btxbck
+ if bck then
+ lpegmatch(p_collect,bck,1,entry) -- for s in string.gmatch(bck,"[^ ]+") do listtocite[tonumber(s)] = entry end
+ local lst = tonumber(userdata.btxlst)
+ if lst then
+ listtolist[lst] = entry
+ end
+ else
+ local int = tonumber(userdata.btxint)
+ if int then
+ listtocite[int] = entry
+ end
+ end
+ local detail = datasets[set].details[tag]
+                            -- todo: these have to be pluggable
+ if detail then
+ local author = detail.author
+ if author then
+ for i=1,#author do
+ local a = author[i]
+ local s = a.surnames
+ if s then
+ local c = concat(s,"+")
+ local n = names[c]
+ if n then
+ n[#n+1] = a
+ break
+ else
+ names[c] = { a }
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ elseif kind == "btx" or kind == "userdata" then -- will go: kind == "userdata"
+ -- list entry (each cite)
+ local userdata = entry.userdata
+ if userdata then
+ local int = tonumber(userdata.btxint)
+ if int then
+ citetolist[int] = entry
+ end
+ end
+ end
+ end
+ else
+ -- weird
+ end
+ end
+ for k, v in sortedhash(names) do
+ local n = #v
+ if n > 1 then
+ local original = v[1].original
+ for i=2,n do
+ if original ~= v[i].original then
+ report("potential clash in name %a",k)
+ for i=1,n do
+ v[i].state = 1
+ end
+ break
+ end
+ end
+ end
+ end
+ if trace_detail then
+ report("%s unique bibentries: %s reused entries",nofunique,nofreused)
+ end
+ initialize = nil
+ end
+
+ usedentries = setmetatableindex(function(_,k) if initialize then initialize() end return usedentries[k] end)
+ citetolist = setmetatableindex(function(_,k) if initialize then initialize() end return citetolist [k] end)
+ listtocite = setmetatableindex(function(_,k) if initialize then initialize() end return listtocite [k] end)
+ listtolist = setmetatableindex(function(_,k) if initialize then initialize() end return listtolist [k] end)
+
+ function publications.usedentries()
+ if initialize then
+ initialize()
+ end
+ return usedentries
+ end
+
+end
+
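+-- The four tables above use a small lazy-initialization trick: the first index into
+-- any of them runs the (relatively expensive) scan of the reference internals once,
+-- after which the index metamethod simply falls through to the filled tables. A
+-- stripped down, self-contained sketch of the same pattern (names are made up):
+--
+--   local data = nil
+--   local fill = nil
+--
+--   fill = function()
+--       data = { answer = 42 } -- pretend that this is expensive
+--       fill = nil             -- make sure that it runs at most once
+--   end
+--
+--   data = table.setmetatableindex(function(_,k)
+--       if fill then fill() end
+--       return data[k]         -- by now data is the real table
+--   end)
+--
+--   print(data.answer)         -- triggers the fill and prints 42
+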
+-- match:
+--
+-- [current|previous|following] section
+-- [current|previous|following] block
+-- [current|previous|following] component
+--
+-- by prefix
+-- by dataset
+
+local findallused do
+
+ local reported = { }
+ local finder = publications.finder
+
+ findallused = function(dataset,reference,internal)
+ local current = datasets[dataset]
+ local finder = publications.finder -- for the moment, not yet in all betas
+ local find = finder and finder(current,reference)
+ local tags = not find and settings_to_array(reference)
+ local todo = { }
+ local okay = { } -- only if mark
+ local allused = usedentries[dataset] or { }
+ local luadata = current.luadata
+ local details = current.details
+ local ordered = current.ordered
+ if set then
+ local registered = { }
+ local function register(tag)
+ local entry = allused[tag]
+ if not entry then
+ local parent = details[tag].parent
+ if parent then
+ entry = allused[parent]
+ end
+ if entry then
+ report("using reference of parent %a for %a",parent,tag)
+ tag = parent
+ end
+ end
+ if registered[tag] then
+ return
+ end
+ if entry then
+ -- only once in a list but at some point we can have more (if we
+ -- decide to duplicate)
+ if #entry == 1 then
+ entry = entry[1]
+ else
+ -- same block and section
+ local done = false
+ if internal and internal > 0 then
+ -- first following in list
+ for i=1,#entry do
+ local e = entry[i]
+ if e.references.internal > internal then
+ done = e
+ break
+ end
+ end
+ if not done then
+ -- last preceding in list
+ for i=1,#entry do
+ local e = entry[i]
+ if e.references.internal < internal then
+ done = e
+ else
+ break
+ end
+ end
+ end
+ end
+ if done then
+ entry = done
+ else
+ entry = entry[1]
+ end
+ end
+ okay[#okay+1] = entry
+ end
+ todo[tag] = true
+ registered[tag] = true
+ return tag
+ end
+ if reference == "*" then
+ tags = { }
+ for i=1,#ordered do
+ local tag = ordered[i].tag
+ tag = register(tag)
+ tags[#tags+1] = tag
+ end
+ elseif find then
+ tags = { }
+ for i=1,#ordered do
+ local entry = ordered[i]
+ if find(entry) then
+ local tag = entry.tag
+ tag = register(tag)
+ tags[#tags+1] = tag
+ end
+ end
+ if #tags == 0 and not reported[reference] then
+ tags[1] = reference
+ reported[reference] = true
+ end
+ else
+ for i=1,#tags do
+ local tag = tags[i]
+ if luadata[tag] then
+ tag = register(tag)
+ tags[i] = tag
+ elseif not reported[tag] then
+ reported[tag] = true
+ report_cite("non-existent entry %a in %a",tag,dataset)
+ end
+ end
+ end
+ else
+ if find then
+ tags = { }
+ for i=1,#ordered do
+ local entry = ordered[i]
+ if find(entry) then
+ local tag = entry.tag
+ local parent = details[tag].parent
+ if parent then
+ tag = parent
+ end
+ tags[#tags+1] = tag
+ todo[tag] = true
+ end
+ end
+ if #tags == 0 and not reported[reference] then
+ tags[1] = reference
+ reported[reference] = true
+ end
+ else
+ for i=1,#tags do
+ local tag = tags[i]
+ local parent = details[tag].parent
+ if parent then
+ tag = parent
+ tags[i] = tag
+ end
+ if luadata[tag] then
+ todo[tag] = true
+ elseif not reported[tag] then
+ reported[tag] = true
+ report_cite("non-existent entry %a in %a",tag,dataset)
+ end
+ end
+ end
+ end
+ return okay, todo, tags
+ end
+
+end
+
+local function unknowncite(reference)
+ ctx_btxsettag(reference)
+ if trace_detail then
+ report("expanding %a cite setup %a","unknown","unknown")
+ end
+ ctx_btxcitesetup("unknown")
+end
+
+local concatstate = publications.concatstate
+
+local tobemarked = nil
+
+local function marknocite(dataset,tag,nofcitations,setup)
+ ctx_btxstartcite()
+ ctx_btxsetdataset(dataset)
+ ctx_btxsettag(tag)
+ ctx_btxsetbacklink(nofcitations)
+ if trace_detail then
+ report("expanding cite setup %a",setup)
+ end
+ ctx_btxcitesetup(setup)
+ ctx_btxstopcite()
+end
+
+local function markcite(dataset,tag,flush)
+ if not tobemarked then
+ return 0
+ end
+ local citation = tobemarked[tag]
+ if not citation then
+ return 0
+ end
+ if citation == true then
+ nofcitations = nofcitations + 1
+ if trace_cite then
+ report_cite("mark, dataset: %s, tag: %s, number: %s, state: %s",dataset,tag,nofcitations,"cited")
+ end
+ if flush then
+ marknocite(dataset,tag,nofcitations,"nocite")
+ end
+ tobemarked[tag] = nofcitations
+ return nofcitations
+ else
+ return citation
+ end
+end
+
+local marked_dataset = nil
+local marked_list = nil
+
+local function flushmarked(dataset,list,todo)
+ marked_dataset = dataset
+ marked_list = list
+end
+
+local function btxflushmarked()
+ if marked_list and tobemarked then
+ for i=1,#marked_list do
+ -- keep order
+ local tag = marked_list[i]
+ local tbm = tobemarked[tag]
+ if tbm == true or not tbm then
+ nofcitations = nofcitations + 1
+ marknocite(marked_dataset,tag,nofcitations,tbm and "nocite" or "invalid")
+ if trace_cite then
+ report_cite("mark, dataset: %s, tag: %s, number: %s, state: %s",marked_dataset,tag,nofcitations,tbm and "unset" or "invalid")
+ end
+ end
+ end
+ end
+ tobemarked = nil
+ marked_dataset = nil
+ marked_list = nil
+end
+
+implement { name = "btxflushmarked", actions = btxflushmarked }
+
+-- basic access
+
+local function getfield(dataset,tag,name) -- for the moment quick and dirty
+ local d = datasets[dataset].luadata[tag]
+ return d and d[name]
+end
+
+local function getdetail(dataset,tag,name) -- for the moment quick and dirty
+ local d = datasets[dataset].details[tag]
+ return d and d[name]
+end
+
+local function getcasted(dataset,tag,field,specification)
+ local current = datasets[dataset]
+ if current then
+ local data = current.luadata[tag]
+ if data then
+ local category = data.category
+ if not specification then
+ specification = currentspecification
+ end
+ local catspec = specification.categories[category]
+ if not catspec then
+ return false
+ end
+ local fields = catspec.fields
+ if fields then
+ local sets = catspec.sets
+ if sets then
+ local set = sets[field]
+ if set then
+ for i=1,#set do
+ local field = set[i]
+ local value = fields[field] and data[field] -- redundant check
+ if value then
+ local kind = specification.types[field]
+ return detailed[kind][value], field, kind
+ end
+ end
+ end
+ end
+ local value = fields[field] and data[field] -- redundant check
+ if value then
+ local kind = specification.types[field]
+ return detailed[kind][value], field, kind
+ end
+ end
+ local data = current.details[tag]
+ if data then
+ local kind = specification.types[field]
+ return data[field], field, kind -- no check
+ end
+ end
+ end
+end
+
+local function getfaster(current,data,details,field,categories,types)
+ local category = data.category
+ local catspec = categories[category]
+ if not catspec then
+ return false
+ end
+ local fields = catspec.fields
+ if fields then
+ local sets = catspec.sets
+ if sets then
+ local set = sets[field]
+ if set then
+ for i=1,#set do
+ local field = set[i]
+ local value = fields[field] and data[field] -- redundant check
+ if value then
+ local kind = types[field]
+ return detailed[kind][value], field, kind
+ end
+ end
+ end
+ end
+ local value = fields[field] and data[field] -- redundant check
+ if value then
+ local kind = types[field]
+ return detailed[kind][value]
+ end
+ end
+ if details then
+ local kind = types[field]
+ return details[field]
+ end
+end
+
+local function getdirect(dataset,data,field,catspec) -- no field check, no dataset check
+ local catspec = (catspec or currentspecification).categories[data.category]
+ if not catspec then
+ return false
+ end
+ local fields = catspec.fields
+ if fields then
+ local sets = catspec.sets
+ if sets then
+ local set = sets[field]
+ if set then
+ for i=1,#set do
+ local field = set[i]
+ local value = fields[field] and data[field] -- redundant check
+ if value then
+ return value
+ end
+ end
+ end
+ end
+ return fields[field] and data[field] or nil -- redundant check
+ end
+end
+
+local function getfuzzy(data,field,categories) -- no field check, no dataset check
+ local catspec
+ if categories then
+ local category = data.category
+ if category then
+ catspec = categories[data.category]
+ end
+ end
+ if not field then
+ return
+ elseif not catspec then
+ return data[field]
+ end
+ local fields = catspec.fields
+ if fields then
+ local sets = catspec.sets
+ if sets then
+ local set = sets[field]
+ if set then
+ for i=1,#set do
+ local field = set[i]
+ local value = fields[field] and data[field] -- redundant check
+ if value then
+ return value
+ end
+ end
+ end
+ end
+ return fields[field] and data[field] or nil -- redundant check
+ end
+end
+
+publications.getfield = getfield
+publications.getdetail = getdetail
+publications.getcasted = getcasted
+publications.getfaster = getfaster
+publications.getdirect = getdirect
+publications.getfuzzy = getfuzzy
+
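+-- For the record, a few illustrative calls (assuming that the test bibliography
+-- publ-imp-test.bib has been loaded into the default dataset):
+--
+--   publications.getfield ("default","test-article","title")
+--   -- the raw luadata value: "Title-article"
+--
+--   publications.getcasted("default","test-article","author")
+--   -- the casted (split) author table as stored in the details, plus the field
+--   -- name and its type
+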
+-- this needs to be checked: a specific type should have a checker
+
+-- author pagenumber keyword url
+
+-- function commands.btxsingularorplural(dataset,tag,name)
+-- local d = getcasted(dataset,tag,name)
+-- if type(d) == "table" then
+-- d = #d <= 1
+-- else
+-- d = true
+-- end
+-- ctx_doifelse(d)
+-- end
+
+-- function commands.oneorrange(dataset,tag,name)
+-- local d = datasets[dataset].luadata[tag] -- details ?
+-- if d then
+-- d = d[name]
+-- end
+-- if type(d) == "string" then
+-- d = find(d,"%-")
+-- else
+-- d = false
+-- end
+-- ctx_doifelse(not d) -- so singular is default
+-- end
+
+-- function commands.firstofrange(dataset,tag,name)
+-- local d = datasets[dataset].luadata[tag] -- details ?
+-- if d then
+-- d = d[name]
+-- end
+-- if type(d) == "string" then
+-- context(match(d,"([^%-]+)"))
+-- end
+-- end
+
+local inspectors = allocate()
+local nofmultiple = allocate()
+local firstandlast = allocate()
+
+publications.inspectors = inspectors
+inspectors.nofmultiple = nofmultiple
+inspectors.firstandlast = firstandlast
+
+function nofmultiple.author(d)
+ return type(d) == "table" and #d or 0
+end
+
+function publications.singularorplural(dataset,tag,name)
+ local data, field, kind = getcasted(dataset,tag,name)
+ if data then
+ local test = nofmultiple[kind]
+ if test then
+ local n = test(data)
+ return not n or n < 2
+ end
+ end
+ return true
+end
+
+function firstandlast.range(d)
+ if type(d) == "table" then
+ return d[1], d[2]
+ end
+end
+
+firstandlast.pagenumber = firstandlast.range
+
+function publications.oneorrange(dataset,tag,name)
+ local data, field, kind = getcasted(dataset,tag,name)
+ if data then
+ local test = firstandlast[kind]
+ if test then
+ local first, last = test(data)
+ return not (first and last)
+ end
+ end
+ return nil -- nothing at all
+end
+
+function publications.firstofrange(dataset,tag,name)
+ local data, field, kind = getcasted(dataset,tag,name)
+ if data then
+ local test = firstandlast[kind]
+ if test then
+ local first = test(data)
+ if first then
+ return first
+ end
+ end
+ end
+end
+
+function publications.lastofrange(dataset,tag,name)
+ local data, field, kind = getcasted(dataset,tag,name)
+ if data then
+ local test = firstandlast[kind]
+ if test then
+ local first, last = test(data)
+ if last then
+ return last
+ end
+ end
+ end
+end
+
+local three_strings = { "string", "string", "string" }
+
+implement {
+ name = "btxsingularorplural",
+ actions = { publications.singularorplural, ctx_doifelse },
+ arguments = three_strings
+}
+
+implement {
+ name = "btxoneorrange",
+ actions = { publications.oneorrange, function(b) if b == nil then ctx_gobbletwoarguments() else ctx_doifelse(b) end end },
+ arguments = three_strings
+}
+
+implement {
+ name = "btxfirstofrange",
+ actions = { publications.firstofrange, context },
+ arguments = three_strings
+}
+
+implement {
+ name = "btxlastofrange",
+ actions = { publications.lastofrange, context },
+ arguments = three_strings
+}
+
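+-- The names above surface at the tex end as \btxsingularorplural, \btxoneorrange,
+-- \btxfirstofrange and \btxlastofrange; the default rendering setups earlier in
+-- this patch use the first two to choose between, for instance, the Editor/Editors
+-- and Volume/Volumes labels.
+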
+-- basic loading
+
+function publications.usedataset(specification)
+ specification.kind = "current"
+ publications.load(specification)
+end
+
+implement {
+ name = "btxusedataset",
+ actions = publications.usedataset,
+ arguments = {
+ {
+ { "specification" },
+ { "dataset" },
+ { "filename" },
+ }
+ }
+}
+
+implement {
+ name = "convertbtxdatasettoxml",
+ arguments = { "string", true },
+ actions = publications.converttoxml
+}
+
+-- enhancing
+
+do
+
+ -- maybe not redo when already done
+
+ local function shortsorter(a,b)
+ local ay, by = a[2], b[2] -- year
+ if ay ~= by then
+ return ay < by
+ end
+ local ay, by = a[3], b[3] -- suffix
+ if ay ~= by then
+ -- bah, bah, bah
+ local an, bn = tonumber(ay), tonumber(by)
+ if an and bn then
+ return an < bn
+ else
+ return ay < by
+ end
+ end
+ return a[4] < b[4]
+ end
+
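+    -- What the sorter boils down to, shown on the { tag, year, suffix, index }
+    -- tuples that the suffix enhancer below collects (the tags are made up):
+    --
+    --   local t = {
+    --       { "knuth86",  1986, "0", 3 },
+    --       { "knuth84a", 1984, "0", 2 },
+    --       { "knuth84b", 1984, "0", 1 },
+    --   }
+    --
+    --   sort(t,shortsorter)
+    --
+    --   -- the 1984 pair has equal years and suffixes, so the fourth field (the
+    --   -- order in the dataset) decides: knuth84b, knuth84a, then knuth86
+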
+    -- We could avoid loops by combining enhancers but that only makes it messier,
+    -- and for documents that use publications the few extra milliseconds are
+    -- irrelevant (there is for sure more to gain by proper coding of the source
+    -- and/or style).
+
+ local f_short = formatters["%s%02i"]
+
+ function publications.enhancers.suffixes(dataset)
+ if not dataset then
+ return -- bad news
+ else
+ report("analyzing previous publication run for %a",dataset.name)
+ end
+ dataset.suffixed = true
+ --
+ local used = usedentries[dataset.name]
+ if not used then
+ return -- probably a first run
+ end
+ local luadata = dataset.luadata
+ local details = dataset.details
+ local ordered = dataset.ordered
+ if not luadata or not details or not ordered then
+ report("nothing to be analyzed in %a",dataset.name)
+ return -- also bad news
+ end
+ -- we have two suffixes: author (dependent of type) and short
+ local kind = dataset.authorconversion or "name"
+ local field = "author" -- currently only author
+ local shorts = { }
+ local authors = { }
+ local hasher = publications.authorhashers[kind]
+ local shorter = publications.authorhashers.short
+ for i=1,#ordered do
+ local entry = ordered[i]
+ if entry then
+ local tag = entry.tag
+ if tag then
+ local use = used[tag]
+ if use then
+ -- use is a table of used list entries (so there can be more) and we just look at
+ -- the first one for btx properties
+ local listentry = use[1]
+ local userdata = listentry.userdata
+ local btxspc = userdata and userdata.btxspc
+ if btxspc then
+ -- we could act on the 3rd arg returned by getcasted but in general any string will do
+ -- so we deal with it in the author hashers ... maybe some day ...
+ local author = getcasted(dataset,tag,field,specifications[btxspc])
+ local kind = type(author)
+ if kind == "table" or kind == "string" then
+                            local u = listentry.entries
+                            if u then
+                                u = u.text -- hm
+                            else
+                                u = "0"
+                            end
+ local year = tonumber(entry.year) or 9999
+ local data = { tag, year, u, i }
+ -- authors
+ local hash = hasher(author)
+ local found = authors[hash]
+ if not found then
+ authors[hash] = { data }
+ else
+ found[#found+1] = data
+ end
+ -- shorts
+ local hash = shorter(author)
+ local short = f_short(hash,mod(year,100))
+ local found = shorts[short]
+ if not found then
+ shorts[short] = { data }
+ else
+ found[#found+1] = data
+ end
+ --
+ else
+ report("author typecast expected for field %a",field)
+ end
+ else
+ --- no spec so let's forget about it
+ end
+ end
+ end
+ end
+ end
+ local function addsuffix(hashed,key,suffixkey)
+ for hash, tags in sortedhash(hashed) do -- ordered ?
+ local n = #tags
+ if n == 0 then
+ -- skip
+ elseif n == 1 then
+ local tagdata = tags[1]
+ local tag = tagdata[1]
+ local detail = details[tag]
+ local entry = luadata[tag]
+ local year = entry.year
+ detail[key] = hash
+ elseif n > 1 then
+ sort(tags,shortsorter) -- or take first -- todo: proper utf sorter
+ local lastyear = nil
+ local suffix = nil
+ local previous = nil
+ for i=1,n do
+ local tagdata = tags[i]
+ local tag = tagdata[1]
+ local detail = details[tag]
+ local entry = luadata[tag]
+ local year = entry.year
+ detail[key] = hash
+ if year ~= lastyear then
+ lastyear = year
+ suffix = 1
+ else
+ if previous and suffix == 1 then
+ previous[suffixkey] = suffix
+ end
+ suffix = suffix + 1
+ detail[suffixkey] = suffix
+ end
+ previous = detail
+ end
+ end
+ if trace_suffixes then
+ for i=1,n do
+ local tag = tags[i][1]
+ local year = luadata[tag].year
+ local suffix = details[tag].suffix
+ if suffix then
+ report_suffix("%s: tag %a, hash %a, year %a, suffix %a",key,tag,hash,year or '',suffix or '')
+ else
+ report_suffix("%s: tag %a, hash %a, year %a",key,tag,hash,year or '')
+ end
+ end
+ end
+ end
+ end
+ addsuffix(shorts, "shorthash", "shortsuffix") -- todo: shorthash
+ addsuffix(authors,"authorhash","authorsuffix")
+ end
+
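+    -- The net effect: when several used entries share the author hash and the year,
+    -- their details get authorsuffix 1, 2, ... (and similarly shortsuffix for the
+    -- short hash), which a rendering can then turn into the familiar "1984a",
+    -- "1984b" style of disambiguation.
+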
+ -- utilities.sequencers.appendaction(enhancer,"system","publications.enhancers.suffixes")
+
+end
+
+implement {
+ name = "btxaddentry",
+ actions = function(name,settings,content)
+ local dataset = datasets[name]
+ if dataset then
+ publications.addtexentry(dataset,settings,content)
+ end
+ end,
+ arguments = { "string", "string", "string" }
+}
+
+function publications.checkeddataset(name,default)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ return name
+ elseif default and default ~= "" then
+ return default
+ else
+ report("unknown dataset %a, forcing %a",name,v_default)
+ return v_default
+ end
+end
+
+implement {
+ name = "btxsetdataset",
+ actions = { publications.checkeddataset, context },
+ arguments = { "string", "string"}
+}
+
+implement {
+ name = "btxsetentry",
+ actions = function(name,tag)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ if dataset.luadata[tag] then
+ context(tag)
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+ end,
+ arguments = { "string", "string" },
+}
+
+-- rendering of fields
+
+do
+
+ local typesetters = { }
+ publications.typesetters = typesetters
+
+ local function defaulttypesetter(field,value,manipulator)
+ if value and value ~= "" then
+ value = tostring(value)
+ context(manipulator and applymanipulation(manipulator,value) or value)
+ end
+ end
+
+ setmetatableindex(typesetters,function(t,k)
+ local v = defaulttypesetter
+ t[k] = v
+ return v
+ end)
+
+ function typesetters.string(field,value,manipulator)
+ if value and value ~= "" then
+ context(manipulator and applymanipulation(manipulator,value) or value)
+ end
+ end
+
+ function typesetters.author(field,value,manipulator)
+ ctx_btxflushauthor(field)
+ end
+
+ -- function typesetters.url(field,value,manipulator)
+ -- ....
+ -- end
+
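+    -- Should a dedicated url renderer ever be needed, it is a matter of adding an
+    -- entry to the typesetters table; a possible (untested) sketch along the lines
+    -- of the \hyphenatedurl macro used in the xml setups:
+    --
+    -- function typesetters.url(field,value,manipulator)
+    --     if value and value ~= "" then
+    --         context.hyphenatedurl(manipulator and applymanipulation(manipulator,value) or value)
+    --     end
+    -- end
+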
+ -- if there is no specification then we're in trouble but there is
+ -- always a default anyway
+ --
+ -- there's also always a fields table but it can be empty due to
+ -- lack of specifications
+ --
+ -- then there can be cases where we have no specification for instance
+ -- when we have a special kind of database
+
+ local splitter = lpeg.splitat(":")
+
+ local function permitted(category,field)
+ local catspec = currentspecification.categories[category]
+ if not catspec then
+ report("invalid category %a, %s",category,"no specification") -- can't happen
+ return false
+ end
+ local fields = catspec.fields
+ if not fields then
+ report("invalid category %a, %s",category,"no fields") -- can't happen
+ return false
+ end
+ if ignoredfields and ignoredfields[field] then
+ return false
+ end
+ local virtualfields = currentspecification.virtualfields
+ if virtualfields and virtualfields[field] then
+ return true
+ end
+ local sets = catspec.sets
+ if sets then
+ local set = sets[field]
+ if set then
+ return set
+ end
+ end
+ if fields[field] then
+ return true
+ end
+ local f, l = lpegmatch(splitter,field)
+ if f and l and fields[f] then
+ return true -- language specific one
+ end
+ end
+
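+    -- A few illustrative calls, assuming that the default specification is active
+    -- and knows a title field for articles:
+    --
+    --   permitted("article","title")     -- truthy: a known field (or set)
+    --   permitted("article","title:fr")  -- truthy: the :fr part is a language suffix
+    --   permitted("article","nosuchone") -- nil: not a known field
+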
+ local function found(dataset,tag,field,valid,fields)
+ if valid == true then
+ -- local fields = dataset.luadata[tag]
+ local okay = fields[field]
+ if okay then
+ return field, okay
+ end
+ local details = dataset.details[tag]
+ local value = details[field]
+ if value then
+ return field, value
+ end
+ elseif valid then
+ -- local fields = dataset.luadata[tag]
+ for i=1,#valid do
+ local field = valid[i]
+ local value = fields[field]
+ if value then
+ return field, value
+ end
+ end
+ local details = dataset.details[tag]
+ for i=1,#valid do
+                local field = valid[i]
+                local value = details[field]
+ if value then
+ return field, value
+ end
+ end
+ end
+ end
+
+ local function get(dataset,tag,field,what,check,catspec) -- somewhat more extensive
+ local current = rawget(datasets,dataset)
+ if current then
+ local data = current.luadata[tag]
+ if data then
+ local category = data.category
+ local catspec = (catspec or currentspecification).categories[category]
+ if not catspec then
+ return false
+ end
+ local fields = catspec.fields
+ if fields then
+ local sets = catspec.sets
+ if sets then
+ local set = sets[field]
+ if set then
+ if check then
+ for i=1,#set do
+ local field = set[i]
+ local kind = (not check or data[field]) and fields[field]
+ if kind then
+ return what and kind or field
+ end
+ end
+ elseif what then
+ local t = { }
+ for i=1,#set do
+ t[i] = fields[set[i]] or "unknown"
+ end
+ return concat(t,",")
+ else
+ return concat(set,",")
+ end
+ end
+ end
+ local kind = (not check or data[field]) and fields[field]
+ if kind then
+ return what and kind or field
+ end
+ end
+ end
+ end
+ return ""
+ end
+
+ publications.permitted = permitted
+ publications.found = found
+ publications.get = get
+
+ local function btxflush(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local fields = dataset.luadata[tag]
+ if fields then
+ local manipulator, field = splitmanipulation(field)
+ local category = fields.category
+ local valid = permitted(category,field)
+ if valid then
+ local name, value = found(dataset,tag,field,valid,fields)
+ if value then
+ typesetters[currentspecification.types[name]](field,value,manipulator)
+ elseif trace_detail then
+ report("%s %s %a in category %a for tag %a in dataset %a","unknown","entry",field,category,tag,name)
+ end
+ elseif trace_detail then
+ report("%s %s %a in category %a for tag %a in dataset %a","invalid","entry",field,category,tag,name)
+ end
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+ end
+
+ local function btxfield(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local fields = dataset.luadata[tag]
+ if fields then
+ local category = fields.category
+ local manipulator, field = splitmanipulation(field)
+ if permitted(category,field) then
+ local value = fields[field]
+ if value then
+ typesetters[currentspecification.types[field]](field,value,manipulator)
+ elseif trace_detail then
+ report("%s %s %a in category %a for tag %a in dataset %a","unknown","field",field,category,tag,name)
+ end
+ elseif trace_detail then
+ report("%s %s %a in category %a for tag %a in dataset %a","invalid","field",field,category,tag,name)
+ end
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+ end
+
+ local function btxdetail(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local fields = dataset.luadata[tag]
+ if fields then
+ local details = dataset.details[tag]
+ if details then
+ local category = fields.category
+ local manipulator, field = splitmanipulation(field)
+ if permitted(category,field) then
+ local value = details[field]
+ if value then
+ typesetters[currentspecification.types[field]](field,value,manipulator)
+ elseif trace_detail then
+ report("%s %s %a in category %a for tag %a in dataset %a","unknown","detail",field,category,tag,name)
+ end
+ elseif trace_detail then
+ report("%s %s %a in category %a for tag %a in dataset %a","invalid","detail",field,category,tag,name)
+ end
+ else
+ report("no details for tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+ end
+
+ local function btxdirect(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local fields = dataset.luadata[tag]
+ if fields then
+ local manipulator, field = splitmanipulation(field)
+ local value = fields[field]
+ if value then
+ context(typesetters.default(field,value,manipulator))
+ elseif trace_detail then
+ report("field %a of tag %a in dataset %a has no value",field,tag,name)
+ end
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+ end
+
+ local function okay(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local fields = dataset.luadata[tag]
+ if fields then
+ local category = fields.category
+ local valid = permitted(category,field)
+ if valid then
+ local value, field = found(dataset,tag,field,valid,fields)
+ return value and value ~= ""
+ end
+ end
+ end
+ end
+
+ publications.okay = okay
+
+ implement { name = "btxfield", actions = btxfield, arguments = { "string", "string", "string" } }
+ implement { name = "btxdetail", actions = btxdetail, arguments = { "string", "string", "string" } }
+ implement { name = "btxflush", actions = btxflush, arguments = { "string", "string", "string" } }
+ implement { name = "btxdirect", actions = btxdirect, arguments = { "string", "string", "string" } }
+
+ implement { name = "btxfieldname", actions = { get, context }, arguments = { "string", "string", "string", false, false } }
+ implement { name = "btxfieldtype", actions = { get, context }, arguments = { "string", "string", "string", true, false } }
+ implement { name = "btxfoundname", actions = { get, context }, arguments = { "string", "string", "string", false, true } }
+ implement { name = "btxfoundtype", actions = { get, context }, arguments = { "string", "string", "string", true, true } }
+
+ implement { name = "btxdoifelse", actions = { okay, ctx_doifelse }, arguments = { "string", "string", "string" } }
+ implement { name = "btxdoif", actions = { okay, ctx_doif }, arguments = { "string", "string", "string" } }
+ implement { name = "btxdoifnot", actions = { okay, ctx_doifnot }, arguments = { "string", "string", "string" } }
+
+end
+
+-- -- alternative approach: keep data at the tex end
+
+function publications.singularorplural(singular,plural)
+ if lastconcatsize and lastconcatsize > 1 then
+ context(plural)
+ else
+ context(singular)
+ end
+end
+
+-- loading
+
+do
+
+ local patterns = {
+ "publ-imp-%s.mkvi",
+ "publ-imp-%s.mkiv",
+ "publ-imp-%s.tex",
+ }
+
+ local function failure(name)
+ report("unknown library %a",name)
+ end
+
+ local function action(name,foundname)
+ context.input(foundname)
+ end
+
+ function publications.loaddefinitionfile(name) -- a more specific name
+ resolvers.uselibrary {
+ name = gsub(name,"^publ%-",""),
+ patterns = patterns,
+ action = action,
+ failure = failure,
+ onlyonce = true,
+ }
+ end
+
+ local patterns = {
+ "publ-imp-%s.lua",
+ }
+
+ function publications.loadreplacementfile(name) -- a more specific name
+ resolvers.uselibrary {
+ name = gsub(name,"^publ%-",""),
+ patterns = patterns,
+ action = publications.loaders.registercleaner,
+ failure = failure,
+ onlyonce = true,
+ }
+ end
+
+ implement { name = "btxloaddefinitionfile", actions = publications.loaddefinitionfile, arguments = "string" }
+ implement { name = "btxloadreplacementfile", actions = publications.loadreplacementfile, arguments = "string" }
+
+end
+
+-- lists
+
+do
+
+ publications.lists = publications.lists or { }
+ local lists = publications.lists
+
+ local context = context
+ local structures = structures
+
+ local references = structures.references
+ local sections = structures.sections
+
+ -- per rendering
+
+ local renderings = { } --- per dataset
+
+ setmetatableindex(renderings,function(t,k)
+ local v = {
+ list = { },
+ done = { },
+ alldone = { },
+ used = { },
+ registered = { },
+ ordered = { },
+ shorts = { },
+ method = v_none,
+ texts = setmetatableindex("table"),
+ currentindex = 0,
+ }
+ t[k] = v
+ return v
+ end)
+
+ -- helper
+
+ function lists.register(dataset,tag,short) -- needs checking now that we split
+ local r = renderings[dataset]
+ if not short or short == "" then
+ short = tag
+ end
+ if trace then
+ report("registering publication entry %a with shortcut %a",tag,short)
+ end
+ local top = #r.registered + 1
+ -- do we really need these
+ r.registered[top] = tag
+ r.ordered [tag] = top
+ r.shorts [tag] = short
+ end
+
+ function lists.nofregistered(dataset)
+ return #renderings[dataset].registered
+ end
+
+ local function validkeyword(dataset,tag,keyword,specification) -- todo: pass specification
+ local kw = getcasted(dataset,tag,"keywords",specification)
+ if kw then
+ for i=1,#kw do
+ if keyword[kw[i]] then
+ return true
+ end
+ end
+ end
+ end
+
+ local function registerpage(pages,tag,result,listindex)
+ local p = pages[tag]
+ local r = result[listindex].references
+ if p then
+ local last = p[#p][2]
+ local real = last.realpage
+ if real ~= r.realpage then
+ p[#p+1] = { listindex, r }
+ end
+ else
+ pages[tag] = { { listindex, r } }
+ end
+ end
+
+
+ -- tag | listindex | reference | userdata | dataindex
+
+ local methods = { }
+ lists.methods = methods
+
+ methods[v_dataset] = function(dataset,rendering,keyword)
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ local list = rendering.list
+ for tag, data in sortedhash(luadata) do
+ if not keyword or validkeyword(dataset,tag,keyword) then
+ local index = data.index or 0
+ list[#list+1] = { tag, index, 0, false, index }
+ end
+ end
+ end
+
+ -- todo: names = { "btx" }
+
+ methods[v_force] = function(dataset,rendering,keyword)
+ -- only for checking, can have duplicates, todo: collapse page numbers, although
+ -- we then also need deferred writes
+ local result = structures.lists.filter(rendering.specifications) or { }
+ local list = rendering.list
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ for listindex=1,#result do
+ local r = result[listindex]
+ local u = r.userdata -- better check on metadata.kind == "btx"
+ if u then
+ local set = u.btxset or v_default
+ if set == dataset then
+ local tag = u.btxref
+ if tag and (not keyword or validkeyword(dataset,tag,keyword)) then
+ local data = luadata[tag]
+ list[#list+1] = { tag, listindex, 0, u, data and data.index or 0 }
+ end
+ end
+ end
+ end
+ lists.result = result
+ end
+
+ -- local : if tag and done[tag] ~= section then ...
+ -- global : if tag and not alldone[tag] and done[tag] ~= section then ...
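+ -- with repeated=yes the done and alldone bookkeeping stays local to this call
+ -- so that entries can show up again in a next list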
+
+ methods[v_local] = function(dataset,rendering,keyword)
+ local result = structures.lists.filter(rendering.specifications) or { }
+ local section = sections.currentid()
+ local list = rendering.list
+ local repeated = rendering.repeated == v_yes
+ local r_done = rendering.done
+ local r_alldone = rendering.alldone
+ local done = repeated and { } or r_done
+ local alldone = repeated and { } or r_alldone
+ local doglobal = rendering.method == v_global
+ local traced = { } -- todo: only if interactive (backlinks) or when tracing
+ local pages = { }
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ -- handy for tracing :
+ rendering.result = result
+ --
+ for listindex=1,#result do
+ local r = result[listindex]
+ local u = r.userdata
+ if u then -- better check on metadata.kind == "btx"
+ local set = u.btxset or v_default
+ if set == dataset then
+-- inspect(structures.references.internals[tonumber(u.btxint)])
+ local tag = u.btxref
+ if not tag then
+ -- problem
+ elseif done[tag] == section then -- a bit messy for global and all and so
+ -- skip
+ elseif doglobal and alldone[tag] then
+ -- skip
+ elseif not keyword or validkeyword(dataset,tag,keyword) then
+ if traced then
+ local l = traced[tag]
+ if l then
+ l[#l+1] = u.btxint
+ else
+ local data = luadata[tag]
+ local l = { tag, listindex, 0, u, data and data.index or 0 }
+ list[#list+1] = l
+ traced[tag] = l
+ end
+ else
+ done[tag] = section
+ alldone[tag] = true
+ local data = luadata[tag]
+ list[#list+1] = { tag, listindex, 0, u, data and data.index or 0 }
+ end
+ end
+ if tag then
+ registerpage(pages,tag,result,listindex)
+ end
+ end
+ end
+ end
+ if traced then
+ for tag in next, traced do
+ done[tag] = section
+ alldone[tag] = true
+ end
+ end
+ lists.result = result
+ structures.lists.result = result
+ rendering.pages = pages -- or list.pages
+ end
+
+ methods[v_global] = methods[v_local]
+
+ function lists.collectentries(specification)
+ local dataset = specification.dataset
+ if not dataset then
+ return
+ end
+ local rendering = renderings[dataset]
+ if not rendering then
+ return
+ end
+ local method = specification.method or v_none
+ local ignored = specification.ignored or ""
+ rendering.method = method
+ rendering.ignored = ignored ~= "" and settings_to_set(ignored) or nil
+ rendering.list = { }
+ rendering.done = { }
+ rendering.sorttype = specification.sorttype or v_default
+ rendering.criterium = specification.criterium or v_none
+ rendering.repeated = specification.repeated or v_no
+ rendering.group = specification.group or ""
+ rendering.specifications = specification
+ local filtermethod = methods[method]
+ if not filtermethod then
+ report_list("invalid method %a",method or "")
+ return
+ end
+ report_list("collecting entries using method %a and sort order %a",method,rendering.sorttype)
+ lists.result = { } -- kind of reset
+ local keyword = specification.keyword
+ if keyword and keyword ~= "" then
+ keyword = settings_to_set(keyword)
+ else
+ keyword = nil
+ end
+ filtermethod(dataset,rendering,keyword)
+ local list = rendering.list
+ ctx_btxsetnoflistentries(list and #list or 0)
+ end
+
+ -- for determining width
+
+ local groups = setmetatableindex("number")
+
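+ -- prepareentries weeds out already used entries (unless repeated applies or the
+ -- criterium is all), sorts what remains and assigns the (per group) reference
+ -- numbers, skipping entries that have a parent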
+ function lists.prepareentries(dataset)
+ local rendering = renderings[dataset]
+ local list = rendering.list
+ local used = rendering.used
+ local forceall = rendering.criterium == v_all
+ local repeated = rendering.repeated == v_yes
+ local sorttype = rendering.sorttype or v_default
+ local group = rendering.group or ""
+ local sorter = lists.sorters[sorttype]
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ local details = current.details
+ local newlist = { }
+ local lastreferencenumber = groups[group] -- current.lastreferencenumber or 0
+ for i=1,#list do
+ local li = list[i]
+ local tag = li[1]
+ local entry = luadata[tag]
+ if entry then
+ if forceall or repeated or not used[tag] then
+ newlist[#newlist+1] = li
+ -- already here:
+ if not repeated then
+ used[tag] = true -- beware we keep the old state (one can always use criterium=all)
+ end
+ end
+ end
+ end
+ if type(sorter) == "function" then
+ list = sorter(dataset,rendering,newlist,sorttype) or newlist
+ else
+ list = newlist
+ end
+ local newlist = { }
+ local tagtolistindex = { }
+ rendering.tagtolistindex = tagtolistindex
+ for i=1,#list do
+ local li = list[i]
+ local tag = li[1]
+ local entry = luadata[tag]
+ if entry then
+ local detail = details[tag]
+ if not detail then
+ -- fatal error
+ report("fatal error, missing details for tag %a in dataset %a (enhanced: %s)",tag,dataset,current.enhanced and "yes" or "no")
+ -- lastreferencenumber = lastreferencenumber + 1
+ -- details[tag] = { referencenumber = lastreferencenumber }
+ -- li[3] = lastreferencenumber
+ -- tagtolistindex[tag] = i
+ -- newlist[#newlist+1] = li
+ elseif detail.parent then
+ -- skip this one
+ else
+ local referencenumber = detail.referencenumber
+ if not referencenumber then
+ lastreferencenumber = lastreferencenumber + 1
+ referencenumber = lastreferencenumber
+ detail.referencenumber = lastreferencenumber
+ end
+ li[3] = referencenumber
+ tagtolistindex[tag] = i
+ newlist[#newlist+1] = li
+ end
+ end
+ end
+ groups[group] = lastreferencenumber
+ rendering.list = newlist
+ end
+
+ function lists.fetchentries(dataset)
+ local rendering = renderings[dataset]
+ local list = rendering.list
+ if list then
+ for i=1,#list do
+ local li = list[i]
+ ctx_btxsettag(li[1])
+ ctx_btxsetnumber(li[3])
+ ctx_btxchecklistentry()
+ end
+ end
+ end
+
+ -- for rendering
+
+ -- setspecification
+
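+ -- btxflushpages collapses the pages on which an entry was cited into ranges of
+ -- consecutive realpages and feeds them one by one to the page setup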
+ local function btxflushpages(dataset,tag)
+ -- todo: interaction
+ local rendering = renderings[dataset]
+ local pages = rendering.pages
+ if not pages then
+ return
+ else
+ pages = pages[tag]
+ end
+ if not pages then
+ return
+ end
+ local nofpages = #pages
+ if nofpages == 0 then
+ return
+ end
+ local first_p = nil
+ local first_r = nil
+ local last_p = nil
+ local last_r = nil
+ local ranges = { }
+ local nofdone = 0
+ local function flush()
+ if last_r and first_r ~= last_r then
+ ranges[#ranges+1] = { first_p, last_p }
+ else
+ ranges[#ranges+1] = { first_p }
+ end
+ end
+ for i=1,nofpages do
+ local next_p = pages[i]
+ local next_r = next_p[2].realpage
+ if not first_r then
+ first_p = next_p
+ first_r = next_r
+ elseif last_r + 1 == next_r then
+ -- continue
+ elseif first_r then
+ flush()
+ first_p = next_p
+ first_r = next_r
+ end
+ last_p = next_p
+ last_r = next_r
+ end
+ if first_r then
+ flush()
+ end
+ local nofranges = #ranges
+ for i=1,nofranges do
+ local r = ranges[i]
+ ctx_btxsetconcat(concatstate(i,nofranges))
+ local first, last = r[1], r[2]
+ ctx_btxsetfirstinternal(first[2].internal)
+ ctx_btxsetfirstpage(first[1])
+ if last then
+ ctx_btxsetlastinternal(last[2].internal)
+ ctx_btxsetlastpage(last[1])
+ end
+ if trace_detail then
+ report("expanding page setup")
+ end
+ ctx_btxpagesetup("") -- nothing yet
+ end
+ end
+
+ implement {
+ name = "btxflushpages",
+ actions = btxflushpages,
+ arguments = { "string", "string" }
+ }
+
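+ -- sameasprevious tells if the given field of list entry i equals the one of the
+ -- previous entry; with method always (or doublesided and a new spread) a page
+ -- change forces a difference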
+ function lists.sameasprevious(dataset,i,name,order,method)
+ local rendering = renderings[dataset]
+ local list = rendering.list
+ local n = tonumber(i)
+ if n and n > 1 and n <= #list then
+ local luadata = datasets[dataset].luadata
+ local p_index = list[n-1][1]
+ local c_index = list[n ][1]
+ local previous = getdirect(dataset,luadata[p_index],name)
+ local current = getdirect(dataset,luadata[c_index],name)
+
+ -- authors are a special case
+
+ -- if not order then
+ -- order = gettexcounter("c_btx_list_reference")
+ -- end
+ if order and order > 0 and (method == v_always or method == v_doublesided) then
+ local clist = listtolist[order]
+ local plist = listtolist[order-1]
+ if clist and plist then
+ local crealpage = clist.references.realpage
+ local prealpage = plist.references.realpage
+ if crealpage ~= prealpage then
+ if method == v_always or not conditionals.layoutisdoublesided then
+ if trace_detail then
+ report("previous %a, current %a, different page",previous,current)
+ end
+ return false
+ elseif crealpage % 2 == 0 then
+ if trace_detail then
+ report("previous %a, current %a, different page",previous,current)
+ end
+ return false
+ end
+ end
+ end
+ end
+ local sameentry = false
+ if current and current == previous then
+ sameentry = true
+ else
+ local p_casted = getcasted(dataset,p_index,name)
+ local c_casted = getcasted(dataset,c_index,name)
+ if c_casted and c_casted == p_casted then
+ sameentry = true
+ elseif type(c_casted) == "table" and type(p_casted) == "table" then
+ sameentry = table.identical(c_casted,p_casted)
+ end
+ end
+ if trace_detail then
+ if sameentry then
+ report("previous %a, current %a, same entry",previous,current)
+ else
+ report("previous %a, current %a, different entry",previous,current)
+ end
+ end
+ return sameentry
+ else
+ return false
+ end
+ end
+
+ function lists.combiinlist(dataset,tag)
+ local rendering = renderings[dataset]
+ local list = rendering.list
+ local toindex = rendering.tagtolistindex
+ return toindex and toindex[tag]
+ end
+
+ function lists.flushcombi(dataset,tag)
+ local rendering = renderings[dataset]
+ local list = rendering.list
+ local toindex = rendering.tagtolistindex
+ local listindex = toindex and toindex[tag]
+ if listindex then
+ local li = list[listindex]
+ if li then
+ local data = datasets[dataset]
+ local luadata = data.luadata
+ local details = data.details
+ local tag = li[1]
+ local listindex = li[2]
+ local n = li[3]
+ local entry = luadata[tag]
+ local detail = details[tag]
+ ctx_btxstartcombientry()
+ ctx_btxsetcurrentlistindex(listindex)
+ ctx_btxsetcategory(entry.category or "unknown")
+ ctx_btxsettag(tag)
+ ctx_btxsetnumber(n)
+ local language = entry.language
+ if language then
+ ctx_btxsetlanguage(language)
+ end
+ local authorsuffix = detail.authorsuffix
+ if authorsuffix then
+ ctx_btxsetsuffix(authorsuffix)
+ end
+ ctx_btxhandlecombientry()
+ ctx_btxstopcombientry()
+ end
+ end
+ end
+
+ function lists.flushentry(dataset,i,textmode)
+ local rendering = renderings[dataset]
+ local list = rendering.list
+ local li = list[i]
+ if li then
+ local data = datasets[dataset]
+ local luadata = data.luadata
+ local details = data.details
+ local tag = li[1]
+ local listindex = li[2]
+ local n = li[3]
+ local entry = luadata[tag]
+ local detail = details[tag]
+ --
+ ctx_btxstartlistentry()
+ ctx_btxsetcurrentlistentry(i) -- redundant
+ ctx_btxsetcurrentlistindex(listindex or 0)
+ local children = detail.children
+ local language = entry.language
+ if children then
+ ctx_btxsetcombis(concat(children,","))
+ end
+ ctx_btxsetcategory(entry.category or "unknown")
+ ctx_btxsettag(tag)
+ ctx_btxsetnumber(n)
+ if language then
+ ctx_btxsetlanguage(language)
+ end
+ local userdata = li[4]
+ if userdata then
+ local b = userdata.btxbtx
+ local a = userdata.btxatx
+ if b then
+ ctx_btxsetbefore(b)
+ end
+ if a then
+ ctx_btxsetafter(a)
+ end
+ local bl = userdata.btxint
+ if bl and bl ~= "" then
+ ctx_btxsetbacklink(bl)
+ end
+ end
+ local authorsuffix = detail.authorsuffix
+ if authorsuffix then
+ ctx_btxsetsuffix(authorsuffix)
+ end
+ rendering.userdata = userdata
+ if textmode then
+ ctx_btxhandlelisttextentry()
+ else
+ ctx_btxhandlelistentry()
+ end
+ ctx_btxstoplistentry()
+ --
+ -- context(function()
+ -- -- wrapup
+ -- rendering.ignoredfields = nil
+ -- end)
+ end
+ end
+
+ local function getuserdata(dataset,key)
+ local rendering = renderings[dataset]
+ if rendering then
+ local userdata = rendering.userdata
+ if userdata then
+ local value = userdata[key]
+ if value and value ~= "" then
+ return value
+ end
+ end
+ end
+ end
+
+ lists.uservariable = getuserdata
+
+ function lists.filterall(dataset)
+ local r = renderings[dataset]
+ local list = r.list
+ local registered = r.registered
+ for i=1,#registered do
+ list[i] = { registered[i], i, 0, false, false }
+ end
+ end
+
+ implement {
+ name = "btxuservariable",
+ actions = { getuserdata, context },
+ arguments = { "string", "string" }
+ }
+
+ implement {
+ name = "btxdoifelseuservariable",
+ actions = { getuserdata, ctx_doifelse },
+ arguments = { "string", "string" }
+ }
+
+ -- implement {
+ -- name = "btxresolvelistreference",
+ -- actions = lists.resolve,
+ -- arguments = { "string", "string" }
+ -- }
+
+ implement {
+ name = "btxcollectlistentries",
+ actions = lists.collectentries,
+ arguments = {
+ {
+ { "names" },
+ { "criterium" },
+ { "reference" },
+ { "method" },
+ { "dataset" },
+ { "keyword" },
+ { "sorttype" },
+ { "repeated" },
+ { "ignored" },
+ { "group" },
+ }
+ }
+ }
+
+ implement {
+ name = "btxpreparelistentries",
+ actions = lists.prepareentries,
+ arguments = { "string" },
+ }
+
+ implement {
+ name = "btxfetchlistentries",
+ actions = lists.fetchentries,
+ arguments = { "string" },
+ }
+
+ implement {
+ name = "btxflushlistentry",
+ actions = lists.flushentry,
+ arguments = { "string", "integer" }
+ }
+
+ implement {
+ name = "btxflushlistcombi",
+ actions = lists.flushcombi,
+ arguments = { "string", "string" }
+ }
+
+ implement {
+ name = "btxdoifelsesameasprevious",
+ actions = { lists.sameasprevious, ctx_doifelse },
+ arguments = { "string", "integer", "string", "integer", "string" }
+ }
+
+ implement {
+ name = "btxdoifelsecombiinlist",
+ actions = { lists.combiinlist, ctx_doifelse },
+ arguments = { "string", "string" }
+ }
+
+end
+
+do
+
+ local citevariants = { }
+ publications.citevariants = citevariants
+
+ local function btxhandlecite(specification)
+ local dataset = specification.dataset or v_default
+ local reference = specification.reference
+ local variant = specification.variant
+ if not variant or variant == "" then
+ variant = "default"
+ end
+ if not reference or reference == "" then
+ return
+ end
+ --
+ local data = datasets[dataset]
+ if not data.suffixed then
+ data.authorconversion = specification.authorconversion
+ publications.enhancers.suffixes(data)
+ end
+ --
+ specification.variant = variant
+ specification.compress = specification.compress
+ specification.markentry = specification.markentry ~= false
+ --
+ if specification.sorttype == v_yes then
+ specification.sorttype = v_normal
+ end
+ --
+ local prefix, rest = lpegmatch(prefixsplitter,reference)
+ if prefix and rest then
+ dataset = prefix
+ specification.dataset = prefix
+ specification.reference = rest
+ end
+ --
+ if trace_cite then
+ report_cite("inject, dataset: %s, tag: %s, variant: %s, compressed",
+ specification.dataset or "-",
+ specification.reference,
+ specification.variant
+ )
+ end
+ --
+ ctx_btxsetdataset(dataset)
+ --
+ citevariants[variant](specification) -- we always fall back on default
+ end
+
+ local function btxhandlenocite(specification)
+ local dataset = specification.dataset or v_default
+ local reference = specification.reference
+ if not reference or reference == "" then
+ return
+ end
+ --
+ local markentry = specification.markentry ~= false
+ local internal = specification.internal or ""
+ --
+ local prefix, rest = lpegmatch(prefixsplitter,reference)
+ if rest then
+ dataset = prefix
+ reference = rest
+ end
+ --
+ if trace_cite then
+ report_cite("mark, dataset: %s, tags: %s",dataset or "-",reference)
+ end
+ --
+ local reference = publications.parenttag(dataset,reference)
+ --
+ local found, todo, list = findallused(dataset,reference,internal)
+ --
+ tobemarked = markentry and todo
+ if found and tobemarked then
+ flushmarked(dataset,list)
+ btxflushmarked() -- here (could also be done in caller)
+ end
+ end
+
+ implement {
+ name = "btxhandlecite",
+ actions = btxhandlecite,
+ arguments = {
+ {
+ { "dataset" },
+ { "reference" },
+ { "markentry", "boolean" },
+ { "variant" },
+ { "sorttype" },
+ { "compress" },
+ { "authorconversion" },
+ { "author" },
+ { "lefttext" },
+ { "righttext" },
+ { "before" },
+ { "after" },
+ }
+ }
+ }
+
+ implement {
+ name = "btxhandlenocite",
+ actions = btxhandlenocite,
+ arguments = {
+ {
+ { "dataset" },
+ { "reference" },
+ { "markentry", "boolean" },
+ }
+ }
+ }
+
+ -- sorter
+
+ local keysorter = function(a,b)
+ local ak = a.sortkey
+ local bk = b.sortkey
+ if ak == bk then
+ local as = a.suffix -- numeric
+ local bs = b.suffix -- numeric
+ if as and bs then
+ return (as or 0) < (bs or 0)
+ else
+ return false
+ end
+ else
+ return ak < bk
+ end
+ end
+
+ local revsorter = function(a,b)
+ return keysorter(b,a)
+ end
+
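+ -- compresslist optionally sorts the source and, when compressing a numeric
+ -- variant, collapses successive sortkeys into first/last ranges; entries that
+ -- carry a suffix are never merged into a range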
+ local function compresslist(source,specification)
+ if specification.sorttype == v_normal then
+ sort(source,keysorter)
+ elseif specification.sorttype == v_reverse then
+ sort(source,revsorter)
+ end
+ if specification and specification.compress == v_yes and specification.numeric then
+ local first, last, firstr, lastr
+ local target, noftarget, tags = { }, 0, { }
+ local oldvalue = nil
+ local function flushrange()
+ noftarget = noftarget + 1
+ if last > first + 1 then
+ target[noftarget] = {
+ first = firstr,
+ last = lastr,
+ tags = tags,
+ }
+ else
+ target[noftarget] = firstr
+ if last > first then
+ noftarget = noftarget + 1
+ target[noftarget] = lastr
+ end
+ end
+ tags = { }
+ end
+ for i=1,#source do
+ local entry = source[i]
+ local current = entry.sortkey -- so we need a sortkey !
+ if entry.suffix then
+ if not first then
+ first, last, firstr, lastr = current, current, entry, entry
+ else
+ flushrange()
+ first, last, firstr, lastr = current, current, entry, entry
+ end
+ else
+ if not first then
+ first, last, firstr, lastr = current, current, entry, entry
+ elseif current == last + 1 then
+ last, lastr = current, entry
+ else
+ flushrange()
+ first, last, firstr, lastr = current, current, entry, entry
+ end
+ end
+ tags[#tags+1] = entry.tag
+ end
+ if first and last then
+ flushrange()
+ end
+ return target
+ else
+ local target, noftarget = { }, 0
+ for i=1,#source do
+ local entry = source[i]
+ noftarget = noftarget + 1
+ target[noftarget] = {
+ first = entry,
+ tags = { entry.tag },
+ }
+ end
+ return target
+ end
+ end
+
+ -- local source = {
+ -- { tag = "one", internal = 1, value = "foo", page = 1 },
+ -- { tag = "two", internal = 2, value = "bar", page = 2 },
+ -- { tag = "three", internal = 3, value = "gnu", page = 3 },
+ -- }
+ --
+ -- local target = compresslist(source)
+
+ local numberonly = R("09")^1 / tonumber + P(1)^0
+ local f_missing = formatters["<%s>"]
+
+ -- maybe also sparse (e.g. pages)
+
+ -- a bit redundant access to datasets
+
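+ -- processcite is the generic driver behind the cite variants: it resolves the
+ -- reference, lets the variant specific setter collect data per entry, optionally
+ -- sorts and compresses, and then flushes each entry (or range) through the cite
+ -- setup by means of the getter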
+ local function processcite(presets,specification)
+ --
+ if specification then
+ setmetatableindex(specification,presets)
+ else
+ specification = presets
+ end
+ --
+ local dataset = specification.dataset
+ local reference = specification.reference
+ local internal = specification.internal
+ local setup = specification.variant
+ local compress = specification.compress
+ local sorttype = specification.sorttype
+ local getter = specification.getter
+ local setter = specification.setter
+ local compressor = specification.compressor
+ --
+ local reference = publications.parenttag(dataset,reference)
+ --
+ local found, todo, list = findallused(dataset,reference,internal)
+ tobemarked = specification.markentry and todo
+ --
+ if not found or #found == 0 then
+ report("no entry %a found in dataset %a",reference,dataset)
+ elseif not setup then
+ report("invalid reference for %a",reference)
+ else
+ if trace_cite then
+ report("processing reference %a",reference)
+ end
+ local source = { }
+ local luadata = datasets[dataset].luadata
+ for i=1,#found do
+ local entry = found[i]
+-- inspect(entry)
+ local tag = entry.userdata.btxref
+ local ldata = luadata[tag]
+ local data = {
+ internal = entry.references.internal,
+ language = ldata.language,
+ dataset = dataset,
+ tag = tag,
+ -- combis = entry.userdata.btxcom,
+ -- luadata = ldata,
+ }
+ setter(data,dataset,tag,entry)
+ if type(data) == "table" then
+ source[#source+1] = data
+ else
+ report("error in cite rendering %a",setup or "?")
+ end
+ end
+
+ local lefttext = specification.lefttext
+ local righttext = specification.righttext
+ local before = specification.before
+ local after = specification.after
+
+ if lefttext and lefttext ~= "" then lefttext = settings_to_array(lefttext) end
+ if righttext and righttext ~= "" then righttext = settings_to_array(righttext) end
+ if before and before ~= "" then before = settings_to_array(before) end
+ if after and after ~= "" then after = settings_to_array(after) end
+
+ local function flush(i,n,entry,last)
+ local tag = entry.tag
+ local currentcitation = markcite(dataset,tag)
+ --
+ ctx_btxstartcite()
+ ctx_btxsettag(tag)
+ ctx_btxsetcategory(entry.category or "unknown")
+ --
+ if lefttext then local text = lefttext [i] ; if text and text ~= "" then ctx_btxsetlefttext (text) end end
+ if righttext then local text = righttext[i] ; if text and text ~= "" then ctx_btxsetrighttext(text) end end
+ if before then local text = before [i] ; if text and text ~= "" then ctx_btxsetbefore (text) end end
+ if after then local text = after [i] ; if text and text ~= "" then ctx_btxsetafter (text) end end
+ --
+ ctx_btxsetbacklink(currentcitation)
+ local bl = listtocite[currentcitation]
+ if bl then
+ -- we refer to a coming list entry
+ ctx_btxsetinternal(bl.references.internal or "")
+ else
+ -- we refer to a previous list entry
+ ctx_btxsetinternal(entry.internal or "")
+ end
+ local language = entry.language
+ if language then
+ ctx_btxsetlanguage(language)
+ end
+ -- local combis = entry.combis
+ -- if combis then
+ -- ctx_btxsetcombis(combis)
+ -- end
+ if not getter(entry,last,nil,specification) then
+ ctx_btxsetfirst("") -- (f_missing(tag))
+ end
+ ctx_btxsetconcat(concatstate(i,n))
+ if trace_detail then
+ report("expanding cite setup %a",setup)
+ end
+ ctx_btxcitesetup(setup)
+ ctx_btxstopcite()
+ end
+ if sorttype == v_normal or sorttype == v_reverse then
+ local target = (compressor or compresslist)(source,specification)
+ local nofcollected = #target
+ if nofcollected == 0 then
+ local nofcollected = #source
+ if nofcollected == 0 then
+ unknowncite(reference)
+ else
+ for i=1,nofcollected do
+ flush(i,nofcollected,source[i])
+ end
+ end
+ else
+ for i=1,nofcollected do
+ local entry = target[i]
+ local first = entry.first
+ if first then
+ flush(i,nofcollected,first,entry.last)
+ else
+ flush(i,nofcollected,entry)
+ end
+ end
+ end
+ else
+ local nofcollected = #source
+ if nofcollected == 0 then
+ unknowncite(reference)
+ else
+ for i=1,nofcollected do
+ flush(i,nofcollected,source[i])
+ end
+ end
+ end
+ end
+ if tobemarked then
+ flushmarked(dataset,list)
+ btxflushmarked() -- here (could also be done in caller)
+ end
+ end
+
+ --
+
+ local function simplegetter(first,last,field,specification)
+ local value = first[field]
+ if value then
+ ctx_btxsetfirst(value)
+ if last then
+ ctx_btxsetsecond(last[field])
+ end
+ return true
+ end
+ end
+
+ local setters = setmetatableindex({},function(t,k)
+ local v = function(data,dataset,tag,entry)
+ local value = getcasted(dataset,tag,k)
+ data.value = value -- not really needed
+ data[k] = value
+ data.sortkey = value
+ data.sortfld = k
+ end
+ t[k] = v
+ return v
+ end)
+
+ local getters = setmetatableindex({},function(t,k)
+ local v = function(first,last,_,specification)
+ return simplegetter(first,last,k,specification) -- maybe _ or k
+ end
+ t[k] = v
+ return v
+ end)
+
+ setmetatableindex(citevariants,function(t,k)
+ local p = defaultvariant or "default"
+ local v = rawget(t,p)
+ report_cite("variant %a falls back on %a setter and getter with setup %a",k,p,k)
+ t[k] = v
+ return v
+ end)
+
+ function citevariants.default(presets)
+ local variant = presets.variant
+ processcite(presets,{
+ setup = variant,
+ setter = setters[variant],
+ getter = getters[variant],
+ })
+ end
+
+ -- category
+
+ do
+
+ local function setter(data,dataset,tag,entry)
+ data.category = getfield(dataset,tag,"category")
+ end
+
+ local function getter(first,last,_,specification)
+ return simplegetter(first,last,"category",specification)
+ end
+
+ function citevariants.category(presets)
+ processcite(presets,{
+ setter = setter,
+ getter = getter,
+ })
+ end
+
+ end
+
+
+ -- entry (we could provide a generic one)
+
+ do
+
+ local function setter(data,dataset,tag,entry)
+ -- nothing
+ end
+
+ local function getter(first,last,_,specification) -- last not used
+ ctx_btxsetfirst(first.tag)
+ end
+
+ function citevariants.entry(presets)
+ processcite(presets,{
+ compress = false,
+ setter = setter,
+ getter = getter,
+ })
+ end
+
+ end
+
+ -- short
+
+ do
+
+ local function setter(data,dataset,tag,entry)
+ local short = getdetail(dataset,tag,"shorthash")
+ local suffix = getdetail(dataset,tag,"shortsuffix")
+ data.short = short
+ data.sortkey = short
+ data.suffix = suffix
+ end
+
+ local function getter(first,last,_,specification) -- last not used
+ local short = first.short
+ if short then
+ local suffix = first.suffix
+ ctx_btxsetfirst(short)
+ if suffix then
+ ctx_btxsetsuffix(suffix) -- watch out: third
+ end
+ return true
+ end
+ end
+
+ function citevariants.short(presets)
+ processcite(presets,{
+ setter = setter,
+ getter = getter,
+ })
+ end
+
+ end
+
+ -- pages (no compress)
+
+ do
+
+ local function setter(data,dataset,tag,entry)
+ data.pages = getcasted(dataset,tag,"pages")
+ end
+
+ local function getter(first,last,_,specification)
+ local pages = first.pages
+ if pages then
+ if type(pages) == "table" then
+ ctx_btxsetfirst(pages[1])
+ ctx_btxsetsecond(pages[2])
+ else
+ ctx_btxsetfirst(pages)
+ end
+ return true
+ end
+ end
+
+ function citevariants.page(presets)
+ processcite(presets,{
+ setter = setter,
+ getter = getter,
+ })
+ end
+
+ end
+
+ -- num
+
+ do
+
+ local function setter(data,dataset,tag,entry)
+ local entries = entry.entries
+ local text = entries and entries.text or "?"
+ data.num = text
+ data.sortkey = tonumber(text) or text
+ end
+
+ local function getter(first,last,tag,specification)
+ return simplegetter(first,last,"num",specification)
+ end
+
+ function citevariants.num(presets)
+ processcite(presets,{
+ numeric = true,
+ setter = setter,
+ getter = getter,
+ })
+ end
+
+ citevariants.textnum = citevariants.num -- should not be needed
+
+ end
+
+ -- year
+
+ do
+
+ local function setter(data,dataset,tag,entry)
+ local year = getfield (dataset,tag,"year")
+ local suffix = getdetail(dataset,tag,"authorsuffix")
+ data.year = year
+ data.suffix = suffix
+ data.sortkey = tonumber(year) or 9999
+ end
+
+ local function getter(first,last,_,specification)
+ return simplegetter(first,last,"year",specification)
+ end
+
+ function citevariants.year(presets)
+ processcite(presets,{
+ numeric = true,
+ setter = setter,
+ getter = getter,
+ })
+ end
+
+ end
+
+ -- index
+
+ do
+
+ local function setter(data,dataset,tag,entry)
+ local index = getfield(dataset,tag,"index")
+ data.index = index
+ data.sortkey = index
+ end
+
+ local function getter(first,last,_,specification)
+ return simplegetter(first,last,"index",specification)
+ end
+
+ function citevariants.index(presets)
+ processcite(presets,{
+ setter = setter,
+ getter = getter,
+ numeric = true,
+ })
+ end
+
+ end
+
+ -- tag
+
+ do
+
+ local function setter(data,dataset,tag,entry)
+ data.tag = tag
+ data.sortkey = tag
+ end
+
+ local function getter(first,last,_,specification)
+ return simplegetter(first,last,"tag",specification)
+ end
+
+ function citevariants.tag(presets)
+ return processcite(presets,{
+ setter = setter,
+ getter = getter,
+ })
+ end
+
+ end
+
+ -- keyword
+
+ do
+
+ local function listof(list)
+ local size = type(list) == "table" and #list or 0
+ if size > 0 then
+ return function()
+ for i=1,size do
+ ctx_btxsetfirst(list[i])
+ ctx_btxsetconcat(concatstate(i,size))
+ ctx_btxcitesetup("listelement")
+ end
+ return true
+ end
+ else
+ return "?" -- unknown
+ end
+ end
+
+ local function setter(data,dataset,tag,entry)
+ data.keywords = getcasted(dataset,tag,"keywords")
+ end
+
+ local function getter(first,last,_,specification)
+ context(listof(first.keywords))
+ end
+
+ function citevariants.keywords(presets)
+ return processcite(presets,{
+ variant = "keywords",
+ setter = setter,
+ getter = getter,
+ })
+ end
+
+ end
+
+ -- authors
+
+ do
+
+ -- is this good enough?
+
+ local keysorter = function(a,b)
+ local ak = a.authorhash
+ local bk = b.authorhash
+ if ak == bk then
+ local as = a.authorsuffix -- numeric
+ local bs = b.authorsuffix -- numeric
+ if as and bs then
+ return (as or 0) < (bs or 0)
+ else
+ return false
+ end
+ elseif ak and bk then
+ return ak < bk
+ else
+ return false
+ end
+ end
+
+ local revsorter = function(a,b)
+ return keysorter(b,a)
+ end
+
+ local currentbtxciteauthor = function()
+ context.currentbtxciteauthor()
+ return true -- needed?
+ end
+
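+ -- authorcompressor groups the found entries by authorhash so that a shared
+ -- author ends up only once in the result with the related entries attached to it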
+ local function authorcompressor(found,specification)
+ -- HERE
+ if specification.sorttype == v_normal then
+ sort(found,keysorter)
+ elseif specification.sorttype == v_reverse then
+ sort(found,revsorter)
+ end
+ local result = { }
+ local entries = { }
+ for i=1,#found do
+ local entry = found[i]
+ local author = entry.authorhash
+ if author then
+ local aentries = entries[author]
+ if aentries then
+ aentries[#aentries+1] = entry
+ else
+ entries[author] = { entry }
+ end
+ end
+ end
+ -- beware: we use tables as hash so we get a cycle when inspecting (unless we start
+ -- hashing with strings)
+ for i=1,#found do
+ local entry = found[i]
+ local author = entry.authorhash
+ if author then
+ local aentries = entries[author]
+ if not aentries then
+ result[#result+1] = entry
+ elseif aentries == true then
+ -- already done
+ else
+ result[#result+1] = entry
+ entry.entries = aentries
+ entries[author] = true
+ end
+ end
+ end
+ return result
+ end
+
+ local function authorconcat(target,key,setup)
+ ctx_btxstartsubcite(setup)
+ local nofcollected = #target
+ if nofcollected == 0 then
+ unknowncite(tag)
+ else
+ for i=1,nofcollected do
+ local entry = target[i]
+ local first = entry.first
+ local tag = entry.tag
+ local currentcitation = markcite(entry.dataset,tag)
+ ctx_btxstartciteauthor()
+ ctx_btxsettag(tag)
+ ctx_btxsetbacklink(currentcitation)
+ local bl = listtocite[currentcitation]
+ ctx_btxsetinternal(bl and bl.references.internal or "")
+ if first then
+ ctx_btxsetfirst(first[key] or "") -- f_missing(first.tag))
+ local suffix = entry.suffix
+ local last = entry.last
+ local value = last and last[key]
+ if value then
+ ctx_btxsetsecond(value)
+ end
+ if suffix then
+ ctx_btxsetsuffix(suffix)
+ end
+ else
+ local suffix = entry.suffix
+ local value = entry[key] or "" -- f_missing(tag)
+ ctx_btxsetfirst(value)
+ if suffix then
+ ctx_btxsetsuffix(suffix)
+ end
+ end
+ ctx_btxsetconcat(concatstate(i,nofcollected))
+ if trace_detail then
+ report("expanding %a cite setup %a","multiple author",setup)
+ end
+ ctx_btxsubcitesetup(setup)
+ ctx_btxstopciteauthor()
+ end
+ end
+ ctx_btxstopsubcite()
+ end
+
+ local function authorsingle(entry,key,setup)
+ ctx_btxstartsubcite(setup)
+ ctx_btxstartciteauthor()
+ local tag = entry.tag
+ ctx_btxsettag(tag)
+ -- local currentcitation = markcite(entry.dataset,tag)
+ -- ctx_btxsetbacklink(currentcitation)
+ -- local bl = listtocite[currentcitation]
+ -- ctx_btxsetinternal(bl and bl.references.internal or "")
+ ctx_btxsetfirst(entry[key] or "") -- f_missing(tag)
+ local suffix = entry.suffix
+ if suffix then
+ ctx_btxsetsuffix(suffix)
+ end
+ if trace_detail then
+ report("expanding %a cite setup %a","single author",setup)
+ end
+ ctx_btxcitesetup(setup)
+ ctx_btxstopciteauthor()
+ ctx_btxstopsubcite()
+ end
+
+ local partialinteractive = false
+
+ local function authorgetter(first,last,key,specification) -- only first
+ -- ctx_btxsetfirst(first.author) -- unformatted
+ -- ctx_btxsetfirst(currentbtxciteauthor) -- formatter (much slower)
+ if first.type == "author" then
+ ctx_btxsetfirst(currentbtxciteauthor) -- formatter (much slower)
+ else
+ ctx_btxsetfirst(first.author) -- unformatted
+ end
+ local entries = first.entries
+ -- alternatively we can use a concat with one ... so that we can only make the
+ -- year interactive, as with the concat
+ if partialinteractive and not entries then
+ entries = { first }
+ end
+ if entries then
+ -- happens with year
+ local c = compresslist(entries,specification)
+ local f = function() authorconcat(c,key,specification.setup or "author") return true end -- indeed return true?
+ ctx_btxsetcount(#c)
+ ctx_btxsetsecond(f)
+ elseif first then
+ -- happens with num
+ local f = function() authorsingle(first,key,specification.setup or "author") return true end -- indeed return true?
+ ctx_btxsetcount(0)
+ ctx_btxsetsecond(f)
+ end
+ return true
+ end
+
+ -- author
+
+ local function setter(data,dataset,tag,entry)
+ data.author, data.field, data.type = getcasted(dataset,tag,"author")
+ data.sortkey = text and lpegmatch(numberonly,text)
+ data.authorhash = getdetail(dataset,tag,"authorhash") -- todo let getcasted return
+ end
+
+ local function getter(first,last,_,specification)
+ if first.type == "author" then
+ ctx_btxsetfirst(currentbtxciteauthor) -- formatter (much slower)
+ else
+ ctx_btxsetfirst(first.author) -- unformatted
+ end
+ return true
+ end
+
+ function citevariants.author(presets)
+ processcite(presets,{
+ variant = "author",
+ setup = "author",
+ setter = setter,
+ getter = getter,
+ compressor = authorcompressor,
+ })
+ end
+
+ -- authornum
+
+ local function setter(data,dataset,tag,entry)
+ local entries = entry.entries
+ local text = entries and entries.text or "?"
+ data.author, data.field, data.type = getcasted(dataset,tag,"author")
+ data.authorhash = getdetail(dataset,tag,"authorhash") -- todo let getcasted return
+ data.num = text
+ data.sortkey = text and lpegmatch(numberonly,text)
+ end
+
+ local function getter(first,last,_,specification)
+ authorgetter(first,last,"num",specification)
+ return true
+ end
+
+ function citevariants.authornum(presets)
+ processcite(presets,{
+ variant = "authornum",
+ setup = "author:num",
+ numeric = true,
+ setter = setter,
+ getter = getter,
+ compressor = authorcompressor,
+ })
+ end
+
+ -- authoryear | authoryears
+
+ local function setter(data,dataset,tag,entry)
+ data.author, data.field, data.type = getcasted(dataset,tag,"author")
+ data.authorhash = getdetail(dataset,tag,"authorhash") -- todo let getcasted return
+ local year = getfield (dataset,tag,"year")
+ local suffix = getdetail(dataset,tag,"authorsuffix")
+ data.year = year
+ data.suffix = suffix
+ data.sortkey = tonumber(year) or 9999
+ end
+
+ local function getter(first,last,_,specification)
+ authorgetter(first,last,"year",specification)
+ return true
+ end
+
+ function citevariants.authoryear(presets)
+ processcite(presets,{
+ variant = "authoryear",
+ setup = "author:year",
+ numeric = true,
+ setter = setter,
+ getter = getter,
+ compressor = authorcompressor,
+ })
+ end
+
+ local function getter(first,last,_,specification)
+ authorgetter(first,last,"year",specification)
+ return true
+ end
+
+ function citevariants.authoryears(presets)
+ processcite(presets,{
+ variant = "authoryears",
+ setup = "author:years",
+ numeric = true,
+ setter = setter,
+ getter = getter,
+ compressor = authorcompressor,
+ })
+ end
+
+ end
+
+end
+
+-- List variants
+
+do
+
+ local listvariants = { }
+ publications.listvariants = listvariants
+
+ local function btxlistvariant(dataset,block,tag,variant,listindex)
+ local action = listvariants[variant] or listvariants.default
+ if action then
+ action(dataset,block,tag,variant,tonumber(listindex) or 0)
+ end
+ end
+
+ implement {
+ name = "btxlistvariant",
+ actions = btxlistvariant,
+ arguments = { "string", "string", "string", "string", "string" } -- not integer here
+ }
+
+ function listvariants.default(dataset,block,tag,variant)
+ ctx_btxsetfirst("?")
+ if trace_detail then
+ report("expanding %a list setup %a","default",variant)
+ end
+ ctx_btxnumberingsetup("default")
+ end
+
+ function listvariants.num(dataset,block,tag,variant,listindex)
+ ctx_btxsetfirst(listindex)
+ if trace_detail then
+ report("expanding %a list setup %a","num",variant)
+ end
+ ctx_btxnumberingsetup(variant or "num")
+ end
+
+ -- listvariants[v_yes] = listvariants.num
+
+ function listvariants.index(dataset,block,tag,variant,listindex)
+ local index = getdetail(dataset,tag,"index")
+ ctx_btxsetfirst(index or "?")
+ if trace_detail then
+ report("expanding %a list setup %a","index",variant)
+ end
+ ctx_btxnumberingsetup(variant or "index")
+ end
+
+ function listvariants.tag(dataset,block,tag,variant,listindex)
+ ctx_btxsetfirst(tag)
+ if trace_detail then
+ report("expanding %a list setup %a","tag",variant)
+ end
+ ctx_btxnumberingsetup(variant or "tag")
+ end
+
+ function listvariants.short(dataset,block,tag,variant,listindex)
+ local short = getdetail(dataset,tag,"shorthash")
+ local suffix = getdetail(dataset,tag,"shortsuffix")
+ if short then
+ ctx_btxsetfirst(short)
+ end
+ if suffix then
+ ctx_btxsetsuffix(suffix)
+ end
+ if trace_detail then
+ report("expanding %a list setup %a","short",variant)
+ end
+ ctx_btxnumberingsetup(variant or "short")
+ end
+
+ function listvariants.page(dataset,block,tag,variant,listindex)
+ local rendering = renderings[dataset]
+ local specification = rendering.list[listindex]
+ for i=3,#specification do
+ local backlink = tonumber(specification[i])
+ if backlink then
+ local citation = citetolist[backlink]
+ if citation then
+ local references = citation.references
+ if references then
+ local internal = references.internal
+ local realpage = references.realpage
+ if internal and realpage then
+ ctx_btxsetconcat(i-2)
+ ctx_btxsetfirst(realpage)
+ ctx_btxsetsecond(backlink)
+ if trace_detail then
+ report("expanding %a list setup %a","page",variant)
+ end
+ ctx_btxlistsetup(variant)
+ end
+ end
+ end
+ end
+ end
+ end
+
+end
+
+-- a helper
+
+do
+
+ -- local context = context
+ -- local lpegmatch = lpeg.match
+ local splitter = lpeg.tsplitat(":")
+
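+ -- checkinterfacechain splits a colon separated name like "a:b:c" and calls the
+ -- given command for each accumulated prefix (a, a:b, a:b:c) together with its
+ -- parent, so that intermediate instances get defined along the way
+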
+ interfaces.implement {
+ name = "checkinterfacechain",
+ arguments = { "string", "string" },
+ actions = function(str,command)
+ local chain = lpegmatch(splitter,str)
+ if #chain > 0 then
+ local command = context[command]
+ local parent = ""
+ local child = chain[1]
+ command(child,parent)
+ for i=2,#chain do
+ parent = child
+ child = child .. ":" .. chain[i]
+ command(child,parent)
+ end
+ end
+ end
+ }
+
+end
diff --git a/tex/context/base/publ-ini.mkiv b/tex/context/base/publ-ini.mkiv
new file mode 100644
index 000000000..233734bb6
--- /dev/null
+++ b/tex/context/base/publ-ini.mkiv
@@ -0,0 +1,1813 @@
+%D \module
+%D [ file=publ-ini,
+%D version=2013.05.12,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Initialization,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% TODO: s! vs v! for default and neutral key/values
+% todo: too many refs in list
+
+% todo: no need for all these %'s
+
+% todo: tagging
+% todo: we cannot use 'default' as this wipes metadata names (maybe no longer do that)
+% todo: \v!cite => \s!cite
+% todo: interface with (ml)bibtex (export -> call -> import)
+% todo: check if 'all' etc are ok ... either use list or use other criterium
+% todo: \the\everysetupbtxciteplacement probably too often
+
+% \definecolor[btx:field] [darkred]
+% \definecolor[btx:crossref][darkblue]
+% \definecolor[btx:key] [darkgreen]
+% \definecolor[btx:todo] [darkyellow]
+
+%D We operate on several axes:
+%D
+%D \startitemize[packed]
+%D \startitem we can have several databases (or combinations) \stopitem
+%D \startitem we can add entries to them if needed (coded in tex) \stopitem
+%D \startitem we can have several lists each using one of the databases \stopitem
+%D \startitem we can render each list or citation independently \stopitem
+%D \stopitemize
+%D
+%D We assume that the rendering of a list entry is consistent in a document,
+%D although one can redefine properties if needed. Adding more granularity would
+%D complicate the user interface beyond comprehension.
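+%D
+%D A minimal sketch of such a setup (the name \type {main} and the bibliography file
+%D are of course just examples):
+%D
+%D \starttyping
+%D \definebtxdataset[main]
+%D \usebtxdataset[main][mybibs.bib]
+%D \definebtxrendering[main][dataset=main]
+%D \stoptyping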
+
+\writestatus{loading}{ConTeXt Publication Support / Initialization}
+
+\registerctxluafile{publ-dat}{1.001}
+\registerctxluafile{publ-ini}{1.001}
+\registerctxluafile{publ-sor}{1.001}
+\registerctxluafile{publ-aut}{1.001}
+\registerctxluafile{publ-usr}{1.001}
+\registerctxluafile{publ-oth}{1.001} % this could become an option
+\registerctxluafile{publ-fnd}{1.001} % new method (for the moment only local)
+\registerctxluafile{publ-jrn}{1.001}
+\registerctxluafile{publ-reg}{1.001}
+
+\unprotect
+
+\startcontextdefinitioncode
+
+\def\s!btx {btx}
+
+\def\v!btxcite {btxcite}
+\def\v!btxlist {btxlist}
+\def\v!btxrendering {btxrendering}
+
+\def\s!btxset {btxset}
+\def\s!btxref {btxref}
+\def\s!btxint {btxint}
+\def\s!btxbck {btxbck}
+\def\s!btxltx {btxltx}
+\def\s!btxrtx {btxrtx}
+\def\s!btxatx {btxatx}
+\def\s!btxbtx {btxbtx}
+\def\s!btxspc {btxspc}
+\def\s!btxlst {btxlst}
+\def\s!btxcom {btxcom}
+
+\definelabelclass[btxlabel][2]
+
+% It is not that trivial to come up with a proper organization of setup
+% and control commands for publications. This is because we have complex
+% inline as well as extensive list rendering. The rules are partially
+% driven by somewhat archaic bibtex specifications and evolving journal
+% (or field) specific demands. The logic in the standards is often so
+% complex that it looks like manual rendering is assumed. But we want to
+% automate the process as much as possible.
+%
+% Another complication is that in manuals we want to demonstrate different
+% properties of the implementation and therefore we need a way to handle
+% independent standards, databases, etc. This has resulted in the following
+% organization:
+%
+% - general setup (rather minimal)
+% - list setup (rendering)
+% - cite setup
+% - dataset setup
+%
+% The rendering is mostly driven by setups. In there we can call for fields
+% in the database but also for virtual fields or combinations.
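+%
+% For instance, a rendering setup along these lines (the setup name is only an
+% illustration) can fetch fields with the accessors that are defined below:
+%
+% \startsetups btx:example:author
+%     \btxdoifelse {author} {
+%         \btxflush{author}
+%     } {
+%         \btxflush{editor}
+%     }
+% \stopsetups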
+
+% The main rendering style (standard driven).
+
+%D We assume that a specification is used globally or grouped. It doesn't make much
+%D sense to split it between cite and list here, as that only complicates matters
+%D (timing) and doesn't make things any clearer either.
+
+\let\currentbtxspecification\empty
+
+\unexpanded\def\startbtxrenderingdefinitions[#1]%
+ {\unprotect
+ \pushmacro\currentbtxspecification
+ \edef\currentbtxspecification{#1}}
+
+\unexpanded\def\stopbtxrenderingdefinitions
+ {\popmacro\currentbtxspecification
+ \protect}
+
+\unexpanded\def\loadbtxdefinitionfile [#1]{\clf_btxloaddefinitionfile {#1}}
+\unexpanded\def\loadbtxreplacementfile[#1]{\clf_btxloadreplacementfile{#1}}
+
+\unexpanded\def\publ_specification_push#1%
+ {\pushmacro\currentbtxspecification
+ \pushmacro\currentbtxspecificationfallback
+ \edef\currentbtxspecification{#1}%
+ \edef\currentbtxspecificationfallback{\namedbtxparameter\currentbtxspecification\c!default}%
+ \ifx\currentbtxspecificationfallback\currentbtxspecification
+ \let\currentbtxspecificationfallback\empty
+ \fi
+ \clf_btxsetspecification{\currentbtxspecification}}
+
+\unexpanded\def\publ_specification_pop
+ {\popmacro\currentbtxspecificationfallback
+ \popmacro\currentbtxspecification
+ \clf_btxsetspecification{\currentbtxspecification}}
+
+\unexpanded\def\publ_specification_set#1% beware: is global
+ {\edef\currentbtxspecification{#1}%
+ \edef\currentbtxspecificationfallback{\namedbtxparameter\currentbtxspecification\c!default}%
+ \ifx\currentbtxspecificationfallback\currentbtxspecification
+ \let\currentbtxspecificationfallback\empty
+ \fi
+ % has to be done explicitly: \loadbtxdefinitionfile[\currentbtxspecification]%
+ \ifx\currentbtxspecification\empty
+ % we set default at the end
+ \else
+ \clf_btxsetspecification{\currentbtxspecification}%
+ \fi}% todo: ,true == also load
+
+\installcorenamespace {btx}
+
+\installswitchcommandhandler \??btx {btx} \??btx
+
+% because we have lots of setups we provide a checker for sloppy users
+
+\unexpanded\def\btx_check_chain#1#2#3%
+ {\doifelsesomething{#3}
+ {\writestatus{btx #1}{defining\space"#2"\space as\space descendant\space of\space"#3"}% we're in definition regime (no space)
+ \definebtx[#2][#3]}
+ {\writestatus{btx #1}{defining\space"#2"}%
+ \definebtx[#2]}}
+
+% \unexpanded\def\btxcheckdefine#1#2{\doifelsecommandhandler\??btx{#1}\donothing{\btx_check_chain{define}{#1}{#2}}}
+% \unexpanded\def\btxchecksetup #1#2{\doifelsecommandhandler\??btx{#1}\donothing{\btx_check_chain {setup}{#1}{#2}}}
+
+\unexpanded\def\btxcheckdefine#1{\doifelsecommandhandler\??btx{#1}\gobbleoneargument{\btx_check_chain{define}{#1}}} % {#2}
+\unexpanded\def\btxchecksetup #1{\doifelsecommandhandler\??btx{#1}\gobbleoneargument{\btx_check_chain {setup}{#1}}} % {#2}
+
+% for the moment experimental:
+
+\unexpanded\def\btxenableautodefine
+ {\prependtoks
+ \clf_checkinterfacechain{\currentbtx}{btxcheckdefine}%
+ \to \everydefinebtx
+ \prependtoks
+ \ifnum\btxsetupmode=\doingrootsetupnamed
+ \clf_checkinterfacechain{\currentbtx}{btxchecksetup}%
+ \fi
+ \to \everysetupbtx
+ \let\btxenableautodefine\relax}
+
+\appendtoks
+ \ifnum\btxsetupmode=\doingrootsetuproot
+ \publ_specification_set{\btxparameter\c!specification}%
+ \else\ifnum\btxsetupmode=\doingrootsetupnamed
+ \doifelsecommandhandler\??btx\currentbtx
+ {\publ_specification_set{\btxparameter\c!specification}}%
+ {}% maybe a warning
+ \fi\fi
+\to \everysetupbtx
+
+\appendtoks
+ \ifnum\btxsetupmode=\doingrootsetuproot
+ \edef\currentbtxdataset{\clf_btxsetdataset{\btxparameter\c!dataset}{\currentbtxdataset}}%
+ \fi
+\to \everysetupbtx
+
+\appendtoks
+ \publ_specification_set{\btxparameter\c!specification}%
+\to \everyjob
+
+\unexpanded\def\startusingbtxspecification[#1]%
+ {\publ_specification_push{#1}}
+
+\let\stopusingbtxspecification\publ_specification_pop
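+
+% Typical grouped usage (the specification name apa is just an example of one that
+% can be loaded):
+%
+% \startusingbtxspecification[apa]
+% ... citations and list placement using the apa rendering
+% \stopusingbtxspecification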
+
+% \setupbtxlist[alternative=paragraph,width=auto,distance=\emwidth]
+% \setupbtxlist[alternative=paragraph,width=auto,distance=\emwidth,margin=2em] % useless
+% \setupbtxlist[alternative=paragraph,width=fit,distance=\emwidth]
+% \setupbtxlist[alternative=paragraph,width=fit,distance=\emwidth,margin=2em]
+
+% here starts the bib stuff
+
+\installcorenamespace {btxdataset}
+\installcorenamespace {btxrendering}
+\installcorenamespace {btxregister}
+\installcorenamespace {btxcommand}
+\installcorenamespace {btxrenderingdefinition}
+
+\installcommandhandler \??btxdataset {btxdataset} \??btxdataset
+\installcommandhandler \??btxregister {btxregister} \??btxregister
+\installcommandhandler \??btxrendering {btxrendering} \??btxrendering
+
+\let\currentbtxcitealternative\empty
+
+\let\currentbtxspecificationfallback\empty
+
+\unexpanded\def\setbtxparameterset#1#2%
+ {\edef\currentbtx
+ {\ifcsname\??btx\currentbtxspecification:#1:#2:\s!parent\endcsname
+ \currentbtxspecification:%
+ \else\ifx\currentbtxspecificationfallback\empty
+ \else\ifcsname\??btx\currentbtxspecificationfallback:#1:#2:\s!parent\endcsname
+ \currentbtxspecificationfallback:%
+ \fi\fi\fi#1:#2}}
+
+\unexpanded\def\setbtxparametersetroot#1%
+ {\edef\currentbtx
+ {\ifcsname\??btx\currentbtxspecification:#1:\s!parent\endcsname
+ \currentbtxspecification:#1%
+ \else\ifx\currentbtxspecificationfallback\empty
+ \else\ifcsname\??btx\currentbtxspecificationfallback:#1:\s!parent\endcsname
+ \currentbtxspecificationfallback:#1%
+ \fi\fi\fi}}
+
+\unexpanded\def\setbtxrendering
+ {\edef\currentbtxrendering
+ {\ifcsname\??btx\currentbtxspecification:\s!parent\endcsname
+ \currentbtxspecification
+ \else\ifx\currentbtxspecificationfallback\empty
+ \else\ifcsname\??btx\currentbtxspecificationfallback:\s!parent\endcsname
+ \currentbtxspecificationfallback
+ \fi\fi\fi}}
+
+\unexpanded\def\setbtxlist % maybe simplify this one, always list=rendering?
+ {\edef\currentbtxlist
+ {\ifcsname\??btx\currentbtxrendering:\s!parent\endcsname
+ \currentbtxrendering
+ \else\ifcsname\??btx\currentbtxspecification:\s!parent\endcsname
+ \currentbtxspecification
+ \else\ifx\currentbtxspecificationfallback\empty
+ \else\ifcsname\??btx\currentbtxspecificationfallback:\s!parent\endcsname
+ \currentbtxspecificationfallback
+ \fi\fi\fi\fi}%
+ \edef\currentlist{\s!btx:\currentbtxlist}}
+
+\unexpanded\def\usebtxdataset
+ {\begingroup
+ \dotripleempty\publ_use_dataset}
+
+\def\publ_use_dataset[#1][#2][#3]%
+ {\getdummyparameters[\c!specification=\currentbtxspecification,#3]%
+ \ifsecondargument
+ \clf_btxusedataset
+ specification {\dummyparameter\c!specification}%
+ dataset {#1}%
+ filename {#2}%
+ \relax
+ \else\iffirstargument
+ \clf_btxusedataset
+ specification {\dummyparameter\c!specification}%
+ dataset {\v!default}%
+ filename {#1}%
+ \relax
+ \fi\fi
+ \endgroup}
+
+\definebtxdataset
+ [\v!default]
+% [\c!language=] % nothing set so use current
+
+% \usebtxdataset
+% [default]
+% [mybibs.bib]
+
+\let\startpublication\relax
+\let\stoppublication \relax
+
+\unexpanded\def\startpublication
+ {\dodoubleempty\publ_set_publication}
+
+\def\publ_set_publication[#1][#2]%
+ {\begingroup
+ \catcode\commentasciicode\othercatcode
+ \ifsecondargument
+ \expandafter\publ_set_publication_indeed
+ \else\iffirstargument
+ \doubleexpandafter\publ_set_publication_checked
+ \else
+ \doubleexpandafter\publ_set_publication_default
+ \fi\fi{#1}{#2}}
+
+\def\publ_set_publication_default#1#2%
+ {\publ_set_publication_indeed\v!default{#1}}
+
+\def\publ_set_publication_checked#1#2%
+ {\doifelseassignment{#1}
+ {\publ_set_publication_indeed\v!default{#1}}
+ {\publ_set_publication_indeed{#1}{}}}
+
+\def\publ_set_publication_indeed#1#2#3\stoppublication
+ {\clf_btxaddentry{#1}{#2}{\detokenize{#3}}%
+ \endgroup
+ \ignorespaces}
+
+% commands
+
+\unexpanded\def\btxcommand#1%
+ {\ifcsname\??btxcommand#1\endcsname
+ \expandafter\publ_command_yes
+ \else
+ \expandafter\publ_command_nop
+ \fi{#1}}
+
+\let\btxcmd\btxcommand
+
+\def\publ_command_yes#1%
+ {\csname\??btxcommand#1\endcsname}
+
+\def\publ_command_nop#1%
+ {\ifcsname#1\endcsname
+ \showmessage\m!publications{10}{#1,#1}%
+ \global\expandafter\let\csname\??btxcommand#1\expandafter\endcsname\csname#1\endcsname
+ \else\ifcsname\utfupper{#1}\endcsname
+ \showmessage\m!publications{10}{#1}{\utfupper{#1}}%
+ \global\expandafter\let\csname\??btxcommand#1\expandafter\endcsname\csname\utfupper{#1}\endcsname
+ \else
+ \showmessage\m!publications{11}{#1}%
+ \setugvalue{\??btxcommand#1}{\underbar{\tttf#1}}%
+ \fi\fi
+ \publ_command_yes{#1}}
+
+\unexpanded\def\definebtxcommand#1% {body} #1..#n{body}
+ {\setuvalue{\??btxcommand\strippedcsname#1}}%
+
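+% Unknown commands encountered in bib entries can be mapped onto existing ones. A
+% small sketch (the command names are hypothetical):
+%
+% \definebtxcommand\TUB {TUGboat}
+% \definebtxcommand\sltt{\tt }
+% \definebtxcommand\MR#1{MR #1}
+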
+% tracing
+
+\installtextracker
+ {publications.crosslinks}
+ {\let\btx_trace_list_cross\strc_references_tracer}
+ {\let\btx_trace_list_cross\gobbletwoarguments}
+
+\let\btx_trace_list_cross\gobbletwoarguments
+
+% access
+
+\let\currentbtxtag \empty
+\let\currentbtxdataset\v!default
+
+\unexpanded\def\setbtxentry[#1]% or maybe btxsetentry
+ {\edef\currentbtxtag{\clf_btxsetentry{\currentbtxdataset}{#1}}}
+
+% \let\btxsetdataset\setbtxdataset
+% \let\btxsetentry \setbtxentry
+
+% todo: no need for the currents as we can keep them at the lua end so we will have
+%
+% \btxfield : current
+% \btxspecificfield : dataset,tag,key
+
+\def\btxfield #1{\clf_btxfield {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxdetail #1{\clf_btxdetail {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxflush #1{\clf_btxflush {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxdirect #1{\clf_btxdirect {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxfieldname #1{\clf_btxfieldname {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxfieldtype #1{\clf_btxfieldtype {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxfoundname #1{\clf_btxfoundname {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxfoundtype #1{\clf_btxfoundtype {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxauthorfield#1{\clf_btxauthorfield \currentbtxauthorindex{#1}}
+\def\btxdoifelse #1{\clf_btxdoifelse {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxdoif #1{\clf_btxdoif {\currentbtxdataset}{\currentbtxtag}{#1}}
+\def\btxdoifnot #1{\clf_btxdoifnot {\currentbtxdataset}{\currentbtxtag}{#1}}
+
+\let\btxsetup\fastsetup
+
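+% These accessors are normally used inside list and cite setups. A sketch, assuming
+% the usual pattern where the conditional is followed by a group (the field names
+% are just examples and depend on the entry):
+%
+% \btxdoif {author} {
+%     \btxflush{author}
+% }
+% \btxflush{title}
+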
+%D How complex will we go? Can we assume that e.g. an apa style will not be mixed
+%D with another one? I think this assumption is okay. For manuals we might want to
+%D mix but we can work around it.
+
+%D Rendering.
+
+\unexpanded\def\btxspace {\removeunwantedspaces\space}
+\unexpanded\def\btxnobreakspace {\removeunwantedspaces\nobreakspace} % these two are
+\unexpanded\def\btxnbsp {\removeunwantedspaces\nbsp} % the same anyway
+\unexpanded\def\btxperiod {\removeunwantedspaces.\space}
+\unexpanded\def\btxcomma {\removeunwantedspaces,\space}
+\unexpanded\def\btxcommabreak {\removeunwantedspaces,\hskip\zeropoint plus .5\emwidth\relax}
+\unexpanded\def\btxcolon {\removeunwantedspaces:\space}
+\unexpanded\def\btxsemicolon {\removeunwantedspaces;\space}
+\unexpanded\def\btxlparent {\removeunwantedspaces\space(} % obsolete
+\unexpanded\def\btxrparent {\removeunwantedspaces)\space} % obsolete
+\unexpanded\def\btxleftparenthesis {\removeunwantedspaces\space(}
+\unexpanded\def\btxrightparenthesis {\removeunwantedspaces)\space}
+\unexpanded\def\btxrightparenthesisperiod{\removeunwantedspaces).\space}
+\unexpanded\def\btxrightparenthesiscomma {\removeunwantedspaces),\space}
+\unexpanded\def\btxleftbracket {\removeunwantedspaces\space[}
+\unexpanded\def\btxrightbracket {\removeunwantedspaces]\space}
+\unexpanded\def\btxrightbracketperiod {\removeunwantedspaces].\space}
+\unexpanded\def\btxrightbracketcomma {\removeunwantedspaces],\space}
+
+%D Variables:
+
+\let\currentbtxbacklink \empty \unexpanded\def\btxsetbacklink {\def\currentbtxbacklink}
+\let\currentbtxbacktrace \empty \unexpanded\def\btxsetbacktrace {\def\currentbtxbacktrace}
+\let\currentbtxcategory \empty \unexpanded\def\btxsetcategory {\def\currentbtxcategory}
+\let\currentbtxcombis \empty \unexpanded\def\btxsetcombis {\def\currentbtxcombis}
+\let\currentbtxdataset \empty \unexpanded\def\btxsetdataset {\def\currentbtxdataset}
+\let\currentbtxfirst \empty \unexpanded\def\btxsetfirst {\def\currentbtxfirst}
+\let\currentbtxsecond \empty \unexpanded\def\btxsetsecond {\def\currentbtxsecond}
+%let\currentbtxthird \empty \unexpanded\def\btxsetthird {\def\currentbtxthird}
+\let\currentbtxsuffix \empty \unexpanded\def\btxsetsuffix {\def\currentbtxsuffix}
+\let\currentbtxinternal \empty \unexpanded\def\btxsetinternal {\def\currentbtxinternal}
+\let\currentbtxlefttext \empty \unexpanded\def\btxsetlefttext {\def\currentbtxlefttext}
+\let\currentbtxrighttext \empty \unexpanded\def\btxsetrighttext {\def\currentbtxrighttext}
+\let\currentbtxbefore \empty \unexpanded\def\btxsetbefore {\def\currentbtxbefore}
+\let\currentbtxafter \empty \unexpanded\def\btxsetafter {\def\currentbtxafter}
+\let\currentbtxlanguage \empty \unexpanded\def\btxsetlanguage {\def\currentbtxlanguage}
+\let\currentbtxtag \empty \unexpanded\def\btxsettag {\def\currentbtxtag}
+\let\currentbtxnumber \empty \unexpanded\def\btxsetnumber {\def\currentbtxnumber}
+\let\currentbtxauthorvariant\v!normal \unexpanded\def\btxsetauthorvariant{\def\currentbtxauthorvariant}
+
+\let\currentbtxfirstnames \empty \unexpanded\def\btxsetfirstnames{\let\currentbtxfirstnames\currentbtxfirstnames_indeed}
+\let\currentbtxinitials \empty \unexpanded\def\btxsetinitials {\let\currentbtxinitials \currentbtxinitials_indeed }
+\let\currentbtxjuniors \empty \unexpanded\def\btxsetjuniors {\let\currentbtxjuniors \currentbtxjuniors_indeed }
+\let\currentbtxsurnames \empty \unexpanded\def\btxsetsurnames {\let\currentbtxsurnames \currentbtxsurnames_indeed }
+\let\currentbtxvons \empty \unexpanded\def\btxsetvons {\let\currentbtxvons \currentbtxvons_indeed }
+
+\newconstant\currentbtxoverflow \unexpanded\def\btxsetoverflow #1{\currentbtxoverflow #1\relax}
+\newconstant\currentbtxconcat \unexpanded\def\btxsetconcat #1{\currentbtxconcat #1\relax}
+\newconstant\currentbtxcount \unexpanded\def\btxsetcount #1{\currentbtxcount #1\relax}
+\newconstant\currentbtxauthorindex %unexpanded\def\btxsetauthorindex#1{\currentbtxauthorindex#1\relax} % passed directly
+\newconstant\currentbtxauthorcount %unexpanded\def\btxsetauthorcount#1{\currentbtxauthorcount#1\relax} % passed directly
+\newconstant\currentbtxauthorstate \unexpanded\def\btxsetauthorstate#1{\currentbtxauthorstate#1\relax}
+
+\unexpanded\def\currentbtxfirstnames_indeed{\clf_btxcurrentfirstnames\numexpr\currentbtxauthorindex\relax}
+\unexpanded\def\currentbtxinitials_indeed {\clf_btxcurrentinitials \numexpr\currentbtxauthorindex\relax}
+\unexpanded\def\currentbtxjuniors_indeed {\clf_btxcurrentjuniors \numexpr\currentbtxauthorindex\relax}
+\unexpanded\def\currentbtxsurnames_indeed {\clf_btxcurrentsurnames \numexpr\currentbtxauthorindex\relax}
+\unexpanded\def\currentbtxvons_indeed {\clf_btxcurrentvons \numexpr\currentbtxauthorindex\relax}
+
+\let\currentbtxfirstpage \empty \unexpanded\def\btxsetfirstpage #1{\def\currentbtxfirstpage{\btx_page_number{#1}}}
+\let\currentbtxlastpage \empty \unexpanded\def\btxsetlastpage #1{\def\currentbtxlastpage {\btx_page_number{#1}}}
+\let\currentbtxfirstinternal\empty \unexpanded\def\btxsetfirstinternal {\def\currentbtxfirstinternal}
+\let\currentbtxlastinternal \empty \unexpanded\def\btxsetlastinternal {\def\currentbtxlastinternal}
+
+\def\currentbtxauthorvariant{normal}
+
+\unexpanded\def\btx_reset_list % not needed as we're grouped
+ {\let\currentbtxcombis \empty
+ \let\currentbtxcategory \empty
+ \let\currentbtxinternal \empty
+ \let\currentbtxlefttext \empty
+ \let\currentbtxrighttext\empty
+ \let\currentbtxbefore \empty
+ \let\currentbtxafter \empty
+ \let\currentbtxbacklink \empty
+ \let\currentbtxbacktrace\empty
+ \let\currentbtxlanguage \empty
+ \let\currentbtxtag \empty
+ \let\currentbtxsuffix \empty
+ \let\currentbtxnumber \empty
+ \let\currentbtxdataset \empty}
+
+\unexpanded\def\btx_reset_cite % check for less .. not all resets needed when we're grouped (only subcites)
+ {\let \currentbtxfirst \empty
+ \let \currentbtxsecond \empty
+ \let \currentbtxsuffix \empty
+ \let \currentbtxinternal \empty
+ \let \currentbtxlefttext \empty
+ \let \currentbtxrighttext \empty
+ \let \currentbtxbefore \empty
+ \let \currentbtxafter \empty
+ \let \currentbtxbacklink \empty
+ \let \currentbtxbacktrace \empty % not used here
+ \let \currentbtxlanguage \empty
+ \let \currentbtxdataset \empty
+ \let \currentbtxtag \empty
+ \let \currentbtxnumber \empty
+ \setconstant\currentbtxoverflow \zerocount
+ \setconstant\currentbtxconcat \zerocount
+ \setconstant\currentbtxcount \zerocount}
+
+\unexpanded\def\btx_reset_page % probably not needed
+ {\let \currentbtxfirstpage \empty
+ \let \currentbtxlastpage \empty
+ \let \currentbtxfirstinternal\empty
+ \let \currentbtxlastinternal \empty
+ \setconstant\currentbtxoverflow \zerocount
+ \setconstant\currentbtxconcat \zerocount
+ \setconstant\currentbtxcount \zerocount}
+
+\unexpanded\def\btx_reset_numbering % probably not needed
+ {\let \currentbtxfirst \empty
+ \let \currentbtxsecond\empty
+ \let \currentbtxsuffix\empty
+ \setconstant\currentbtxconcat\zerocount}
+
+%D Pages:
+
+\unexpanded\def\btx_page_number#1%
+ {\def\currentlistindex{#1}%
+ \structurelistpagenumber}
+
+%D Language:
+
+\def\mainbtxlanguage{\currentmainlanguage}
+
+\unexpanded\def\btx_check_language
+ {\let\mainbtxlanguage\currentlanguage
+ \ifx\currentbtxlanguage\empty
+ \let\currentbtxlanguage\currentlanguage
+ \else
+ \btx_check_language_indeed
+ \fi}
+
+\unexpanded\def\btx_check_language_indeed
+ {\edef\currentbtxlanguage{\reallanguagetag\currentbtxlanguage}%
+ \ifx\currentbtxlanguage\empty
+ \let\currentbtxlanguage\currentlanguage
+ \else\ifx\currentbtxlanguage\currentlanguage\else
+ \setcurrentlanguage\currentmainlanguage\currentbtxlanguage
+ \fi\fi}
+
+%D Tracing
+
+\newconditional\c_btx_trace % not used yet
+
+\installtextracker
+ {btxrendering}
+ {\settrue \c_btx_trace}
+ {\setfalse\c_btx_trace}
+
+%D Rendering lists and citations.
+
+\unexpanded\def\btxtodo#1%
+ {[#1]}
+
+%D Lists:
+
+\newdimen\d_publ_number_width
+
+\ifdefined\btxblock \else \newcount\btxblock \fi \btxblock\plusone
+\ifdefined\btxcitecounter \else \newcount\btxcitecounter \fi % maybe pass this to lua
+
+\newtoks \everysetupbtxlistplacement % name will change
+\newtoks \everysetupbtxciteplacement % name will change
+
+\definelist % only used for selecting
+ [\s!btx]
+
+\setuplist
+ [\s!btx]
+ [\c!prefixstopper=:,
+ \c!state=\v!start,
+ \c!alternative=a,
+ %\c!alternative=\v!paragraph,
+ %\c!width=\v!auto,
+ %\c!distance=\emwidth,
+ \c!before=\blank,
+ \c!after=\blank]
+
+\unexpanded\def\setupbtxlist
+ {\dodoubleempty\publ_setup_list}
+
+\unexpanded\def\publ_setup_list[#1][#2]%
+ {\ifsecondargument
+ \setuplist[\s!btx:#1][#2]%
+ \else\iffirstargument
+ \setuplist[\s!btx][#1]%
+ \fi\fi}
+
+\appendtoks
+ \ifx\currentbtxrenderingparent\empty
+ \definelist
+ [\s!btx:\currentbtxrendering]%
+ [\s!btx]%
+ \else\ifx\currentbtxrenderingparent\s!btx
+ \definelist
+ [\s!btx:\currentbtxrendering]%
+ [\s!btx]%
+ \else
+ \definelist
+ [\s!btx:\currentbtxrendering]%
+ [\s!btx:\currentbtxrenderingparent]%
+ \fi\fi
+\to \everydefinebtxrendering
+
+\newconditional\c_btx_list_texts
+
+\appendtoks
+ \doifelse{\btxrenderingparameter\c!textstate}\v!start
+ \settrue\setfalse\c_btx_list_texts
+\to \everysetupbtxlistplacement
+
+\newconditional\c_btx_list_pages
+
+\appendtoks
+ \doifelse{\btxrenderingparameter\c!pagestate}\v!start
+ \settrue\setfalse\c_btx_list_pages
+\to \everysetupbtxlistplacement
+
+\unexpanded\def\btx_entry_inject_pages % for the moment only normal
+ {\dontleavehmode
+ \begingroup
+ \setbtxlist % probably already set
+ \btx_reset_page
+ \setbtxparameterset\s!list\s!page
+ \btxparameter\c!command
+ {\usebtxstyleandcolor\c!style\c!color
+ \btxparameter\c!left
+ \clf_btxflushpages{\currentbtxdataset}{\currentbtxtag}%
+ \btxparameter\c!right}%
+ \endgroup}
+
+\unexpanded\def\btxpagesetup#1% there will be no left|right|command|style at this inner level
+ {\begingroup
+ \publ_fast_setup\plusfive\s!list\s!page
+ \endgroup
+ \btx_reset_page} % probably not needed
+
+\unexpanded\def\btxnumberingsetup#1%
+ {\begingroup
+ \setbtxparameterset{\c!list:\s!numbering}\currentbtxnumbering % brrrr \setbtxlist
+ \btxparameter\c!left
+ % \btxparameter\c!command{\publ_fast_setup\plusthree{\s!list:\s!numbering}{#1}}%
+ \publ_fast_setup\plusthree{\s!list:\s!numbering}{#1}%
+ \btxparameter\c!right
+ \endgroup
+ \btx_reset_numbering} % probably not needed
+
+% end of page stuff
+
+\unexpanded\def\btx_entry_inject
+ {\begingroup
+   \redoconvertfont % see (**) in strc-lst, this will become a configuration option
+ \edef\currentbtxcategory{\btxfield{category}}%
+ \ignorespaces
+ \ifconditional\c_btx_list_texts
+ \currentbtxbefore
+ \fi
+ \begingroup
+ \usebtxstyleandcolor\c!style\c!color
+ \ignorespaces
+ \publ_fast_setup\plusfour\s!list\currentbtxcategory
+ \removeunwantedspaces
+ \endgroup
+ \ifx\currentbtxcombis\empty \else
+ \btxrenderingparameter\c!separator
+ % maybe move this loop to lua
+ \begingroup
+ \processcommacommand[\currentbtxcombis]\btx_entry_inject_combi
+ \endgroup
+ \fi
+ \ifconditional\c_btx_list_pages
+ \btx_entry_inject_pages
+ \fi
+ \ifconditional\c_btx_list_texts
+ \currentbtxafter
+ \fi
+ \endgroup}
+
+\unexpanded\def\btxshowentryinline
+ {\dodoubleempty\btx_entry_show_inline}
+
+\unexpanded\def\btx_entry_show_inline[#1][#2]%
+ {\ifsecondargument
+ \ctxcommand{showbtxentry("#1","#2")}
+ \else\iffirstargument
+ \ctxcommand{showbtxentry("\currentbtxdataset","#1")}
+ \else
+ \ctxcommand{showbtxentry("\currentbtxdataset","\currentbtxtag")}
+ \fi\fi}
+
+\unexpanded\def\btxstartcombientry
+ {\begingroup}
+
+\unexpanded\def\btxstopcombientry
+ {\endgroup}
+
+\unexpanded\def\btxhandlecombientry
+ {\btx_reference_indeed}
+
+\def\btx_entry_inject_combi#1%
+ {\begingroup
+ \def\currentbtxtag{#1}%
+ \ignorespaces
+ \publ_fast_setup\plusfour\s!list\currentbtxcategory
+ \removeunwantedspaces
+ \endgroup}
+
+% uses reference when set
+
+% \def\btx_entry_inject_combi#1%
+% {\begingroup
+% \def\currentbtxtag{#1}%
+% \ignorespaces
+% \btxdoifelsecombiinlist\currentbtxdataset\currentbtxtag
+% {\clf_btxflushlistcombi{\currentbtxdataset}{\currentbtxtag}}
+% {\publ_fast_setup\plusfour\s!list\currentbtxcategory}%
+% \removeunwantedspaces
+% \endgroup}
+
+\unexpanded\def\completebtxrendering{\dodoubleempty\publ_place_list_complete}
+\unexpanded\def\placebtxrendering {\dodoubleempty\publ_place_list_standard}
+
+\let\completelistofpublications\completebtxrendering
+\let\placelistofpublications \placebtxrendering
+
+\newtoks\everybtxlistrendering
+
+\appendtoks
+ \setbtxlist
+ %
+ \edef\currentbtxcriterium{\btxrenderingparameter\c!criterium}% \v!cite will become \s!cite
+ \ifx\currentbtxcriterium\empty
+ \let\currentbtxcriterium\v!previous
+ \else\ifx\currentbtxcriterium\v!cite
+ \let\currentbtxcriterium\v!here
+ \fi\fi
+ %
+ \iflocation
+ \letinteractionparameter\c!style\empty
+ \fi
+\to \everybtxlistrendering
+
+\def\nofbtxlistentries {0}
+\def\currentbtxlistentry{0}
+\def\currentbtxlistindex{0} % only for internal use (points back to big list)
+
+\newconditional\c_publ_prefixed
+
+\unexpanded\def\btxsetnoflistentries #1{\edef\nofbtxlistentries {#1}}
+\unexpanded\def\btxsetcurrentlistentry#1{\edef\currentbtxlistentry{#1}}
+\unexpanded\def\btxsetcurrentlistindex#1{\edef\currentbtxlistindex{#1}}
+
+\unexpanded\def\btxdoifelsesameaspreviouschecked#1#2% #1 == always | doublesided
+ {\clf_btxdoifelsesameasprevious
+ {\currentbtxdataset}%
+ \currentbtxlistentry%
+ {#2}%
+ \c_btx_list_reference
+ {#1}}
+
+\unexpanded\def\btxdoifelsesameasprevious
+ {\btxdoifelsesameaspreviouschecked\v!doublesided}
+
+\unexpanded\def\btxdoifelsecombiinlist#1#2%
+ {\clf_btxdoifelsecombiinlist{#1}{#2}}
+
+\let\btxdoifsameaspreviouscheckedelse\btxdoifelsesameaspreviouschecked
+\let\btxdoifsameaspreviouselse \btxdoifelsesameasprevious
+\let\btxdoifcombiinlistelse \btxdoifelsecombiinlist
+
+\def\publ_place_list_indeed#1[#2][#3]%
+ {\begingroup
+ \ifsecondargument
+ % [rendering] [settings]
+ \edef\currentbtxrendering{#2}%
+ \setupcurrentbtxrendering[#3]%
+ \edef\p_specification{\btxrenderingparameter\c!specification}%
+ \ifx\p_specification\empty\else
+ \let\currentbtxspecification\p_specification
+ \fi
+ \else\iffirstargument
+ \doifelseassignment{#2}
+ {% [settings]
+ \let\currentbtxrendering\currentbtxspecification
+ \setupcurrentbtxrendering[#2]%
+ \edef\p_specification{\btxrenderingparameter\c!specification}%
+ \ifx\p_specification\empty\else
+ \let\currentbtxspecification\p_specification
+ \let\currentbtxrendering\currentbtxspecification % tricky
+ \fi}
+ {\edef\currentbtxrendering{#2}%
+ \edef\p_specification{\btxrenderingparameter\c!specification}%
+ \ifx\p_specification\empty\else
+ \let\currentbtxspecification\p_specification
+ \fi}%
+ \else
+ \let\currentbtxrendering\currentbtxspecification
+ \fi\fi
+ \setbtxparameterset\currentbtxspecification\s!list
+ \the\everybtxlistrendering
+ \ifconditional#1\relax
+ \edef\currentbtxrenderingtitle{\btxrenderingparameter\c!title}%
+ \ifx\currentbtxrenderingtitle\empty
+ \normalexpanded{\startnamedsection[\v!chapter][\c!reference=\currentbtxrendering,\c!title={\headtext{\currentbtxrendering}}]}%
+ \else
+ \normalexpanded{\startnamedsection[\v!chapter][\c!reference=\currentbtxrendering,\c!title={\currentbtxrenderingtitle}]}%
+ \fi
+ \fi
+ \ifx\currentbtxrendering\empty
+ \setbtxrendering % hm
+ \fi
+ \btxrenderingparameter\c!before
+ \edef\currentbtxdataset{\btxrenderingparameter\c!dataset}%
+ \uselanguageparameter\btxdatasetparameter % new
+ \setbtxlist
+ \the\everystructurelist
+ \the\everysetupbtxlistplacement
+ \forgetall
+ % why not pass this with collect .. todo
+ % here we just collect items
+ \clf_btxcollectlistentries
+ names {\s!btx}%
+ criterium {\currentbtxcriterium}%
+ reference {\btxrenderingparameter\c!reference}%
+ method {\btxrenderingparameter\c!method}%
+ dataset {\currentbtxdataset}%
+ keyword {\btxrenderingparameter\c!keyword}%
+ sorttype {\btxrenderingparameter\c!sorttype}%
+ repeated {\btxrenderingparameter\c!repeat}%
+ ignored {\btxrenderingparameter\c!ignore}%
+ group {\btxrenderingparameter\c!group}%
+ \relax
+ \ifnum\nofbtxlistentries>\zerocount
+ \startpacked[\v!blank]%
+ % sorting and so
+ \clf_btxpreparelistentries{\currentbtxdataset}% could be put in collect
+ % next we analyze the width
+ \ifx\currentbtxnumbering\empty \else
+ \edef\p_width{\listparameter\c!width}%
+ \ifx\p_width\v!auto
+ \setbox\scratchbox\vbox \bgroup
+ \settrialtypesetting
+ \clf_btxfetchlistentries{\currentbtxdataset}%
+ \egroup
+ \d_publ_number_width\wd\scratchbox
+ \letlistparameter\c!width\d_publ_number_width
+ \fi
+ \fi
+ \doifelse{\listparameter\c!prefix}\v!yes\settrue\setfalse\c_publ_prefixed
+ % this actually typesets them, we loop here as otherwise the whole
+ % bunch gets flushed at once
+ \dorecurse\nofbtxlistentries
+ {\let\currentbtxlistentry\recurselevel
+ \clf_btxflushlistentry{\currentbtxdataset}\currentbtxlistentry\relax}%
+ \stoppacked
+ \fi
+ \btxrenderingparameter\c!after
+ \global\advance\btxblock\plusone
+ \ifconditional#1\relax
+ \stopnamedsection
+ \fi
+ \endgroup}
+
+\def\publ_place_list_complete{\publ_place_list_indeed\conditionaltrue}
+\def\publ_place_list_standard{\publ_place_list_indeed\conditionalfalse}
+
+\def\currentbtxblock{\number\btxblock}
+
+% called at the lua end, for determining the width
+
+\unexpanded\def\btxchecklistentry
+ {\begingroup
+ % todo, switch to font
+ \hbox{\btx_reference_checked}%
+ \par
+ \endgroup}
+
+% called at the lua end, the real rendering
+
+% we could have a yes and a no variant where the no one nils the btx_reference_indeed ... saves a check there
+
+\installstructurelistprocessor{\s!btx}
+ {\let\currentlistentrynumber \btx_reference_indeed
+ \let\currentlistentrytitle \btx_entry_indeed
+ \let\currentlistentrypagenumber\btx_page_indeed
+ \strc_lists_apply_renderingsetup}
+
+\def\btx_entry_indeed
+ {\btx_list_reference_inject
+ \btx_entry_inject}
+
+\def\btx_page_indeed
+ {}
+
+\unexpanded\def\btxhandlelistentry
+ {\strc_lists_entry_process}
+
+\unexpanded\def\btxstartlistentry % maybe pass i
+ {\begingroup
+ \global\advance\c_btx_list_reference\plusone}
+
+\unexpanded\def\btxstoplistentry
+ {\iftrialtypesetting
+ \global\advance\c_btx_list_reference\minusone
+ \fi
+ \endgroup}
+
+\newtoks\everybtxlistentry
+
+\unexpanded\def\btxlistsetup#1% used for the reference in the list
+ {\the\everybtxlistentry
+ \everybtxlistentry\emptytoks % so only once per entry to be sure
+ \publ_fast_setup\plusfour\s!list{#1}}
+
+\appendtoks
+ \btx_check_language
+\to \everybtxlistentry
+
+\unexpanded\def\btx_reference_indeed
+ {\begingroup
+   % redundant, will go away:
+ \setbtxparameterset{\c!list:\s!numbering}\currentbtxnumbering
+ %
+ \ifx\currentbtxnumbering\empty
+ % nothing
+ \else\ifx\currentbtxnumbering\v!no
+ % nothing
+ \else
+ \usebtxstyleandcolor\c!style\c!color % new, needed?
+ \ifconditional\c_publ_prefixed\btxlistprefixednumber\fi
+ \clf_btxlistvariant % some can go
+ {\currentbtxdataset}%
+ {\currentbtxblock}%
+ {\currentbtxtag}%
+ {\currentbtxnumbering}%
+ {\currentbtxnumber}%
+ \relax
+ \fi\fi
+ \endgroup}
+
+\unexpanded\def\btxlistprefixednumber % hack but alan needs it
+ {\clf_listprefixednumber
+ {\currentlist}%
+ \currentbtxlistindex
+ {%
+ prefix {\listparameter\c!prefix}%
+ separatorset {\listparameter\c!prefixseparatorset}%
+ conversionset {\listparameter\c!prefixconversionset}%
+ starter {\listparameter\c!prefixstarter}%
+ stopper {\listparameter\c!prefixstopper}%
+ set {\listparameter\c!prefixset}%
+ segments {\listparameter\c!prefixsegments}%
+ connector {\listparameter\c!prefixconnector}%
+ }%
+ \relax}
+
+\unexpanded\def\btx_reference_checked
+ {\dontleavehmode\hbox\bgroup
+ \btx_reference_indeed
+ \egroup}
+
+\newcount\c_btx_list_reference
+
+\unexpanded\def\btx_list_reference_inject
+ {\dontleavehmode\begingroup % no box
+ \iftrialtypesetting\else
+ \btx_list_reference_inject_now
+ \fi
+ % \btx_reference_indeed % else double entry in list
+ \endgroup}
+
+\def\btx_list_reference_inject_now
+ {\btx_trace_list_cross\empty\currentbtxbacktrace
+ \strc_references_direct_full_user
+ {\ifx\currentbtxdataset\v!default\else\s!btxset=\currentbtxdataset,\fi%
+ \s!btxref=\currentbtxtag,%
+ \s!btxspc=\currentbtxspecification,%
+ \s!btxlst=\number\c_btx_list_reference,% check if needed
+ %\ifx\currentbtxcombis\empty\else\s!btxcom={\currentbtxcombis},\fi%
+ \ifx\currentbtxbefore\empty\else\s!btxbtx={\currentbtxbefore},\fi%
+ \ifx\currentbtxafter \empty\else\s!btxatx={\currentbtxafter },\fi%
+ \ifx\currentbtxbacklink\currentbtxbacktrace\s!btxint=\currentbtxbacklink\else\s!btxbck=\currentbtxbacktrace\fi}%
+ {\s!btx::\v!list::\number\c_btx_list_reference}%
+ {\currentbtxnumber}}
+
+\newconditional\c_btx_cite_reference_injected
+
+\unexpanded\def\btx_cite_reference_inject
+ {\ifconditional\c_btx_cite_reference_injected
+ \else
+ \dontleavehmode
+ \iftrialtypesetting \else
+ \ifx\currentbtxbacklink\empty
+ % can be made empty when combining author / year
+ \else
+ \btx_cite_reference_inject_indeed
+ \settrue\c_btx_cite_reference_injected
+ \fi
+ \fi
+ \fi}
+
+\newtoks\t_btx_reference_inject
+
+\def\btx_cite_reference_inject_indeed
+ {\btx_trace_list_cross\currentbtxbacklink\empty
+ \the\t_btx_reference_inject
+ \strc_lists_inject_direct % todo: make like \btx_list_reference_inject_now with { }
+ [\s!btx]%
+ [\c!type=\s!btx]% \c!location=\v!none
+ [\ifx\currentbtxdataset\v!default\else\s!btxset=\currentbtxdataset,\fi%
+ \s!btxref=\currentbtxtag,%
+ %\ifx\currentbtxcombis\empty\else\s!btxcom={\currentbtxcombis},\fi%
+ \ifx\currentbtxbefore\empty\else\s!btxbtx={\currentbtxbefore},\fi%
+ \ifx\currentbtxafter \empty\else\s!btxatx={\currentbtxafter },\fi%
+ \s!btxint=\number\currentbtxbacklink
+ \ifx\currentbtxciteuservariables\empty\else,\currentbtxciteuservariables\fi]}
+
+\def\currentbtxuservariable #1{\clf_btxuservariable {\currentbtxdataset}{#1}}
+\def\btxdoifelseuservariable#1{\clf_btxdoifelseuservariable{\currentbtxdataset}{#1}}
+
+\let\btxdoifuservariableelse\btxdoifelseuservariable
+
+\let\btxcitereference\btx_cite_reference_inject
+
+\let\currentbtxnumbering\empty
+
+\appendtoks
+ \edef\currentbtxnumbering{\btxrenderingparameter\c!numbering}%
+ \ifx\currentbtxnumbering\v!yes
+ \def\currentbtxnumbering{num}% convenient alias
+ \letbtxrenderingparameter\c!numbering\currentbtxnumbering
+ \letlistparameter\c!headnumber\v!always
+ \else\ifx\currentbtxnumbering\v!no
+ \letlistparameter\c!headnumber\v!no
+ \let\currentbtxnumbering\empty
+ % \letlistparameter\c!textcommand\outdented % needed? we can use titlealign
+ \letlistparameter\c!symbol \v!none
+ \letlistparameter\c!aligntitle \v!yes
+ \letlistparameter\c!numbercommand\firstofoneargument % for the moment, no doubling needed
+ \else
+ \letlistparameter\c!headnumber\v!always
+ \fi\fi
+ \let\currentlistmethod\s!btx
+\to \everysetupbtxlistplacement
+
+\unexpanded\def\btxremapauthor
+ {\dodoubleargument\btx_remap_author}
+
+\def\btx_remap_author[#1][#2]%
+ {\clf_btxremapauthor{#1}{#2}}
+
+\unexpanded\def\btxflushauthor
+ {\doifelsenextoptionalcs\btx_flush_author_yes\btx_flush_author_nop}
+
+\unexpanded\def\btxflushsuffix
+ {\ifx\currentbtxsuffix\empty
+ % nothing
+ \else
+ \characters{\currentbtxsuffix}% todo : rendering specific converter
+ \fi}
+
+\def\btx_flush_author_yes[#1]{\btx_flush_author{#1}}
+\def\btx_flush_author_nop {\btx_flush_author{\btxparameter\c!authorconversion}}
+
+\unexpanded\def\btx_flush_author#1#2%
+ {\begingroup
+ \edef\currentbtxfield{#2}%
+ \setbtxparameterset\s!list\currentbtxfield
+% \let\currentbtxlistvariant\currentbtxfield
+ \clf_btxauthor
+ {\currentbtxdataset}%
+ {\currentbtxtag}%
+ {\currentbtxfield}%
+ {%
+ combiner {#1}%
+ kind {list}%
+ etallimit {\btxparameter\c!etallimit}%
+ etaldisplay {\btxparameter\c!etaldisplay}%
+ etaloption {\btxparameter\c!etaloption}%
+ symbol {\btxparameter{\c!stopper:initials}}%
+ }%
+ \relax
+ \endgroup}
+
+% yes or no: maybe just \flushauthor{...}{...}
+
+\unexpanded\def\btxflushauthorname {\btx_flush_author{name}} % #1
+\unexpanded\def\btxflushauthornormal {\btx_flush_author{normal}} % #1
+\unexpanded\def\btxflushauthornormalshort {\btx_flush_author{normalshort}} % #1
+\unexpanded\def\btxflushauthorinverted {\btx_flush_author{inverted}} % #1
+\unexpanded\def\btxflushauthorinvertedshort{\btx_flush_author{invertedshort}} % #1
+
+\unexpanded\def\currentbtxciteauthor % always author
+ {\begingroup
+ \setbtxparameterset\s!cite\s!author
+ \clf_btxauthor
+ {\currentbtxdataset}%
+ {\currentbtxtag}%
+ {\s!author}%
+ {%
+ combiner {\btxparameter\c!authorconversion}%
+ kind {cite}%
+ etallimit {\btxparameter\c!etallimit}%
+ etaldisplay {\btxparameter\c!etaldisplay}%
+ etaloption {\btxparameter\c!etaloption}%
+ symbol {\btxparameter{\c!stopper:initials}}%
+ }%
+ \relax
+ \endgroup}
+
+\unexpanded\def\btxstartauthor#1#2#3% a state > 0 signals that some authors can clash
+ {\begingroup
+ \currentbtxauthorindex#1\relax
+ \currentbtxauthorcount#2\relax
+ \currentbtxauthorstate#3\relax}
+
+\unexpanded\def\btxstopauthor
+ {\endgroup}
+
+\unexpanded\def\btxciteauthorsetup#1{\fastsetup{\s!btx:\s!cite:\s!author:#1}}
+\unexpanded\def\btxlistauthorsetup#1{\fastsetup{\s!btx:\s!list:\s!author:#1}}
+
+% \btxflushauthor{author}
+% \btxflushauthor{editor}
+%
+% \btxflushauthor[name]{author}
+% \btxflushauthor[normal]{author}
+% \btxflushauthor[normalshort]{author}
+% \btxflushauthor[inverted]{author}
+% \btxflushauthor[invertedshort]{author}
+
+% Interaction
+
+\newconditional\btxinteractive
+\newconditional\btx_interactive
+
+% or maybe modes?
+
+\appendtoks
+ \iflocation
+ \edef\p_interaction{\btxparameter\c!interaction}%
+ \ifx\p_interaction\v!stop
+ \setfalse\btxinteractive
+ \else
+ \settrue\btxinteractive
+ \ifx\p_interaction\v!all
+ \settrue\btx_interactive
+ \else
+ \setfalse\btx_interactive
+ \fi
+ \fi
+ \else
+ \setfalse\btxinteractive
+ \setfalse\btx_interactive
+ \fi
+\to \everysetupbtxlistplacement
+
+\appendtoks
+ \iflocation
+ \edef\p_interaction{\btxparameter\c!interaction}%
+ \ifx\p_interaction\v!stop
+ \setfalse\btxinteractive
+ \else
+ \settrue\btxinteractive
+ \fi
+ \else
+ \setfalse\btxinteractive
+ \fi
+\to \everysetupbtxciteplacement
+
+%D When a publication is cited, we need to signal that somehow. This is done with the
+%D following (not user level) command. We could tag without injecting a node but this
+%D way we also store the location, which makes it possible to ask for local lists.
+
+%D \macros{cite,nocite,citation,nocitation,usecitation}
+%D
+%D The inline \type {\cite} command creates an (often) short reference to a publication
+%D and for historic reasons uses a strict test for brackets. This means, at least
+%D in the default case, that spaces are ignored in the argument scanner. The \type
+%D {\citation} command is more liberal but also gobbles following spaces. Both
+%D commands insert a reference as well as a visual clue.
+%D
+%D The \type {no} commands all do the same (they are synonyms): they make sure that
+%D a reference is injected but show nothing. However, they do create a node, so it is
+%D best to attach them to some text in order to avoid spacing interference. A slightly
+%D less efficient alternative is \type {\cite[none][tag]}.
+
+% [tags]
+% [settings|variant][tags]
+% [base::tags]
+% [settings|variant][base::tags]
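+%
+% a few calls as a sketch (the tags and the authoryear variant are just examples):
+%
+% \cite[hagen2013]
+% \cite[authoryear][hagen2013,otten2013]
+% \cite[righttext={, p.~12}][hagen2013]
+% \nocite[hagen2013]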
+
+% these need to be sort of protected:
+
+\let\p_publ_cite_before \empty
+\let\p_publ_cite_after \empty
+\let\p_publ_cite_lefttext \empty
+\let\p_publ_cite_righttext\empty
+
+\let\currentbtxciteuservariables\empty
+
+\unexpanded\def\btxhybridcite % so one can alias the old
+ {\dontleavehmode
+ \begingroup
+ \strictdoifelsenextoptional\publ_cite_tags_options\publ_cite_tags_indeed}
+
+\unexpanded\def\publ_cite_tags_options[#1]%
+ {\strictdoifelsenextoptional{\publ_cite_tags_options_indeed{#1}}{\publ_cite_tags_indeed{#1}}}
+
+\unexpanded\def\publ_cite_tags_indeed#1%
+ {\letinteractionparameter\c!style\empty
+ \setbtxparametersetroot\s!cite % we need to get the default
+ \edef\currentbtxcitealternative{\btxparameter\c!alternative}%
+ \setbtxparameterset\s!cite\currentbtxcitealternative
+ \edef\currentbtxcitetag{#1}%
+ \the\everysetupbtxciteplacement
+ \publ_cite_variant
+ \endgroup}
+
+\unexpanded\def\publ_cite_tags_options_indeed#1%
+ {\doifelseassignment{#1}\publ_cite_tags_settings_indeed\publ_cite_tags_variants_indeed{#1}}
+
+\def\publ_cite_tags_settings_indeed#1[#2]%
+ {\letinteractionparameter\c!style\empty
+ %\letinteractionparameter\c!color\empty
+ \letdummyparameter\c!reference \empty
+ \letdummyparameter\c!alternative\empty
+ \letdummyparameter\c!before \empty
+ \letdummyparameter\c!after \empty
+ \letdummyparameter\c!lefttext \empty
+ \letdummyparameter\c!righttext \empty
+ \getdummyparameters[#1]%
+ \edef\p_reference{\dummyparameter\c!reference}%
+ \ifx\p_reference\empty
+ \edef\currentbtxcitetag{#2}%
+ \else
+ \let\currentbtxcitetag\p_reference
+ \edef\currentbtxciteuservariables{#2}%
+ \fi
+ \edef\p_alternative{\dummyparameter\c!alternative}%
+ \ifx\p_alternative\empty
+ \setbtxparametersetroot\s!cite
+ \edef\currentbtxcitealternative{\btxparameter\c!alternative}%
+ \else
+ \let\currentbtxcitealternative\p_alternative
+ \fi
+ \setbtxparameterset\s!cite\currentbtxcitealternative
+ \setupcurrentbtx[#1]%
+ %
+ \edef\p_publ_cite_before {\dummyparameter\c!before}%
+ \edef\p_publ_cite_after {\dummyparameter\c!after}%
+ \edef\p_publ_cite_lefttext {\dummyparameter\c!lefttext}%
+ \edef\p_publ_cite_righttext{\dummyparameter\c!righttext}%
+ %
+ \the\everysetupbtxciteplacement
+ \publ_cite_variant
+ \endgroup}
+
+\def\publ_cite_tags_variants_indeed#1[#2]%
+ {\letinteractionparameter\c!style\empty
+ \edef\currentbtxcitealternative{#1}%
+ \edef\currentbtxcitetag{#2}%
+ \setbtxparameterset\s!cite\currentbtxcitealternative
+ \the\everysetupbtxciteplacement
+ \publ_cite_variant
+ \endgroup}
+
+\newconditional\btxcitecompress
+
+\let\currentbtxreference\empty
+
+\def\publ_cite_variant
+ {\begingroup
+ \publ_cite_handle_variant_indeed[\currentbtxcitetag]}
+
+\unexpanded\def\publ_cite_handle_variant#1%
+ {\begingroup
+ \edef\currentbtxcitealternative{#1}%
+ \setbtxparameterset\s!cite\currentbtxcitealternative
+ \the\everysetupbtxciteplacement
+ \dosingleargument\publ_cite_handle_variant_indeed}
+
+\unexpanded\def\publ_cite_handle_variant_blob
+ {\clf_btxhandlecite
+ dataset {\currentbtxdataset}%
+ reference {\currentbtxreference}%
+ markentry \iftrialtypesetting\s!false\else\s!true\fi\space
+ variant {\currentbtxcitealternative}%
+ sorttype {\btxparameter\c!sorttype}%
+ compress {\btxparameter\c!compress}%
+ author {\btxparameter\c!author}%
+      authorconversion {\btxparameter\c!authorconversion}%
+ lefttext {\p_publ_cite_lefttext}%
+ righttext {\p_publ_cite_righttext}%
+ before {\p_publ_cite_before}%
+ after {\p_publ_cite_after}%
+ \relax
+ \clf_btxflushmarked} % maybe: \iftrialtypesetting\else ... \fi
+
+\let\dobtxcitevariantblob\publ_cite_handle_variant_blob % command can use it via lua
+
+\def\publ_cite_handle_variant_indeed[#1]%
+ {\letbtxparameter\c!alternative\currentbtxcitealternative
+ \edef\currentbtxreference{#1}%
+ \usebtxstyleandcolor\c!style\c!color
+ \uselanguageparameter\btxdatasetparameter % new
+ \btxparameter\c!left
+ \btxparameter\c!command{\dobtxcitevariantblob}% {\publ_cite_handle_variant_blob}%
+ \btxparameter\c!right
+ \endgroup}
+
+\unexpanded\def\btxcitation
+ {\dontleavehmode
+ \begingroup
+ \dodoubleempty\publ_citation}
+
+\def\publ_citation[#1][#2]% could be made more efficient but not now
+ {\ifsecondargument
+ \publ_cite_tags_options_indeed{#1}[#2]%
+ \else
+ \publ_cite_tags_indeed{#1}%
+ \fi}
+
+\unexpanded\def\btxnocitation
+ {\dosingleempty\publ_cite_no}
+
+\unexpanded\def\publ_cite_no[#1]%
+ {\iftrialtypesetting \else
+ \begingroup
+ \edef\currentbtxreference{#1}%
+ \clf_btxhandlenocite
+ dataset {\currentbtxdataset}%
+ reference {\currentbtxreference}%
+ markentry true%
+ \relax
+ % \clf_btxflushmarked
+ \endgroup
+ \fi}
+
+\unexpanded\def\btxmissing#1%
+ {\dontleavehmode{\tttf<#1>}}
+
+%D Compatibility:
+
+\let\cite \btxcitation
+\let\citation \btxcitation
+\let\nocite \btxnocitation
+\let\nocitation\btxnocitation
+
+\unexpanded\def\cite {\doifelsenextoptionalcs\btxcitation \btxdirectcite}
+\unexpanded\def\nocite{\doifelsenextoptionalcs\btxnocitation\btxdirectnocite}
+
+\unexpanded\def\btxdirectcite #1{\btxcitation [#1]\relax} % no optional arguments
+\unexpanded\def\btxdirectnocite#1{\btxnocitation[#1]\relax} % no optional arguments
+
+%D Setup helpers. Beware: we need to wrap this, which means that we have to know
+%D how setups are implemented.
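+%D
+%D Rendering setups follow the naming pattern \type {btx:specification:kind:category}.
+%D A sketch (apa and article are just examples):
+%D
+%D \starttyping
+%D \startsetups btx:apa:list:article
+%D     \btxflush{title}
+%D \stopsetups
+%D \stoptyping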
+
+\setvalue{\??setup:\s!btx:\s!unknown}#1{\inframed{\tttf#1}}
+
+\def\publ_fast_setup_yes#1#2%
+ {\csname\??setup:\s!btx:%
+ \ifcsname\??setup:\s!btx:\currentbtxspecification:#1:#2\endcsname
+ \currentbtxspecification:#1:#2%
+ \else\ifcsname\??setup:\s!btx:\currentbtxspecificationfallback:#1:#2\endcsname
+ \currentbtxspecificationfallback:#1:#2%
+ \else\ifcsname\??setup:\s!btx:#1:#2\endcsname
+ #1:#2%
+ \else\ifcsname\??setup:\s!btx:\currentbtxspecification:#1:\s!unknown\endcsname
+ \currentbtxspecification:#1:\s!unknown
+ \else\ifcsname\??setup:\s!btx:\currentbtxspecificationfallback:#1:\s!unknown\endcsname
+ \currentbtxspecificationfallback:#1:\s!unknown
+ \else
+ #1:\s!unknown
+ \fi\fi\fi\fi\fi
+ \endcsname{#2}}
+
+\def\publ_fast_setup_nop#1#2%
+ {\csname\??setup:\s!btx:%
+ \ifcsname\??setup:\s!btx:\currentbtxspecification:#1:#2\endcsname
+ \currentbtxspecification:#1:#2%
+ \else\ifcsname\??setup:\s!btx:#1:#2\endcsname
+ #1:#2%
+ \else\ifcsname\??setup:\s!btx:\currentbtxspecification:#1:\s!unknown\endcsname
+ \currentbtxspecification:#1:\s!unknown
+ \else
+ #1:\s!unknown
+ \fi\fi\fi
+ \endcsname{#2}}
+
+\newconstant\btxsetuptype
+
+% 0 = unknown darkred
+% 1 = cite darkblue
+% 2 = subcite darkgreen
+% 3 = numbering darkorange
+% 4 = list darkcyan
+% 5 = page darkmagenta
+% 6 = unknown darkred
+
+\unexpanded\def\publ_fast_btx_setup_chain_inbetween
+ {\allowbreak->\allowbreak}
+
+\unexpanded\def\publ_fast_btx_setup_chain_yes#1#2%
+ {\dontleavehmode\begingroup
+ \infofont
+ \ifcase\btxsetuptype\darkred\or\darkblue\or\darkgreen\or\darkcyan\or\darkmagenta\else\darkred\fi
+ [%
+ \currentbtxspecification :#1:#2\ifcsname\??setup:\s!btx:\currentbtxspecification :#1:#2\endcsname\else
+ \publ_fast_btx_setup_chain_inbetween
+ \currentbtxspecificationfallback:#1:#2\ifcsname\??setup:\s!btx:\currentbtxspecificationfallback :#1:#2\endcsname\else
+ \publ_fast_btx_setup_chain_inbetween
+ #1:#2\ifcsname\??setup:\s!btx :#1:#2\endcsname\else
+ \publ_fast_btx_setup_chain_inbetween
+ \currentbtxspecification :#1:\s!unknown\ifcsname\??setup:\s!btx:\currentbtxspecification :#1:\s!unknown\endcsname\else
+ \publ_fast_btx_setup_chain_inbetween
+ \currentbtxspecificationfallback:#1:\s!unknown\ifcsname\??setup:\s!btx:\currentbtxspecificationfallback:#1:\s!unknown\endcsname\else
+ \publ_fast_btx_setup_chain_inbetween
+ unset\fi\fi\fi\fi\fi
+ \space @\space
+ \currentbtx
+ ]%
+ \endgroup}
+
+\unexpanded\def\publ_fast_btx_setup_chain_nop#1#2%
+ {\dontleavehmode\begingroup
+ \infofont
+ \darkred
+ [%
+ \currentbtxspecification :#1:#2\ifcsname\??setup:\s!btx:\currentbtxspecification :#1:#2\endcsname\else
+ \publ_fast_btx_setup_chain_inbetween
+ #1:#2\ifcsname\??setup:\s!btx :#1:#2\endcsname\else
+ \publ_fast_btx_setup_chain_inbetween
+ \currentbtxspecification :#1:\s!unknown\ifcsname\??setup:\s!btx:\currentbtxspecification :#1:\s!unknown\endcsname\else
+ \publ_fast_btx_setup_chain_inbetween
+ unset\fi\fi\fi
+ \space @\space
+ \currentbtx
+ ]%
+ \endgroup}
+
+\unexpanded\def\publ_fast_btx_setup_normal#1%
+ {\btxsetuptype#1\relax
+ \ifx\currentbtxspecificationfallback\empty
+ \expandafter\publ_fast_setup_nop
+ \else
+ \expandafter\publ_fast_setup_yes
+ \fi}
+
+\unexpanded\def\publ_fast_btx_setup_visual#1#2#3%
+ {\btxsetuptype#1\relax
+ \ifx\currentbtxspecificationfallback\empty
+ \expandafter\publ_fast_btx_setup_chain_nop
+ \else
+ \expandafter\publ_fast_btx_setup_chain_yes
+ \fi{#2}{#3}%
+ \ifx\currentbtxspecificationfallback\empty
+ \expandafter\publ_fast_setup_nop
+ \else
+ \expandafter\publ_fast_setup_yes
+ \fi{#2}{#3}}
+
+\installtextracker
+ {publications.setups}
+ {\let\publ_fast_setup\publ_fast_btx_setup_visual}
+ {\let\publ_fast_setup\publ_fast_btx_setup_normal}
+
+\let\publ_fast_setup\publ_fast_btx_setup_normal
+
+%D Cite helpers:
+
+\newtoks\everybtxciteentry
+
+\prependtoks
+ \setfalse\c_btx_cite_reference_injected
+\to \everybtxciteentry
+
+\unexpanded\def\btxcitesetup#1%
+ {\the\everybtxciteentry
+ \everybtxciteentry\emptytoks % tricky maybe not when subcites
+   \publ_fast_setup\plusone\s!cite{#1}} % no \btxcitereset as we lose dataset and such
+
+\unexpanded\def\btxsubcitesetup#1%
+ {\the\everybtxciteentry
+ \everybtxciteentry\emptytoks % tricky maybe not when subcites
+   \publ_fast_setup\plustwo\s!cite{#1}} % no \btxcitereset as we lose dataset and such
+
+\appendtoks
+ \btx_check_language
+\to \everybtxciteentry
+
+\unexpanded\def\btxstartsubcite#1%
+ {\begingroup
+ \btx_reset_cite % todo: limited set
+ \def\currentbtxcitealternative{#1}%
+ \setbtxparameterset\s!cite\currentbtxcitealternative
+ \usebtxstyleandcolor\c!style\c!color
+ \btxparameter\c!left
+ \relax}
+
+\unexpanded\def\btxstopsubcite
+ {\relax
+ \btxparameter\c!right
+ \endgroup}
+
+\unexpanded\def\btxstartciterendering[#1]%
+ {\begingroup
+ \edef\currentbtxcitealternative{#1}%
+ \setbtxparameterset\s!cite\currentbtxcitealternative
+ \usebtxstyleandcolor\c!style\c!color
+ \btxparameter\c!left
+ \relax}
+
+\unexpanded\def\btxstopciterendering
+ {\relax
+ \btxparameter\c!right
+ \endgroup}
+
+\let\btxstartcite \begingroup
+\let\btxstopcite \endgroup
+\let\btxstartciteauthor\begingroup
+\let\btxstopciteauthor \endgroup
+
+%D Whatever helpers:
+
+\unexpanded\def\btxsingularplural#1{\clf_btxsingularorplural{\currentbtxdataset}{\currentbtxtag}{#1}}
+\unexpanded\def\btxoneorrange #1{\clf_btxoneorrange {\currentbtxdataset}{\currentbtxtag}{#1}}
+\unexpanded\def\btxfirstofrange #1{\clf_btxfirstofrange {\currentbtxdataset}{\currentbtxtag}{#1}}
+
+\let\btxsingularorplural\btxsingularplural
+
+\stopcontextdefinitioncode
+
+%D Journals
+
+\unexpanded\def\btxloadjournallist [#1]{\clf_btxloadjournallist{#1}}
+\unexpanded\def\btxsavejournallist [#1]{\clf_btxsavejournallist{#1}}
+\unexpanded\def\btxaddjournal [#1][#2]{\clf_btxaddjournal{#1}{#2}}
+ \def\btxexpandedjournal #1{\clf_btxexpandedjournal{#1}} % \unexpanded ?
+ \def\btxabbreviatedjournal#1{\clf_btxabbreviatedjournal{#1}} % \unexpanded ?
+
+% \installcorenamespace{btxjournal}
+%
+% \letvalue{\s!btxjournal\v!long }\btxexpandedjournal
+% \letvalue{\s!btxjournal\v!short }\btxabbreviatedjournal
+% \letvalue{\s!btxjournal\v!normal}\firstofoneargument
+%
+% \unexpanded\def\btxcheckedjournal
+% {\expandnamespaceparameter\s!btxjournal\btxrenderingparameter\c!journalconversion}
+
+% \btxloadjournallist[list.txt] % Foo Journal of Bars = FBJ \n ....
+%
+% \btxexpandedjournal{fbj}
+% \btxabbreviatedjournal{foo journal of bars}
+
+%D Saving data:
+
+\unexpanded\def\savebtxdataset
+ {\dotripleargument\publ_save_dataset}
+
+\unexpanded\def\publ_save_dataset[#1][#2][#3]%
+ {\ifthirdargument
+ \publ_save_dataset_indeed[#1][#2][#3]%
+ \else\ifsecondargument
+ \doifelseassignment{#2}%
+ {\publ_save_dataset_indeed[\s!default][#1][#2]}%
+ {\publ_save_dataset_indeed[#1][#2][]}%
+ \else\iffirstargument
+ \doifelseassignment{#1}%
+ {\publ_save_dataset_indeed[\s!default][\jobname-saved.bib][#1]}%
+ {\publ_save_dataset_indeed[\s!default][#1][]}%
+ % \else
+ % % bad news
+ \fi\fi\fi}
+
+\unexpanded\def\publ_save_dataset_indeed[#1][#2][#3]%
+ {\begingroup
+ \getdummyparameters
+ [\c!criterium=\v!all,%
+ \c!type=,%
+ \c!dataset=#1,%
+ \c!file=#2,%
+ #3]% % all or used
+ \clf_btxsavedataset
+ dataset {\dummyparameter\c!dataset}%
+ filename {\dummyparameter\c!file}%
+ filetype {\dummyparameter\c!type}%
+ criterium {\dummyparameter\c!criterium}%
+ \relax
+ \endgroup}
+
+% \savebtxdataset[default][e:/tmp/foo.bib]
+% \savebtxdataset[default][e:/tmp/foo.lua]
+% \savebtxdataset[default][e:/tmp/foo.xml]
+
+%D In-text entries:
+
+\unexpanded\def\placecitation{\citation[entry]} % [#1]
+
+\unexpanded\def\btxhandleciteentry
+ {\dontleavehmode
+ \begingroup
+ \def\currentbtxcitealternative{entry}%
+ \setbtxparameterset\s!cite\currentbtxcitealternative % needs checking
+ \btxcitereference
+ \btx_entry_inject
+ \endgroup}
+
+%D Registers
+
+% \setupbtxregister
+% [\c!state=\v!start,
+% \c!dataset=\v!all,
+% \c!method=\v!always]
+
+\unexpanded\def\publ_registers_set
+ {\ifx\currentbtxregister\empty \else
+ \clf_btxsetregister
+ specification {\currentbtxspecification}%
+ name {\currentbtxregister}%
+ state {\btxregisterparameter\c!state}%
+ dataset {\btxregisterparameter\c!dataset}%
+ field {\btxregisterparameter\c!field}%
+ register {\btxregisterparameter\c!register}%
+ method {\btxregisterparameter\c!method}%
+ alternative {\btxregisterparameter\c!alternative}%
+ \relax
+ \fi}
+
+\appendtoks
+ \publ_registers_set
+\to \everydefinebtxregister
+
+\appendtoks
+ \publ_registers_set
+\to \everysetupbtxregister
+
+\appendtoks
+ \normalexpanded{%
+ \defineprocessor
+ [\s!btx:r:\currentbtxregister]%
+ [\c!style=\noexpand\namedbtxregisterparameter{\currentbtxregister}\noexpand\c!style,
+ \c!color=\noexpand\namedbtxregisterparameter{\currentbtxregister}\noexpand\c!color]}%
+\to \everydefinebtxregister
+
+\appendtoks
+ \clf_btxtoregister{\currentbtxdataset}{\currentbtxtag}%
+\to \t_btx_reference_inject
+
+\unexpanded\def\btxindexedauthor#1#2#3#4#5#6% alternative von last initials first junior
+ {\begingroup
+ \def\currentbtxcitealternative{#1}%
+ \ifx\currentbtxcitealternative\empty
+ \edef\currentbtxcitealternative{invertedshort}% maybe we need some default here too?
+ \fi
+ %let\currentbtxlistvariant\currentbtxcitealternative % we inherit
+ \the\everysetupbtxciteplacement
+ \def\currentbtxvons {#2}%
+ \def\currentbtxsurnames {#3}%
+ \def\currentbtxinitials {#4}%
+ \def\currentbtxfirstnames {#5}%
+ \def\currentbtxjuniors {#6}%
+ \setbtxparameterset\s!cite\currentbtxcitealternative
+ \fastsetup{\s!btx:\s!cite:\s!author:\currentbtxcitealternative}%
+ \endgroup}
+
+\unexpanded\def\btxregisterauthor
+ {\doifelsenextoptionalcs\publ_register_author_yes\publ_register_author_nop}
+
+\def\publ_register_author_yes[#1]#2%
+ {\clf_btxauthortoregister{#1}{#2}\relax}
+
+\def\publ_register_author_nop#1%
+ {\clf_btxauthortoregister{\currentbtxdataset}{#1}\relax}
+
+
+%D We hook some setters in the definition sets:
+
+% \installdefinitionsetmember \??btx {btxspecification} \??btxcitevariant {btxcitevariant}
+% \installdefinitionsetmember \??btx {btxspecification} \??btxlistvariant {btxlistvariant}
+% \installdefinitionsetmember \??btx {btxspecification} \??btxlist {btxlist}
+% \installdefinitionsetmember \??btx {btxspecification} \??btxrendering {btxrendering}
+% \installdefinitionsetmember \??btx {btxspecification} \??btx {btx}
+
+%D And more helpers ... a never ending story these publications:
+
+% \definebtx
+% [btx:apa:list:article:title]
+% [style=bolditalic,
+% command=\WORD]
+%
+% \btxstartstyle[btx:apa:list:article:title]
+% \btxusecommand[btx:apa:list:article:title]{foo}
+% \btxstopstyle
+
+\let\savedcurrentbtx\empty
+
+\unexpanded\def\btxstartstyle[#1]%
+ {\begingroup
+ \let\savedcurrentbtx\currentbtx
+ \def\currentbtx{#1}%
+ \usebtxstyle\c!style
+ \let\currentbtx\savedcurrentbtx}
+
+\unexpanded\def\btxstartcolor[#1]%
+ {\begingroup
+ \let\savedcurrentbtx\currentbtx
+ \def\currentbtx{#1}%
+ \usebtxcolor\c!color
+ \let\currentbtx\savedcurrentbtx}
+
+\unexpanded\def\btxstartstyleandcolor[#1]%
+ {\begingroup
+ \let\savedcurrentbtx\currentbtx
+ \def\currentbtx{#1}%
+ \usebtxstyleandcolor\c!style\c!color
+ \let\currentbtx\savedcurrentbtx}
+
+\let\btxstopstyle \endgroup
+\let\btxstopcolor \endgroup
+\let\btxstopstyleandcolor\endgroup
+
+\unexpanded\def\btxusecommand[#1]#2% using #2 permits space after []
+ {\namedbtxparameter{#1}\c!command{#2}}
+
+%D Defaults:
+
+\setupbtxrendering
+ [\c!interaction=\v!start, % \v!all
+ \c!specification=\btxparameter\c!specification,
+ \c!dataset=\v!default,
+ \c!repeat=\v!no,
+ \c!continue=\v!no,
+ \c!method=\v!global,
+ % \c!setups=btx:\btxrenderingparameter\c!alternative:initialize, % not the same usage as cite !
+ \c!sorttype=\v!default,
+ \c!criterium=\v!text,
+ \c!refcommand=authoryears, % todo
+ \c!numbering=\v!yes,
+ %\c!saveinlist=\v!no, % maybe for before/after
+ \c!textstate=\v!start,
+ \c!width=\v!auto,
+ \c!separator={;\space},
+ \c!distance=1.5\emwidth]
+
+% Quite a lot of punctuation and labels are the same or at least consistent within
+% a standard when citations and list entries are involved. We assume that each
+% standard defines its own set but it can fall back on these defaults.
+
+\setupbtx
+ [\c!interaction=\v!start,
+ \c!alternative=num, % default cite form
+ \c!inbetween=\space,
+ \c!range=\endash, % separator:range?
+ \c!compress=\v!yes, % was no?
+ \c!authorconversion=normal,
+ \c!sorttype=normal, % normal, reverse or none
+ \c!etallimit=3,
+ \c!etaldisplay=\btxparameter\c!etallimit,
+ \c!otherstext={\space et al.},
+ \c!separator:firstnames={\space},
+ \c!separator:juniors={\space},
+ \c!separator:vons={\space},
+ \c!separator:initials={\space},
+ \c!stopper:initials={.},
+ %\c!surnamesep={,\space}, % is this used anywhere?
+ \c!separator:invertedinitials={,\space},
+ \c!separator:invertedfirstnames={,\space},
+ \c!separator:names:2={,\space}, % separates multiple names
+ \c!separator:names:3=\btxparameter{\c!separator:2}, % before last name in a list
+ \c!separator:names:4=\btxparameter{\c!separator:2}, % between only two names
+ \c!separator:2={;\space}, % aka pubsep - separates multiple objects
+   \c!separator:3=\btxparameter{\c!separator:2}, % before last object in a list
+   \c!separator:4=\btxparameter{\c!separator:2}, % between only two objects
+ \c!pagestate=\v!stop]
+
+% Do we want these in the format? Loading them delayed is somewhat messy.
+
+\loadbtxdefinitionfile[commands]
+\loadbtxdefinitionfile[definitions]
+
+\loadbtxdefinitionfile[cite]
+\loadbtxdefinitionfile[list]
+\loadbtxdefinitionfile[page]
+\loadbtxdefinitionfile[author]
+
+% we assume that the user sets up the right specification and if not ... well, we
+% hope for the best that something shows up; consult the manual otherwise
+
+\unexpanded\def\usebtxdefinitions[#1]%
+ {\loadbtxdefinitionfile[#1]% % for hh
+ \setupbtx[\c!specification=#1]} % for ab
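+
+% for instance (apa being one of the distributed specifications):
+%
+% \usebtxdefinitions[apa]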
+
+\setupbtx
+ [\c!specification=\s!default,
+ \c!dataset=\v!default,
+ \c!default=\v!default]
+
+\loadbtxdefinitionfile
+ [\s!default]
+
+\protect
diff --git a/tex/context/base/publ-jrn.lua b/tex/context/base/publ-jrn.lua
new file mode 100644
index 000000000..0cc41862f
--- /dev/null
+++ b/tex/context/base/publ-jrn.lua
@@ -0,0 +1,189 @@
+if not modules then modules = { } end modules ['publ-jrn'] = {
+ version = 1.001,
+    comment   = "this module is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- http://jabref.sourceforge.net/journals/journal_abbreviations_general.txt
+--
+-- full name = abbreviation [;shortest unique abbreviation[;frequency]]
+
+-- require("char-utf")
+
+-- Abhandlungen aus dem Westfälischen Museum für Naturkunde = Abh. Westfäl. Mus. Nat.kd.
+-- Abhandlungen der Naturforschenden Gesellschaft in Zürich = Abh. Nat.forsch. Ges. Zür.
+-- Abhandlungen des Naturwissenschaftlichen Vereins zu Bremen = Abh. Nat.wiss. Ver. Bremen
+
+local context = context
+local commands = commands
+
+local type = type
+local find = string.find
+local P, C, S, Cs, lpegmatch, lpegpatterns = lpeg.P, lpeg.C, lpeg.S, lpeg.Cs, lpeg.match, lpeg.patterns
+
+local report_journals = logs.reporter("publications","journals")
+
+local publications = publications
+local journals = { }
+publications.journals = journals
+
+local lowercase = characters.lower
+
+local expansions = { }
+local abbreviations = { }
+local nofexpansions = 0
+local nofabbreviations = 0
+
+local valid = 1 - S([[ ."':;,-]])
+local pattern = Cs((valid^1 + P(1)/"")^1)
+
+local function simplify(name)
+ -- we have utf but it doesn't matter much if we lower the bytes
+ return name and lowercase(lpegmatch(pattern,name)) or name
+end
+
+local function add(expansion,abbreviation)
+ if expansion and abbreviation then
+ local se = simplify(expansion)
+ local sa = simplify(abbreviation)
+ if not expansions[sa] then
+ expansions[sa] = expansion
+ nofexpansions = nofexpansions + 1
+ end
+ if not abbreviations[se] then
+ abbreviations[se] = abbreviation
+ nofabbreviations = nofabbreviations + 1
+ end
+ end
+end
+
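+-- A sketch of what ends up in the tables (the journal name is made up): after
+--
+--   add("Foo Journal of Bars","Foo J. Bars")
+--
+-- we have expansions["foojbars"] == "Foo Journal of Bars" and
+-- abbreviations["foojournalofbars"] == "Foo J. Bars"; keys are simplified, that is,
+-- lowercased with spaces and punctuation stripped.
+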
+-- [#%-] comment
+-- meaning = abbreviations [;.....]
+
+local whitespace = lpegpatterns.whitespace^0
+local assignment = whitespace * P("=") * whitespace
+local separator = P(";")
+local newline = lpegpatterns.newline
+local endofline = lpegpatterns.space^0 * (newline + P(-1) + separator)
+local restofline = (1-newline)^0
+local splitter = whitespace * C((1-assignment)^1) * assignment * C((1-endofline)^1) * restofline
+local comment = S("#-%") * restofline
+local pattern = (comment + splitter / add)^0
+
+function journals.load(filename)
+ if not filename then
+ return
+    end -- error
+ if file.suffix(filename,"txt") then
+ local data = io.loaddata(filename)
+ if type(data) ~= "string" then
+ return
+ elseif find(data,"=") then
+ -- expansion = abbreviation
+ lpegmatch(pattern,data)
+ end
+ elseif file.suffix(filename,"lua") then
+ local data = table.load(filename)
+ if type(data) ~= "table" then
+ return
+ else
+ local de = data.expansions
+ local da = data.abbreviations
+ if de and da then
+ -- { expansions = { a = e }, abbreviations = { e = a } }
+ if next(expansions) then
+ table.merge(expansions,de)
+ else
+ expansions = de
+ end
+ if next(abbreviations) then
+ table.merge(abbreviations,da)
+ else
+ abbreviations = da
+ end
+ elseif #data > 0 then
+ -- { expansion, abbreviation }, ... }
+ for i=1,#data do
+                    local d = data[i]
+ add(d[1],d[2])
+ end
+ else
+ -- { expansion = abbreviation, ... }
+                for expansion, abbreviation in next, data do
+ add(expansion,abbreviation)
+ end
+ end
+ end
+ end
+ report_journals("file %a loaded, %s expansions, %s abbreviations",filename,nofexpansions,nofabbreviations)
+end
+
+function journals.save(filename)
+ table.save(filename,{ expansions = expansions, abbreviations = abbreviations })
+end
+
+function journals.add(expansion,abbreviation)
+ add(expansion,abbreviation)
+end
+
+function journals.expanded(name)
+ local s = simplify(name)
+ return expansions[s] or expansions[simplify(abbreviations[s])] or name
+end
+
+function journals.abbreviated(name)
+ local s = simplify(name)
+ return abbreviations[s] or abbreviations[simplify(expansions[s])] or name
+end
+
+local implement = interfaces and interfaces.implement
+
+if implement then
+
+ implement {
+ name = "btxloadjournallist",
+ arguments = "string",
+ actions = journals.load
+ }
+
+ implement {
+ name = "btxsavejournallist",
+ arguments = "string",
+ actions = journals.save
+ }
+
+ implement {
+ name = "btxaddjournal",
+ arguments = { "string", "string" },
+ actions = { journals.add, context }
+ }
+
+ implement {
+ name = "btxexpandedjournal",
+ arguments = "string",
+ actions = { journals.expanded, context },
+ }
+
+ implement {
+ name = "btxabbreviatedjournal",
+ arguments = "string",
+ actions = { journals.abbreviated, context },
+ }
+
+end
+
+-- journals.load("e:/tmp/journals.txt")
+-- journals.save("e:/tmp/journals.lua")
+
+-- inspect(journals.expanded ("Z. Ökol. Nat.schutz"))
+-- inspect(journals.abbreviated("Z. Ökol. Nat. schutz"))
+
+if typesetters then
+ typesetters.manipulators.methods.expandedjournal = journals.expanded
+ typesetters.manipulators.methods.abbreviatedjournal = journals.abbreviated
+end
+
+-- journals.load("t:/manuals/publications-mkiv/journals.txt")
+-- journals.save("t:/manuals/publications-mkiv/journals.lua")
diff --git a/tex/context/base/publ-old.mkiv b/tex/context/base/publ-old.mkiv
new file mode 100644
index 000000000..f616428e6
--- /dev/null
+++ b/tex/context/base/publ-old.mkiv
@@ -0,0 +1,22 @@
+%D \module
+%D [ file=publ-old,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Old Fashioned \BIBTEX,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+% we could use runtime commands instead
+
+\unexpanded\def\setupbibtex {\usemodule[oldbibtex]\setupbibtex}
+\unexpanded\def\setuppublications {\usemodule[oldbibtex]\setuppublications}
+\unexpanded\def\setuppublicationlist{\usemodule[oldbibtex]\setuppublicationlist}
+
+\protect
diff --git a/tex/context/base/publ-oth.lua b/tex/context/base/publ-oth.lua
new file mode 100644
index 000000000..55c62c31e
--- /dev/null
+++ b/tex/context/base/publ-oth.lua
@@ -0,0 +1,154 @@
+if not modules then modules = { } end modules ['publ-oth'] = {
+ version = 1.001,
+    comment   = "this module is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local P, S, C, Ct, Cf, Cg, Cmt, Carg = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.Cmt, lpeg.Carg
+local lpegmatch = lpeg.match
+
+local p_endofline = lpeg.patterns.newline
+
+local publications = publications
+
+local loaders = publications.loaders
+local getindex = publications.getindex
+
+local function addfield(t,k,v,fields)
+ k = fields[k]
+ if k then
+ local tk = t[k]
+ if tk then
+ t[k] = tk .. " and " .. v
+ else
+ t[k] = v
+ end
+ end
+ return t
+end
+
+local function checkfield(_,_,t,categories,all)
+ local tag = t.tag
+ if tag then
+ local category = t.category
+ t.tag = nil
+ t.category = categories[category] or category
+ all[tag] = t
+ end
+ return true
+end
+
+-- endnotes --
+
+local fields = {
+ ["@"] = "tag",
+ ["0"] = "category",
+ ["A"] = "author",
+ ["E"] = "editor",
+ ["T"] = "title",
+ ["D"] = "year",
+ ["I"] = "publisher",
+}
+
+local categories = {
+ ["Journal Article"] = "article",
+}
+
+local entry = P("%") * Cg(C(1) * (S(" \t")^1) * C((1-p_endofline)^0) * Carg(1)) * p_endofline
+local record = Cf(Ct("") * (entry^1), addfield)
+local records = (Cmt(record * Carg(2) * Carg(3), checkfield) * P(1))^1
+
+function publications.endnotes_to_btx(data)
+ local all = { }
+ lpegmatch(records,data,1,fields,categories,all)
+ return all
+end
+
+function loaders.endnote(dataset,filename)
+ -- we could fold the next step into checkfield but let's not make the code too messy
+ local dataset, fullname = publications.resolvedname(dataset,filename)
+ if fullname then
+ loaders.lua(dataset,publications.endnotes_to_btx(io.loaddata(fullname) or ""))
+ end
+end
+
+-- refman --
+
+local entry = Cg(C((1-lpeg.S(" \t")-p_endofline)^1) * (S(" \t-")^1) * C((1-p_endofline)^0) * Carg(1)) * p_endofline
+local record = Cf(Ct("") * (entry^1), addfield)
+local records = (Cmt(record * Carg(2) * Carg(3), checkfield) * P(1))^1
+
+local fields = {
+ ["SN"] = "tag",
+ ["TY"] = "category",
+ ["A1"] = "author",
+ ["E1"] = "editor",
+ ["T1"] = "title",
+ ["Y1"] = "year",
+ ["PB"] = "publisher",
+}
+
+local categories = {
+ ["JOUR"] = "article",
+}
+
+function publications.refman_to_btx(data)
+ local all = { }
+ lpegmatch(records,data,1,fields,categories,all)
+ return all
+end
+
+function loaders.refman(dataset,filename)
+ -- we could fold the next step into checkfield but let's not make the code too messy
+ local dataset, fullname = publications.resolvedname(dataset,filename)
+ if fullname then
+ loaders.lua(dataset,publications.refman_to_btx(io.loaddata(fullname) or ""))
+ end
+end
+
+-- test --
+
+-- local endnote = [[
+-- %0 Journal Article
+-- %T Scientific Visualization, Overviews, Methodologies, and Techniques
+-- %A Nielson, Gregory M
+-- %A Hagen, Hans
+-- %A Müller, Heinrich
+-- %@ 0818677776
+-- %D 1994
+-- %I IEEE Computer Society
+--
+-- %0 Journal Article
+-- %T Scientific Visualization, Overviews, Methodologies, and Techniques
+-- %A Nielson, Gregory M
+-- %A Hagen, Hans
+-- %A Müller, Heinrich
+-- %@ 0818677775
+-- %D 1994
+-- %I IEEE Computer Society
+-- ]]
+--
+-- local refman = [[
+-- TY - JOUR
+-- T1 - Scientific Visualization, Overviews, Methodologies, and Techniques
+-- A1 - Nielson, Gregory M
+-- A1 - Hagen, Hans
+-- A1 - Müller, Heinrich
+-- SN - 0818677776
+-- Y1 - 1994
+-- PB - IEEE Computer Society
+--
+-- TY - JOUR
+-- T1 - Scientific Visualization, Overviews, Methodologies, and Techniques
+-- A1 - Nielson, Gregory M
+-- A1 - Hagen, Hans
+-- A1 - Müller, Heinrich
+-- SN - 0818677775
+-- Y1 - 1994
+-- PB - IEEE Computer Society
+-- ]]
+--
+-- inspect(publications.endnotes_to_btx(endnote))
+-- inspect(publications.refman_to_btx(refman))
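+--
+-- A possible way to hook these converters in, with made-up dataset and file
+-- names (both loaders just feed the converted table into the lua loader):
+--
+-- publications.loaders.endnote("default","references.enw")
+-- publications.loaders.refman ("default","references.ris")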
diff --git a/tex/context/base/publ-reg.lua b/tex/context/base/publ-reg.lua
new file mode 100644
index 000000000..3f276b49a
--- /dev/null
+++ b/tex/context/base/publ-reg.lua
@@ -0,0 +1,227 @@
+if not modules then modules = { } end modules ['publ-reg'] = {
+ version = 1.001,
+ comment = "this module part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local formatters = string.formatters
+local concat = table.concat
+local sortedhash = table.sortedhash
+local lpegmatch = lpeg.match
+
+local context = context
+
+local implement = interfaces.implement
+local variables = interfaces.variables
+
+local v_once = variables.once
+local v_stop = variables.stop
+local v_all = variables.all
+
+local publications = publications
+local datasets = publications.datasets
+local specifications = publications.specifications
+local writers = publications.writers
+local getcasted = publications.getcasted
+
+local registrations = { }
+local sequence = { }
+local flushers = table.setmetatableindex(function(t,k) local v = t.default t[k] = v return v end)
+
+local function btxsetregister(specification)
+ local name = specification.name
+ local register = specification.register
+ local dataset = specification.dataset
+ local field = specification.field
+ if not field or field == "" or not register or register == "" then
+ return
+ end
+ if not dataset or dataset == "" then
+ dataset = v_all
+ end
+ -- could be metatable magic
+ local s = registrations[register]
+ if not s then
+ s = { }
+ registrations[register] = s
+ end
+ local processor = name ~= register and name or ""
+ if processor == "" then
+ processor = nil
+ elseif processor then
+ processor = "btx:r:" .. processor
+ end
+ local datasets = utilities.parsers.settings_to_array(dataset)
+ for i=1,#datasets do
+ local dataset = datasets[i]
+ local d = s[dataset]
+ if not d then
+ d = { }
+ s[dataset] = d
+ end
+ --
+ -- check all
+ --
+ d.active = specification.state ~= v_stop
+ d.once = specification.method == v_once or false
+ d.field = field
+ d.processor = processor
+ d.alternative = d.alternative or specification.alternative
+ d.register = register
+ d.dataset = dataset
+ d.done = d.done or { }
+ end
+ --
+ sequence = { }
+ for register, s in sortedhash(registrations) do
+ for dataset, d in sortedhash(s) do
+ if d.active then
+ sequence[#sequence+1] = d
+ end
+ end
+ end
+end
+
+local function btxtoregister(dataset,tag)
+ local current = datasets[dataset]
+ for i=1,#sequence do
+ local step = sequence[i]
+ local dset = step.dataset
+ if dset == v_all or dset == dataset then
+ local done = step.done
+ if not done[tag] then
+ local value, field, kind = getcasted(current,tag,step.field,specifications[step.specification])
+ if value then
+ flushers[kind](step,field,value)
+ end
+ done[tag] = true
+ end
+ end
+ end
+end
+
+implement {
+ name = "btxsetregister",
+ actions = btxsetregister,
+ arguments = {
+ {
+ { "specification" },
+ { "name" },
+ { "state" },
+ { "dataset" },
+ { "field" },
+ { "register" },
+ { "method" },
+ { "alternative" },
+ }
+ }
+}
+
+implement {
+ name = "btxtoregister",
+ actions = btxtoregister,
+ arguments = { "string", "string" }
+}
+
+-- context.setregisterentry (
+-- { register },
+-- {
+-- ["entries:1"] = value,
+-- ["keys:1"] = value,
+-- }
+-- )
+
+local ctx_dosetfastregisterentry = context.dosetfastregisterentry -- register entry key
+
+----- p_keywords = lpeg.tsplitat(lpeg.patterns.whitespace^0 * lpeg.P(";") * lpeg.patterns.whitespace^0)
+local components = publications.components.author
+local f_author = formatters[ [[\btxindexedauthor{%s}{%s}{%s}{%s}{%s}{%s}]] ]
+
+function flushers.string(step,field,value)
+ if type(value) == "string" and value ~= "" then
+ ctx_dosetfastregisterentry(step.register,value or "","",step.processor or "","")
+ end
+end
+
+flushers.default = flushers.string
+
+local shorts = {
+ normalshort = "normalshort",
+ invertedshort = "invertedshort",
+}
+
+function flushers.author(step,field,value)
+ if type(value) == "string" then
+ value = publications.authorcache[value]
+ end
+ if type(value) == "table" and #value > 0 then
+ local register = step.register
+ local processor = step.processor
+ local alternative = shorts[step.alternative or "invertedshort"] or "invertedshort"
+ for i=1,#value do
+ local a = value[i]
+ local k = writers[field] { a }
+ local e = f_author(alternative,components(a))
+ ctx_dosetfastregisterentry(register,e,k,processor or "","")
+ end
+ end
+end
+
+function flushers.keyword(step,field,value)
+ if type(value) == "table" and #value > 0 then
+ local register = step.register
+ local processor = step.processor
+ for i=1,#value do
+ ctx_dosetfastregisterentry(register,value[i],"",processor or "","")
+ end
+ end
+end
+
+-- publications.registerflushers = flushers
+
+local function btxtoregister(dataset,tag)
+ local current = datasets[dataset]
+ for i=1,#sequence do
+ local step = sequence[i]
+ local dset = step.dataset
+ if dset == v_all or dset == dataset then
+ local done = step.done
+ if not done[tag] then
+ local value, field, kind = getcasted(current,tag,step.field,specifications[step.specification])
+ if value then
+ flushers[kind](step,field,value)
+ end
+ done[tag] = true
+ end
+ end
+ end
+end
+
+local function authortoregister(dataset,hash)
+ local author = publications.authorcache[hash]
+ if author then
+ local current = datasets[dataset]
+ for i=1,#sequence do
+ local step = sequence[i]
+ local dset = step.dataset
+ if dset == v_all or dset == dataset then
+ local register = step.register
+ local processor = step.processor
+ local alternative = shorts[step.alternative or "invertedshort"] or "invertedshort"
+ local k = writers.author { author }
+ local e = f_author(alternative,components(author))
+ ctx_dosetfastregisterentry(register,e,k,processor or "","")
+ end
+ end
+ end
+end
+
+publications.authortoregister = authortoregister
+
+implement {
+ name = "btxauthortoregister",
+ actions = authortoregister,
+ arguments = { "string", "string" }
+}
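+
+-- A hypothetical registration, just to show the shape of the specification that
+-- btxsetregister expects (normally it is filled from the tex end); the register,
+-- dataset and field names below are made up:
+--
+-- btxsetregister {
+--     name     = "keywords",
+--     register = "keywords",
+--     dataset  = "default",
+--     field    = "keywords",
+--     state    = "start",
+--     method   = "once",
+-- }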
diff --git a/tex/context/base/publ-sor.lua b/tex/context/base/publ-sor.lua
new file mode 100644
index 000000000..b617af760
--- /dev/null
+++ b/tex/context/base/publ-sor.lua
@@ -0,0 +1,377 @@
+if not modules then modules = { } end modules ['publ-sor'] = {
+ version = 1.001,
+ comment = "this module part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- if needed we can optimize this one: check if it's detail or something else
+-- and use direct access, but in practice it's fast enough
+
+local type = type
+local concat = table.concat
+local formatters = string.formatters
+local compare = sorters.comparers.basic -- (a,b)
+local sort = table.sort
+
+local toarray = utilities.parsers.settings_to_array
+local utfchar = utf.char
+
+local publications = publications
+local writers = publications.writers
+
+local variables = interfaces.variables
+local v_short = variables.short
+local v_default = variables.default
+local v_reference = variables.reference
+local v_dataset = variables.dataset
+local v_list = variables.list
+local v_index = variables.index
+local v_cite = variables.cite
+local v_used = variables.used
+
+local report = logs.reporter("publications","sorters")
+
+local trace_sorters trackers.register("publications.sorters",function(v) trace_sorters = v end)
+
+-- author(s) | year | journal | title | pages
+
+local template = [[
+local type, tostring = type, tostring
+
+local writers = publications.writers
+local datasets = publications.datasets
+local getter = publications.getfaster -- (current,data,details,field,categories,types)
+local strip = sorters.strip
+local splitter = sorters.splitters.utf
+
+local function newsplitter(splitter)
+ return table.setmetatableindex({},function(t,k) -- caching could be done in the sorter itself
+ local v = splitter(k,true) -- but we seldom have that many shared cases
+ t[k] = v
+ return v
+ end)
+end
+
+return function(dataset,list,method) -- indexer
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ local details = current.details
+ local specification = publications.currentspecification
+ local categories = specification.categories
+ local types = specification.types
+ local splitted = newsplitter(splitter) -- saves mem
+ local snippets = { } -- saves mem
+ local result = { }
+
+%helpers%
+
+ for i=1,#list do
+ -- either { tag, tag, ... } or { { tag, index }, { tag, index } }
+ local li = list[i]
+ local tag = type(li) == "string" and li or li[1]
+ local index = tostring(i)
+ local entry = luadata[tag]
+ if entry then
+ -- maybe optional: if entry.key then push the keygetter
+ -- in slot 1 and ignore (e.g. author)
+ local detail = details[tag]
+ result[i] = {
+ index = i,
+ split = {
+
+%getters%
+
+ },
+ }
+ else
+ result[i] = {
+ index = i,
+ split = {
+
+%unknowns%
+
+ },
+ }
+ end
+ end
+ return result
+end
+]]
+
+local f_getter = formatters["splitted[strip(getter(current,entry,detail,%q,categories,types) or %q)], -- %s"]
+local f_writer = formatters["splitted[strip(writer_%s(getter(current,entry,detail,%q,categories,types) or %q,snippets))], -- %s"]
+local f_helper = formatters["local writer_%s = writers[%q] -- %s: %s"]
+local f_value = formatters["splitted[%q], -- %s"]
+local s_index = "splitted[index], -- the order in the list, always added"
+
+-- there is no need to cache this in specification
+
+local sharedmethods = { }
+publications.sortmethods = sharedmethods
+
+local function sortsequence(dataset,list,sorttype)
+
+ if not list or #list == 0 then
+ return
+ end
+
+ local specification = publications.currentspecification
+ local types = specification.types
+ local sortmethods = specification.sortmethods
+ local method = sortmethods and sortmethods[sorttype] or sharedmethods[sorttype]
+ local sequence = method and method.sequence
+
+ local s_default = ""
+ local s_unknown = ""
+
+ local c_default = utfchar(0xFFFE)
+ local c_unknown = utfchar(0xFFFF)
+
+ if not sequence and type(sorttype) == "string" then
+ local list = toarray(sorttype)
+ if #list > 0 then
+ local indexdone = false
+ sequence = { }
+ for i=1,#list do
+ local entry = toarray(list[i])
+ local field = entry[1]
+ local default = entry[2]
+ local unknown = entry[3] or default
+ sequence[i] = {
+ field = field,
+ default = default == s_default and c_default or default or c_default,
+ unknown = unknown == s_unknown and c_unknown or unknown or c_unknown,
+ }
+ if field == "index" then
+ indexdone = true
+ end
+ end
+ if not indexdone then
+ sequence[#sequence+1] = {
+ field = "index",
+ default = 0,
+ unknown = 0,
+ }
+ end
+ end
+ if trace_sorters then
+ report("creating sequence from method %a",sorttype)
+ end
+ end
+
+ if sequence then
+
+ local getters = { }
+ local unknowns = { }
+ local helpers = { }
+
+ if trace_sorters then
+ report("initializing method %a",sorttype)
+ end
+
+ for i=1,#sequence do
+ local step = sequence[i]
+ local field = step.field or "?"
+ local default = step.default or c_default
+ local unknown = step.unknown or c_unknown
+ local fldtype = types[field]
+ local fldwriter = step.writer or fldtype
+ local writer = fldwriter and writers[fldwriter]
+
+ if trace_sorters then
+ report("% 3i : field %a, type %a, default %a, unknown %a",i,field,fldtype,
+ default == c_default and s_default or default,
+ unknown == c_unknown and s_unknown or unknown
+ )
+ end
+
+ if writer then
+ local h = #helpers + 1
+ getters[i] = f_writer(h,field,default,field)
+ helpers[h] = f_helper(h,fldwriter,field,fldtype)
+ else
+ getters[i] = f_getter(field,default,field)
+ end
+ unknowns[i] = f_value(unknown,field)
+ end
+
+ unknowns[#unknowns+1] = s_index
+ getters [#getters +1] = s_index
+
+ local code = utilities.templates.replace(template, {
+ helpers = concat(helpers, "\n"),
+ getters = concat(getters, "\n"),
+ unknowns = concat(unknowns,"\n"),
+ })
+
+ -- print(code)
+
+ local action, error = loadstring(code)
+ if type(action) == "function" then
+ action = action()
+ else
+ report("error when compiling sort method %a: %s",sorttype,error or "unknown")
+ end
+ if type(action) == "function" then
+ local valid = action(dataset,list,method)
+ if valid and #valid > 0 then
+ sorters.sort(valid,compare)
+ return valid
+ else
+ report("error when applying sort method %a",sorttype)
+ end
+ else
+ report("error in sort method %a",sorttype)
+ end
+ else
+ report("invalid sort method %a",sorttype)
+ end
+
+end
+
+-- tag | listindex | reference | userdata | dataindex
+
+-- short : short + tag index
+-- dataset : index + tag
+-- list : list + index
+-- reference : tag + index
+-- used : reference + dataset
+-- authoryear : complex sort
+
+local sorters = { }
+
+sorters[v_short] = function(dataset,rendering,list) -- should we store it
+ local shorts = rendering.shorts
+ local function compare(a,b)
+ if a and b then
+ local taga = a[1]
+ local tagb = b[1]
+ if taga and tagb then
+ local shorta = shorts[taga]
+ local shortb = shorts[tagb]
+ if shorta and shortb then
+ -- assumes ascii shorts ... no utf yet
+ return shorta < shortb
+ end
+ -- fall back on tag order
+ return taga < tagb
+ end
+ -- fall back on dataset order
+ local indexa = a[5]
+ local indexb = b[5]
+ if indexa and indexb then
+ return indexa < indexb
+ end
+ end
+ return false
+ end
+ sort(list,compare)
+end
+
+sorters[v_dataset] = function(dataset,rendering,list) -- dataset index
+ local function compare(a,b)
+ if a and b then
+ local indexa = a[5]
+ local indexb = b[5]
+ if indexa and indexb then
+ return indexa < indexb
+ end
+ local taga = a[1]
+ local tagb = b[1]
+ if taga and tagb then
+ return taga < tagb
+ end
+ end
+ return false
+ end
+ sort(list,compare)
+end
+
+sorters[v_list] = function(dataset,rendering,list) -- list index (normally redundant)
+ local function compare(a,b)
+ if a and b then
+ local lista = a[2]
+ local listb = b[2]
+ if lista and listb then
+ return lista < listb
+ end
+ local indexa = a[5]
+ local indexb = b[5]
+ if indexa and indexb then
+ return indexa < indexb
+ end
+ end
+ return false
+ end
+ sort(list,compare)
+end
+
+sorters[v_reference] = function(dataset,rendering,list) -- tag
+ local function compare(a,b)
+ if a and b then
+ local taga = a[1]
+ local tagb = b[1]
+ if taga and tagb then
+ return taga < tagb
+ end
+ local indexa = a[5]
+ local indexb = b[5]
+ if indexa and indexb then
+ return indexa < indexb
+ end
+ end
+ return false
+ end
+ sort(list,compare)
+end
+
+sorters[v_used] = function(dataset,rendering,list) -- tag
+ local function compare(a,b)
+ if a and b then
+ local referencea = a[2]
+ local referenceb = b[2]
+ if referencea and referenceb then
+ return referencea < referenceb
+ end
+ local indexa = a[5]
+ local indexb = b[5]
+ if indexa and indexb then
+ return indexa < indexb
+ end
+ end
+ return false
+ end
+ sort(list,compare)
+end
+
+sorters[v_default] = sorters[v_list]
+sorters[""] = sorters[v_list]
+sorters[v_cite] = sorters[v_list]
+sorters[v_index] = sorters[v_dataset]
+
+local function anything(dataset,rendering,list,sorttype)
+ local valid = sortsequence(dataset,list,sorttype) -- field order
+ if valid and #valid > 0 then
+ -- there is a complication here: a sortsequence doesn't know if there is a field, so no
+ -- real catch is possible; anyway, we add an index as the last entry when none is set,
+ -- which should be good enough (needs testing)
+ for i=1,#valid do
+ local v = valid[i]
+ valid[i] = list[v.index]
+ end
+ return valid
+ end
+end
+
+table.setmetatableindex(sorters,function(t,k) return anything end)
+
+publications.lists.sorters = sorters
+
+-- publications.sortmethods.key = {
+-- sequence = {
+-- { field = "key", default = "", unknown = "" },
+-- { field = "index", default = "", unknown = "" },
+-- },
+-- }
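+--
+-- A custom sorter can also be plugged in directly; a minimal sketch, assuming the
+-- list entries follow the layout mentioned above (tag in slot 1, dataset index in
+-- slot 5):
+--
+-- publications.lists.sorters.mysorter = function(dataset,rendering,list)
+--     sort(list,function(a,b) return (a[5] or 0) < (b[5] or 0) end)
+-- end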
diff --git a/tex/context/base/publ-tra.lua b/tex/context/base/publ-tra.lua
new file mode 100644
index 000000000..4b03307ac
--- /dev/null
+++ b/tex/context/base/publ-tra.lua
@@ -0,0 +1,504 @@
+if not modules then modules = { } end modules ['publ-tra'] = {
+ version = 1.001,
+ comment = "this module part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: use context.tt .. more efficient, less code
+
+local next, type = next, type
+
+local sortedhash = table.sortedhash
+local sortedkeys = table.sortedkeys
+local settings_to_array = utilities.parsers.settings_to_array
+local formatters = string.formatters
+local concat = table.concat
+
+local context = context
+local commands = commands
+
+local v_default = interfaces.variables.default
+
+local publications = publications
+local tracers = publications.tracers
+local tables = publications.tables
+local datasets = publications.datasets
+local specifications = publications.specifications
+local citevariants = publications.citevariants
+
+local getfield = publications.getfield
+local getcasted = publications.getcasted
+
+local ctx_NC, ctx_NR, ctx_HL, ctx_FL, ctx_ML, ctx_LL, ctx_EQ = context.NC, context.NR, context.HL, context.FL, context.ML, context.LL, context.EQ
+
+local ctx_starttabulate = context.starttabulate
+local ctx_stoptabulate = context.stoptabulate
+
+local ctx_formatted = context.formatted
+local ctx_bold = ctx_formatted.monobold
+local ctx_monobold = ctx_formatted.monobold
+local ctx_verbatim = ctx_formatted.verbatim
+
+local ctx_rotate = context.rotate
+local ctx_llap = context.llap
+local ctx_rlap = context.rlap
+local ctx_page = context.page
+
+local privates = tables.privates
+local specials = tables.specials
+
+local report = logs.reporter("publications","tracers")
+
+function tracers.showdatasetfields(settings)
+ local dataset = settings.dataset
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ local specification = settings.specification
+ local fielddata = specification and specifications[specification] or specifications.apa
+ local categories = fielddata.categories
+ if next(luadata) then
+ ctx_starttabulate { "|lT|lT|pTl|" }
+ ctx_NC() ctx_bold("tag")
+ ctx_NC() ctx_bold("category")
+ ctx_NC() ctx_bold("fields")
+ ctx_NC() ctx_NR()
+ ctx_FL()
+ for tag, entry in sortedhash(luadata) do
+ local category = entry.category
+ local catedata = categories[category]
+ local fields = catedata and catedata.fields or { }
+ ctx_NC() context(tag)
+ ctx_NC() context(category)
+ ctx_NC() -- grouping around colors needed
+ for key, value in sortedhash(entry) do
+ if privates[key] then
+ -- skip
+ elseif specials[key] then
+ context("{\\darkblue %s} ",key)
+ else
+ local kind = fields[key]
+ if kind == "required" then
+ context("{\\darkgreen %s} ",key)
+ elseif kind == "optional" then
+ context("%s ",key)
+ else
+ context("{\\darkyellow %s} ",key)
+ end
+ end
+ end
+ ctx_NC() ctx_NR()
+ end
+ ctx_stoptabulate()
+ end
+end
+
+function tracers.showdatasetcompleteness(settings)
+ local dataset = settings.dataset
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ local specification = settings.specification
+ local fielddata = specification and specifications[specification] or specifications.apa
+ local categories = fielddata.categories
+
+ -- local lpegmatch = lpeg.match
+ -- local texescape = lpeg.patterns.texescape
+
+ local preamble = { "|lTBw(5em)|lBTp(10em)|plT|" }
+
+ local function identified(tag,category,crossref,index)
+ ctx_NC() ctx_monobold(index)
+ ctx_NC() ctx_monobold(category)
+ ctx_NC() if crossref then
+ ctx_monobold("%s\\hfill\\darkblue => %s",tag,crossref)
+ else
+ ctx_monobold(tag)
+ end
+ ctx_NC() ctx_NR()
+ end
+
+ local function required(done,foundfields,key,value,indirect)
+ ctx_NC() if not done then ctx_monobold("required") end
+ ctx_NC() context(key)
+ ctx_NC()
+ if indirect then
+ if value then
+ context("\\darkblue")
+ ctx_verbatim(value)
+ else
+ context("\\darkred\\tttf [missing crossref]")
+ end
+ elseif value then
+ ctx_verbatim(value)
+ else
+ context("\\darkred\\tttf [missing value]")
+ end
+ ctx_NC() ctx_NR()
+ foundfields[key] = nil
+ return done or true
+ end
+
+ local function optional(done,foundfields,key,value,indirect)
+ ctx_NC() if not done then ctx_monobold("optional") end
+ ctx_NC() context(key)
+ ctx_NC()
+ if indirect then
+ context("\\darkblue")
+ ctx_verbatim(value)
+ elseif value then
+ ctx_verbatim(value)
+ end
+ ctx_NC() ctx_NR()
+ foundfields[key] = nil
+ return done or true
+ end
+
+ local function special(done,key,value)
+ ctx_NC() if not done then ctx_monobold("special") end
+ ctx_NC() context(key)
+ ctx_NC() ctx_verbatim(value)
+ ctx_NC() ctx_NR()
+ return done or true
+ end
+
+ local function extra(done,key,value)
+ ctx_NC() if not done then ctx_monobold("extra") end
+ ctx_NC() context(key)
+ ctx_NC() ctx_verbatim(value)
+ ctx_NC() ctx_NR()
+ return done or true
+ end
+
+ if next(luadata) then
+ for tag, entry in sortedhash(luadata) do
+ local category = entry.category
+ local fields = categories[category]
+ local foundfields = { }
+ for k, v in next, entry do
+ foundfields[k] = true
+ end
+ ctx_starttabulate(preamble)
+ identified(tag,category,entry.crossref,entry.index)
+ ctx_FL()
+ if fields then
+ local requiredfields = fields.required
+ local sets = fields.sets or { }
+ local done = false
+ if requiredfields then
+ for i=1,#requiredfields do
+ local r = requiredfields[i]
+ local r = sets[r] or r
+ if type(r) == "table" then
+ local okay = false
+ for i=1,#r do
+ local ri = r[i]
+ if rawget(entry,ri) then
+ done = required(done,foundfields,ri,entry[ri])
+ okay = true
+ elseif entry[ri] then
+ done = required(done,foundfields,ri,entry[ri],true)
+ okay = true
+ end
+ end
+ if not okay then
+ done = required(done,foundfields,table.concat(r," {\\letterbar} "))
+ end
+ elseif rawget(entry,r) then
+ done = required(done,foundfields,r,entry[r])
+ elseif entry[r] then
+ done = required(done,foundfields,r,entry[r],true)
+ else
+ done = required(done,foundfields,r)
+ end
+ end
+ end
+ local optionalfields = fields.optional
+ local done = false
+ if optionalfields then
+ for i=1,#optionalfields do
+ local o = optionalfields[i]
+ local o = sets[o] or o
+ if type(o) == "table" then
+ for i=1,#o do
+ local oi = o[i]
+ if rawget(entry,oi) then
+ done = optional(done,foundfields,oi,entry[oi])
+ elseif entry[oi] then
+ done = optional(done,foundfields,oi,entry[oi],true)
+ end
+ end
+ elseif rawget(entry,o) then
+ done = optional(done,foundfields,o,entry[o])
+ elseif entry[o] then
+ done = optional(done,foundfields,o,entry[o],true)
+ end
+ end
+ end
+ end
+ local done = false
+ for k, v in sortedhash(foundfields) do
+ if privates[k] then
+ -- skip
+ elseif specials[k] then
+ done = special(done,k,entry[k])
+ end
+ end
+ local done = false
+ for k, v in sortedhash(foundfields) do
+ if privates[k] then
+ -- skip
+ elseif not specials[k] then
+ done = extra(done,k,entry[k])
+ end
+ end
+ ctx_stoptabulate()
+ end
+ end
+
+end
+
+function tracers.showfields(settings)
+ local rotation = settings.rotation
+ local specification = settings.specification
+ local fielddata = specification and specifications[specification] or specifications.apa
+ local categories = fielddata.categories
+ local validfields = { }
+ for category, data in next, categories do
+ local sets = data.sets
+ local fields = data.fields
+ for name, list in next, fields do
+ validfields[name] = true
+ end
+ end
+ local s_categories = sortedkeys(categories)
+ local s_fields = sortedkeys(validfields)
+ ctx_starttabulate { "|l" .. string.rep("|c",#s_categories) .. "|" }
+ ctx_FL()
+ ctx_NC()
+ if rotation then
+ rotation = { rotation = rotation }
+ end
+ for i=1,#s_categories do
+ ctx_NC()
+ local txt = formatters["\\bf %s"](s_categories[i])
+ if rotation then
+ ctx_rotate(rotation,txt)
+ else
+ context(txt)
+ end
+ end
+ ctx_NC() ctx_NR()
+ ctx_FL()
+ for i=1,#s_fields do
+ local field = s_fields[i]
+ ctx_NC()
+ ctx_bold(field)
+ for j=1,#s_categories do
+ ctx_NC()
+ local kind = categories[s_categories[j]].fields[field]
+ if kind == "required" then
+ context("\\darkgreen*")
+ elseif kind == "optional" then
+ context("*")
+ end
+ end
+ ctx_NC() ctx_NR()
+ end
+ ctx_LL()
+ ctx_stoptabulate()
+end
+
+function tracers.showtables(settings)
+ for name, list in sortedhash(tables) do
+ ctx_starttabulate { "|Tl|Tl|" }
+ ctx_FL()
+ ctx_NC()
+ ctx_rlap(function() ctx_bold(name) end)
+ ctx_NC()
+ ctx_NC()
+ ctx_NR()
+ ctx_FL()
+ for k, v in sortedhash(list) do
+ ctx_NC()
+ context(k)
+ ctx_NC()
+ if type(v) == "table" then
+ context("% t",v)
+ else
+ context(tostring(v))
+ end
+ ctx_NC()
+ ctx_NR()
+ end
+ ctx_LL()
+ ctx_stoptabulate()
+ end
+end
+
+function tracers.showdatasetauthors(settings)
+
+ local dataset = settings.dataset
+ local field = settings.field
+
+ local sortkey = publications.writers.author
+
+ if not dataset or dataset == "" then dataset = v_default end
+ if not field or field == "" then field = "author" end
+
+ local function row(i,k,v)
+ ctx_NC()
+ if i then
+ ctx_verbatim(i)
+ end
+ ctx_NC()
+ ctx_verbatim(k)
+ ctx_EQ()
+ if type(v) == "table" then
+ local t = { }
+ for i=1,#v do
+ local vi = v[i]
+ if type(vi) == "table" then
+ t[i] = concat(vi,"-")
+ else
+ t[i] = vi
+ end
+ end
+ ctx_verbatim(concat(t, " | "))
+ else
+ ctx_verbatim(v)
+ end
+ ctx_NC()
+ ctx_NR()
+ end
+
+ local function authorrow(ai,k,i)
+ local v = ai[k]
+ if v then
+ row(i,k,v)
+ end
+ end
+
+ local function commonrow(key,value)
+ ctx_NC() ctx_rlap(function() ctx_verbatim(key) end)
+ ctx_NC()
+ ctx_EQ() ctx_verbatim(value)
+ ctx_NC() ctx_NR()
+ end
+
+ local d = datasets[dataset].luadata
+
+ local trialtypesetting = context.trialtypesetting()
+
+ for tag, entry in sortedhash(d) do
+
+ local a, f, k = getcasted(dataset,tag,field)
+
+ if type(a) == "table" and #a > 0 and k == "author" then
+ context.start()
+ context.tt()
+ ctx_starttabulate { "|B|Bl|p|" }
+ ctx_FL()
+ local original = getfield(dataset,tag,field)
+ commonrow("tag",tag)
+ commonrow("field",field)
+ commonrow("original",original)
+ commonrow("sortkey",sortkey(a))
+ for i=1,#a do
+ ctx_ML()
+ local ai = a[i]
+ if ai then
+ authorrow(ai,"original",i)
+ authorrow(ai,"snippets")
+ authorrow(ai,"initials")
+ authorrow(ai,"firstnames")
+ authorrow(ai,"vons")
+ authorrow(ai,"surnames")
+ authorrow(ai,"juniors")
+ local options = ai.options
+ if options then
+ row(false,"options",sortedkeys(options))
+ end
+ elseif not trialtypesetting then
+ report("bad author name: %s",original or "?")
+ end
+ end
+ ctx_LL()
+ ctx_stoptabulate()
+ context.stop()
+ end
+
+ end
+
+end
+
+function tracers.showentry(dataset,tag)
+ local dataset = datasets[dataset]
+ if dataset then
+ local entry = dataset.luadata[tag]
+ local done = false
+ for k, v in sortedhash(entry) do
+ if not privates[k] then
+ ctx_verbatim("%w[%s: %s]",done and 1 or 0,k,v)
+ done = true
+ end
+ end
+ end
+end
+
+local skipped = { index = true, default = true }
+
+function tracers.showvariants(dataset,pages)
+ local variants = sortedkeys(citevariants)
+ dataset = dataset or v_default
+ for tag in publications.sortedentries(dataset) do
+ if pages then
+ ctx_page()
+ end
+ ctx_starttabulate { "|T||" }
+ for i=1,#variants do
+ local variant = variants[i]
+ if not skipped[variant] then
+ ctx_NC() context(variant)
+ -- ctx_EQ() citevariants[variant] { dataset = v_default, reference = tag, variant = variant }
+ ctx_EQ() context.cite({variant},{dataset .. "::" .. tag})
+ ctx_NC() ctx_NR()
+ end
+ end
+ ctx_stoptabulate()
+ if pages then
+ ctx_page()
+ end
+ end
+end
+
+function tracers.showhashedauthors(dataset,pages)
+ local components = publications.components.author
+ ctx_starttabulate { "|T|T|T|T|T|T|" }
+ ctx_NC() ctx_bold("hash")
+ ctx_NC() ctx_bold("vons")
+ ctx_NC() ctx_bold("surnames")
+ ctx_NC() ctx_bold("initials")
+ ctx_NC() ctx_bold("firstnames")
+ ctx_NC() ctx_bold("juniors")
+ ctx_NC() ctx_NR() ctx_HL()
+ for hash, data in sortedhash(publications.authorcache) do
+ local vons, surnames, initials, firstnames, juniors = components(data)
+ ctx_NC() context(hash)
+ ctx_NC() context(vons)
+ ctx_NC() context(surnames)
+ ctx_NC() context(initials)
+ ctx_NC() context(firstnames)
+ ctx_NC() context(juniors)
+ ctx_NC() ctx_NR()
+ end
+ ctx_stoptabulate()
+end
+
+commands.showbtxdatasetfields = tracers.showdatasetfields
+commands.showbtxdatasetcompleteness = tracers.showdatasetcompleteness
+commands.showbtxfields = tracers.showfields
+commands.showbtxtables = tracers.showtables
+commands.showbtxdatasetauthors = tracers.showdatasetauthors
+commands.showbtxhashedauthors = tracers.showhashedauthors
+commands.showbtxentry = tracers.showentry
+commands.showbtxvariants = tracers.showvariants
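+
+-- These tracers are normally triggered from the tex end (see publ-tra.mkiv) but
+-- they can also be called directly; a sketch with assumed settings and dataset:
+--
+-- publications.tracers.showfields { specification = "apa", rotation = "90" }
+-- publications.tracers.showdatasetfields { dataset = "default" }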
diff --git a/tex/context/base/publ-tra.mkiv b/tex/context/base/publ-tra.mkiv
new file mode 100644
index 000000000..6ef86ca59
--- /dev/null
+++ b/tex/context/base/publ-tra.mkiv
@@ -0,0 +1,87 @@
+%D \module
+%D [ file=publ-tra,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Tracing,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% todo: make this a runtime module
+% todo: use the module interface
+
+\writestatus{loading}{ConTeXt Publication Support / Tracing}
+
+\registerctxluafile{publ-tra}{1.001}
+
+\unprotect
+
+\unexpanded\def\showbtxdatasetfields {\dosingleempty\publ_show_dataset_fields}
+\unexpanded\def\showbtxdatasetcompleteness{\dosingleempty\publ_show_dataset_completeness}
+\unexpanded\def\showbtxdatasetauthors {\dosingleempty\publ_show_dataset_authors}
+\unexpanded\def\showbtxhashedauthors {\dosingleempty\publ_show_hashed_authors}
+\unexpanded\def\showbtxfields {\dosingleempty\publ_show_fields}
+\unexpanded\def\showbtxtables {\dosingleempty\publ_show_tables}
+
+\def\publ_show_dataset_whatever#1[#2]%
+ {\begingroup
+ \letdummyparameter\c!specification\currentbtxspecification
+ \setdummyparameter\c!dataset {\currentbtxdataset}%
+ \letdummyparameter\c!field \empty
+ \iffirstargument
+ \doifelseassignment{#2}
+ {\getdummyparameters[#2]}%
+ {\setdummyparameter\c!dataset{#2}}%
+ \else
+ \getdummyparameters[#2]%
+ \fi
+ \ctxcommand{#1{
+ dataset = "\dummyparameter\c!dataset",
+ specification = "\dummyparameter\c!specification",
+ field = "\dummyparameter\c!field",
+ }}%
+ \endgroup}
+
+\def\publ_show_dataset_fields {\publ_show_dataset_whatever{showbtxdatasetfields}}
+\def\publ_show_dataset_completeness{\publ_show_dataset_whatever{showbtxdatasetcompleteness}}
+\def\publ_show_dataset_authors {\publ_show_dataset_whatever{showbtxdatasetauthors}}
+
+\def\publ_show_fields[#1]%
+ {\begingroup
+ \setdummyparameter\c!rotation{90}%
+ \doifelseassignment{#1}%
+ {\letdummyparameter\c!specification\currentbtxspecification
+ \getdummyparameters[#1]}%
+ {\doifelsenothing{#1}%
+ {\letdummyparameter\c!specification\currentbtxspecification}%
+ {\setdummyparameter\c!specification{#1}}}%
+ \ctxcommand{showbtxfields{
+ rotation = "\dummyparameter\c!rotation",
+ specification = "\dummyparameter\c!specification"
+ }}%
+ \endgroup}
+
+\def\publ_show_tables[#1]%
+ {\begingroup
+ \ctxcommand{showbtxtables{}}%
+ \endgroup}
+
+\def\publ_show_hashed_authors[#1]%
+ {\ctxcommand{showbtxhashedauthors{}}}
+
+\protect
+
+\continueifinputfile{publ-tra.mkiv}
+
+\starttext
+
+ \showbtxfields[rotation=85] \page
+ \showbtxfields[rotation=90] \page
+
+ \showbtxtables \page
+
+\stoptext
diff --git a/tex/context/base/publ-usr.lua b/tex/context/base/publ-usr.lua
new file mode 100644
index 000000000..901f11629
--- /dev/null
+++ b/tex/context/base/publ-usr.lua
@@ -0,0 +1,128 @@
+if not modules then modules = { } end modules ['publ-usr'] = {
+ version = 1.001,
+ comment = "this module part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local P, Cs, R, Cc, C, Carg = lpeg.P, lpeg.Cs, lpeg.R, lpeg.Cc, lpeg.C, lpeg.Carg
+local lpegmatch = lpeg.match
+local settings_to_hash = utilities.parsers.settings_to_hash
+
+local publications = publications
+local datasets = publications.datasets
+
+local report = logs.reporter("publications")
+local trace = false trackers.register("publications",function(v) trace = v end)
+
+-- local str = [[
+-- \startpublication[k=Berdnikov:TB21-2-129,t=article,a={{Berdnikov},{}},y=2000,n=2257,s=BHHJ00]
+-- \artauthor[]{Alexander}[A.]{}{Berdnikov}
+-- \artauthor[]{Hans}[H.]{}{Hagen}
+-- \artauthor[]{Taco}[T.]{}{Hoekwater}
+-- \artauthor[]{Bogus{\l}aw}[B.]{}{Jackowski}
+-- \pubyear{2000}
+-- \arttitle{{Even more MetaFun with \MP: A request for permission}}
+-- \journal{TUGboat}
+-- \issn{0896-3207}
+-- \volume{21}
+-- \issue{2}
+-- \pages{129--130}
+-- \month{6}
+-- \stoppublication
+-- ]]
+
+local lists = {
+ author = true,
+ editor = true,
+ -- artauthor = true,
+ -- arteditor = true,
+}
+
+local function registervalue(target,key,value)
+ target[key] = value
+end
+
+-- Instead of being generic we just hardcode the old stuff:
+
+local function registerauthor(target,key,juniors,firstnames,initials,vons,surnames)
+ local value = target[key]
+ target[key] = ((value and value .. " and {") or "{") ..
+ vons .. "},{" ..
+ surnames .. "},{" ..
+ juniors .. "},{" ..
+ firstnames .. "},{" ..
+ initials .. "}"
+end
+
+local leftbrace = P("{")
+local rightbrace = P("}")
+local leftbracket = P("[")
+local rightbracket = P("]")
+local backslash = P("\\")
+local letter = R("az","AZ")
+
+local skipspaces = lpeg.patterns.whitespace^0
+local key = Cs(letter^1)
+local value = leftbrace * Cs(lpeg.patterns.balanced) * rightbrace
+local optional = leftbracket * Cs((1-rightbracket)^0) * rightbracket
+
+local authorkey = (P("artauthor") + P("author")) / "author"
+ + (P("arteditor") + P("editor")) / "editor"
+local authorvalue = (optional + Cc("{}")) * skipspaces -- [juniors]
+ * (value + Cc("{}")) * skipspaces -- {firstnames}
+ * (optional + Cc("{}")) * skipspaces -- [initials]
+ * (value + Cc("{}")) * skipspaces -- {vons}
+ * (value + Cc("{}")) * skipspaces -- {surnames}
+
+local keyvalue = Carg(1) * authorkey * skipspaces * authorvalue / registerauthor
+ + Carg(1) * key * skipspaces * value / registervalue
+
+local pattern = (backslash * keyvalue + P(1))^0
+
+local function addtexentry(dataset,settings,content)
+ local current = datasets[dataset]
+ local settings = settings_to_hash(settings)
+ local data = {
+ tag = settings.tag or settings.k or "no tag",
+ category = settings.category or settings.t or "article",
+ }
+ lpegmatch(pattern,content,1,data) -- can set tag too
+ local tag = data.tag
+ local index = publications.getindex(dataset,current.luadata,tag)
+ current.ordered[index] = data
+ current.luadata[tag] = data
+ current.userdata[tag] = data
+ current.details[tag] = nil
+ return data
+end
+
+local pattern = ( Carg(1)
+ * P("\\startpublication")
+ * skipspaces
+ * optional
+ * C((1 - P("\\stoppublication"))^1)
+ * P("\\stoppublication") / addtexentry
+ + P("%") * (1-lpeg.patterns.newline)^0
+ + P(1)
+)^0
+
+function publications.loaders.bbl(dataset,filename)
+ local dataset, fullname = publications.resolvedname(dataset,filename)
+ if not fullname then
+ return
+ end
+ local data = io.loaddata(fullname) or ""
+ if data == "" then
+ report("empty file %a, nothing loaded",fullname)
+ return
+ end
+ if trace then
+ report("loading file %a",fullname)
+ end
+ lpegmatch(pattern,data,1,dataset)
+end
+
+publications.addtexentry = addtexentry
+commands.addbtxentry = addtexentry
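+
+-- A possible invocation of the bbl loader, with a made-up dataset and file name;
+-- the file is expected to contain \startpublication ... \stoppublication blocks
+-- as in the commented example above:
+--
+-- publications.loaders.bbl("default","oldstyle.bbl")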
diff --git a/tex/context/base/publ-usr.mkiv b/tex/context/base/publ-usr.mkiv
new file mode 100644
index 000000000..cb078f424
--- /dev/null
+++ b/tex/context/base/publ-usr.mkiv
@@ -0,0 +1,2 @@
+% todo
+
diff --git a/tex/context/base/publ-xml.mkiv b/tex/context/base/publ-xml.mkiv
new file mode 100644
index 000000000..96375b9cc
--- /dev/null
+++ b/tex/context/base/publ-xml.mkiv
@@ -0,0 +1,114 @@
+%D \module
+%D [ file=publ-xml,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=XML,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Publication Support / XML}
+
+\unprotect
+
+\unexpanded\def\convertbtxdatasettoxml
+ {\dosingleempty\publ_convert_to_xml}
+
+\def\publ_convert_to_xml[#1]%
+ {\clf_convertbtxdatasettoxml{\iffirstargument#1\else\s!default\fi}} % or current when not empty
+
+% \startxmlsetups btx:initialize
+% \xmlregistereddocumentsetups{#1}{}
+% \xmlsetsetup{#1}{bibtex|entry|field}{btx:*}
+% \xmlmain{#1}
+% \stopxmlsetups
+
+\startxmlsetups btx:initialize
+ \xmlsetsetup{#1}{bibtex|entry|field}{btx:*}
+ \xmlmain{#1}
+\stopxmlsetups
+
+% \startxmlsetups btx:entry
+% \xmlflush{#1}
+% \stopxmlsetups
+
+\startxmlsetups btx:field
+ \xmlflushcontext{#1}
+\stopxmlsetups
+
+\protect \endinput
+
+% \startxmlsetups bibtex:entry:getkeys
+% \xmladdsortentry{bibtex}{#1}{\xmlfilter{#1}{/field[@name='author']/text()}}
+% \xmladdsortentry{bibtex}{#1}{\xmlfilter{#1}{/field[@name='year' ]/text()}}
+% \xmladdsortentry{bibtex}{#1}{\xmlatt{#1}{tag}}
+% \stopxmlsetups
+
+% \startbuffer
+% \startxmlsetups xml:bibtex:sorter
+% \xmlresetsorter{bibtex}
+% % \xmlfilter{#1}{entry/command(bibtex:entry:getkeys)}
+% \xmlfilter{#1}{
+% bibtex
+% /entry[@category='article']
+% /field[@name='author' and find(text(),'Knuth')]
+% /../command(bibtex:entry:getkeys)}
+% \xmlsortentries{bibtex}
+% \xmlflushsorter{bibtex}{bibtex:entry:flush}
+% \stopxmlsetups
+% \stopbuffer
+
+% \bgroup
+% \setups[bibtex-commands]
+% \getbuffer
+% \egroup
+
+% \startxmlsetups bibtex:entry:flush
+% \xmlfilter{#1}{/field[@name='author']/context()} / %
+% \xmlfilter{#1}{/field[@name='year' ]/context()} / %
+% \xmlatt{#1}{tag}\par
+% \stopxmlsetups
+
+% \startpacked
+% \getbuffer
+% \stoppacked
+
+
+% \unexpanded\def\btx_xml_list_handle_entry
+% {\begingroup
+% \ignorespaces
+% \xmlfilter{btx:\currentbtxrendering}{/bibtex/entry[@tag='\currentbtxtag']/command(btx:format)}%
+% \removeunwantedspaces
+% \endgroup}
+
+% \startxmlsetups btx:format
+% \btxlistparameter\c!before\relax % prevents lookahead
+% \edef\currentbibxmlnode {#1}
+% \edef\currentbibxmltag {\xmlatt{#1}{tag}}
+% \edef\currentbtxcategory{\xmlatt{#1}{category}}
+% \ignorespaces
+% \xmlcommand{#1}{.}{btx:\currentbtxformat:\currentbibxmlcategory}
+% \removeunwantedspaces
+% \btxlistparameter\c!after\relax % prevents lookahead
+% \stopxmlsetups
+
+% \startxmlsetups btx:list
+% \xmlfilter{#1}{/bibtex/entry/command(bibtex:format)}
+% \stopxmlsetups
+
+% \startxmlsetups btx:btx
+% \xmlfilter{#1}{/entry/command(btx:format)}
+% \stopxmlsetups
+
+% \unexpanded\def\btx_xml_doifelse#1{\xmldoifelse\currentbibxmlnode{/field[@name='#1']}}
+% \unexpanded\def\btx_xml_doif #1{\xmldoif \currentbibxmlnode{/field[@name='#1']}}
+% \unexpanded\def\btx_xml_doifnot #1{\xmldoifnot \currentbibxmlnode{/field[@name='#1']}}
+% \def\btx_xml_flush #1{\xmlcontext \currentbibxmlnode{/field[@name='#1']}}
+% \def\btx_xml_setup {\xmlsetup \currentbibxmlnode} % {#1}
+% \unexpanded\def\btx_xml_todo #1{[#1]}
+
+% \xmlfilter{#1}{/field[@name='\currentbtxfield']/btxconcat('\currentbtxfield')}
diff --git a/tex/context/base/regi-ini.lua b/tex/context/base/regi-ini.lua
index d5d278b16..37a88fd5f 100644
--- a/tex/context/base/regi-ini.lua
+++ b/tex/context/base/regi-ini.lua
@@ -15,7 +15,7 @@ runtime.
local commands, context = commands, context
local utfchar = utf.char
-local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+local P, Cs, Cc, lpegmatch = lpeg.P, lpeg.Cs, lpeg.Cc, lpeg.match
local char, gsub, format, gmatch, byte, match = string.char, string.gsub, string.format, string.gmatch, string.byte, string.match
local next = next
local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
@@ -27,6 +27,9 @@ local sequencers = utilities.sequencers
local textlineactions = resolvers.openers.helpers.textlineactions
local setmetatableindex = table.setmetatableindex
+local implement = interfaces.implement
+local setmacro = interfaces.setmacro
+
--[[ldx--
We will hook regime handling code into the input methods.
--ldx]]--
@@ -99,6 +102,8 @@ local synonyms = { -- backward compatibility list
["windows"] = "cp1252",
+ ["pdf"] = "pdfdoc",
+
}
local currentregime = "utf"
@@ -132,7 +137,7 @@ end
setmetatableindex(mapping, loadregime)
setmetatableindex(backmapping,loadreverse)
-local function translate(line,regime)
+local function fromregime(regime,line)
if line and #line > 0 then
local map = mapping[regime and synonyms[regime] or regime or currentregime]
if map then
@@ -178,12 +183,15 @@ local function toregime(vector,str,default) -- toregime('8859-1',"abcde Ä","?")
local r = c[d]
if not r then
local t = fastcopy(backmapping[vector])
- setmetatableindex(t, function(t,k)
- local v = d
- t[k] = v
- return v
- end)
- r = utf.remapper(t)
+ -- r = utf.remapper(t) -- not good for defaults here
+ local pattern = Cs((lpeg.utfchartabletopattern(t)/t + lpeg.patterns.utf8character/d + P(1)/d)^0)
+ r = function(str)
+ if not str or str == "" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end
c[d] = r
end
return r(str)
@@ -192,6 +200,7 @@ end
local function disable()
currentregime = "utf"
sequencers.disableaction(textlineactions,"regimes.process")
+ return currentregime
end
local function enable(regime)
@@ -202,12 +211,14 @@ local function enable(regime)
currentregime = regime
sequencers.enableaction(textlineactions,"regimes.process")
end
+ return currentregime
end
-regimes.toregime = toregime
-regimes.translate = translate
-regimes.enable = enable
-regimes.disable = disable
+regimes.toregime = toregime
+regimes.fromregime = fromregime
+regimes.translate = function(str,regime) return fromregime(regime,str) end
+regimes.enable = enable
+regimes.disable = disable
-- The following function can be used when we want to make sure that
-- utf gets passed unharmed. This is needed for modules.
@@ -216,7 +227,7 @@ local level = 0
function regimes.process(str,filename,currentline,noflines,coding)
if level == 0 and coding ~= "utf-8" then
- str = translate(str,currentregime)
+ str = fromregime(currentregime,str)
if trace_translating then
report_translating("utf: %s",str)
end
@@ -243,40 +254,62 @@ end
regimes.push = push
regimes.pop = pop
-sequencers.prependaction(textlineactions,"system","regimes.process")
-sequencers.disableaction(textlineactions,"regimes.process")
+if sequencers then
+
+ sequencers.prependaction(textlineactions,"system","regimes.process")
+ sequencers.disableaction(textlineactions,"regimes.process")
+
+end
-- interface:
-commands.enableregime = enable
-commands.disableregime = disable
+implement {
+ name = "enableregime",
+ arguments = "string",
+ actions = function(regime) setmacro("currentregime",enable(regime)) end
+}
-commands.pushregime = push
-commands.popregime = pop
+implement {
+ name = "disableregime",
+ actions = function() setmacro("currentregime",disable()) end
+}
-function commands.currentregime()
- context(currentregime)
-end
+implement {
+ name = "pushregime",
+ actions = push
+}
+
+implement {
+ name = "popregime",
+ actions = pop
+}
local stack = { }
-function commands.startregime(regime)
- insert(stack,currentregime)
- if trace_translating then
- report_translating("start using %a",regime)
+implement {
+ name = "startregime",
+ arguments = "string",
+ actions = function(regime)
+ insert(stack,currentregime)
+ if trace_translating then
+ report_translating("start using %a",regime)
+ end
+ setmacro("currentregime",enable(regime))
end
- enable(regime)
-end
+}
-function commands.stopregime()
- if #stack > 0 then
- local regime = remove(stack)
- if trace_translating then
- report_translating("stop using %a",regime)
+implement {
+ name = "stopregime",
+ actions = function()
+ if #stack > 0 then
+ local regime = remove(stack)
+ if trace_translating then
+ report_translating("stop using %a",regime)
+ end
+ setmacro("currentregime",enable(regime))
end
- enable(regime)
end
-end
+}
-- Next we provide some hacks. Unfortunately we run into crappy encoded
-- (read : mixed) encoded xml files that have these ë ä ö ü sequences
@@ -311,48 +344,82 @@ local patterns = { }
--
-- twice as fast and much less lpeg bytecode
+-- function regimes.cleanup(regime,str)
+-- if not str or str == "" then
+-- return str
+-- end
+-- local p = patterns[regime]
+-- if p == nil then
+-- regime = regime and synonyms[regime] or regime or currentregime
+-- local vector = regime ~= "utf" and regime ~= "utf-8" and mapping[regime]
+-- if vector then
+-- local utfchars = { }
+-- local firsts = { }
+-- for k, uchar in next, vector do
+-- local stream = { }
+-- local split = totable(uchar)
+-- local nofsplits = #split
+-- if nofsplits > 1 then
+-- local first
+-- for i=1,nofsplits do
+-- local u = vector[split[i]]
+-- if not first then
+-- first = firsts[u]
+-- if not first then
+-- first = { }
+-- firsts[u] = first
+-- end
+-- end
+-- stream[i] = u
+-- end
+-- local nofstream = #stream
+-- if nofstream > 1 then
+-- first[#first+1] = concat(stream,2,nofstream)
+-- utfchars[concat(stream)] = uchar
+-- end
+-- end
+-- end
+-- p = P(false)
+-- for k, v in next, firsts do
+-- local q = P(false)
+-- for i=1,#v do
+-- q = q + P(v[i])
+-- end
+-- p = p + P(k) * q
+-- end
+-- p = Cs(((p+1)/utfchars)^1)
+-- -- lpeg.print(p) -- size: 1042
+-- else
+-- p = false
+-- end
+-- patterns[regime] = p
+-- end
+-- return p and lpegmatch(p,str) or str
+-- end
+--
+-- 5 times faster:
+
function regimes.cleanup(regime,str)
+ if not str or str == "" then
+ return str
+ end
local p = patterns[regime]
if p == nil then
regime = regime and synonyms[regime] or regime or currentregime
- local vector = regime ~= "utf" and mapping[regime]
+ local vector = regime ~= "utf" and regime ~= "utf-8" and mapping[regime]
if vector then
- local utfchars = { }
- local firsts = { }
- for k, uchar in next, vector do
- local stream = { }
- local split = totable(uchar)
- local nofsplits = #split
- if nofsplits > 1 then
- local first
- for i=1,nofsplits do
- local u = vector[split[i]]
- if not first then
- first = firsts[u]
- if not first then
- first = { }
- firsts[u] = first
- end
- end
- stream[i] = u
- end
- local nofstream = #stream
- if nofstream > 1 then
- first[#first+1] = concat(stream,2,nofstream)
- utfchars[concat(stream)] = uchar
- end
+ local mapping = { }
+ for k, v in next, vector do
+ local split = totable(v)
+ for i=1,#split do
+ split[i] = utfchar(byte(split[i]))
end
- end
- p = P(false)
- for k, v in next, firsts do
- local q = P(false)
- for i=1,#v do
- q = q + P(v[i])
+ split = concat(split)
+ if v ~= split then
+ mapping[split] = v
end
- p = p + P(k) * q
end
- p = Cs(((p+1)/utfchars)^1)
- -- lpeg.print(p) -- size: 1042
+ p = Cs((lpeg.utfchartabletopattern(mapping)/mapping+P(1))^0)
else
p = false
end
@@ -361,28 +428,9 @@ function regimes.cleanup(regime,str)
return p and lpegmatch(p,str) or str
end
--- local map = require("regi-cp1252")
-- local old = [[test ë ä ö ü crap]]
--- local new = correctencoding(map,old)
---
--- print(old,new)
-
--- obsolete:
---
--- function regimes.setsynonym(synonym,target)
--- synonyms[synonym] = target
--- end
---
--- function regimes.truename(regime)
--- return regime and synonyms[regime] or regime or currentregime
--- end
---
--- commands.setregimesynonym = regimes.setsynonym
---
--- function commands.trueregimename(regime)
--- context(regimes.truename(regime))
--- end
---
--- function regimes.load(regime)
--- return mapping[synonyms[regime] or regime]
--- end
+-- local new = regimes.cleanup("cp1252",old)
+-- report_translating("%s -> %s",old,new)
+-- local old = "Pozn" .. char(0xE1) .. "mky"
+-- local new = fromregime("cp1250",old)
+-- report_translating("%s -> %s",old,new)
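+--
+-- Going the other way, converting utf into a specific regime, follows the
+-- signature shown earlier in this file:
+--
+-- local old = "abcde Ä"
+-- local new = regimes.toregime("8859-1",old,"?")
+-- report_translating("%s -> %s",old,new)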
diff --git a/tex/context/base/regi-ini.mkiv b/tex/context/base/regi-ini.mkiv
index 651e2f13c..6190393d8 100644
--- a/tex/context/base/regi-ini.mkiv
+++ b/tex/context/base/regi-ini.mkiv
@@ -16,34 +16,18 @@
\unprotect
%D \macros
-%D {enableregime,disableregime,currentregime}
+%D {enableregime,disableregime,
+%D startregime,stopregime,
+%D currentregime}
%D
%D Beware, the enable and disable commands are global switches, so
%D best use the start|/|stop commands.
-\unexpanded\def\enableregime[#1]{\ctxcommand{enableregime("#1")}}
-\unexpanded\def\disableregime {\ctxcommand{disableregime()}}
-\unexpanded\def\startregime [#1]{\ctxcommand{startregime("#1")}}
-\unexpanded\def\stopregime {\ctxcommand{stopregime()}}
- \def\currentregime {\ctxcommand{currentregime()}}
+\let\currentregime\empty
-% D \macros
-% D {defineregimesynonym,trueregimename}
-%
-% \unexpanded\def\defineregimesynonym % more or less obsolete
-% {\dodoubleargument\dodefineregimesynonym}
-%
-% \def\dodefineregimesynonym[#1][#2]%
-% {\ctxcommand{setregimesynonym("#1","#2")}}
-%
-% \def\trueregimename#1%
-% {\ctxcommand{trueregimename("#1")}}
-
-% D \macros
-% D {useregime}
-% D
-% D This one is sort of obsolete but we keep them around.
-
-\unexpanded\def\useregime[#1]{}
+\unexpanded\def\enableregime[#1]{\clf_enableregime{#1}}
+\unexpanded\def\disableregime {\clf_disableregime}
+\unexpanded\def\startregime [#1]{\clf_startregime{#1}}
+\unexpanded\def\stopregime {\clf_stopregime}
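+
+% A typical usage sketch, assuming an input file encoded in cp1250:
+%
+% \startregime[cp1250]
+%   ... text in that encoding ...
+% \stopregime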
\protect \endinput
diff --git a/tex/context/base/regi-pdfdoc.lua b/tex/context/base/regi-pdfdoc.lua
new file mode 100644
index 000000000..363d3ae0d
--- /dev/null
+++ b/tex/context/base/regi-pdfdoc.lua
@@ -0,0 +1,26 @@
+if not modules then modules = { } end modules ['regi-pdfdoc'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F, 0x0010,
+ 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x02D8, 0x02C7, 0x02C6, 0x02D9, 0x02DD, 0x02DB, 0x02DA, 0x02DC, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x2022, 0x2020, 0x2021, 0x2026, 0x2014, 0x2013, 0x0192, 0x2044, 0x2039, 0x203A, 0x2212, 0x2030, 0x201E, 0x201C, 0x201D, 0x2018,
+ 0x2019, 0x201A, 0x2122, 0xFB01, 0xFB02, 0x0141, 0x0152, 0x0160, 0x0178, 0x017D, 0x0131, 0x0142, 0x0153, 0x0161, 0x017E, 0x009F,
+ 0x20AC, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0xFFFD, 0x00AE, 0x00AF,
+ 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF,
+ 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF,
+ 0x00D0, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x00DE, 0x00DF,
+ 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF,
+ 0x00F0, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x00FE, 0x00FF,
+}
diff --git a/tex/context/base/s-abr-01.tex b/tex/context/base/s-abr-01.tex
index 7d8064b29..00a1a5c1e 100644
--- a/tex/context/base/s-abr-01.tex
+++ b/tex/context/base/s-abr-01.tex
@@ -34,6 +34,7 @@
%logo [FGA] {fga}
%logo [FGBBS] {fgbbs}
\logo [ACROBAT] {Acro\-bat}
+\logo [APA] {apa}
\logo [AFM] {afm}
\logo [API] {api}
\logo [ALEPH] {Aleph} % {\mathematics{\aleph}}
@@ -45,8 +46,10 @@
\logo [ARABTEX] {Arab\TeX}
\logo [ASCII] {ascii}
\logo [ASCIITEX] {ascii\TeX}
+\logo [ASCIIMATH] {AsciiMath}
\logo [BACHOTEX] {Bacho\TeX}
\logo [BIBTEX] {bib\TeX}
+\logo [MLBIBTEX] {MLbib\TeX}
\logo [BLUESKY] {BlueSky}
\logo [BMP] {bmp}
\logo [BSD] {bsd}
@@ -64,6 +67,8 @@
\logo [CMYK] {cmyk}
\logo [CODHOST] {CodHost}
\logo [CONTEXT] {\ConTeXt}
+\logo [CONTEXTWIKI] {\ConTeXt\ Wiki}
+\logo [CONTEXTGROUP] {\ConTeXt\ Group}
\logo [CSS] {css}
\logo [CTAN] {ctan}
\logo [CTXTOOLS] {ctxtools}
@@ -92,6 +97,7 @@
\logo [EMTEX] {em\TeX}
\logo [ENCODING] {enc}
\logo [ENCTEX] {enc\TeX}
+\logo [EPUB] {ePub}
\logo [EPS] {eps}
\logo [ETEX] {\eTeX}
\logo [EUROBACHOTEX] {EuroBacho\TeX}
@@ -233,11 +239,13 @@
\logo [SSD] {ssd}
\logo [SVG] {svg}
\logo [STIX] {Stix}
+\logo [SUMATRAPDF] {SumatraPDF}
\logo [SWIG] {swig}
\logo [SWIGLIB] {SwigLib}
\logo [TABLE] {\TaBlE}
\logo [TCPIP] {tcp/ip}
\logo [TDS] {tds} % no sc te
+\logo [TEI] {tei} % no sc te
\logo [TETEX] {te\TeX} % no sc te
\logo [TEX] {\TeX}
\logo [TEXADRES] {\TeX adress}
@@ -259,6 +267,7 @@
\logo [TEXTOOLS] {\TeX tools}
\logo [TEXUTIL] {\TeX util}
\logo [TEXWORK] {\TeX work}
+\logo [TEXWORKS] {\TeX works}
\logo [TEXXET] {\TeX\XeT} \def\XeT{XeT}
\logo [TFM] {tfm}
\logo [TIF] {tif}
diff --git a/tex/context/base/s-art-01.mkiv b/tex/context/base/s-art-01.mkiv
index e2584a357..601ee1adc 100644
--- a/tex/context/base/s-art-01.mkiv
+++ b/tex/context/base/s-art-01.mkiv
@@ -2,7 +2,7 @@
\startmodule[art-01]
-\definemeasure [article:margin] [\paperheight/15]
+\definemeasure [article:margin] [\paperheight/20] % was 15, see xtables-mkiv for results
\definemeasure [overview:margin] [\paperheight/30]
\definelayout
@@ -10,7 +10,7 @@
[\c!topspace=\measure{article:margin},
\c!bottomspace=\measure{article:margin},
\c!backspace=\measure{article:margin},
- \c!header=\measure{overview:margin},
+ \c!header=\measure{article:margin},
\c!footer=0pt,
\c!width=\v!middle,
\c!height=\v!middle]
diff --git a/tex/context/base/s-figures-names.mkiv b/tex/context/base/s-figures-names.mkiv
new file mode 100644
index 000000000..a2782efc9
--- /dev/null
+++ b/tex/context/base/s-figures-names.mkiv
@@ -0,0 +1,99 @@
+%D \module
+%D [ file=s-figures-names.mkiv,
+%D version=2017.07.17,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Show Figure Names,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D Normally this module will be run somewhere at the end of a document in some kind of tracing
+%D mode. We could hook it into \type {\stoptext} but it is better to leave that up to the user.
+%D Loading this module upfront will not show anything useful. The main reason for making this
+%D module was that we wanted to report weird figure names: spaces, multiple hyphens in a row,
+%D mixed hyphens and underscores, inconsistently mixed upper- and lowercase, etc.
+%D
+%D If you only want info in the logfile, you can use:
+%D
+%D \starttyping
+%D \enabletrackers[graphics.lognames]
+%D \stoptyping
+%D
+%D This tracker is persistent and can be enabled any time before the end of the run.
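+%D
+%D A hedged usage sketch: load the module at the very end of the document body. The
+%D module name below relies on the usual \type {s-*} lookup of \type {\usemodule},
+%D which is an assumption and not something this file enforces.
+%D
+%D \starttyping
+%D \starttext
+%D     % ... document body with figures ...
+%D     \usemodule[figures-names]
+%D \stoptext
+%D \stoptyping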
+
+\startmodule[figures-names]
+
+\startluacode
+
+local context = context
+local ctx_NC = context.NC
+local ctx_NR = context.NR
+local ctx_red = context.red
+local ctx_starttabulate = context.starttabulate
+local ctx_stoptabulate = context.stoptabulate
+local ctx_hyphenatedname = context.hyphenatedfilename
+
+trackers.enable("graphics.lognames")
+
+context.start()
+
+ context.switchtobodyfont { "tt,small" }
+
+ local template = { "|Bl|p|" }
+
+ for _, data in table.sortedhash(figures.found) do
+ ctx_starttabulate(template)
+ local badname = data.badname
+ local found = data.found
+ ctx_NC()
+ context("asked name")
+ ctx_NC()
+ if badname then
+ ctx_red()
+ end
+ context(data.askedname)
+ ctx_NC() ctx_NR()
+ if found then
+ ctx_NC()
+ context("format")
+ ctx_NC()
+ context(data.format)
+ ctx_NC() ctx_NR()
+ ctx_NC()
+ context("found file")
+ ctx_NC()
+ ctx_hyphenatedname(data.foundname)
+ -- context(data.foundname)
+ ctx_NC() ctx_NR()
+ ctx_NC()
+ context("used file")
+ ctx_NC()
+ ctx_hyphenatedname(data.fullname)
+ -- context(data.fullname)
+ ctx_NC() ctx_NR()
+ if badname then
+ ctx_NC()
+ context("comment")
+ ctx_NC()
+ context("bad name")
+ ctx_NC() ctx_NR()
+ end
+ else
+ ctx_NC()
+ context("comment")
+ ctx_NC()
+ context(data.comment or "not found")
+ ctx_NC() ctx_NR()
+ end
+ ctx_stoptabulate()
+ end
+
+context.stop()
+
+\stopluacode
+
+\stopmodule
diff --git a/tex/context/base/s-fnt-10.mkiv b/tex/context/base/s-fnt-10.mkiv
index 9b6211c2b..2fe82e079 100644
--- a/tex/context/base/s-fnt-10.mkiv
+++ b/tex/context/base/s-fnt-10.mkiv
@@ -100,7 +100,7 @@ function fonts.tracers.show_glyphs()
end
\stopluacode
-\def\ShowCompleteFont#1#2#3%
+\unexpanded\def\ShowCompleteFont#1#2#3%
{\bgroup
\page
\font\TestFont=#1 at #2
@@ -118,7 +118,7 @@ end
\page
\egroup}
-\def\ShowAllGlyphs#1#2#3%
+\unexpanded\def\ShowAllGlyphs#1#2#3%
{\bgroup
\page
\def\showglyph##1{\dontleavehmode\strut\char##1\relax\par}
diff --git a/tex/context/base/s-fnt-21.mkiv b/tex/context/base/s-fnt-21.mkiv
index 588c98016..10d5a4623 100644
--- a/tex/context/base/s-fnt-21.mkiv
+++ b/tex/context/base/s-fnt-21.mkiv
@@ -47,7 +47,7 @@
\endinput
-% \usemodule[fnt-20]
+% \usemodule[fnt-21]
%
% \starttext
%
diff --git a/tex/context/base/s-fnt-24.mkiv b/tex/context/base/s-fnt-24.mkiv
index 2c0022e5c..2e6b9a591 100644
--- a/tex/context/base/s-fnt-24.mkiv
+++ b/tex/context/base/s-fnt-24.mkiv
@@ -56,7 +56,7 @@ function fonts.analyzers.cjktest(first,second)
end
\stopluacode
-\def\ShowCombinationsKorean
+\unexpanded\def\ShowCombinationsKorean
{\dodoubleempty\doShowCombinationsKorean}
\def\doShowCombinationsKorean[#1][#2]%
@@ -68,7 +68,7 @@ end
\disabletrackers[cjk.analyzing]
\stoppacked}
-\def\ShowCombinationsChinese
+\unexpanded\def\ShowCombinationsChinese
{\dodoubleempty\doShowCombinationsChinese}
\def\doShowCombinationsChinese[#1][#2]%
diff --git a/tex/context/base/s-fonts-coverage.lua b/tex/context/base/s-fonts-coverage.lua
index db47e57c4..dd772d5f0 100644
--- a/tex/context/base/s-fonts-coverage.lua
+++ b/tex/context/base/s-fonts-coverage.lua
@@ -29,6 +29,7 @@ function moduledata.fonts.coverage.showcomparison(specification)
local present = { }
local names = { }
local files = { }
+ local chars = { }
if not pattern then
-- skip
@@ -56,6 +57,7 @@ function moduledata.fonts.coverage.showcomparison(specification)
end
names[#names+1] = fontname
files[#files+1] = fontfile
+ chars[#names] = fontdata.characters
end
end
@@ -91,7 +93,11 @@ function moduledata.fonts.coverage.showcomparison(specification)
NR()
HL()
for k, v in table.sortedpairs(present) do
- if k > 0 then
+ if k <= 0 then
+ -- ignore
+ elseif k >= 0x110000 then
+ logs.report("warning","weird character %U",k)
+ else
local description = chardata[k].description
if not pattern or (pattern == k) or (description and lpegmatch(pattern,description)) then
NC()
@@ -99,7 +105,11 @@ function moduledata.fonts.coverage.showcomparison(specification)
NC()
for i=1,#names do
getvalue(names[i])
- char(k)
+ if chars[i][k] then
+ char(k)
+ else
+ -- missing
+ end
NC()
end
context(description)
diff --git a/tex/context/base/s-fonts-ligatures.mkiv b/tex/context/base/s-fonts-ligatures.mkiv
new file mode 100644
index 000000000..e6ff2461e
--- /dev/null
+++ b/tex/context/base/s-fonts-ligatures.mkiv
@@ -0,0 +1,292 @@
+%D \module
+%D [ file=s-fonts-ligatures,
+%D version=2014.12.14,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Show Fonts Ligatures,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% begin info
+%
+% title : show some ligature building in fonts
+%
+% comment : we trace some ligatures that have rather different implementations in fonts
+% status : experimental, used for luatex testing
+%
+% end info
+
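+%D A hedged usage sketch; the module name passed to \type {\usemodule} assumes the
+%D usual \type {s-*} lookup, and the font is just one of the samples tested at the
+%D end of this file:
+%D
+%D \starttyping
+%D \usemodule[fonts-ligatures]
+%D \starttext
+%D     \showotfligatures[font=dejavu-serif.ttf,features=default]
+%D \stoptext
+%D \stoptyping
+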
+\definefontfeature
+ [otfligaturetest]
+ [analyze=off,
+ ccmp=yes, % brill uses that .. not really ligatures !
+ %clig=yes,
+ script=latn,
+ language=dflt]
+
+\hyphenation{xf-fi-a}
+\hyphenation{xff-i-b}
+\hyphenation{xffi-c}
+\hyphenation{xffid}
+
+\registerhyphenationexception[xf-fi-a]
+\registerhyphenationexception[xff-i-b]
+\registerhyphenationexception[xffi-c]
+\registerhyphenationexception[xffid]
+
+\starttexdefinition showotfligaturescaption #1
+ \bTD [align={flushleft,lohi},nx=3]
+ \nohyphens
+ \ttbf
+ #1
+ \eTD
+\stoptexdefinition
+
+\starttexdefinition showotfligatureslegend #1
+ \bTD [width=6em,align={flushleft,lohi}]
+ \nohyphens \ttxx original
+ \eTD
+ \bTD [width=6em,align={flushleft,lohi}]
+ \nohyphens \ttxx expanded
+ \eTD
+ \bTD [width=6em,align={flushleft,lohi}]
+ \nohyphens \ttxx traditional
+ \eTD
+\stoptexdefinition
+
+\starttexdefinition showotfligaturesline #1#2
+ \bTD[toffset=.5ex,frame=off]
+ \starthyphenation[#1]
+ \LigatureFont
+ \showfontkerns
+ \showdiscretionaries
+ \begstrut#2\endstrut
+ \par
+ \stophyphenation
+ \eTD
+\stoptexdefinition
+
+\def\showotfligaturescells{3}
+%def\showotfligaturesnx {12}
+%def\showotfligatureswidth{18em}
+\def\showotfligaturesnx {\the\numexpr 4*\showotfligaturescells}
+\def\showotfligatureswidth{\the\dimexpr6em*\showotfligaturescells}
+
+\starttexdefinition showotfligaturesbanner #1
+ \bTR[frame=off]
+ \bTD [nx=\showotfligaturesnx,width=\showotfligatureswidth,align={middle,lohi},height=4ex]
+ \tttf #1
+ \eTD
+ \eTR
+\stoptexdefinition
+
+\starttexdefinition showotfligaturescaptions #1
+ \bTR[height=3ex,align={middle,lohi},bottomframe=off]
+ \processcommalist[#1]\showotfligaturescaption
+ \eTR
+ \bTR[height=3ex,align={middle,lohi},topframe=off]
+ \processcommalist[#1]\showotfligatureslegend
+ \eTR
+\stoptexdefinition
+
+\starttexdefinition showotfligatureslineset #1
+ \showotfligaturesline{original} {#1}
+ \showotfligaturesline{expanded} {#1}
+ \showotfligaturesline{traditional}{#1}
+\stoptexdefinition
+
+
+\starttexdefinition showotfligaturesparagraphset #1
+ \showotfligatureslineset {
+ \hsize \zeropoint
+ \lefthyphenmin \plustwo
+ \righthyphenmin\plustwo
+ #1
+ }
+\stoptexdefinition
+
+\starttexdefinition showotfligaturesextremeset #1
+ \showotfligatureslineset {
+ \hsize \zeropoint
+ \lefthyphenmin \plusone
+ \righthyphenmin\plusone
+ #1
+ }
+\stoptexdefinition
+
+\starttexdefinition showotfligatureslines #1
+ \bTR[height=4ex,bottomframe=off]
+ \processcommalist[#1]\showotfligatureslineset
+ \eTR
+\stoptexdefinition
+
+\starttexdefinition showotfligaturesparagraphs #1
+ \bTR[topframe=off]
+ \processcommalist[#1]\showotfligaturesparagraphset
+ \eTR
+\stoptexdefinition
+
+\starttexdefinition showotfligaturesextremes #1
+ \bTR[topframe=off]
+ \processcommalist[#1]\showotfligaturesextremeset
+ \eTR
+\stoptexdefinition
+
+\starttexdefinition showotfligaturesdefaults
+ \bTR
+ \bTD [nx=\showotfligaturesnx,width=\showotfligatureswidth,align=middle,height=4ex,frame=off]
+ \start \LigatureFont fb \stop \quad
+ \start \LigatureFont ff \stop \quad
+ \start \LigatureFont fi \stop \quad
+ \start \LigatureFont fk \stop \quad
+ \start \LigatureFont fl \stop \quad
+ \start \LigatureFont ft \stop \quad
+ \start \LigatureFont ffb \stop \quad
+ \start \LigatureFont fff \stop \quad
+ \start \LigatureFont ffi \stop \quad
+ \start \LigatureFont ffl \stop \quad
+ \start \LigatureFont ffk \stop \quad
+ \start \LigatureFont fft \stop
+ \eTD
+ \eTR
+\stoptexdefinition
+
+\starttexdefinition showotfligaturesexample #1#2
+ \showotfligaturescaptions {#1}
+ \showotfligatureslines {#2}
+ \showotfligaturesparagraphs{#2}
+ \showotfligaturesextremes {#2}
+\stoptexdefinition
+
+% todo: n -> and split in lua
+
+\starttexdefinition showotfligaturesexamples
+ \showotfligaturesexample
+ {leafing,baffling,efficient,shifffahrt}
+ {leafing,baffling,efficient,shifffahrt}
+ \showotfligaturesexample
+ {offbeat,effect,ef-fective,ef\-fective}
+ {offbeat,effect,ef-fective,ef\-fective}
+ \showotfligaturesexample
+ {xf+fi+a,xff+i+b,xffi+c,xffid}
+ {xffia, xffib, xffic, xffid}
+\stoptexdefinition
+
+\starttexdefinition showotfligatures [#1]
+ \begingroup
+ \getdummyparameters[font=Regular,features=default,#1]
+ \definefont[LigatureFont][\dummyparameter{font}*\dummyparameter{features},otfligaturetest ht 2ex]
+ \bTABLE[leftframe=off,rightframe=off]
+ \showotfligaturesbanner{\dummyparameter{font} * \dummyparameter{features}}
+ \showotfligaturesdefaults
+ \showotfligaturesexamples
+ \eTABLE
+ \endgroup
+\stoptexdefinition
+
+\continueifinputfile{s-fonts-ligatures.mkiv}
+
+\starttext
+
+ \startTEXpage[offset=10pt] \showotfligatures[font=lmroman10-regular.otf, features=default] \stopTEXpage
+ \startTEXpage[offset=10pt] \showotfligatures[font=dejavu-serif.ttf, features=default] \stopTEXpage
+ \startTEXpage[offset=10pt] \showotfligatures[font=minionpro.otf, features=default] \stopTEXpage
+ \startTEXpage[offset=10pt] \showotfligatures[font=minionpro.otf, features=smallcaps] \stopTEXpage
+ \startTEXpage[offset=10pt] \showotfligatures[font=brill.otf, features=default] \stopTEXpage
+ \startTEXpage[offset=10pt] \showotfligatures[font=gentiumplus-r.ttf, features=default] \stopTEXpage
+ \startTEXpage[offset=10pt] \showotfligatures[font=cambria, features=default] \stopTEXpage
+
+\stoptext
+
+% \startluacode
+%
+% local f = fonts.hashes.identifiers[true]
+%
+% local sequences = f.resources.sequences
+% local descriptions = f.shared.rawdata.descriptions
+% local lookuptypes = f.resources.lookuptypes
+% local lookups = f.resources.lookups
+%
+% local ligatures = { "liga", "dlig", "rlig", "clig", "tlig", "ccmp" }
+% local found = { }
+%
+% for i=1,#sequences do
+% local sequence = sequences[i]
+% local features = sequence.features
+% for i=1,#ligatures do
+% local l = ligatures[i]
+% if features[l] then
+% local subtables = sequence.subtables
+% if subtables then
+% for i=1,#subtables do
+% local subtable = subtables[i]
+% local lookup = found[subtable]
+% if lookup then
+% lookup[l] = true
+% else
+% found[subtable] = { [l] = true }
+% end
+% end
+% end
+% end
+% end
+% end
+%
+% context.starttabulate { "|||T|T|T|" }
+%
+% local function flush(l,v,start,unicode,data,done)
+% local features = found[l]
+% if features then
+% local lookuptype = lookuptypes[l]
+% if lookuptype == "ligature" then
+% local t = { }
+% for i=1,#v do
+% t[i] = utf.char(v[i])
+% end
+% t = table.concat(t," ")
+% if not done[t] then
+% context.NC()
+% context(t)
+% context.NC()
+% context(utf.char(unicode))
+% context.NC()
+% context(" %t",table.sortedkeys(features))
+% context.NC()
+% local name = data.name
+% if name then
+% context(name)
+% end
+% context.NC()
+% context("%U",unicode)
+% context.NC()
+% context.NR()
+% done[t] = true
+% end
+% end
+% end
+% end
+%
+% for unicode, data in table.sortedhash(descriptions) do
+% local slookups = data.slookups
+% local mlookups = data.mlookups
+% local done = { }
+% if slookups then
+% for l, v in next, slookups do
+% flush(l,v,1,unicode,data,done)
+% end
+% end
+% if mlookups then
+% for i=1,#mlookups do
+% local v = mlookups[i]
+% flush(v[1],v,2,unicode,data,done)
+% end
+% end
+% end
+%
+% context.stoptabulate()
+%
+% \stopluacode
diff --git a/tex/context/base/s-fonts-shapes.lua b/tex/context/base/s-fonts-shapes.lua
index b387c11dd..bca860f3f 100644
--- a/tex/context/base/s-fonts-shapes.lua
+++ b/tex/context/base/s-fonts-shapes.lua
@@ -16,24 +16,32 @@ local NC, NR = context.NC, context.NR
local space, dontleavehmode, glyph, getvalue = context.space, context.dontleavehmode, context.glyph, context.getvalue
local formatters = string.formatters
-function moduledata.fonts.shapes.showlist(specification) -- todo: ranges
- specification = interfaces.checkedspecification(specification)
- local id, cs = fonts.definers.internal(specification,"")
- local chrs = fontdata[id].characters
- function char(k)
- dontleavehmode()
- glyph(id,k)
- end
- local function special(v)
- local specials = v.specials
- if specials and #specials > 1 then
- context("%s:",specials[1])
+function char(id,k)
+ dontleavehmode()
+ glyph(id,k)
+end
+
+local function special(id,specials)
+ if specials and #specials > 1 then
+ context("%s:",specials[1])
+ if #specials > 5 then
+ space() char(id,specials[2])
+ space() char(id,specials[3])
+ space() context("...")
+ space() char(id,specials[#specials-1])
+ space() char(id,specials[#specials])
+ else
for i=2,#specials do
- space()
- char(specials[i])
+ space() char(id,specials[i])
end
end
end
+end
+
+function moduledata.fonts.shapes.showlist(specification) -- todo: ranges
+ specification = interfaces.checkedspecification(specification)
+ local id, cs = fonts.definers.internal(specification,"")
+ local chrs = fontdata[id].characters
context.begingroup()
context.tt()
context.starttabulate { "|l|c|c|c|c|l|l|" }
@@ -50,11 +58,11 @@ function moduledata.fonts.shapes.showlist(specification) -- todo: ranges
for k, v in next, characters.data do
if chrs[k] then
NC() context("0x%05X",k)
- NC() char(k) -- getvalue(cs) context.char(k)
- NC() char(v.shcode)
- NC() char(v.lccode or k)
- NC() char(v.uccode or k)
- NC() special(v)
+ NC() char(id,k) -- getvalue(cs) context.char(k)
+ NC() char(id,v.shcode)
+ NC() char(id,v.lccode or k)
+ NC() char(id,v.uccode or k)
+ NC() special(id,v.specials)
NC() context.tx(v.description)
NC() NR()
end
@@ -67,20 +75,6 @@ function moduledata.fonts.shapes.showlist(specification) -- todo: ranges
specification = interfaces.checkedspecification(specification)
local id, cs = fonts.definers.internal(specification,"")
local chrs = fontdata[id].characters
- function char(k)
- dontleavehmode()
- glyph(id,k)
- end
- local function special(v)
- local specials = v.specials
- if specials and #specials > 1 then
- context("%s:",specials[1])
- for i=2,#specials do
- space()
- char(specials[i])
- end
- end
- end
context.begingroup()
context.tt()
context.starttabulate { "|l|c|c|c|c|l|l|" }
@@ -97,11 +91,11 @@ function moduledata.fonts.shapes.showlist(specification) -- todo: ranges
for k, v in next, characters.data do
if chrs[k] then
NC() context("0x%05X",k)
- NC() char(k)
- NC() char(v.shcode)
- NC() char(v.lccode or k)
- NC() char(v.uccode or k)
- NC() special(v)
+ NC() char(id,k)
+ NC() char(id,v.shcode)
+ NC() char(id,v.lccode or k)
+ NC() char(id,v.uccode or k)
+ NC() special(id,v.specials)
NC() context.tx(v.description)
NC() NR()
end
@@ -137,7 +131,8 @@ local function showglyphshape(specification)
local top_accent, bot_accent = (d.top_accent or 0)*factor, (d.bot_accent or 0)*factor
local anchors, math = d.anchors, d.math
context.startMPcode()
- context("pickup pencircle scaled .25bp ;")
+ context("numeric lw ; lw := .125bp ;")
+ context("pickup pencircle scaled lw ;")
context('picture p ; p := image(draw textext.drt("\\getuvalue{%s}\\gray\\char%s");); draw p ;',cs,charnum)
context('draw (%s,%s)--(%s,%s)--(%s,%s)--(%s,%s)--cycle withcolor green ;',llx,lly,urx,lly,urx,ury,llx,ury)
context('draw (%s,%s)--(%s,%s) withcolor green ;',llx,0,urx,0)
@@ -153,11 +148,11 @@ local function showglyphshape(specification)
l[#l+1] = formatters["((%s,%s) shifted (%s,%s))"](xsign*k*factor,ysign*h*factor,dx,dy)
end
end
- context("draw ((%s,%s) shifted (%s,%s))--%s dashed (evenly scaled .25) withcolor .5white;", xsign*v[1].kern*factor,lly,dx,dy,l[1])
+ context("draw ((%s,%s) shifted (%s,%s))--%s dashed (evenly scaled 1/16) withcolor .5white;", xsign*v[1].kern*factor,lly,dx,dy,l[1])
context("draw laddered (%s) withcolor .5white ;",table.concat(l,".."))
- context("draw ((%s,%s) shifted (%s,%s))--%s dashed (evenly scaled .25) withcolor .5white;", xsign*v[#v].kern*factor,ury,dx,dy,l[#l])
+ context("draw ((%s,%s) shifted (%s,%s))--%s dashed (evenly scaled 1/16) withcolor .5white;", xsign*v[#v].kern*factor,ury,dx,dy,l[#l])
for k, v in ipairs(l) do
- context("draw %s withcolor blue withpen pencircle scaled 1bp;",v)
+ context("draw %s withcolor blue withpen pencircle scaled 2lw ;",v)
end
end
end
@@ -203,7 +198,7 @@ local function showglyphshape(specification)
end
local function show(x,y,txt)
local xx, yy = x*factor, y*factor
- context("draw (%s,%s) withcolor blue withpen pencircle scaled 1bp;",xx,yy)
+ context("draw (%s,%s) withcolor blue withpen pencircle scaled 2lw ;",xx,yy)
context('label.top("\\type{%s}",(%s,%s-2bp)) ;',txt,xx,yy)
context('label.bot("(%s,%s)",(%s,%s+2bp)) ;',x,y,xx,yy)
end
@@ -242,9 +237,9 @@ local function showglyphshape(specification)
end
end
if italic ~= 0 then
- context('draw (%s,%s-1bp)--(%s,%s-0.5bp) withcolor blue;',width,ury,width,ury)
- context('draw (%s,%s-1bp)--(%s,%s-0.5bp) withcolor blue;',width+italic,ury,width+italic,ury)
- context('draw (%s,%s-1bp)--(%s,%s-1bp) withcolor blue;',width,ury,width+italic,ury)
+ context('draw (%s,%s-1bp)--(%s,%s-0.5bp) withcolor blue ;',width,ury,width,ury)
+ context('draw (%s,%s-1bp)--(%s,%s-0.5bp) withcolor blue ;',width+italic,ury,width+italic,ury)
+ context('draw (%s,%s-1bp)--(%s,%s-1bp) withcolor blue ;',width,ury,width+italic,ury)
context('label.lft("\\type{%s}",(%s+2bp,%s-1bp));',"italic",width,ury)
context('label.rt("%s",(%s-2bp,%s-1bp));',d.italic,width+italic,ury)
end
@@ -258,7 +253,7 @@ local function showglyphshape(specification)
context('label.top("\\type{%s}",(%s,%s-1bp));',"bot_accent",top_accent,ury)
context('label.bot("%s",(%s,%s+1bp));',d.bot_accent,bot_accent,lly)
end
- context('draw origin withcolor red withpen pencircle scaled 1bp;')
+ context('draw origin withcolor red withpen pencircle scaled 2lw;')
context("setbounds currentpicture to boundingbox currentpicture enlarged 1bp ;")
context("currentpicture := currentpicture scaled 8 ;")
context.stopMPcode()
diff --git a/tex/context/base/s-fonts-tables.lua b/tex/context/base/s-fonts-tables.lua
index 5c91d5ee7..b3dac7d06 100644
--- a/tex/context/base/s-fonts-tables.lua
+++ b/tex/context/base/s-fonts-tables.lua
@@ -21,6 +21,12 @@ local digits = {
dflt = {
dflt = "1234567890 1/2",
},
+ arab = {
+ dflt = "",
+ },
+ latn = {
+ dflt = "1234567890 1/2",
+ }
}
local punctuation = {
@@ -65,6 +71,9 @@ local lowercase = {
cyrl= {
dflt = "абвгдежзиійклмнопрстуфхцчшщъыьѣэюяѳ"
},
+ arab = {
+ dflt = "ابجدهوزحطيكلمنسعفصقرشتثخذضظغ"
+ },
}
local samples = {
diff --git a/tex/context/base/s-inf-01.mkvi b/tex/context/base/s-inf-01.mkvi
index 8263413ac..2c0c1681e 100644
--- a/tex/context/base/s-inf-01.mkvi
+++ b/tex/context/base/s-inf-01.mkvi
@@ -126,6 +126,8 @@
return max, what, function(n) return (max == 0 and 0) or (n == 0 and 0) or n/max end
end
+ local f_norm = string.formatters["%0.3f"]
+
function document.context_state_1(what)
local max, what, norm = prepare(what)
context.starttabulate { "|Tl|T|T|T|T|T|" }
@@ -154,9 +156,15 @@
(used.mkvi[k] and "vi") or "~~",
k
)
+ -- context("%s\\enspace %s\\quad %s\\quad %s",
+ -- (used.mkii[k] and "ii") or "\\quad",
+ -- (used.mkiv[k] and "iv") or "\\quad",
+ -- (used.mkvi[k] and "vi") or "\\quad",
+ -- k
+ -- )
context.NC()
for i=1,#types do
- context.Bar(types[i],v[i],c[i],norm(v[i]))
+ context.Bar(types[i],v[i],c[i],f_norm(norm(v[i])))
context.NC()
end
context.NR()
@@ -170,7 +178,7 @@
local c = (what == size and comp[k]) or nope
context.StartUp(k)
for i=1,#types do
- context.Up(types[i],norm(v[i]))
+ context.Up(types[i],f_norm(norm(v[i])))
end
context.StopUp()
end
diff --git a/tex/context/base/s-inf-03.mkiv b/tex/context/base/s-inf-03.mkiv
index 822173d00..a253bed77 100644
--- a/tex/context/base/s-inf-03.mkiv
+++ b/tex/context/base/s-inf-03.mkiv
@@ -8,7 +8,7 @@
\setupbodyfont[dejavu]
-\doifmodeelse {tablet} {
+\doifelsemode {tablet} {
\setuppapersize
[S6,landscape]
@@ -16,7 +16,7 @@
\definefont
[TitlePageFont]
- [MonoBold at 16pt]
+ [MonoBold at 15pt]
\setupbodyfont
[tt,8pt]
@@ -25,7 +25,7 @@
\definefont
[TitlePageFont]
- [MonoBold at 20pt]
+ [MonoBold at 18pt]
\setupbodyfont
[tt]
@@ -159,16 +159,16 @@ local skipglobal = table.tohash {
"_G", "_M", "_ENV", "",
"context", "modules", "global", "arg", "utf", 1,
"_ptbs_", "_pcol_", "_plib_", "_clib_", "_tlib_",
- "kpse",
+ "kpse", "commands",
}
local skipkeys = table.tohash {
"_pcol_", "_plib_", "_clib_", "_tlib_", "_bpnf_", "_ptbs_",
- "_cldf_", "_cldn_",
+ "_cldf_", "_cldn_", "_cldo_",
"_clmb_", "_clme_", "_clmm_", "_clmn_", "_clma_", "_clmh_",
"_G", "_M", "_ENV", "",
+ -- "global", "shortcuts",
"_VERSION", "_COPYRIGHT", "_DESCRIPTION", "_NAME", "_PACKAGE", "__unload",
-
}
local sameglobal = {
@@ -196,6 +196,8 @@ local variant = 1 -- all parents
local variant = 2 -- parent name too
local variant = 3 -- no parents
+local done = { }
+
local function childtables(key,tab,handler,depth)
depth = depth or 1
local keys = sortedkeys(tab) -- no sorted_pairs
@@ -214,6 +216,10 @@ local function childtables(key,tab,handler,depth)
t = "data"
handler(s,t,depth)
else
+if done[v] then
+ -- logs.report("inf-03","key %a in %a already done",k,v)
+else
+ done[v] = true
handler(s,t,depth)
if variant == 3 then
childtables(false,v,handler,depth+1)
@@ -223,6 +229,7 @@ local function childtables(key,tab,handler,depth)
childtables(s,v,handler,depth+1)
end
end
+end
else
handler(s,t,depth)
end
@@ -245,6 +252,7 @@ end
local function show(title,subtitle,alias,builtin,t,lib,libcolor,glo,glocolor,mark,obsolete)
-- todo: table as argument
+-- print(title,subtitle,alias,builtin,t,lib,libcolor,glo,glocolor,mark,obsolete)
local keys = sortedkeys(t) -- no sorted_pairs
if #keys > 0 then
local fulltitle = title
@@ -341,13 +349,20 @@ end
show("global","",sameglobal.global,false,_G,builtin,"darkgreen",globals,"darkblue",false,obsolete)
+-- inspect(table.sortedkeys(context))
+
for k, v in table.sortedpairs(_G) do
if not skipglobal[k] and not obsolete[k] and type(v) == "table" and not marked(v) then
+
+ -- local mt = getmetatable(v)
+ -- print("!!!!!!!!!!",k,v,mt,mt and mt.__index)
+
if basiclua[k] then show(k,"basic lua",sameglobal[k],basiclua[k],v,builtin[k],"darkred", false,false,true)
elseif extralua[k] then show(k,"extra lua",sameglobal[k],extralua[k],v,builtin[k],"darkred", false,false,true)
elseif basictex[k] then show(k,"basic tex",sameglobal[k],basictex[k],v,builtin[k],"darkred", false,false,true)
elseif extratex[k] then show(k,"extra tex",sameglobal[k],extratex[k],v,builtin[k],"darkred", false,false,true)
- else show(k,"context", sameglobal[k],false, v,builtin[k],"darkyellow",false,false,true)
+ else
+ show(k,"context", sameglobal[k],false, v,builtin[k],"darkyellow",false,false,true)
end
end
end
diff --git a/tex/context/base/s-languages-hyphenation.lua b/tex/context/base/s-languages-hyphenation.lua
index 660392f80..c5a4f91f1 100644
--- a/tex/context/base/s-languages-hyphenation.lua
+++ b/tex/context/base/s-languages-hyphenation.lua
@@ -13,8 +13,8 @@ local a_colormodel = attributes.private('colormodel')
local nodecodes = nodes.nodecodes
local nodepool = nodes.pool
-local disc = nodecodes.disc
-local glyph = nodecodes.glyph
+local disc_code = nodecodes.disc
+local glyph_code = nodecodes.glyph
local emwidths = fonts.hashes.emwidths
local exheights = fonts.hashes.exheights
local newkern = nodepool.kern
@@ -23,8 +23,8 @@ local newglue = nodepool.glue
local insert_node_after = node.insert_after
local traverse_by_id = node.traverse_id
-local hyphenate = lang.hyphenate
-local find_tail = node.slide
+local hyphenate = languages.hyphenators.handler -- lang.hyphenate
+local find_tail = node.tail
local remove_node = nodes.remove
local tracers = nodes.tracers
@@ -36,11 +36,11 @@ local function identify(head,marked)
while current do
local id = current.id
local next = current.next
- if id == disc then
- if prev and next.id == glyph then -- catch other usage of disc
+ if id == disc_code then
+ if prev and next then -- and next.id == glyph_code then -- catch other usage of disc
marked[#marked+1] = prev
end
- elseif id == glyph then
+ elseif id == glyph_code then
prev = current
end
current = next
@@ -81,10 +81,10 @@ function moduledata.languages.hyphenation.showhyphens(head)
local m = { }
local l = langs[i]
marked[i] = m
- for n in traverse_by_id(glyph,head) do
+ for n in traverse_by_id(glyph_code,head) do
n.lang = l
end
- hyphenate(head,find_tail(head))
+ languages.hyphenators.methods.original(head)
identify(head,m)
strip(head,m)
end
diff --git a/tex/context/base/s-languages-hyphenation.mkiv b/tex/context/base/s-languages-hyphenation.mkiv
index 769c3d059..6662dbf2f 100644
--- a/tex/context/base/s-languages-hyphenation.mkiv
+++ b/tex/context/base/s-languages-hyphenation.mkiv
@@ -26,7 +26,7 @@
{\begingroup
\par
% \language\zerocount
- \setupalign[\v!nothyphenated]%
+ % \setupalign[\v!nothyphenated]%
\ctxlua{moduledata.languages.hyphenation.startcomparepatterns("#1")}}
\unexpanded\def\stopcomparepatterns
@@ -56,13 +56,15 @@
\starttext
+\def|#1|{-}
+
\startsubject{Normal text}
\input tufte
\stopsubject
\startsubject{Compare hyphenation points of \showcomparepatternslegend[en,de]}
\startcomparepatterns
- \input tufte \quad (\showcomparepatternslegend)
+ \input tufte \quad (\showcomparepatternslegend)
\stopcomparepatterns
\stopsubject
diff --git a/tex/context/base/s-languages-system.lua b/tex/context/base/s-languages-system.lua
index 5afc4d403..3b422db9f 100644
--- a/tex/context/base/s-languages-system.lua
+++ b/tex/context/base/s-languages-system.lua
@@ -10,26 +10,53 @@ moduledata.languages = moduledata.languages or { }
moduledata.languages.system = moduledata.languages.system or { }
local NC, NR, HL = context.NC, context.NR, context.HL
+local sortedhash = table.sortedhash
+local registered = languages.registered
+local context = context
+local ctx_NC = context.NC
+local ctx_NR = context.NR
+local ctx_bold = context.bold
+
+function moduledata.languages.system.loadinstalled()
+ context.start()
+ for k, v in table.sortedhash(registered) do
+ context.language{ k }
+ end
+ context.stop()
+end
function moduledata.languages.system.showinstalled()
- local numbers = languages.numbers
- local registered = languages.registered
- context.starttabulate { "|r|l|l|l|l|" }
- NC() context("id")
- NC() context("tag")
- NC() context("synonyms")
- NC() context("parent")
- NC() context("loaded")
- NC() NR() HL()
- for i=1,#numbers do
- local tag = numbers[i]
- local data = registered[tag]
- NC() context(data.number)
- NC() context(tag)
- NC() context("% t",table.sortedkeys(data.synonyms))
- NC() context(data.parent)
- NC() context("%+t",table.sortedkeys(data.used))
- NC() NR()
+ --
+ context.starttabulate { "|l|r|l|l|p(7em)|r|p|" }
+ context.FL()
+ ctx_NC() ctx_bold("tag")
+ ctx_NC() ctx_bold("n")
+ ctx_NC() ctx_bold("parent")
+ ctx_NC() ctx_bold("file")
+ ctx_NC() ctx_bold("synonyms")
+ ctx_NC() ctx_bold("patterns")
+ ctx_NC() ctx_bold("characters")
+ ctx_NC() ctx_NR()
+ context.FL()
+ for k, v in sortedhash(registered) do
+ local parent = v.parent
+ local resources = v.resources
+ local patterns = resources and resources.patterns
+ ctx_NC() context(k)
+ ctx_NC() context(v.number)
+ ctx_NC() context(v.parent)
+ ctx_NC() context(v.patterns)
+ ctx_NC() for k, v in sortedhash(v.synonyms) do context("%s\\par",k) end
+ if patterns then
+ ctx_NC() context(patterns.n)
+ ctx_NC() context("% t",utf.split(patterns.characters))
+ else
+ ctx_NC()
+ ctx_NC()
+ end
+ ctx_NC() ctx_NR()
end
+ context.LL()
context.stoptabulate()
+ --
end
diff --git a/tex/context/base/s-languages-system.mkiv b/tex/context/base/s-languages-system.mkiv
index 363720374..22991f264 100644
--- a/tex/context/base/s-languages-system.mkiv
+++ b/tex/context/base/s-languages-system.mkiv
@@ -16,6 +16,7 @@
\registerctxluafile{s-languages-system}{}
\installmodulecommandluasingle \showinstalledlanguages {moduledata.languages.system.showinstalled}
+\installmodulecommandluasingle \loadinstalledlanguages {moduledata.languages.system.loadinstalled}
\stopmodule
@@ -25,6 +26,7 @@
\starttext
+ \loadinstalledlanguages
\showinstalledlanguages
\stoptext
diff --git a/tex/context/base/s-languages-words.lua b/tex/context/base/s-languages-words.lua
new file mode 100644
index 000000000..ea7aee87b
--- /dev/null
+++ b/tex/context/base/s-languages-words.lua
@@ -0,0 +1,32 @@
+if not modules then modules = { } end modules ['s-languages-words'] = {
+ version = 1.001,
+ comment = "companion to s-languages-words.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+moduledata.languages = moduledata.languages or { }
+moduledata.languages.words = moduledata.languages.words or { }
+
+function moduledata.languages.words.showwords(specification)
+ local filename = specification.filename or file.addsuffix(tex.jobname,"words")
+ if lfs.isfile(filename) then
+ local w = dofile(filename)
+ if w then
+ -- table.print(w)
+ for cname, category in table.sortedpairs(w.categories) do
+ for lname, language in table.sortedpairs(category.languages) do
+ context.bold(string.format("category: %s, language: %s, total: %s, unique: %s:",
+ cname, lname, language.total or 0, language.unique or 0)
+ )
+ for word, n in table.sortedpairs(language.list) do
+ context(" %s (%s)",word,n)
+ end
+ context.par()
+ end
+ end
+ end
+ end
+end
+
diff --git a/tex/context/base/s-languages-words.mkiv b/tex/context/base/s-languages-words.mkiv
new file mode 100644
index 000000000..4e350bf34
--- /dev/null
+++ b/tex/context/base/s-languages-words.mkiv
@@ -0,0 +1,22 @@
+%D \module
+%D [ file=s-languages-words,
+%D version=2010.10.21,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Language Environment 3,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This is used in the test suite.
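+%D
+%D A hedged usage sketch; the \type {filename} key comes from the \LUA\ companion
+%D file and defaults to \type {\jobname.words}. The module name again assumes the
+%D usual \type {s-*} lookup of \type {\usemodule}.
+%D
+%D \starttyping
+%D \usemodule[languages-words]
+%D \starttext
+%D     \showwords[filename=test.words]
+%D \stoptext
+%D \stoptyping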
+
+\startmodule[languages-words]
+
+\registerctxluafile{s-languages-words}{}
+
+\installmodulecommandluasingle \showwords {moduledata.languages.words.showwords}
+
+\stopmodule
diff --git a/tex/context/base/s-map-10.mkiv b/tex/context/base/s-map-10.mkiv
index b1218f6e2..c7541babc 100644
--- a/tex/context/base/s-map-10.mkiv
+++ b/tex/context/base/s-map-10.mkiv
@@ -172,7 +172,7 @@
\def\hfontii{\ssbf}
\def\hfontiii{\rm\it}
-\doifmodeelse{nosubsub}{%
+\doifelsemode{nosubsub}{%
\setuphead [section][%
style=\hfontii,
before={\blank[line]},
@@ -201,7 +201,7 @@
before={\blank[halfline]}%
]}
-\doifmodeelse{nosubsub}{%
+\doifelsemode{nosubsub}{%
\setuphead [subject][%
style=\hfontii,
before={\blank[halfline]},
@@ -348,10 +348,10 @@
#1]%
\doifnothing{\MapsPeriod}{%
\ifnum \normalmonth<6 \gdef\MapsPeriod{VOORJAAR}\else \gdef\MapsPeriod{NAJAAR}\fi}
- \doifinstringelse{oorjaar}{\MapsPeriod}{\gdef\MapsPeriod{VOORJAAR}}{}%
- \doifinstringelse{pring}{\MapsPeriod}{\gdef\MapsPeriod{VOORJAAR}}{}%
- \doifinstringelse{ajaar}{\MapsPeriod}{\gdef\MapsPeriod{NAJAAR}}{}%
- \doifinstringelse{utumn}{\MapsPeriod}{\gdef\MapsPeriod{NAJAAR}}{}%
+ \doifelseinstring{oorjaar}{\MapsPeriod}{\gdef\MapsPeriod{VOORJAAR}}{}%
+ \doifelseinstring{pring}{\MapsPeriod}{\gdef\MapsPeriod{VOORJAAR}}{}%
+ \doifelseinstring{ajaar}{\MapsPeriod}{\gdef\MapsPeriod{NAJAAR}}{}%
+ \doifelseinstring{utumn}{\MapsPeriod}{\gdef\MapsPeriod{NAJAAR}}{}%
\doifnothing{\MapsYear}{\gdef\MapsYear{\the\year}}%
\doifnothing{\MapsNumber}{%
\ifnum \normalmonth<6
@@ -441,9 +441,9 @@
%%%%%%%%%%%
-\doifmodeelse{onecolumn}{%
+\doifelsemode{onecolumn}{%
\setuplayout[width=340pt]
- \doifmodeelse{asym}{% one col, asymmetric
+ \doifelsemode{asym}{% one col, asymmetric
\setuplayout[backspace=187.3pt]%
\setuptyping [widetyping][oddmargin=-117pt]
\setuppagenumbering [alternative={singlesided,doublesided}]
diff --git a/tex/context/base/s-math-characters.lua b/tex/context/base/s-math-characters.lua
index b0a79fcb6..8ff3a8660 100644
--- a/tex/context/base/s-math-characters.lua
+++ b/tex/context/base/s-math-characters.lua
@@ -16,23 +16,26 @@ local lower = string.lower
local utfchar = utf.char
local round = math.round
-local fontdata = fonts.hashes.identifiers
-local chardata = characters.data
+local fontdata = fonts.hashes.identifiers
+local chardata = characters.data
+local blocks = characters.blocks
local no_description = "no description, private to font"
-local limited = true
-local fillinthegaps = true
-local upperlimit = 0x0007F
-local upperlimit = 0xF0000
+local limited = true
+local fillinthegaps = true
+local upperlimit = 0x0007F
+local upperlimit = 0xF0000
-local f_unicode = string.formatters["%U"]
-local f_slot = string.formatters["%s/%0X"]
+local f_unicode = string.formatters["%U"]
+local f_slot = string.formatters["%s/%0X"]
function moduledata.math.characters.showlist(specification)
- specification = interfaces.checkedspecification(specification)
- local id = specification.number -- or specification.id
- local list = specification.list
+ specification = interfaces.checkedspecification(specification)
+ local id = specification.number -- or specification.id
+ local list = specification.list
+ local showvirtual = specification.virtual == "all"
+ local check = specification.check == "yes"
if not id then
id = font.current()
end
@@ -70,137 +73,170 @@ function moduledata.math.characters.showlist(specification)
names[k] = (name and file.basename(name)) or id
end
end
- context.showmathcharactersstart()
- for _, unicode in next, sorted do
- if not limited or unicode < upperlimit then
- local code = gaps[unicode] or unicode
- local char = characters[code]
- local desc = descriptions[code]
- local info = chardata[code]
- if char then
- local next_sizes = char.next
- local v_variants = char.vert_variants
- local h_variants = char.horiz_variants
- local commands = char.commands
- local slookups = desc and desc.slookups
- local mlookups = desc and desc.mlookups
- local mathclass = info.mathclass
- local mathspec = info.mathspec
- local mathsymbol = info.mathsymbol
- local description = info.description or no_description
- context.showmathcharactersstartentry()
- context.showmathcharactersreference(f_unicode(unicode))
- context.showmathcharactersentryhexdectit(f_unicode(code),code,lower(description))
- context.showmathcharactersentrywdhtdpic(round(char.width or 0),round(char.height or 0),round(char.depth or 0),round(char.italic or 0))
- if virtual and commands then
- local t = { }
- for i=1,#commands do
- local ci = commands[i]
- if ci[1] == "slot" then
- local fnt, idx = ci[2], ci[3]
- t[#t+1] = f_slot(names[fnt] or fnt,idx)
+ if check then
+ for k, v in table.sortedhash(blocks) do
+ if v.math then
+ local first = v.first
+ local last = v.last
+ local f, l = 0, 0
+ if first and last then
+ for unicode=first,last do
+ local code = gaps[unicode] or unicode
+ local char = characters[code]
+ if char and not (char.commands and not showvirtual) then
+ f = unicode
+ break
end
end
- if #t > 0 then
- context.showmathcharactersentryresource(concat(t,", "))
- end
- end
- if mathclass or mathspec then
- context.showmathcharactersstartentryclassspec()
- if mathclass then
- context.showmathcharactersentryclassname(mathclass,info.mathname or "no name")
- end
- if mathspec then
- for i=1,#mathspec do
- local mi = mathspec[i]
- context.showmathcharactersentryclassname(mi.class,mi.name or "no name")
+ for unicode=last,first,-1 do
+ local code = gaps[unicode] or unicode
+ local char = characters[code]
+ if char and not (char.commands and not showvirtual) then
+ l = unicode
+ break
end
end
- context.showmathcharactersstopentryclassspec()
+ context.showmathcharacterssetrange(k,f,l)
end
- if mathsymbol then
- context.showmathcharactersentrysymbol(f_unicode(mathsymbol),mathsymbol)
- end
- if next_sizes then
- local n, done = 0, { }
- context.showmathcharactersstartnext()
- while next_sizes do
- n = n + 1
- if done[next_sizes] then
- context.showmathcharactersnextcycle(n)
- break
- else
- done[next_sizes] = true
- context.showmathcharactersnextentry(n,f_unicode(next_sizes),next_sizes)
- next_sizes = characters[next_sizes]
- v_variants = next_sizes.vert_variants or v_variants
- h_variants = next_sizes.horiz_variants or h_variants
- if next_sizes then
- next_sizes = next_sizes.next
+ end
+ end
+ else
+ context.showmathcharactersstart()
+ for _, unicode in next, sorted do
+ if not limited or unicode < upperlimit then
+ local code = gaps[unicode] or unicode
+ local char = characters[code]
+ local desc = descriptions[code]
+ local info = chardata[code]
+ if char then
+ local commands = char.commands
+ if commands and not showvirtual then
+ -- skip
+ else
+ local next_sizes = char.next
+ local v_variants = char.vert_variants
+ local h_variants = char.horiz_variants
+ local slookups = desc and desc.slookups
+ local mlookups = desc and desc.mlookups
+ local mathclass = info.mathclass
+ local mathspec = info.mathspec
+ local mathsymbol = info.mathsymbol
+ local description = info.description or no_description
+ context.showmathcharactersstartentry()
+ context.showmathcharactersreference(f_unicode(unicode))
+ context.showmathcharactersentryhexdectit(f_unicode(code),code,lower(description))
+ context.showmathcharactersentrywdhtdpic(round(char.width or 0),round(char.height or 0),round(char.depth or 0),round(char.italic or 0))
+ if virtual and commands then
+ local t = { }
+ for i=1,#commands do
+ local ci = commands[i]
+ if ci[1] == "slot" then
+ local fnt, idx = ci[2], ci[3]
+ t[#t+1] = f_slot(names[fnt] or fnt,idx)
+ end
+ end
+ if #t > 0 then
+ context.showmathcharactersentryresource(concat(t,", "))
end
end
- end
- context.showmathcharactersstopnext()
- if h_variants or v_variants then
- context.showmathcharactersbetweennextandvariants()
- end
- end
- if h_variants then
- context.showmathcharactersstarthvariants()
- for i=1,#h_variants do -- we might go top-down in the original
- local vi = h_variants[i]
- context.showmathcharactershvariantsentry(i,f_unicode(vi.glyph),vi.glyph)
- end
- context.showmathcharactersstophvariants()
- elseif v_variants then
- context.showmathcharactersstartvvariants()
- for i=1,#v_variants do
- local vi = v_variants[#v_variants-i+1]
- context.showmathcharactersvvariantsentry(i,f_unicode(vi.glyph),vi.glyph)
- end
- context.showmathcharactersstopvvariants()
- end
- if slookups or mlookups then
- local variants = { }
- if slookups then
- for lookupname, lookupdata in next, slookups do
- local lookuptype = lookuptypes[lookupname]
- if lookuptype == "substitution" then
- variants[lookupdata] = "sub"
- elseif lookuptype == "alternate" then
- for i=1,#lookupdata do
- variants[lookupdata[i]] = "alt"
+ if mathclass or mathspec then
+ context.showmathcharactersstartentryclassspec()
+ if mathclass then
+ context.showmathcharactersentryclassname(mathclass,info.mathname or "no name")
+ end
+ if mathspec then
+ for i=1,#mathspec do
+ local mi = mathspec[i]
+ context.showmathcharactersentryclassname(mi.class,mi.name or "no name")
end
end
+ context.showmathcharactersstopentryclassspec()
end
- end
- if mlookups then
- for lookupname, lookuplist in next, mlookups do
- local lookuptype = lookuptypes[lookupname]
- for i=1,#lookuplist do
- local lookupdata = lookuplist[i]
- local lookuptype = lookuptypes[lookupname]
- if lookuptype == "substitution" then
- variants[lookupdata] = "sub"
- elseif lookuptype == "alternate" then
- for i=1,#lookupdata do
- variants[lookupdata[i]] = "alt"
+ if mathsymbol then
+ context.showmathcharactersentrysymbol(f_unicode(mathsymbol),mathsymbol)
+ end
+ if next_sizes then
+ local n, done = 0, { }
+ context.showmathcharactersstartnext()
+ while next_sizes do
+ n = n + 1
+ if done[next_sizes] then
+ context.showmathcharactersnextcycle(n)
+ break
+ else
+ done[next_sizes] = true
+ context.showmathcharactersnextentry(n,f_unicode(next_sizes),next_sizes)
+ next_sizes = characters[next_sizes]
+ v_variants = next_sizes.vert_variants or v_variants
+ h_variants = next_sizes.horiz_variants or h_variants
+ if next_sizes then
+ next_sizes = next_sizes.next
end
end
end
+ context.showmathcharactersstopnext()
+ if h_variants or v_variants then
+ context.showmathcharactersbetweennextandvariants()
+ end
end
+ if h_variants then
+ context.showmathcharactersstarthvariants()
+ for i=1,#h_variants do -- we might go top-down in the original
+ local vi = h_variants[i]
+ context.showmathcharactershvariantsentry(i,f_unicode(vi.glyph),vi.glyph)
+ end
+ context.showmathcharactersstophvariants()
+ elseif v_variants then
+ context.showmathcharactersstartvvariants()
+ for i=1,#v_variants do
+ local vi = v_variants[#v_variants-i+1]
+ context.showmathcharactersvvariantsentry(i,f_unicode(vi.glyph),vi.glyph)
+ end
+ context.showmathcharactersstopvvariants()
+ end
+ if slookups or mlookups then
+ local variants = { }
+ if slookups then
+ for lookupname, lookupdata in next, slookups do
+ local lookuptype = lookuptypes[lookupname]
+ if lookuptype == "substitution" then
+ variants[lookupdata] = "sub"
+ elseif lookuptype == "alternate" then
+ for i=1,#lookupdata do
+ variants[lookupdata[i]] = "alt"
+ end
+ end
+ end
+ end
+ if mlookups then
+ for lookupname, lookuplist in next, mlookups do
+ local lookuptype = lookuptypes[lookupname]
+ for i=1,#lookuplist do
+ local lookupdata = lookuplist[i]
+ local lookuptype = lookuptypes[lookupname]
+ if lookuptype == "substitution" then
+ variants[lookupdata] = "sub"
+ elseif lookuptype == "alternate" then
+ for i=1,#lookupdata do
+ variants[lookupdata[i]] = "alt"
+ end
+ end
+ end
+ end
+ end
+ context.showmathcharactersstartlookupvariants()
+ local i = 0
+ for variant, lookuptype in table.sortedpairs(variants) do
+ i = i + 1
+ context.showmathcharacterslookupvariant(i,f_unicode(variant),variant,lookuptype)
+ end
+ context.showmathcharactersstoplookupvariants()
+ end
+ context.showmathcharactersstopentry()
end
- context.showmathcharactersstartlookupvariants()
- local i = 0
- for variant, lookuptype in table.sortedpairs(variants) do
- i = i + 1
- context.showmathcharacterslookupvariant(i,f_unicode(variant),variant,lookuptype)
- end
- context.showmathcharactersstoplookupvariants()
end
- context.showmathcharactersstopentry()
end
end
+ context.showmathcharactersstop()
end
- context.showmathcharactersstop()
end
diff --git a/tex/context/base/s-math-characters.mkiv b/tex/context/base/s-math-characters.mkiv
index 1c4159544..3b273cb6c 100644
--- a/tex/context/base/s-math-characters.mkiv
+++ b/tex/context/base/s-math-characters.mkiv
@@ -46,6 +46,7 @@
\let\showmathcharactersstartlookupvariants \relax
\let\showmathcharacterslookupvariant \gobblefourarguments
\let\showmathcharactersstoplookupvariants \relax
+ \let\showmathcharacterssetrange \gobblethreearguments
\stopsetups
@@ -126,15 +127,23 @@
\def\module_math_characters_show[#1]%
{\begingroup
- \getdummyparameters[\c!bodyfont=,\c!list=,\c!alternative=default,#1]%
+ \getdummyparameters
+ [\c!bodyfont=,
+ \c!list=,
+ \c!check=,
+ \c!alternative=default,
+ \c!option=\v!all,
+ #1]%
\directsetup{s-math-characters:\dummyparameter\c!alternative}%
\doifelsenothing{\dummyparameter\c!bodyfont}
{\definedfont[MathRoman*math-text]}
{\definedfont[\dummyparameter\c!bodyfont]}%
\dontcomplain
\ctxlua{moduledata.math.characters.showlist {
- number = false,
- list = "\dummyparameter\c!list",
+ number = false,
+ check = "\dummyparameter\c!check",
+ list = "\dummyparameter\c!list",
+ option = "\dummyparameter\c!option",
}}%
\endgroup}
diff --git a/tex/context/base/s-math-coverage.lua b/tex/context/base/s-math-coverage.lua
index a74e24450..3c6080dc3 100644
--- a/tex/context/base/s-math-coverage.lua
+++ b/tex/context/base/s-math-coverage.lua
@@ -6,135 +6,101 @@ if not modules then modules = { } end modules ['s-math-coverage'] = {
license = "see context related readme files"
}
-moduledata.math = moduledata.math or { }
-moduledata.math.coverage = moduledata.math.coverage or { }
-
local utfchar, utfbyte = utf.char, utf.byte
local formatters, lower = string.formatters, string.lower
local concat = table.concat
+local sortedhash = table.sortedhash
-local context = context
-local NC, NR, HL = context.NC, context.NR, context.HL
-local char, getglyph, bold = context.char, context.getglyph, context.bold
+moduledata.math = moduledata.math or { }
+moduledata.math.coverage = moduledata.math.coverage or { }
-local ucgreek = {
- 0x0391, 0x0392, 0x0393, 0x0394, 0x0395,
- 0x0396, 0x0397, 0x0398, 0x0399, 0x039A,
- 0x039B, 0x039C, 0x039D, 0x039E, 0x039F,
- 0x03A0, 0x03A1, 0x03A3, 0x03A4, 0x03A5,
- 0x03A6, 0x03A7, 0x03A8, 0x03A9
-}
+local context = context
-local lcgreek = {
- 0x03B1, 0x03B2, 0x03B3, 0x03B4, 0x03B5,
- 0x03B6, 0x03B7, 0x03B8, 0x03B9, 0x03BA,
- 0x03BB, 0x03BC, 0x03BD, 0x03BE, 0x03BF,
- 0x03C0, 0x03C1, 0x03C2, 0x03C3, 0x03C4,
- 0x03C5, 0x03C6, 0x03C7, 0x03C8, 0x03C9,
- 0x03D1, 0x03D5, 0x03D6, 0x03F0, 0x03F1,
- 0x03F4, 0x03F5
-}
+local ctx_NC = context.NC
+local ctx_NR = context.NR
+local ctx_HL = context.HL
-local ucletters = {
- 0x00041, 0x00042, 0x00043, 0x00044, 0x00045,
- 0x00046, 0x00047, 0x00048, 0x00049, 0x0004A,
- 0x0004B, 0x0004C, 0x0004D, 0x0004E, 0x0004F,
- 0x00050, 0x00051, 0x00052, 0x00053, 0x00054,
- 0x00055, 0x00056, 0x00057, 0x00058, 0x00059,
- 0x0005A,
-}
+local ctx_startmixedcolumns = context.startmixedcolumns
+local ctx_stopmixedcolumns = context.stopmixedcolumns
+local ctx_setupalign = context.setupalign
+local ctx_starttabulate = context.starttabulate
+local ctx_stoptabulate = context.stoptabulate
+local ctx_rawmathematics = context.formatted.rawmathematics
+local ctx_mathematics = context.formatted.mathematics
+local ctx_startimath = context.startimath
+local ctx_stopimath = context.stopimath
+local ctx_setmathattribute = context.setmathattribute
+local ctx_underbar = context.underbar
+local ctx_getglyph = context.getglyph
-local lcletters = {
- 0x00061, 0x00062, 0x00063, 0x00064, 0x00065,
- 0x00066, 0x00067, 0x00068, 0x00069, 0x0006A,
- 0x0006B, 0x0006C, 0x0006D, 0x0006E, 0x0006F,
- 0x00070, 0x00071, 0x00072, 0x00073, 0x00074,
- 0x00075, 0x00076, 0x00077, 0x00078, 0x00079,
- 0x0007A,
-}
+local styles = mathematics.styles
+local alternatives = mathematics.alternatives
+local charactersets = mathematics.charactersets
-local digits = {
- 0x00030, 0x00031, 0x00032, 0x00033, 0x00034,
- 0x00035, 0x00036, 0x00037, 0x00038, 0x00039,
-}
+local getboth = mathematics.getboth
+local remapalphabets = mathematics.remapalphabets
-local styles = {
- "regular", "sansserif", "monospaced", "fraktur", "script", "blackboard"
-}
-
-local alternatives = {
- "normal", "bold", "italic", "bolditalic"
-}
-
-local alphabets = {
- ucletters, lcletters, ucgreek, lcgreek, digits,
-}
-
-local getboth = mathematics.getboth
-local remapalphabets = mathematics.remapalphabets
-
-local chardata = characters.data
-local superscripts = characters.superscripts
-local subscripts = characters.subscripts
+local chardata = characters.data
+local superscripts = characters.superscripts
+local subscripts = characters.subscripts
context.writestatus("math coverage","underline: not remapped")
function moduledata.math.coverage.showalphabets()
- context.starttabulate { "|lT|l|Tl|" }
+ ctx_starttabulate { "|lT|l|Tl|" }
for i=1,#styles do
local style = styles[i]
for i=1,#alternatives do
local alternative = alternatives[i]
- for i=1,#alphabets do
- local alphabet = alphabets[i]
- NC()
+ for _, alphabet in sortedhash(charactersets) do
+ ctx_NC()
if i == 1 then
context("%s %s",style,alternative)
end
- NC()
- context.startimath()
- context.setmathattribute(style,alternative)
+ ctx_NC()
+ ctx_startimath()
+ ctx_setmathattribute(style,alternative)
for i=1,#alphabet do
local letter = alphabet[i]
local id = getboth(style,alternative)
local unicode = remapalphabets(letter,id)
if not unicode then
- context.underbar(utfchar(letter))
+ ctx_underbar(utfchar(letter))
elseif unicode == letter then
context(utfchar(unicode))
else
context(utfchar(unicode))
end
end
- context.stopimath()
- NC()
+ ctx_stopimath()
+ ctx_NC()
local first = alphabet[1]
local last = alphabet[#alphabet]
local id = getboth(style,alternative)
local f_unicode = remapalphabets(first,id) or utfbyte(first)
local l_unicode = remapalphabets(last,id) or utfbyte(last)
context("%05X - %05X",f_unicode,l_unicode)
- NC()
- NR()
+ ctx_NC()
+ ctx_NR()
end
end
end
- context.stoptabulate()
+ ctx_stoptabulate()
end
function moduledata.math.coverage.showcharacters()
- context.startcolumns()
- context.setupalign { "nothyphenated" }
- context.starttabulate { "|T|i2|Tpl|" }
- for u, d in table.sortedpairs(chardata) do
+ ctx_startmixedcolumns { balance = "yes" }
+ ctx_setupalign { "nothyphenated" }
+ ctx_starttabulate { "|T|i2|Tpl|" }
+ for u, d in sortedhash(chardata) do
local mathclass = d.mathclass
local mathspec = d.mathspec
if mathclass or mathspec then
- NC()
+ ctx_NC()
context("%05X",u)
- NC()
- getglyph("MathRoman",u)
- NC()
+ ctx_NC()
+ ctx_getglyph("MathRoman",u)
+ ctx_NC()
if mathspec then
local t = { }
for i=1,#mathspec do
@@ -145,38 +111,87 @@ function moduledata.math.coverage.showcharacters()
else
context(mathclass)
end
- NC()
- NR()
+ ctx_NC()
+ ctx_NR()
end
end
- context.stoptabulate()
- context.stopcolumns()
+ ctx_stoptabulate()
+ ctx_stopmixedcolumns()
end
-- This is a somewhat tricky table as we need to bypass the math machinery.
function moduledata.math.coverage.showscripts()
- context.starttabulate { "|cT|c|cT|c|c|c|l|" }
- for k, v in table.sortedpairs(table.merged(superscripts,subscripts)) do
+ ctx_starttabulate { "|cT|c|cT|c|c|c|l|" }
+ for k, v in sortedhash(table.merged(superscripts,subscripts)) do
local ck = utfchar(k)
local cv = utfchar(v)
local ss = superscripts[k] and "^" or "_"
- NC()
- context("%05X",k)
- NC()
- context(ck)
- NC()
- context("%05X",v)
- NC()
- context(cv)
- NC()
- context.formatted.rawmathematics("x%s = x%s%s",ck,ss,cv)
- NC()
- context.formatted.mathematics("x%s = x%s%s",ck,ss,cv)
- NC()
- context(lower(chardata[k].description))
- NC()
- NR()
+ ctx_NC() context("%05X",k)
+ ctx_NC() context(ck)
+ ctx_NC() context("%05X",v)
+ ctx_NC() context(cv)
+ ctx_NC() ctx_rawmathematics("x%s = x%s%s",ck,ss,cv)
+ ctx_NC() ctx_mathematics("x%s = x%s%s",ck,ss,cv)
+ ctx_NC() context(lower(chardata[k].description))
+ ctx_NC() ctx_NR()
+ end
+ ctx_stoptabulate()
+end
+
+-- Handy too.
+
+function moduledata.math.coverage.showbold()
+ ctx_starttabulate { "|lT|cm|lT|cm|lT|" }
+ for k, v in sortedhash(mathematics.boldmap) do
+ ctx_NC() context("%U",k)
+ ctx_NC() context("%c",k)
+ ctx_NC() context("%U",v)
+ ctx_NC() context("%c",v)
+ ctx_NC() context(chardata[k].description)
+ ctx_NC() ctx_NR()
end
- context.stoptabulate()
+ ctx_stoptabulate()
end
+
+-- function moduledata.math.coverage.showentities()
+-- ctx_startmixedcolumns { balance = "yes" }
+-- ctx_starttabulate { "|Tl|c|Tl|" }
+-- for k, v in sortedhash(characters.entities) do
+-- local b = utf.byte(v)
+-- local d = chardata[b]
+-- local m = d.mathname
+-- local c = d.contextname
+-- local s = ((m and "\\"..m) or (c and "\\".. c) or v) .. "{}{}{}"
+-- ctx_NC()
+-- context("%U",b)
+-- ctx_NC()
+-- ctx_mathematics(s)
+-- ctx_NC()
+-- context(k)
+-- ctx_NC()
+-- ctx_NR()
+-- end
+-- ctx_stoptabulate()
+-- ctx_stopmixedcolumns()
+-- end
+
+function moduledata.math.coverage.showentities()
+ ctx_startmixedcolumns { balance = "yes" }
+ ctx_starttabulate { "|T||T|T|" }
+ for k, v in sortedhash(characters.entities) do
+ local d = chardata[v]
+ if d then
+ local m = d.mathclass or d.mathspec
+ local u = d.unicodeslot
+ ctx_NC() context(m and "m" or "t")
+ ctx_NC() ctx_getglyph("MathRoman",u)
+ ctx_NC() context("%05X",u)
+ ctx_NC() context(k)
+ ctx_NC() ctx_NR()
+ end
+ end
+ ctx_stoptabulate()
+ ctx_stopmixedcolumns()
+end
+
diff --git a/tex/context/base/s-math-coverage.mkiv b/tex/context/base/s-math-coverage.mkiv
index d68ffe587..e318c9eff 100644
--- a/tex/context/base/s-math-coverage.mkiv
+++ b/tex/context/base/s-math-coverage.mkiv
@@ -15,9 +15,11 @@
\registerctxluafile{s-math-coverage}{}
-\installmodulecommandluasingle \showmathalphabets {moduledata.math.coverage.showalphabets}
-\installmodulecommandluasingle \showmathcharacters {moduledata.math.coverage.showcharacters}
-\installmodulecommandluasingle \showmathscripts {moduledata.math.coverage.showscripts}
+\installmodulecommandluasingle \showmathalphabets {moduledata.math.coverage.showalphabets}
+\installmodulecommandluasingle \showmathcharacters{moduledata.math.coverage.showcharacters}
+\installmodulecommandluasingle \showmathscripts {moduledata.math.coverage.showscripts}
+\installmodulecommandluasingle \showmathbold {moduledata.math.coverage.showbold}
+\installmodulecommandluasingle \showmathentities {moduledata.math.coverage.showentities}
\stopmodule
@@ -30,5 +32,7 @@
\showmathalphabets \page
\showmathcharacters \page
\showmathscripts \page
+ \showmathbold \page
+ \showmathentities \page
\stoptext
diff --git a/tex/context/base/s-math-repertoire.mkiv b/tex/context/base/s-math-repertoire.mkiv
index a66d7fc6d..230eb513e 100644
--- a/tex/context/base/s-math-repertoire.mkiv
+++ b/tex/context/base/s-math-repertoire.mkiv
@@ -39,7 +39,8 @@
% \setuplayout
% [page]
-\setuppapersize[HD+]
+\setuppapersize
+ [HD+]
\setuplayout
[backspace=0pt,
@@ -102,6 +103,16 @@
color=textcolor,
contrastcolor=nonecolor]
+\def\showmathcharacterssetrange#1#2#3%
+ {\writestatus{range}{#1: \unihex{#2} - \unihex{#3}}%
+ \ifcase#2\relax
+ \definereference[#1][notpresent]%
+ \else\ifcase#3\relax
+ \definereference[#1][notpresent]%
+ \else
+ \normalexpanded{\definereference[#1][\unihex{#2}]}%
+ \fi\fi}
+
\startinteractionmenu[bottom]
\startgot [firstpage] first \stopgot \quad
\startgot [deltapage(-100)] -100 \stopgot \quad
@@ -159,6 +170,22 @@
\startgot [U+1D7AA] grk ss bolditalic \stopgot
\stopinteractionmenu
+% \startinteractionmenu[symbols]
+% \startgot [U+00030] dig normal \stopgot \quad
+% \startgot [U+1D7CE] dig bold \stopgot \quad
+% \startgot [U+1D7D8] dig doublestruck \stopgot \quad
+% \startgot [U+1D7E2] dig ss normal \stopgot \quad
+% \startgot [U+1D7EC] dig ss bold \stopgot \quad
+% \startgot [U+1D7F6] dig monospace \stopgot \quad
+% \startgot [U+02200] operators \stopgot \quad
+% \startgot [U+02701] symbols a \stopgot \quad
+% \startgot [U+02901] symbols b \stopgot \quad
+% \startgot [U+02A00] supplemental \stopgot \quad
+% \startgot [U+027F0] arrows a \stopgot \quad
+% \startgot [U+02900] arrows b \stopgot \quad
+% \startgot [U+1F800] arrows c \stopgot
+% \stopinteractionmenu
+
\startinteractionmenu[symbols]
\startgot [U+00030] dig normal \stopgot \quad
\startgot [U+1D7CE] dig bold \stopgot \quad
@@ -167,9 +194,13 @@
\startgot [U+1D7EC] dig ss bold \stopgot \quad
\startgot [U+1D7F6] dig monospace \stopgot \quad
\startgot [U+02200] operators \stopgot \quad
- \startgot [U+02701] symbols a \stopgot \quad
- \startgot [U+02901] symbols b \stopgot \quad
- \startgot [U+02A00] supplemental \stopgot
+ \startgot [miscellaneousmathematicalsymbolsa] symbols a \stopgot \quad
+ \startgot [miscellaneousmathematicalsymbolsb] symbols b \stopgot \quad
+ \startgot [supplementalmathematicaloperators] supplemental \stopgot \quad
+ \startgot [supplementalarrowsa] arrows a \stopgot \quad
+ \startgot [supplementalarrowsb] arrows b \stopgot \quad
+ \startgot [supplementalarrowsc] arrows c \stopgot \quad
+ \removeunwantedspaces
\stopinteractionmenu
\defineframed
@@ -227,7 +258,7 @@
\showmathcharactersmth{10}{#1}%
\endgroup
\vfilll
- \doifmodeelse{crosslink}
+ \doifelsemode{crosslink}
{\goto{\strut\textcolor\showmathcharacterstxt{#2}}[#2::#1]}%
{\strut\textcolor\showmathcharacterstxt{#2}}}%
\hskip1ex}
@@ -404,13 +435,17 @@
% main
+% this is a one-run style so we can forget about an alternative
+% just assume that the previous definitions are global
+
\unprotect
\unexpanded\def\showmathfontrepertoire
{\dosingleempty\module_math_repertoire_show}
-\def\module_math_repertoire_show[#1]% % this is a one-run style so we can forget about an alternative
- {\showmathfontcharacters[\c!alternative=,#1]} % just assume that the previous definitions are global
+\def\module_math_repertoire_show[#1]%
+ {\showmathfontcharacters[alternative=,option=,check=yes,#1]
+ \showmathfontcharacters[alternative=,option=,#1]}
\protect
@@ -418,13 +453,13 @@
\continueifinputfile{s-math-repertoire.mkiv}
-\showmathcharacterssetbodyfonts{lucidanova,cambria,xits,modern,pagella,termes,bonum}
+\showmathcharacterssetbodyfonts{lucidaot,cambria,xits,modern,pagella,termes,bonum,schola,dejavu}
\starttext
\doifelse {\getdocumentargument{bodyfont}} {} {
- \setupbodyfont[cambria, 12pt]
+ % \setupbodyfont[cambria, 12pt]
% \setupbodyfont[modern, 12pt]
% \setupbodyfont[lmvirtual, 12pt]
% \setupbodyfont[pxvirtual, 12pt]
@@ -434,9 +469,11 @@
% \setupbodyfont[stix, 12pt]
% \setupbodyfont[xits, 12pt]
% \setupbodyfont[lucida, 12pt]
- % \setupbodyfont[lucidanova,12pt]
+ % \setupbodyfont[lucidaot, 12pt]
% \setupbodyfont[pagella, 12pt]
% \setupbodyfont[bonum, 12pt]
+ % \setupbodyfont[schola, 12pt]
+ \setupbodyfont[dejavu, 12pt]
} {
diff --git a/tex/context/base/s-pre-17.mkiv b/tex/context/base/s-pre-17.mkiv
index 9505faa6b..9c46b4ed7 100644
--- a/tex/context/base/s-pre-17.mkiv
+++ b/tex/context/base/s-pre-17.mkiv
@@ -194,7 +194,7 @@
\egroup
\setbox\scratchbox\vbox\bgroup
\vskip100pt
- \doifmodeelse {SpreadPage} {
+ \doifelsemode {SpreadPage} {
\hbox spread 200pt
} {
\hbox to \wd\scratchbox
diff --git a/tex/context/base/s-references-show.mkiv b/tex/context/base/s-references-show.mkiv
new file mode 100644
index 000000000..72cccce54
--- /dev/null
+++ b/tex/context/base/s-references-show.mkiv
@@ -0,0 +1,132 @@
+%D \module
+%D [ file=s-references-show.mkiv,
+%D version=2015.04.13,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Reference Checking,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\startmodule[references-show]
+
+\unprotect
+
+% \enabletrackers[nodes.references,nodes.destinations] % also shows areas
+
+\enabletrackers[nodes.references.show,nodes.destinations.show]
+
+\enablehiddenbackground % trick
+
+\edef\hiddenbackgroundlist{\hiddenbackgroundlist,trace-references}
+
+\defineoverlay
+ [trace-references]
+ [\directsetup{trace:references:onpage}]
+
+\startluacode
+ local pagelist = structures.references.tracedpages
+
+ function commands.getreferencesonpage(n)
+ n = tonumber(n)
+ if n then
+ local pagedata = pagelist[n]
+ if pagedata then
+ context("%s references",#pagedata)
+ context.blank()
+ for i=1,#pagedata do
+ local details = pagedata[i]
+ local prefix = details[1]
+ local reference = details[2]
+ -- local internal = details[3]
+ -- context("%04i = %s : %s",internal,prefix == "" and "-" or prefix,reference)
+ context("%s : %s",prefix == "" and "-" or prefix,reference)
+ context.par()
+ end
+ else
+ context("no references")
+ end
+ else
+ context("no valid page")
+ end
+ end
+
+\stopluacode
+
+\definecolor
+ [trace:references:onpage]
+ [b=.5,a=1,t=.25]
+
+\defineframed
+ [trace:references:onpage]
+ [\c!offset=2\exheight,
+ %\c!foregroundstyle=\infofont,
+ \c!frame=\v!off,
+ \c!background=\v!color,
+ \c!backgroundcolor=trace:references:onpage,
+ \c!align=\v!normal]
+
+\startsetups trace:references:onpage
+ \vbox to \vsize \bgroup
+ \infofont
+ \vskip\dimexpr-\topspace-\headerheight+2\exheight\relax
+ \hbox to \hsize \bgroup
+ \doifoddpageelse\hss{\hskip\dimexpr-\cutspace+2\exheight\relax}%
+ \directlocalframed [
+ trace:references:onpage
+ ] {
+ \ctxcommand{getreferencesonpage(\the\realpageno)}
+ }
+ \doifoddpageelse{\hskip\dimexpr-\cutspace+2\exheight\relax}\hss
+ \egroup
+ \vss
+ \egroup
+\stopsetups
+
+\protect
+
+\continueifinputfile{s-references-show.mkiv}
+
+\usemodule[art-01]
+
+\setupinteraction
+ [state=start]
+
+\setuppagenumbering
+ [alternative=doublesided]
+
+\starttext
+
+\title {Contents}
+
+\placelist[chapter]
+
+\setupreferenceprefix[zero]
+
+\chapter[crap]{foo}
+
+\setupreferenceprefix[one]
+
+test \pagereference[whatever]
+
+\dorecurse{5}{
+ \placefigure
+ [here][bar 1.#1]
+ {xx}{\framed{xx #1}}
+}
+
+\setupreferenceprefix[two]
+
+\dorecurse{5}{
+ \placefigure
+ [here][bar 2.#1]
+ {xx}{\framed{xx #1}}
+}
+
+\in{checked}[bar 1.1]
+\in{checked}[bar 2.1]
+
+\stoptext
diff --git a/tex/context/base/s-structure-sections.mkiv b/tex/context/base/s-structure-sections.mkiv
new file mode 100644
index 000000000..daaab5abc
--- /dev/null
+++ b/tex/context/base/s-structure-sections.mkiv
@@ -0,0 +1,80 @@
+%D \module
+%D [ file=s-structure-sections,
+%D version=2015.02.02,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Show Structure Sections,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+\startluacode
+ local context = context
+ local ctx_NC, ctx_NR = context.NC, context.NR
+ local ctx_bold = context.bold
+
+ structures.tracers = structures.tracers or { }
+
+ function structures.tracers.showsections()
+
+ local list = structures.sections.registered
+ local keys = table.keys(list)
+ table.sort(keys,function(a,b)
+ local la, lb = list[a].level, list[b].level
+ if la == lb then
+ return a < b
+ else
+ return la < lb
+ end
+ end)
+ context.start()
+ context.switchtobodyfont { "tt" }
+ context.starttabulate { "||c||||" }
+ context.FL()
+ ctx_NC() ctx_bold("name")
+ ctx_NC() ctx_bold("level")
+ ctx_NC() ctx_bold("parent")
+ ctx_NC() ctx_bold("section")
+ ctx_NC() ctx_bold("coupling")
+ ctx_NC() context.NR()
+ context.ML()
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ ctx_NC() ctx_bold(k)
+ ctx_NC() context(v.level)
+ ctx_NC() context(v.parent)
+ ctx_NC() context(v.section)
+ ctx_NC() context(v.coupling)
+ ctx_NC() context.NR()
+ end
+ context.LL()
+ context.stoptabulate()
+ context.stop()
+
+ end
+\stopluacode
+
+
+\starttexdefinition showstructuresections
+
+ % no settings yet
+
+ \ctxlua{structures.tracers.showsections()}
+
+\stoptexdefinition
+
+\protect
+
+\continueifinputfile{s-structure-sections.mkiv}
+
+\starttext
+
+ \showstructuresections
+
+\stoptext
diff --git a/tex/context/base/s-syn-01.tex b/tex/context/base/s-syn-01.tex
deleted file mode 100644
index 01c8f6653..000000000
--- a/tex/context/base/s-syn-01.tex
+++ /dev/null
@@ -1,54 +0,0 @@
-%D \module
-%D [ file=s-syn-01,
-%D version=0000.00.00,
-%D title=\CONTEXT\ Style File,
-%D subtitle=Preliminary Syntax Stuff,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-%D This is needed for the \METAFUN\ manual (this module was
-%D called \type {p-syn-01} on my machine).
-
-\unprotect
-
-\def\Indent #1{\ifvmode\noindent\hbox to 2em{\hss#1}\else#1\fi}
-\def\Sugar #1{\ifhmode\unskip\unskip\unskip\fi\kern.25em{#1}\kern.25em\ignorespaces}
-\def\Something#1{\Sugar{\mathematics{\langle\hbox{#1}\rangle}}}
-\def\Lbrace {\Sugar{\tttf\leftargument}}
-\def\Rbrace {\Sugar{\tttf\rightargument}}
-\def\Or {\Sugar{\mathematics{\vert}}}
-\def\Optional #1{\Sugar{\mathematics{[\hbox{#1}]}}}
-\def\Means {\Sugar{\mathematics{\rightarrow}}}
-\def\Tex #1{\Sugar{\type{#1}}}
-\def\Literal #1{\Sugar{\type{#1}}}
-\def\Syntax #1{\strut\kern-.25em{#1}\kern-.25em}
-\def\Next {\crlf\hbox to 2em{}\nobreak}
-\def\Whatever #1{\Sugar{\mathematics{(\hbox{#1})}}}
-\def\Quote #1{\Sugar{\quote{#1}}}
-
-\def\Or {\Sugar{\Indent{\mathematics{\vert}}}}
-\def\Means {\Sugar{\Indent{\mathematics{\rightarrow}}}}
-
-\def\StartSyntax
- {\goodbreak
- \startlines
- \catcode`\#=12
- \let\L \Literal
- \let\S \Something
- \def\FL##1{\color[darkred]{\L{##1}}}
- \def\FS##1{\S{\color[darkred]{##1}}}
- \let\M \Means
- \let\O \Or
- \let\Q \Quote
- \let\LB\Lbrace
- \let\RB\Rbrace}
-
-\def\StopSyntax
- {\stoplines}
-
-\protect \endinput
diff --git a/tex/context/base/s-syntax.mkii b/tex/context/base/s-syntax.mkii
new file mode 100644
index 000000000..6d1d59697
--- /dev/null
+++ b/tex/context/base/s-syntax.mkii
@@ -0,0 +1,54 @@
+%D \module
+%D [ file=s-syntax, % was: s-syn-01,
+%D version=0000.00.00,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Preliminary Syntax Stuff,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This is needed for the \METAFUN\ manual (this module was
+%D called \type {p-syn-01} on my machine).
+
+\unprotect
+
+\def\Indent #1{\ifvmode\noindent\hbox to 2em{\hss#1}\else#1\fi}
+\def\Sugar #1{\ifhmode\unskip\unskip\unskip\fi\kern.25em{#1}\kern.25em\ignorespaces}
+\def\Something#1{\Sugar{\mathematics{\langle\hbox{#1}\rangle}}}
+\def\Lbrace {\Sugar{\tttf\leftargument}}
+\def\Rbrace {\Sugar{\tttf\rightargument}}
+\def\Or {\Sugar{\mathematics{\vert}}}
+\def\Optional #1{\Sugar{\mathematics{[\hbox{#1}]}}}
+\def\Means {\Sugar{\mathematics{\rightarrow}}}
+\def\Tex #1{\Sugar{\type{#1}}}
+\def\Literal #1{\Sugar{\type{#1}}}
+\def\Syntax #1{\strut\kern-.25em{#1}\kern-.25em}
+\def\Next {\crlf\hbox to 2em{}\nobreak}
+\def\Whatever #1{\Sugar{\mathematics{(\hbox{#1})}}}
+\def\Quote #1{\Sugar{\quote{#1}}}
+
+\def\Or {\Sugar{\Indent{\mathematics{\vert}}}}
+\def\Means {\Sugar{\Indent{\mathematics{\rightarrow}}}}
+
+\def\StartSyntax
+ {\goodbreak
+ \startlines
+ \catcode`\#=12
+ \let\L \Literal
+ \let\S \Something
+ \def\FL##1{\color[darkred]{\L{##1}}}
+ \def\FS##1{\S{\color[darkred]{##1}}}
+ \let\M \Means
+ \let\O \Or
+ \let\Q \Quote
+ \let\LB\Lbrace
+ \let\RB\Rbrace}
+
+\def\StopSyntax
+ {\stoplines}
+
+\protect \endinput
diff --git a/tex/context/base/s-syntax.mkiv b/tex/context/base/s-syntax.mkiv
new file mode 100644
index 000000000..96312f771
--- /dev/null
+++ b/tex/context/base/s-syntax.mkiv
@@ -0,0 +1,96 @@
+%D \module
+%D [ file=s-syntax, % was: s-syn-01,
+%D version=0000.00.00,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Preliminary Syntax Stuff,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This is needed for the \METAFUN\ manual: quite old code that I would do
+%D differently nowadays.
+
+\unprotect
+
+\unexpanded\def\module_syntax_Indent #1{\ifvmode\noindent\hbox to 2em{\hss#1}\else#1\fi}
+\unexpanded\def\module_syntax_Sugar #1{\removeunwantedspaces\kern.25em{#1}\kern.25em\ignorespaces}
+\unexpanded\def\module_syntax_Something #1{\Sugar{\mathematics{\langle\hbox{#1}\rangle}}}
+\unexpanded\def\module_syntax_Lbrace {\Sugar{\tttf\leftargument}}
+\unexpanded\def\module_syntax_Rbrace {\Sugar{\tttf\rightargument}}
+\unexpanded\def\module_syntax_Lparent {\Sugar{\tttf(}}
+\unexpanded\def\module_syntax_Rparent {\Sugar{\tttf)}}
+\unexpanded\def\module_syntax_Lbracket {\Sugar{\tttf[}}
+\unexpanded\def\module_syntax_Rbracket {\Sugar{\tttf]}}
+\unexpanded\def\module_syntax_Or {\Sugar{\mathematics{\vert}}}
+\unexpanded\def\module_syntax_Optional #1{\Sugar{\mathematics{[\hbox{#1}]}}}
+\unexpanded\def\module_syntax_Means {\Sugar{\mathematics{\rightarrow}}}
+\unexpanded\def\module_syntax_Tex #1{\Sugar{\type{#1}}}
+\unexpanded\def\module_syntax_Literal #1{\Sugar{\type{#1}}}
+\unexpanded\def\module_syntax_Syntax #1{\strut\kern-.25em{#1}\kern-.25em}
+\unexpanded\def\module_syntax_Next {\crlf\hbox to 2em{}\nobreak}
+\unexpanded\def\module_syntax_Whatever #1{\Sugar{\mathematics{(\hbox{#1})}}}
+\unexpanded\def\module_syntax_Quote #1{\Sugar{\quote{#1}}}
+\unexpanded\def\module_syntax_Or {\Sugar{\module_syntax_Indent{\mathematics{\vert}}}}
+\unexpanded\def\module_syntax_Means {\Sugar{\module_syntax_Indent{\mathematics{\rightarrow}}}}
+\unexpanded\def\module_syntax_FlaggedLiteral #1{\color[darkred]{\module_syntax_Literal{#1}}}
+\unexpanded\def\module_syntax_FlaggedSomething#1{\module_syntax_Something{\color[darkred]{#1}}}
+
+\unexpanded\def\StartSyntax
+ {\startlines
+ % formatters
+ \let\Indent \module_syntax_Indent
+ \let\Sugar \module_syntax_Sugar
+ \let\Something \module_syntax_Something
+ \let\Lbrace \module_syntax_Lbrace
+ \let\Rbrace \module_syntax_Rbrace
+ \let\Lparent \module_syntax_Lparent
+ \let\Rparent \module_syntax_Rparent
+ \let\Lbracket \module_syntax_Lbracket
+ \let\Rbracket \module_syntax_Rbracket
+ \let\Or \module_syntax_Or
+ \let\Optional \module_syntax_Optional
+ \let\Means \module_syntax_Means
+ \let\Tex \module_syntax_Tex
+ \let\Literal \module_syntax_Literal
+ \let\Syntax \module_syntax_Syntax
+ \let\Next \module_syntax_Next
+ \let\Whatever \module_syntax_Whatever
+ \let\Quote \module_syntax_Quote
+ \let\Or \module_syntax_Or
+ \let\Means \module_syntax_Means
+ \let\FlaggedLiteral \module_syntax_FlaggedLiteral
+ \let\FlaggedSomething\module_syntax_FlaggedSomething
+ % shortcuts
+ \let\FL \module_syntax_FlaggedLiteral
+ \let\FS \module_syntax_FlaggedSomething
+ \let\L \module_syntax_Literal
+ \let\S \module_syntax_Something
+ \let\M \module_syntax_Means
+ \let\O \module_syntax_Or
+ \let\Q \module_syntax_Quote
+ \let\LB \module_syntax_Lbrace
+ \let\RB \module_syntax_Rbrace
+ \let\LP \module_syntax_Lparent
+ \let\RP \module_syntax_Rparent
+ \let\LS \module_syntax_Lbracket
+ \let\RS \module_syntax_Rbracket
+ \let\{ \module_syntax_Lbrace
+ \let\} \module_syntax_Rbrace
+ \let\( \module_syntax_Lparent
+ \let\) \module_syntax_Rparent
+ \let\[ \module_syntax_Lbracket
+ \let\] \module_syntax_Rbracket
+ % precaution
+ \catcode`\#\othercatcode}
+
+\unexpanded\def\StopSyntax
+ {\stoplines}
+
+\unexpanded\def\SyntaxCommand#1%
+ {\csname module_syntax_#1\endcsname}
+
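+%D A small usage sketch; the grammar line is illustrative only (made up for this
+%D example), it just exercises the shortcuts that \type {\StartSyntax} installs:
+%D
+%D \starttyping
+%D \StartSyntax
+%D \L{\framed} \[ \S{key}=\S{value} \O ... \] \{ \S{content} \}
+%D \StopSyntax
+%D \stoptyping
+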
+\protect \endinput
diff --git a/tex/context/base/s-typesetting-kerning.mkiv b/tex/context/base/s-typesetting-kerning.mkiv
new file mode 100644
index 000000000..48d81ce36
--- /dev/null
+++ b/tex/context/base/s-typesetting-kerning.mkiv
@@ -0,0 +1,209 @@
+%D \module
+%D [ file=s-typesetting-kerning,
+%D version=2014.12.14,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Show Character Kerning,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+\definecharacterkerning
+ [typesetting-kerning-demo]
+ [factor=.5]
+
+\startbuffer[showcharacterkerning:boxes]
+ \starttextrule{boxes}
+ \showfontkerns
+ \dontcomplain
+ \startlines
+ test \hbox{!} test
+ test\hbox{!} test
+ test \hbox{!}test
+ test:$x$ test
+ \setcharacterkerning[typesetting-kerning-demo]
+ test \hbox{!} test
+ test\hbox{!} test
+ test \hbox{!}test
+ test:$x$ test
+ \stoplines
+ \stoptextrule
+\stopbuffer
+
+\startbuffer[showcharacterkerning:ligatures]
+ \starttextrule{ligatures}
+ \dontcomplain
+ \startlines
+ effe flink effectief efficient fietsen
+ \blank
+ \setcharacterkerning[typesetting-kerning-demo]
+ effe flink effectief efficient fietsen
+ \blank \hsize\zeropoint
+ effe
+ flink
+ effectief
+ efficient
+ fietsen
+ \stoplines
+ \stoptextrule
+\stopbuffer
+
+\startbuffer[showcharacterkerning:discretionaries]
+ \starttextrule{discretionary}
+ \dontcomplain
+ \startlines
+ \hbox{\samplediscretionary}
+ \hbox{xxx\samplediscretionary}
+ \hbox{\samplediscretionary xxx}
+ \hbox{xxx\samplediscretionary xxx}
+ \blank
+ \setcharacterkerning[typesetting-kerning-demo]
+ \hbox{\samplediscretionary}
+ \hbox{xxx\samplediscretionary}
+ \hbox{\samplediscretionary xxx}
+ \hbox{xxx\samplediscretionary xxx}
+ \blank \hsize\zeropoint
+ \samplediscretionary
+ xxx\samplediscretionary
+ \samplediscretionary xxx
+ xxx\samplediscretionary xxx
+ \stoplines
+ \stoptextrule
+\stopbuffer
+
+\startbuffer[showcharacterkerning:explicits]
+ \starttextrule{explicits}
+ \exhyphenchar \hyphenasciicode
+ \preexhyphenchar \lessthanasciicode
+ \postexhyphenchar\morethanasciicode
+ \def\TestDisc
+ {\discretionary
+ {\kern\emwidth<}%
+ {>\kern\emwidth}%
+ {\kern\emwidth=\kern\emwidth}%
+ }
+ \dontcomplain
+ \startlines
+ \hbox{super-charged}
+ \hbox{super\-charged}
+ \hbox{super\TestDisc charged}
+ \hbox{super\discretionary{[}{]}{[]}charged}
+ \blank
+ \setcharacterkerning[typesetting-kerning-demo]
+ \hbox{super-charged}
+ \hbox{super\-charged}
+ \hbox{super\TestDisc charged}
+ \hbox{super\discretionary{[}{]}{[]}charged}
+ \blank \hsize\zeropoint
+ super-charged
+ super\-charged
+ super\TestDisc charged
+ super\discretionary{[}{]}{[]}charged
+ \stoplines
+ \stoptextrule
+\stopbuffer
+
+\starttexdefinition unexpanded showcharacterkerning
+ \getbuffer[showcharacterkerning:boxes]
+ \getbuffer[showcharacterkerning:ligatures]
+ \getbuffer[showcharacterkerning:discretionaries]
+ \getbuffer[showcharacterkerning:explicits]
+\stoptexdefinition
+
+
+\starttexdefinition showcharacterkerningstepscompared #1
+ \definecharacterkerning[crap][factor=\KerningStepFactor]%
+ \setbox0=\ruledhbox{\color[color-1]{#1}\hss}
+ \setbox2=\ruledhbox{\setcharacterkerning[crap]\color[color-2]{#1}}
+ \setbox4=\ruledhbox{\setcharacterkerning[crap]\showfontkerns\showglyphs#1}
+ \xdef\KerningStepPercentage{\ctxlua{context("\letterpercent 0.2f",(1-\number\wd0/\number\wd2)*100)}}
+ \scratchwidth\wd0
+ \vtop\bgroup
+ \hbox{\box0\hskip-\scratchwidth\box2}
+ \par
+ \box4
+ \egroup
+\stoptexdefinition
+
+\starttexdefinition showcharacterkerningsteps [#1]
+
+ \start
+
+ \getdummyparameters
+ [\s!font=Regular,
+ \c!sample={Wat een bende, rommelen met het font design!},
+ \c!text={rommelen},
+ \c!first=00,
+ \c!last=95,
+ \c!step=05,
+ \c!option=, % \v!page
+ #1]
+
+ \doif{\dummyparameter\c!option}\v!page {
+ \startTEXpage[\c!offset=1ex]
+ }
+
+ \definecolor[color-1][r=1,t=.5,a=1]
+ \definecolor[color-2][b=1,t=.5,a=1]
+
+ \definedfont[\dummyparameter\s!font*default sa 1]
+
+ \doif {\dummyparameter\c!option}\v!page {
+ \begingroup
+ \tttf \dummyparameter\s!font\space @ default
+ \endgroup
+ \blank
+ }
+
+ \starttabulate[|cT|l|cT|l|cT|]
+
+ \NC \tt\bf factor \NC \tt\bf sample \NC \tt\bf \letterpercent \NC \tt\bf text \NC \tt\bf \letterpercent \NC \NR \HL
+
+ \dostepwiserecurse {\dummyparameter\c!first} {\dummyparameter\c!last} {\dummyparameter\c!step} {
+ \NC
+ \xdef\KerningStepFactor{\ctxlua{context("\letterpercent 0.3f",####1/1000)}}
+ \KerningStepFactor
+ \NC
+ \showcharacterkerningstepscompared{\dummyparameter\c!sample}
+ \NC
+ \KerningStepPercentage
+ \NC
+ \showcharacterkerningstepscompared{\dummyparameter\c!text}
+ \NC
+ \KerningStepPercentage
+ \NC \NR
+ }
+
+ \stoptabulate
+
+ \doif{\dummyparameter\c!option}\v!page {
+ \stopTEXpage
+ }
+
+ \stop
+
+\stoptexdefinition
+
+\protect
+
+\continueifinputfile{s-typesetting-kerning.mkiv}
+
+\starttext
+
+ % \showcharacterkerning
+
+ \showcharacterkerningsteps[font=file:FuturaStd-Book.otf,option=page]
+ \showcharacterkerningsteps[font=file:FuturaStd-Medium.otf,option=page]
+ \showcharacterkerningsteps[font=file:FuturaStd-Bold.otf,option=page]
+ \showcharacterkerningsteps[font=file:FuturaStd-heavy.otf,option=page]
+
+\stoptext
+
+% {\hsize1mm efficient\discretionary{\kern1pt!\kern1pt}{\kern1pt!\kern1pt}{\kern1pt!\kern1pt}efficient\par}
+% {\hsize1mm\definedfont[Regular]\setcharacterkerning[typesetting-kerning-demo]efficient\-efficient\par}
+
diff --git a/tex/context/base/s-youless.mkiv b/tex/context/base/s-youless.mkiv
index 247eb5f64..e15973b9c 100644
--- a/tex/context/base/s-youless.mkiv
+++ b/tex/context/base/s-youless.mkiv
@@ -59,9 +59,10 @@
for y=year,year do
- local year = years[y]
- local scale = 20
- local mark = 3
+ local year = years[y]
+ local scale = 20
+ local mark = 3
+ local maxwatt = specification.maxwatt or year.maxwatt
for m=1,12 do
local month = year.months[m]
@@ -69,7 +70,7 @@
context.startMPpage { offset = "10pt" }
context("linecap := butt; pickup pencircle scaled .5")
- for i=0,(math.div(year.maxwatt,1000)+1)*1000,100 do
+ for i=0,(math.div(maxwatt,1000)+1)*1000,100 do
context("draw (%s,%s) -- (%s,%s) withcolor .6white ;",0,i/scale,31 * 24,i/scale)
end
@@ -123,7 +124,7 @@
context("draw (%s,%s) -- (%s,%s) withcolor darkgray ; ",xoffset,0,xoffset,-10)
end
- local max = (math.div(year.maxwatt,1000)+1)
+ local max = (math.div(maxwatt,1000)+1)
for i=0,max*1000,1000 do
context([[draw textext.lft("%s") shifted (%s,%s) ; ]],i,-10,i/scale)
diff --git a/tex/context/base/scrn-bar.mkvi b/tex/context/base/scrn-bar.mkvi
index 1dadc26f3..8a2f9441c 100644
--- a/tex/context/base/scrn-bar.mkvi
+++ b/tex/context/base/scrn-bar.mkvi
@@ -67,7 +67,7 @@
\def\scrn_bar_direct[#tag][#settings]% somewhat messy
{\iflocation
\begingroup
- \doifassignmentelse{#tag}
+ \doifelseassignment{#tag}
{\let\currentinteractionbar\empty
\setupcurrentinteractionbar[#tag]%
\edef\currentinteractionbar{\interactionbarparameter\c!alternative}}%
diff --git a/tex/context/base/scrn-but.lua b/tex/context/base/scrn-but.lua
index 74f6e0cd9..7d883c910 100644
--- a/tex/context/base/scrn-but.lua
+++ b/tex/context/base/scrn-but.lua
@@ -6,12 +6,10 @@ if not modules then modules = { } end modules ['scrn-but'] = {
license = "see context related readme files"
}
-local commands = commands
local context = context
-
local f_two_colon = string.formatters["%s:%s"]
-function commands.registerbuttons(tag,register,language)
+local function registerbuttons(tag,register,language)
local data = sorters.definitions[language]
local orders = data and data.orders or sorters.definitions.default.orders
local tag = tag == "" and { "" } or { tag }
@@ -20,3 +18,9 @@ function commands.registerbuttons(tag,register,language)
context.menubutton(tag,f_two_colon(register,order),order)
end
end
+
+interfaces.implement {
+ name = "registerbuttons",
+ actions = registerbuttons,
+ arguments = { "string", "string", "string" }
+}
diff --git a/tex/context/base/scrn-but.mkvi b/tex/context/base/scrn-but.mkvi
index fd2da9e08..3fdaf2c5d 100644
--- a/tex/context/base/scrn-but.mkvi
+++ b/tex/context/base/scrn-but.mkvi
@@ -93,7 +93,7 @@
[\c!state=\v!start,
\c!width=\v!fit,
\c!height=\v!broad,
- \c!offset=0.25em,
+ \c!offset=0.25\emwidth,
\c!frame=\v!on,
\c!background=,
\c!backgroundcolor=,
@@ -159,7 +159,7 @@
\attribute\referenceattribute\attributeunsetvalue
\global\setfalse\c_scrn_button_skipped
\chardef\locationboxpagestate\csname\??buttonlocation#currentparameter\c!samepage\endcsname % ?? bt: todo
- \doifreferencefoundelse{#action}\scrn_button_make_yes\scrn_button_make_nop
+ \doifelsereferencefound{#action}\scrn_button_make_yes\scrn_button_make_nop
#currentparameter%
#inheritedframed%
#letparameter%
@@ -217,12 +217,12 @@
{\global\settrue\c_scrn_button_skipped}
\def\scrn_button_make_normal#currentparameter#inheritedframed#letparameter#setparameter#text%
- {\ctxlua{structures.references.injectcurrentset(nil,nil)}%
+ {\clf_injectcurrentreference
\hbox attr \referenceattribute \lastreferenceattribute
{#inheritedframed{\ignorespaces#text\removeunwantedspaces}}}
\def\scrn_button_make_contrast#currentparameter#inheritedframed#letparameter#setparameter#text%
- {\ctxlua{structures.references.injectcurrentset(nil,nil)}%
+ {\clf_injectcurrentreference
\hbox attr \referenceattribute \lastreferenceattribute
{#setparameter\c!foregroundcolor{#currentparameter\c!contrastcolor}%
#inheritedframed{\ignorespaces#text\removeunwantedspaces}}}
@@ -343,12 +343,12 @@
\def\scrn_menu_define[#tag][#category][#settings]% category reflects location, settings can be parent
{\ifthirdargument
- \doifassignmentelse{#settings}%
+ \doifelseassignment{#settings}%
{\scrn_menu_define_original[#tag][#category][\c!category=#category,#settings]}% child definition
{\scrn_menu_define_original[#tag][#settings][\c!category=#category]}% % child definition
\scrn_menu_register{#tag}{#category}%
\else\ifsecondargument
- \doifassignmentelse{#category}%
+ \doifelseassignment{#category}%
{\scrn_menu_define_original[#tag][#category]}% % root definition
{\scrn_menu_define_original[#tag][#category][\c!category=#category]% % child definition
\scrn_menu_register{#tag}{#category}}%
@@ -368,7 +368,7 @@
%D Fill menus:
-\normalexpanded{\long\def\expandafter\noexpand\csname\e!start\v!interactionmenu\endcsname[#tag]#content\expandafter\noexpand\csname\e!stop\v!interactionmenu\endcsname}%
+\normalexpanded{\def\expandafter\noexpand\csname\e!start\v!interactionmenu\endcsname[#tag]#content\expandafter\noexpand\csname\e!stop\v!interactionmenu\endcsname}%
{\def\currentinteractionmenu{#tag}%
\expandafter\settrue\csname\??menustate\interactionmenuparameter\c!category\endcsname
\setinteractionmenuparameter\c!menu{#content}}
@@ -691,7 +691,7 @@
\def\scrn_button_make_position#currentparameter#inheritedframed#letparameter#setparameter#text#action%
{\global\advance\c_scrn_menu_position\plusone
- \doifreferencefoundelse{#action}% 0=not found, 1=same page, >1=elsewhere
+ \doifelsereferencefound{#action}% 0=not found, 1=same page, >1=elsewhere
{\c_scrn_menu_page_mode\ifnum\currentreferencerealpage=\realpageno\plusone\else\plustwo\fi}%
{\c_scrn_menu_page_mode\plustwo}%
\doglobal\appendetoks
@@ -709,7 +709,8 @@
\unexpanded\def\scrn_menu_got_start[#action]#text\stopgot
{\scrn_menu_action_start
- \setupcurrentinteractionmenu[\c!frame=\v!off,\c!background=]% needs checking, was buttons
+ \letinteractionmenuparameter\c!frame\v!off
+ \letinteractionmenuparameter\c!background\empty
\scrn_button_make
\interactionmenuparameter
\inheritedinteractionmenuframed
@@ -967,7 +968,7 @@
\scrn_menu_menu_button_a
{#menutag}{#settings}{#text}{#action}%
\else
- \doifassignmentelse{#menutag}\scrn_menu_menu_button_b\scrn_menu_menu_button_c
+ \doifelseassignment{#menutag}\scrn_menu_menu_button_b\scrn_menu_menu_button_c
{#menutag}{#text}{#action}%
\fi}
@@ -1022,9 +1023,14 @@
\def\scrn_menu_register_menu_buttons[#menu][#register]%
{\ifsecondargument
- \ctxcommand{registerbuttons("menu","#register","\currentlanguage")}
+ \clf_registerbuttons{menu}{#register}{\currentlanguage}%
\else
- \ctxcommand{registerbuttons("","#menu","\currentlanguage")}
+ \clf_registerbuttons{}{#menu}{\currentlanguage}%
\fi}
+% or less readable:
+%
+% \def\scrn_menu_register_menu_buttons[#menu][#register]%
+% {\clf_registerbuttons\ifsecondargument{menu}{#register}\else{}{#menu}\fi{\currentlanguage}}
+
\protect \endinput
diff --git a/tex/context/base/scrn-fld.lua b/tex/context/base/scrn-fld.lua
index 69480b887..1563b9005 100644
--- a/tex/context/base/scrn-fld.lua
+++ b/tex/context/base/scrn-fld.lua
@@ -8,6 +8,10 @@ if not modules then modules = { } end modules ['scrn-fld'] = {
-- we should move some code from lpdf-fld to here
+local context = context
+local ctx_doifelse = commands.doifelse
+local implement = interfaces.implement
+
local variables = interfaces.variables
local v_yes = variables.yes
@@ -40,48 +44,141 @@ fields.defineset = defineset
fields.clone = clone
fields.insert = insert
-commands.definefield = define
-commands.definefieldset = defineset
-commands.clonefield = clone
+-- codeinjections are not yet defined
+
+implement {
+ name = "definefield",
+ actions = define,
+ arguments = {
+ {
+ { "name" },
+ { "alternative" },
+ { "type" },
+ { "category" },
+ { "values" },
+ { "default" },
+ }
+ }
+}
-function commands.insertfield(name,specification)
- texsetbox("b_scrn_field_body",insert(name,specification))
-end
+implement {
+ name = "definefieldset",
+ actions = defineset,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "clonefield",
+ actions = clone,
+ arguments = {
+ {
+ { "children" },
+ { "alternative" },
+ { "parent" },
+ { "category" },
+ { "values" },
+ { "default" },
+ }
+ }
+}
+
+implement {
+ name = "insertfield",
+ actions = function(name,specification)
+ texsetbox("b_scrn_field_body",insert(name,specification))
+ end,
+ arguments = {
+ "string",
+ {
+ { "title" },
+ { "width", "dimen" },
+ { "height", "dimen" },
+ { "depth", "dimen" },
+ { "align" },
+ { "length" },
+ { "fontstyle" },
+ { "fontalternative" },
+ { "fontsize" },
+ { "fontsymbol" },
+ { "colorvalue", "integer" },
+ { "color" },
+ { "backgroundcolorvalue", "integer" },
+ { "backgroundcolor" },
+ { "framecolorvalue", "integer" },
+ { "framecolor" },
+ { "layer" },
+ { "option" },
+ { "align" },
+ { "clickin" },
+ { "clickout" },
+ { "regionin" },
+ { "regionout" },
+ { "afterkey" },
+ { "format" },
+ { "validate" },
+ { "calculate" },
+ { "focusin" },
+ { "focusout" },
+ { "openpage" },
+ { "closepage" },
+ }
+ }
+}
-- (for the moment) only tex interface
-function commands.getfieldcategory(name)
- local g = codeinjections.getfieldcategory(name)
- if g then
- context(g)
+implement {
+ name = "getfieldcategory",
+ arguments = "string",
+ actions = function(name)
+ local g = codeinjections.getfieldcategory(name)
+ if g then
+ context(g)
+ end
end
-end
+}
-function commands.getdefaultfieldvalue(name)
- local d = codeinjections.getdefaultfieldvalue(name)
- if d then
- context(d)
+implement {
+ name = "getdefaultfieldvalue",
+ arguments = "string",
+ actions = function(name)
+ local d = codeinjections.getdefaultfieldvalue(name)
+ if d then
+ context(d)
+ end
end
-end
+}
-function commands.exportformdata(export)
- if export == v_yes then
- codeinjections.exportformdata()
+implement {
+ name = "exportformdata",
+ arguments = "string",
+ actions = function(export)
+ if export == v_yes then
+ codeinjections.exportformdata()
+ end
end
-end
-
-function commands.setformsmethod(method)
- codeinjections.setformsmethod(method)
-end
+}
-function commands.doiffieldcategoryelse(name)
- commands.doifelse(codeinjections.validfieldcategory(name))
-end
+implement {
+ name = "setformsmethod",
+ arguments = "string",
+ actions = function(method)
+ codeinjections.setformsmethod(method)
+ end
+}
-function commands.doiffieldsetelse(tag)
- commands.doifelse(codeinjections.validfieldset(name))
-end
+implement {
+ name = "doifelsefieldcategory",
+ arguments = "string",
+ actions = function(name)
+ ctx_doifelse(codeinjections.validfieldcategory(name))
+ end
+}
-function commands.doiffieldelse(name)
- commands.doifelse(codeinjections.validfield(name))
-end
+implement {
+ name = "doiffieldsetelse",
+ arguments = "string",
+ actions = function(name)
+ ctx_doifelse(codeinjections.validfieldset(name))
+ end
+}
diff --git a/tex/context/base/scrn-fld.mkvi b/tex/context/base/scrn-fld.mkvi
index 049ac92c3..4b4c9d0ee 100644
--- a/tex/context/base/scrn-fld.mkvi
+++ b/tex/context/base/scrn-fld.mkvi
@@ -105,7 +105,7 @@
\installdirectcommandhandler \??forms {forms}
\appendtoks
- \ctxcommand{setformsmethod("\formsparameter\c!method")}%
+ \clf_setformsmethod{\formsparameter\c!method}%
\to \everysetupforms
\setupforms
@@ -113,7 +113,7 @@
\appendtoks
\iflocation
- \ctxcommand{exportformdata("\formsparameter\c!export")}%
+ \clf_exportformdata{\formsparameter\c!export}%
\fi
\to \everystoptext
@@ -127,7 +127,7 @@
{\processcommalist[#set]\scrn_symbols_preset_indeed}%
\def\scrn_symbols_preset_indeed#tag%
- {\doifobjectfoundelse{SYM}{#tag}
+ {\doifelseobjectfound{SYM}{#tag}
{}
{\settightobject{SYM}{#tag}\hbox{\symbol[#tag]}% % todo: set this as immediate xform
\page_otr_add_special_content{\hskip-\maxdimen\getobject{SYM}{#tag}}}} % and then force it into the file
@@ -177,23 +177,23 @@
\appendtoks % we cannot use parent .. maybe s!parent has to change
\ifx\currentfieldbodyparent\empty
\scrn_field_check_category
- \ctxcommand{definefield{
- name = "\currentfieldbody",
- alternative = "normal",
- type = "\fieldbodyparameter\c!type",
- category = "\fieldbodyparameter\c!category",
- values = \!!bs\fieldbodyparameter\c!values\!!es,
- default = \!!bs\fieldbodyparameter\c!default\!!es
- }}%
+ \clf_definefield
+ name {\currentfieldbody}%
+ alternative {normal}%
+ type {\fieldbodyparameter\c!type}%
+ category {\fieldbodyparameter\c!category}%
+ values {\fieldbodyparameter\c!values}%
+ default {\fieldbodyparameter\c!default}%
+ \relax
\else
- \ctxcommand{clonefield{
- children = "\currentfieldbody",
- alternative = "clone",
- parent = "\currentfieldbodyparent",
- category = "\fieldbodyparameter\c!category",
- values = \!!bs\fieldbodyparameter\c!values\!!es,
- default = \!!bs\fieldbodyparameter\c!default\!!es
- }}%
+ \clf_clonefield
+ children {\currentfieldbody}%
+ alternative {clone}%
+ parent {\currentfieldbodyparent}%
+ category {\fieldbodyparameter\c!category}%
+ values {\fieldbodyparameter\c!values}%
+ default {\fieldbodyparameter\c!default}%
+ \relax
\fi
\to \everydefinefieldbody
@@ -224,43 +224,46 @@
% == \edef\currentfieldbackgroundcolorvalue{\thecolorattribute\currentfieldbackgroundcolor}%
\fi
\usefieldbodystyleandcolor\c!style\c!color
- \ctxcommand{insertfield("\currentfieldbody", {
- title = "\currentfieldbody",
- width = \number\dimexpr\fieldbodyparameter\c!width \relax,
- height = \number\dimexpr\fieldbodyparameter\c!height\relax,
- depth = \number\dimexpr\fieldbodyparameter\c!depth \relax,
- align = "\fieldbodyparameter\c!align",
- length = "\fieldbodyparameter\c!n",
- fontstyle = "\fontstyle",
- fontalternative = "\fontalternative",
- fontsize = "\fontbody",
- fontsymbol = "\fieldbodyparameter\c!symbol",
- color = "\fieldbodyparameter\c!color",
- colorvalue = \number\attribute\colorattribute,
- \ifx\currentfieldbackgroundcolor\empty \else
- backgroundcolor = "\currentfieldbackgroundcolor",
- backgroundcolorvalue = "\currentfieldbackgroundcolorvalue",
- \fi
- \ifx\currentfieldframecolor\empty \else
- framecolor = "\currentfieldframecolor",
- framecolorvalue = "\currentfieldframecolorvalue",
- \fi
- layer = "\fieldbodyparameter\c!fieldlayer",
- option = "\fieldbodyparameter\c!option",
- align = "\fieldbodyparameter\c!align",
- clickin = "\fieldbodyparameter\c!clickin",
- clickout = "\fieldbodyparameter\c!clickout",
- regionin = "\fieldbodyparameter\c!regionin",
- regionout = "\fieldbodyparameter\c!regionout",
- afterkey = "\fieldbodyparameter\c!afterkey",
- format = "\fieldbodyparameter\c!format",
- validate = "\fieldbodyparameter\c!validate",
- calculate = "\fieldbodyparameter\c!calculate",
- focusin = "\fieldbodyparameter\c!focusin",
- focusout = "\fieldbodyparameter\c!focusout",
- openpage = "\fieldbodyparameter\c!openpage",
- closepage = "\fieldbodyparameter\c!closepage",
- })}}
+ \clf_insertfield
+ {\currentfieldbody}%
+ {%
+ title {\currentfieldbody}
+ width \dimexpr\fieldbodyparameter\c!width \relax
+ height \dimexpr\fieldbodyparameter\c!height\relax
+ depth \dimexpr\fieldbodyparameter\c!depth \relax
+ align {\fieldbodyparameter\c!align}%
+ length {\fieldbodyparameter\c!n}%
+ fontstyle {\fontstyle}%
+ fontalternative {\fontalternative}%
+ fontsize {\fontbody}%
+ fontsymbol {\fieldbodyparameter\c!symbol}%
+ color {\fieldbodyparameter\c!color}%
+ colorvalue \attribute\colorattribute
+ \ifx\currentfieldbackgroundcolor\empty \else
+ backgroundcolor {\currentfieldbackgroundcolor}%
+ backgroundcolorvalue \numexpr\currentfieldbackgroundcolorvalue\relax
+ \fi
+ \ifx\currentfieldframecolor\empty \else
+ framecolor {\currentfieldframecolor}%
+ framecolorvalue \numexpr\currentfieldframecolorvalue\relax
+ \fi
+ layer {\fieldbodyparameter\c!fieldlayer}%
+ option {\fieldbodyparameter\c!option}%
+ align {\fieldbodyparameter\c!align}%
+ clickin {\fieldbodyparameter\c!clickin}%
+ clickout {\fieldbodyparameter\c!clickout}%
+ regionin {\fieldbodyparameter\c!regionin}%
+ regionout {\fieldbodyparameter\c!regionout}%
+ afterkey {\fieldbodyparameter\c!afterkey}%
+ format {\fieldbodyparameter\c!format}%
+ validate {\fieldbodyparameter\c!validate}%
+ calculate {\fieldbodyparameter\c!calculate}%
+ focusin {\fieldbodyparameter\c!focusin}%
+ focusout {\fieldbodyparameter\c!focusout}%
+ openpage {\fieldbodyparameter\c!openpage}%
+ closepage {\fieldbodyparameter\c!closepage}%
+ }%
+ \relax}
%D The sets are used in grouped calculations.
%D
@@ -270,16 +273,20 @@
{\dodoubleempty\scrn_field_define_set}
\def\scrn_field_define_set[#tag][#list]%
- {\ctxcommand{definefieldset("#tag","#list")}}
+ {\clf_definefieldset{#tag}{#list}}
\let\dodefinefieldset\definefieldbodyset % compatibility
%D A few testing macros:
-\def\doiffieldbodyelse #tag{\ctxcommand{doiffieldelse("#tag")}}
-\def\doiffieldcategoryelse#tag{\ctxcommand{doiffieldcategoryelse("#tag")}}
+\def\doifelsefieldbody #tag{\clf_doifelsefield{#tag}}
+\def\doifelsefieldcategory#tag{\clf_doifelsefieldcategory{#tag}}
+
+\let\doiffieldbodyelse \doifelsefieldbody
+\let\doiffieldcategoryelse\doifelsefieldcategory
-\let\doiffieldelse\doiffieldbodyelse % compatibility
+\let\doiffieldelse \doifelsefieldbody % compatibility / will be dropped
+\let\doifelsefield \doifelsefieldbody % compatibility / will be dropped
%D We still support the traditional method of defining fields:
%D
@@ -365,7 +372,7 @@
\def\scrn_field_fit[#tag][#settings]%
{\iflocation
\begingroup
- \edef\currentdefaultfieldvalue{\ctxcommand{getdefaultfieldvalue("#tag")}}%
+ \edef\currentdefaultfieldvalue{\clf_getdefaultfieldvalue{#tag}}%
\setbox\b_scrn_field_fit_symbol\hbox{\symbol[\currentdefaultfieldvalue]}%
\fitfieldframed
{\fieldbody[#tag]
@@ -494,7 +501,7 @@
\scrn_field_load_scripts
\edef\currentfieldbody {#tag}%
\edef\currentfieldlabel {#label}%
- \edef\currentfieldcategory{\ctxcommand{getfieldcategory("#tag")}}%
+ \edef\currentfieldcategory{\clf_getfieldcategory{#tag}}%
\ifx\currentfieldlabel\empty
\let\currentfieldlabel\currentfieldbody
\fi
@@ -679,7 +686,7 @@
\edef\currenttooltipname{tooltip:\number\c_scrn_tooltip_n}%
\setbox\b_scrn_tooltip_anchor\hbox
{\strut#anchortext}%
- \doifassignmentelse{#settings}
+ \doifelseassignment{#settings}
{\setupcurrenttooltip[#settings]}%
{\setupcurrenttooltip[\c!location=#settings]}%
\setbox\b_scrn_tooltip_text\hbox
@@ -740,6 +747,8 @@
%D \goto{walk field}[Walk{mine}]
%D \stoptyping
+% todo: expand #symbols
+
\unexpanded\def\definefieldstack
{\dotripleargument\scrn_fieldstack_define}
@@ -868,7 +877,7 @@
\setvalue{pushbutton:#tag}{\scrn_pushbutton_handle{#tag}{#settings}}}
\def\scrn_pushbutton_define_variant#tag#variant#content%
- {\doifsymboldefinedelse{pushsymbol:#tag:#variant}
+ {\doifelsesymboldefined{pushsymbol:#tag:#variant}
\donothing
{\definesymbol[pushsymbol:#tag:#variant][{#content}]}}
@@ -975,7 +984,7 @@
\setupcurrentinteractionmenu[#settings]%
\let\scrn_rollbutton_symbol\scrn_rollbutton_symbol_m
\else
- \doifassignmentelse{#tag}
+ \doifelseassignment{#tag}
{\let\currentbutton\empty
\setupcurrentbutton[#tag]%
\let\scrn_rollbutton_symbol\scrn_rollbutton_symbol_b}%
diff --git a/tex/context/base/scrn-hlp.lua b/tex/context/base/scrn-hlp.lua
index d344ce280..99c0565a8 100644
--- a/tex/context/base/scrn-hlp.lua
+++ b/tex/context/base/scrn-hlp.lua
@@ -6,13 +6,15 @@ if not modules then modules = { } end modules ['scrn-hlp'] = {
license = "see context related readme files"
}
-local format = string.format
+local tonumber = tonumber
local help = { }
interactions.help = help
local context = context
-local commands = commands
+local implement = interfaces.implement
+
+local formatters = string.formatters
local a_help = attributes.private("help")
@@ -48,21 +50,26 @@ local helpscript = [[
local template = "javascript(Hide_All_Help{help:}),action(show{help:%s})"
-function help.register(number,name,box)
- if helpscript then
- interactions.javascripts.setpreamble("HelpTexts",helpscript)
- helpscript = false
- end
- local b = copy_nodelist(texgetbox(box))
- register_list(b)
- data[number] = b
- if name and name ~= "" then
- references[name] = number
- structures.references.define("",name,format(template,number))
+local function register(specification)
+ local number = specification.number
+ local name = specification.name
+ local box = specification.box
+ if number and name and box then
+ if helpscript then
+ interactions.javascripts.setpreamble("HelpTexts",helpscript)
+ helpscript = false
+ end
+ local b = copy_nodelist(texgetbox(box))
+ register_list(b)
+ data[number] = b
+ if name and name ~= "" then
+ references[name] = number
+ structures.references.define("",name,formatters[template](number))
+ end
end
end
-local function collect(head,used)
+local function collectused(head,used)
while head do
local id = head.id
if id == hlist_code then
@@ -74,51 +81,77 @@ local function collect(head,used)
used[#used+1] = a
end
else
- used = collect(head.list,used)
+ used = collectused(head.list,used)
end
elseif id == vlist_code then
- used = collect(head.list,used)
+ used = collectused(head.list,used)
end
head = head.next
end
return used
end
-function help.collect(box)
+local function collect(box)
if next(data) then
- return collect(texgetbox(box).list)
+ return collectused(texgetbox(box).list)
end
end
-commands.registerhelp = help.register
-
-function commands.collecthelp(box)
- local used = help.collect(box)
- if used then
- local done = { }
- context.startoverlay()
- for i=1,#used do
- local d = data[used[i]]
- if d and not done[d] then
- local box = hpack_nodelist(copy_nodelist(d))
- context(false,box)
- done[d] = true
- else
- -- error
+local function reference(name)
+ return references[name] or tonumber(name) or 0
+end
+
+help.register = register
+help.collect = collect
+help.reference = reference
+
+implement {
+ name = "registerhelp",
+ actions = register,
+ arguments = {
+ {
+ { "number", "integer" },
+ { "name" },
+ { "box" , "integer" }
+ }
+ }
+}
+
+implement {
+ name = "collecthelp",
+ arguments = "integer",
+ actions = function(box)
+ local used = collect(box)
+ if used then
+ local done = { }
+ context.startoverlay()
+ for i=1,#used do
+ local d = data[used[i]]
+ if d and not done[d] then
+ local box = hpack_nodelist(copy_nodelist(d))
+ context(false,box)
+ done[d] = true
+ else
+ -- error
+ end
end
+ context.stopoverlay()
end
- context.stopoverlay()
end
-end
-
-function help.reference(name)
- return references[name] or tonumber(name) or 0
-end
+}
-function commands.helpreference(name)
- context(references[name] or tonumber(name) or 0)
-end
+implement {
+ name = "helpreference",
+ arguments = "string",
+ actions = function(name)
+ context(reference(name))
+ end
+}
-function commands.helpaction(name)
- context(template,references[name] or tonumber(name) or 0)
-end
+implement {
+ name = "helpaction",
+ arguments = "string",
+ actions = function(name)
+ context(template,reference(name))
+ end
+}
diff --git a/tex/context/base/scrn-hlp.mkvi b/tex/context/base/scrn-hlp.mkvi
index f5a78fb08..eca79c90a 100644
--- a/tex/context/base/scrn-hlp.mkvi
+++ b/tex/context/base/scrn-hlp.mkvi
@@ -130,25 +130,31 @@
\c!values=\currenthelpname]%
\setbox\b_scrn_help_box\hbox
{\fieldbody[\currenthelpname]}%
- \ctxcommand{registerhelp(\number\c_scrn_help_n,"\currenthelpreference",\number\b_scrn_help_box)}}
+ \clf_registerhelp
+ number \c_scrn_help_n
+ name {\currenthelpreference}%
+ box \b_scrn_help_box
+ \relax}
-\def\doifelsehelp
+\unexpanded\def\doifelsehelp
{\ifcase\c_scrn_help_n
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
-\def\placehelp % was \helpdata
+\let\doifhelpelse\doifelsehelp
+
+\unexpanded\def\placehelp % was \helpdata
{\ifinpagebody\ifcase\c_scrn_help_n\else
- \ctxcommand{collecthelp(255)}% rather hard coded ... bad
+ \clf_collecthelp\normalpagebox
\fi\fi}
\def\helpreference#category%
- {\ctxcommand{helpreference("#category")}}
+ {\clf_helpreference{#category}}
\def\helpaction#category%
- {\ctxcommand{helpaction("#category")}}
+ {\clf_helpaction{#category}}
\unexpanded\def\helpsignal#category%
{\hbox attr \helpattribute \helpreference{#category}{}}
diff --git a/tex/context/base/scrn-ini.lua b/tex/context/base/scrn-ini.lua
index 4831408f9..ce9f9f71b 100644
--- a/tex/context/base/scrn-ini.lua
+++ b/tex/context/base/scrn-ini.lua
@@ -16,7 +16,7 @@ local codeinjections = backends.codeinjections
local identitydata = { }
-local function setupidentity(specification)
+function general.setupidentity(specification)
for k, v in next, specification do
identitydata[k] = v
end
@@ -27,6 +27,17 @@ function general.getidentity()
return identitydata
end
-general.setupidentity = setupidentity
-
-commands.setupidentity = setupidentity
+interfaces.implement {
+ name = "setupidentity",
+ actions = general.setupidentity,
+ arguments = {
+ {
+ { "title" },
+ { "subtitle" },
+ { "author" },
+ { "creator" },
+ { "date" },
+ { "keywords" },
+ }
+ }
+}
diff --git a/tex/context/base/scrn-ini.mkvi b/tex/context/base/scrn-ini.mkvi
index f5b294624..2ed822c6e 100644
--- a/tex/context/base/scrn-ini.mkvi
+++ b/tex/context/base/scrn-ini.mkvi
@@ -51,6 +51,8 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doiflocationelse\doifelselocation
+
\setupinteraction
[\c!state=\v!stop]
@@ -176,14 +178,14 @@
%D Identity
\def\scrn_identity_synchronize
- {\ctxcommand{setupidentity{
- title = \!!bs\interactionparameter\c!title\!!es,
- subtitle = \!!bs\interactionparameter\c!subtitle\!!es,
- author = \!!bs\interactionparameter\c!author\!!es,
- creator = \!!bs ConTeXt - \contextversion\!!es,
- date = \!!bs\interactionparameter\c!date\!!es,
- keywords = \!!bs\interactionparameter\c!keyword\!!es,
- }}}
+ {\clf_setupidentity
+ title {\interactionparameter\c!title}%
+ subtitle {\interactionparameter\c!subtitle}%
+ author {\interactionparameter\c!author}%
+ creator { ConTeXt - \contextversion}%
+ date {\interactionparameter\c!date}%
+ keywords {\interactionparameter\c!keyword}%
+ \relax}
\appendtoks
\scrn_identity_synchronize
diff --git a/tex/context/base/scrn-pag.lua b/tex/context/base/scrn-pag.lua
index 7003d0285..4d7b388ee 100644
--- a/tex/context/base/scrn-pag.lua
+++ b/tex/context/base/scrn-pag.lua
@@ -10,18 +10,43 @@ interactions = interactions or { }
interactions.pages = interactions.pages or { }
local pages = interactions.pages
+local implement = interfaces.implement
+
local codeinjections = backends.codeinjections
-local function setupcanvas(specification)
+function pages.setupcanvas(specification)
codeinjections.setupcanvas(specification)
end
-local function setpagetransition(specification)
+function pages.setpagetransition(specification)
codeinjections.setpagetransition(specification)
end
-pages.setupcanvas = setupcanvas
-pages.setpagetransition = setpagetransition
+implement {
+ name = "setupcanvas",
+ actions = pages.setupcanvas,
+ arguments = {
+ {
+ { "mode" },
+ { "singlesided", "boolean" },
+ { "doublesided", "boolean" },
+ { "leftoffset", "dimen" },
+ { "topoffset", "dimen" },
+ { "width", "dimen" },
+ { "height", "dimen" },
+ { "paperwidth", "dimen" },
+ { "paperheight", "dimen" },
+ }
+ }
+}
-commands.setupcanvas = setupcanvas
-commands.setpagetransition = setpagetransition
+implement {
+ name = "setpagetransition",
+ actions = pages.setpagetransition,
+ arguments = {
+ {
+ { "n" },
+ { "delay", "integer" },
+ }
+ }
+}
diff --git a/tex/context/base/scrn-pag.mkvi b/tex/context/base/scrn-pag.mkvi
index 5bbdadda8..3dfcd65c5 100644
--- a/tex/context/base/scrn-pag.mkvi
+++ b/tex/context/base/scrn-pag.mkvi
@@ -124,30 +124,49 @@
%
% \starttext \input ward \stoptext
-\def\scrn_canvas_synchronize_simple
- {\ctxcommand{setupcanvas{
- paperwidth = \number\printpaperwidth,
- paperheight = \number\printpaperheight
- }}}
+\let\scrn_canvas_synchronize_simple \relax
+\let\scrn_canvas_synchronize_complex\relax
-\def\scrn_canvas_synchronize_complex
+\appendtoks
+ \global\let\scrn_canvas_synchronize_simple \scrn_canvas_synchronize_simple_indeed
+ \global\let\scrn_canvas_synchronize_complex\scrn_canvas_synchronize_complex_indeed
+\to \everysetuplayout
+
+\def\scrn_canvas_synchronize_simple_indeed
+ {\clf_setupcanvas
+ paperwidth \printpaperwidth
+ paperheight \printpaperheight
+ \relax
+ %\global\let\scrn_canvas_synchronize_simple \relax
+ \global\let\scrn_canvas_synchronize_complex\relax}
+
+\def\scrn_canvas_synchronize_complex_indeed
{\scrn_canvas_calculate % otherwise we need to hook it into setuppage etc
- \ctxcommand{setupcanvas{
- mode = "\interactionscreenparameter\c!option",
- singlesided = \ifsinglesided true\else false\fi,
- doublesided = \ifdoublesided true\else false\fi,
- leftoffset = \number\canvasbackoffset,
- topoffset = \number\canvastopoffset,
- width = \number\canvaswidth,
- height = \number\canvasheight,
- paperwidth = \number\canvasmaxwidth,
- paperheight = \number\canvasmaxheight
- }}}
+ \clf_setupcanvas
+ mode {\interactionscreenparameter\c!option}%
+ singlesided \ifsinglesided true\else false\fi\space
+ doublesided \ifdoublesided true\else false\fi\space
+ leftoffset \canvasbackoffset
+ topoffset \canvastopoffset
+ width \canvaswidth
+ height \canvasheight
+ paperwidth \canvasmaxwidth
+ paperheight \canvasmaxheight
+ \relax
+ %\global\let\scrn_canvas_synchronize_simple \relax
+ \global\let\scrn_canvas_synchronize_complex\relax}
\appendtoks
- \doifcommonelse{\interactionscreenparameter\c!option}{\v!max,\v!fit}%
- {\global\settrue \c_scrn_canvas_tight_page}%
- {\global\setfalse\c_scrn_canvas_tight_page}%
+ \begingroup
+ \edef\p_option{\interactionscreenparameter\c!option}%
+ \ifx\p_option\v!max
+ \global\settrue \c_scrn_canvas_tight_page
+ \else\ifx\p_option\v!fit
+ \global\settrue \c_scrn_canvas_tight_page
+ \else
+ \global\setfalse\c_scrn_canvas_tight_page
+ \fi\fi
+ \endgroup
\to \everysetupinteractionscreen
\setupinteractionscreen
@@ -159,6 +178,11 @@
\c!topspace=\topspace,
\c!option=\v!auto]
+\appendtoks
+ \global\let\scrn_canvas_synchronize_simple \scrn_canvas_synchronize_simple_indeed
+ \global\let\scrn_canvas_synchronize_complex\scrn_canvas_synchronize_complex_indeed
+\to \everysetupinteractionscreen
+
%D Conditional page breaks:
\unexpanded\def\screen
@@ -177,7 +201,10 @@
{\dosingleempty\scrn_transitions_setup}
\def\scrn_transitions_setup[#list]%
- {\edef\scrn_transitions_list{#list}}
+ {\edef\scrn_transitions_list{#list}%
+ \ifx\scrn_transitions_list\v!reset
+ \let\scrn_transitions_list\empty
+ \fi}
\def\scrn_transitions_set
{\iflocation \ifx\scrn_transitions_list\empty \else
@@ -187,10 +214,10 @@
\def\scrn_transitions_set_indeed
{\begingroup
\edef\currentinteractionscreendelay{\interactionscreenparameter\c!delay}%
- \ctxcommand{setpagetransition{
- n = "\scrn_transitions_list",
- delay = "\ifx\currentinteractionscreendelay\v!none 0\else\currentinteractionscreendelay\fi"
- }}%
+ \clf_setpagetransition
+ n {\scrn_transitions_list}%
+ delay \ifx\currentinteractionscreendelay\v!none \zerocount\else\currentinteractionscreendelay\fi
+ \relax
\endgroup}
\prependtoks
diff --git a/tex/context/base/scrn-ref.lua b/tex/context/base/scrn-ref.lua
index df71b6a97..c1fc94871 100644
--- a/tex/context/base/scrn-ref.lua
+++ b/tex/context/base/scrn-ref.lua
@@ -15,6 +15,8 @@ local codeinjections = backends.codeinjections
local expandcurrent = structures.references.expandcurrent
local identify = structures.references.identify
+local implement = interfaces.implement
+
local function check(what)
if what and what ~= "" then
local set, bug = identify("",what)
@@ -54,12 +56,12 @@ local function setclosepageaction(close)
end
end
-references.setopendocument = setopendocumentaction
-references.setclosedocument = setclosedocumentaction
-references.setopenpage = setopenpageaction
-references.setclosepage = setclosepageaction
+references.setopendocument = setopendocumentaction
+references.setclosedocument = setclosedocumentaction
+references.setopenpage = setopenpageaction
+references.setclosepage = setclosepageaction
-commands.setopendocumentaction = setopendocumentaction
-commands.setclosedocumentaction = setclosedocumentaction
-commands.setopenpageaction = setopenpageaction
-commands.setclosepageaction = setclosepageaction
+implement { name = "setopendocumentaction", arguments = "string", actions = setopendocumentaction }
+implement { name = "setclosedocumentaction", arguments = "string", actions = setclosedocumentaction }
+implement { name = "setopenpageaction", arguments = "string", actions = setopenpageaction }
+implement { name = "setclosepageaction", arguments = "string", actions = setclosepageaction }
diff --git a/tex/context/base/scrn-ref.mkvi b/tex/context/base/scrn-ref.mkvi
index a06ba1ad8..2b15b4677 100644
--- a/tex/context/base/scrn-ref.mkvi
+++ b/tex/context/base/scrn-ref.mkvi
@@ -25,7 +25,7 @@
\to \everysetupinteraction
\def\scrn_reference_enable_page_destinations % no reset
- {\ctxlua{structures.references.setinnermethod("\interactionparameter\c!page")}}
+ {\clf_setinnerreferencemethod{\interactionparameter\c!page}}
\setupinteraction % start fit page and reset form
[\c!page=\v!no,
@@ -34,7 +34,7 @@
\c!focus=\v!fit,
\c!calculate=,
% rendering:
- \c!width=1em,
+ \c!width=\emwidth,
\c!height=\zeropoint,
\c!depth=\zeropoint,
\c!symbolset=]
@@ -46,10 +46,10 @@
\edef\currentinteractionopenaction {\interactionparameter\c!openaction }%
\edef\currentinteractioncloseaction{\interactionparameter\c!closeaction}%
\ifx\currentinteractionopenaction\empty \else
- \ctxcommand{setopendocumentaction("\currentinteractionopenaction")}%
+ \clf_setopendocumentaction{\currentinteractionopenaction}%
\fi
\ifx\currentinteractioncloseaction\empty \else
- \ctxcommand{setclosedocumentaction("\currentinteractioncloseaction")}%
+ \clf_setclosedocumentaction{\currentinteractioncloseaction}%
\fi
\glet\scrn_reference_set_text_actions\relax
\fi}
@@ -59,10 +59,10 @@
\edef\currentinteractionopenpageaction {\interactionparameter\c!openpageaction }%
\edef\currentinteractionclosepageaction{\interactionparameter\c!closepageaction}%
\ifx\currentinteractionopenpageaction\empty \else
- \ctxcommand{setopenpageaction("\currentinteractionopenpageaction")}%
+ \clf_setopenpageaction{\currentinteractionopenpageaction}%
\fi
\ifx\currentinteractionclosepageaction\empty \else
- \ctxcommand{setclosepageaction("\currentinteractionclosepageaction")}%
+ \clf_setclosepageaction{\currentinteractionclosepageaction}%
\fi
\fi}
@@ -73,7 +73,7 @@
\def\scrn_reference_enable_references
{\ifproductionrun
- \ctxlua{structures.references.enableinteraction()}%
+ \clf_enableinteraction % only once anyway
\glet\scrn_reference_enable_references\relax
\fi}
diff --git a/tex/context/base/scrn-wid.lua b/tex/context/base/scrn-wid.lua
index 5b319b07e..72c9bc733 100644
--- a/tex/context/base/scrn-wid.lua
+++ b/tex/context/base/scrn-wid.lua
@@ -10,6 +10,7 @@ interactions = interactions or { }
local interactions = interactions
local context = context
+local implement = interfaces.implement
local allocate = utilities.storage.allocate
@@ -42,9 +43,13 @@ local report_attachments = logs.reporter("widgets","attachments")
-- Symbols
-function commands.presetsymbollist(list)
- codeinjections.presetsymbollist(list)
-end
+implement {
+ name = "presetsymbollist",
+ arguments = "string",
+ actions = function(list)
+ codeinjections.presetsymbollist(list)
+ end
+}
-- Attachments
--
@@ -108,11 +113,51 @@ function attachments.insert(specification)
return nodeinjections.attachfile(specification)
end
-commands.registerattachment = attachments.register
+implement {
+ name = "registerattachment",
+ actions = attachments.register,
+ arguments = {
+ {
+ { "tag" },
+ { "registered" },
+ { "title" },
+ { "subtitle" },
+ { "author" },
+ { "file" },
+ { "name" },
+ { "buffer" },
+ }
+ }
+}
-function commands.insertattachment(specification)
- texsetbox("b_scrn_attachment_link",(attachments.insert(specification)))
-end
+implement {
+ name = "insertattachment",
+ actions = function(specification)
+ texsetbox("b_scrn_attachment_link",(attachments.insert(specification)))
+ end,
+ arguments = {
+ {
+ { "tag" },
+ { "registered" },
+ { "method" },
+ { "width", "dimen" },
+ { "height", "dimen" },
+ { "depth", "dimen" },
+ { "colormodel", "integer" },
+ { "colorvalue", "integer" },
+ { "color" },
+ { "transparencyvalue", "integer" },
+ { "symbol" },
+ { "layer" },
+ { "title" },
+ { "subtitle" },
+ { "author" },
+ { "file" },
+ { "name" },
+ { "buffer" },
+ }
+ }
+}
-- Comment
@@ -124,9 +169,32 @@ function comments.insert(specification)
return nodeinjections.comment(specification)
end
-function commands.insertcomment(specification)
- texsetbox("b_scrn_comment_link",(comments.insert(specification)))
-end
+implement {
+ name = "insertcomment",
+ actions = function(specification)
+ texsetbox("b_scrn_comment_link",(comments.insert(specification)))
+ end,
+ arguments = {
+ {
+ { "tag" },
+ { "title" },
+ { "subtitle" },
+ { "author" },
+ { "width", "dimen" },
+ { "height", "dimen" },
+ { "depth", "dimen" },
+ { "nx" },
+ { "ny" },
+ { "colormodel", "integer" },
+ { "colorvalue", "integer" },
+ { "transparencyvalue", "integer" },
+ { "option" },
+ { "symbol" },
+ { "buffer" },
+ { "layer" },
+ }
+ }
+}
-- Soundclips
@@ -153,8 +221,27 @@ function soundclips.insert(tag)
end
end
-commands.registersoundclip = soundclips.register
-commands.insertsoundclip = soundclips.insert
+implement {
+    name      = "registersoundclip",
+ actions = soundclips.register,
+ arguments = {
+ {
+ { "tag" },
+ { "file" }
+ }
+ }
+}
+
+implement {
+    name      = "insertsoundclip",
+ actions = soundclips.insert,
+ arguments = {
+ {
+ { "tag" },
+ { "repeat" }
+ }
+ }
+}
-- Renderings
@@ -175,48 +262,84 @@ function renderings.rendering(label)
end
end
-local function var(label,key)
+function renderings.var(label,key)
local rn = renderings[label]
return rn and rn[key] or ""
end
-renderings.var = var
-
-function commands.renderingvar(label,key)
- context(var(label,key))
-end
+implement {
+ name = "renderingvar",
+ actions = { renderings.var, context },
+ arguments = { "string", "string" }
+}
-commands.registerrendering = renderings.register
+implement {
+ name = "registerrendering",
+ actions = renderings.register,
+ arguments = {
+ {
+ { "type" },
+ { "label" },
+ { "mime" },
+ { "filename" },
+ { "option" },
+ }
+ }
+}
-- Rendering:
-function commands.insertrenderingwindow(specification)
- codeinjections.insertrenderingwindow(specification)
-end
+implement {
+ name = "insertrenderingwindow",
+ actions = function(specification)
+ codeinjections.insertrenderingwindow(specification)
+ end,
+ arguments = {
+ {
+ { "label" },
+ { "width", "dimen" },
+ { "height", "dimen" },
+ { "option" },
+ { "page", "integer" },
+ }
+ }
+}
-- Linkedlists (only a context interface)
-function commands.definelinkedlist(tag)
- -- no need
-end
+implement {
+ name = "definelinkedlist",
+ arguments = "string",
+ actions = function(tag)
+ -- no need
+ end
+}
-function commands.enhancelinkedlist(tag,n)
- local ll = jobpasses.gettobesaved(tag)
- if ll then
- ll[n] = texgetcount("realpageno")
- end
-end
+implement {
+ name = "enhancelinkedlist",
+ arguments = { "string", "integer" },
+ actions = function(tag,n)
+ local ll = jobpasses.gettobesaved(tag)
+ if ll then
+ ll[n] = texgetcount("realpageno")
+ end
+ end
+}
-function commands.addlinklistelement(tag)
- local tobesaved = jobpasses.gettobesaved(tag)
- local collected = jobpasses.getcollected(tag) or { }
- local currentlink = #tobesaved + 1
- local noflinks = #collected
- tobesaved[currentlink] = 0
- local f = collected[1] or 0
- local l = collected[noflinks] or 0
- local p = collected[currentlink-1] or f
- local n = collected[currentlink+1] or l
- context.setlinkedlistproperties(currentlink,noflinks,f,p,n,l)
- -- context.ctxlatelua(function() commands.enhancelinkedlist(tag,currentlink) end)
-end
+implement {
+ name = "addlinklistelement",
+ arguments = "string",
+ actions = function(tag)
+ local tobesaved = jobpasses.gettobesaved(tag)
+ local collected = jobpasses.getcollected(tag) or { }
+ local currentlink = #tobesaved + 1
+ local noflinks = #collected
+ tobesaved[currentlink] = 0
+ local f = collected[1] or 0
+ local l = collected[noflinks] or 0
+ local p = collected[currentlink-1] or f
+ local n = collected[currentlink+1] or l
+ context.setlinkedlistproperties(currentlink,noflinks,f,p,n,l)
+ -- context.ctxlatelua(function() commands.enhancelinkedlist(tag,currentlink) end)
+ end
+}
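
For the record, the nested arguments tables above describe a keyword scanner: the
generated \clf_ command reads key {string}, key <dimen> and key <integer> pairs up to a
terminating \relax and hands them to the action as one specification table. A minimal
sketch with invented names (not part of scrn-wid.lua):

    local implement = interfaces.implement
    local report    = logs.reporter("demo")

    implement {
        name      = "demoinsertthing",           -- hypothetical
        arguments = {
            {
                { "tag" },                       -- tag {...}      -> string
                { "width", "dimen"   },          -- width <dimen>  -> number (sp)
                { "page",  "integer" },          -- page <integer> -> number
            }
        },
        actions   = function(specification)
            -- specification is one table, e.g. { tag = "x", width = 1234567, page = 2 }
            report("tag %a, width %p, page %s",
                specification.tag,specification.width,specification.page)
        end,
    }

    -- TeX side, mirroring the calls above:
    --
    --   \clf_demoinsertthing
    --       tag   {demo}%
    --       width \dimexpr2cm\relax
    --       page  \realpageno
    --   \relax
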
diff --git a/tex/context/base/scrn-wid.mkvi b/tex/context/base/scrn-wid.mkvi
index fad451651..57a4be276 100644
--- a/tex/context/base/scrn-wid.mkvi
+++ b/tex/context/base/scrn-wid.mkvi
@@ -100,16 +100,16 @@
\begingroup
\def\currentattachment{_}%
\setupcurrentattachment[#settings,\s!parent=\??attachment]%
- \ctxcommand{registerattachment{
- tag = "#tag",
- registered = "#tag",
- title = "\attachmentparameter\c!title",
- subtitle = "\attachmentparameter\c!subtitle",
- author = "\attachmentparameter\c!author",
- file = "\attachmentparameter\c!file",
- name = "\attachmentparameter\c!name",
- buffer = "\attachmentparameter\c!buffer",
- }}%
+ \clf_registerattachment
+ tag {#tag}%
+ registered {#tag}%
+ title {\attachmentparameter\c!title}%
+ subtitle {\attachmentparameter\c!subtitle}%
+ author {\attachmentparameter\c!author}%
+ file {\attachmentparameter\c!file}%
+ name {\attachmentparameter\c!name}%
+ buffer {\attachmentparameter\c!buffer}%
+ \relax
\endgroup
\else
% todo
@@ -136,7 +136,7 @@
{\bgroup
\doifelsenothing{#registered}
{\scrn_attachment_inject[\v!auto][]}
- {\doifassignmentelse{#registered}
+ {\doifelseassignment{#registered}
{\scrn_attachment_inject[\v!auto][#registered]}
{\scrn_attachment_inject[#registered][#settings]}}%
\egroup}
@@ -162,7 +162,7 @@
{\bgroup
\doifelsenothing{#registered}
{\def\scrn_attachment_stop{\scrn_attachment_inject[\v!auto][\c!buffer=\v!attachment]\egroup}}%
- {\doifassignmentelse{#registered}
+ {\doifelseassignment{#registered}
{\def\scrn_attachment_stop{\scrn_attachment_inject[\v!auto][\c!buffer=\v!attachment,#registered]\egroup}}%
{\def\scrn_attachment_stop{\scrn_attachment_inject[#registered][\c!buffer=\v!attachment,#settings]\egroup}}}%
\grabbufferdatadirect\v!attachment{\e!start\currentattachment}{\e!stop\currentattachment}}
@@ -181,37 +181,37 @@
\edef\currentattachmentheight{\attachmentparameter\c!height}%
\edef\currentattachmentdepth {\attachmentparameter\c!depth }%
\ifx\currentattachmentsymbol\empty
- \ifx\currentattachmentwidth \v!fit\edef\currentattachmentwidth {.5em}\fi
- \ifx\currentattachmentheight\v!fit\edef\currentattachmentheight{.5em}\fi
+ \ifx\currentattachmentwidth \v!fit\edef\currentattachmentwidth {.5\emwidth}\fi
+ \ifx\currentattachmentheight\v!fit\edef\currentattachmentheight{.5\emwidth}\fi
\ifx\currentattachmentdepth \v!fit\let \currentattachmentdepth \zeropoint\fi
\else
- \ctxcommand{presetsymbollist("\attachmentparameter\c!symbol")}%
+ \clf_presetsymbollist{\attachmentparameter\c!symbol}%
% we cannot yet ask for the wd/ht/dp of an xform else we could use those
\setbox\b_scrn_attachment_symbol\hbox{\symbol[\lastpredefinedsymbol]}%
\ifx\currentattachmentwidth \v!fit\edef\currentattachmentwidth {\wd\b_scrn_attachment_symbol}\fi
\ifx\currentattachmentheight\v!fit\edef\currentattachmentheight{\ht\b_scrn_attachment_symbol}\fi
\ifx\currentattachmentdepth \v!fit\edef\currentattachmentdepth {\dp\b_scrn_attachment_symbol}\fi
\fi
- \ctxcommand{insertattachment{
- tag = "\currentattachment",
- registered = "\currentattachmentregistered",
- width = \number\dimexpr\currentattachmentwidth \relax,
- height = \number\dimexpr\currentattachmentheight\relax,
- depth = \number\dimexpr\currentattachmentdepth \relax,
- color = "\attachmentparameter\c!color",
- colormodel = \number\attribute\colormodelattribute,
- colorvalue = \thecolorattribute{\attachmentparameter\c!color},
- transparencyvalue = \thetransparencyattribute{\attachmentparameter\c!color},
- symbol = "\currentattachmentsymbol",
- layer = "\attachmentparameter\c!textlayer",
+ \clf_insertattachment
+ tag {\currentattachment}%
+ registered {\currentattachmentregistered}%
+ width \dimexpr\currentattachmentwidth \relax
+ height \dimexpr\currentattachmentheight\relax
+ depth \dimexpr\currentattachmentdepth \relax
+ color {\attachmentparameter\c!color}%
+ colormodel \attribute\colormodelattribute
+ colorvalue \numexpr\thecolorattribute{\attachmentparameter\c!color}\relax % or are these chardefs
+ transparencyvalue \numexpr\thetransparencyattribute{\attachmentparameter\c!color}\relax % or are these chardefs
+ symbol {\currentattachmentsymbol}%
+ layer {\attachmentparameter\c!textlayer}%
% these will be overloaded by registered when available
- title = "\attachmentparameter\c!title",
- subtitle = "\attachmentparameter\c!subtitle",
- author = "\attachmentparameter\c!author",
- file = "\attachmentparameter\c!file",
- name = "\attachmentparameter\c!name",
- buffer = "\attachmentparameter\c!buffer",
- }}%
+ title {\attachmentparameter\c!title}%
+ subtitle {\attachmentparameter\c!subtitle}%
+ author {\attachmentparameter\c!author}%
+ file {\attachmentparameter\c!file}%
+ name {\attachmentparameter\c!name}%
+ buffer {\attachmentparameter\c!buffer}%
+ \relax
\setbox\b_scrn_attachment_link\hbox{\scrn_attachment_place}%
\wd\b_scrn_attachment_link\currentattachmentwidth
\ht\b_scrn_attachment_link\currentattachmentheight
@@ -219,11 +219,11 @@
\box\b_scrn_attachment_link}
\setvalue{\??attachmentmethod\v!hidden}%
- {\ctxcommand{insertattachment{
- tag = "\currentattachment",
- registered = "\currentattachmentregistered",
- method = "\v!hidden"
- }}}
+ {\clf_insertattachment
+ tag {\currentattachment}%
+ registered {\currentattachmentregistered}%
+ method {\v!hidden}%
+ \relax}
\def\scrn_attachment_place
{\executeifdefined
@@ -398,10 +398,10 @@
\scrn_comment_argument_ignore}
\def\scrn_comment_argument_indeed[#title][#settings]#text%
- {\doifassignmentelse{#title}
+ {\doifelseassignment{#title}
{\setupcurrentcomment[#title]}
{\setupcurrentcomment[\c!title=#title,#settings]}%
- \ctxlua{buffers.assign("\v!comment",\!!bs#text\!!es)}% todo: expansion control, but expanded by default (xml)
+ \clf_assignbuffer{\v!comment}{#text}\catcodetable\relax% todo: expansion control, but expanded by default (xml)
\scrn_comment_inject
\ignorespaces}
@@ -421,7 +421,7 @@
\def\scrn_comment_start_indeed[#title][#settings]%
{\bgroup
- \doifassignmentelse{#title}
+ \doifelseassignment{#title}
{\setupcurrentcomment[#title]}
{\setupcurrentcomment[\c!title=#title,#settings]}%
\unexpanded\def\scrn_comment_stop{\scrn_comment_inject\egroup}%
@@ -447,35 +447,35 @@
\edef\currentcommentheight{\commentparameter\c!height}%
\edef\currentcommentdepth {\commentparameter\c!depth }%
\ifx\currentcommentsymbol\empty
- \ifx\currentcommentwidth \v!fit\edef\currentcommentwidth {.5em}\fi
- \ifx\currentcommentheight\v!fit\edef\currentcommentheight{.5em}\fi
+ \ifx\currentcommentwidth \v!fit\edef\currentcommentwidth {.5\emwidth}\fi
+ \ifx\currentcommentheight\v!fit\edef\currentcommentheight{.5\emwidth}\fi
\ifx\currentcommentdepth \v!fit\let \currentcommentdepth \zeropoint\fi
\else
- \ctxcommand{presetsymbollist("\commentparameter\c!symbol")}%
+ \clf_presetsymbollist{\commentparameter\c!symbol}%
% we cannot yet ask for the wd/ht/dp of an xform else we could use those
\setbox\b_scrn_comment_symbol\hbox{\symbol[\lastpredefinedsymbol]}%
\ifx\currentcommentwidth \v!fit\edef\currentcommentwidth {\wd\b_scrn_comment_symbol}\fi
\ifx\currentcommentheight\v!fit\edef\currentcommentheight{\ht\b_scrn_comment_symbol}\fi
\ifx\currentcommentdepth \v!fit\edef\currentcommentdepth {\dp\b_scrn_comment_symbol}\fi
\fi
- \ctxcommand{insertcomment{
- tag = "\currentcomment",
- title = "\commentparameter\c!title",
- subtitle = "\commentparameter\c!subtitle",
- author = "\commentparameter\c!author",
- width = \number\dimexpr\currentcommentwidth,
- height = \number\dimexpr\currentcommentheight,
- depth = \number\dimexpr\currentcommentdepth,
- nx = \commentparameter\c!nx,
- ny = \commentparameter\c!ny,
- colormodel = \number\attribute\colormodelattribute,
- colorvalue = \thecolorattribute{\commentparameter\c!color},
- transparencyvalue = \thetransparencyattribute{\commentparameter\c!color},
- option = "\commentparameter\c!option", % todo
- symbol = "\commentparameter\c!symbol",
- buffer = "\v!comment",
- layer = "\commentparameter\c!textlayer"
- }}%
+ \clf_insertcomment
+ tag {\currentcomment}%
+ title {\commentparameter\c!title}%
+ subtitle {\commentparameter\c!subtitle}%
+ author {\commentparameter\c!author}%
+ width \dimexpr\currentcommentwidth\relax
+ height \dimexpr\currentcommentheight\relax
+ depth \dimexpr\currentcommentdepth\relax
+ nx {\commentparameter\c!nx}%
+ ny {\commentparameter\c!ny}%
+ colormodel \attribute\colormodelattribute
+ colorvalue \numexpr\thecolorattribute{\commentparameter\c!color}\relax
+ transparencyvalue \numexpr\thetransparencyattribute{\commentparameter\c!color}\relax
+ option {\commentparameter\c!option}% % todo
+ symbol {\commentparameter\c!symbol}%
+ buffer {\v!comment}%
+ layer {\commentparameter\c!textlayer}%
+ \relax
\wd\b_scrn_comment_link\currentcommentwidth
\ht\b_scrn_comment_link\currentcommentheight
\dp\b_scrn_comment_link\currentcommentdepth
@@ -542,17 +542,17 @@
{\dodoubleargument\scrn_soundtrack_indeed}
\def\scrn_soundtrack_indeed[#tag][#filename]%
- {\ctxcommand{registersoundclip{
- tag = "#tag",
- file = "#filename"
- }}}
+ {\clf_registersoundclip
+ tag {#tag}%
+ file {#filename}%
+ \relax}
\def\checksoundtrack#tag% yet untested in mkiv (also move management to lua)
{\iflocation
- \ctxcommand{insertsoundclip{
- tag = "#tag",
- ["repeat"] = "\directexternalsoundtrackparameter\c!option", % todo: pass option as-is
- }}%
+ \clf_insertsoundclip
+ tag {#tag}%
+ repeat {\directexternalsoundtrackparameter\c!option}%
+ \relax
\fi}
%D Renderings (not yet tested in mkvi):
@@ -571,31 +571,31 @@
\unexpanded\def\setinternalrendering{\dodoubleempty \scrn_rendering_set}
\def\scrn_rendering_use[#tag][#mime][#file][#option]%
- {\ctxcommand{registerrendering{
- type = "external",
- label = "#tag",
- mime = "#mime",
- filename = "#file",
- option = "#option",
- }}}
+ {\clf_registerrendering
+ type {external}%
+ label {#tag}%
+ mime {#mime}%
+ filename {#file}%
+ option {#option}%
+ \relax}
\def\scrn_rendering_set[#tag][#option]% {content} % crappy
{\bgroup
\dowithnextbox
- {\ctxcommand{registerrendering{
- type = "internal",
- label = "#tag",
- mime = "IRO", % brrr
- filename = "#tag",
- option = "#option",
- }}%
+ {\clf_registerrendering
+ type {internal}%
+ label {#tag}%
+ mime {IRO}% brrr
+ filename {#tag}%
+ option {#option}%
+ \relax
\let\objectoffset\zeropoint
\setobject{IRO}{#tag}\hbox{\box\nextbox}%
\egroup}%
\hbox}
-\def\renderingtype #tag{\ctxcommand{renderingvar("#tag","type")}}
-\def\renderingoption#tag{\ctxcommand{renderingvar("#tag","option")}}
+\def\renderingtype #tag{\clf_renderingvar{#tag}{type}}
+\def\renderingoption#tag{\clf_renderingvar{#tag}{option}}
\newdimen\d_scrn_rendering_width \d_scrn_rendering_width 8cm
\newdimen\d_scrn_rendering_height \d_scrn_rendering_height 6cm
@@ -648,13 +648,14 @@
\letrenderingwindowparameter\c!offset\v!overlay
\inheritedrenderingwindowframed
{\vfill
- \ctxcommand{insertrenderingwindow {
- label = "\currentrendering",
- width = \number\d_scrn_rendering_width,
- height = \number\d_scrn_rendering_height,
- option = "\renderingoption\currentrendering",
- page = \number\m_scrn_rendering_page,
- }}\hfill}%
+ \clf_insertrenderingwindow
+ label {\currentrendering}%
+ width \d_scrn_rendering_width
+ height \d_scrn_rendering_height
+ option {\renderingoption\currentrendering}%
+ page \m_scrn_rendering_page
+ \relax
+ \hfill}%
\egroup}
%D Linkedlists (not tested in mkvi):
@@ -676,7 +677,7 @@
% \let\setupbutton\setuplinkedlists\setuplinkedlist
%
% \appendtoks
-% \ctxcommand{definelinkedlist("\currentlinkedlist")}%
+% \clf_definelinkedlist{\currentlinkedlist}%
% \to \everydefinelinkedlist
%
% \def\setlinkedlistproperties#1#2#3#4#5#6%
@@ -694,8 +695,8 @@
% \edef\currentlinkedlist{#1}%
% \ifcsname\??lk\currentlinkedlist\s!parent\endcsname
% \hskip\linkedlistparameter\c!distance
-% \ctxcommand{addlinklistelement("\currentlinkedlist")}%
-% \expanded{\ctxlatelua{commands.enhancelinkedlist("\currentlinkedlist",\currentlink)}}% can also be done at the lua end
+% \clf_addlinklistelement{\currentlinkedlist}%
+% \expanded{\ctxlatecommand{enhancelinkedlist("\currentlinkedlist",\currentlink)}}% can also be done at the lua end
% \dogotosomepage {\??lk\currentlinkedlist}\gotobegincharacter \firstlink
% \ifnum\noflinks>\plustwo
% \dogotosomepage{\??lk\currentlinkedlist}\gobackwardcharacter\previouslink
diff --git a/tex/context/base/scrp-cjk.lua b/tex/context/base/scrp-cjk.lua
index 681fc4c43..9050da6be 100644
--- a/tex/context/base/scrp-cjk.lua
+++ b/tex/context/base/scrp-cjk.lua
@@ -14,15 +14,29 @@ if not modules then modules = { } end modules ['scrp-cjk'] = {
-- sense either because otherwise a wanted space at the end of a
 -- line would have to be a hard coded one.
-local utfchar = utf.char
-
-local insert_node_after = nodes.insert_after
-local insert_node_before = nodes.insert_before
-local remove_node = nodes.remove
-local copy_node = nodes.copy
-local traverse_id = nodes.traverse_id
-
-local nodepool = nodes.pool
+local utfchar            = utf.char
+
+local nuts = nodes.nuts
+local tonut = nodes.tonut
+local tonode = nodes.tonode
+
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+local copy_node = nuts.copy
+local remove_node = nuts.remove
+local traverse_id = nuts.traverse_id
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getattr = nuts.getattr
+local getsubtype = nuts.getsubtype
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+
+local nodepool = nuts.pool
local new_glue = nodepool.glue
local new_kern = nodepool.kern
local new_penalty = nodepool.penalty
@@ -88,20 +102,20 @@ end
-- at font definition time and/or just assume a correct font
local function trace_detail(current,what)
- local prev = current.prev
- local c_id = current.id
- local p_id = prev and prev.id
+ local prev = getprev(current)
+ local c_id = getid(current)
+ local p_id = prev and getid(prev)
if c_id == glyph_code then
- local c_ch = current.char
+ local c_ch = getchar(current)
if p_id == glyph_code then
- local p_ch = p_id and prev.char
+ local p_ch = p_id and getchar(prev)
report_details("[%C %a] [%s] [%C %a]",p_ch,hash[p_ch],what,c_ch,hash[c_ch])
else
report_details("[%s] [%C %a]",what,c_ch,hash[c_ch])
end
else
if p_id == glyph_code then
- local p_ch = p_id and prev.char
+ local p_ch = p_id and getchar(prev)
report_details("[%C %a] [%s]",p_ch,hash[p_ch],what)
else
report_details("[%s]",what)
@@ -110,8 +124,8 @@ local function trace_detail(current,what)
end
local function trace_detail_between(p,n,what)
- local p_ch = p.char
- local n_ch = n.char
+ local p_ch = getchar(p)
+ local n_ch = getchar(n)
report_details("[%C %a] [%s] [%C %a]",p_ch,hash[p_ch],what,n_ch,hash[n_ch])
end
@@ -427,29 +441,29 @@ local function process(head,first,last)
if first ~= last then
local lastfont, previous, last = nil, "start", nil
while true do
- local upcoming, id = first.next, first.id
+ local upcoming, id = getnext(first), getid(first)
if id == glyph_code then
- local a = first[a_scriptstatus]
+ local a = getattr(first,a_scriptstatus)
local current = numbertocategory[a]
local action = injectors[previous]
if action then
action = action[current]
if action then
- local font = first.font
+ local font = getfont(first)
if font ~= lastfont then
lastfont = font
- set_parameters(font,numbertodataset[first[a_scriptinjection]])
+ set_parameters(font,numbertodataset[getattr(first,a_scriptinjection)])
end
action(head,first)
end
end
previous = current
else -- glue
- local p, n = first.prev, upcoming
+ local p, n = getprev(first), upcoming
if p and n then
- local pid, nid = p.id, n.id
+ local pid, nid = getid(p), getid(n)
if pid == glyph_code and nid == glyph_code then
- local pa, na = p[a_scriptstatus], n[a_scriptstatus]
+ local pa, na = getattr(p,a_scriptstatus), getattr(n,a_scriptstatus)
local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
if not pcjk or not ncjk
or pcjk == "korean" or ncjk == "korean"
@@ -495,23 +509,24 @@ scripts.installmethod {
}
function scripts.decomposehangul(head)
+ local head = tonut(head)
local done = false
for current in traverse_id(glyph_code,head) do
- local lead_consonant, medial_vowel, tail_consonant = decomposed(current.char)
+ local lead_consonant, medial_vowel, tail_consonant = decomposed(getchar(current))
if lead_consonant then
- current.char = lead_consonant
+ setfield(current,"char",lead_consonant)
local m = copy_node(current)
- m.char = medial_vowel
+ setfield(m,"char",medial_vowel)
head, current = insert_node_after(head,current,m)
if tail_consonant then
local t = copy_node(current)
- t.char = tail_consonant
+ setfield(t,"char",tail_consonant)
head, current = insert_node_after(head,current,t)
end
done = true
end
end
- return head, done
+ return tonode(head), done
end
-- nodes.tasks.prependaction("processors","normalizers","scripts.decomposehangul")
@@ -682,29 +697,29 @@ local function process(head,first,last)
if first ~= last then
local lastfont, previous, last = nil, "start", nil
while true do
- local upcoming, id = first.next, first.id
+ local upcoming, id = getnext(first), getid(first)
if id == glyph_code then
- local a = first[a_scriptstatus]
+ local a = getattr(first,a_scriptstatus)
local current = numbertocategory[a]
local action = injectors[previous]
if action then
action = action[current]
if action then
- local font = first.font
+ local font = getfont(first)
if font ~= lastfont then
lastfont = font
- set_parameters(font,numbertodataset[first[a_scriptinjection]])
+ set_parameters(font,numbertodataset[getattr(first,a_scriptinjection)])
end
action(head,first)
end
end
previous = current
else -- glue
- local p, n = first.prev, upcoming
+ local p, n = getprev(first), upcoming
if p and n then
- local pid, nid = p.id, n.id
+ local pid, nid = getid(p), getid(n)
if pid == glyph_code and nid == glyph_code then
- local pa, na = p[a_scriptstatus], n[a_scriptstatus]
+ local pa, na = getattr(p,a_scriptstatus), getattr(n,a_scriptstatus)
local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
if not pcjk or not ncjk
or pcjk == "korean" or ncjk == "korean"
@@ -904,34 +919,32 @@ local function process(head,first,last)
if first ~= last then
local lastfont, previous, last = nil, "start", nil
while true do
- local upcoming, id = first.next, first.id
+ local upcoming, id = getnext(first), getid(first)
if id == glyph_code then
- local a = first[a_scriptstatus]
+ local a = getattr(first,a_scriptstatus)
local current = numbertocategory[a]
local action = injectors[previous]
if action then
action = action[current]
if action then
- local font = first.font
+ local font = getfont(first)
if font ~= lastfont then
lastfont = font
- set_parameters(font,numbertodataset[first[a_scriptinjection]])
+ set_parameters(font,numbertodataset[getattr(first,a_scriptinjection)])
end
action(head,first)
end
end
previous = current
-
--- elseif id == math_code then
--- upcoming = end_of_math(current).next
--- previous = "start"
-
+ -- elseif id == math_code then
+ -- upcoming = getnext(end_of_math(current))
+ -- previous = "start"
else -- glue
- local p, n = first.prev, upcoming -- we should remember prev
+ local p, n = getprev(first), upcoming -- we should remember prev
if p and n then
- local pid, nid = p.id, n.id
+ local pid, nid = getid(p), getid(n)
if pid == glyph_code and nid == glyph_code then
- local pa, na = p[a_scriptstatus], n[a_scriptstatus]
+ local pa, na = getattr(p,a_scriptstatus), getattr(n,a_scriptstatus)
local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
if not pcjk or not ncjk
or pcjk == "korean" or ncjk == "korean"
@@ -940,17 +953,17 @@ local function process(head,first,last)
or pcjk == "half_width_close" or ncjk == "half_width_open" then -- extra compared to korean
previous = "start"
else -- if head ~= first then
-if id == glue_code and first.subtype == userskip_code then -- also scriptstatus check?
- -- for the moment no distinction possible between space and userskip
- local w = first.spec.width
- local s = spacedata[p.font]
- if w == s then -- could be option
- if trace_details then
- trace_detail_between(p,n,"space removed")
- end
- remove_node(head,first,true)
- end
-end
+ if id == glue_code and getsubtype(first) == userskip_code then -- also scriptstatus check?
+ -- for the moment no distinction possible between space and userskip
+ local w = getfield(getfield(first,"spec"),"width")
+ local s = spacedata[getfont(p)]
+ if w == s then -- could be option
+ if trace_details then
+ trace_detail_between(p,n,"space removed")
+ end
+ remove_node(head,first,true)
+ end
+ end
previous = pcjk
-- else
-- previous = pcjk
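
The scrp-* changes all follow the same recipe: convert the incoming node list to its
direct ("nut") representation once, use the nuts accessors instead of field indexing,
and convert back on return. A stripped-down sketch of that shape (the handler itself is
invented, the accessors are the ones imported above):

    local nuts       = nodes.nuts
    local tonut      = nuts.tonut
    local tonode     = nuts.tonode
    local getid      = nuts.getid
    local getnext    = nuts.getnext

    local glyph_code = nodes.nodecodes.glyph

    local function demo_handler(head)            -- head arrives as a regular node
        local h       = tonut(head)              -- switch to the direct variant once
        local current = h
        local done    = false
        while current do
            if getid(current) == glyph_code then -- getters instead of current.id etc.
                done = true
            end
            current = getnext(current)           -- instead of current.next
        end
        return tonode(h), done                   -- hand a regular node back to the caller
    end
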
diff --git a/tex/context/base/scrp-eth.lua b/tex/context/base/scrp-eth.lua
index 597afa1b5..8ecbce522 100644
--- a/tex/context/base/scrp-eth.lua
+++ b/tex/context/base/scrp-eth.lua
@@ -9,9 +9,17 @@ if not modules then modules = { } end modules ['scrp-eth'] = {
-- at some point I will review the script code but for the moment we
 -- do it this way; so no space settings like with cjk yet
-local insert_node_before = node.insert_before
+local nuts = nodes.nuts
-local nodepool = nodes.pool
+local getnext = nuts.getnext
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getattr = nuts.getattr
+
+local insert_node_before = nuts.insert_before
+
+local nodepool = nuts.pool
local new_glue = nodepool.glue
local new_penalty = nodepool.penalty
@@ -37,13 +45,13 @@ local inter_character_stretch_factor = 1
local inter_character_shrink_factor = 1
local function space_glue(current)
- local data = numbertodataset[current[a_scriptinjection]]
+ local data = numbertodataset[getattr(current,a_scriptinjection)]
if data then
inter_character_space_factor = data.inter_character_space_factor or 1
inter_character_stretch_factor = data.inter_character_stretch_factor or 1
inter_character_shrink_factor = data.inter_character_shrink_factor or 1
end
- local font = current.font
+ local font = getfont(current)
if lastfont ~= font then
local pf = parameters[font]
space = pf.space
@@ -104,9 +112,9 @@ local function process(head,first,last)
local injector = false
local current = first
while current do
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- local scriptstatus = current[a_scriptstatus]
+ local scriptstatus = getattr(current,a_scriptstatus)
local category = numbertocategory[scriptstatus]
if injector then
local action = injector[category]
@@ -121,7 +129,7 @@ local function process(head,first,last)
if current == last then
break
else
- current = current.next
+ current = getnext(current)
end
end
end
diff --git a/tex/context/base/scrp-ini.lua b/tex/context/base/scrp-ini.lua
index 56422e622..3c3517542 100644
--- a/tex/context/base/scrp-ini.lua
+++ b/tex/context/base/scrp-ini.lua
@@ -14,7 +14,7 @@ local attributes, nodes, node = attributes, nodes, node
local trace_analyzing = false trackers.register("scripts.analyzing", function(v) trace_analyzing = v end)
local trace_injections = false trackers.register("scripts.injections", function(v) trace_injections = v end)
local trace_splitting = false trackers.register("scripts.splitting", function(v) trace_splitting = v end)
-local trace_splitdetail = false trackers.register("scripts.splitring.detail", function(v) trace_splitdetail = v end)
+local trace_splitdetail = false trackers.register("scripts.splitting.detail", function(v) trace_splitdetail = v end)
local report_preprocessing = logs.reporter("scripts","preprocessing")
local report_splitting = logs.reporter("scripts","splitting")
@@ -22,14 +22,13 @@ local report_splitting = logs.reporter("scripts","splitting")
local utfbyte, utfsplit = utf.byte, utf.split
local gmatch = string.gmatch
-local first_glyph = node.first_glyph or node.first_character
-local traverse_id = node.traverse_id
-
local texsetattribute = tex.setattribute
local nodecodes = nodes.nodecodes
local unsetvalue = attributes.unsetvalue
+local implement = interfaces.implement
+
local glyph_code = nodecodes.glyph
local glue_code = nodecodes.glue
@@ -48,9 +47,23 @@ local setmetatableindex = table.setmetatableindex
local enableaction = nodes.tasks.enableaction
local disableaction = nodes.tasks.disableaction
-local insert_node_after = node.insert_after
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getchar = nuts.getchar
+local getfont = nuts.getfont
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local insert_node_after = nuts.insert_after
+local first_glyph = nuts.first_glyph
+local traverse_id = nuts.traverse_id
+
+local nodepool = nuts.pool
-local nodepool = nodes.pool
local new_glue = nodepool.glue
local new_rule = nodepool.rule
local new_penalty = nodepool.penalty
@@ -400,7 +413,7 @@ scripts.numbertocategory = numbertocategory
local function colorize(start,stop)
for n in traverse_id(glyph_code,start) do
- local kind = numbertocategory[n[a_scriptstatus]]
+ local kind = numbertocategory[getattr(n,a_scriptstatus)]
if kind then
local ac = scriptcolors[kind]
if ac then
@@ -432,16 +445,17 @@ end
-- we can have a fonts.hashes.originals
function scripts.injectors.handler(head)
+ head = tonut(head)
local start = first_glyph(head) -- we already have glyphs here (subtype 1)
if not start then
- return head, false
+ return tonode(head), false
else
local last_a, normal_process, lastfont, originals = nil, nil, nil, nil
local done, first, last, ok = false, nil, nil, false
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
- local a = start[a_scriptinjection]
+ local a = getattr(start,a_scriptinjection)
if a then
if a ~= last_a then
if first then
@@ -463,23 +477,24 @@ function scripts.injectors.handler(head)
normal_process = handler.injector
end
if normal_process then
- local f = start.font
+ -- wrong: originals are indices !
+ local f = getfont(start)
if f ~= lastfont then
originals = fontdata[f].resources
if resources then
originals = resources.originals
else
- -- can't happen
+ originals = nil -- can't happen
end
lastfont = f
end
- local c = start.char
- if originals then
+ local c = getchar(start)
+ if originals and type(originals) == "number" then
c = originals[c] or c
end
local h = hash[c]
if h then
- start[a_scriptstatus] = categorytonumber[h]
+ setattr(start,a_scriptstatus,categorytonumber[h])
if not first then
first, last = start, start
else
@@ -540,7 +555,7 @@ function scripts.injectors.handler(head)
first, last = nil, nil
end
end
- start = start.next
+ start = getnext(start)
end
if ok then
if trace_analyzing then
@@ -553,7 +568,7 @@ function scripts.injectors.handler(head)
end
done = true
end
- return head, done
+ return tonode(head), done
end
end
@@ -683,11 +698,11 @@ end)
local categories = characters.categories or { }
local function hit(root,head)
- local current = head.next
+ local current = getnext(head)
local lastrun = false
local lastfinal = false
- while current and current.id == glyph_code do
- local char = current.char
+ while current and getid(current) == glyph_code do
+ local char = getchar(current)
local newroot = root[char]
if newroot then
local final = newroot.final
@@ -701,7 +716,7 @@ local function hit(root,head)
else
return lastrun, lastfinal
end
- current = current.next
+ current = getnext(current)
end
if lastrun then
return lastrun, lastfinal
@@ -710,12 +725,13 @@ end
local tree, attr, proc
-function splitters.handler(head)
+function splitters.handler(head) -- todo: also first_glyph test
+ head = tonut(head)
local current = head
local done = false
while current do
- if current.id == glyph_code then
- local a = current[a_scriptsplitting]
+ if getid(current) == glyph_code then
+ local a = getattr(current,a_scriptsplitting)
if a then
if a ~= attr then
local handler = numbertohandler[a]
@@ -724,14 +740,14 @@ function splitters.handler(head)
proc = handler.splitter
end
if proc then
- local root = tree[current.char]
+ local root = tree[getchar(current)]
if root then
-- we don't check for attributes in the hitter (yet)
local last, final = hit(root,current)
if last then
- local next = last.next
- if next and next.id == glyph_code then
- local nextchar = next.char
+ local next = getnext(last)
+ if next and getid(next) == glyph_code then
+ local nextchar = getchar(next)
if tree[nextchar] then
if trace_splitdetail then
if type(final) == "string" then
@@ -760,9 +776,9 @@ function splitters.handler(head)
end
end
end
- current = current.next
+ current = getnext(current)
end
- return head, done
+ return tonode(head), done
end
local function marker(head,current,font,color) -- could become: nodes.tracers.marker
@@ -792,8 +808,8 @@ end
local last_a, last_f, last_s, last_q
function splitters.insertafter(handler,head,first,last,detail)
- local a = first[a_scriptsplitting]
- local f = first.font
+ local a = getattr(first,a_scriptsplitting)
+ local f = getfont(first)
if a ~= last_a or f ~= last_f then
last_s = emwidths[f] * numbertodataset[a].inter_word_stretch_factor
last_a = a
@@ -870,15 +886,15 @@ setmetatableindex(cache_nop,function(t,k) local v = { } t[k] = v return v end)
-- playing nice
function autofontfeature.handler(head)
- for n in traverse_id(glyph_code,head) do
- -- if n[a_scriptinjection] then
+ for n in traverse_id(glyph_code,tonut(head)) do
+ -- if getattr(n,a_scriptinjection) then
-- -- already tagged by script feature, maybe some day adapt
-- else
- local char = n.char
+ local char = getchar(n)
local script = otfscripts[char]
if script then
- local dynamic = n[0] or 0
- local font = n.font
+ local dynamic = getattr(n,0) or 0
+ local font = getfont(n)
if dynamic > 0 then
local slot = cache_yes[font]
local attr = slot[script]
@@ -904,7 +920,7 @@ function autofontfeature.handler(head)
end
end
if attr ~= 0 then
- n[0] = attr
+ setattr(n,0,attr)
-- maybe set scriptinjection when associated
end
end
@@ -924,5 +940,22 @@ function autofontfeature.disable()
disableaction("processors","scripts.autofontfeature.handler")
end
-commands.enableautofontscript = autofontfeature.enable
-commands.disableautofontscript = autofontfeature.disable
+implement {
+ name = "enableautofontscript",
+ actions = autofontfeature.enable
+}
+
+implement {
+ name = "disableautofontscript",
+    actions = autofontfeature.disable
+}
+
+implement {
+ name = "setscript",
+ actions = scripts.set,
+ arguments = { "string", "string", "string" }
+}
+
+implement {
+ name = "resetscript",
+ actions = scripts.reset
+}
diff --git a/tex/context/base/scrp-ini.mkiv b/tex/context/base/scrp-ini.mkiv
index 4a27dd8e2..cd060c02b 100644
--- a/tex/context/base/scrp-ini.mkiv
+++ b/tex/context/base/scrp-ini.mkiv
@@ -41,14 +41,14 @@
\to \everydefinescript
\unexpanded\def\scripts_basics_set
- {\ctxlua{scripts.set("\currentscript","\scriptparameter\c!method","\scriptparameter\c!preset")}}
+ {\clf_setscript{\currentscript}{\scriptparameter\c!method}{\scriptparameter\c!preset}}
\unexpanded\def\setscript[#1]%
{\edef\currentscript{#1}%
\scripts_basics_set}
\unexpanded\def\resetscript
- {\ctxlua{scripts.reset()}}
+ {\clf_resetscript}
\unexpanded\def\startscript[#1]%
{\begingroup
@@ -86,8 +86,8 @@
\fi
\to \everysetupscript
-\unexpanded\def\enableautofontscript {\ctxcommand{enableautofontscript ()}}
-\unexpanded\def\disableautofontscript{\ctxcommand{disableautofontscript()}}
+\unexpanded\def\enableautofontscript {\clf_enableautofontscript }
+\unexpanded\def\disableautofontscript{\clf_disableautofontscript}
\definefontfeature[latn][script=latn]
\definefontfeature[grek][script=grek]
diff --git a/tex/context/base/sort-ini.lua b/tex/context/base/sort-ini.lua
index 479d1c489..9c4d5acee 100644
--- a/tex/context/base/sort-ini.lua
+++ b/tex/context/base/sort-ini.lua
@@ -39,19 +39,28 @@ relatively easy to do.
how they map onto this mechanism. I've learned that users can come up
with any demand so nothing here is frozen.
+Todo: I ran into the Unicode Collation document and noticed that
+there are some similarities (like the weights) but using that method
+would still demand extra code for language specifics. One option is
+to use the allkeys.txt file for the uc vectors but then we would also
+use the collapsed key (sq, code is now commented). In fact, we could
+just hook those into the replacer code that we run beforehand.
+
In the future index entries will become more clever, i.e. they will
have language etc properties that then can be used.
]]--
-local gsub, rep, sub, sort, concat = string.gsub, string.rep, string.sub, table.sort, table.concat
+local gsub, rep, sub, sort, concat, tohash, format = string.gsub, string.rep, string.sub, table.sort, table.concat, table.tohash, string.format
local utfbyte, utfchar, utfcharacters, utfvalues = utf.byte, utf.char, utf.characters, utf.values
local next, type, tonumber, rawget, rawset = next, type, tonumber, rawget, rawset
+local P, Cs, R, S, lpegmatch = lpeg.P, lpeg.Cs, lpeg.R, lpeg.S, lpeg.match
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
local trace_tests = false trackers.register("sorters.tests", function(v) trace_tests = v end)
local trace_methods = false trackers.register("sorters.methods", function(v) trace_methods = v end)
+local trace_orders = false trackers.register("sorters.orders", function(v) trace_orders = v end)
local report_sorters = logs.reporter("languages","sorters")
@@ -65,7 +74,9 @@ local digitsoffset = 0x20000 -- frozen
local digitsmaximum = 0xFFFFF -- frozen
local lccodes = characters.lccodes
+local uccodes = characters.uccodes
local lcchars = characters.lcchars
+local ucchars = characters.ucchars
local shchars = characters.shchars
local fscodes = characters.fscodes
local fschars = characters.fschars
@@ -81,8 +92,8 @@ local v_after = variables.after
local v_first = variables.first
local v_last = variables.last
-local validmethods = table.tohash {
- -- "ch", -- raw character
+local validmethods = tohash {
+ "ch", -- raw character (for tracing)
"mm", -- minus mapping
"zm", -- zero mapping
"pm", -- plus mapping
@@ -101,11 +112,11 @@ local predefinedmethods = {
}
sorters = {
- comparers = comparers,
- splitters = splitters,
- definitions = definitions,
- tracers = tracers,
- constants = {
+ comparers = comparers,
+ splitters = splitters,
+ definitions = definitions,
+ tracers = tracers,
+ constants = {
ignoredoffset = ignoredoffset,
replacementoffset = replacementoffset,
digitsoffset = digitsoffset,
@@ -113,6 +124,7 @@ sorters = {
defaultlanguage = v_default,
defaultmethod = v_default,
defaultdigits = v_numbers,
+ validmethods = validmethods,
}
}
@@ -120,7 +132,7 @@ local sorters = sorters
local constants = sorters.constants
local data, language, method, digits
-local replacements, m_mappings, z_mappings, p_mappings, entries, orders, lower, upper, method, sequence
+local replacements, m_mappings, z_mappings, p_mappings, entries, orders, lower, upper, method, sequence, usedinsequence
local thefirstofsplit
local mte = { -- todo: assign to t
@@ -156,6 +168,7 @@ local mte = { -- todo: assign to t
}
local noorder = false
+local nothing = { 0 }
local function preparetables(data)
local orders, lower, m_mappings, z_mappings, p_mappings = data.orders, data.lower, { }, { }, { }
@@ -168,12 +181,12 @@ local function preparetables(data)
__index = function(t,k)
local n, nn
if k then
- if trace_tests then
+ if trace_orders then
report_sorters("simplifing character %C",k)
end
local l = lower[k] or lcchars[k]
if l then
- if trace_tests then
+ if trace_orders then
report_sorters(" 1 lower: %C",l)
end
local ml = rawget(t,l)
@@ -184,7 +197,7 @@ local function preparetables(data)
nn = nn + 1
n[nn] = ml[i] + (t.__delta or 0)
end
- if trace_tests then
+ if trace_orders then
report_sorters(" 2 order: % t",n)
end
end
@@ -192,7 +205,7 @@ local function preparetables(data)
if not n then
local s = shchars[k] -- maybe all components?
if s and s ~= k then
- if trace_tests then
+ if trace_orders then
report_sorters(" 3 shape: %C",s)
end
n = { }
@@ -200,7 +213,7 @@ local function preparetables(data)
for l in utfcharacters(s) do
local ml = rawget(t,l)
if ml then
- if trace_tests then
+ if trace_orders then
report_sorters(" 4 keep: %C",l)
end
if ml then
@@ -212,7 +225,7 @@ local function preparetables(data)
else
l = lower[l] or lcchars[l]
if l then
- if trace_tests then
+ if trace_orders then
report_sorters(" 5 lower: %C",l)
end
local ml = rawget(t,l)
@@ -226,44 +239,45 @@ local function preparetables(data)
end
end
else
- -- -- we probably never enter this branch
- -- -- fschars returns a single char
- --
- -- s = fschars[k]
- -- if s and s ~= k then
- -- if trace_tests then
- -- report_sorters(" 6 split: %s",s)
- -- end
- -- local ml = rawget(t,s)
- -- if ml then
- -- n = { }
- -- nn = 0
- -- for i=1,#ml do
- -- nn = nn + 1
- -- n[nn] = ml[i]
- -- end
- -- end
- -- end
- local b = utfbyte(k)
- n = decomposed[b] or { b }
- if trace_tests then
- report_sorters(" 6 split: %s",utf.tostring(b)) -- todo
+ -- this is a kind of last resort branch that we might want to revise
+ -- one day
+ --
+ -- local b = utfbyte(k)
+ -- n = decomposed[b] or { b }
+ -- if trace_tests then
+ -- report_sorters(" 6 split: %s",utf.tostring(b)) -- todo
+ -- end
+ --
+ -- we need to move way above valid order (new per 2014-10-16) .. maybe we
+ -- need to move it even more up to get numbers right (not all have orders)
+ --
+ if k == "\000" then
+ n = nothing -- shared
+ if trace_orders then
+ report_sorters(" 6 split: space") -- todo
+ end
+ else
+ local b = 2 * #orders + utfbyte(k)
+ n = decomposed[b] or { b } -- could be shared tables
+ if trace_orders then
+ report_sorters(" 6 split: %s",utf.tostring(b)) -- todo
+ end
end
end
if n then
- if trace_tests then
+ if trace_orders then
report_sorters(" 7 order: % t",n)
end
else
n = noorder
- if trace_tests then
+ if trace_orders then
report_sorters(" 8 order: 0")
end
end
end
else
n = noorder
- if trace_tests then
+ if trace_orders then
report_sorters(" 9 order: 0")
end
end
@@ -298,11 +312,11 @@ local function update() -- prepare parent chains, needed when new languages are
end
end
-local function setlanguage(l,m,d,u)
+local function setlanguage(l,m,d,u) -- this will become a specification table
language = (l ~= "" and l) or constants.defaultlanguage
- data = definitions[language or constants.defaultlanguage] or definitions[constants.defaultlanguage]
- method = (m ~= "" and m) or data.method or constants.defaultmethod
- digits = (d ~= "" and d) or data.digits or constants.defaultdigits
+ data = definitions[language or constants.defaultlanguage] or definitions[constants.defaultlanguage]
+ method = (m ~= "" and m) or (data.method ~= "" and data.method) or constants.defaultmethod
+ digits = (d ~= "" and d) or (data.digits ~= "" and data.digits) or constants.defaultdigits
if trace_tests then
report_sorters("setting language %a, method %a, digits %a",language,method,digits)
end
@@ -333,7 +347,10 @@ local function setlanguage(l,m,d,u)
report_sorters("invalid sorter method %a in %a",s,method)
end
end
+ usedinsequence = tohash(sequence)
data.sequence = sequence
+ data.usedinsequence = usedinsequence
+-- usedinsequence.ch = true -- better just store the string
if trace_tests then
report_sorters("using sort sequence: % t",sequence)
end
@@ -353,6 +370,8 @@ end
-- tricky: { 0, 0, 0 } vs { 0, 0, 0, 0 } => longer wins and mm, pm, zm can have them
+-- inlining and checking first slot first doesn't speed up (the 400K complex author sort)
+
local function basicsort(sort_a,sort_b)
if sort_a and sort_b then
local na = #sort_a
@@ -360,19 +379,27 @@ local function basicsort(sort_a,sort_b)
if na > nb then
na = nb
end
- for i=1,na do
- local ai, bi = sort_a[i], sort_b[i]
- if ai > bi then
- return 1
- elseif ai < bi then
- return -1
+ if na > 0 then
+ for i=1,na do
+ local ai, bi = sort_a[i], sort_b[i]
+ if ai > bi then
+ return 1
+ elseif ai < bi then
+ return -1
+ end
end
end
end
return 0
end
-function comparers.basic(a,b) -- trace ea and eb
+-- todo: compile compare function
+
+local function basic(a,b) -- trace ea and eb
+ if a == b then
+ -- hashed (shared) entries
+ return 0
+ end
local ea, eb = a.split, b.split
local na, nb = #ea, #eb
if na == 0 and nb == 0 then
@@ -432,25 +459,72 @@ function comparers.basic(a,b) -- trace ea and eb
end
end
-local function numify(s)
- s = digitsoffset + tonumber(s) -- alternatively we can create range
- if s > digitsmaximum then
- s = digitsmaximum
+-- if we use sq:
+--
+-- local function basic(a,b) -- trace ea and eb
+-- local ea, eb = a.split, b.split
+-- local na, nb = #ea, #eb
+-- if na == 0 and nb == 0 then
+-- -- simple variant (single word)
+-- return basicsort(ea.sq,eb.sq)
+-- else
+-- -- complex variant, used in register (multiple words)
+-- local result = 0
+-- for i=1,nb < na and nb or na do
+-- local eai, ebi = ea[i], eb[i]
+-- result = basicsort(ea.sq,eb.sq)
+-- if result ~= 0 then
+-- return result
+-- end
+-- end
+-- if result ~= 0 then
+-- return result
+-- elseif na > nb then
+-- return 1
+-- elseif nb > na then
+-- return -1
+-- else
+-- return 0
+-- end
+-- end
+-- end
+
+comparers.basic = basic
+
+function sorters.basicsorter(a,b)
+ return basic(a,b) == -1
+end
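
The new basicsorter is just an adapter: comparers.basic reports -1, 0 or 1, while
table.sort wants a boolean "a sorts before b". The toy below shows the same adapter
shape with a trivial comparer (not the real split-based one):

    local function compare(a,b)        -- returns -1, 0 or 1, like comparers.basic
        if a == b then
            return 0
        elseif a < b then
            return -1
        else
            return 1
        end
    end

    local function sorter(a,b)         -- the boolean form that table.sort expects
        return compare(a,b) == -1
    end

    local list = { "beta", "alpha", "gamma" }
    table.sort(list,sorter)            -- alpha, beta, gamma
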
+
+local function numify(old)
+ if digits == v_numbers then -- was swapped, fixed 2014-11-10
+ local new = digitsoffset + tonumber(old) -- alternatively we can create range
+ if new > digitsmaximum then
+ new = digitsmaximum
+ end
+ return utfchar(new)
+ else
+ return old
end
- return utfchar(s)
+end
+
+local pattern = nil
+
+local function prepare()
+ pattern = Cs( (
+ characters.tex.toutfpattern()
+ + lpeg.patterns.whitespace / "\000"
+ + (P("\\") / "") * R("AZ")^0 * (P(-1) + #(1-R("AZ")))
+ + (P("\\") * P(1) * R("az","AZ")^0) / ""
+ + S("[](){}$\"'") / ""
+ + R("09")^1 / numify
+ + P(1)
+ )^0 )
+ return pattern
end
function sorters.strip(str) -- todo: only letters and such
if str and str ~= "" then
- -- todo: make a decent lpeg
- str = gsub(str,"\\[\"\'~^`]*","") -- \"e -- hm, too greedy
- str = gsub(str,"\\%S*","") -- the rest
- str = gsub(str,"%s","\001") -- can be option
- str = gsub(str,"[%s%[%](){}%$\"\']*","")
- if digits == v_numbers then
- str = gsub(str,"(%d+)",numify) -- sort numbers properly
- end
- return str
+ return lpegmatch(pattern or prepare(),str)
else
return ""
end
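
The old gsub chain in sorters.strip is now one Cs substitution pattern: control
sequences and bracketing characters vanish, whitespace becomes "\000" (a low sort key)
and digit runs are collapsed by numify so that numbers compare numerically. A
standalone, simplified version of that idea (without the TeX-specific toutfpattern)
runs with plain lpeg:

    local lpeg = require("lpeg")
    local P, R, S, Cs = lpeg.P, lpeg.R, lpeg.S, lpeg.Cs
    local utfchar = utf8.char               -- lua 5.3+ / luatex; utf.char in context

    local digitsoffset = 0x20000            -- same frozen offset as above

    local function numify(s)
        -- one run of digits becomes one high code point, so "2" sorts before "10"
        return utfchar(digitsoffset + tonumber(s))
    end

    local pattern = Cs ( (
          S(" \t\n\r")^1  / "\000"          -- whitespace: lowest sort key
        + S("[](){}$\"'") / ""              -- bracketing noise: dropped
        + R("09")^1       / numify          -- digit runs: numified
        + P(1)                              -- anything else: kept as-is
    )^0 )

    print(lpeg.match(pattern,"item 10"))    -- "item" .. "\0" .. utfchar(0x2000A)
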
@@ -477,7 +551,7 @@ sorters.firstofsplit = firstofsplit
-- for the moment we use an inefficient bunch of tables but once
-- we know what combinations make sense we can optimize this
-function splitters.utf(str) -- we could append m and u but this is cleaner, s is for tracing
+function splitters.utf(str,checked) -- we could append m and u but this is cleaner, s is for tracing
if #replacements > 0 then
-- todo make an lpeg for this
for k=1,#replacements do
@@ -518,10 +592,15 @@ function splitters.utf(str) -- we could append m and u but this is cleaner, s is
else
n = n + 1
local l = lower[sc]
- l = l and utfbyte(l) or lccodes[b]
+ l = l and utfbyte(l) or lccodes[b] or b
+ -- local u = upper[sc]
+ -- u = u and utfbyte(u) or uccodes[b] or b
if type(l) == "table" then
             l = l[1] -- there are currently no tables in lccodes but there can be some, some day
end
+ -- if type(u) == "table" then
+        --     u = u[1] -- there are currently no tables in uccodes but there can be some, some day
+ -- end
z_case[n] = l
if l ~= b then
m_case[n] = l - 1
@@ -580,18 +659,39 @@ function splitters.utf(str) -- we could append m and u but this is cleaner, s is
-- p_mapping = { p_mappings[fs][1] }
-- end
-- end
- local t = {
- ch = char,
- uc = byte,
- mc = m_case,
- zc = z_case,
- pc = p_case,
- mm = m_mapping,
- zm = z_mapping,
- pm = p_mapping,
- }
-
- return t
+ local result
+ if checked then
+ result = {
+ ch = trace_tests and char or nil, -- not in sequence
+ uc = usedinsequence.uc and byte or nil,
+ mc = usedinsequence.mc and m_case or nil,
+ zc = usedinsequence.zc and z_case or nil,
+ pc = usedinsequence.pc and p_case or nil,
+ mm = usedinsequence.mm and m_mapping or nil,
+ zm = usedinsequence.zm and z_mapping or nil,
+ pm = usedinsequence.pm and p_mapping or nil,
+ }
+ else
+ result = {
+ ch = char,
+ uc = byte,
+ mc = m_case,
+ zc = z_case,
+ pc = p_case,
+ mm = m_mapping,
+ zm = z_mapping,
+ pm = p_mapping,
+ }
+ end
+ -- local sq, n = { }, 0
+ -- for i=1,#byte do
+ -- for s=1,#sequence do
+ -- n = n + 1
+ -- sq[n] = result[sequence[s]][i]
+ -- end
+ -- end
+ -- result.sq = sq
+ return result
end
local function packch(entry)
@@ -602,7 +702,14 @@ local function packch(entry)
local tt, li = { }, split[i].ch
for j=1,#li do
local lij = li[j]
- tt[j] = utfbyte(lij) > ignoredoffset and "[]" or lij
+ local byt = utfbyte(lij)
+ if byt > ignoredoffset then
+ tt[j] = "[]"
+ elseif byt == 0 then
+ tt[j] = " "
+ else
+ tt[j] = lij
+ end
end
t[i] = concat(tt)
end
@@ -611,7 +718,14 @@ local function packch(entry)
local t, li = { }, split.ch
for j=1,#li do
local lij = li[j]
- t[j] = utfbyte(lij) > ignoredoffset and "[]" or lij
+ local byt = utfbyte(lij)
+ if byt > ignoredoffset then
+ t[j] = "[]"
+ elseif byt == 0 then
+ t[j] = " "
+ else
+ t[j] = lij
+ end
end
return concat(t)
end
@@ -622,16 +736,16 @@ local function packuc(entry)
if #split > 0 then -- useless test
local t = { }
for i=1,#split do
- t[i] = concat(split[i].uc, " ")
+ t[i] = concat(split[i].uc, " ") -- sq
end
return concat(t," + ")
else
- return concat(split.uc," ")
+ return concat(split.uc," ") -- sq
end
end
function sorters.sort(entries,cmp)
- if trace_tests or trace_methods then
+ if trace_methods then
local nofentries = #entries
report_sorters("entries: %s, language: %s, method: %s, digits: %s",nofentries,language,method,tostring(digits))
for i=1,nofentries do
@@ -653,7 +767,9 @@ function sorters.sort(entries,cmp)
first = " "
else
s = first
- report_sorters(">> %C (%C)",first,letter)
+ if first and letter then
+ report_sorters(">> %C (%C)",first,letter)
+ end
end
report_sorters(" %s | %s",packch(entry),packuc(entry))
end
diff --git a/tex/context/base/sort-lan.lua b/tex/context/base/sort-lan.lua
index 6d16c0d80..6b0cc5007 100644
--- a/tex/context/base/sort-lan.lua
+++ b/tex/context/base/sort-lan.lua
@@ -310,7 +310,7 @@ local ch, CH = utfchar(replacementoffset + 1), utfchar(replacementoffset + 11)
definitions["cz"] = {
replacements = {
- { "ch", ch }, { "CH", CH }
+ { "ch", ch }, { "Ch", ch }, { "CH", ch }
},
entries = {
["a"] = "a", ["á"] = "a", ["b"] = "b", ["c"] = "c", ["č"] = "č",
diff --git a/tex/context/base/spac-adj.lua b/tex/context/base/spac-adj.lua
index c87a9d17f..cdf9b5051 100644
--- a/tex/context/base/spac-adj.lua
+++ b/tex/context/base/spac-adj.lua
@@ -56,3 +56,11 @@ function nodes.handlers.graphicvadjust(head,groupcode) -- we can make an actionc
return head, false
end
end
+
+interfaces.implement {
+ name = "enablegraphicvadjust",
+ onlyonce = true,
+ actions = function()
+ nodes.tasks.enableaction("finalizers","nodes.handlers.graphicvadjust")
+ end
+}
diff --git a/tex/context/base/spac-adj.mkiv b/tex/context/base/spac-adj.mkiv
index 447dd7229..b8534303d 100644
--- a/tex/context/base/spac-adj.mkiv
+++ b/tex/context/base/spac-adj.mkiv
@@ -16,18 +16,19 @@
\unprotect
% Very nasty but needed for margin stuff inside colored
-% paragraphs. Obsolete anyway.
+% paragraphs. Obsolete for a while now.
\registerctxluafile{spac-adj}{1.001}
\definesystemattribute [graphicvadjust] [public]
\unexpanded\def\enablegraphicvadjust
- {\ctxlua{nodes.tasks.enableaction("finalizers","nodes.handlers.graphicvadjust")}%
+  {\writestatus\m!system{graphicvadjusting is no longer needed!}%
+   \clf_enablegraphicvadjust % once anyway
\glet\enablegraphicvadjust\relax}
\unexpanded\def\graphicvadjust % currently not enabled ... nasty bidi handling
- {\enablegraphicvadjust % and no longer needed anyway
+ {\clf_enablegraphicvadjust % and probably no longer needed anyway
\dowithnextboxcontentcs\forgetall\spac_vadjust_graphic_finish\vbox}
\def\spac_vadjust_graphic_finish
diff --git a/tex/context/base/spac-ali.lua b/tex/context/base/spac-ali.lua
index 25cc6cd66..880da6213 100644
--- a/tex/context/base/spac-ali.lua
+++ b/tex/context/base/spac-ali.lua
@@ -10,13 +10,26 @@ local div = math.div
local format = string.format
local tasks = nodes.tasks
-local appendaction = tasks.appendaction
-local prependaction = tasks.prependaction
-local disableaction = tasks.disableaction
local enableaction = tasks.enableaction
-local slide_nodes = node.slide
-local hpack_nodes = node.hpack -- nodes.fasthpack not really faster here
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getsubtype = nuts.getsubtype
+
+local hpack_nodes = nuts.hpack -- nodes.fasthpack not really faster here
+local linked_nodes = nuts.linked
local unsetvalue = attributes.unsetvalue
@@ -27,8 +40,6 @@ local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local line_code = listcodes.line
-local nodepool = nodes.pool
-
local new_stretch = nodepool.stretch
local a_realign = attributes.private("realign")
@@ -56,10 +67,10 @@ local function handler(head,leftpage,realpageno)
local current = head
local done = false
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code then
- if current.subtype == line_code then
- local a = current[a_realign]
+ if getsubtype(current) == line_code then
+ local a = getattr(current,a_realign)
if not a or a == 0 then
-- skip
else
@@ -75,12 +86,12 @@ local function handler(head,leftpage,realpageno)
action = leftpage and 2 or 1
end
if action == 1 then
- current.list = hpack_nodes(current.list .. new_stretch(3),current.width,"exactly")
+ setfield(current,"list",hpack_nodes(linked_nodes(getlist(current),new_stretch(3)),getfield(current,"width"),"exactly"))
if trace_realign then
report_realign("flushing left, align %a, page %a, realpage %a",align,pageno,realpageno)
end
elseif action == 2 then
- current.list = hpack_nodes(new_stretch(3) .. current.list,current.width,"exactly")
+ setfield(current,"list",hpack_nodes(linked_nodes(new_stretch(3),getlist(current)),getfield(current,"width"),"exactly"))
if trace_realign then
report_realign("flushing right. align %a, page %a, realpage %a",align,pageno,realpageno)
end
@@ -90,14 +101,14 @@ local function handler(head,leftpage,realpageno)
done = true
nofrealigned = nofrealigned + 1
end
- current[a_realign] = unsetvalue
+ setattr(current,a_realign,unsetvalue)
end
end
- handler(current.list,leftpage,realpageno)
+ handler(getlist(current),leftpage,realpageno)
elseif id == vlist_code then
- handler(current.list,leftpage,realpageno)
+ handler(getlist(current),leftpage,realpageno)
end
- current = current.next
+ current = getnext(current)
end
return head, done
end
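
Per line, the handler above boils down to: take the line's list, append (or prepend) an
order-3 stretch, and re-hpack to the original width, which flushes the content to the
opposite side. A condensed sketch of that single step, reusing the accessors imported
in this patch (the function name is invented):

    local nuts         = nodes.nuts
    local getlist      = nuts.getlist
    local getfield     = nuts.getfield
    local setfield     = nuts.setfield
    local hpack_nodes  = nuts.hpack
    local linked_nodes = nuts.linked
    local new_stretch  = nuts.pool.stretch

    local function flushleft(line) -- line: a nut hlist with subtype "line"
        local list  = getlist(line)
        local width = getfield(line,"width")
        -- appending the stretch pushes the content to the left; prepending it
        -- (as in action == 2 above) pushes it to the right
        setfield(line,"list",hpack_nodes(linked_nodes(list,new_stretch(3)),width,"exactly"))
    end
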
@@ -105,7 +116,8 @@ end
function alignments.handler(head)
local leftpage = isleftpage(true,false)
local realpageno = texgetcount("realpageno")
- return handler(head,leftpage,realpageno)
+ local head, done = handler(tonut(head),leftpage,realpageno)
+ return tonode(head), done
end
local enabled = false
@@ -121,7 +133,11 @@ function alignments.set(n)
texsetattribute(a_realign,texgetcount("realpageno") * 10 + n)
end
-commands.setrealign = alignments.set
+interfaces.implement {
+ name = "setrealign",
+ actions = alignments.set,
+ arguments = "integer",
+}
statistics.register("realigning", function()
if nofrealigned > 0 then
diff --git a/tex/context/base/spac-ali.mkiv b/tex/context/base/spac-ali.mkiv
index 9c7e81379..07d588ba7 100644
--- a/tex/context/base/spac-ali.mkiv
+++ b/tex/context/base/spac-ali.mkiv
@@ -38,8 +38,8 @@
\to \everyforgetall
\unexpanded\def\resetrealignsignal{\attribute\realignattribute\attributeunsetvalue}
-\unexpanded\def\signalinnerrealign{\ctxcommand{setrealign(2)}}
-\unexpanded\def\signalouterrealign{\ctxcommand{setrealign(1)}}
+\unexpanded\def\signalinnerrealign{\clf_setrealign\plustwo}
+\unexpanded\def\signalouterrealign{\clf_setrealign\plusone}
\installcorenamespace{aligncommand}
\installcorenamespace{alignhorizontal}
@@ -207,6 +207,9 @@
\unexpanded\def\spac_align_set_stretch
{\emergencystretch\bodyfontsize}
+\unexpanded\def\spac_align_set_extreme_stretch
+ {\emergencystretch10\bodyfontsize}
+
% Vertical
\newconstant\c_spac_align_state_vertical
@@ -562,6 +565,12 @@
\spac_align_use_indeed
\fi}
+\unexpanded\def\dousealignparameter#1% faster local variant
+ {\edef\m_spac_align_asked{#1}%
+ \ifx\m_spac_align_asked\empty\else
+ \spac_align_use_indeed
+ \fi}
+
\def\spac_align_use_indeed
{\expandafter\let\expandafter\raggedcommand\csname\??alignmentnormalcache\m_spac_align_asked\endcsname
\ifx\raggedcommand\relax
@@ -585,13 +594,36 @@
\unexpanded\def\spac_align_use_now#1%
{\csname\??alignmentnormalcache#1\endcsname}
-% The keywords:
+% Maybe we need something different in columns.
\unexpanded\def\installalign#1#2% beware: commands must be unexpandable!
{\ifcsname\??aligncommand#1\endcsname \else
\setvalue{\??aligncommand#1}{\t_spac_align_collected\expandafter{\the\t_spac_align_collected#2}}%
\fi}
+% beware, toks stuff and states are set at a different time, so installalign is
+% only for special options
+%
+% \setvalue{\??aligncommand whatever}%
+% {\c_spac_align_state_horizontal\plushundred
+% \t_spac_align_collected\expandafter{\the\t_spac_align_collected .....}}
+%
+% this one could deal with both
+%
+% \unexpanded\def\installalignoption#1#2%
+% {\ifcsname\??aligncommand#1\endcsname \else
+% \setvalue{\??aligncommand#1}%
+% {\spac_align_set_horizontal_none
+% \c_spac_align_state_horizontal\plushundred % don't set
+% \t_spac_align_collected\expandafter{\the\t_spac_align_collected#2}}%
+% \fi}
+%
+% \installalignoption
+% {whatever}
+% {}
+
+% The keywords:
+
\letvalue{\??aligncommand\empty }\empty
\setvalue{\??aligncommand\v!broad }{\c_spac_align_state_broad \plusone }
\setvalue{\??aligncommand\v!wide }{\c_spac_align_state_broad \plustwo }
@@ -652,6 +684,7 @@
\setvalue{\??aligncommand\v!tolerant }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\spac_align_set_tolerant}}
\setvalue{\??aligncommand\v!verytolerant }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\spac_align_set_very_tolerant}}
\setvalue{\??aligncommand\v!stretch }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\spac_align_set_stretch}}
+\setvalue{\??aligncommand\v!extremestretch }{\t_spac_align_collected\expandafter{\the\t_spac_align_collected\spac_align_set_extreme_stretch}}
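
% A usage sketch (assumption, not part of the patch): the new keyword goes
% through the normal align keyword parser, so something like
%
%   \setupalign[extremestretch]
%
% ends up setting \emergencystretch to 10\bodyfontsize.
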
%D For Wolfgang:
@@ -692,7 +725,7 @@
\forgetall
\let\\=\endgraf
\ifdoublesided\signalinnerrealign\fi
- \doifrightpageelse\spac_align_set_horizontal_right\spac_align_set_horizontal_left
+ \doifelserightpage\spac_align_set_horizontal_right\spac_align_set_horizontal_left
\let\next}
\unexpanded\def\obox#1#2#3%
@@ -700,7 +733,7 @@
\forgetall
\let\\=\endgraf
\ifdoublesided\signalouterrealign\fi
- \doifrightpageelse\c_spac_align_state_horizontal_left\spac_align_set_horizontal_right
+ \doifelserightpage\c_spac_align_state_horizontal_left\spac_align_set_horizontal_right
\let\next}
\let\raggedbox\relax
@@ -938,7 +971,7 @@
\hbox}
\def\doxcheckline % used for floats so multipass anyway
- {\signalrightpage\doifrightpageelse\donetrue\donefalse}
+ {\signalrightpage\doifelserightpage\donetrue\donefalse}
\setvalue{\??alignline\v!inner }{\doxalignline\doxcheckline++\zeropoint \relax\hss }
\setvalue{\??alignline\v!outer }{\doxalignline\doxcheckline++\zeropoint \hss \relax}
@@ -1012,15 +1045,51 @@
% \simplealignedbox{2cm}{right}{x}
\installcorenamespace{alignsimple}
-
-\setvalue{\??alignsimple\v!right }#1{{#1\hss}}
-\setvalue{\??alignsimple\v!left }#1{{\hss#1}}
-\setvalue{\??alignsimple\v!flushright}#1{{\hss#1}}
-\setvalue{\??alignsimple\v!flushleft }#1{{#1\hss}}
-\setvalue{\??alignsimple\v!middle }#1{{\hss#1\hss}}
+\installcorenamespace{alignsimplereverse}
+
+% todo: also handle \bgroup ... \egroup
+
+\unexpanded\def\spac_align_simple_left #1{{#1\hss}}
+\unexpanded\def\spac_align_simple_right #1{{\hss#1}}
+\unexpanded\def\spac_align_simple_middle#1{{\hss#1\hss}}
+
+\letvalue{\??alignsimple \v!right }\spac_align_simple_left
+\letvalue{\??alignsimple \v!outer }\spac_align_simple_left % not managed! see linenumbers
+\letvalue{\??alignsimple \v!flushleft }\spac_align_simple_left
+\letvalue{\??alignsimple \v!left }\spac_align_simple_right
+\letvalue{\??alignsimple \v!inner }\spac_align_simple_right % not managed! see linenumbers
+\letvalue{\??alignsimple \v!flushright}\spac_align_simple_right
+\letvalue{\??alignsimple \v!middle }\spac_align_simple_middle
+
+\letvalue{\??alignsimplereverse\v!right }\spac_align_simple_right
+\letvalue{\??alignsimplereverse\v!outer }\spac_align_simple_right % not managed! see linenumbers
+\letvalue{\??alignsimplereverse\v!flushleft }\spac_align_simple_right
+\letvalue{\??alignsimplereverse\v!left }\spac_align_simple_left
+\letvalue{\??alignsimplereverse\v!inner }\spac_align_simple_left % not managed! see linenumbers
+\letvalue{\??alignsimplereverse\v!flushright}\spac_align_simple_left
+\letvalue{\??alignsimplereverse\v!middle }\spac_align_simple_middle
\unexpanded\def\simplealignedbox#1#2%
- {\hbox to #1\csname\??alignsimple\ifcsname\??alignsimple#2\endcsname#2\else\v!right\fi\endcsname}
+ {\hbox \ifdim#1>\zeropoint to #1
+ \csname\??alignsimple\ifcsname\??alignsimple#2\endcsname#2\else\v!right\fi\expandafter\endcsname
+ \fi}
+
+\unexpanded\def\simplealignedboxplus#1#2#3%
+ {\hbox #3 \ifdim#1>\zeropoint to #1
+ \csname\??alignsimple\ifcsname\??alignsimple#2\endcsname#2\else\v!right\fi\expandafter\endcsname
+ \fi}
+
+\newconditional\alignsimplelefttoright \settrue\alignsimplelefttoright
+
+\unexpanded\def\simplereversealignedbox#1#2%
+ {\hbox \ifdim#1>\zeropoint to #1
+ \csname\??alignsimplereverse\ifcsname\??alignsimplereverse#2\endcsname#2\else\v!left\fi\expandafter\endcsname
+ \fi}
+
+\unexpanded\def\simplereversealignedboxplus#1#2#3%
+ {\hbox #3 \ifdim#1>\zeropoint to #1
+ \csname\??alignsimplereverse\ifcsname\??alignsimplereverse#2\endcsname#2\else\v!left\fi\expandafter\endcsname
+ \fi}
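
% A usage sketch (assumption, mirroring the example above): the reverse variant
% flips the keyword meanings, e.g.
%
%   \simplealignedbox       {2cm}{flushright}{x} % x ends up at the right edge
%   \simplereversealignedbox{2cm}{flushright}{x} % x ends up at the left edge
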
% \installnamespace{alignsets}
%
diff --git a/tex/context/base/spac-chr.lua b/tex/context/base/spac-chr.lua
index db98b42a6..e3fa6d099 100644
--- a/tex/context/base/spac-chr.lua
+++ b/tex/context/base/spac-chr.lua
@@ -14,24 +14,47 @@ local byte, lower = string.byte, string.lower
-- to be redone: characters will become tagged spaces instead as then we keep track of
-- spaceskip etc
+-- todo: only setattr when export / use properties
+
local next = next
-trace_characters = false trackers.register("typesetters.characters", function(v) trace_characters = v end)
+local trace_characters = false trackers.register("typesetters.characters", function(v) trace_characters = v end)
+local trace_nbsp = false trackers.register("typesetters.nbsp", function(v) trace_nbsp = v end)
-report_characters = logs.reporter("typesetting","characters")
+local report_characters = logs.reporter("typesetting","characters")
local nodes, node = nodes, node
-local insert_node_after = nodes.insert_after
-local remove_node = nodes.remove
-local copy_node_list = nodes.copy_list
-local traverse_id = nodes.traverse_id
+local nuts = nodes.nuts
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+
+local setcolor = nodes.tracers.colors.set
+
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local remove_node = nuts.remove
+local copy_node_list = nuts.copy_list
+local traverse_id = nuts.traverse_id
local tasks = nodes.tasks
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local new_penalty = nodepool.penalty
local new_glue = nodepool.glue
+local new_kern = nodepool.kern
+local new_rule = nodepool.rule
local nodecodes = nodes.nodecodes
local skipcodes = nodes.skipcodes
@@ -41,6 +64,7 @@ local glue_code = nodecodes.glue
local space_skip_code = skipcodes["spaceskip"]
local chardata = characters.data
+local is_punctuation = characters.is_punctuation
local typesetters = typesetters
@@ -63,48 +87,56 @@ local c_zero = byte('0')
local c_period = byte('.')
local function inject_quad_space(unicode,head,current,fraction)
- local attr = current.attr
+ local attr = getfield(current,"attr")
if fraction ~= 0 then
- fraction = fraction * fontquads[current.font]
+ fraction = fraction * fontquads[getfont(current)]
end
local glue = new_glue(fraction)
--- glue.attr = copy_node_list(attr)
- glue.attr = attr
- current.attr = nil
- glue[a_character] = unicode
+ setfield(glue,"attr",attr)
+ setfield(current,"attr",nil)
+ setattr(glue,a_character,unicode)
head, current = insert_node_after(head,current,glue)
return head, current
end
local function inject_char_space(unicode,head,current,parent)
- local attr = current.attr
- local font = current.font
+ local attr = getfield(current,"attr")
+ local font = getfont(current)
local char = fontcharacters[font][parent]
local glue = new_glue(char and char.width or fontparameters[font].space)
- glue.attr = current.attr
- current.attr = nil
- glue[a_character] = unicode
+ setfield(glue,"attr",attr)
+ setfield(current,"attr",nil)
+ setattr(glue,a_character,unicode)
head, current = insert_node_after(head,current,glue)
return head, current
end
local function inject_nobreak_space(unicode,head,current,space,spacestretch,spaceshrink)
- local attr = current.attr
- local glue = new_glue(space,spacestretch,spaceshrink)
+ local attr = getfield(current,"attr")
+ local glue = new_glue(space,spacestretch,spaceshrink)
local penalty = new_penalty(10000)
- glue.attr = attr
- current.attr = nil
- glue[a_character] = unicode
+ setfield(glue,"attr",attr)
+ setfield(current,"attr",nil)
+ setattr(glue,a_character,unicode) -- bombs
head, current = insert_node_after(head,current,penalty)
+ if trace_nbsp then
+ local rule = new_rule(space)
+ local kern = new_kern(-space)
+ local penalty = new_penalty(10000)
+ setcolor(rule,"orange")
+ head, current = insert_node_after(head,current,rule)
+ head, current = insert_node_after(head,current,kern)
+ head, current = insert_node_after(head,current,penalty)
+ end
head, current = insert_node_after(head,current,glue)
return head, current
end
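
-- Usage note (a sketch, not part of the patch): the visualization above can be
-- switched on from the TeX end with
--
--   \enabletrackers[typesetters.nbsp]
--
-- which puts an orange rule (plus a compensating kern) at every injected
-- no-break space.
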
local function nbsp(head,current)
- local para = fontparameters[current.font]
- if current[a_alignstate] == 1 then -- flushright
+ local para = fontparameters[getfont(current)]
+ if getattr(current,a_alignstate) == 1 then -- flushright
head, current = inject_nobreak_space(0x00A0,head,current,para.space,0,0)
- current.subtype = space_skip_code
+ setfield(current,"subtype",space_skip_code)
else
head, current = inject_nobreak_space(0x00A0,head,current,para.space,para.spacestretch,para.spaceshrink)
end
@@ -121,7 +153,7 @@ end
function characters.replacenbspaces(head)
for current in traverse_id(glyph_code,head) do
- if current.char == 0x00A0 then
+ if getchar(current) == 0x00A0 then
local h = nbsp(head,current)
if h then
head = remove_node(h,current,true)
@@ -146,22 +178,34 @@ local methods = {
-- The next one uses an attribute assigned to the character but still we
-- don't have the 'local' value.
+ [0x001F] = function(head,current)
+ local next = getnext(current)
+ if next and getid(next) == glyph_code then
+ local char = getchar(next)
+ head, current = remove_node(head,current,true)
+ if not is_punctuation[char] then
+ local p = fontparameters[getfont(next)]
+ head, current = insert_node_before(head,current,new_glue(p.space,p.space_stretch,p.space_shrink))
+ end
+ end
+ end,
+
[0x00A0] = function(head,current) -- nbsp
- local next = current.next
- if next and next.id == glyph_code then
- local char = next.char
+ local next = getnext(current)
+ if next and getid(next) == glyph_code then
+ local char = getchar(next)
if char == 0x200C or char == 0x200D then -- nzwj zwj
- next = next.next
- if next and nbsphash[next.char] then
+ next = getnext(next)
+ if next and nbsphash[getchar(next)] then
return false
end
elseif nbsphash[char] then
return false
end
end
- local prev = current.prev
- if prev and prev.id == glyph_code and nbsphash[prev.char] then
- return false -- kannada
+ local prev = getprev(current)
+ if prev and getid(prev) == glyph_code and nbsphash[getchar(prev)] then
+ return false
end
return nbsp(head,current)
end,
@@ -215,11 +259,11 @@ local methods = {
end,
[0x202F] = function(head,current) -- narrownobreakspace
- return inject_nobreak_space(0x202F,head,current,fontquads[current.font]/8)
+ return inject_nobreak_space(0x202F,head,current,fontquads[getfont(current)]/8)
end,
[0x205F] = function(head,current) -- math thinspace
- return inject_nobreak_space(0x205F,head,current,fontparameters[current.font].space/8)
+ return inject_nobreak_space(0x205F,head,current,fontparameters[getfont(current)].space/8)
end,
-- [0xFEFF] = function(head,current) -- zerowidthnobreakspace
@@ -228,14 +272,15 @@ local methods = {
}
-function characters.handler(head)
+function characters.handler(head) -- todo: use traverse_id
+ head = tonut(head)
local current = head
local done = false
while current do
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- local next = current.next
- local char = current.char
+ local next = getnext(current)
+ local char = getchar(current)
local method = methods[char]
if method then
if trace_characters then
@@ -249,8 +294,8 @@ function characters.handler(head)
end
current = next
else
- current = current.next
+ current = getnext(current)
end
end
- return head, done
+ return tonode(head), done
end
diff --git a/tex/context/base/spac-chr.mkiv b/tex/context/base/spac-chr.mkiv
index 54a25be34..562fb940c 100644
--- a/tex/context/base/spac-chr.mkiv
+++ b/tex/context/base/spac-chr.mkiv
@@ -77,6 +77,9 @@
\let\zwnj\zerowidthnonjoiner
\let\zwj \zerowidthjoiner
+\let\nbsp\nobreakspace
+
+\chardef\optionalspace"1F % will be space unless before punctuation
% Shortcuts:
diff --git a/tex/context/base/spac-def.mkiv b/tex/context/base/spac-def.mkiv
index 312483cfa..7ead3c63e 100644
--- a/tex/context/base/spac-def.mkiv
+++ b/tex/context/base/spac-def.mkiv
@@ -60,7 +60,7 @@
\c!depth=.28,
\c!top=1.0,
\c!bottom=0.4,
- \c!distance=\onepoint,
+ \c!distance=\onepoint, % \dimexpr\openlineheight/10\relax
\c!line=2.8\exheight,
\c!stretch=\zerocount,
\c!shrink=\zerocount]
diff --git a/tex/context/base/spac-hor.lua b/tex/context/base/spac-hor.lua
index c9d6e2b15..5d5a43e31 100644
--- a/tex/context/base/spac-hor.lua
+++ b/tex/context/base/spac-hor.lua
@@ -6,14 +6,15 @@ if not modules then modules = { } end modules ['spac-hor'] = {
license = "see context related readme files"
}
-local match = string.match
local utfbyte = utf.byte
+local lpegmatch, P, C = lpeg.match, lpeg.P, lpeg.C
local context = context
-local commands = commands
local chardata = characters.data
+local p_check = P("the ") * (P("letter") + P("character")) * P(" ") * lpeg.patterns.utf8byte -- is a capture already
+
local can_have_space = table.tohash {
"lu", "ll", "lt", "lm", "lo", -- letters
-- "mn", "mc", "me", -- marks
@@ -26,10 +27,18 @@ local can_have_space = table.tohash {
-- "cc", "cf", "cs", "co", "cn", -- others
}
-function commands.autonextspace(str) -- todo: use nexttoken
- local ch = match(str,"the letter (.)") or match(str,"the character (.)")
- ch = ch and chardata[utfbyte(ch)]
- if ch and can_have_space[ch.category] then
- context.space()
+local function autonextspace(str) -- todo: make a real non-intrusive lookahead
+ local b = lpegmatch(p_check,str)
+ if b then
+ local d = chardata[b]
+ if d and can_have_space[d.category] then
+ context.space()
+ end
end
end
+
+interfaces.implement {
+ name = "autonextspace",
+ actions = autonextspace,
+ arguments = "string",
+}
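
-- A minimal sketch (not part of the patch) of what the lpeg replacement sees in
-- a \meaning string:
--
--   local b = lpegmatch(p_check,"the letter A") -- 65, already a utf codepoint
--   local d = b and chardata[b]                 -- category "lu", so a space
--                                               -- is injected via context.space()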
diff --git a/tex/context/base/spac-hor.mkiv b/tex/context/base/spac-hor.mkiv
index 4cd913290..54156c3b4 100644
--- a/tex/context/base/spac-hor.mkiv
+++ b/tex/context/base/spac-hor.mkiv
@@ -32,7 +32,7 @@
{\doifoutervmode{\ifconditional\c_spac_indentation_indent_first\else\spac_indentation_variant_no\fi}}
\unexpanded\def\setupindenting
- {\doifnextoptionalelse\spac_indentation_setup_options\spac_indentation_setup_size}
+ {\doifelsenextoptionalcs\spac_indentation_setup_options\spac_indentation_setup_size}
\unexpanded\def\spac_indentation_setup_size
{\assigndimension\v_spac_indentation_current\d_spac_indentation_par{1\emwidth}{1.5\emwidth}{2\emwidth}}
@@ -64,24 +64,71 @@
\def\spac_indentation_set_everypar
{\everypar{\checkindentation}}
+\unexpanded\def\useindentingparameter#1% faster local variant
+ {\edef\m_spac_indentation_options{#1\c!indenting}%
+ \ifx\m_spac_indentation_options\empty \else
+ \spac_indentation_setup_indeed
+ \fi}
+
+% \def\spac_indentation_apply_step_one#1%
+% {\ifcsname\??indentingmethod#1\endcsname
+% % case two
+% \else
+% \edef\v_spac_indentation_current{#1}% single entry in list
+% \let\normalindentation\v_spac_indentation_current
+% \spac_indentation_setup_size
+% \fi}
+%
+% \def\spac_indentation_apply_step_two#1%
+% {\ifcsname\??indentingmethod#1\endcsname
+% \csname\??indentingmethod#1\endcsname
+% \else
+% % case one
+% \fi}
+
+% \defineindenting[whatever][yes,2cm]
+% %defineindenting[whatever][yes,-2cm]
+%
+% \setupindenting[yes,-2em] \input ward \par
+% \setupindenting[yes,2em] \input ward \par
+% \setupindenting[whatever] \input ward \par
+
+\installcorenamespace {indentingpreset}
+
+\unexpanded\def\defineindenting
+ {\dodoubleargument\spac_indenting_define}
+
+\def\spac_indenting_define[#1][#2]% todo: mixes
+ {\setevalue{\??indentingpreset#1}{#2}}
+
+\def\spac_indentation_apply_step_one_nested#1%
+ {\expandafter\processcommacommand\expandafter[\csname\??indentingpreset#1\endcsname]\spac_indentation_apply_step_one}
+
+\def\spac_indentation_apply_step_two_nested#1%
+ {\expandafter\processcommacommand\expandafter[\csname\??indentingpreset#1\endcsname]\spac_indentation_apply_step_two}
+
\def\spac_indentation_apply_step_one#1%
- {\ifcsname\??indentingmethod#1\endcsname
+ {\ifcsname\??indentingpreset#1\endcsname
+ \spac_indentation_apply_step_one_nested{#1}%
+ \else\ifcsname\??indentingmethod#1\endcsname
% case two
\else
\edef\v_spac_indentation_current{#1}% single entry in list
\let\normalindentation\v_spac_indentation_current
\spac_indentation_setup_size
- \fi}
+ \fi\fi}
\def\spac_indentation_apply_step_two#1%
- {\ifcsname\??indentingmethod#1\endcsname
+ {\ifcsname\??indentingpreset#1\endcsname
+ \spac_indentation_apply_step_two_nested{#1}%
+ \else\ifcsname\??indentingmethod#1\endcsname
\csname\??indentingmethod#1\endcsname
\else
% case one
- \fi}
+ \fi\fi}
\unexpanded\def\indenting % kind of obsolete
- {\doifnextoptionalelse\spac_indentation_setup_options\relax}
+ {\doifelsenextoptionalcs\spac_indentation_setup_options\relax}
% use \noindentation to suppress next indentation
@@ -217,7 +264,7 @@
\unexpanded\def\spac_indentation_check_next_indentation
{\global\let\dorechecknextindentation\relax
- \doifnextcharelse\par\donothing\spac_indentation_variant_no} % messy check as next is seldom \par
+ \doifelsenextchar\par\donothing\spac_indentation_variant_no} % messy check as next is seldom \par
\def\spac_indentation_variant_auto
{\global\let\dorechecknextindentation\spac_indentation_check_next_indentation}
@@ -339,7 +386,7 @@
\installspacingmethod \v!broad {\nonfrenchspacing} % more depending on what punctuation
\unexpanded\def\setupspacing
- {\doifnextoptionalelse\spac_spacecodes_setup_yes\spac_spacecodes_setup_nop}
+ {\doifelsenextoptionalcs\spac_spacecodes_setup_yes\spac_spacecodes_setup_nop}
\def\spac_spacecodes_setup_yes[#1]%
{\csname\??spacecodemethod#1\endcsname
@@ -630,7 +677,7 @@
\fi}
\def\spac_narrower_start_named_one[#1]%
- {\doifassignmentelse{#1}\spac_narrower_start_named_one_yes\spac_narrower_start_named_one_nop[#1]}
+ {\doifelseassignment{#1}\spac_narrower_start_named_one_yes\spac_narrower_start_named_one_nop[#1]}
\def\spac_narrower_start_named_one_yes[#1][#2]% [settings] []
{\setupcurrentnarrower[#1]%
@@ -641,14 +688,14 @@
\spac_narrower_start_apply{\narrowerparameter\v!default}}
\def\spac_narrower_start_named_two[#1]%
- {\doifassignmentelse{#1}\spac_narrower_start_named_settings_how\spac_narrower_start_named_tag_unknown[#1]}
+ {\doifelseassignment{#1}\spac_narrower_start_named_settings_how\spac_narrower_start_named_tag_unknown[#1]}
\def\spac_narrower_start_named_settings_how[#1][#2]% [settings] [how]
{\setupcurrentnarrower[#1]%
\spac_narrower_start_apply{#2}}
\def\spac_narrower_start_named_tag_unknown[#1][#2]% [tag] [...]
- {\doifassignmentelse{#2}\spac_narrower_start_named_tag_settings\spac_narrower_start_named_tag_how[#1][#2]}
+ {\doifelseassignment{#2}\spac_narrower_start_named_tag_settings\spac_narrower_start_named_tag_how[#1][#2]}
\def\spac_narrower_start_named_tag_settings[#1][#2]% [tag] [settings]
{\edef\currentnarrower{#1}%
@@ -692,8 +739,8 @@
\unexpanded\def\dosetleftskipadaption #1{\leftskipadaption \ifcsname\??skipadaptionleft #1\endcsname\csname\??skipadaptionleft #1\endcsname\else#1\fi\relax}
\unexpanded\def\dosetrightskipadaption#1{\rightskipadaption\ifcsname\??skipadaptionright#1\endcsname\csname\??skipadaptionright#1\endcsname\else#1\fi\relax}
-\unexpanded\def\doadaptleftskip #1{\dosetleftskipadaption {#1}\advance\leftskip \leftskipadaption }
-\unexpanded\def\doadaptrightskip#1{\dosetrightskipadaption{#1}\advance\rightskip\rightskipadaption}
+\unexpanded\def\doadaptleftskip #1{\normalexpanded{\dosetleftskipadaption {#1}}\advance\leftskip \leftskipadaption }
+\unexpanded\def\doadaptrightskip#1{\normalexpanded{\dosetrightskipadaption{#1}}\advance\rightskip\rightskipadaption}
\unexpanded\def\forgetbothskips
{\leftskip\zeropoint
@@ -739,7 +786,7 @@
{\dosingleargument\spac_tolerances_setup}
\def\spac_tolerances_setup[#1]%
- {\doifinsetelse\v!vertical{#1}%
+ {\doifelseinset\v!vertical{#1}%
{\processcommacommand[#1]\spac_tolerances_step_vertical }
{\processcommacommand[#1]\spac_tolerances_step_horizontal}}
@@ -809,31 +856,6 @@
%D In \CONTEXT\ however we save some processing time by putting
%D an extra \type{\hbox} around the \type{\strutbox}.
-% moved from page-lin.tex to here (due to visualization added
-% in august 2003)
-%
-% \unexpanded \def\crlf
-% {\ifhmode\unskip\else\strut\fi\ifcase\raggedstatus\hfil\fi\break}
-
-\unexpanded\def\crlf
- {\ifhmode
- \unskip
- \prewordbreak\crlfplaceholder
- \ifcase\raggedstatus\hfil\or\or\or\hfil\fi
- \break
- \else
- \crlfplaceholder
- \endgraf
- \fi}
-
-\unexpanded\def\crlfplaceholder
- {\strut}
-
-\unexpanded\def\settestcrlf
- {\unexpanded\def\crlfplaceholder
- {\hbox to \zeropoint
- {\strut{\infofont\kern.25em}\lohi{\infofont CR}{\infofont LF}\hss}}}
-
%D \starttyping
%D % \setuplayout[gridgrid=yes] \showgrid
%D
@@ -1024,7 +1046,7 @@
{\futurelet\nexttoken\spac_spaces_auto_insert_next}
\def\spac_spaces_auto_insert_next
- {\ctxcommand{autonextspace(\!!bs\meaning\nexttoken\!!es)}} % todo, just consult nexttoken at the lua end
+ {\clf_autonextspace{\meaning\nexttoken}} % todo, just consult nexttoken at the lua end
%D Moved from bib module:
@@ -1059,7 +1081,7 @@
%D A rather unknown one:
\unexpanded\def\widened % moved from cont-new
- {\doifnextoptionalelse\spac_widened_yes\spac_widened_nop}
+ {\doifelsenextoptionalcs\spac_widened_yes\spac_widened_nop}
\def\spac_widened_yes[#1]#2{\hbox \s!spread #1{\hss#2\hss}}
\def\spac_widened_nop #1{\hbox \s!spread \emwidth{\hss#1\hss}}
diff --git a/tex/context/base/spac-lin.mkiv b/tex/context/base/spac-lin.mkiv
index 094e18e0b..20fec5d45 100644
--- a/tex/context/base/spac-lin.mkiv
+++ b/tex/context/base/spac-lin.mkiv
@@ -88,13 +88,13 @@
\linesparameter\c!before
\pushmacro\checkindentation
\whitespace
- \dostarttagged\t!lines\currentlines
+ \dostarttaggedchained\t!lines\currentlines\??lines
\begingroup
\uselinesstyleandcolor\c!style\c!color
- \setupindenting[\linesparameter\c!indenting]%
- \setupalign[\linesparameter\c!align]%
+ \useindentingparameter\linesparameter
+ \usealignparameter\linesparameter
\typesettinglinestrue
- \setupwhitespace[\v!none]%
+ \setupwhitespace[\v!none]% todo use fast variant
%\obeylines % move upwards to keep spaces in the first line due to optional argument
\ignorespaces
\glet\spac_after_first_obeyed_line\spac_lines_after_first_obeyed_line_a
@@ -128,7 +128,7 @@
\egroup}
\def\spac_lines_between
- {\doifmeaningelse\next\obeyedline % brrr
+ {\doifelsemeaning\next\obeyedline % brrr
{\linesparameter\c!inbetween}
{\spac_after_first_obeyed_line}}
diff --git a/tex/context/base/spac-pag.mkiv b/tex/context/base/spac-pag.mkiv
index da4c8e970..1ecc31d8c 100644
--- a/tex/context/base/spac-pag.mkiv
+++ b/tex/context/base/spac-pag.mkiv
@@ -51,7 +51,7 @@
\setpagestaterealpageno{#1}{\number#2}%
\fi}
-\unexpanded\def\doifrightpagestateelse#1#2% not expandable !
+\unexpanded\def\doifelserightpagestate#1#2% not expandable !
{\ifcase\frozenpagestate
\pagestatemismatchfalse
\realpagestateno\realfolio
@@ -100,7 +100,7 @@
\expandafter\secondoftwoarguments
\fi}
-\unexpanded\def\doifforcedrightpagestateelse#1#2%
+\unexpanded\def\doifelseforcedrightpagestate#1#2%
{\ifcase\frozenpagestate
\pagestatemismatchfalse
\realpagestateno\realfolio
@@ -135,6 +135,9 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doifrightpagestateelse \doifelserightpagestate
+\let\doifforcedrightpagestateelse\doifelseforcedrightpagestate
+
\unexpanded\def\freezepagestate {\frozenpagestate\plusone }
\unexpanded\def\defrostpagestate{\frozenpagestate\zerocount}
@@ -147,7 +150,9 @@
\definepagestate[\s!paragraph]
\unexpanded\def\signalrightpage {\dotrackpagestate \s!paragraph\nofraggedparagraphs} % use \dontleavehmode if needed
-\unexpanded\def\doifrightpageelse{\doifrightpagestateelse\s!paragraph\nofraggedparagraphs}
+\unexpanded\def\doifelserightpage{\doifelserightpagestate\s!paragraph\nofraggedparagraphs}
+
+\let\doifrightpageelse\doifelserightpage
\installcorenamespace{pagechanges}
diff --git a/tex/context/base/spac-par.mkiv b/tex/context/base/spac-par.mkiv
index 825cdca46..4dd3db243 100644
--- a/tex/context/base/spac-par.mkiv
+++ b/tex/context/base/spac-par.mkiv
@@ -203,6 +203,8 @@
% \glet\flushpostponednodedata\spac_postponed_data_flush
% \fi}
+\newtoks\everyflushatnextpar
+
\unexpanded\def\pushpostponednodedata
{\globalpushbox\b_spac_postponed_data}
@@ -214,6 +216,7 @@
\unexpanded\def\flushatnextpar
{\begingroup
+ \the\everyflushatnextpar
\glet\flushpostponednodedata\spac_postponed_data_flush
\dowithnextboxcs\spac_postponed_data_finish\hbox}
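
% A usage sketch (assumption; \checksomething is a hypothetical hook): other code
% can now extend the flush without redefining it, e.g.
%
%   \appendtoks \checksomething \to \everyflushatnextpar
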
diff --git a/tex/context/base/spac-prf.mkiv b/tex/context/base/spac-prf.mkiv
new file mode 100644
index 000000000..5f1553ede
--- /dev/null
+++ b/tex/context/base/spac-prf.mkiv
@@ -0,0 +1,31 @@
+%D \module
+%D [ file=spac-prf,
+%D version=2015.11.16, % moved from test module mathplus
+%D title=\CONTEXT\ Spacing Macros,
+%D subtitle=Profiling,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Spacing Macros / Profiling}
+
+%D This is a placeholder for something to come. But as I don't want to
+%D be harassed by 'why does it work differently than a week before' this
+%D cool new feature will only end up here when stable enough. Alas.
+
+\unprotect
+
+\definesystemattribute[profilemethod][public]
+
+\unexpanded\def\setprofile [#1]{}
+\unexpanded\def\resetprofile {}
+\unexpanded\def\useprofileparameter#1{}
+\unexpanded\def\addprofiletobox #1{}
+\unexpanded\def\profilegivenbox #1#2{}
+\unexpanded\def\profiledbox {\vbox}
+
+\protect \endinput
diff --git a/tex/context/base/spac-ver.lua b/tex/context/base/spac-ver.lua
index 0035c4119..d1cf09e17 100644
--- a/tex/context/base/spac-ver.lua
+++ b/tex/context/base/spac-ver.lua
@@ -8,10 +8,16 @@ if not modules then modules = { } end modules ['spac-ver'] = {
-- we also need to call the spacer for inserts!
--- todo: directly set skips
+-- somehow lists still don't always have proper prev nodes so i need to
+-- check all of the luatex code some day .. maybe i should replace the
+-- whole mvl handler by lua code .. why not
+
+-- todo: use lua nodes with lua data (>0.79)
+-- see ** can go when 0.79
-- this code dates from the beginning and is kind of experimental; it
--- will be optimized and improved soon
+-- will be optimized and improved soon .. it's way too complex now but
+-- dates from a time with fewer possibilities
--
-- the collapser will be redone with user nodes; also, we might make
-- parskip into an attribute and apply it explicitly, thereby getting rid
@@ -32,14 +38,20 @@ local formatters = string.formatters
local P, C, R, S, Cc = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc
-local nodes, node, trackers, attributes, context, commands, tex = nodes, node, trackers, attributes, context, commands, tex
+local nodes = nodes
+local node = node
+local trackers = trackers
+local attributes = attributes
+local context = context
+local tex = tex
local texlists = tex.lists
local texgetdimen = tex.getdimen
+local texsetdimen = tex.setdimen
local texnest = tex.nest
-local texgetbox = tex.getbox
local variables = interfaces.variables
+local implement = interfaces.implement
-- vertical space handler
@@ -49,11 +61,12 @@ local trace_page_builder = false trackers.register("builders.page", fun
local trace_collect_vspacing = false trackers.register("vspacing.collect", function(v) trace_collect_vspacing = v end)
local trace_vspacing = false trackers.register("vspacing.spacing", function(v) trace_vspacing = v end)
local trace_vsnapping = false trackers.register("vspacing.snapping", function(v) trace_vsnapping = v end)
-local trace_vpacking = false trackers.register("vspacing.packing", function(v) trace_vpacking = v end)
+local trace_specials = false trackers.register("vspacing.specials", function(v) trace_specials = v end)
local report_vspacing = logs.reporter("vspacing","spacing")
local report_collapser = logs.reporter("vspacing","collapsing")
local report_snapper = logs.reporter("vspacing","snapping")
+local report_specials = logs.reporter("vspacing","specials")
local report_page_builder = logs.reporter("builders","page")
local a_skipcategory = attributes.private('skipcategory')
@@ -63,27 +76,46 @@ local a_skiporder = attributes.private('skiporder')
local a_snapmethod = attributes.private('snapmethod')
local a_snapvbox = attributes.private('snapvbox')
-local find_node_tail = node.tail
-local free_node = node.free
-local free_node_list = node.flush_list
-local copy_node = node.copy
-local traverse_nodes = node.traverse
-local traverse_nodes_id = node.traverse_id
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local remove_node = nodes.remove
-local count_nodes = nodes.count
-local nodeidstostring = nodes.idstostring
-local hpack_node = node.hpack
-local vpack_node = node.vpack
-local writable_spec = nodes.writable_spec
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+local ntostring = nuts.tostring
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getsubtype = nuts.getsubtype
+local getbox = nuts.getbox
+
+local find_node_tail = nuts.tail
+local free_node = nuts.free
+local free_node_list = nuts.flush_list
+local copy_node = nuts.copy
+local traverse_nodes = nuts.traverse
+local traverse_nodes_id = nuts.traverse_id
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local remove_node = nuts.remove
+local count_nodes = nuts.count
+local hpack_node = nuts.hpack
+local vpack_node = nuts.vpack
+local writable_spec = nuts.writable_spec
+local nodereference = nuts.reference
+
local listtoutf = nodes.listtoutf
+local nodeidstostring = nodes.idstostring
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local new_penalty = nodepool.penalty
local new_kern = nodepool.kern
local new_rule = nodepool.rule
+local new_glue = nodepool.glue
local new_gluespec = nodepool.gluespec
local nodecodes = nodes.nodecodes
@@ -103,8 +135,8 @@ builders.vspacing = vspacing
local vspacingdata = vspacing.data or { }
vspacing.data = vspacingdata
-vspacingdata.snapmethods = vspacingdata.snapmethods or { }
-local snapmethods = vspacingdata.snapmethods --maybe some older code can go
+local snapmethods = vspacingdata.snapmethods or { }
+vspacingdata.snapmethods = snapmethods
storage.register("builders/vspacing/data/snapmethods", snapmethods, "builders.vspacing.data.snapmethods")
@@ -114,11 +146,13 @@ local default = {
strut = true,
hfraction = 1,
dfraction = 1,
+ bfraction = 0.25,
}
local fractions = {
minheight = "hfraction", maxheight = "hfraction",
mindepth = "dfraction", maxdepth = "dfraction",
+ box = "bfraction",
top = "tlines", bottom = "blines",
}
@@ -179,28 +213,26 @@ end
-- local rule_id = nodecodes.rule
-- local vlist_id = nodecodes.vlist
-- function nodes.makevtop(n)
--- if n.id == vlist_id then
--- local list = n.list
--- local height = (list and list.id <= rule_id and list.height) or 0
--- n.depth = n.depth - height + n.height
--- n.height = height
+-- if getid(n) == vlist_id then
+-- local list = getlist(n)
+-- local height = (list and getid(list) <= rule_id and getfield(list,"height")) or 0
+-- setfield(n,"depth",getfield(n,"depth") - height + getfield(n,"height")
+-- setfield(n,"height",height
-- end
-- end
-local reference = nodes.reference
-
local function validvbox(parentid,list)
if parentid == hlist_code then
- local id = list.id
+ local id = getid(list)
if id == whatsit_code then -- check for initial par subtype
- list = list.next
+ list = getnext(list)
if not next then
return nil
end
end
local done = nil
for n in traverse_nodes(list) do
- local id = n.id
+ local id = getid(n)
if id == vlist_code or id == hlist_code then
if done then
return nil
@@ -214,9 +246,9 @@ local function validvbox(parentid,list)
end
end
if done then
- local id = done.id
+ local id = getid(done)
if id == hlist_code then
- return validvbox(id,done.list)
+ return validvbox(id,getlist(done))
end
end
return done -- only one vbox
@@ -226,19 +258,19 @@ end
local function already_done(parentid,list,a_snapmethod) -- todo: done when only boxes and all snapped
-- problem: any snapped vbox ends up in a line
if list and parentid == hlist_code then
- local id = list.id
+ local id = getid(list)
if id == whatsit_code then -- check for initial par subtype
- list = list.next
+ list = getnext(list)
if not next then
return false
end
end
--~ local i = 0
for n in traverse_nodes(list) do
- local id = n.id
---~ i = i + 1 print(i,nodecodes[id],n[a_snapmethod])
+ local id = getid(n)
+--~ i = i + 1 print(i,nodecodes[id],getattr(n,a_snapmethod))
if id == hlist_code or id == vlist_code then
- local a = n[a_snapmethod]
+ local a = getattr(n,a_snapmethod)
if not a then
-- return true -- not snapped at all
elseif a == 0 then
@@ -276,11 +308,11 @@ end
-- check variables.none etc
local function snap_hlist(where,current,method,height,depth) -- method.strut is default
- local list = current.list
+ local list = getlist(current)
local t = trace_vsnapping and { }
if t then
t[#t+1] = formatters["list content: %s"](listtoutf(list))
- t[#t+1] = formatters["parent id: %s"](reference(current))
+ t[#t+1] = formatters["parent id: %s"](nodereference(current))
t[#t+1] = formatters["snap method: %s"](method.name)
t[#t+1] = formatters["specification: %s"](method.specification)
end
@@ -312,26 +344,58 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
t[#t+1] = formatters["auto: snapht %p snapdp %p"](snapht,snapdp)
end
end
- local h, d = height or current.height, depth or current.depth
- local hr, dr, ch, cd = method.hfraction or 1, method.dfraction or 1, h, d
- local tlines, blines = method.tlines or 1, method.blines or 1
- local done, plusht, plusdp = false, snapht, snapdp
+
+ local h = (method.noheight and 0) or height or getfield(current,"height")
+ local d = (method.nodepth and 0) or depth or getfield(current,"depth")
+ local hr = method.hfraction or 1
+ local dr = method.dfraction or 1
+ local br = method.bfraction or 0
+ local ch = h
+ local cd = d
+ local tlines = method.tlines or 1
+ local blines = method.blines or 1
+ local done = false
+ local plusht = snapht
+ local plusdp = snapdp
local snaphtdp = snapht + snapdp
- if method.none then
+ if method.box then
+ local br = 1 - br
+ if br < 0 then
+ br = 0
+ elseif br > 1 then
+ br = 1
+ end
+ local n = ceiled((h+d-br*snapht-br*snapdp)/snaphtdp)
+ local x = n * snaphtdp - h - d
+ plusht = h + x / 2
+ plusdp = d + x / 2
+ elseif method.max then
+ local n = ceiled((h+d)/snaphtdp)
+ local x = n * snaphtdp - h - d
+ plusht = h + x / 2
+ plusdp = d + x / 2
+ elseif method.min then
+ local n = floored((h+d)/snaphtdp)
+ local x = n * snaphtdp - h - d
+ plusht = h + x / 2
+ plusdp = d + x / 2
+ elseif method.none then
plusht, plusdp = 0, 0
if t then
t[#t+1] = "none: plusht 0pt plusdp 0pt"
end
end
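
-- A worked example (made-up values, not from the patch) for the new "box" branch
-- with bfraction 0.25, snapht 10pt, snapdp 2pt and a box of height 25pt, depth 5pt:
--
--   n = ceil((25 + 5 - 0.75*10 - 0.75*2)/12) = ceil(1.75) = 2
--   x = 2*12 - 25 - 5 = -6pt
--   plusht = 25 - 3 = 22pt, plusdp = 5 - 3 = 2pt -- the total snaps to 2 grid lines
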
if method.halfline then -- extra halfline
- plusht, plusdp = plusht + snaphtdp/2, plusdp + snaphtdp/2
+ plusht = plusht + snaphtdp/2
+ plusdp = plusdp + snaphtdp/2
if t then
t[#t+1] = formatters["halfline: plusht %p plusdp %p"](plusht,plusdp)
end
end
if method.line then -- extra line
- plusht, plusdp = plusht + snaphtdp, plusdp + snaphtdp
+ plusht = plusht + snaphtdp
+ plusdp = plusdp + snaphtdp
if t then
t[#t+1] = formatters["line: plusht %p plusdp %p"](plusht,plusdp)
end
@@ -339,22 +403,22 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
if method.first then
local thebox = current
- local id = thebox.id
+ local id = getid(thebox)
if id == hlist_code then
- thebox = validvbox(id,thebox.list)
- id = thebox and thebox.id
+ thebox = validvbox(id,getlist(thebox))
+ id = thebox and getid(thebox)
end
if thebox and id == vlist_code then
- local list = thebox.list
+ local list = getlist(thebox)
local lh, ld
for n in traverse_nodes_id(hlist_code,list) do
- lh = n.height
- ld = n.depth
+ lh = getfield(n,"height")
+ ld = getfield(n,"depth")
break
end
if lh then
- local ht = thebox.height
- local dp = thebox.depth
+ local ht = getfield(thebox,"height")
+ local dp = getfield(thebox,"depth")
if t then
t[#t+1] = formatters["first line: height %p depth %p"](lh,ld)
t[#t+1] = formatters["dimensions: height %p depth %p"](ht,dp)
@@ -362,9 +426,9 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
local delta = h - lh
ch, cd = lh, delta + d
h, d = ch, cd
- local shifted = hpack_node(current.list)
- shifted.shift = delta
- current.list = shifted
+ local shifted = hpack_node(getlist(current))
+ setfield(shifted,"shift",delta)
+ setfield(current,"list",shifted)
done = true
if t then
t[#t+1] = formatters["first: height %p depth %p shift %p"](ch,cd,delta)
@@ -377,20 +441,21 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
end
elseif method.last then
local thebox = current
- local id = thebox.id
+ local id = getid(thebox)
if id == hlist_code then
- thebox = validvbox(id,thebox.list)
- id = thebox and thebox.id
+ thebox = validvbox(id,getlist(thebox))
+ id = thebox and getid(thebox)
end
if thebox and id == vlist_code then
- local list, lh, ld = thebox.list
+ local list = getlist(thebox)
+ local lh, ld
for n in traverse_nodes_id(hlist_code,list) do
- lh = n.height
- ld = n.depth
+ lh = getfield(n,"height")
+ ld = getfield(n,"depth")
end
if lh then
- local ht = thebox.height
- local dp = thebox.depth
+ local ht = getfield(thebox,"height")
+ local dp = getfield(thebox,"depth")
if t then
t[#t+1] = formatters["last line: height %p depth %p" ](lh,ld)
t[#t+1] = formatters["dimensions: height %p depth %p"](ht,dp)
@@ -398,9 +463,9 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
local delta = d - ld
cd, ch = ld, delta + h
h, d = ch, cd
- local shifted = hpack_node(current.list)
- shifted.shift = delta
- current.list = shifted
+ local shifted = hpack_node(getlist(current))
+ setfield(shifted,"shift",delta)
+ setfield(current,"list",shifted)
done = true
if t then
t[#t+1] = formatters["last: height %p depth %p shift %p"](ch,cd,delta)
@@ -461,25 +526,25 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
if offset then
-- we need to set the attr
if t then
- t[#t+1] = formatters["before offset: %p (width %p height %p depth %p)"](offset,current.width,current.height,current.depth)
+ t[#t+1] = formatters["before offset: %p (width %p height %p depth %p)"](offset,getfield(current,"width"),getfield(current,"height"),getfield(current,"depth"))
end
- local shifted = hpack_node(current.list)
- shifted.shift = offset
- current.list = shifted
+ local shifted = hpack_node(getlist(current))
+ setfield(shifted,"shift",offset)
+ setfield(current,"list",shifted)
if t then
- t[#t+1] = formatters["after offset: %p (width %p height %p depth %p)"](offset,current.width,current.height,current.depth)
+ t[#t+1] = formatters["after offset: %p (width %p height %p depth %p)"](offset,getfield(current,"width"),getfield(current,"height"),getfield(current,"depth"))
end
- shifted[a_snapmethod] = 0
- current[a_snapmethod] = 0
+ setattr(shifted,a_snapmethod,0)
+ setattr(current,a_snapmethod,0)
end
if not height then
- current.height = ch
+ setfield(current,"height",ch)
if t then
t[#t+1] = formatters["forced height: %p"](ch)
end
end
if not depth then
- current.depth = cd
+ setfield(current,"depth",cd)
if t then
t[#t+1] = formatters["forced depth: %p"](cd)
end
@@ -492,18 +557,24 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
t[#t+1] = formatters["final height: %p -> %p"](h,ch)
t[#t+1] = formatters["final depth: %p -> %p"](d,cd)
end
+-- todo:
+--
+-- if h < 0 or d < 0 then
+-- h = 0
+-- d = 0
+-- end
if t then
- report_snapper("trace: %s type %s\n\t%\n\tt",where,nodecodes[current.id],t)
+ report_snapper("trace: %s type %s\n\t%\n\tt",where,nodecodes[getid(current)],t)
end
return h, d, ch, cd, lines
end
local function snap_topskip(current,method)
- local spec = current.spec
- local w = spec.width
+ local spec = getfield(current,"spec")
+ local w = getfield(spec,"width")
local wd = w
- if spec.writable then
- spec.width = 0
+ if getfield(spec,"writable") then
+ setfield(spec,"width",0)
wd = 0
end
return w, wd
@@ -518,14 +589,15 @@ local categories = allocate {
[5] = 'disable',
[6] = 'nowhite',
[7] = 'goback',
- [8] = 'together'
+ [8] = 'together', -- not used (?)
+ [9] = 'overlay',
}
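
-- A small sketch (assumption, not in the patch) of how these names come back
-- from the converter below, including the new overlay entry:
--
--   vspacing.tocategories("5,9") -- { disable = true, overlay = true }
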
vspacing.categories = categories
function vspacing.tocategories(str)
local t = { }
- for s in gmatch(str,"[^, ]") do
+ for s in gmatch(str,"[^, ]") do -- use lpeg instead
local n = tonumber(s)
if n then
t[categories[n]] = true
@@ -536,7 +608,7 @@ function vspacing.tocategories(str)
return t
end
-function vspacing.tocategory(str)
+function vspacing.tocategory(str) -- can be optimized
if type(str) == "string" then
return set.tonumber(vspacing.tocategories(str))
else
@@ -567,15 +639,15 @@ do -- todo: interface.variables
-- This will change: just node.write and we can store the values in skips which
-- then obeys grouping
- local fixedblankskip = context.fixedblankskip
- local flexibleblankskip = context.flexibleblankskip
- local setblankcategory = context.setblankcategory
- local setblankorder = context.setblankorder
- local setblankpenalty = context.setblankpenalty
- local setblankhandling = context.setblankhandling
- local flushblankhandling = context.flushblankhandling
- local addpredefinedblankskip = context.addpredefinedblankskip
- local addaskedblankskip = context.addaskedblankskip
+ local ctx_fixedblankskip = context.fixedblankskip
+ local ctx_flexibleblankskip = context.flexibleblankskip
+ local ctx_setblankcategory = context.setblankcategory
+ local ctx_setblankorder = context.setblankorder
+ local ctx_setblankpenalty = context.setblankpenalty
+ ----- ctx_setblankhandling = context.setblankhandling
+ local ctx_flushblankhandling = context.flushblankhandling
+ local ctx_addpredefinedblankskip = context.addpredefinedblankskip
+ local ctx_addaskedblankskip = context.addaskedblankskip
local function analyze(str,oldcategory) -- we could use shorter names
for s in gmatch(str,"([^ ,]+)") do
@@ -587,35 +659,35 @@ do -- todo: interface.variables
if mk then
category = analyze(mk,category)
elseif keyword == k_fixed then
- fixedblankskip()
+ ctx_fixedblankskip()
elseif keyword == k_flexible then
- flexibleblankskip()
+ ctx_flexibleblankskip()
elseif keyword == k_category then
local category = tonumber(detail)
if category then
- setblankcategory(category)
+ ctx_setblankcategory(category)
if category ~= oldcategory then
- flushblankhandling()
+ ctx_flushblankhandling()
oldcategory = category
end
end
elseif keyword == k_order and detail then
local order = tonumber(detail)
if order then
- setblankorder(order)
+ ctx_setblankorder(order)
end
elseif keyword == k_penalty and detail then
local penalty = tonumber(detail)
if penalty then
- setblankpenalty(penalty)
+ ctx_setblankpenalty(penalty)
end
else
amount = tonumber(amount) or 1
local sk = skip[keyword]
if sk then
- addpredefinedblankskip(amount,keyword)
+ ctx_addpredefinedblankskip(amount,keyword)
else -- no check
- addaskedblankskip(amount,keyword)
+ ctx_addaskedblankskip(amount,keyword)
end
end
end
@@ -623,22 +695,22 @@ do -- todo: interface.variables
return category
end
- local pushlogger = context.pushlogger
- local startblankhandling = context.startblankhandling
- local stopblankhandling = context.stopblankhandling
- local poplogger = context.poplogger
+ local ctx_pushlogger = context.pushlogger
+ local ctx_startblankhandling = context.startblankhandling
+ local ctx_stopblankhandling = context.stopblankhandling
+ local ctx_poplogger = context.poplogger
function vspacing.analyze(str)
if trace_vspacing then
- pushlogger(report_vspacing)
- startblankhandling()
+ ctx_pushlogger(report_vspacing)
+ ctx_startblankhandling()
analyze(str,1)
- stopblankhandling()
- poplogger()
+ ctx_stopblankhandling()
+ ctx_poplogger()
else
- startblankhandling()
+ ctx_startblankhandling()
analyze(str,1)
- stopblankhandling()
+ ctx_stopblankhandling()
end
end
@@ -664,18 +736,18 @@ local trace_list, tracing_info, before, after = { }, false, "", ""
local function nodes_to_string(head)
local current, t = head, { }
while current do
- local id = current.id
+ local id = getid(current)
local ty = nodecodes[id]
if id == penalty_code then
- t[#t+1] = formatters["%s:%s"](ty,current.penalty)
+ t[#t+1] = formatters["%s:%s"](ty,getfield(current,"penalty"))
elseif id == glue_code then -- or id == kern_code then -- to be tested
t[#t+1] = formatters["%s:%p"](ty,current)
elseif id == kern_code then
- t[#t+1] = formatters["%s:%p"](ty,current.kern)
+ t[#t+1] = formatters["%s:%p"](ty,getfield(current,"kern"))
else
t[#t+1] = ty
end
- current = current.next
+ current = getnext(current)
end
return concat(t," + ")
end
@@ -699,7 +771,7 @@ local function trace_info(message, where, what)
end
local function trace_node(what)
- local nt = nodecodes[what.id]
+ local nt = nodecodes[getid(what)]
local tl = trace_list[#trace_list]
if tl and tl[1] == "node" then
trace_list[#trace_list] = { "node", formatters["%s + %s"](tl[2],nt) }
@@ -709,8 +781,8 @@ local function trace_node(what)
end
local function trace_done(str,data)
- if data.id == penalty_code then
- trace_list[#trace_list+1] = { "penalty", formatters["%s | %s"](str,data.penalty) }
+ if getid(data) == penalty_code then
+ trace_list[#trace_list+1] = { "penalty", formatters["%s | %s"](str,getfield(data,"penalty")) }
else
trace_list[#trace_list+1] = { "glue", formatters["%s | %p"](str,data) }
end
@@ -748,22 +820,32 @@ local belowdisplayshortskip_code = skipcodes.belowdisplayshortskip
local topskip_code = skipcodes.topskip
local splittopskip_code = skipcodes.splittopskip
+-- local function free_glue_node(n)
+-- free_node(n)
+-- local s = getfield(n,"spec")
+-- if s then
+-- free_node(s)
+-- end
+-- end
+
local free_glue_node = free_node
+local free_glue_spec = function() end
+----- free_glue_spec = free_node -- can be enabled in 0.73 (so for the moment we leak due to old luatex engine issues)
function vspacing.snapbox(n,how)
local sv = snapmethods[how]
if sv then
- local box = texgetbox(n)
- local list = box.list
+ local box = getbox(n)
+ local list = getlist(box)
if list then
- local s = list[a_snapmethod]
+ local s = getattr(list,a_snapmethod)
if s == 0 then
if trace_vsnapping then
-- report_snapper("box list not snapped, already done")
end
else
- local ht = box.height
- local dp = box.depth
+ local ht = getfield(box,"height")
+ local dp = getfield(box,"depth")
if false then -- todo: already_done
-- assume that the box is already snapped
if trace_vsnapping then
@@ -772,14 +854,14 @@ function vspacing.snapbox(n,how)
end
else
local h, d, ch, cd, lines = snap_hlist("box",box,sv,ht,dp)
- box.height= ch
- box.depth = cd
+ setfield(box,"height",ch)
+ setfield(box,"depth",cd)
if trace_vsnapping then
report_snapper("box list snapped from (%p,%p) to (%p,%p) using method %a (%s) for %a (%s lines): %s",
h,d,ch,cd,sv.name,sv.specification,"direct",lines,listtoutf(list))
end
- box[a_snapmethod] = 0 --
- list[a_snapmethod] = 0 -- yes or no
+ setattr(box,a_snapmethod,0) --
+ setattr(list,a_snapmethod,0) -- yes or no
end
end
end
@@ -801,8 +883,10 @@ local w, h, d = 0, 0, 0
----- w, h, d = 100*65536, 65536, 65536
local function forced_skip(head,current,width,where,trace)
- if head == current and head.subtype == baselineskip_code then
- width = width - head.spec.width
+ if head == current then
+ if getsubtype(head) == baselineskip_code then
+ width = width - getfield(getfield(head,"spec"),"width")
+ end
end
if width == 0 then
-- do nothing
@@ -825,62 +909,270 @@ end
-- penalty only works well when before skip
-local discard, largest, force, penalty, add, disable, nowhite, goback, together = 0, 1, 2, 3, 4, 5, 6, 7, 8 -- move into function when upvalue 60 issue
+local discard = 0
+local largest = 1
+local force = 2
+local penalty = 3
+local add = 4
+local disable = 5
+local nowhite = 6
+local goback = 7
+local together = 8 -- not used (?)
+local overlay = 9
-- [whatsits][hlist][glue][glue][penalty]
local special_penalty_min = 32250
local special_penalty_max = 35000
+local special_penalty_xxx = 0
+
+-- this is rather messy and complex: we want to make sure that successive
+-- header don't break but also make sure that we have at least a decent
+-- break when we have succesive ones (often when testing)
+
+-- todo: mark headers as such so that we can recognize them
+
+local specialmethods = { }
+local specialmethod = 1
-local function specialpenalty(start,penalty)
- -- nodes.showsimplelist(texlists.page_head,1)
- local current = find_node_tail(texlists.page_head)
+local properties = nodes.properties.data
+
+specialmethods[1] = function(pagehead,pagetail,start,penalty)
+ --
+ if not pagehead or penalty < special_penalty_min or penalty > special_penalty_max then
+ return
+ end
+ local current = pagetail
+ --
+ -- nodes.showsimplelist(pagehead,0)
+ --
+ if trace_specials then
+ report_specials("checking penalty %a",penalty)
+ end
while current do
- local id = current.id
- if id == glue_code then
- current = current.prev
- elseif id == penalty_code then
- local p = current.penalty
- if p == penalty then
- if trace_vspacing then
- report_vspacing("overloading penalty %a",p)
+ local id = getid(current)
+ if id == penalty_code then
+ local p = properties[current]
+ if p then
+ local p = p.special_penalty
+ if not p then
+ if trace_specials then
+ report_specials(" regular penalty, continue")
+ end
+ elseif p == penalty then
+ if trace_specials then
+ report_specials(" context penalty %a, same level, overloading",p)
+ end
+ return special_penalty_xxx
+ elseif p > special_penalty_min and p < special_penalty_max then
+ if penalty < p then
+ if trace_specials then
+ report_specials(" context penalty %a, lower level, overloading",p)
+ end
+ return special_penalty_xxx
+ else
+ if trace_specials then
+ report_specials(" context penalty %a, higher level, quitting",p)
+ end
+ return
+ end
+ elseif trace_specials then
+ report_specials(" context penalty %a, higher level, continue",p)
end
- return current
- elseif p >= 10000 then
- current = current.prev
else
- break
+ local p = getfield(current,"penalty")
+ if p < 10000 then
+ -- assume some other mechanism kicks in so we seem to have content
+ if trace_specials then
+ report_specials(" regular penalty %a, quitting",p)
+ end
+ break
+ else
+ if trace_specials then
+ report_specials(" regular penalty %a, continue",p)
+ end
+ end
+ end
+ end
+ current = getprev(current)
+ end
+ -- none found, so no reason to be special
+ if trace_specials then
+ if pagetail then
+ report_specials(" context penalty, discarding, nothing special")
+ else
+ report_specials(" context penalty, discarding, nothing preceding")
+ end
+ end
+ return special_penalty_xxx
+end
+
+-- specialmethods[2] : always put something before and use that as to-be-changed
+--
+-- we could inject a vadjust to force a recalculation .. a mess
+--
+-- So, the next is far from robust and okay but for the moment this overlaying
+-- has to do. Always test this with the examples in spec-ver.mkvi!
+
+local function check_experimental_overlay(head,current)
+ local p = nil
+ local c = current
+ local n = nil
+
+ -- setfield(head,"prev",nil) -- till we have 0.79 **
+
+ local function overlay(p,n,mvl)
+ local p_ht = getfield(p,"height")
+ local p_dp = getfield(p,"depth")
+ local n_ht = getfield(n,"height")
+ local skips = 0
+ --
+ -- We deal with this at the tex end .. we don't see spacing .. enabling this code
+ -- is probably harmless but then we need to test it.
+ --
+ local c = getnext(p)
+ while c and c ~= n do
+ local id = getid(c)
+ if id == glue_code then
+ skips = skips + getfield(getfield(c,"glue_spec"),"width")
+ elseif id == kern_code then
+ skips = skips + getfield(c,"kern")
end
+ c = getnext(c)
+ end
+ --
+ local delta = n_ht + skips + p_dp
+ texsetdimen("global","d_spac_overlay",-delta) -- for tracing
+ local k = new_kern(-delta)
+ if n_ht > p_ht then
+ -- we should adapt pagetotal ! (need a hook for that) .. now we have the wrong pagebreak
+ setfield(p,"height",n_ht)
+ end
+ insert_node_before(head,n,k)
+ if p == head then
+ head = k
+ end
+ if trace_vspacing then
+ report_vspacing("overlaying, prev height: %p, prev depth: %p, next height: %p, skips: %p, move up: %p",p_ht,p_dp,n_ht,skips,delta)
+ end
+ return remove_node(head,current,true)
+ end
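
-- A worked example (made-up values, not from the patch): previous line depth 3pt,
-- next line height 8pt, no skips in between => delta = 8 + 0 + 3 = 11pt, so a
-- kern of -11pt is inserted before the next line, pulling it up over the previous one.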
+
+ -- goto next line
+ while c do
+ local id = getid(c)
+ if id == glue_code or id == penalty_code or id == kern_code then
+ -- skip (actually, remove)
+ c = getnext(c)
+ elseif id == hlist_code then
+ n = c
+ break
else
- current = current.prev
+ break
+ end
+ end
+ if n then
+ -- we have a next line, goto prev line
+ c = current
+ while c do
+ local id = getid(c)
+ if id == glue_code or id == penalty_code then
+ c = getprev(c)
+ elseif id == hlist_code then
+ p = c
+ break
+ else
+ break
+ end
+ end
+ if not p then
+ if a_snapmethod == a_snapvbox then
+ -- quit, we're not on the mvl
+ else
+ local c = tonut(texlists.page_head)
+ while c and c ~= n do
+ local id = getid(c)
+ if id == hlist_code then
+ p = c
+ end
+ c = getnext(c)
+ end
+ if p and p ~= n then
+ return overlay(p,n,true)
+ end
+ end
+ elseif p ~= n then
+ return overlay(p,n,false)
end
end
+ -- in fact, we could try again later ... so then no remove (a few tries)
+ return remove_node(head, current, true)
end
+-- This will be replaced after 0.80+ when we have a more robust look-back and
+-- can look at the bigger picture.
+
+-- todo: look back and, when a special is there before a list is seen, keep the penalty
+
+-- we now look back a lot, way too often
+
local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also pass tail
if trace then
reset_tracing(head)
end
local current, oldhead = head, head
local glue_order, glue_data, force_glue = 0, nil, false
- local penalty_order, penalty_data, natural_penalty = 0, nil, nil
+ local penalty_order, penalty_data, natural_penalty, special_penalty = 0, nil, nil, nil
local parskip, ignore_parskip, ignore_following, ignore_whitespace, keep_together = nil, false, false, false, false
--
-- todo: keep_together: between headers
--
+ local pagehead = nil
+ local pagetail = nil
+
+ local function getpagelist()
+ if not pagehead then
+ pagehead = texlists.page_head
+ if pagehead then
+ pagehead = tonut(texlists.page_head)
+ pagetail = find_node_tail(pagehead) -- no texlists.page_tail yet
+ end
+ end
+ end
+ --
local function flush(why)
if penalty_data then
local p = new_penalty(penalty_data)
if trace then trace_done("flushed due to " .. why,p) end
- head = insert_node_before(head,current,p)
+ if penalty_data >= 10000 then -- or whatever threshold?
+ local prev = getprev(current)
+ if getid(prev) == glue_code then -- maybe go back more, or maybe even push back before any glue
+ -- tricky case: spacing/grid-007.tex: glue penalty glue
+ head = insert_node_before(head,prev,p)
+ else
+ head = insert_node_before(head,current,p)
+ end
+ else
+ head = insert_node_before(head,current,p)
+ end
+-- if penalty_data > special_penalty_min and penalty_data < special_penalty_max then
+ local props = properties[p]
+ if props then
+ props.special_penalty = special_penalty or penalty_data
+ else
+ properties[p] = {
+ special_penalty = special_penalty or penalty_data
+ }
+ end
+-- end
end
if glue_data then
- local spec = glue_data.spec
+ local spec = getfield(glue_data,"spec")
if force_glue then
if trace then trace_done("flushed due to " .. why,glue_data) end
- head = forced_skip(head,current,spec.width,"before",trace)
+ head = forced_skip(head,current,getfield(spec,"width"),"before",trace)
free_glue_node(glue_data)
- elseif spec.writable then
+ elseif getfield(spec,"writable") then
if trace then trace_done("flushed due to " .. why,glue_data) end
head = insert_node_before(head,current,glue_data)
else
@@ -892,6 +1184,26 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
penalty_order, penalty_data, natural_penalty = 0, nil, nil
parskip, ignore_parskip, ignore_following, ignore_whitespace = nil, false, false, false
end
+ --
+
+-- quick hack, can be done nicer
+-- local nobreakfound = nil
+-- local function checknobreak()
+-- local pagehead, pagetail = getpagelist()
+-- local current = pagetail
+-- while current do
+-- local id = getid(current)
+-- if id == hlist_code or id == vlist_code then
+-- return false
+-- elseif id == penalty_code then
+-- return getfield(current,"penalty") >= 10000
+-- end
+-- current = getprev(current)
+-- end
+-- return false
+-- end
+
+ --
if trace_vsnapping then
report_snapper("global ht/dp = %p/%p, local ht/dp = %p/%p",
texgetdimen("globalbodyfontstrutheight"), texgetdimen("globalbodyfontstrutdepth"),
@@ -899,13 +1211,19 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
)
end
if trace then trace_info("start analyzing",where,what) end
+
+-- local headprev = getprev(head)
+
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code or id == vlist_code then
+-- if nobreakfound == nil then
+-- nobreakfound = false
+-- end
-- needs checking, why so many calls
if snap then
- local list = current.list
- local s = current[a_snapmethod]
+ local list = getlist(current)
+ local s = getattr(current,a_snapmethod)
if not s then
-- if trace_vsnapping then
-- report_snapper("mvl list not snapped")
@@ -919,8 +1237,8 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
if sv then
-- check if already snapped
if list and already_done(id,list,a_snapmethod) then
- local ht = current.height
- local dp = current.depth
+ local ht = getfield(current,"height")
+ local dp = getfield(current,"depth")
-- assume that the box is already snapped
if trace_vsnapping then
report_snapper("mvl list already snapped at (%p,%p): %s",ht,dp,listtoutf(list))
@@ -935,40 +1253,60 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
elseif trace_vsnapping then
report_snapper("mvl %a not snapped due to unknown snap specification: %s",nodecodes[id],listtoutf(list))
end
- current[a_snapmethod] = 0
+ setattr(current,a_snapmethod,0)
end
else
--
end
-- tex.prevdepth = 0
flush("list")
- current = current.next
+ current = getnext(current)
elseif id == penalty_code then
- -- natural_penalty = current.penalty
+ -- natural_penalty = getfield(current,"penalty")
-- if trace then trace_done("removed penalty",current) end
-- head, current = remove_node(head, current, true)
- current = current.next
+
+-- if nobreakfound == nil then
+-- nobreakfound = checknobreak()
+-- end
+-- if nobreakfound and getfield(current,"penalty") <= 10000 then
+-- -- if trace then
+-- trace_done("removed penalty",current)
+-- -- end
+-- head, current = remove_node(head, current, true)
+-- end
+
+ current = getnext(current)
elseif id == kern_code then
- if snap and trace_vsnapping and current.kern ~= 0 then
- report_snapper("kern of %p kept",current.kern)
+ if snap and trace_vsnapping and getfield(current,"kern") ~= 0 then
+ report_snapper("kern of %p kept",getfield(current,"kern"))
end
flush("kern")
- current = current.next
+ current = getnext(current)
elseif id == glue_code then
- local subtype = current.subtype
+ local subtype = getsubtype(current)
if subtype == userskip_code then
- local sc = current[a_skipcategory] -- has no default, no unset (yet)
- local so = current[a_skiporder] or 1 -- has 1 default, no unset (yet)
- local sp = current[a_skippenalty] -- has no default, no unset (yet)
+ local sc = getattr(current,a_skipcategory) -- has no default, no unset (yet)
+ local so = getattr(current,a_skiporder) or 1 -- has 1 default, no unset (yet)
+ local sp = getattr(current,a_skippenalty) -- has no default, no unset (yet)
if sp and sc == penalty then
+ if where == "page" then
+ getpagelist()
+ local p = specialmethods[specialmethod](pagehead,pagetail,current,sp)
+ if p then
+ if trace then
+ trace_skip("previous special penalty %a is changed to %a using method %a",sp,p,specialmethod)
+ end
+ special_penalty = sp
+ sp = p
+ end
-if where == "page" and sp >= special_penalty_min and sp <= special_penalty_max then
- local previousspecial = specialpenalty(current,sp)
- if previousspecial then
- previousspecial.penalty = 0
- sp = 0
- end
-end
+-- else
+-- if nobreakfound == nil then
+-- nobreakfound = checknobreak()
+-- end
+
+ end
if not penalty_data then
penalty_data = sp
elseif penalty_order < so then
@@ -977,43 +1315,52 @@ end
penalty_data = sp
end
if trace then trace_skip("penalty in skip",sc,so,sp,current) end
+
+-- if nobreakfound then
+-- penalty_data = 10000
+-- if trace then
+-- trace_skip("nobreak found before penalty in skip",sc,so,sp,current)
+-- end
+-- end
+
head, current = remove_node(head, current, true)
elseif not sc then -- if not sc then
if glue_data then
if trace then trace_done("flush",glue_data) end
head = insert_node_before(head,current,glue_data)
if trace then trace_natural("natural",current) end
- current = current.next
+ current = getnext(current)
else
-- not look back across head
--- todo: prev can be whatsit (latelua)
- local previous = current.prev
- if previous and previous.id == glue_code and previous.subtype == userskip_code then
- local ps = previous.spec
- if ps.writable then
- local cs = current.spec
- if cs.writable and ps.stretch_order == 0 and ps.shrink_order == 0 and cs.stretch_order == 0 and cs.shrink_order == 0 then
- local pw, pp, pm = ps.width, ps.stretch, ps.shrink
- local cw, cp, cm = cs.width, cs.stretch, cs.shrink
+ -- todo: prev can be whatsit (latelua)
+ local previous = getprev(current)
+ if previous and getid(previous) == glue_code and getsubtype(previous) == userskip_code then
+ local ps = getfield(previous,"spec")
+ if getfield(ps,"writable") then
+ local cs = getfield(current,"spec")
+ if getfield(cs,"writable") and getfield(ps,"stretch_order") == 0 and getfield(ps,"shrink_order") == 0 and getfield(cs,"stretch_order") == 0 and getfield(cs,"shrink_order") == 0 then
+ local pw, pp, pm = getfield(ps,"width"), getfield(ps,"stretch"), getfield(ps,"shrink")
+ local cw, cp, cm = getfield(cs,"width"), getfield(cs,"stretch"), getfield(cs,"shrink")
-- ps = writable_spec(previous) -- no writable needed here
-- ps.width, ps.stretch, ps.shrink = pw + cw, pp + cp, pm + cm
- previous.spec = new_gluespec(pw + cw, pp + cp, pm + cm) -- else topskip can disappear
+ free_glue_spec(ps)
+ setfield(previous,"spec",new_gluespec(pw + cw, pp + cp, pm + cm)) -- else topskip can disappear
if trace then trace_natural("removed",current) end
head, current = remove_node(head, current, true)
-- current = previous
if trace then trace_natural("collapsed",previous) end
- -- current = current.next
+ -- current = getnext(current)
else
if trace then trace_natural("filler",current) end
- current = current.next
+ current = getnext(current)
end
else
if trace then trace_natural("natural (no prev spec)",current) end
- current = current.next
+ current = getnext(current)
end
else
if trace then trace_natural("natural (no prev)",current) end
- current = current.next
+ current = getnext(current)
end
end
glue_order, glue_data = 0, nil
@@ -1031,6 +1378,12 @@ end
elseif sc == discard then
if trace then trace_skip("discard",sc,so,sp,current) end
head, current = remove_node(head, current, true)
+ elseif sc == overlay then
+ -- todo: overlay the following line over the previous one
+ if trace then trace_skip("overlay",sc,so,sp,current) end
+ -- beware: head can actually be after the affected nodes as
+ -- we look back ... some day head will be the real head
+ head, current = check_experimental_overlay(head,current,a_snapmethod)
elseif ignore_following then
if trace then trace_skip("disabled",sc,so,sp,current) end
head, current = remove_node(head, current, true)
@@ -1046,12 +1399,12 @@ end
elseif glue_order == so then
-- is now exclusive, maybe support goback as combi, else why a set
if sc == largest then
- local cs, gs = current.spec, glue_data.spec
- local cw, gw = cs.width, gs.width
+ local cs, gs = getfield(current,"spec"), getfield(glue_data,"spec")
+ local cw, gw = getfield(cs,"width"), getfield(gs,"width")
if cw > gw then
if trace then trace_skip("largest",sc,so,sp,current) end
free_glue_node(glue_data) -- also free spec
- head, current, glue_data = remove_node(head, current)
+ head, current, glue_data = remove_node(head,current)
else
if trace then trace_skip("remove smallest",sc,so,sp,current) end
head, current = remove_node(head, current, true)
@@ -1059,7 +1412,7 @@ end
elseif sc == goback then
if trace then trace_skip("goback",sc,so,sp,current) end
free_glue_node(glue_data) -- also free spec
- head, current, glue_data = remove_node(head, current)
+ head, current, glue_data = remove_node(head,current)
elseif sc == force then
-- last one counts, some day we can provide an accumulator and largest etc
-- but not now
@@ -1073,11 +1426,11 @@ end
head, current = remove_node(head, current, true)
elseif sc == add then
if trace then trace_skip("add",sc,so,sp,current) end
- -- local old, new = glue_data.spec, current.spec
- local old, new = writable_spec(glue_data), current.spec
- old.width = old.width + new.width
- old.stretch = old.stretch + new.stretch
- old.shrink = old.shrink + new.shrink
+ -- local old, new = glue_data.spec, getfield(current,"spec")
+ local old, new = writable_spec(glue_data), getfield(current,"spec")
+ setfield(old,"width",getfield(old,"width") + getfield(new,"width"))
+ setfield(old,"stretch",getfield(old,"stretch") + getfield(new,"stretch"))
+ setfield(old,"shrink",getfield(old,"shrink") + getfield(new,"shrink"))
 -- todo: order
head, current = remove_node(head, current, true)
else
@@ -1093,12 +1446,13 @@ end
end
elseif subtype == lineskip_code then
if snap then
- local s = current[a_snapmethod]
+ local s = getattr(current,a_snapmethod)
if s and s ~= 0 then
- current[a_snapmethod] = 0
- if current.spec.writable then
+ setattr(current,a_snapmethod,0)
+ local spec = getfield(current,"spec")
+ if getfield(spec,"writable") then
local spec = writable_spec(current)
- spec.width = 0
+ setfield(spec,"width",0)
if trace_vsnapping then
report_snapper("lineskip set to zero")
end
@@ -1111,15 +1465,16 @@ end
if trace then trace_skip("lineskip",sc,so,sp,current) end
flush("lineskip")
end
- current = current.next
+ current = getnext(current)
elseif subtype == baselineskip_code then
if snap then
- local s = current[a_snapmethod]
+ local s = getattr(current,a_snapmethod)
if s and s ~= 0 then
- current[a_snapmethod] = 0
- if current.spec.writable then
+ setattr(current,a_snapmethod,0)
+ local spec = getfield(current,"spec")
+ if getfield(spec,"writable") then
local spec = writable_spec(current)
- spec.width = 0
+ setfield(spec,"width",0)
if trace_vsnapping then
report_snapper("baselineskip set to zero")
end
@@ -1132,17 +1487,17 @@ end
if trace then trace_skip("baselineskip",sc,so,sp,current) end
flush("baselineskip")
end
- current = current.next
+ current = getnext(current)
elseif subtype == parskip_code then
-- parskip always comes later
if ignore_whitespace then
if trace then trace_natural("ignored parskip",current) end
head, current = remove_node(head, current, true)
elseif glue_data then
- local ps = current.spec
- local gs = glue_data.spec
- if ps.writable and gs.writable and ps.width > gs.width then
- glue_data.spec = copy_node(ps)
+ local ps = getfield(current,"spec")
+ local gs = getfield(glue_data,"spec")
+ if getfield(ps,"writable") and getfield(gs,"writable") and getfield(ps,"width") > getfield(gs,"width") then
+ setfield(glue_data,"spec",copy_node(ps))
if trace then trace_natural("taking parskip",current) end
else
if trace then trace_natural("removed parskip",current) end
@@ -1154,9 +1509,9 @@ end
end
elseif subtype == topskip_code or subtype == splittopskip_code then
if snap then
- local s = current[a_snapmethod]
+ local s = getattr(current,a_snapmethod)
if s and s ~= 0 then
- current[a_snapmethod] = 0
+ setattr(current,a_snapmethod,0)
local sv = snapmethods[s]
local w, cw = snap_topskip(current,sv)
if trace_vsnapping then
@@ -1170,46 +1525,46 @@ end
if trace then trace_skip("topskip",sc,so,sp,current) end
flush("topskip")
end
- current = current.next
+ current = getnext(current)
elseif subtype == abovedisplayskip_code then
--
if trace then trace_skip("above display skip (normal)",sc,so,sp,current) end
flush("above display skip (normal)")
- current = current.next
+ current = getnext(current)
--
elseif subtype == belowdisplayskip_code then
--
if trace then trace_skip("below display skip (normal)",sc,so,sp,current) end
flush("below display skip (normal)")
- current = current.next
- --
+ current = getnext(current)
+ --
elseif subtype == abovedisplayshortskip_code then
--
if trace then trace_skip("above display skip (short)",sc,so,sp,current) end
flush("above display skip (short)")
- current = current.next
+ current = getnext(current)
--
elseif subtype == belowdisplayshortskip_code then
--
if trace then trace_skip("below display skip (short)",sc,so,sp,current) end
flush("below display skip (short)")
- current = current.next
+ current = getnext(current)
--
else -- other glue
if snap and trace_vsnapping then
- local spec = current.spec
- if spec.writable and spec.width ~= 0 then
- report_snapper("glue %p of type %a kept",current.spec.width,skipcodes[subtype])
- -- spec.width = 0
+ local spec = getfield(current,"spec")
+ if getfield(spec,"writable") and getfield(spec,"width") ~= 0 then
+ report_snapper("glue %p of type %a kept",getfield(spec,"width"),skipcodes[subtype])
+ -- setfield(spec,"width",0)
end
end
- if trace then trace_skip(formatter["glue of type %a"](subtype),sc,so,sp,current) end
+ if trace then trace_skip(formatters["glue of type %a"](subtype),sc,so,sp,current) end
flush("some glue")
- current = current.next
+ current = getnext(current)
end
else
- flush("something else")
- current = current.next
+ flush(formatters["node with id %a"](id))
+ current = getnext(current)
end
end
if trace then trace_info("stop analyzing",where,what) end
@@ -1225,17 +1580,28 @@ end
local p = new_penalty(penalty_data)
if trace then trace_done("result",p) end
head, tail = insert_node_after(head,tail,p)
+ -- if penalty_data > special_penalty_min and penalty_data < special_penalty_max then
+ local props = properties[p]
+ if props then
+ props.special_penalty = special_penalty or penalty_data
+ else
+ properties[p] = {
+ special_penalty = special_penalty or penalty_data
+ }
+ end
+ -- end
end
if glue_data then
if not tail then tail = find_node_tail(head) end
if trace then trace_done("result",glue_data) end
if force_glue then
- head, tail = forced_skip(head,tail,glue_data.spec.width,"after",trace)
+ local spec = getfield(glue_data,"spec")
+ head, tail = forced_skip(head,tail,getfield(spec,"width"),"after",trace)
free_glue_node(glue_data)
else
head, tail = insert_node_after(head,tail,glue_data)
end
-texnest[texnest.ptr].prevdepth = 0 -- appending to the list bypasses tex's prevdepth handler
+ texnest[texnest.ptr].prevdepth = 0 -- appending to the list bypasses tex's prevdepth handler
end
if trace then
if glue_data or penalty_data then
@@ -1243,9 +1609,16 @@ texnest[texnest.ptr].prevdepth = 0 -- appending to the list bypasses tex's prevd
end
show_tracing(head)
if oldhead ~= head then
- trace_info("head has been changed from %a to %a",nodecodes[oldhead.id],nodecodes[head.id])
+ trace_info("head has been changed from %a to %a",nodecodes[getid(oldhead)],nodecodes[getid(head)])
end
end
+
+-- if headprev then
+-- setprev(head,headprev)
+-- setnext(headprev,head)
+-- end
+-- print("C HEAD",tonode(head))
+
return head, true
end
@@ -1271,16 +1644,17 @@ end
function vspacing.pagehandler(newhead,where)
-- local newhead = texlists.contrib_head
if newhead then
+ newhead = tonut(newhead)
local newtail = find_node_tail(newhead) -- best pass that tail, known anyway
local flush = false
stackhack = true -- todo: only when grid snapping once enabled
-- todo: fast check if head = tail
for n in traverse_nodes(newhead) do -- we could just look for glue nodes
- local id = n.id
+ local id = getid(n)
if id ~= glue_code then
flush = true
- elseif n.subtype == userskip_code then
- if n[a_skipcategory] then
+ elseif getsubtype(n) == userskip_code then
+ if getattr(n,a_skipcategory) then
stackhack = true
else
flush = true
@@ -1292,35 +1666,36 @@ function vspacing.pagehandler(newhead,where)
if flush then
if stackhead then
if trace_collect_vspacing then report("appending %s nodes to stack (final): %s",newhead) end
- stacktail.next = newhead
- newhead.prev = stacktail
+ setfield(stacktail,"next",newhead)
+ setfield(newhead,"prev",stacktail)
newhead = stackhead
stackhead, stacktail = nil, nil
end
if stackhack then
stackhack = false
if trace_collect_vspacing then report("processing %s nodes: %s",newhead) end
- -- texlists.contrib_head = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
- newhead = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
+ -- texlists.contrib_head = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
+ newhead = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
else
if trace_collect_vspacing then report("flushing %s nodes: %s",newhead) end
-- texlists.contrib_head = newhead
end
+ return tonode(newhead)
else
if stackhead then
if trace_collect_vspacing then report("appending %s nodes to stack (intermediate): %s",newhead) end
- stacktail.next = newhead
- newhead.prev = stacktail
+ setfield(stacktail,"next",newhead)
+ setfield(newhead,"prev",stacktail)
else
if trace_collect_vspacing then report("storing %s nodes in stack (initial): %s",newhead) end
stackhead = newhead
end
stacktail = newtail
-- texlists.contrib_head = nil
- newhead = nil
+ -- newhead = nil
end
end
- return newhead
+ return nil
end
local ignore = table.tohash {
@@ -1330,18 +1705,27 @@ local ignore = table.tohash {
}
function vspacing.vboxhandler(head,where)
- if head and not ignore[where] and head.next then
- head = collapser(head,"vbox",where,trace_vbox_vspacing,true,a_snapvbox) -- todo: local snapper
+ if head and not ignore[where] then
+ local h = tonut(head)
+ if getnext(h) then -- what if a one liner and snapping?
+ h = collapser(h,"vbox",where,trace_vbox_vspacing,true,a_snapvbox) -- todo: local snapper
+ return tonode(h)
+ end
end
return head
end
-function vspacing.collapsevbox(n) -- for boxes but using global a_snapmethod
- local box = texgetbox(n)
+function vspacing.collapsevbox(n,aslist) -- for boxes but using global a_snapmethod
+ local box = getbox(n)
if box then
- local list = box.list
+ local list = getlist(box)
if list then
- box.list = vpack_node(collapser(list,"snapper","vbox",trace_vbox_vspacing,true,a_snapmethod))
+ list = collapser(list,"snapper","vbox",trace_vbox_vspacing,true,a_snapmethod)
+ if aslist then
+ setfield(box,"list",list) -- beware, dimensions of box are wrong now
+ else
+ setfield(box,"list",vpack_node(list))
+ end
end
end
end
@@ -1352,14 +1736,65 @@ end
local outer = texnest[0]
function vspacing.resetprevdepth()
- outer.prevdepth = 0
+ if texlists.hold_head then
+ outer.prevdepth = 0
+ end
end
-- interface
-commands.vspacing = vspacing.analyze
-commands.vspacingsetamount = vspacing.setskip
-commands.vspacingdefine = vspacing.setmap
-commands.vspacingcollapse = vspacing.collapsevbox
-commands.vspacingsnap = vspacing.snapbox
-commands.resetprevdepth = vspacing.resetprevdepth
+implement {
+ name = "vspacing",
+ actions = vspacing.analyze,
+ scope = "private",
+ arguments = "string"
+}
+
+implement {
+ name = "resetprevdepth",
+ actions = vspacing.resetprevdepth,
+ scope = "private"
+}
+
+implement {
+ name = "vspacingsetamount",
+ actions = vspacing.setskip,
+ scope = "private",
+ arguments = "string",
+}
+
+implement {
+ name = "vspacingdefine",
+ actions = vspacing.setmap,
+ scope = "private",
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "vspacingcollapse",
+ actions = vspacing.collapsevbox,
+ scope = "private",
+ arguments = "integer"
+}
+
+implement {
+ name = "vspacingcollapseonly",
+ actions = vspacing.collapsevbox,
+ scope = "private",
+ arguments = { "integer", true }
+}
+
+implement {
+ name = "vspacingsnap",
+ actions = vspacing.snapbox,
+ scope = "private",
+ arguments = { "integer", "integer" }
+}
+
+implement {
+ name = "definesnapmethod",
+ actions = vspacing.definesnapmethod,
+ scope = "private",
+ arguments = { "string", "string" }
+}
+
diff --git a/tex/context/base/spac-ver.mkiv b/tex/context/base/spac-ver.mkiv
index afa722cfe..86a731d3c 100644
--- a/tex/context/base/spac-ver.mkiv
+++ b/tex/context/base/spac-ver.mkiv
@@ -17,6 +17,8 @@
\registerctxluafile{spac-ver}{1.001}
+% todo: use usernodes ?
+
% todo: itemize : intro ... only when there is one or two lines preceding and then
% keep these together i.e. \blank[intro]
@@ -120,7 +122,7 @@
\setvalue{\??interlinespacerelative\v!auto }{\let\setrelativeinterlinespace\spac_linespacing_set_relative_interlinespace}
\def\spac_linespacing_set_specified_relative_interlinespace#1% fragile?
- {\doifdimenstringelse{#1}
+ {\doifelsedimenstring{#1}
{\setupspecifiedinterlinespace[\c!line=#1]}
{\assignvalue{#1}\currentrelativeinterlinespace{1.00}{1.25}{1.50}%
\spacing\currentrelativeinterlinespace}}
@@ -140,6 +142,20 @@
\spacing\currentrelativeinterlinespace
\fi}
+\unexpanded\def\spac_linespacing_setup_use
+ {\ifcsname\namedinterlinespacehash\m_spac_interlinespace\s!parent\endcsname
+ \let\currentinterlinespace\m_spac_interlinespace
+ \spac_linespacing_setup_specified_interline_space
+ % \else
+ % we only support named interlinespaces
+ \fi}
+
+\unexpanded\def\useinterlinespaceparameter#1% see footnotes
+ {\edef\m_spac_interlinespace{#1\c!interlinespace}%
+ \ifx\m_spac_interlinespace\empty \else
+ \spac_linespacing_setup_use
+ \fi}
+
\newtoks\everysetupglobalinterlinespace
\newtoks\everysetuplocalinterlinespace
@@ -152,10 +168,14 @@
\unexpanded\def\setupinterlinespace
{\dodoubleempty\spac_linespacing_setup}
+\ifdefined\setupinterlinespace_double \else
+ \let\setupinterlinespace_double\setup_interlinespace % for a while
+\fi
+
\def\spac_linespacing_setup[#1][#2]%
{\settrue\interlinespaceisset % reset has to be done when needed
\ifsecondargument
- \setup_interlinespace[#1][#2]%
+ \setupinterlinespace_double[#1][#2]%
\else\iffirstargument
\ifcsname\namedinterlinespacehash{#1}\s!parent\endcsname
\edef\currentinterlinespace{#1}%
@@ -170,7 +190,7 @@
\fi\fi}
\def\spac_linespacing_setup_specified_or_relative[#1]%
- {\doifassignmentelse{#1}\setupspecifiedinterlinespace\setuprelativeinterlinespace[#1]%
+ {\doifelseassignment{#1}\setupspecifiedinterlinespace\setuprelativeinterlinespace[#1]%
\the\iflocalinterlinespace\everysetuplocalinterlinespace\else\everysetupglobalinterlinespace\fi}
\def\spac_linespacing_synchronize_local % adapts to the font
@@ -197,7 +217,7 @@
\fi
\popmacro\currentinterlinespace
\else
- \normalexpanded{\noexpand\doifassignmentelse{\p_spac_checked_interlinespace}%
+ \normalexpanded{\noexpand\doifelseassignment{\p_spac_checked_interlinespace}%
\setupspecifiedinterlinespace\setuprelativeinterlinespace[\p_spac_checked_interlinespace]}%
\iflocalinterlinespace
\the\everysetuplocalinterlinespace
@@ -330,7 +350,7 @@
\let\v_spac_whitespace_current\v!none
\unexpanded\def\setupwhitespace
- {\doifnextoptionalelse\spac_whitespace_setup_yes\spac_whitespace_setup_nop}
+ {\doifelsenextoptionalcs\spac_whitespace_setup_yes\spac_whitespace_setup_nop}
\def\spac_whitespace_setup_nop
{\ifx\v_spac_whitespace_current\v!none\else
@@ -542,6 +562,8 @@
\ignorespaces
\let\spac_lines_stop_correction\spac_lines_stop_correction_yes}
+% still not ok ... will move to the lua end ... needs a final solution
+
\unexpanded\def\spac_lines_stop_correction_yes
{\removeunwantedspaces
\egroup
@@ -549,6 +571,11 @@
\blank[\v!white]%
\snaptogrid\hbox{\box\scratchbox}%
\else
+\blank[\v!nowhite]%
+\ifdim\parskip>\zeropoint
+ % too fuzzy otherwise
+\else
+ % doesn't like whitespace
\ifdim\d_spac_prevdepth<\maxdimen
\unless\ifdim\d_spac_prevdepth<\zeropoint
\ifdim\d_spac_prevdepth<\strutdp \relax
@@ -562,6 +589,7 @@
\fi
\fi
\fi
+\fi
\ifdim\pagegoal<\maxdimen
\blank[\v!white,\the\d_spac_lines_correction_before]% \blank[\v!white]\dotopbaselinecorrection
\fi
@@ -853,11 +881,11 @@
\strutheightfactor\dimexpr\normallineheight
\fi
\strutdp\spacingfactor\dimexpr
- \ifdim\minimumstrutdepth>\zeropoint
- \minimumstrutdepth
- \else
- \strutdepthfactor\dimexpr\normallineheight
- \fi
+ \ifdim\minimumstrutdepth>\zeropoint
+ \minimumstrutdepth
+ \else
+ \strutdepthfactor\dimexpr\normallineheight
+ \fi
\dosetstrut}
\unexpanded\def\setcharstrut#1%
@@ -896,7 +924,9 @@
\ifabsnum\dimexpr\strutht+\strutdp-\lineheight\relax<\plustwo
% compensate rounding error /- 1sp to avoid too many
% 1sp baselineskips in for instance verbatim
- \strutht\dimexpr\lineheight-\strutdp\relax
+ % \strutht\dimexpr\lineheight-\strutdp\relax
+ % better:
+ \strutdp\dimexpr\lineheight-\strutht\relax
\struttotal\lineheight
\else
\struttotal\dimexpr\strutht+\strutdp\relax
@@ -916,15 +946,47 @@
\s!height\strutht
\s!depth \strutdp}}
+\newconstant\c_strut_visual_mode
+
\def\spac_struts_set_vide
{\setbox\strutbox\hbox % at some time this extra wrapping was needed
{\spac_struts_vide_hbox to \zeropoint
- {% \hss % new, will be option
- \vrule
- \s!width \strutwidth
- \s!height\strutht
- \s!depth \strutdp
- \hss}}}
+ {\ifcase\c_strut_visual_mode
+ \spac_struts_black
+ \or
+ \spac_struts_color
+ \else
+ \spac_struts_black
+ \fi}}}
+
+\def\spac_struts_black
+ {\vrule
+ \s!width \strutwidth
+ \s!height\strutht
+ \s!depth \strutdp
+ \hss}
+
+\def\spac_struts_color
+ {\hss % new, will be option
+ \scratchwidth.1\struthtdp
+ \begingroup
+ \directcolor[f:b:t]%
+ \vrule
+ \s!width \scratchwidth
+ \s!height\strutht
+ \s!depth \strutdp
+ \kern-\scratchwidth
+ \vrule
+ \s!width \scratchwidth
+ \s!height\zeropoint
+ \s!depth \strutdp
+ \endgroup
+ \kern-.625\scratchwidth
+ \vrule
+ \s!width .25\scratchwidth
+ \s!height\strutht
+ \s!depth \strutdp
+ \hss}
\let\spac_struts_vide_hbox\hbox % overloaded in trac-vis.mkiv
@@ -948,6 +1010,35 @@
\let\normalstrut\strut
+\unexpanded\def\halfstrut
+ {\relax
+ \dontleavehmode
+ \begingroup
+ \setbox\scratchbox\copy\strutbox
+ \ht\scratchbox\dimexpr\strutht/\plustwo\relax
+ \dp\scratchbox\dimexpr\strutdp/\plustwo\relax
+ \box\scratchbox
+ \endgroup}
+
+\unexpanded\def\quarterstrut
+ {\relax
+ \dontleavehmode
+ \begingroup
+ \setbox\scratchbox\copy\strutbox
+ \ht\scratchbox\dimexpr\strutht/\plusfour\relax
+ \dp\scratchbox\dimexpr\strutdp/\plusfour\relax
+ \box\scratchbox
+ \endgroup}
+
+\unexpanded\def\depthstrut
+ {\relax
+ \dontleavehmode
+ \begingroup
+ \setbox\scratchbox\copy\strutbox
+ \ht\scratchbox\dimexpr\strutht-\struthtdp/\plustwo\relax % assumes that ht > lineheight/2
+ \box\scratchbox
+ \endgroup}
+
%D Sometimes a capstrut comes in handy
%D
%D \starttabulate[|Tl|l|l|]
@@ -989,7 +1080,13 @@
\fi}
\unexpanded\def\showstruts % adapts .. is wrong
- {\setteststrut
+ {\c_strut_visual_mode\zerocount
+ \setteststrut
+ \settestcrlf}
+
+\unexpanded\def\showcolorstruts % adapts .. is wrong
+ {\c_strut_visual_mode\plusone
+ \setteststrut
\settestcrlf}
\unexpanded\def\setteststrut
@@ -1020,12 +1117,17 @@
\newbox\nostrutbox \setbox\nostrutbox\emptyhbox
+\newtoks\everysetnostrut
+
\unexpanded\def\setnostrut
- {\setbox\strutbox\copy\nostrutbox
- \let\strut\empty
- \let\endstrut\empty
- \let\begstrut\empty
- \let\crlfplaceholder\empty}
+ {\the\everysetnostrut}
+
+\appendtoks
+ \setbox\strutbox\copy\nostrutbox
+ \let\strut\empty
+ \let\endstrut\empty
+ \let\begstrut\empty
+\to \everysetnostrut
% when enabled, sigstruts will remove themselves if nothing
% goes inbetween
@@ -1154,6 +1256,10 @@
\let\normaloffinterlineskip\offinterlineskip % knuth's original
+\appendtoks
+ \ifvmode\clf_resetprevdepth\fi % a nasty hack (tested for a while now)
+\to \everyafteroutput
+
%D My own one:
\unexpanded\def\spac_helpers_push_interlineskip_yes
@@ -1325,10 +1431,20 @@
\unexpanded\def\installsnapvalues#1#2% todo: a proper define
{\edef\currentsnapper{#1:#2}%
\ifcsname\??gridsnapperattributes\currentsnapper\endcsname \else
- \setevalue{\??gridsnapperattributes\currentsnapper}{\ctxlua{builders.vspacing.definesnapmethod("#1","#2")}}%
+ \setevalue{\??gridsnapperattributes\currentsnapper}{\clf_definesnapmethod{#1}{#2}}%
\fi
\setevalue{\??gridsnappers#1}{\attribute\snapmethodattribute\csname\??gridsnapperattributes\currentsnapper\endcsname\space}}
+\unexpanded\def\usegridparameter#1% no checking here
+ {\edef\m_spac_grid_asked{#1\c!grid}%
+ \ifx\m_spac_grid_asked\empty
+ \attribute \snapvboxattribute\attributeunsetvalue
+ \else
+ \spac_grids_snap_value_set\m_spac_grid_asked
+ \attribute \snapvboxattribute\attribute\snapmethodattribute
+ \fi}
+
+
\unexpanded\def\definegridsnapping
{\dodoubleargument\spac_grids_define}
@@ -1394,6 +1510,9 @@
\definegridsnapping[\v!none] [\v!none]
\definegridsnapping[\v!line] [\v!line]
\definegridsnapping[\v!strut] [\v!strut]
+\definegridsnapping[\v!box] [\v!box] % centers a box rounded upwards (box:.5 -> tolerance)
+\definegridsnapping[\v!min] [\v!min] % centers a box rounded downwards
+\definegridsnapping[\v!max] [\v!max] % centers a box rounded upwards
\definegridsnapping[\v!max] [\v!maxdepth,\v!maxheight,\v!strut]
\definegridsnapping[\v!min] [\v!mindepth,\v!minheight,\v!strut]
@@ -1402,13 +1521,13 @@
\unexpanded\def\synchronizelocallinespecs
{\bodyfontlineheight \normallineheight
- \bodyfontstrutheight\strutheight
- \bodyfontstrutdepth \strutdepth}
+ \bodyfontstrutheight\strutht
+ \bodyfontstrutdepth \strutdp}
\unexpanded\def\synchronizegloballinespecs
{\global\globalbodyfontlineheight \normallineheight
- \global\globalbodyfontstrutheight\strutheight
- \global\globalbodyfontstrutdepth \strutdepth}
+ \global\globalbodyfontstrutheight\strutht
+ \global\globalbodyfontstrutdepth \strutdp}
\appendtoks
\synchronizegloballinespecs
@@ -1479,10 +1598,10 @@
\def\spac_grids_snap_to_finish#1%
{\ifvbox\nextbox % this will go away
- \ctxcommand{vspacingcollapse(\number\nextbox)}% isn't that already done?
+ \clf_vspacingcollapse\nextbox\relax % isn't that already done?
\fi
\doifelsenothing{#1}{\spac_grids_snap_value_set\v!normal}{\spac_grids_snap_value_set{#1}}%
- \ctxcommand{vspacingsnap(\number\nextbox,\number\attribute\snapmethodattribute)}%
+ \clf_vspacingsnap\nextbox\attribute\snapmethodattribute\relax
\ifvbox\nextbox\vbox\else\hbox\fi attr \snapmethodattribute \zerocount {\box\nextbox}%
\egroup}
@@ -1632,7 +1751,7 @@
\def\spac_vspacing_define_amount[#1][#2][#3]% can be combined
{\setvalue{\??vspacingamount#1}{\ifgridsnapping#3\else#2\fi}%
- \ctxcommand{vspacingsetamount("#1")}}
+ \clf_vspacingsetamount{#1}}
% \installcorenamespace{vspacingamountnormal}
% \installcorenamespace{vspacingamountgrid}
@@ -1644,13 +1763,13 @@
% \fi
% \csname n>#1\endcsname{#2}%
% \csname g>#1\endcsname{#3}%
-% \ctxcommand{vspacingsetamount("#1")}}
+% \clf_vspacingsetamount{#1}}
\unexpanded\def\definevspacing
{\dodoubleempty\spac_vspacing_define}
\def\spac_vspacing_define[#1][#2]%
- {\ctxcommand{vspacingdefine("#1","#2")}}
+ {\clf_vspacingdefine{#1}{#2}}
%D The injector code (generated at the \LUA\ end):
@@ -1751,7 +1870,7 @@
% The main spacer:
\unexpanded\def\vspacing
- {\doifnextoptionalelse\spac_vspacing_yes\spac_vspacing_nop}
+ {\doifelsenextoptionalcs\spac_vspacing_yes\spac_vspacing_nop}
\def\spac_vspacing_yes
{\ifinpagebody % somewhat weird
@@ -1772,21 +1891,21 @@
\fi\fi}
\def\spac_vspacing_yes_indeed[#1]%
- {\ifmmode\else\par\ctxcommand{vspacing("#1")}\fi}
+ {\ifmmode\else\par\clf_vspacing{#1}\fi}
\def\spac_vspacing_yes_ignore[#1]%
{\ifmmode\else\par\fi}
\def\spac_vspacing_nop_indeed
- {\ifmmode\else\par\ctxcommand{vspacing("\currentvspacing")}\fi}
+ {\ifmmode\else\par\clf_vspacing{\currentvspacing}\fi}
\def\spac_vspacing_nop_ignore
{\ifmmode\else\par\fi}
\def\directvspacing#1%
- {\par\ctxcommand{vspacing("#1")}}
+ {\par\clf_vspacing{#1}}
-% handy (and faste):
+% handy (and faster):
\unexpanded\def\directvpenalty#1%
{\begingroup
@@ -1807,7 +1926,7 @@
% these depend on bigskipamount cum suis so we'd better sync them
\unexpanded\def\setupvspacing
- {\doifnextoptionalelse\setupvspacing_yes\setupvspacing_nop}
+ {\doifelsenextoptionalcs\setupvspacing_yes\setupvspacing_nop}
\let\currentvspacing\s!default % hm, default, standard ...
@@ -1829,7 +1948,7 @@
% category:4 is default
-% this interface might change (into an \install, buw we will then keep this one hidden)
+% this interface might change (into an \install, but we will then keep this one hidden)
\definevspacingamount[\v!none] [\zeropoint] [\zeropoint]
\definevspacingamount[\v!big] [\bigskipamount] [\bodyfontlineheight]
@@ -1840,8 +1959,8 @@
\definevspacingamount[\v!quarterline] [.25\openlineheight] [.25\bodyfontlineheight]
\definevspacingamount[\v!formula] [\medskipamount] [.5\bodyfontlineheight]
\definevspacingamount[\v!white] [\parskip] [\bodyfontwhitespace]
-\definevspacingamount[\v!height] [\strutheight] [\bodyfontstrutheight]
-\definevspacingamount[\v!depth] [\strutdepth] [\bodyfontstrutdepth]
+\definevspacingamount[\v!height] [\strutht] [\bodyfontstrutheight]
+\definevspacingamount[\v!depth] [\strutdp] [\bodyfontstrutdepth]
\definevspacingamount[-\v!line] [-\openlineheight] [-\bodyfontlineheight]
\definevspacingamount[-\v!halfline] [-.5\openlineheight] [-.5\bodyfontlineheight]
@@ -1860,6 +1979,32 @@
\fi\fi
\relax}
+% used in itemize ... always test this
+
+\newdimen\d_spac_overlay
+
+\def\spac_overlay_lines
+ {\blank[\v!back,\v!overlay]%
+ \nointerlineskip}
+
+% \startitemize[n]
+% \item \input zapf
+% \item \startitemize[a]
+% \item \input knuth
+% \stopitemize
+% \stopitemize
+%
+% \strut \hfill first line \blank[overlay] second line \hfill \strut
+%
+% \ruledvbox {
+% \strut \hfill line 1 \blank[overlay]
+% line 2 \hfill \strut \blank[overlay]
+% \strut \hfill line 3 \hfill \strut
+% }
+%
+% \dorecurse{50}
+% {\startitemize[n] \startitem \startitemize[a] \item #1 \stopitemize \stopitem \stopitemize}
+
\definevspacing[\v!preference][penalty:-500] % goodbreak
\definevspacing[\v!samepage] [penalty:10000] % nobreak
\definevspacing[\v!max] [category:1]
@@ -1867,7 +2012,9 @@
\definevspacing[\v!disable] [category:5]
\definevspacing[\v!nowhite] [category:6]
\definevspacing[\v!back] [category:7]
-\definevspacing[\v!always] [category:0]
+% together [category:8]
+\definevspacing[\v!overlay] [category:9]
+\definevspacing[\v!always] [category:0] % hm, internally it's discard
\definevspacing[\v!weak] [order:0]
\definevspacing[\v!strong] [order:100]
@@ -1894,8 +2041,20 @@
%D \type {\blank} (we needed the first one while playing with the
%D new code).
+% We keep this one as reference
+%
+% \unexpanded\def\inhibitblank
+% {\vspacing[\v!disable]}
+%
+% but use the following more efficient variant instead:
+
\unexpanded\def\inhibitblank
- {\vspacing[\v!disable]} % can be made faster
+ {\ifvmode
+ \begingroup
+ \attribute\skipcategoryattribute\plusfive
+ \vskip\zeropoint
+ \endgroup
+ \fi}
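+% For illustration only (assumed usage, in vertical mode):
+%
+% some paragraph \par
+% \inhibitblank
+% \blank[big] % this blank is expected to be discarded by the vspacing collapser
+% next paragraph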
\let\doinhibitblank\inhibitblank % keep this command, used in styles
@@ -1980,7 +2139,7 @@
\let\m_spac_hanging_location\empty
\def\spac_hanging_start[#1]%
- {\doifassignmentelse{#1}
+ {\doifelseassignment{#1}
{\let\m_spac_hanging_location\empty
\setupcurrenthanging[#1]}%
{\edef\m_spac_hanging_location{#1}}%
@@ -2109,7 +2268,7 @@
% as encountered in forced blank skips (see lua code)
%
% \appendtoks
-% \ifvmode\ctxcommand{resetprevdepth()}\fi
+% \ifvmode\clf_resetprevdepth\fi
% \to \everyafteroutput
%
 % this should only happen when there is nothing left over (how to determine that) .. testcase:
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index a74501e41..e43651ef8 100644
Binary files a/tex/context/base/status-files.pdf and b/tex/context/base/status-files.pdf differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index a591afb75..37e62b70b 100644
Binary files a/tex/context/base/status-lua.pdf and b/tex/context/base/status-lua.pdf differ
diff --git a/tex/context/base/status-mkiv.lua b/tex/context/base/status-mkiv.lua
index caa7dc16c..45c282256 100644
--- a/tex/context/base/status-mkiv.lua
+++ b/tex/context/base/status-mkiv.lua
@@ -320,7 +320,6 @@ return {
},
{
category = "mkiv",
- comment = "maybe this becomes a runtime module",
filename = "toks-ini",
loading = "always",
status = "okay",
@@ -392,6 +391,12 @@ return {
loading = "always",
status = "okay",
},
+ {
+ category = "mkiv",
+ filename = "typo-sus",
+ loading = "always",
+ status = "okay",
+ },
{
category = "mkiv",
filename = "node-pag",
@@ -540,6 +545,12 @@ return {
loading = "always",
status = "okay",
},
+ {
+ category = "mkiv",
+ filename = "lang-hyp",
+ loading = "always",
+ status = "okay",
+ },
{
category = "mkiv",
filename = "unic-ini",
@@ -613,6 +624,12 @@ return {
loading = "always",
status = "okay",
},
+ {
+ category = "mkiv",
+ filename = "lang-hyp",
+ loading = "always",
+ status = "okay",
+ },
{
category = "mkiv",
filename = "lang-frq",
@@ -2014,13 +2031,13 @@ return {
{
category = "mkiv",
filename = "bibl-bib",
- loading = "always",
+ loading = "on demand",
status = "pending",
},
{
category = "mkiv",
filename = "bibl-tra",
- loading = "always",
+ loading = "on demand",
status = "pending",
},
{
@@ -2534,6 +2551,66 @@ return {
loading = "on demand",
status = "okay",
},
+ {
+ category = "mkiv",
+ filename = "publ-ini",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-old",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-tra",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-usr",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-jrn",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-xml",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-imp-apa",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-imp-cite",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-imp-definitions",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
+ filename = "publ-imp-commands",
+ loading = "always",
+ status = "pending",
+ },
},
lua = {
{
@@ -2606,12 +2683,12 @@ return {
{
category = "lua",
filename = "bibl-bib",
- status = "todo",
+ loading = "on demand",
},
{
category = "lua",
filename = "bibl-tra",
- status = "todo",
+ loading = "on demand",
},
{
category = "lua",
@@ -3151,6 +3228,12 @@ return {
loading = "font-lib",
status = "okay",
},
+ {
+ category = "lua",
+ filename = "font-inj",
+ loading = "font-lib",
+ status = "okay",
+ },
{
category = "lua",
filename = "font-ldr",
@@ -3428,6 +3511,18 @@ return {
loading = "lang-def",
status = "okay",
},
+ {
+ category = "lua",
+ filename = "lang-dis",
+ loading = "lang-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "lang-hyp",
+ loading = "lang-hyp",
+ status = "okay",
+ },
{
category = "lua",
filename = "lang-ini",
@@ -3440,6 +3535,12 @@ return {
loading = "lang-lab",
status = "okay",
},
+ {
+ category = "lua",
+ filename = "lang-hyp",
+ loading = "lang-hyp",
+ status = "okay",
+ },
{
category = "lua",
filename = "lang-txt",
@@ -4033,6 +4134,11 @@ return {
filename = "node-pag",
status = "todo",
},
+ {
+ category = "lua",
+ filename = "node-ppt",
+ status = "todo",
+ },
{
category = "lua",
filename = "node-pro",
@@ -4651,7 +4757,14 @@ return {
{
category = "lua",
filename = "toks-ini",
- status = "todo",
+ loading = "toks-ini",
+ status = "okay",
+ },
+ {
+ category = "lua",
+ filename = "toks-scn",
+ loading = "toks-ini",
+ status = "okay",
},
{
category = "lua",
@@ -4753,6 +4866,11 @@ return {
filename = "typo-bld",
status = "todo",
},
+ {
+ category = "lua",
+ filename = "typo-sus",
+ status = "okay",
+ },
{
category = "lua",
filename = "typo-brk",
@@ -4840,6 +4958,11 @@ return {
filename = "typo-fln",
status = "okay",
},
+ {
+ category = "lua",
+ filename = "typo-man",
+ status = "todo",
+ },
{
category = "lua",
filename = "typo-prc",
@@ -4997,6 +5120,48 @@ return {
filename = "x-mathml",
status = "todo",
},
+ {
+ category = "lua",
+ filename = "publ-ini",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "publ-aut",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "publ-dat",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "publ-oth",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "publ-fnd",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "publ-tra",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
+ {
+ category = "lua",
+ filename = "publ-usr",
+ loading = "publ-ini.mkiv",
+ status = "pending",
+ },
},
metafun = {
{
diff --git a/tex/context/base/strc-bkm.lua b/tex/context/base/strc-bkm.lua
index c38ab3c2e..96b68b236 100644
--- a/tex/context/base/strc-bkm.lua
+++ b/tex/context/base/strc-bkm.lua
@@ -13,14 +13,15 @@ if not modules then modules = { } end modules ['strc-bkm'] = {
-- we should hook the placement into everystoptext ... needs checking
-local format, concat, gsub = string.format, table.concat, string.gsub
+-- todo: make an lpeg for stripped
+
+local next, type = next, type
+local gsub, lower = string.gsub, string.lower
+local concat = table.concat
local utfvalues = utf.values
local settings_to_hash = utilities.parsers.settings_to_hash
-local codeinjections = backends.codeinjections
-
-local trace_bookmarks = false trackers.register("references.bookmarks", function(v) trace_bookmarks = v end)
-
+local trace_bookmarks = false trackers.register("references.bookmarks", function(v) trace_bookmarks = v end)
local report_bookmarks = logs.reporter("structure","bookmarks")
local structures = structures
@@ -30,13 +31,17 @@ structures.bookmarks = structures.bookmarks or { }
local bookmarks = structures.bookmarks
local sections = structures.sections
local lists = structures.lists
-
local levelmap = sections.levelmap
local variables = interfaces.variables
+local implement = interfaces.implement
+local codeinjections = backends.codeinjections
-bookmarks.method = "internal" -- or "page"
+bookmarks.method = "internal" -- or "page"
-local names, opened, forced, numbered = { }, { }, { }, { }
+local names = { }
+local opened = { }
+local forced = { }
+local numbered = { }
function bookmarks.register(settings)
local force = settings.force == variables.yes
@@ -78,8 +83,13 @@ function bookmarks.overload(name,text)
end
end
if ls then
- ls.titledata.bookmark = text
+ local titledata = ls.titledata
+ if titledata then
+ titledata.bookmark = text
+ end
end
+ -- last resort
+ -- context.writetolist({name},text,"")
end
local function stripped(str) -- kind of generic
@@ -101,54 +111,6 @@ function bookmarks.setup(spec)
end
end
--- function bookmarks.place()
--- if next(names) then
--- local list = lists.filtercollected(names,"all",nil,lists.collected,forced)
--- if #list > 0 then
--- local levels, noflevels, lastlevel = { }, 0, 1
--- for i=1,#list do
--- local li = list[i]
--- local metadata = li.metadata
--- local name = metadata.name
--- if not metadata.nolist or forced[name] then -- and levelmap[name] then
--- local titledata = li.titledata
--- if titledata then
--- local structural = levelmap[name]
--- lastlevel = structural or lastlevel
--- local title = titledata.bookmark
--- if not title or title == "" then
--- -- We could typeset the title and then convert it.
--- if not structural then
--- -- placeholder, todo: bookmarklabel
--- title = name .. ": " .. (titledata.title or "?")
--- else
--- title = titledata.title or "?"
--- end
--- end
--- if numbered[name] then
--- local sectiondata = sections.collected[li.references.section]
--- local numberdata = li.numberdata
--- if sectiondata and numberdata and not numberdata.hidenumber then
--- -- we could typeset the number and convert it
--- title = concat(sections.typesetnumber(sectiondata,"direct",numberspec,sectiondata)) .. " " .. title
--- end
--- end
--- noflevels = noflevels + 1
--- levels[noflevels] = {
--- lastlevel,
--- stripped(title), -- can be replaced by converter
--- li.references, -- has internal and realpage
--- allopen or opened[name]
--- }
--- end
--- end
--- end
--- bookmarks.finalize(levels)
--- end
--- function bookmarks.place() end -- prevent second run
--- end
--- end
-
function bookmarks.place()
if next(names) then
local levels = { }
@@ -157,26 +119,53 @@ function bookmarks.place()
local nofblocks = #lists.sectionblocks -- always >= 1
local showblocktitle = toboolean(numberspec.showblocktitle,true)
for i=1,nofblocks do
- local block = lists.sectionblocks[i]
+ local block = lists.sectionblocks[i]
local blockdone = nofblocks == 1
- local list = lists.filtercollected(names,block..":all",nil,lists.collected,forced)
+ local list = lists.filter {
+ names = names,
+ criterium = block .. ":all",
+ forced = forced,
+ }
for i=1,#list do
local li = list[i]
local metadata = li.metadata
local name = metadata.name
if not metadata.nolist or forced[name] then -- and levelmap[name] then
local titledata = li.titledata
+ --
+ if not titledata then
+ local userdata = li.userdata
+ if userdata then
+ local first = userdata.first
+ local second = userdata.second
+ if first then
+ if second then
+ titledata = { title = first .. " " .. second }
+ else
+ titledata = { title = first }
+ end
+ elseif second then
+ titledata = { title = second }
+ else
+ -- ignoring (command and so on)
+ end
+ end
+ end
+ --
if titledata then
if not blockdone then
if showblocktitle then
-- add block entry
local blockdata = sections.sectionblockdata[block]
noflevels = noflevels + 1
+ local references = li.references
levels[noflevels] = {
- 1, -- toplevel
- stripped(blockdata.bookmark ~= "" and blockdata.bookmark or block),
- li.references,
- allopen or opened[name] -- same as first entry
+ level = 1, -- toplevel
+ title = stripped(blockdata.bookmark ~= "" and blockdata.bookmark or block),
+ reference = references,
+ opened = allopen or opened[name], -- same as first entry
+ realpage = references and references.realpage or 0, -- handy for later
+ usedpage = true,
}
end
blockdone = true
@@ -190,27 +179,36 @@ function bookmarks.place()
local title = titledata.bookmark
if not title or title == "" then
-- We could typeset the title and then convert it.
- if not structural then
- -- placeholder, todo: bookmarklabel
- title = name .. ": " .. (titledata.title or "?")
- else
+ -- if not structural then
+ -- title = titledata.title or "?")
+ -- else
title = titledata.title or "?"
- end
+ -- end
end
if numbered[name] then
local sectiondata = sections.collected[li.references.section]
local numberdata = li.numberdata
- if sectiondata and numberdata and not numberdata.hidenumber then
+ if sectiondata and numberdata then
+ if not numberdata.hidenumber then
-- we could typeset the number and convert it
- title = concat(sections.typesetnumber(sectiondata,"direct",numberspec,sectiondata)) .. " " .. title
+ local number = sections.typesetnumber(sectiondata,"direct",numberspec,sectiondata)
+ if number and #number > 0 then
+ title = concat(number) .. " " .. title
+ end
+ end
end
end
noflevels = noflevels + 1
+ local references = li.references
levels[noflevels] = {
- lastlevel,
- stripped(title), -- can be replaced by converter
- li.references, -- has internal and realpage
- allopen or opened[name]
+ level = lastlevel,
+ title = stripped(title), -- can be replaced by converter
+ reference = references, -- has internal and realpage
+ opened = allopen or opened[name],
+ realpage = references and references.realpage or 0, -- handy for later
+ usedpage = true,
+ structural = structural,
+ name = name,
}
end
end
@@ -222,47 +220,296 @@ function bookmarks.place()
end
function bookmarks.flatten(levels)
+ if not levels then
+ -- a plugin messed up
+ return { }
+ end
-- This function promotes leading structurelements with a higher level
-- to the next lower level. Such situations are the result of lack of
-- structure: a subject preceding a chapter in a sectionblock. So, the
-- following code runs over section blocks as well. (bookmarks-007.tex)
local noflevels = #levels
if noflevels > 1 then
- local skip, start, one = false, 1, levels[1]
- local first, block = one[1], one[3].block
+ local function showthem()
+ for i=1,noflevels do
+ local level = levels[i]
+ -- if level.structural then
+ -- report_bookmarks("%i > %s > %s",level.level,level.reference.block,level.title)
+ -- else
+ report_bookmarks("%i > %s > %s > %s",level.level,level.reference.block,level.name,level.title)
+ -- end
+ end
+ end
+ if trace_bookmarks then
+ report_bookmarks("checking structure")
+ showthem()
+ end
+ local skip = false
+ local done = 0
+ local start = 1
+ local one = levels[1]
+ local first = one.level
+ local block = one.reference.block
for i=2,noflevels do
- local li = levels[i]
- local new, newblock = li[1], li[3].block
+ local current = levels[i]
+ local new = current.level
+ local reference = current.reference
+ local newblock = type(reference) == "table" and current.reference.block or block
if newblock ~= block then
- first, block, start, skip = new, newblock, i, false
+ first = new
+ block = newblock
+ start = i
+ skip = false
elseif skip then
-- go on
elseif new > first then
skip = true
elseif new < first then
for j=start,i-1 do
- local lj = levels[j]
- local old = lj[1]
- lj[1] = new
+ local previous = levels[j]
+ local old = previous.level
+ previous.level = new
if trace_bookmarks then
- report_bookmarks("promoting entry %a from level %a to %a: %s",j,old,new,lj[2])
+ report_bookmarks("promoting entry %a from level %a to %a: %s",j,old,new,previous.title)
end
+ done = done + 1
end
skip = true
end
end
+ if trace_bookmarks then
+ if done > 0 then
+ report_bookmarks("%a entries promoted")
+ showthem()
+ else
+ report_bookmarks("nothing promoted")
+ end
+ end
+ end
+ return levels
+end
+
+local extras = { }
+local lists = { }
+local names = { }
+
+bookmarks.extras = extras
+
+local function cleanname(name)
+ return lower(file.basename(name))
+end
+
+function extras.register(name,levels)
+ if name and levels then
+ name = cleanname(name)
+ local found = names[name]
+ if found then
+ lists[found].levels = levels
+ else
+ lists[#lists+1] = {
+ name = name,
+ levels = levels,
+ }
+ names[name] = #lists
+ end
+ end
+end
+
+function extras.get(name)
+ if name then
+ local found = names[cleanname(name)]
+ if found then
+ return lists[found].levels
+ end
+ else
+ return lists
+ end
+end
+
+function extras.reset(name)
+ local l, n = { }, { }
+ if name then
+ name = cleanname(name)
+ for i=1,#lists do
+ local li = lists[i]
+ local ln = li.name
+ if name == ln then
+ -- skip
+ else
+ local m = #l + 1
+ l[m] = li
+ n[ln] = m
+ end
+ end
+ end
+ lists, names = l, n
+end
+
+local function checklists()
+ for i=1,#lists do
+ local levels = lists[i].levels
+ for j=1,#levels do
+ local entry = levels[j]
+ local pageindex = entry.pageindex
+ if pageindex then
+ entry.reference = figures.getrealpage(pageindex)
+ entry.pageindex = nil
+ end
+ end
end
end
+function extras.tosections(levels)
+ local sections = { }
+ local noflists = #lists
+ for i=1,noflists do
+ local levels = lists[i].levels
+ local data = { }
+ sections[i] = data
+ for j=1,#levels do
+ local entry = levels[j]
+ if entry.usedpage then
+ local section = entry.section
+ local d = data[section]
+ if d then
+ d[#d+1] = entry
+ else
+ data[section] = { entry }
+ end
+ end
+ end
+ end
+ return sections
+end
+
+function extras.mergesections(levels,sections)
+ if not sections or #sections == 0 then
+ return levels
+ elseif not levels then
+ return { }
+ else
+ local merge = { }
+ local noflists = #lists
+ if #levels == 0 then
+ local level = 0
+ local section = 0
+ for i=1,noflists do
+ local entries = sections[i][0]
+ if entries then
+ for i=1,#entries do
+ local entry = entries[i]
+ merge[#merge+1] = entry
+ entry.level = entry.level + level
+ end
+ end
+ end
+ else
+ for j=1,#levels do
+ local entry = levels[j]
+ merge[#merge+1] = entry
+ local section = entry.reference.section
+ local level = entry.level
+ entry.section = section -- for tracing
+ for i=1,noflists do
+ local entries = sections[i][section]
+ if entries then
+ for i=1,#entries do
+ local entry = entries[i]
+ merge[#merge+1] = entry
+ entry.level = entry.level + level
+ end
+ end
+ end
+ end
+ end
+ return merge
+ end
+end
+
+function bookmarks.merge(levels,mode)
+ return extras.mergesections(levels,extras.tosections())
+end
+
+local sequencers = utilities.sequencers
+local appendgroup = sequencers.appendgroup
+local appendaction = sequencers.appendaction
+
+local bookmarkactions = sequencers.new {
+ arguments = "levels,method",
+ returnvalues = "levels",
+ results = "levels",
+}
+
+appendgroup(bookmarkactions,"before") -- user
+appendgroup(bookmarkactions,"system") -- private
+appendgroup(bookmarkactions,"after" ) -- user
+
+appendaction(bookmarkactions,"system",bookmarks.flatten)
+appendaction(bookmarkactions,"system",bookmarks.merge)
+
function bookmarks.finalize(levels)
- -- This function can be overloaded by an optional converter
- -- that uses nodes.toutf on a typeset stream. This is something
- -- that we will support when the main loop has become a coroutine.
- codeinjections.addbookmarks(levels,bookmarks.method)
+ local method = bookmarks.method or "internal"
+ checklists() -- so that plugins have the adapted page number
+ levels = bookmarkactions.runner(levels,method)
+ if levels and #levels > 0 then
+ -- normally this is not needed
+ local purged = { }
+ for i=1,#levels do
+ local l = levels[i]
+ if l.usedpage ~= false then
+ purged[#purged+1] = l
+ end
+ end
+ --
+ codeinjections.addbookmarks(purged,method)
+ else
+ -- maybe a plugin messed up
+ end
+end
+
+function bookmarks.installhandler(what,where,func)
+ if not func then
+ where, func = "after", where
+ end
+ if where == "before" or where == "after" then
+ sequencers.appendaction(bookmarkactions,where,func)
+ else
+ report_tex("installing bookmark %a handlers in %a is not possible",what,tostring(where))
+ end
end
-- interface
-commands.overloadbookmark = bookmarks.overload
-commands.registerbookmark = bookmarks.register
-commands.setupbookmarks = bookmarks.setup
+implement {
+ name = "setupbookmarks",
+ actions = bookmarks.setup,
+ arguments = {
+ {
+ { "separatorset" },
+ { "conversionset" },
+ { "starter" },
+ { "stopper" },
+ { "segments" },
+ { "showblocktitle" },
+ }
+ }
+}
+
+implement {
+ name = "registerbookmark",
+ actions = bookmarks.register,
+ arguments = {
+ {
+ { "names" },
+ { "opened" },
+ { "force" },
+ { "number" },
+ }
+ }
+}
+
+implement {
+ name = "overloadbookmark",
+ actions = bookmarks.overload,
+ arguments = { "string", "string" }
+}
diff --git a/tex/context/base/strc-bkm.mkiv b/tex/context/base/strc-bkm.mkiv
index 9d2ebd796..9688a1f93 100644
--- a/tex/context/base/strc-bkm.mkiv
+++ b/tex/context/base/strc-bkm.mkiv
@@ -74,7 +74,11 @@
\def\strc_bookmarks_bookmark_yes[#1]#2%
{\begingroup
\simplifycommands
- \ctxcommand{overloadbookmark("#1",\!!bs\detokenize\expandafter{\normalexpanded{#2}}\!!es)}%
+ \ifnum\thenamedheadlevel{#1}>\zerocount
+ \clf_overloadbookmark{#1}{\detokenize\expandafter{\normalexpanded{#2}}}%
+ \else
+ \strc_lists_write_to[#1][]{#2}{}% todo: a dedicated bookmark writer
+ \fi
\endgroup}
\def\strc_bookmarks_bookmark_nop[#1]#2%
@@ -106,27 +110,59 @@
\ifthirdargument
\setupcurrentbookmark[#3]% no every so not all possible
\else\ifsecondargument
- \doifassignmentelse{#2}{\let\m_bookmarks_opened\empty\setupcurrentbookmark[#2]}\donothing
+ \doifelseassignment{#2}{\let\m_bookmarks_opened\empty\setupcurrentbookmark[#2]}\donothing
\fi\fi
- \ctxcommand{registerbookmark {
- names = "\m_bookmarks_names",
- opened = "\m_bookmarks_opened",
- force = "\bookmarkparameter\c!force",
- number = "\bookmarkparameter\c!number",
- }}%
+ \clf_registerbookmark
+ names {\m_bookmarks_names}%
+ opened {\m_bookmarks_opened}%
+ force {\bookmarkparameter\c!force}%
+ number {\bookmarkparameter\c!number}%
+ \relax
\endgroup}
\appendtoks
- \ctxcommand{setupbookmarks {
- separatorset = "\bookmarkparameter\c!numberseparatorset",
- conversionset = "\bookmarkparameter\c!numberconversionset",
- starter = \!!bs\bookmarkparameter\c!numberstarter\!!es,
- stopper = \!!bs\bookmarkparameter\c!numberstopper\!!es,
- segments = "\bookmarkparameter\c!numbersegments",
- showblocktitle = "\bookmarkparameter\c!sectionblock",
- }}%
+ \clf_setupbookmarks
+ separatorset {\bookmarkparameter\c!numberseparatorset}%
+ conversionset {\bookmarkparameter\c!numberconversionset}%
+ starter {\bookmarkparameter\c!numberstarter}%
+ stopper {\bookmarkparameter\c!numberstopper}%
+ segments {\bookmarkparameter\c!numbersegments}%
+ showblocktitle {\bookmarkparameter\c!sectionblock}%
+ \relax
\to \everysetupbookmark
+%D There is a plugin mechanism but this is for experts only. The intermediate
+%D data structures are stable.
+%D
+%D \starttyping
+%D \startluacode
+%D structures.bookmarks.installhandler("check before","before",function(levels)
+%D logs.report("extra bookmarks","before (normal bookmarks)")
+%D inspect(levels)
+%D logs.report("extra bookmarks","before (extra bookmarks)")
+%D inspect(structures.bookmarks.extras.get())
+%D return levels
+%D end)
+%D structures.bookmarks.installhandler("check after", "after", function(levels)
+%D logs.report("extra bookmarks","after (merged bookmarks)")
+%D inspect(levels)
+%D return levels
+%D end)
+%D \stopluacode
+%D \stoptyping
+%D
+%D This mechanism was added when bookmark inclusion became an (optional) part of graphic
+%D inclusion (which is needed by Taco).
+%D
+%D \starttyping
+%D \getfiguredimensions[somefile.pdf]
+%D \dorecurse {\noffigurepages} {
+%D \startTEXpage
+%D \externalfigure[somefile.pdf][interaction=bookmark,page=\recurselevel]
+%D \stopTEXpage
+%D }
+%D \stoptyping
+
\protect \endinput
% \starttext
diff --git a/tex/context/base/strc-blk.lua b/tex/context/base/strc-blk.lua
index 935b6c061..0ababcfc0 100644
--- a/tex/context/base/strc-blk.lua
+++ b/tex/context/base/strc-blk.lua
@@ -16,6 +16,8 @@ local allocate = utilities.storage.allocate
local context = context
local commands = commands
+local implement = interfaces.implement
+
local structures = structures
structures.blocks = structures.blocks or { }
@@ -78,13 +80,19 @@ end
function blocks.select(state,name,tag,criterium)
criterium = criterium or "text"
- if find(tag,"=") then tag = "" end
- local names = settings_to_set(name)
- local all = tag == ""
- local tags = not all and settings_to_set(tag)
- local hide = state == "process"
- local n = sections.numberatdepth(criterium)
- local result = lists.filtercollected("all", criterium, n, collected, { })
+ if find(tag,"=",1,true) then
+ tag = ""
+ end
+ local names = settings_to_set(name)
+ local all = tag == ""
+ local tags = not all and settings_to_set(tag)
+ local hide = state == "process"
+ local result = lists.filter {
+ names = "all",
+ criterium = criterium,
+ number = sections.numberatdepth(criterium), -- not needed
+ collected = collected,
+ }
for i=1,#result do
local ri = result[i]
local metadata = ri.metadata
@@ -148,8 +156,7 @@ end
-- interface
-
-commands.definestructureblock = blocks.define
-commands.savestructureblock = blocks.save
-commands.selectstructureblock = blocks.select
-commands.setstructureblockstate = blocks.setstate
+implement { name = "definestructureblock", actions = blocks.define, arguments = "string" }
+implement { name = "savestructureblock", actions = blocks.save, arguments = { "string", "string" ,"string" } }
+implement { name = "selectstructureblock", actions = blocks.select, arguments = { "string", "string" ,"string", "string" } }
+implement { name = "setstructureblockstate", actions = blocks.setstate, arguments = { "string", "string" ,"string" } }
diff --git a/tex/context/base/strc-blk.mkiv b/tex/context/base/strc-blk.mkiv
index 1dd144aa9..fe259d223 100644
--- a/tex/context/base/strc-blk.mkiv
+++ b/tex/context/base/strc-blk.mkiv
@@ -33,7 +33,7 @@
\installcommandhandler \??block {block} \??block
\appendtoks
- \ctxcommand{definestructureblock("\currentblock")}%
+ \clf_definestructureblock{\currentblock}%
\setuevalue{\e!begin\currentblock}{\dodoubleempty\strc_blocks_begin[\currentblock]}%
\setuevalue{\e!end \currentblock}{}%
\to \everydefineblock
@@ -41,7 +41,8 @@
\unexpanded\def\strc_blocks_begin[#1][#2]%
{\normalexpanded{\buff_pickup{@block@}{\e!begin#1}{\e!end#1}}
{}% before
- {\ctxcommand{savestructureblock("#1","#2","@block@")}}}% after
+ {\clf_savestructureblock{#1}{#2}{@block@}}%
+ \plusone}% after
\let\strc_blocks_setup\relax
@@ -71,17 +72,17 @@
\egroup}
\def\strc_blocks_set_state[#1][#2][#3]% state name tag
- {\ctxcommand{setstructureblockstate("#1","#2","#3")}}
+ {\clf_setstructureblockstate{#1}{#2}{#3}}
\def\strc_blocks_select[#1][#2][#3][#4]% state name tag setups
{\bgroup
- \doifassignmentelse{#3}
+ \doifelseassignment{#3}
{\getparameters[\??blocktemp][\c!criterium=\v!text,#3]%
\def\strc_blocks_setup{\setupcurrentblock[#3]}%
- \ctxcommand{selectstructureblock("#1","#2","","\csname\??blocktemp\c!criterium\endcsname")}}
+ \clf_selectstructureblock{#1}{#2}{}{\csname\??blocktemp\c!criterium\endcsname}}
{\getparameters[\??blocktemp][\c!criterium=\v!text,#4]%
\def\strc_blocks_setup{\setupcurrentblock[#4]}%
- \ctxcommand{selectstructureblock("#1","#2","#3","\csname\??blocktemp\c!criterium\endcsname")}}%
+ \clf_selectstructureblock{#1}{#2}{#3}{\csname\??blocktemp\c!criterium\endcsname}}%
\egroup}
% hide : save, if [+] also hidden execute
diff --git a/tex/context/base/strc-con.mkvi b/tex/context/base/strc-con.mkvi
index 75519b8ce..11f6f758e 100644
--- a/tex/context/base/strc-con.mkvi
+++ b/tex/context/base/strc-con.mkvi
@@ -159,6 +159,7 @@
\unexpanded\def\strc_constructions_initialize#1% class instance
{\edef\currentconstruction{#1}%
+ \let\currentconstructionhash\??construction
\let\currentconstructionlistentry\!!zerocount
\expandafter\let\expandafter\currentconstructionmain \csname\??constructionmain \currentconstruction\endcsname
\expandafter\let\expandafter\currentconstructionlevel \csname\??constructionlevel\currentconstruction\endcsname
@@ -214,7 +215,7 @@
\constructionparameter\c!headcommand
{\strut
\constructionparameter\c!text
- \ctxcommand{savedlisttitle("\currentconstructionmain",\currentconstructionlistentry)}}%
+ \clf_savedlisttitle{\currentconstructionmain}\currentconstructionlistentry\relax}%
\endgroup}
\unexpanded\def\strc_constructions_stored_start
@@ -262,10 +263,14 @@
\def\strc_constructions_ignore_head
{\constructionsheaddistance\zeropoint
- \constructionsheadwidth \zeropoint}
+ \constructionsheadwidth \zeropoint
+ % we also need to make sure that no stretch creeps in (new per 2015-02-02, for Alan)
+ \settrue\c_strc_constructions_distance_none}
+
+\let\currentconstructionhash\??construction
\unexpanded\setvalue{\??constructionstarthandler\v!construction}% this will be redone (reorganized) .. too much boxing
- {\dostarttagged\t!construction\currentconstruction
+ {\dostarttaggedchained\t!construction\currentconstruction\currentconstructionhash
\dotagsetconstruction
\constructionparameter\c!before
\begingroup
@@ -322,7 +327,7 @@
\else
\strc_constructions_preroll_head\currentconstructionsample
\ifzeropt\wd\constructionheadbox
- \strc_constructions_ignore_head
+ \strc_constructions_ignore_head
\else
\strc_constructions_set_width_and_distance
\fi
@@ -352,10 +357,6 @@
\ifx\p_strc_constructions_align\empty \else
\setupalign[\p_strc_constructions_align]% \use...
\fi
- \edef\p_strc_constructions_indenting{\constructionparameter\c!indenting}%
- \ifx\p_strc_constructions_indenting\empty \else
- \indenting[\p_strc_constructions_indenting]% \use...
- \fi
\ifcase\c_strc_constructions_nested_state
\c_strc_constructions_nested_state\plusone
\or
@@ -366,6 +367,11 @@
\edef\p_strc_constructions_headalign{\constructionparameter\c!headalign}%
%
\directsetup\p_strc_constructions_renderingsetup\relax
+ % moved to here 2014-07-03
+ \edef\p_strc_constructions_indenting{\constructionparameter\c!indenting}%
+ \ifx\p_strc_constructions_indenting\empty \else
+ \indenting[\p_strc_constructions_indenting]% \use...
+ \fi
%
\dostoptagged % tag
\dostarttagged\t!constructioncontent\empty
@@ -502,7 +508,7 @@
\setupalign[\p_strc_constructions_headalign]% use fast one
\fi
\ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox}%
-\setbox\constructionheadbox\hbox{\box\constructionheadbox}% needed in case of e.g. a real big head font, see descriptions-006.tex
+ \setbox\constructionheadbox\hbox{\box\constructionheadbox}% needed in case of e.g. a real big head font, see descriptions-006.tex
\ht\constructionheadbox\strutht
\dp\constructionheadbox\strutdp}
@@ -526,9 +532,14 @@
% The setups. These only deal with placement of the descriptor and initializing the
% environment. The wrapping happens elsewhere.
+% todo: optimize the setups with
+%
+% \ifconditional\c_strc_constructions_distance_none : no need for skip
+% \ifzeropt\wd\constructionheadbox : no need for box and skips
+
\startsetups[\??constructionrenderings:\v!left]
\edef\p_strc_constructions_hang{\constructionparameter\c!hang}%
- \doifsetupselse{\??constructionrenderings:\v!left:\p_strc_constructions_hang} {
+ \doifelsesetups{\??constructionrenderings:\v!left:\p_strc_constructions_hang} {
\directsetup{\??constructionrenderings:\v!left:\p_strc_constructions_hang}
} {
\directsetup{\??constructionrenderings:\v!left:\v!hanging}
@@ -537,7 +548,7 @@
\startsetups[\??constructionrenderings:\v!right]
\edef\p_strc_constructions_hang{\constructionparameter\c!hang}
- \doifsetupselse{\??constructionrenderings:\v!right:\p_strc_constructions_hang} {
+ \doifelsesetups{\??constructionrenderings:\v!right:\p_strc_constructions_hang} {
\directsetup{\??constructionrenderings:\v!right:\p_strc_constructions_hang}
} {
\directsetup{\??constructionrenderings:\v!right:\v!hanging}
@@ -556,6 +567,7 @@
\copy\constructionheadbox\hss
}
}
+ \nobreak
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
\stopsetups
@@ -580,6 +592,7 @@
\copy\constructionheadbox
\hskip\rightconstructionskip
}
+ \nobreak
\advance\rightskip \constructionsheaddistance
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
@@ -634,6 +647,7 @@
\box\constructionheadbox
}
}
+ \nobreak
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
\stopsetups
@@ -653,6 +667,7 @@
}
}
}
+ \nobreak
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
\stopsetups
@@ -730,7 +745,7 @@
\startsetups[\??constructionrenderings:\v!serried]
\edef\p_strc_constructions_width{\constructionparameter\c!width}% CHECK ! ! ! wrong parameter namespace
- \doifsetupselse{\??constructionrenderings:\v!serried:\p_strc_constructions_width} {
+ \doifelsesetups{\??constructionrenderings:\v!serried:\p_strc_constructions_width} {
\directsetup{\??constructionrenderings:\v!serried:\p_strc_constructions_width}
} {
\directsetup{\??constructionrenderings:\v!serried:\v!wide}
@@ -741,7 +756,7 @@
\let\\=\crlf
\noindent
\ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox % why copy? leftover?
- \penalty\plustenthousand % new
+ \nobreak
\hskip\constructionsheaddistance\relax
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
@@ -752,8 +767,8 @@
\noindent
\ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox % why copy? leftover?
\ifconditional\c_strc_constructions_distance_none \else
- \penalty\plustenthousand % new
- \hskip\constructionsheaddistance \!!plus .5\constructionsheaddistance \!!minus .25\constructionsheaddistance\relax
+ \nobreak
+ \hskip\constructionsheaddistance \!!plus .5\constructionsheaddistance \!!minus .25\constructionsheaddistance\relax
\fi
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
@@ -770,6 +785,7 @@
\ifhbox\constructionheadbox\unhcopy\else\copy\fi\constructionheadbox
\hss
}
+ \nobreak
\hskip\constructionsheaddistance\relax
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
@@ -845,7 +861,7 @@
\let\currentconstructionlistentry\!!zerocount
\def\strc_constructions_register
- {\ctxcommand{doiflisthasentry(\currentconstructionlistentry)}%
+ {\clf_doifelselisthasentry\numexpr\currentconstructionlistentry\relax
\strc_constructions_register_nop
\strc_constructions_register_yes}
@@ -858,7 +874,7 @@
\def\strc_constructions_discard
{\iftrialtypesetting
% \writestatus{constructions}{discarding \currentconstruction: \number\currentconstructionlistentry}%
- \ctxcommand{discardfromlist(\currentconstructionlistentry)}%
+ \clf_discardfromlist\currentconstructionlistentry\relax
\fi}
\let\currentconstructionlistnumber \!!zerocount
@@ -928,62 +944,67 @@
\else
\setnextinternalreferences{construction}\currentconstructionmain % plural
\relax
- \scratchcounter\ctxcommand{addtolist{ % we can set a counter at the lua end
- metadata = {
- kind = "construction",
- name = "\currentconstructionmain",
- level = structures.sections.currentlevel(),
- catcodes = \the\catcodetable,
- % \currentdirectionparameters
- },
- references = {
- internal = \nextinternalreference,
- order = \nextinternalorderreference,
- reference = "\currentconstructionreference",
- referenceprefix = "\referenceprefix",
- block = "\currentsectionblock",
- section = structures.sections.currentid(),
- },
- titledata = {
- label = \!!bs\detokenize\expandafter{\currentconstructionlabel }\!!es,
- title = \!!bs\detokenize\expandafter{\currentconstructiontitle }\!!es,
+ \scratchcounter\clf_addtolist
+ metadata {
+ kind {construction}
+ name {\currentconstructionmain}
+ % level structures.sections.currentlevel()
+ catcodes \catcodetable
+ % \currentdirectionparameters
+ }
+ references {
+ internal \nextinternalreference
+ order \nextinternalorderreference
+ reference {\currentconstructionreference}
+ prefix {\referenceprefix}
+ % block {\currentsectionblock}
+ % section structures.sections.currentid(),
+ }
+ titledata {
+ label {\detokenize\expandafter{\currentconstructionlabel}}
+ title {\detokenize\expandafter{\currentconstructiontitle}}
\ifx\currentconstructionbookmark\currentconstructiontitle \else
- bookmark = \!!bs\detokenize\expandafter{\currentconstructionbookmark}\!!es,
+ bookmark {\detokenize\expandafter{\currentconstructionbookmark}}
\fi
\ifx\currentconstructionlist\currentconstructiontitle \else
- list = \!!bs\detokenize\expandafter{\currentconstructionlist }\!!es,
+ list {\detokenize\expandafter{\currentconstructionlist}}
\fi
- },
+ }
\ifconditional\c_strc_constructions_number_state
- prefixdata = {
- prefix = "\constructionparameter\c!prefix",
- separatorset = "\constructionparameter\c!prefixseparatorset",
- conversion = \!!bs\constructionparameter\c!prefixconversion\!!es,
- conversionset = "\constructionparameter\c!prefixconversionset",
- set = "\constructionparameter\c!prefixset",
- segments = "\constructionparameter\c!prefixsegments",
- connector = \!!bs\constructionparameter\c!prefixconnector\!!es,
- },
- numberdata = {
- numbers = structures.counters.compact("\currentconstructionnumber",nil,true), % ! number can be cloned
- separatorset = "\constructionparameter\c!numberseparatorset",
- conversion = "\constructionparameter\c!numberconversion",
- conversionset = "\constructionparameter\c!numberconversionset",
- starter = \!!bs\constructionparameter\c!numberstarter\!!es,
- stopper = \!!bs\constructionparameter\c!numberstopper\!!es,
- segments = "\constructionparameter\c!numbersegments",
- },
+ prefixdata {
+ prefix {\constructionparameter\c!prefix}
+ separatorset {\constructionparameter\c!prefixseparatorset}
+ conversion {\constructionparameter\c!prefixconversion}
+ conversionset {\constructionparameter\c!prefixconversionset}
+ set {\constructionparameter\c!prefixset}
+ segments {\constructionparameter\c!prefixsegments}
+ connector {\constructionparameter\c!prefixconnector}
+ }
+ numberdata {
+ numbers {\currentconstructionnumber}
+ separatorset {\constructionparameter\c!numberseparatorset}
+ conversion {\constructionparameter\c!numberconversion}
+ conversionset {\constructionparameter\c!numberconversionset}
+ starter {\constructionparameter\c!numberstarter}
+ stopper {\constructionparameter\c!numberstopper}
+ segments {\constructionparameter\c!numbersegments}
+ }
\or
% symbol
\fi
- userdata = \!!bs\detokenize{#2}\!!es % will be converted to table at the lua end
- }
- }\relax
+ userdata {\detokenize{#2}}
+ \relax
% \writestatus{constructions}{registering \currentconstruction: \number\scratchcounter}%
+ \clf_setinternalreference
+ prefix {\referenceprefix}%
+ reference {\currentconstructionreference}%
+ internal \nextinternalreference
+ view {\interactionparameter\c!focus}%
+ \relax
\normalexpanded{%
\endgroup
\edef\noexpand\currentconstructionlistentry {\the\scratchcounter}%
- \edef\noexpand\currentconstructionattribute {\ctxcommand {setinternalreference("\referenceprefix","\currentconstructionreference",\nextinternalreference,"\interactionparameter\c!focus")}}%
+ \edef\noexpand\currentconstructionattribute {\the\lastdestinationattribute}%
\edef\noexpand\currentconstructionsynchronize{\ctxlatecommand{enhancelist(\the\scratchcounter)}}%
}%
\fi}
@@ -993,7 +1014,7 @@
% macros.
\def\reinstateconstructionnumberentry#1% was xdef
- {\edef\currentconstructionattribute {\ctxcommand {getinternalreference(#1)}}%
+ {\edef\currentconstructionattribute {\clf_getinternallistreference#1}%
\edef\currentconstructionsynchronize{\ctxlatecommand{enhancelist(#1)}}}
\installstructurelistprocessor{construction}{\usestructurelistprocessor{number+title}}
diff --git a/tex/context/base/strc-def.mkiv b/tex/context/base/strc-def.mkiv
index 0738bdf29..b4d2a5fea 100644
--- a/tex/context/base/strc-def.mkiv
+++ b/tex/context/base/strc-def.mkiv
@@ -32,6 +32,7 @@
\defineresetset [\s!default] [] [1] % each level
\defineprefixset [\s!default] [section-1,section-2,section-3] []
+\defineconversionset [\v!number] [] [numbers]
\defineconversionset [\v!pagenumber] [] [numbers]
\defineprefixset [\v!all] [section-1,section-2,section-3,section-4,section-5,section-6,section-7,section-8] []
@@ -47,6 +48,9 @@
\setupuserpagenumber
[\c!numberconversionset=\v!pagenumber]
+\setupcounters
+ [\c!numberconversionset=\v!number]
+
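+% In principle all counters can then be rerouted in one go by redefining that set,
+% using the same syntax as the \defineconversionset lines above, e.g. (untested here):
+%
+% \defineconversionset[\v!number][][romannumerals]
+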
% \startsetups defaults:frontpart:pagenumbers:roman
% \defineconversionset[\c!frontpart:\c!pagenumber][][romannumerals]
% \setupuserpagenumber[\c!way=\v!by\v!block]
@@ -220,19 +224,19 @@
[\c!before={\blank[\v!preference,\v!big]}, % sort of mkii compatible, watch columns
\c!after=\blank,
\c!label=\v!yes,
- \c!distance=1em]
+ \c!distance=\emwidth]
\setuplist
[\v!chapter]
[\c!before={\blank[\v!preference,\v!big]}, % sort of mkii compatible, watch columns
\c!after=]
-\setuplist [\v!part] [\c!width=0em]
-\setuplist [\v!chapter] [\c!width=2em]
-\setuplist [\v!section] [\c!width=3em]
-\setuplist [\v!subsection] [\c!width=4em]
-\setuplist [\v!subsubsection] [\c!width=5em]
-\setuplist [\v!subsubsubsection] [\c!width=6em]
-\setuplist [\v!subsubsubsubsection] [\c!width=7em]
+\setuplist [\v!part] [\c!width=0\emwidth]
+\setuplist [\v!chapter] [\c!width=2\emwidth]
+\setuplist [\v!section] [\c!width=3\emwidth]
+\setuplist [\v!subsection] [\c!width=4\emwidth]
+\setuplist [\v!subsubsection] [\c!width=5\emwidth]
+\setuplist [\v!subsubsubsection] [\c!width=6\emwidth]
+\setuplist [\v!subsubsubsubsection] [\c!width=7\emwidth]
\protect \endinput
diff --git a/tex/context/base/strc-des.mkvi b/tex/context/base/strc-des.mkvi
index 9c4d3fc6d..3557000f9 100644
--- a/tex/context/base/strc-des.mkvi
+++ b/tex/context/base/strc-des.mkvi
@@ -76,6 +76,7 @@
\unexpanded\setvalue{\??constructioninitializer\v!description}%
{\let\currentdescription \currentconstruction
\let\constructionparameter \descriptionparameter
+ \let\constructionnamespace \??description
\let\detokenizedconstructionparameter\detokenizeddescriptionparameter
\let\letconstructionparameter \letdescriptionparameter
\let\useconstructionstyleandcolor \usedescriptionstyleandcolor
@@ -102,10 +103,10 @@
\unexpanded\def\strc_descriptions_start#1%
{\begingroup
\strc_constructions_initialize{#1}%
- \doifnextoptionalelse\strc_descriptions_start_yes\strc_descriptions_start_nop}
+ \doifelsenextoptionalcs\strc_descriptions_start_yes\strc_descriptions_start_nop}
\unexpanded\def\strc_descriptions_start_yes[#1]%
- {\doifassignmentelse{#1}\strc_descriptions_start_yes_assignment\strc_descriptions_start_yes_reference[#1]}
+ {\doifelseassignment{#1}\strc_descriptions_start_yes_assignment\strc_descriptions_start_yes_reference[#1]}
\unexpanded\def\strc_descriptions_start_yes_assignment[#1]% todo userdata
{\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference=,\c!title=,\c!bookmark=,\c!list=,#1][]%
@@ -119,7 +120,7 @@
\fi}
\unexpanded\def\strc_descriptions_start_yes_titled[#1]%
- {\doifnextbgroupelse
+ {\doifelsenextbgroup
{\strc_descriptions_start_yes_titled_indeed[#1]}%
{\setfalse\c_strc_constructions_title_state
\strc_descriptions_start_yes_normal[#1]}}
@@ -140,7 +141,7 @@
\fi}
\unexpanded\def\strc_descriptions_start_nop_titled
- {\doifnextbgroupelse
+ {\doifelsenextbgroup
{\strc_descriptions_start_nop_titled_indeed}%
{\setfalse\c_strc_constructions_title_state
\strc_descriptions_start_nop_normal}}%
@@ -162,7 +163,7 @@
\unexpanded\def\strc_descriptions_command#1%
{\begingroup
\strc_constructions_initialize{#1}%
- \doifnextoptionalelse\strc_descriptions_yes\strc_descriptions_nop}
+ \doifelsenextoptionalcs\strc_descriptions_yes\strc_descriptions_nop}
\unexpanded\def\strc_descriptions_yes
{\ifconditional\c_strc_constructions_title_state
@@ -176,7 +177,7 @@
\csname\??constructioncommandhandler\currentconstructionhandler\endcsname}
\unexpanded\def\strc_descriptions_yes_titled[#1]%
- {\doifnextbgroupelse
+ {\doifelsenextbgroup
{\strc_descriptions_yes_titled_indeed[#1]}%
{\setfalse\c_strc_constructions_title_state
\strc_descriptions_yes_normal[#1]}}
@@ -197,7 +198,7 @@
\fi}
\unexpanded\def\strc_descriptions_nop_titled
- {\doifnextbgroupelse
+ {\doifelsenextbgroup
{\strc_descriptions_nop_titled_indeed}%
{\setfalse\c_strc_constructions_title_state
\strc_descriptions_nop_normal}}
diff --git a/tex/context/base/strc-doc.lua b/tex/context/base/strc-doc.lua
index e3cbb02ed..029d68a9d 100644
--- a/tex/context/base/strc-doc.lua
+++ b/tex/context/base/strc-doc.lua
@@ -17,15 +17,18 @@ if not modules then modules = { } end modules ['strc-doc'] = {
local next, type, tonumber, select = next, type, tonumber, select
local format, gsub, find, gmatch, match = string.format, string.gsub, string.find, string.gmatch, string.match
-local concat, fastcopy = table.concat, table.fastcopy
+local concat, fastcopy, insert, remove = table.concat, table.fastcopy, table.insert, table.remove
local max, min = math.max, math.min
local allocate, mark, accesstable = utilities.storage.allocate, utilities.storage.mark, utilities.tables.accesstable
local setmetatableindex = table.setmetatableindex
+local lpegmatch, P, C = lpeg.match, lpeg.P, lpeg.C
local catcodenumbers = catcodes.numbers
local ctxcatcodes = catcodenumbers.ctxcatcodes
local variables = interfaces.variables
+local implement = interfaces.implement
+
local v_last = variables.last
local v_first = variables.first
local v_previous = variables.previous
@@ -59,8 +62,14 @@ local startapplyprocessor = processors.startapply
local stopapplyprocessor = processors.stopapply
local strippedprocessor = processors.stripped
+local convertnumber = converters.convert
+
local a_internal = attributes.private('internal')
+local ctx_convertnumber = context.convertnumber
+local ctx_sprint = context.sprint
+local ctx_finalizeauto = context.finalizeautostructurelevel
+
-- -- -- document -- -- --
local data -- the current state
@@ -124,28 +133,48 @@ local registered = sections.registered
storage.register("structures/sections/registered", registered, "structures.sections.registered")
+local function update(name,level,section)
+ for k, v in next, registered do
+ if k ~= name and v.coupling == name then
+ report_structure("updating section level %a to level of %a",k,name)
+ context.doredefinehead(k,name)
+ update(k,level,section)
+ end
+ end
+end
+
function sections.register(name,specification)
registered[name] = specification
+ local level = specification.level
+ local section = specification.section
+ update(name,level,section)
end
function sections.currentid()
return #tobesaved
end
+local lastsaved = 0
+
function sections.save(sectiondata)
-- local sectionnumber = helpers.simplify(section.sectiondata) -- maybe done earlier
local numberdata = sectiondata.numberdata
local ntobesaved = #tobesaved
if not numberdata or sectiondata.metadata.nolist then
- return ntobesaved
+ -- stay
else
ntobesaved = ntobesaved + 1
tobesaved[ntobesaved] = numberdata
if not collected[ntobesaved] then
collected[ntobesaved] = numberdata
end
- return ntobesaved
end
+ lastsaved = ntobesaved
+ return ntobesaved
+end
+
+function sections.currentsectionindex()
+ return lastsaved -- only for special controlled situations
end
function sections.load()
@@ -210,7 +239,7 @@ end
function sections.pushblock(name,settings)
counters.check(0) -- we assume sane usage of \page between blocks
local block = name or data.block
- data.blocks[#data.blocks+1] = block
+ insert(data.blocks,block)
data.block = block
sectionblockdata[block] = settings
documents.reset()
@@ -218,17 +247,18 @@ function sections.pushblock(name,settings)
end
function sections.popblock()
- data.blocks[#data.blocks] = nil
- local block = data.blocks[#data.blocks] or data.block
+ local block = remove(data.blocks) or data.block
data.block = block
documents.reset()
return block
end
-function sections.currentblock()
+local function getcurrentblock()
return data.block or data.blocks[#data.blocks] or "unknown"
end
+sections.currentblock = getcurrentblock
+
function sections.currentlevel()
return data.depth
end
@@ -239,18 +269,36 @@ end
local saveset = { } -- experiment, see sections/tricky-001.tex
-function sections.somelevel(given)
+function sections.setentry(given)
-- old number
local numbers = data.numbers
+ --
+ local metadata = given.metadata
+ local numberdata = given.numberdata
+ local references = given.references
+ local directives = given.directives
+ local userdata = given.userdata
+
+ if not metadata then
+ metadata = { }
+ given.metadata = metadata
+ end
+ if not numberdata then
+ numberdata = { }
+ given.numberdata = numberdata
+ end
+ if not references then
+ references = { }
+ given.references = references
+ end
local ownnumbers = data.ownnumbers
local forced = data.forced
local status = data.status
local olddepth = data.depth
- local givenname = given.metadata.name
+ local givenname = metadata.name
local mappedlevel = levelmap[givenname]
local newdepth = tonumber(mappedlevel or (olddepth > 0 and olddepth) or 1) -- hm, levelmap only works for section-*
- local directives = given.directives
local resetset = directives and directives.resetset or ""
-- local resetter = sets.getall("structure:resets",data.block,resetset)
-- a trick to permit userdata to overload title, ownnumber and reference
@@ -260,14 +308,13 @@ function sections.somelevel(given)
report_structure("name %a, mapped level %a, old depth %a, new depth %a, reset set %a",
givenname,mappedlevel,olddepth,newdepth,resetset)
end
- local u = given.userdata
- if u then
- -- kind of obsolete as we can pass them directly anyway
- if u.reference and u.reference ~= "" then given.metadata.reference = u.reference ; u.reference = nil end
- if u.ownnumber and u.ownnumber ~= "" then given.numberdata.ownnumber = u.ownnumber ; u.ownnumber = nil end
- if u.title and u.title ~= "" then given.titledata.title = u.title ; u.title = nil end
- if u.bookmark and u.bookmark ~= "" then given.titledata.bookmark = u.bookmark ; u.bookmark = nil end
- if u.label and u.label ~= "" then given.titledata.label = u.label ; u.label = nil end
+ if userdata then
+ -- kind of obsolete as we can pass them directly anyway ... NEEDS CHECKING !
+ if userdata.reference and userdata.reference ~= "" then given.metadata.reference = userdata.reference ; userdata.reference = nil end
+ if userdata.ownnumber and userdata.ownnumber ~= "" then given.numberdata.ownnumber = userdata.ownnumber ; userdata.ownnumber = nil end
+ if userdata.title and userdata.title ~= "" then given.titledata.title = userdata.title ; userdata.title = nil end
+ if userdata.bookmark and userdata.bookmark ~= "" then given.titledata.bookmark = userdata.bookmark ; userdata.bookmark = nil end
+ if userdata.label and userdata.label ~= "" then given.titledata.label = userdata.label ; userdata.label = nil end
end
-- so far for the trick
if saveset then
@@ -305,12 +352,12 @@ function sections.somelevel(given)
end
end
counters.check(newdepth)
- ownnumbers[newdepth] = given.numberdata.ownnumber or ""
- given.numberdata.ownnumber = nil
+ ownnumbers[newdepth] = numberdata.ownnumber or ""
+ numberdata.ownnumber = nil
data.depth = newdepth
-- new number
olddepth = newdepth
- if given.metadata.increment then
+ if metadata.increment then
local oldn, newn = numbers[newdepth] or 0, 0
local fd = forced[newdepth]
if fd then
@@ -340,40 +387,31 @@ function sections.somelevel(given)
v[2](k)
end
end
- local numberdata= given.numberdata
- if not numberdata then
- -- probably simplified to nothing
- numberdata = { }
- given.numberdata = numberdata
- end
-
local n = { }
for i=1,newdepth do
n[i] = numbers[i]
end
numberdata.numbers = n
--- numberdata.numbers = fastcopy(numbers)
-
+ if not numberdata.block then
+ numberdata.block = getcurrentblock() -- also in references
+ end
if #ownnumbers > 0 then
numberdata.ownnumbers = fastcopy(ownnumbers)
end
if trace_detail then
report_structure("name %a, numbers % a, own numbers % a",givenname,numberdata.numbers,numberdata.ownnumbers)
end
-
- local metadata = given.metadata
- local references = given.references
-
+ if not references.block then
+ references.block = getcurrentblock() -- also in numberdata
+ end
local tag = references.tag or tags.getid(metadata.kind,metadata.name)
if tag and tag ~= "" and tag ~= "?" then
references.tag = tag
end
-
local setcomponent = structures.references.setcomponent
if setcomponent then
setcomponent(given) -- might move to the tex end
end
-
references.section = sections.save(given)
-- given.numberdata = nil
end
@@ -456,7 +494,7 @@ function sections.structuredata(depth,key,default,honorcatcodetable) -- todo: sp
local data = data.status[depth]
local d
if data then
- if find(key,"%.") then
+ if find(key,".",1,true) then
d = accesstable(key,data)
else
d = data.titledata
@@ -468,7 +506,7 @@ function sections.structuredata(depth,key,default,honorcatcodetable) -- todo: sp
local metadata = data.metadata
local catcodes = metadata and metadata.catcodes
if catcodes then
- context.sprint(catcodes,d)
+ ctx_sprint(catcodes,d)
else
context(d)
end
@@ -477,7 +515,7 @@ function sections.structuredata(depth,key,default,honorcatcodetable) -- todo: sp
else
local catcodes = catcodenumbers[honorcatcodetable]
if catcodes then
- context.sprint(catcodes,d)
+ ctx_sprint(catcodes,d)
else
context(d)
end
@@ -512,16 +550,18 @@ function sections.current()
return data.status[data.depth]
end
-function sections.depthnumber(n)
+local function depthnumber(n)
local depth = data.depth
if not n or n == 0 then
n = depth
elseif n < 0 then
n = depth + n
end
- return context(data.numbers[n] or 0)
+ return data.numbers[n] or 0
end
+sections.depthnumber = depthnumber
+
function sections.autodepth(numbers)
for i=#numbers,1,-1 do
if numbers[i] ~= 0 then
@@ -547,10 +587,9 @@ end
-- sign=positive => also zero
-- sign=hang => llap sign
---~ todo: test this
---~
+-- this can be a local function
-local function process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,index,entry,result,preceding,done)
+local function process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,entry,result,preceding,done,language)
-- todo: too much (100 steps)
local number = numbers and (numbers[index] or 0)
local ownnumber = ownnumbers and ownnumbers[index] or ""
@@ -571,20 +610,20 @@ local function process(index,numbers,ownnumbers,criterium,separatorset,conversio
if ownnumber ~= "" then
result[#result+1] = ownnumber
elseif conversion and conversion ~= "" then -- traditional (e.g. used in itemgroups) .. inherited!
- result[#result+1] = converters.convert(conversion,number)
+ result[#result+1] = convertnumber(conversion,number,language)
else
local theconversion = sets.get("structure:conversions",block,conversionset,index,"numbers")
- result[#result+1] = converters.convert(theconversion,number)
+ result[#result+1] = convertnumber(theconversion,number,language)
end
else
if ownnumber ~= "" then
applyprocessor(ownnumber)
elseif conversion and conversion ~= "" then -- traditional (e.g. used in itemgroups)
- context.convertnumber(conversion,number)
+ ctx_convertnumber(conversion,number)
else
local theconversion = sets.get("structure:conversions",block,conversionset,index,"numbers")
local data = startapplyprocessor(theconversion)
- context.convertnumber(data or "numbers",number)
+ ctx_convertnumber(data or "numbers",number)
stopapplyprocessor()
end
end
@@ -606,6 +645,7 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
local set = ""
local segments = ""
local criterium = ""
+ local language = ""
for d=1,select("#",...) do
local data = select(d,...) -- can be multiple parametersets
if data then
@@ -619,6 +659,7 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
if set == "" then set = data.set or "" end
if segments == "" then segments = data.segments or "" end
if criterium == "" then criterium = data.criterium or "" end
+ if language == "" then language = data.language or "" end
end
end
if separatorset == "" then separatorset = "default" end
@@ -630,6 +671,7 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
if connector == "" then connector = nil end
if set == "" then set = "default" end
if segments == "" then segments = nil end
+ if language == "" then language = nil end
--
if criterium == v_strict then
criterium = 0
@@ -641,10 +683,10 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
criterium = 0
end
--
- local firstprefix, lastprefix = 0, 16
+    local firstprefix, lastprefix = 0, 16 -- too much, could be the maximum level found
if segments then
local f, l = match(tostring(segments),"^(.-):(.+)$")
- if l == "*" then
+ if l == "*" or l == v_all then
l = 100 -- new
end
if f and l then
@@ -678,7 +720,7 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
applyprocessor(starter)
end
end
- if prefixlist and (kind == 'section' or kind == 'prefix' or kind == 'direct') then
+ if prefixlist and (kind == "section" or kind == "prefix" or kind == "direct") then
-- find valid set (problem: for sectionnumber we should pass the level)
-- no holes
local b, e, bb, ee = 1, #prefixlist, 0, 0
@@ -722,15 +764,13 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
local prefix = prefixlist[k]
local index = sections.getlevel(prefix) or k
if index >= firstprefix and index <= lastprefix then
- -- process(index,result)
- preceding, done = process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,index,entry,result,preceding,done)
+ preceding, done = process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,entry,result,preceding,done,language)
end
end
else
-- also holes check
for index=firstprefix,lastprefix do
- -- process(index,result)
- preceding, done = process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,index,entry,result,preceding,done)
+ preceding, done = process(index,numbers,ownnumbers,criterium,separatorset,conversion,conversionset,entry,result,preceding,done,language)
end
end
--
@@ -746,7 +786,7 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
if result then
result[#result+1] = strippedprocessor(groupsuffix)
else
- applyprocessor(groupsuffix)
+ applyprocessor(groupsuffix)
end
end
if stopper then
@@ -891,34 +931,21 @@ end
function sections.getnumber(depth,what) -- redefined here
local sectiondata = sections.findnumber(depth,what)
- context((sectiondata and sectiondata.numbers[depth]) or 0)
+ local askednumber = 0
+ if sectiondata then
+ local numbers = sectiondata.numbers
+ if numbers then
+ askednumber = numbers[depth] or 0
+ end
+ end
+ context(askednumber)
end
-- experimental
local levels = { }
---~ function commands.autonextstructurelevel(level)
---~ if level > #levels then
---~ for i=#levels+1,level do
---~ levels[i] = ""
---~ end
---~ end
---~ local finish = concat(levels,"\n",level) or ""
---~ for i=level+1,#levels do
---~ levels[i] = ""
---~ end
---~ levels[level] = [[\finalizeautostructurelevel]]
---~ context(finish)
---~ end
-
---~ function commands.autofinishstructurelevels()
---~ local finish = concat(levels,"\n") or ""
---~ levels = { }
---~ context(finish)
---~ end
-
-function commands.autonextstructurelevel(level)
+local function autonextstructurelevel(level)
if level > #levels then
for i=#levels+1,level do
levels[i] = false
@@ -926,7 +953,7 @@ function commands.autonextstructurelevel(level)
else
for i=level,#levels do
if levels[i] then
- context.finalizeautostructurelevel()
+ ctx_finalizeauto()
levels[i] = false
end
end
@@ -934,39 +961,141 @@ function commands.autonextstructurelevel(level)
levels[level] = true
end
-function commands.autofinishstructurelevels()
+local function autofinishstructurelevels()
for i=1,#levels do
if levels[i] then
- context.finalizeautostructurelevel()
+ ctx_finalizeauto()
end
end
levels = { }
end
--- interface (some are actually already commands, like sections.fullnumber)
+implement {
+ name = "autonextstructurelevel",
+ actions = autonextstructurelevel,
+ arguments = "integer",
+}
-commands.structurenumber = function() sections.fullnumber() end
-commands.structuretitle = function() sections.title () end
+implement {
+ name = "autofinishstructurelevels",
+ actions = autofinishstructurelevels,
+}
-commands.structurevariable = function(name) sections.structuredata(nil,name) end
-commands.structureuservariable = function(name) sections.userdata (nil,name) end
-commands.structurecatcodedget = function(name) sections.structuredata(nil,name,nil,true) end
-commands.structuregivencatcodedget = function(name,catcode) sections.structuredata(nil,name,nil,catcode) end
-commands.structureautocatcodedget = function(name,catcode) sections.structuredata(nil,name,nil,catcode) end
+-- interface (some are actually already commands, like sections.fullnumber)
-commands.namedstructurevariable = function(depth,name) sections.structuredata(depth,name) end
-commands.namedstructureuservariable = function(depth,name) sections.userdata (depth,name) end
+implement {
+ name = "depthnumber",
+ actions = { depthnumber, context },
+ arguments = "integer",
+}
---
+implement { name = "structurenumber", actions = sections.fullnumber }
+implement { name = "structuretitle", actions = sections.title }
+
+implement { name = "structurevariable", actions = sections.structuredata, arguments = { false, "string" } }
+implement { name = "structureuservariable", actions = sections.userdata, arguments = { false, "string" } }
+implement { name = "structurecatcodedget", actions = sections.structuredata, arguments = { false, "string", false, true } }
+implement { name = "structuregivencatcodedget", actions = sections.structuredata, arguments = { false, "string", false, "integer" } }
+implement { name = "structureautocatcodedget", actions = sections.structuredata, arguments = { false, "string", false, "string" } }
+
+implement { name = "namedstructurevariable", actions = sections.structuredata, arguments = { "string", "string" } }
+implement { name = "namedstructureuservariable", actions = sections.userdata, arguments = { "string", "string" } }
+
+implement { name = "setstructurelevel", actions = sections.setlevel, arguments = { "string", "string" } }
+implement { name = "getstructurelevel", actions = sections.getcurrentlevel, arguments = { "string" } }
+implement { name = "setstructurenumber", actions = sections.setnumber, arguments = { "integer", "string" } }
+implement { name = "getstructurenumber", actions = sections.getnumber, arguments = { "integer" } }
+implement { name = "getsomestructurenumber", actions = sections.getnumber, arguments = { "integer", "string" } }
+implement { name = "getfullstructurenumber", actions = sections.fullnumber, arguments = { "integer" } }
+implement { name = "getsomefullstructurenumber", actions = sections.fullnumber, arguments = { "integer", "string" } }
+implement { name = "getspecificstructuretitle", actions = sections.structuredata, arguments = { "string", "'titledata.title'",false,"string" } }
+
+implement { name = "reportstructure", actions = sections.reportstructure }
+
+implement {
+ name = "registersection",
+ actions = sections.register,
+ arguments = {
+ "string",
+ {
+ { "coupling" },
+ { "section" },
+ { "level", "integer" },
+ { "parent" },
+ }
+ }
+}
-commands.setsectionblock = sections.setblock
-commands.pushsectionblock = sections.pushblock
-commands.popsectionblock = sections.popblock
+implement {
+ name = "setsectionentry",
+ actions = sections.setentry,
+ arguments = {
+ {
+ { "references", {
+ { "internal", "integer" },
+ { "block" },
+ { "backreference" },
+ { "prefix" },
+ { "reference" },
+ }
+ },
+ { "directives", {
+ { "resetset" }
+ }
+ },
+ { "metadata", {
+ { "kind" },
+ { "name" },
+ { "catcodes", "integer" },
+ { "coding" },
+ { "xmlroot" },
+ { "xmlsetup" },
+ { "nolist", "boolean" },
+ { "increment" },
+ }
+ },
+ { "titledata", {
+ { "label" },
+ { "title" },
+ { "bookmark" },
+ { "marking" },
+ { "list" },
+ }
+ },
+ { "numberdata", {
+ { "block" },
+ { "hidenumber", "boolean" },
+ { "separatorset" },
+ { "conversionset" },
+ { "conversion" },
+ { "starter" },
+ { "stopper" },
+ { "set" },
+ { "segments" },
+ { "ownnumber" },
+ { "language" },
+ },
+ },
+ { "userdata" },
+ }
+ }
+}
---
+-- os.exit()
-local byway = "^" .. v_by -- ugly but downward compatible
+implement {
+ name = "setsectionblock",
+ actions = sections.setblock,
+ arguments = { "string", { { "bookmark" } } }
+}
-function commands.way(way)
- context((gsub(way,byway,"")))
-end
+implement {
+ name = "pushsectionblock",
+ actions = sections.pushblock,
+ arguments = { "string", { { "bookmark" } } }
+}
+
+implement {
+ name = "popsectionblock",
+ actions = sections.popblock,
+}
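+
+-- The TeX end is expected to call these scanners inline, along the lines of the
+-- \clf_registerbookmark call in strc-bkm.mkiv, so presumably something like
+--
+--   \clf_pushsectionblock {bodypart} bookmark {Body} \relax
+--
+-- which ends up as sections.pushblock("bodypart",{ bookmark = "Body" }).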
diff --git a/tex/context/base/strc-doc.mkiv b/tex/context/base/strc-doc.mkiv
index c8dfae1e4..c453f199e 100644
--- a/tex/context/base/strc-doc.mkiv
+++ b/tex/context/base/strc-doc.mkiv
@@ -20,7 +20,13 @@
%D This will move:
\unexpanded\def\setstructuresynchronization#1% todo: use ctxcontext
- {\xdef\currentstructureattribute {\ctxlua {tex.write(structures.references.setinternalreference("\currentstructurereferenceprefix","\currentstructurereference",\nextinternalreference,"\interactionparameter\c!focus"))}}%
- \xdef\currentstructuresynchronize{\ctxlatecommand{enhancelist(#1)}}}
+ {\clf_setinternalreference
+ prefix {\currentstructurereferenceprefix}%
+ reference {\currentstructurereference}
+ internal \nextinternalreference
+ view {\interactionparameter\c!focus}%
+ \relax
+ \xdef\currentstructureattribute {\the\lastdestinationattribute}%
+ \xdef\currentstructuresynchronize{\strc_lists_inject_enhance{#1}{\nextinternalreference}}}
\protect \endinput
diff --git a/tex/context/base/strc-enu.mkvi b/tex/context/base/strc-enu.mkvi
index e369bc2e1..b76bc0067 100644
--- a/tex/context/base/strc-enu.mkvi
+++ b/tex/context/base/strc-enu.mkvi
@@ -172,7 +172,7 @@
\ifx\p_counter\empty %
\let\p_counter\currentenumeration
\fi
- \doifcounterelse\p_counter\donothing{\strc_enumerations_define_counter\p_counter}%
+ \doifelsecounter\p_counter\donothing{\strc_enumerations_define_counter\p_counter}%
\letenumerationparameter\s!counter\p_counter
%\strc_enumerations_setup_counter\currentenumeration
\to \everydefineenumeration
@@ -183,6 +183,7 @@
\unexpanded\setvalue{\??constructioninitializer\v!enumeration}%
{\let\currentenumeration \currentconstruction
\let\constructionparameter \enumerationparameter
+ \let\constructionnamespace \??enumeration
\let\detokenizedconstructionparameter\detokenizedenumerationparameter
\let\letconstructionparameter \letenumerationparameter
\let\useconstructionstyleandcolor \useenumerationstyleandcolor
@@ -283,7 +284,7 @@
\unexpanded\def\strc_enumerations_inject_extra_text
{\ifconditional\c_strc_constructions_title_state
- \ctxcommand{doiflisthastitleelse("\currentconstructionmain",\currentconstructionlistentry)}
+ \clf_doifelselisthastitle{\currentconstructionmain}\numexpr\currentconstructionlistentry\relax
\donothing
\strc_enumerations_inject_extra_text_indeed
\fi}
@@ -300,7 +301,7 @@
\useconstructionstyleandcolor\c!titlestyle\c!titlecolor
\constructionparameter\c!titlecommand
{\constructionparameter\c!titleleft
- \ctxcommand{savedlisttitle("\currentconstructionmain",\currentconstructionlistentry)}%
+ \clf_savedlisttitle{\currentconstructionmain}\currentconstructionlistentry\relax
\constructionparameter\c!titleright}%
\endgroup}
@@ -312,7 +313,7 @@
\unexpanded\def\strc_enumerations_inject_number
{\constructionparameter\c!left
\constructionparameter\c!starter
- \ctxcommand{savedlistprefixednumber("\currentconstructionmain",\currentconstructionlistentry)}%
+ \clf_savedlistprefixednumber{\currentconstructionmain}\currentconstructionlistentry\relax
\constructionparameter\c!stopper
\constructionparameter\c!right}
@@ -370,6 +371,6 @@
\fi}
\unexpanded\def\strc_enumerations_skip_number_coupling[#tag]% e.g. for questions with no answer
- {\ctxlua{structures.references.setnextorder("construction","#tag")}}
+ {\clf_setnextreferenceorder{construction}{#tag}}
\protect \endinput
diff --git a/tex/context/base/strc-flt.mkvi b/tex/context/base/strc-flt.mkvi
index a93921317..be2958fbf 100644
--- a/tex/context/base/strc-flt.mkvi
+++ b/tex/context/base/strc-flt.mkvi
@@ -11,12 +11,31 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+%D This module will be redone with conditionals and everything.
+
\writestatus{loading}{ConTeXt Structure Macros / Float Numbering}
\registerctxluafile{strc-flt}{1.001}
\unprotect
+% todo: a keyword for this (and then a settings->hash for speed)
+%
+% \setuplayout[width=middle,backspace=3cm]
+%
+% \appendtoks
+% \settrue\inhibitmargindata
+% \to \everyinsidefloat
+%
+% \starttext
+% \dorecurse{20}{
+% \par \inleft{\red\infofont<#1>} \par
+% \placefigure[leftmargin]{}{\framed[height=1cm,width=2cm]{}}
+% % \placefigure{#1}{\framed[height=1cm,width=2cm]{}}
+% \par line #1.1 \par line #1.2 \par
+% }
+% \stoptext
+
% todo: delay caption creation and make setups for each method instead
% so that we can have a list of methods and redo them as we can
% keep the list or even better: recreate it
@@ -91,8 +110,9 @@
% \c!stopper=\@@kostopper,
\c!suffixseparator=, % currently rather hard coded
\c!suffix=\floatcaptionsuffix,
- \c!distance=1em,
+ \c!distance=\emwidth,
\c!conversion=\v!numbers,
+ \c!maxwidth=\hsize,
\c!command=]
% we can comment some of these
@@ -133,6 +153,7 @@
\c!outermargin=\zeropoint, % idem
\c!leftmargindistance=\zeropoint,
\c!rightmargindistance=\floatparameter\c!leftmargindistance,
+   \c!step=\v!big, % the flush side float step (big=line, medium=halfline, small=quarterline, depth=halfline with normaldepth)
\c!ntop=2,
\c!nbottom=0,
\c!nlines=4, % used?
@@ -186,8 +207,8 @@
{\definefloatcaption[#1][#3]%
\definecounter[#1][#3]%
\definelist[#1][#3]%
- \presetlabeltext[#1=\Word{#3}~]%
- \presetheadtext[#2=\Word{#2}]%
+ \copylabeltext[#1=#3]%
+ %\presetheadtext[#2=\Word{#2}]%
\strc_floats_define_saved[#1][#3]%
\strc_floats_define_commands{#1}{#2}}
@@ -235,8 +256,9 @@
\namedtaggedlabeltexts
\t!floatlabel \currentfloat
\t!floatnumber\currentfloat
- {\ctxcommand{savedlistprefixednumber("\currentfloat",\currentfloatnumber)}%
- \thecurrentfloatnumbersuffix}%
+ {\floatcaptionparameter\c!numbercommand
+ {\clf_savedlistprefixednumber{\currentfloat}\currentfloatnumber\relax
+ \thecurrentfloatnumbersuffix}}%
\fi
\fi \fi}
@@ -244,7 +266,8 @@
{\ifnofloatcaption \else
\ifx\currentfloatnumber\relax\else
\dostarttagged\t!floattext\empty
- \ctxcommand{savedlisttitle("\currentfloat",\currentfloatnumber)}%
+ \floatcaptionparameter\c!textcommand
+ {\clf_savedlisttitle{\currentfloat}\currentfloatnumber\relax}%
\dostoptagged
\fi
\fi}
@@ -457,6 +480,10 @@
\ifx\currentfloat\empty
\let\currentfloat\v!figure % a bit of a hack
\fi
+ \doifelsecommandhandler\??float\currentfloat
+ \donothing
+ {\writestatus\m!floatblocks{unknown float type '\currentfloat'}%
+ \let\currentfloat\v!figure}% also a hack
\global\let\lastplacedfloat\currentfloat
\let\m_strc_floats_saved_userdata\empty
\let\currentfloatcaption\currentfloat}
@@ -489,8 +516,9 @@
\edef\floatlocation{\floatparameter\c!default}% beware of a clash between alignment locations
\fi
\strc_floats_analyze_location
+ % todo: use \lets
\setupcurrentfloatcaption[\c!reference={#reference},\c!title={#caption},\c!marking=,\c!list=,\c!bookmark=]%
- \doifinsetelse\v!split\floatlocation\strc_floats_place_next_box_split\strc_floats_place_next_box_normal}
+ \doifelseinset\v!split\floatlocation\strc_floats_place_next_box_split\strc_floats_place_next_box_normal}
\unexpanded\def\placefloat
{\flushnotes
@@ -535,7 +563,7 @@
\setupcurrentfloatuserdata[#userdata]%
\fi
\strc_floats_analyze_location
- \doifinsetelse\v!split\floatlocation\strc_floats_place_next_box_split\strc_floats_place_next_box_normal
+ \doifelseinset\v!split\floatlocation\strc_floats_place_next_box_split\strc_floats_place_next_box_normal
\bgroup
\ignorespaces}
@@ -547,7 +575,7 @@
{\flushnotes
\page_otr_command_flush_side_floats % here !
\strc_floats_begin_group
- \dodoubleempty\strc_floats_start_place_float}
+ \dotripleempty\strc_floats_start_place_float}
\def\strc_floats_start_place_float[#tag]%
{\strc_floats_set_current_tag{#tag}%
@@ -636,14 +664,14 @@
\unexpanded\def\strc_floats_place_next_box_normal
{\ifconditional\c_page_floats_some_waiting
% this was \checkwaitingfloats spread all over
- \doifinsetelse\v!always\floatlocation
+ \doifelseinset\v!always\floatlocation
{\showmessage\m!floatblocks5\empty}
- {\doifcommonelse\floatlocation\flushfloatslist\page_otr_command_flush_floats\donothing}%
+ {\doifelsecommon\floatlocation\flushfloatslist\page_otr_command_flush_floats\donothing}%
% but which should be done before using box \floatbox
\fi
\page_margin_strc_floats_before % todo: each float handler gets a before
\global\insidefloattrue
- \dostarttagged\t!float\currentfloat
+ \dostarttaggedchained\t!float\currentfloat\??float
\page_margin_strc_floats_set_hsize % todo: each float handler gets a set_hsize
\the\everyinsidefloat
\strc_floats_analyze_variables_one
@@ -681,7 +709,7 @@
{\ifinsidecolumns
\global\setfalse\c_strc_floats_par_float
\else
- \doifcommonelse\floatlocation\flushfloatslist
+ \doifelsecommon\floatlocation\flushfloatslist
{\global\settrue \c_strc_floats_par_float}
{\global\setfalse\c_strc_floats_par_float}%
\fi
@@ -720,12 +748,12 @@
180=>\global\c_strc_floats_rotation\commalistelement\relax,%
270=>\global\c_strc_floats_rotation\commalistelement\relax]%
\fi
- \doifinsetelse\v!nonumber\floatlocation
+ \doifelseinset\v!nonumber\floatlocation
{\global\nofloatnumbertrue}
{\doifelse{\floatcaptionparameter\c!number}\v!yes
{\global\nofloatnumberfalse}
{\global\nofloatnumbertrue}}%
- \doifinsetelse\v!none\floatlocation
+ \doifelseinset\v!none\floatlocation
{\global\nofloatcaptiontrue}
{\global\nofloatcaptionfalse}%
\doif{\floatcaptionparameter\c!number}\v!none % new
@@ -741,7 +769,7 @@
\ifconditional\c_page_floats_center_box_global
\settrue\c_page_floats_center_box_local
\else
- \doifinsetelse\v!local\floatlocation\settrue\setfalse\c_page_floats_center_box_local
+ \doifelseinset\v!local\floatlocation\settrue\setfalse\c_page_floats_center_box_local
\fi
\doifnotcommon{\v!always,\v!here,\v!force}\floatlocation % ! ! ! ! ! !
{\setfalse\c_page_floats_center_box_global
@@ -767,6 +795,8 @@
\expandafter\firstoftwoarguments
\fi}
+\let\doifmainfloatbodyelse\doifelsemainfloatbody
+
% todo: optional user pars
\let\currentfloatattribute\empty % to be checked
@@ -801,7 +831,7 @@
% uses:
\def\strc_floats_group_index
- {\numexpr\ctxcommand{listgroupindex("\currentfloat","\currentfloatgroup")}\relax}
+ {\numexpr\clf_listgroupindex{\currentfloat}{\currentfloatgroup}\relax}
\def\strc_floats_place_packaged_boxes
{\expandafter\strc_floats_place_packaged_boxes_indeed\expandafter{\m_strc_floats_saved_userdata}}
@@ -835,7 +865,7 @@
\relax
\relax
\relax
- [\c!name=\currentfloat,%
+ [\s!name=\currentfloat,% was c!name
\s!counter=\currentfloatcounter,%
\s!hascaption=\ifnofloatcaption \v!no\else\v!yes\fi,%
\s!hasnumber=\ifnofloatnumber \v!no\else\v!yes\fi,%
@@ -896,26 +926,29 @@
\strc_float_load_data
\to \everyinsidefloat
-\def\doifrightpagefloatelse
+\def\doifelserightpagefloat
{\ifdoublesided
\ifsinglesided
\doubleexpandafter\firstoftwoarguments
\else
- \doubleexpandafter\doifoddfloatpageelse
+ \doubleexpandafter\doifelseoddfloatpage
\fi
\else
\expandafter\firstoftwoarguments
\fi}
-\def\doifoddfloatpageelse
+\def\doifelseoddfloatpage
{\ifodd\purenumber\strc_float_realpage\space
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
+\let\doifrightpagefloatelse\doifelserightpagefloat
+\let\doifoddpagefloatelse \doifelseoddpagefloat
+
\appendtoks
- \let\rightorleftpageaction\doifrightpagefloatelse
+ \let\rightorleftpageaction\doifelserightpagefloat
\to \everyinsidefloat
% \let\movesidefloat\gobbleoneargument
@@ -926,7 +959,7 @@
\unexpanded\def\movesidefloat[#settings]% (-)n*line|x=,y=
{\global\d_page_sides_downshift \zeropoint
\global\d_page_sides_extrashift\zeropoint
- \doifassignmentelse{#settings}%
+ \doifelseassignment{#settings}%
{\begingroup
\setupcurrentfloat[\c!x=\zeropoint,\c!y=\zeropoint,#settings]%
\ifgridsnapping
@@ -948,6 +981,12 @@
\setvalue{\??floatmovement+\v!hang}{\strc_floats_move_down_hang\plusone}
\setvalue{\??floatmovement-\v!hang}{\strc_floats_move_down_hang\minusone}
+\setvalue{\??floatmovement-2*\v!line}{\strc_floats_move_down_line{-2}}
+\setvalue{\??floatmovement+2*\v!line}{\strc_floats_move_down_line{2}}
+\setvalue{\??floatmovement 2*\v!line}{\strc_floats_move_down_line{2}}
+
+\unexpanded\def\installfloatmovement#1#2{\setvalue{\??floatmovement#1}{#2}}
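+
+% More variants can presumably be registered the same way as the 2*line ones above,
+% e.g. (untested here):
+%
+% \installfloatmovement{3*\v!line}{\strc_floats_move_down_line{3}}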
+
\def\strc_floats_move_down#setting%
{\csname\??floatmovement
\ifcsname\??floatmovement#setting\endcsname#setting\fi
@@ -1038,10 +1077,10 @@
\fi
\fi
% we can also support edges .. in that case no common but a fast loop
- \doifinsetelse\v!hanging\floatlocation
- {\doifcommonelse{\v!inleft,\v!leftmargin}\floatlocation
+ \doifelseinset\v!hanging\floatlocation
+ {\doifelsecommon{\v!inleft,\v!leftmargin}\floatlocation
{\let\p_maxwidth\leftmarginwidth}%
- {\doifcommonelse{\v!inright,\v!rightmargin}\floatlocation
+ {\doifelsecommon{\v!inright,\v!rightmargin}\floatlocation
{\let\p_maxwidth\rightmarginwidth}%
{\edef\p_maxwidth{\floatparameter\c!maxwidth}}}}%
{\edef\p_maxwidth{\floatparameter\c!maxwidth}}%
@@ -1050,9 +1089,9 @@
\else
\scratchwidth\p_maxwidth\relax
\ifdim\wd\floatbox>\scratchwidth
- \doifcommonelse{\v!inright,\v!rightmargin,\v!rightedge,\v!inleft,\v!leftmargin,\v!leftedge}\floatlocation
+ \doifelsecommon{\v!inright,\v!rightmargin,\v!rightedge,\v!inleft,\v!leftmargin,\v!leftedge}\floatlocation
{\global\d_page_sides_maximum\scratchwidth}
- {\doifcommonelse{\v!right,\v!left}\floatlocation
+ {\doifelsecommon{\v!right,\v!left}\floatlocation
\strc_floats_realign_floatbox_horizontal_one
\strc_floats_realign_floatbox_horizontal_two}%
\fi
@@ -1094,21 +1133,32 @@
\strc_floats_calculate_skip\d_strc_floats_bottom {\rootfloatparameter\c!spaceafter }%
\strc_floats_calculate_skip\d_page_sides_topskip {\rootfloatparameter\c!sidespacebefore}%
\strc_floats_calculate_skip\d_page_sides_bottomskip{\rootfloatparameter\c!sidespaceafter }%
- \global\d_strc_floats_margin \rootfloatparameter\c!margin
- \global\d_page_sided_leftshift \floatparameter \c!leftmargindistance
- \global\d_page_sided_rightshift\floatparameter \c!rightmargindistance
- \global\c_page_floats_n_of_top \rootfloatparameter\c!ntop
- \global\c_page_floats_n_of_bottom \rootfloatparameter\c!nbottom
+ \global\d_strc_floats_margin \rootfloatparameter\c!margin
+ \global\d_page_sided_leftshift \floatparameter \c!leftmargindistance
+ \global\d_page_sided_rightshift \floatparameter \c!rightmargindistance
+ \global\c_page_floats_n_of_top \rootfloatparameter\c!ntop
+ \global\c_page_floats_n_of_bottom\rootfloatparameter\c!nbottom
\endgroup}
\unexpanded\def\betweenfloatblanko % assumes that spaceafter is present
{\blank[\rootfloatparameter\c!spacebefore]} % or v!back,....
+% \unexpanded\def\doplacefloatbox % used elsewhere
+% {%\forgetall % NO
+% \whitespace
+% \blank[\rootfloatparameter\c!spacebefore]
+% \page_otr_command_flush_float_box
+% \blank[\rootfloatparameter\c!spaceafter]}
+
\unexpanded\def\doplacefloatbox % used elsewhere
{%\forgetall % NO
\whitespace
\blank[\rootfloatparameter\c!spacebefore]
+ \nointerlineskip
+ \flushnotes % new per 2014-05-29 : todo: move them up in the mvl
+ \nointerlineskip
\page_otr_command_flush_float_box
+ \nointerlineskip
\blank[\rootfloatparameter\c!spaceafter]}
% test case:
@@ -1126,7 +1176,7 @@
\global\floatheight \ht\floatbox % forget about the depth
\global\floattextwidth\dimexpr\hsize-\floatwidth-\rootfloatparameter\c!margin\relax
\edef\floatlocation{\floatlocationmethod}% to be sure .. why
- \doifinsetelse\v!tall\floatlocationmethod
+ \doifelseinset\v!tall\floatlocationmethod
{\floattextheight\dimexpr\pagegoal-\pagetotal-\bigskipamount\relax % ugly, this bigskip
\ifdim\floattextheight>\textheight
\floattextheight\textheight
@@ -1151,27 +1201,27 @@
{\floattextheight\ifdim\ht\floattext<\floatheight\floatheight\else\ht\floattext\fi}%
\setbox\floatbox\vbox to \floattextheight
{\hsize\floatwidth
- \doifinsetelse\v!both\floatlocation
- {\doifinsetelse\v!low\floatlocation
+ \doifelseinset\v!both\floatlocation
+ {\doifelseinset\v!low\floatlocation
{\vfill\box\floatbox}
- {\doifinsetelse\v!middle\floatlocation
+ {\doifelseinset\v!middle\floatlocation
{\vfill\box\floatbox\vfill}
{\box\floatbox\vfill}}}
{\box\floatbox\vfill}}%
\setbox\floattext\vbox to \floattextheight
{\hsize\floattextwidth
- \doifinsetelse\v!low\floatlocation
+ \doifelseinset\v!low\floatlocation
{\vfill
\box\floattext
\doifinset\c!offset\floatlocation{\whitespace\blank}}
- {\doifinsetelse\v!middle\floatlocation
+ {\doifelseinset\v!middle\floatlocation
{\vfill
\box\floattext
\vfill}
{\doifinset\v!offset\floatlocation{\whitespace\blank}%
\box\floattext
\vfill}}}%
- \doifinsetelse\v!right\floatlocation
+ \doifelseinset\v!right\floatlocation
{\setbox\floatbox\hbox to \hsize
{\box\floattext
\hfill
@@ -1190,13 +1240,13 @@
\blank[\rootfloatparameter\c!spaceafter]%
\strc_floats_end_text_group
\page_floats_report_total}
-
+
\def\borderedfloatbox
{\begingroup
\setupcurrentfloat[\c!location=\v!normal,\c!width=\v!fit,\c!height=\v!fit]%
\inheritedfloatframed{\box\floatbox}%
\endgroup}
-
+
% minwidth=fit,width=max : no overshoot, as wide as graphic
\def\strc_floats_align_content_indeed
@@ -1366,17 +1416,82 @@
\fi
\strc_floats_make_complete_caption}}
+% \def\strc_floats_prepare_stack_caption_auto
+% {\ifx\p_strc_floats_caption_align\empty \else
+% \doifnotinset\v!middle\p_strc_floats_caption_align{\let\captionovershoot\!!zeropoint}%
+% \fi
+% \edef\captionhsize{\the\wd\b_strc_floats_content}%
+% \ifdim\captionhsize>\hsize
+% % float is wider than \hsize
+% \setbox\b_strc_floats_caption\vbox
+% {\settrialtypesetting
+% \strc_floats_caption_set_align
+% \hsize\captionhsize
+% \notesenabledfalse
+% \strc_floats_make_complete_caption}%
+% \ifdim\ht\scratchbox>\lineheight % more lines
+% \setbox\b_strc_floats_caption\vbox
+% {\strc_floats_caption_set_align
+% \hsize\dimexpr\captionhsize-\captionovershoot\relax
+% \ifdim\hsize<\captionminwidth\relax
+% \hsize\captionhsize
+% \fi
+% \strc_floats_make_complete_caption}%
+% \else
+% \setbox\b_strc_floats_caption\vbox
+% {\strc_floats_caption_set_align
+% \hsize\captionhsize
+% \strc_floats_make_complete_caption}%
+% \fi
+% \else
+% % float is smaller than or equal to \hsize
+% \ifdim\captionhsize<\captionminwidth\relax
+% \scratchdimen\captionminwidth % float smaller than min width
+% \edef\captionhsize{\the\scratchdimen}%
+% \fi
+% \setbox\scratchbox\vbox % test with overshoot
+% {\settrialtypesetting
+% \scratchdimen\dimexpr\captionhsize+\captionovershoot+3\emwidth\relax % 3em is an average word length
+% \ifdim\scratchdimen<\hsize
+% \hsize\scratchdimen
+% \fi
+% \notesenabledfalse
+% \strc_floats_make_complete_caption}%
+% \ifdim\ht\scratchbox>\lineheight
+% % at least an average word longer than a line
+% \setbox\b_strc_floats_caption\vbox
+% {\strc_floats_caption_set_align
+% \scratchdimen\dimexpr\captionhsize+\captionovershoot\relax
+% \ifdim\scratchdimen<\hsize
+% \hsize\scratchdimen
+% \fi
+% \strc_floats_make_complete_caption}%
+% \else\ifx\p_strc_floats_caption_align\empty
+% \setbox\b_strc_floats_caption\vbox
+% {\strc_floats_caption_set_align
+% \hsize\captionhsize
+% \raggedcenter % overloads
+% \strc_floats_make_complete_caption}%
+% \else
+% \setbox\b_strc_floats_caption\vbox
+% {\strc_floats_caption_set_align
+% \hsize\captionhsize
+% \strc_floats_make_complete_caption}%
+% \fi\fi
+% \fi}
+
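+% In the version below, the width comparisons of this reference copy are made
+% against the caption's maxwidth parameter (kept in \scratchwidth) rather than
+% against \hsize.
+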
\def\strc_floats_prepare_stack_caption_auto
{\ifx\p_strc_floats_caption_align\empty \else
\doifnotinset\v!middle\p_strc_floats_caption_align{\let\captionovershoot\!!zeropoint}%
\fi
\edef\captionhsize{\the\wd\b_strc_floats_content}%
- \ifdim\captionhsize>\hsize
+ \scratchwidth\floatcaptionparameter\c!maxwidth\relax
+ \ifdim\captionhsize>\scratchwidth
% float is wider than \hsize
\setbox\b_strc_floats_caption\vbox
{\settrialtypesetting
\strc_floats_caption_set_align
- \hsize\captionhsize
+ \hsize\scratchwidth
\notesenabledfalse
\strc_floats_make_complete_caption}%
\ifdim\ht\scratchbox>\lineheight % more lines
@@ -1384,13 +1499,13 @@
{\strc_floats_caption_set_align
\hsize\dimexpr\captionhsize-\captionovershoot\relax
\ifdim\hsize<\captionminwidth\relax
- \hsize\captionhsize
+ \hsize\scratchwidth
\fi
\strc_floats_make_complete_caption}%
\else
\setbox\b_strc_floats_caption\vbox
{\strc_floats_caption_set_align
- \hsize\captionhsize
+ \hsize\scratchwidth
\strc_floats_make_complete_caption}%
\fi
\else
@@ -1456,10 +1571,10 @@
\box\b_strc_floats_content}}
\def\strc_floats_build_box_next_outer
- {\doifrightpagefloatelse\strc_floats_build_box_next_right\strc_floats_build_box_next_left}
+ {\doifelserightpagefloat\strc_floats_build_box_next_right\strc_floats_build_box_next_left}
\def\strc_floats_build_box_next_inner
- {\doifrightpagefloatelse\strc_floats_build_box_next_left\strc_floats_build_box_next_right}
+ {\doifelserightpagefloat\strc_floats_build_box_next_left\strc_floats_build_box_next_right}
\def\strc_floats_build_box_next_right_hang#1%
{\ifconditional\c_strc_floats_par_float \hbox \else \expandafter \strc_floats_align_content \fi
@@ -1510,12 +1625,12 @@
{\strc_floats_build_box_next_left_margin_indeed \leftmargindistance }
\def\strc_floats_build_box_next_outer_margin
- {\doifrightpagefloatelse
+ {\doifelserightpagefloat
{\strc_floats_build_box_next_right_margin_indeed\rightmargindistance}
{\strc_floats_build_box_next_left_margin_indeed \rightmargindistance}}
\def\strc_floats_build_box_next_inner_margin
- {\doifrightpagefloatelse
+ {\doifelserightpagefloat
{\strc_floats_build_box_next_left_margin_indeed \leftmargindistance}
{\strc_floats_build_box_next_right_margin_indeed\leftmargindistance}}
@@ -1552,29 +1667,16 @@
\def\strc_floats_flush_left_caption_hang
{\hsmash{\llap{\box\b_strc_floats_caption\dotfskip{\floatcaptionparameter\c!distance}}}}
-% \def\strc_floats_flush_caption_hang % expanded can go
-% {\expanded{\doifinsetelse{\v!righthanging}{\floatcaptionparameter\c!location}}
-% {\strc_floats_flush_right_caption_hang}
-% {\expanded{\doifinsetelse{\v!lefthanging}{\floatcaptionparameter\c!location}}
-% {\strc_floats_flush_left_caption_hang}
-% {\expanded{\doifinsetelse{\v!hang}{\floatcaptionparameter\c!location}}
-% {\expanded{\doifinsetelse{\v!outer}{\floatcaptionparameter\c!location}}
-% {\doifrightpagefloatelse{\strc_floats_flush_right_caption_hang}{\strc_floats_flush_left_caption_hang}}
-% {\expanded{\doifinsetelse{\v!right}{\floatcaptiondirectives}}
-% {\strc_floats_flush_right_caption_hang}
-% {\strc_floats_flush_left_caption_hang}}}
-% {\box\b_strc_floats_caption}}}}
-
\def\strc_floats_flush_caption_hang % expanded can go
{\edef\p_strc_floats_caption_location{\floatcaptionparameter\c!location}%
- \doifinsetelse\v!righthanging\p_strc_floats_caption_location
+ \doifelseinset\v!righthanging\p_strc_floats_caption_location
{\strc_floats_flush_right_caption_hang}
- {\doifinsetelse\v!lefthanging\p_strc_floats_caption_location
+ {\doifelseinset\v!lefthanging\p_strc_floats_caption_location
{\strc_floats_flush_left_caption_hang}
- {\doifinsetelse\v!hang\p_strc_floats_caption_location
- {\doifinsetelse\v!outer\p_strc_floats_caption_location
- {\doifrightpagefloatelse{\strc_floats_flush_right_caption_hang}{\strc_floats_flush_left_caption_hang}}
- {\doifinsetelse\v!right\floatcaptiondirectives
+ {\doifelseinset\v!hang\p_strc_floats_caption_location
+ {\doifelseinset\v!outer\p_strc_floats_caption_location
+ {\doifelserightpagefloat{\strc_floats_flush_right_caption_hang}{\strc_floats_flush_left_caption_hang}}
+ {\doifelseinset\v!right\floatcaptiondirectives
{\strc_floats_flush_right_caption_hang}
{\strc_floats_flush_left_caption_hang}}}
{\box\b_strc_floats_caption}}}}
@@ -1637,7 +1739,7 @@
\fi}
\def\strc_floats_build_box_top_stack_normal
- {\doifinsetelse\v!overlay{\floatcaptionparameter\c!location}
+ {\doifelseinset\v!overlay{\floatcaptionparameter\c!location}
\strc_floats_build_box_top_stack_normal_overlay
\strc_floats_build_box_top_stack_normal_content}
@@ -1684,7 +1786,7 @@
{\dp\b_strc_floats_caption\strutdepth
\setbox\scratchbox\vbox
{\strc_floats_align_caption{\copy\b_strc_floats_caption}%
- \strc_floats_align_content {\copy\b_strc_floats_content }}%
+ \strc_floats_align_content{\copy\b_strc_floats_content}}%
\getnoflines{\dimexpr\htdp\scratchbox-10\scaledpoint\relax}% get rid of inaccuracy
\vbox to \noflines\lineheight
{\d_strc_float_temp_width\wd\b_strc_floats_content
@@ -1753,8 +1855,8 @@
\processallactionsinset[\floatcaptionparameter\c!location]
[ \v!left=>\let\next\strc_floats_relocate_caption_left,
\v!right=>\let\next\strc_floats_relocate_caption_right,
- \v!inner=>\doifrightpagefloatelse{\let\next\strc_floats_relocate_caption_left }{\let\next\strc_floats_relocate_caption_right},
- \v!outer=>\doifrightpagefloatelse{\let\next\strc_floats_relocate_caption_right}{\let\next\strc_floats_relocate_caption_left }]%
+ \v!inner=>\doifelserightpagefloat{\let\next\strc_floats_relocate_caption_left }{\let\next\strc_floats_relocate_caption_right},
+ \v!outer=>\doifelserightpagefloat{\let\next\strc_floats_relocate_caption_right}{\let\next\strc_floats_relocate_caption_left }]%
\next}
\installfloatboxbuilder \v!none \strc_floats_build_box_default
@@ -1987,7 +2089,7 @@
\definefloat
[\v!graphic]
[\v!graphics]
-
+
% float strategy, replaces some of the above macros
\installcorenamespace{floatmethods}
@@ -1999,7 +2101,7 @@
\let\forcedfloatmethod\empty % set by lua
\def\setfloatmethodvariables#1% \floatmethod \floatlabel \floatrow \floatcolumn
- {\ctxcommand{analysefloatmethod("#1")}}
+ {\clf_analysefloatmethod{#1}}
\def\somesomewherefloat[#1]%
{\page_floats_save_somewhere_float\s!somewhere{#1}}
@@ -2214,6 +2316,7 @@
\installfloatmethod \s!singlecolumn \v!local \somelocalfloat
\installfloatmethod \s!multicolumn \v!local \somelocalfloat
+\installfloatmethod \s!mixedcolumn \v!local \somelocalfloat
\installfloatmethod \s!columnset \v!local \somelocalfloat
\protect \endinput
diff --git a/tex/context/base/strc-ini.lua b/tex/context/base/strc-ini.lua
index 09ed79288..f736427bb 100644
--- a/tex/context/base/strc-ini.lua
+++ b/tex/context/base/strc-ini.lua
@@ -38,14 +38,19 @@ local txtcatcodes = catcodenumbers.txtcatcodes
local context = context
local commands = commands
-local pushcatcodes = context.pushcatcodes
-local popcatcodes = context.popcatcodes
-
local trace_processors = false
local report_processors = logs.reporter("processors","structure")
trackers.register("typesetters.processors", function(v) trace_processors = v end)
+local xmlconvert = lxml.convert
+local xmlstore = lxml.store
+
+local ctx_pushcatcodes = context.pushcatcodes
+local ctx_popcatcodes = context.popcatcodes
+local ctx_xmlsetup = context.xmlsetup
+local ctx_xmlprocessbuffer = context.xmlprocessbuffer
+
-- -- -- namespace -- -- --
-- This is tricky: we have stored and initialized already some of
@@ -68,7 +73,7 @@ structures.itemgroups = structures.itemgroups or { }
structures.specials = structures.specials or { }
structures.counters = structures.counters or { }
structures.tags = structures.tags or { }
-structures.formulas = structures.formulas or { }
+structures.formulas = structures.formulas or { } -- not used but reserved
structures.sets = structures.sets or { }
structures.marks = structures.marks or { }
structures.floats = structures.floats or { }
@@ -151,11 +156,17 @@ local function simplify(d,nodefault)
for k, v in next, d do
local tv = type(v)
if tv == "table" then
- if next(v) then t[k] = simplify(v) end
+ if next(v) then
+ t[k] = simplify(v)
+ end
elseif tv == "string" then
- if v ~= "" and v ~= "default" then t[k] = v end
+ if v ~= "" then
+ t[k] = v
+ end
elseif tv == "boolean" then
- if v then t[k] = v end
+ if v then
+ t[k] = v
+ end
else
t[k] = v
end
@@ -168,6 +179,34 @@ local function simplify(d,nodefault)
end
end
+-- we only care about the tuc file so this would do too:
+--
+-- local function simplify(d,nodefault)
+-- if d then
+-- for k, v in next, d do
+-- local tv = type(v)
+-- if tv == "string" then
+-- if v == "" or v == "default" then
+-- d[k] = nil
+-- end
+-- elseif tv == "table" then
+-- if next(v) then
+-- simplify(v)
+-- end
+-- elseif tv == "boolean" then
+-- if not v then
+-- d[k] = nil
+-- end
+-- end
+-- end
+-- return d
+-- elseif nodefault then
+-- return nil
+-- else
+-- return { }
+-- end
+-- end
+
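+-- Note that this in-place sketch is not quite equivalent to the active version
+-- above: it prunes the given table instead of building a copy, and it still
+-- drops the string "default", which the copying version no longer filters out.
+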
helpers.simplify = simplify
function helpers.merged(...)
@@ -211,19 +250,19 @@ function helpers.title(title,metadata) -- coding is xml is rather old and not th
report_processors("putting xml data in buffer: %s",xmldata)
report_processors("processing buffer with setup %a and tag %a",xmlsetup,tag)
end
- if experiment then
- -- the question is: will this be forgotten ... better store in a via file
- local xmltable = lxml.convert("temp",xmldata or "")
- lxml.store("temp",xmltable)
- context.xmlsetup("temp",xmlsetup or "")
- else
- context.xmlprocessbuffer("dummy",tag,xmlsetup or "")
- end
+ if experiment then
+ -- the question is: will this be forgotten ... better store in a via file
+ local xmltable = xmlconvert("temp",xmldata or "")
+ xmlstore("temp",xmltable)
+ ctx_xmlsetup("temp",xmlsetup or "")
+ else
+ ctx_xmlprocessbuffer("dummy",tag,xmlsetup or "")
+ end
elseif xmlsetup then -- title is reference to node (so \xmlraw should have been used)
if trace_processors then
report_processors("feeding xmlsetup %a using node %a",xmlsetup,title)
end
- context.xmlsetup(title,metadata.xmlsetup)
+ ctx_xmlsetup(title,metadata.xmlsetup)
else
local catcodes = metadata.catcodes
if catcodes == notcatcodes or catcodes == xmlcatcodes then
@@ -241,9 +280,9 @@ function helpers.title(title,metadata) -- coding is xml is rather old and not th
-- doesn't work when a newline is in there \section{Test\ A} so we do
-- it this way:
--
- pushcatcodes(catcodes)
+ ctx_pushcatcodes(catcodes)
context(title)
- popcatcodes()
+ ctx_popcatcodes()
end
end
else
@@ -297,7 +336,8 @@ function sets.getall(namespace,block,name)
end
end
--- messy (will be another keyword, fixedconversion)
+-- messy (will be another keyword, fixedconversion) .. needs to be documented too
+-- maybe we should cache
local splitter = lpeg.splitat("::")
@@ -337,4 +377,8 @@ end
-- interface
-commands.definestructureset = sets.define
+interfaces.implement {
+ name = "definestructureset",
+ actions = sets.define,
+ arguments = { "string", "string", "string", "string", "boolean" }
+}
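+
+-- The implement call above exposes the function as \clf_definestructureset at
+-- the TeX end; judging from the strc-ini.mkvi change below, the four string
+-- arguments arrive as braced groups and the boolean as a literal true/false
+-- followed by \relax.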
diff --git a/tex/context/base/strc-ini.mkvi b/tex/context/base/strc-ini.mkvi
index 8488d1dab..ad83cbc58 100644
--- a/tex/context/base/strc-ini.mkvi
+++ b/tex/context/base/strc-ini.mkvi
@@ -70,12 +70,12 @@
\unexpanded\def\defineconversionset{\dotripleempty\strc_sets_define_conversion_set}
\unexpanded\def\defineprefixset {\dotripleempty\strc_sets_define_prefix_set}
-% Low level versions (no optional checking). The detokenize and escaping might go away.
+% Low level versions (no optional checking). The detokenize might go away.
-\unexpanded\def\strc_sets_define_reset_set [#name][#set][#default]{\ctxcommand{definestructureset("structure:resets", "#name","\luaescapestring{\detokenize{#set}}","\luaescapestring{\detokenize{#default}}",true)}}
-\unexpanded\def\strc_sets_define_separator_set [#name][#set][#default]{\ctxcommand{definestructureset("structure:separators", "#name","\luaescapestring{\detokenize{#set}}","\luaescapestring{\detokenize{#default}}")}}
-\unexpanded\def\strc_sets_define_conversion_set[#name][#set][#default]{\ctxcommand{definestructureset("structure:conversions","#name","\luaescapestring{\detokenize{#set}}","\luaescapestring{\detokenize{#default}}")}}
-\unexpanded\def\strc_sets_define_prefix_set [#name][#set][#default]{\ctxcommand{definestructureset("structure:prefixes", "#name","\luaescapestring{\detokenize{#set}}","\luaescapestring{\detokenize{#default}}")}}
+\unexpanded\def\strc_sets_define_reset_set [#name][#set][#default]{\clf_definestructureset{structure:resets} {#name}{\detokenize{#set}}{\detokenize{#default}}true\relax}
+\unexpanded\def\strc_sets_define_separator_set [#name][#set][#default]{\clf_definestructureset{structure:separators} {#name}{\detokenize{#set}}{\detokenize{#default}}false\relax}
+\unexpanded\def\strc_sets_define_conversion_set[#name][#set][#default]{\clf_definestructureset{structure:conversions}{#name}{\detokenize{#set}}{\detokenize{#default}}false\relax}
+\unexpanded\def\strc_sets_define_prefix_set [#name][#set][#default]{\clf_definestructureset{structure:prefixes} {#name}{\detokenize{#set}}{\detokenize{#default}}false\relax}
\let\definestructureresetset \defineresetset
\let\definestructureseparatorset \defineseparatorset
diff --git a/tex/context/base/strc-itm.lua b/tex/context/base/strc-itm.lua
index 4945c282f..adec591c1 100644
--- a/tex/context/base/strc-itm.lua
+++ b/tex/context/base/strc-itm.lua
@@ -6,33 +6,58 @@ if not modules then modules = { } end modules ['strc-itm'] = {
license = "see context related readme files"
}
-local structures = structures
-local itemgroups = structures.itemgroups
-local jobpasses = job.passes
-
-local setvariable = jobpasses.save
-local getvariable = jobpasses.getfield
-
-function itemgroups.register(name,nofitems,maxwidth)
- setvariable("itemgroup", { nofitems, maxwidth })
+local structures = structures
+local itemgroups = structures.itemgroups
+local jobpasses = job.passes
+
+local implement = interfaces.implement
+
+local setvariable = jobpasses.save
+local getvariable = jobpasses.getfield
+
+local texsetcount = tex.setcount
+local texsetdimen = tex.setdimen
+
+local f_stamp = string.formatters["itemgroup:%s:%s"]
+local counts = table.setmetatableindex("number")
+
+-- We keep the counter at the Lua end so we can group the items within
+-- an itemgroup, which in turn makes for fewer passes when one itemgroup
+-- entry is added or removed (see the sketch after the implement blocks below).
+
+local trialtypesetting = context.trialtypesetting
+
+local function analyzeitemgroup(name,level)
+ local n = counts[name]
+ if level == 1 then
+ n = n + 1
+ counts[name] = n
+ end
+ local stamp = f_stamp(name,n)
+ local n = getvariable(stamp,level,1,0)
+ local w = getvariable(stamp,level,2,0)
+ texsetcount("local","c_strc_itemgroups_max_items",n)
+ texsetdimen("local","d_strc_itemgroups_max_width",w)
end
-function itemgroups.nofitems(name,index)
- return getvariable("itemgroup", index, 1, 0)
+local function registeritemgroup(name,level,nofitems,maxwidth)
+ local n = counts[name]
+ if not trialtypesetting() then
+ -- no trial typesetting
+ setvariable(f_stamp(name,n), { nofitems, maxwidth }, level)
+ elseif level == 1 then
+ counts[name] = n - 1
+ end
end
-function itemgroups.maxwidth(name,index)
- return getvariable("itemgroup", index, 2, 0)
-end
-
--- interface (might become counter/dimension)
-
-commands.registeritemgroup = itemgroups.register
-
-function commands.nofitems(name,index)
- context(getvariable("itemgroup", index, 1, 0))
-end
+implement {
+ name = "analyzeitemgroup",
+ actions = analyzeitemgroup,
+ arguments = { "string", "integer" }
+}
-function commands.maxitemwidth(name,index)
- context(getvariable("itemgroup", index, 2, 0))
-end
+implement {
+ name = "registeritemgroup",
+ actions = registeritemgroup,
+ arguments = { "string", "integer", "integer", "dimen" }
+}
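+
+-- A rough sketch of the bookkeeping above (illustrative name, not part of the
+-- mechanism): each itemgroup name gets its own counter and every instance is
+-- saved under its own stamp, so adding or removing one instance only touches
+-- that entry:
+--
+-- local counts = table.setmetatableindex("number")   -- presumably 0 for unset keys
+-- counts["itemize"] = counts["itemize"] + 1          -- done at nesting level 1
+-- local stamp = f_stamp("itemize",counts["itemize"]) -- "itemgroup:itemize:1"
+-- -- registeritemgroup saves { nofitems, maxwidth } under that stamp per level,
+-- -- analyzeitemgroup reads them back into the max items count and width dimen.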
diff --git a/tex/context/base/strc-itm.mkvi b/tex/context/base/strc-itm.mkvi
index 8259fa38d..86fc9d9fd 100644
--- a/tex/context/base/strc-itm.mkvi
+++ b/tex/context/base/strc-itm.mkvi
@@ -15,6 +15,9 @@
\registerctxluafile{strc-itm}{1.001}
+%D As we analyze/register widths and such, we might as well push and pop the
+%D numbers at the \LUA\ end (which saves a few calls).
+
%D Cleaning up this module happened around the time when Kate Bush came up
%D with the nicest numbered list of words: 50 Words For Snow. It's therefore
%D no surprise that I had that cd running several times when updating this
@@ -203,7 +206,6 @@
\newdimen \d_strc_itemgroups_max_width % multipass
\newcount \c_strc_itemgroups_max_items % multipass
-\newcount \c_strc_itemgroups_n_of_lists
\newcount \c_strc_itemgroups_n_of_items
\newcount \c_strc_itemgroups_nesting
\newcount \c_strc_itemgroups_column_depth
@@ -227,13 +229,10 @@
\let \currentitemgroupsegments \empty
\def\strc_itemgroups_register_status
- {\iftrialtypesetting \else
- \ctxcommand{registeritemgroup("\currentitemgroup",\number\c_strc_itemgroups_n_of_items,"\itemgroupparameter\c!maxwidth")}%
- \fi}
+ {\clf_registeritemgroup{\currentparentitemgroup}\c_strc_itemgroups_nesting\c_strc_itemgroups_n_of_items\dimexpr\itemgroupparameter\c!maxwidth\relax}
\def\strc_itemgroups_check_n_of_items % we could do this at the lua end and save a call (i.e. will be dimen and counter)
- {\c_strc_itemgroups_max_items\ctxcommand{nofitems("\currentitemgroup",\number\c_strc_itemgroups_n_of_lists)}\relax
- \d_strc_itemgroups_max_width\ctxcommand{maxitemwidth("\currentitemgroup",\number\c_strc_itemgroups_n_of_lists)}\scaledpoint
+ {\clf_analyzeitemgroup{\currentparentitemgroup}\c_strc_itemgroups_nesting\relax
\edef\currentnofitems{\the\c_strc_itemgroups_max_items}}
% todo: \dodosetreference -> \strc_counters_register_component (to be checked)
@@ -249,47 +248,51 @@
\fi \fi}
\def\strc_itemgroups_insert_reference_indeed % maybe we need a 'frozen counter' numberdata blob / quick hack .. .mive this to strc-ref
- {%\setnextinternalreference
+ {% needs testing, gave problems:
+ \setnextinternalreference
% no need to collect nodes in \b_strc_destination_nodes here ... maybe at some point
\strc_references_start_destination_nodes
- % this is somewhat over the top ... we should use the counter's reference
- \ctxcommand{setreferenceattribute("\s!full", "\referenceprefix","\currentitemreference",
- {
- metadata = {
- kind = "item",% ?
- catcodes = \the\catcodetable,
- xmlroot = \ifx\currentreferencecoding\s!xml "\xmldocument" \else nil \fi, % only useful when text
- },
- references = {
- % internal = \nextinternalreference, % no: this spoils references
- block = "\currentsectionblock",
- section = structures.sections.currentid(),
- },
- prefixdata = structures.helpers.simplify {
- prefix = "\namedcounterparameter\v_strc_itemgroups_counter\c!prefix",
- separatorset = "\namedcounterparameter\v_strc_itemgroups_counter\c!prefixseparatorset",
- conversion = \!!bs\namedcounterparameter\v_strc_itemgroups_counter\c!prefixconversion\!!es,
- conversionset = "\namedcounterparameter\v_strc_itemgroups_counter\c!prefixconversionset",
- set = "\namedcounterparameter\v_strc_itemgroups_counter\c!prefixset",
- segments = "\namedcounterparameter\v_strc_itemgroups_counter\c!prefixsegments",
- % segments = "\askedprefixsegments",
- connector = \!!bs\namedcounterparameter\v_strc_itemgroups_counter\c!prefixconnector\!!es,
- },
- numberdata = structures.helpers.simplify {
- numbers = structures.counters.compact("\v_strc_itemgroups_counter",nil,true),
- separatorset = "\namedcounterparameter\v_strc_itemgroups_counter\c!numberseparatorset",
- % conversion = "\namedcounterparameter\v_strc_itemgroups_counter\c!numberconversion",
- % conversion = "\currentitemgroupconversionset",
- % conversionset = "\namedcounterparameter\v_strc_itemgroups_counter\c!numberconversionset",
- % todo: fixedconversion = "\currentitemgroupconversionset", % temp hack:
- conversionset = "fixed::\currentitemgroupconversionset",
- %
- % for the moment no stopper, we need to make references configurable first
- % stopper = \!!bs\namedcounterparameter\v_strc_itemgroups_counter\c!numberstopper\!!es,
- segments = "\namedcounterparameter\v_strc_itemgroups_counter\c!numbersegments",
- },
- })
+ % this is somewhat over the top ... we should use the counter's reference
+ \clf_setreferenceattribute
+ {%
+ metadata {%
+ kind {item}% was item, why?
+ \ifx\currentreferencecoding\s!xml
+ xmlroot {\xmldocument}% only useful when text
+ \fi
+ catcodes \catcodetable
+ }%
+ references {%
+ internal \nextinternalreference % no: this spoils references
+ % block {\currentsectionblock}%
+ view {\interactionparameter\c!focus}%
+ prefix {\referenceprefix}%
+ reference {\currentitemreference}%
+ }%
+ prefixdata {%
+ prefix {\namedcounterparameter\v_strc_itemgroups_counter\c!prefix}%
+ separatorset {\namedcounterparameter\v_strc_itemgroups_counter\c!prefixseparatorset}%
+ conversion {\namedcounterparameter\v_strc_itemgroups_counter\c!prefixconversion}%
+ conversionset {\namedcounterparameter\v_strc_itemgroups_counter\c!prefixconversionset}%
+ set {\namedcounterparameter\v_strc_itemgroups_counter\c!prefixset}%
+ segments {\namedcounterparameter\v_strc_itemgroups_counter\c!prefixsegments}%
+ % segments {\askedprefixsegments}%
+ connector {\namedcounterparameter\v_strc_itemgroups_counter\c!prefixconnector}%
+ }%
+ numberdata {%
+ numbers {\v_strc_itemgroups_counter}%
+ separatorset {\namedcounterparameter\v_strc_itemgroups_counter\c!numberseparatorset}%
+ % conversion {\namedcounterparameter\v_strc_itemgroups_counter\c!numberconversion}%
+ % conversionset {\namedcounterparameter\v_strc_itemgroups_counter\c!numberconversionset}%
+ % fixedconversion {\currentitemgroupconversionset}%
+ conversionset {fixed::\currentitemgroupconversionset}% temp hack
+ %
+ % for the moment no stopper, we need to make references configurable first
+ % stopper {\namedcounterparameter\v_strc_itemgroups_counter\c!numberstopper}%
+ segments {\namedcounterparameter\v_strc_itemgroups_counter\c!numbersegments}%
+ }%
}%
+ \relax
\strc_references_stop_destination_nodes
\xdef\currentdestinationattribute{\number\lastdestinationattribute}%
% will become an option:
@@ -331,7 +334,7 @@
\def\strc_itemgroups_store_continue_state#options#settings%
{\setxvalue{\??itemgroupoption \currentitemgroup}{\strc_itemgroups_process_options{#options}}%
- \setgvalue{\??itemgroupsetting\currentitemgroup}{\setupcurrentitemgroup [#settings]}}
+ \setgvalue{\??itemgroupsetting\currentitemgroup}{\setupcurrentitemgroup[#settings]}}
\def\strc_itemgroups_fetch_continue_state
{\getvalue{\??itemgroupoption \currentitemgroup}%
@@ -467,7 +470,7 @@
\let\strc_itemgroups_margin_symbol\empty
\let\strc_itemgroups_extra_symbol\empty
%
- \global\letitemgroupparameter\c!maxwidth\!!zerocount
+ \global\letitemgroupparameter\c!maxwidth\!!zeropoint
}
\setvalue{\??itemgroupfirst\v!intro }{\settrue\c_strc_itemgroups_intro }
@@ -482,10 +485,11 @@
\csname\??itemgroupfirst#option\endcsname
\fi}
-\ifdefined\dotagsetitemgroup \else \let\dotagsetitemgroup\relax \fi
+\ifdefined\dotagsetitemgroup \else \let\dotagsetitemgroup\relax \fi
+\ifdefined\dotagsetitem \else \let\dotagsetitem \gobbleoneargument \fi
\def\strc_itemgroups_tag_start_group
- {\dostarttagged\t!itemgroup\currentitemgroup
+ {\dostarttaggedchained\t!itemgroup\currentparentitemgroup\??itemgroup
\dotagsetitemgroup}
\def\strc_itemgroups_tag_stop_group
@@ -576,7 +580,7 @@
% can be made a bit faster
\def\strc_itemgroups_setup_each#category#whatever%
- {\doifassignmentelse{#whatever}
+ {\doifelseassignment{#whatever}
{\strc_itemgroups_normal_setup[#category][#whatever]}
{\strc_itemgroups_normal_setup[#category][\c!option={#whatever}]}}
@@ -587,7 +591,7 @@
{\strc_itemgroups_normal_setup[#category:#level][\c!option={#whatever}]}
\def\strc_itemgroups_setup_list#subcategories#category#whatever%
- {\doifassignmentelse{#whatever}
+ {\doifelseassignment{#whatever}
{\processcommalist[#subcategories]{\strc_itemgroups_setup_list_level_a{#category}{#whatever}}}
{\processcommalist[#subcategories]{\strc_itemgroups_setup_list_level_b{#category}{#whatever}}}}
@@ -639,9 +643,9 @@
\def\strc_itemgroups_set_symbol#symbol%
{\edef\strc_itemgroups_tmp_symbol{#symbol}%
\ifx\strc_itemgroups_tmp_symbol\empty \else
- \doifsymboldefinedelse\strc_itemgroups_tmp_symbol
+ \doifelsesymboldefined\strc_itemgroups_tmp_symbol
\strc_itemgroups_set_symbol_symbol
- {\doifconversiondefinedelse\strc_itemgroups_tmp_symbol
+ {\doifelseconversiondefined\strc_itemgroups_tmp_symbol
\strc_itemgroups_set_symbol_conversion
\donothing}%
\fi}
@@ -706,7 +710,7 @@
\ifthirdargument
\strc_itemgroups_start_indeed[#options][#settings]%
\else\ifsecondargument
- \doifassignmentelse{#options}
+ \doifelseassignment{#options}
{\strc_itemgroups_start_indeed[][#options]}
{\strc_itemgroups_start_indeed[#options][]}%
\else
@@ -745,7 +749,6 @@
\iftrialtypesetting
\strc_counters_save\v_strc_itemgroups_counter
\fi
- \global\advance\c_strc_itemgroups_n_of_lists\plusone
\c_strc_itemgroups_n_of_items\zerocount
\strc_itemgroups_check_n_of_items
\ifx\itemgroupoptions\empty
@@ -811,7 +814,7 @@
\ifinsidecolumns\else\ifcase\c_strc_itemgroups_column_depth
\global\c_strc_itemgroups_column_depth\c_strc_itemgroups_nesting % global ?
\strc_itemgroups_before_command
- \strc_itemgroups_tag_start_group
+ %\strc_itemgroups_tag_start_group
\strc_itemgroups_start_columns
\fi\fi
\fi
@@ -941,8 +944,7 @@
\fi\fi
% new test, needed in sidefloats (surfaced in volker's proceedings)
\iftrialtypesetting
- \strc_counters_restore\v_strc_itemgroups_counter
- \global\advance\c_strc_itemgroups_n_of_lists\minusone
+ \strc_counters_restore\v_strc_itemgroups_counter % could happen in LUA
\fi
\global\advance\c_strc_itemgroups_nesting\minusone
\xdef\currentitemlevel{\number\c_strc_itemgroups_nesting}%
@@ -965,7 +967,7 @@
{\setfalse\c_strc_itemgroups_first
% \begingroup % (3)
\ifcase\c_strc_itemgroups_nesting
- % 0
+ % 0
\or
\strc_itemgroups_start_item_first_one % 1
\else
@@ -1009,8 +1011,21 @@
\strc_itemgroups_between_command
\fi}
-\unexpanded\def\strc_itemgroups_start_item[#1]% we can reuse more
- {\def\currentitemreference{#1}%
+% c_strc_itemgroups_concat:
+%
+% the problem is that we use leftskip so concat cannot reliably take the height into
+% account; it's .. rather tricky when white space is in there anyway (due to \par) .. so
+% we rely on a special blank method
+%
+% \startitemize[n]
+% \item bla
+% \item \startitemize[a]
+% \item bla $\displaystyle\int^{x^{y^4}}$ \item bla
+% \stopitemize
+% \stopitemize
+
+\unexpanded\def\strc_itemgroups_start_item[#reference]% we can reuse more
+ {\def\currentitemreference{#reference}%
\ifconditional\c_strc_itemgroups_text
% begin of item
\else
@@ -1026,13 +1041,11 @@
\strc_itemgroups_start_item_next
\fi
\ifconditional\c_strc_itemgroups_concat
- % \vskip-\dimexpr\lastskip+\lineheight\relax
- \vskip-\lastskip % we cannot use a \dimexpr here because
- \vskip-\lineheight % then we loose the stretch and shrink
- \nobreak
+ \spac_overlay_lines % see spac-ver.mkvi ... a typical potential problem
\setfalse\c_strc_itemgroups_concat
\fi
\dostarttagged\t!item\empty
+ % \dotagsetitem\empty
\dostarttagged\t!itemtag\empty
\strc_itemgroups_insert_item
\dostoptagged
@@ -1073,17 +1086,24 @@
\fi
\fi
\strc_itemgroups_start_item[#reference]%
- \groupedcommand\strc_itemgroups_start_head_indeed\strc_itemgroups_stop_head_indeed}
+ \pickupgroupedcommand
+ \strc_itemgroups_start_head_indeed
+ \strc_itemgroups_stop_head_indeed
+ \strc_itemgroups_head_body_indeed}
\unexpanded\def\stopitemgrouphead
- {\stopitemgroupitem}
+ {\dostoptagged
+ \stopitemgroupitem}
\unexpanded\def\strc_itemgroups_start_head_indeed
{\settrue\c_strc_itemgroups_head
+ \dotagsetitem\s!head% % weird place
+ \dostarttagged\t!itemhead\empty
\useitemgroupstyleandcolor\c!headstyle\c!headcolor\ignorespaces}
\unexpanded\def\strc_itemgroups_stop_head_indeed
{\removeunwantedspaces
+ \dostoptagged
\ifconditional\c_strc_itemgroups_text
\space
\ignorespaces
@@ -1092,7 +1112,10 @@
\fi
\strc_itemgroups_insert_breakno
\ifconditional\c_strc_itemgroups_pack\else\strc_itemgroups_after_head_command\fi
- \strc_itemgroups_insert_breakno
+ \strc_itemgroups_insert_breakno}
+
+\unexpanded\def\strc_itemgroups_head_body_indeed
+ {\dostarttagged\t!itembody\empty
\noindentation}
% Simple commands.
@@ -1110,30 +1133,37 @@
\unexpanded\def\strc_itemgroups_start_symbol#text%
{\def\strc_itemgroups_extra_symbol{#text}%
\settrue\c_strc_itemgroups_symbol
- \startitemgroupitem}
+ \startitemgroupitem
+ \dotagsetitem\s!symbol}
\unexpanded\def\strc_itemgroups_start_dummy
{\strc_itemgroups_start_symbol\strut\strut} % two ?
\unexpanded\def\strc_itemgroups_start_subitem
{\settrue\c_strc_itemgroups_sub
- \startitemgroupitem}
+ \startitemgroupitem
+ \dotagsetitem\s!sub}
\unexpanded\def\strc_itemgroups_start_edge#text%
{\strc_itemgroups_start_symbol
{\strc_itemgroups_calculate_list_width\c_strc_itemgroups_nesting
+ \dostarttagged\t!ignore\empty % for the moment, maybe an attribute
\hbox to \d_strc_itemgroups_list_width
- {#text\hskip\itemgroupparameter\c!distance}}}
+ {#text\hskip\itemgroupparameter\c!distance}%
+ \dostoptagged}}
\unexpanded\def\strc_itemgroups_start_margin#text%
{\def\strc_itemgroups_margin_symbol % brrr
- {\llap
+ {\dostarttagged\t!ignore\empty % for the moment, maybe an attribute
+ \llap
{\begingroup
\useitemgroupstyleandcolor\c!marstyle\c!marcolor
#text% keep em/ex local
\endgroup
- \hskip\dimexpr\leftskip+\leftmargindistance\relax}}%
- \startitemgroupitem}
+ \hskip\dimexpr\leftskip+\leftmargindistance\relax}%
+ \dostoptagged}%
+ \startitemgroupitem
+ \dotagsetitem\s!margin}
\unexpanded\def\strc_itemgroups_start_text#text%
{\def\strc_itemgroups_extra_symbol{#text}%
@@ -1207,7 +1237,9 @@
{\dontcomplain
\hbox to \d_strc_itemgroups_list_width
{\ifconditional\c_strc_itemgroups_sub
+ \dostarttagged\t!ignore\empty
\llap{+\enspace}%
+ \dostoptagged
\fi
\strc_itemgroups_left_sym_filler
\box\b_strc_itemgroups % can already have a forced widt, only factor handled here
@@ -1217,7 +1249,9 @@
\def\strc_itemgroups_handle_lapped_item_negative
{\llap
{\ifconditional\c_strc_itemgroups_sub
+ \dostarttagged\t!ignore\empty
\llap{+\enspace}%
+ \dostoptagged
\fi
\box\b_strc_itemgroups
\hskip\leftmargindistance}}
@@ -1225,7 +1259,9 @@
\def\strc_itemgroups_handle_groups_text_item
{\hbox
{\ifconditional\c_strc_itemgroups_sub
+ \dostarttagged\t!ignore\empty
+\enspace
+ \dostoptagged
\fi
\box\b_strc_itemgroups
\hskip\interwordspace}%
@@ -1234,7 +1270,9 @@
\def\strc_itemgroups_handle_groups_inline_item
{\hbox to \d_strc_itemgroups_list_width
{\ifconditional\c_strc_itemgroups_sub
+ \dostarttagged\t!ignore\empty
\llap{+\enspace}%
+ \dostoptagged
\fi
\box\b_strc_itemgroups
\hss}} % was: \hfill
@@ -1270,8 +1308,8 @@
\setfalse\c_strc_itemgroups_symbol}
\def\strc_itemgroups_make_fitting_box
- {\ifdim\wd\b_strc_itemgroups>\itemgroupparameter\c!maxwidth\scaledpoint\relax % brr, sp
- \normalexpanded{\global\setitemgroupparameter{\c!maxwidth}{\number\wd\b_strc_itemgroups}}%
+ {\ifdim\wd\b_strc_itemgroups>\itemgroupparameter\c!maxwidth\relax
+ \normalexpanded{\global\setitemgroupparameter{\c!maxwidth}{\the\wd\b_strc_itemgroups}}%
\fi
\ifdim\d_strc_itemgroups_max_width>\zeropoint
\setbox\b_strc_itemgroups\simplealignedbox
@@ -1524,7 +1562,7 @@
{\strc_itemgroups_item_alone}
\def\strc_itemgroups_item_alone[#category]#text\par
- {\doifsomethingelse{#category}{\startitemgroup[#category]}{\startitemgroup[\v!itemize]}%
+ {\doifelsesomething{#category}{\startitemgroup[#category]}{\startitemgroup[\v!itemize]}%
\startitem#text\stopitem
\stopitemgroup}
@@ -1532,7 +1570,7 @@
{\strc_itemgroups_head_alone}
\def\strc_itemgroups_head_alone[#category]#head\par#body\par
- {\doifsomethingelse{#category}{\startitemgroup[#category]}{\startitemgroup[\v!itemize]}%
+ {\doifelsesomething{#category}{\startitemgroup[#category]}{\startitemgroup[\v!itemize]}%
\starthead{#head}#body\stophead
\stopitemgroup}
diff --git a/tex/context/base/strc-lab.mkiv b/tex/context/base/strc-lab.mkiv
index ce4cdcc5e..ac49941aa 100644
--- a/tex/context/base/strc-lab.mkiv
+++ b/tex/context/base/strc-lab.mkiv
@@ -58,10 +58,15 @@
{\normalexpanded{\defineconstruction[#1][#3][\s!handler=\v!label,\c!level=#2]}%
\setevalue{\??label#1:\s!parent}{\??label#3}}%
\ifconditional\c_strc_constructions_define_commands
- \setuevalue{\e!next #1}{\strc_labels_next {#1}{\number#2}}% obsolete
- \setuevalue{\c!reset#1}{\strc_labels_reset {#1}{\number#2}}% obsolete
- %setuevalue{\c!set #1}{\strc_labels_set {#1}{\number#2}}% obsolete
- \setuevalue {#1}{\strc_labels_command{#1}}%
+ \setuevalue{\e!next #1}{\strc_labels_next {#1}{\number#2}}% obsolete
+ \setuevalue{\v!reset #1}{\strc_labels_reset {#1}{\number#2}}% obsolete % should be \e!reset anyway
+ %setuevalue{\c!set #1}{\strc_labels_set {#1}{\number#2}}% obsolete
+ \ifcsname\v!current#1\endcsname
+ % we play safe
+ \else
+ \setuevalue{\v!current#1}{\strc_labels_current{#1}}% % obsolete % should be \e!current anyway
+ \fi
+ \setuevalue {#1}{\strc_labels_command{#1}}%
\fi}
% todo: \strc_labels_command for user
@@ -96,16 +101,19 @@
\ifx\p_counter\empty %
\let\p_counter\currentlabel
\fi
- \doifcounterelse\p_counter\donothing{\strc_labels_define_counter\p_counter}%
+ \doifelsecounter\p_counter\donothing{\strc_labels_define_counter\p_counter}%
\letlabelparameter\s!counter\p_counter
\to \everydefinelabel
\let\p_strc_constructions_title \empty
\let\p_strc_constructions_number\empty
+\newconditional\c_strc_constructions_number_keep
+
\setvalue{\??constructioninitializer\v!label}%
{\let\currentlabel \currentconstruction
\let\constructionparameter \labelparameter
+ \let\constructionnamespace \??label
\let\detokenizedconstructionparameter\detokenizedlabelparameter
\let\letconstructionparameter \letlabelparameter
\let\useconstructionstyleandcolor \uselabelstyleandcolor
@@ -117,7 +125,9 @@
\iftrialtypesetting
\strc_counters_save\currentconstructionnumber
\fi
- \strc_counters_increment_sub\currentconstructionnumber\currentconstructionlevel
+ \ifconditional\c_strc_constructions_number_keep \else
+ \strc_counters_increment_sub\currentconstructionnumber\currentconstructionlevel
+ \fi
\else
\setfalse\c_strc_constructions_number_state
\fi
@@ -137,11 +147,12 @@
%D Interfaces:
-\let\strc_labels_command\strc_descriptions_command
+\unexpanded\def\strc_labels_command{\setfalse\c_strc_constructions_number_keep\strc_descriptions_command}
+\unexpanded\def\strc_labels_current{\settrue \c_strc_constructions_number_keep\strc_descriptions_command}
-\unexpanded\def\strc_labels_next {\strc_constructions_next_indeed \namedlabelparameter} % #1#2
-\unexpanded\def\strc_labels_reset{\strc_constructions_reset_indeed\namedlabelparameter} % #1#2
-%unexpanded\def\strc_labels_set {\strc_constructions_set_indeed \namedlabelparameter} % #1#2
+\unexpanded\def\strc_labels_next {\strc_constructions_next_indeed \namedlabelparameter} % #1#2
+\unexpanded\def\strc_labels_reset {\strc_constructions_reset_indeed\namedlabelparameter} % #1#2
+%unexpanded\def\strc_labels_set {\strc_constructions_set_indeed \namedlabelparameter} % #1#2
% similar to enumerations
diff --git a/tex/context/base/strc-lev.lua b/tex/context/base/strc-lev.lua
index 947889e1e..d7ffd6af4 100644
--- a/tex/context/base/strc-lev.lua
+++ b/tex/context/base/strc-lev.lua
@@ -8,26 +8,30 @@ if not modules then modules = { } end modules ['strc-lev'] = {
local insert, remove = table.insert, table.remove
-local context = context
-local commands = commands
+local context = context
+local interfaces = interfaces
-local sections = structures.sections
-local default = interfaces.variables.default
+local sections = structures.sections
+local implement = interfaces.implement
-sections.levels = sections.levels or { }
+local v_default = interfaces.variables.default
-local level, levels, categories = 0, sections.levels, { }
+sections.levels = sections.levels or { }
-storage.register("structures/sections/levels", levels, "structures.sections.levels")
+local level = 0
+local levels = sections.levels
+local categories = { }
local f_two_colon = string.formatters["%s:%s"]
-function commands.definesectionlevels(category,list)
+storage.register("structures/sections/levels", levels, "structures.sections.levels")
+
+local function definesectionlevels(category,list)
levels[category] = utilities.parsers.settings_to_array(list)
end
-function commands.startsectionlevel(category)
- category = category ~= "" and category or default
+local function startsectionlevel(category)
+ category = category ~= "" and category or v_default
level = level + 1
local lc = levels[category]
if not lc or level > #lc then
@@ -38,7 +42,7 @@ function commands.startsectionlevel(category)
insert(categories,category)
end
-function commands.stopsectionlevel()
+local function stopsectionlevel()
local category = remove(categories)
if category then
local lc = levels[category]
@@ -52,3 +56,20 @@ function commands.stopsectionlevel()
-- error
end
end
+
+implement {
+ name = "definesectionlevels",
+ actions = definesectionlevels,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "startsectionlevel",
+ actions = startsectionlevel,
+ arguments = "string"
+}
+
+implement {
+ name = "stopsectionlevel",
+ actions = stopsectionlevel,
+}
diff --git a/tex/context/base/strc-lev.mkvi b/tex/context/base/strc-lev.mkvi
index b3ec2a2d5..dae3f28e3 100644
--- a/tex/context/base/strc-lev.mkvi
+++ b/tex/context/base/strc-lev.mkvi
@@ -26,17 +26,17 @@
\unexpanded\def\definesectionlevels{\dodoubleargument\strc_levels_define}
\unexpanded\def\startsectionlevel {\dosingleempty \strc_levels_start }
-\unexpanded\def\strc_levels_define[#category][#list]{\ctxcommand{definesectionlevels("#category","#list")}}
-\unexpanded\def\strc_levels_start [#category]{\ctxcommand{startsectionlevel("#category")}}
-\unexpanded\def\stopsectionlevel {\ctxcommand{stopsectionlevel()}}
+\unexpanded\def\strc_levels_define[#category][#list]{\clf_definesectionlevels{#category}{#list}}
+\unexpanded\def\strc_levels_start [#category]{\clf_startsectionlevel{#category}}
+\unexpanded\def\stopsectionlevel {\clf_stopsectionlevel}
\unexpanded\def\nostarthead{\dotripleargument\strc_levels_start_nop} % used at the lua end
\unexpanded\def\nostophead {\dosingleargument\strc_levels_stop_nop } % used at the lua end
\unexpanded\def\strc_levels_start[#category]%
- {\doifassignmentelse{#category}
- {\ctxcommand{startsectionlevel("\v!default")}[#category]}
- {\ctxcommand{startsectionlevel("#category")}}}
+ {\doifelseassignment{#category}
+ {\clf_startsectionlevel{\v!default}[#category]}
+ {\clf_startsectionlevel{#category}}}
\unexpanded\def\strc_levels_start_nop[#category][#settings][#userdata]%
{\blank
diff --git a/tex/context/base/strc-lnt.mkvi b/tex/context/base/strc-lnt.mkvi
index f84521002..ee7d5dd88 100644
--- a/tex/context/base/strc-lnt.mkvi
+++ b/tex/context/base/strc-lnt.mkvi
@@ -92,10 +92,10 @@
\def\page_lines_in_to {\in[lr:e:\currentlinenotereference]}
\unexpanded\def\strc_linenotes_range_normal#1% order
- {\doifreferencefoundelse{lr:b:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_from
+ {\doifelsereferencefound{lr:b:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_from
\ifconditional\c_page_lines_current_from
\xdef\m_page_lines_current_from{\currentreferencelinenumber}%
- \doifreferencefoundelse{lr:e:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_to
+ \doifelsereferencefound{lr:e:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_to
\ifconditional\c_page_lines_current_to
\xdef\m_page_lines_current_to{\currentreferencelinenumber}%
\page_lines_in_from
@@ -111,10 +111,10 @@
\fi}
\unexpanded\def\strc_linenotes_range_sparse#1% order
- {\doifreferencefoundelse{lr:b:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_from
+ {\doifelsereferencefound{lr:b:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_from
\ifconditional\c_page_lines_current_from
\xdef\m_page_lines_current_from{\currentreferencelinenumber}%
- \doifreferencefoundelse{lr:e:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_to
+ \doifelsereferencefound{lr:e:\currentlinenotereference}\settrue\setfalse\c_page_lines_current_to
\ifconditional\c_page_lines_current_to
\xdef\m_page_lines_current_to{\currentreferencelinenumber}%
\ifx\m_page_lines_previous_from\m_page_lines_current_from
diff --git a/tex/context/base/strc-lst.lua b/tex/context/base/strc-lst.lua
index d86368b6a..fd79bbd7a 100644
--- a/tex/context/base/strc-lst.lua
+++ b/tex/context/base/strc-lst.lua
@@ -15,12 +15,16 @@ if not modules then modules = { } end modules ['strc-lst'] = {
--
-- move more to commands
-local format, gmatch, gsub = string.format, string.gmatch, string.gsub
-local tonumber = tonumber
-local concat, insert, remove = table.concat, table.insert, table.remove
+local tonumber, type = tonumber, type
+local concat, insert, remove, sort = table.concat, table.insert, table.remove, table.sort
local lpegmatch = lpeg.match
-local simple_hash_to_string, settings_to_hash = utilities.parsers.simple_hash_to_string, utilities.parsers.settings_to_hash
-local allocate, checked = utilities.storage.allocate, utilities.storage.checked
+
+local setmetatableindex = table.setmetatableindex
+local sortedkeys = table.sortedkeys
+
+local settings_to_set = utilities.parsers.settings_to_set
+local allocate = utilities.storage.allocate
+local checked = utilities.storage.checked
local trace_lists = false trackers.register("structures.lists", function(v) trace_lists = v end)
@@ -28,44 +32,72 @@ local report_lists = logs.reporter("structure","lists")
local context = context
local commands = commands
-
-local texgetcount = tex.getcount
+local implement = interfaces.implement
local structures = structures
local lists = structures.lists
local sections = structures.sections
local helpers = structures.helpers
local documents = structures.documents
-local pages = structures.pages
local tags = structures.tags
+local counters = structures.counters
local references = structures.references
local collected = allocate()
local tobesaved = allocate()
local cached = allocate()
local pushed = allocate()
+local kinds = allocate()
+local names = allocate()
lists.collected = collected
lists.tobesaved = tobesaved
lists.enhancers = lists.enhancers or { }
-lists.internals = allocate(lists.internals or { }) -- to be checked
+-----.internals = allocate(lists.internals or { }) -- to be checked
lists.ordered = allocate(lists.ordered or { }) -- to be checked
lists.cached = cached
lists.pushed = pushed
+lists.kinds = kinds
+lists.names = names
+
+local sorters = sorters
+local sortstripper = sorters.strip
+local sortsplitter = sorters.splitters.utf
+local sortcomparer = sorters.comparers.basic
local sectionblocks = allocate()
lists.sectionblocks = sectionblocks
references.specials = references.specials or { }
-local variables = interfaces.variables
local matchingtilldepth = sections.matchingtilldepth
local numberatdepth = sections.numberatdepth
+local getsectionlevel = sections.getlevel
+local typesetnumber = sections.typesetnumber
+local autosectiondepth = sections.autodepth
--- -- -- -- -- --
+local variables = interfaces.variables
-local function zerostrippedconcat(t,separator) -- for the moment not public
+local v_all = variables.all
+local v_reference = variables.reference
+local v_title = variables.title
+local v_number = variables.reference
+local v_command = variables.command
+local v_text = variables.text
+local v_current = variables.current
+local v_previous = variables.previous
+local v_next = variables.next
+local v_intro = variables.intro
+local v_here = variables.here
+local v_component = variables.component
+local v_reference = variables.reference
+local v_local = variables["local"]
+local v_default = variables.default
+
+-- for the moment not public --
+
+local function zerostrippedconcat(t,separator)
local f, l = 1, #t
for i=f,l do
if t[i] == 0 then
@@ -85,10 +117,11 @@ end
local function initializer()
-- create a cross reference between internal references
-- and list entries
- local collected = lists.collected
- local internals = checked(references.internals)
- local ordered = lists.ordered
- local blockdone = { }
+ local collected = lists.collected
+ local internals = checked(references.internals)
+ local ordered = lists.ordered
+ local usedinternals = references.usedinternals
+ local blockdone = { }
for i=1,#collected do
local c = collected[i]
local m = c.metadata
@@ -99,6 +132,7 @@ local function initializer()
local internal = r.internal
if internal then
internals[internal] = c
+ usedinternals[internal] = r.used
end
local block = r.block
if block and not blockdone[block] then
@@ -107,7 +141,8 @@ local function initializer()
end
end
-- access by order in list
- local kind, name = m.kind, m.name
+ local kind = m.kind
+ local name = m.name
if kind and name then
local ok = ordered[kind]
if ok then
@@ -120,6 +155,12 @@ local function initializer()
else
ordered[kind] = { [name] = { c } }
end
+ kinds[kind] = true
+ names[name] = true
+ elseif kind then
+ kinds[kind] = true
+ elseif name then
+ names[name] = true
end
end
if r then
@@ -128,9 +169,24 @@ local function initializer()
end
end
-job.register('structures.lists.collected', tobesaved, initializer)
+local function finalizer()
+ local flaginternals = references.flaginternals
+ local usedviews = references.usedviews
+ for i=1,#tobesaved do
+ local r = tobesaved[i].references
+ if r then
+ local i = r.internal
+ local f = flaginternals[i]
+ if f then
+ r.used = usedviews[i] or true
+ end
+ end
+ end
+end
+
+job.register('structures.lists.collected', tobesaved, initializer, finalizer)
-local groupindices = table.setmetatableindex("table")
+local groupindices = setmetatableindex("table")
function lists.groupindex(name,group)
local groupindex = groupindices[name]
@@ -139,15 +195,24 @@ end
-- we could use t (as hash key) in order to check for dup entries
-function lists.addto(t)
- local m = t.metadata
- local u = t.userdata
- if u and type(u) == "string" then
- t.userdata = helpers.touserdata(u) -- nicer at the tex end
- end
+function lists.addto(t) -- maybe do more here (saves parsing at the tex end)
+ local metadata = t.metadata
+ local userdata = t.userdata
local numberdata = t.numberdata
+ if userdata and type(userdata) == "string" then
+ t.userdata = helpers.touserdata(userdata)
+ end
+ if not metadata.level then
+ metadata.level = structures.sections.currentlevel() -- this is not used so it will go away
+ end
+ if numberdata then
+ local numbers = numberdata.numbers
+ if type(numbers) == "string" then
+ numberdata.numbers = counters.compact(numbers,nil,true)
+ end
+ end
local group = numberdata and numberdata.group
- local name = m.name
+ local name = metadata.name
if not group then
-- forget about it
elseif group == "" then
@@ -158,7 +223,14 @@ function lists.addto(t)
numberdata.numbers = cached[groupindex].numberdata.numbers
end
end
+ local setcomponent = references.setcomponent
+ if setcomponent then
+ setcomponent(t) -- can be inlined
+ end
local r = t.references
+ if r and not r.section then
+ r.section = structures.sections.currentid()
+ end
local i = r and r.internal or 0 -- brrr
local p = pushed[i]
if not p then
@@ -167,10 +239,6 @@ function lists.addto(t)
pushed[i] = p
r.listindex = p
end
- local setcomponent = references.setcomponent
- if setcomponent then
- setcomponent(t) -- might move to the tex end
- end
if group then
groupindices[name][group] = p
end
@@ -204,6 +272,11 @@ end
local enhanced = { }
+local synchronizepage = function(r) -- bah ... will move
+ synchronizepage = references.synchronizepage
+ return synchronizepage(r)
+end
+
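+-- The wrapper above rebinds itself on first call; presumably this is done
+-- because references.synchronizepage is not yet available when this file is
+-- loaded, so the lookup is deferred until the first entry gets enhanced.
+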
function lists.enhance(n)
local l = cached[n]
if not l then
@@ -220,7 +293,7 @@ function lists.enhance(n)
-- save in the right order (happens at shipout)
lists.tobesaved[#lists.tobesaved+1] = l
-- default enhancer (cross referencing)
- references.realpage = texgetcount("realpageno")
+ synchronizepage(references)
-- tags
local kind = metadata.kind
local name = metadata.name
@@ -250,51 +323,88 @@ end
local nesting = { }
function lists.pushnesting(i)
- local parent = lists.result[i]
- local name = parent.metadata.name
+ local parent = lists.result[i]
+ local name = parent.metadata.name
local numberdata = parent and parent.numberdata
- local numbers = numberdata and numberdata.numbers
- local number = numbers and numbers[sections.getlevel(name)] or 0
- insert(nesting, { number = number, name = name, result = lists.result, parent = parent })
+ local numbers = numberdata and numberdata.numbers
+ local number = numbers and numbers[getsectionlevel(name)] or 0
+ insert(nesting, {
+ number = number,
+ name = name,
+ result = lists.result,
+ parent = parent
+ })
end
function lists.popnesting()
local old = remove(nesting)
- lists.result = old.result
+ if old then
+ lists.result = old.result
+ else
+ report_lists("nesting error")
+ end
end
--- will be split
-
-- Historically we had blocks but in the mkiv approach that could as well be a level
-- which would simplify things a bit.
-local splitter = lpeg.splitat(":")
+local splitter = lpeg.splitat(":") -- maybe also :: or have a block parameter
--- this will become filtercollected(specification) and then we'll also have sectionblock as key
-
-local sorters = {
- [variables.command] = function(a,b)
+local listsorters = {
+ [v_command] = function(a,b)
if a.metadata.kind == "command" or b.metadata.kind == "command" then
return a.references.internal < b.references.internal
else
return a.references.order < b.references.order
end
end,
- [variables.all] = function(a,b)
+ [v_all] = function(a,b)
return a.references.internal < b.references.internal
end,
+ [v_title] = function(a,b)
+ local da = a.titledata
+ local db = b.titledata
+ if da and db then
+ local ta = da.title
+ local tb = db.title
+ if ta and tb then
+ local sa = da.split
+ if not sa then
+ sa = sortsplitter(sortstripper(ta))
+ da.split = sa
+ end
+ local sb = db.split
+ if not sb then
+ sb = sortsplitter(sortstripper(tb))
+ db.split = sb
+ end
+ return sortcomparer(da,db) == -1
+ end
+ end
+ return a.references.internal < b.references.internal
+ end
}
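+-- Note that the title sorter builds its key lazily: the stripped, split title is
+-- cached in titledata.split on first comparison, and entries without a title fall
+-- back to the internal reference order.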
--- some day soon we will pass a table .. also split the function
+-- was: names, criterium, number, collected, forced, nested, sortorder
+
+local filters = setmetatableindex(function(t,k) return t[v_default] end)
-local function filtercollected(names, criterium, number, collected, forced, nested, sortorder) -- names is hash or string
- local numbers, depth = documents.data.numbers, documents.data.depth
- local result, nofresult, detail = { }, 0, nil
- local block = false -- all
- criterium = gsub(criterium or ""," ","") -- not needed
- -- new, will be applied stepwise
+local function filtercollected(specification)
+ --
+ local names = specification.names or { }
+ local criterium = specification.criterium or v_default
+ local number = 0 -- specification.number
+ local reference = specification.reference or ""
+ local collected = specification.collected or lists.collected
+ local forced = specification.forced or { }
+ local nested = specification.nested or false
+ local sortorder = specification.sortorder or specification.order
+ --
+ local numbers = documents.data.numbers
+ local depth = documents.data.depth
+ local block = false -- all
local wantedblock, wantedcriterium = lpegmatch(splitter,criterium) -- block:criterium
- if wantedblock == "" or wantedblock == variables.all or wantedblock == variables.text then
+ if wantedblock == "" or wantedblock == v_all or wantedblock == v_text then
criterium = wantedcriterium ~= "" and wantedcriterium or criterium
elseif not wantedcriterium then
block = documents.data.block
@@ -304,236 +414,393 @@ local function filtercollected(names, criterium, number, collected, forced, nest
if block == "" then
block = false
end
--- print(">>",block,criterium)
- --
- forced = forced or { } -- todo: also on other branched, for the moment only needed for bookmarks
if type(names) == "string" then
- names = settings_to_hash(names)
+ names = settings_to_set(names)
end
- local all = not next(names) or names[variables.all] or false
+ local all = not next(names) or names[v_all] or false
+ --
+ specification.names = names
+ specification.criterium = criterium
+ specification.number = 0 -- obsolete
+ specification.reference = reference -- new
+ specification.collected = collected
+ specification.forced = forced -- todo: also on other branches, for the moment only needed for bookmarks
+ specification.nested = nested
+ specification.sortorder = sortorder
+ specification.numbers = numbers
+ specification.depth = depth
+ specification.block = block
+ specification.all = all
+ --
if trace_lists then
- report_lists("filtering names %a, criterium %a, block %a, number %a",names,criterium,block or "*",number)
+ report_lists("filtering names %,t, criterium %a, block %a",sortedkeys(names), criterium, block or "*")
end
- if criterium == variables.intro then
- -- special case, no structure yet
- for i=1,#collected do
- local v = collected[i]
+ local result = filters[criterium](specification)
+ if trace_lists then
+ report_lists("criterium %a, block %a, found %a",specification.criterium, specification.block or "*", #result)
+ end
+ --
+ if sortorder then -- experiment
+ local sorter = listsorters[sortorder]
+ if sorter then
+ if trace_lists then
+ report_lists("sorting list using method %a",sortorder)
+ end
+ for i=1,#result do
+ result[i].references.order = i
+ end
+ sort(result,sorter)
+ end
+ end
+ --
+ return result
+end
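
-- A minimal usage sketch with made-up values: the old positional call
-- filtercollected(names,criterium,number,collected,forced,nested,sortorder)
-- becomes one specification table, and the criterium field selects a handler
-- from the filters table; unknown criteria fall back to filters[v_default]
-- through the setmetatableindex constructor above. (This assumes the english
-- interface strings and a regular mkiv run.)

local sketch = filtercollected {
    names     = "chapter,section", -- hash or comma separated string
    criterium = "all",             -- dispatched as filters[criterium]
    reference = "",                -- replaces the old 'number' argument
    sortorder = false,             -- optional, looked up in listsorters
}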
+
+filters[v_intro] = function(specification)
+ local collected = specification.collected
+ local result = { }
+ local nofresult = #result
+ local all = specification.all
+ local names = specification.names
+ for i=1,#collected do
+ local v = collected[i]
+ local metadata = v.metadata
+ if metadata and (all or names[metadata.name or false]) then
local r = v.references
if r and r.section == 0 then
nofresult = nofresult + 1
result[nofresult] = v
end
end
- elseif all or criterium == variables.all or criterium == variables.text then
- for i=1,#collected do
- local v = collected[i]
- local r = v.references
- if r and (not block or not r.block or block == r.block) then
- local metadata = v.metadata
- if metadata then
- local name = metadata.name or false
- local sectionnumber = (r.section == 0) or sections.collected[r.section]
- if forced[name] or (sectionnumber and not metadata.nolist and (all or names[name])) then -- and not sectionnumber.hidenumber then
- nofresult = nofresult + 1
- result[nofresult] = v
- end
- end
- end
- end
- elseif criterium == variables.current then
- if depth == 0 then
- return filtercollected(names,variables.intro,number,collected,forced,false,sortorder)
- else
+ end
+ return result
+end
+
+filters[v_reference] = function(specification)
+ local collected = specification.collected
+ local result = { }
+ local nofresult = #result
+ local names = specification.names
+ local sections = sections.collected
+ local reference = specification.reference
+ if reference ~= "" then
+ local prefix, rest = lpegmatch(references.prefixsplitter,reference) -- p::r
+ local r = prefix and rest and references.derived[prefix][rest] or references.derived[""][reference]
+ local s = r and r.numberdata -- table ref !
+ if s then
+ local depth = getsectionlevel(r.metadata.name)
+ local numbers = s.numbers
for i=1,#collected do
local v = collected[i]
local r = v.references
if r and (not block or not r.block or block == r.block) then
- local sectionnumber = sections.collected[r.section]
- if sectionnumber then -- and not sectionnumber.hidenumber then
- local cnumbers = sectionnumber.numbers
- local metadata = v.metadata
- if cnumbers then
- if metadata and not metadata.nolist and (all or names[metadata.name or false]) and #cnumbers > depth then
- local ok = true
- for d=1,depth do
- local cnd = cnumbers[d]
- if not (cnd == 0 or cnd == numbers[d]) then
- ok = false
- break
- end
- end
- if ok then
- nofresult = nofresult + 1
- result[nofresult] = v
- end
+ local metadata = v.metadata
+ if metadata and names[metadata.name or false] then
+ local sectionnumber = (r.section == 0) or sections[r.section]
+ if sectionnumber then
+ if matchingtilldepth(depth,numbers,sectionnumber.numbers) then
+ nofresult = nofresult + 1
+ result[nofresult] = v
end
end
end
end
end
- end
- elseif criterium == variables.here then
- -- this is quite dirty ... as cnumbers is not sparse we can misuse #cnumbers
- if depth == 0 then
- return filtercollected(names,variables.intro,number,collected,forced,false,sortorder)
else
- for i=1,#collected do
- local v = collected[i]
- local r = v.references
- if r then -- and (not block or not r.block or block == r.block) then
- local sectionnumber = sections.collected[r.section]
- if sectionnumber then -- and not sectionnumber.hidenumber then
- local cnumbers = sectionnumber.numbers
- local metadata = v.metadata
- if cnumbers then
- if metadata and not metadata.nolist and (all or names[metadata.name or false]) and #cnumbers >= depth then
- local ok = true
- for d=1,depth do
- local cnd = cnumbers[d]
- if not (cnd == 0 or cnd == numbers[d]) then
- ok = false
- break
- end
- end
- if ok then
- nofresult = nofresult + 1
- result[nofresult] = v
- end
- end
- end
- end
+ report_lists("unknown reference %a specified",reference)
+ end
+ else
+ report_lists("no reference specified")
+ end
+ return result
+end
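
-- Sketch only, the label is invented: with criterium "reference" entries are
-- matched against a section reference; a "prefix::name" form is split with
-- references.prefixsplitter and resolved through references.derived, and an
-- entry is kept when its section numbers match the referenced section up to
-- that section's level (matchingtilldepth).

local insection = filtercollected {
    names     = "figure",
    criterium = "reference", -- assuming the english interface string
    reference = "sec:intro", -- hypothetical section reference
}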
+
+filters[v_all] = function(specification)
+ local collected = specification.collected
+ local result = { }
+ local nofresult = #result
+ local block = specification.block
+ local all = specification.all
+ local forced = specification.forced
+ local names = specification.names
+ local sections = sections.collected
+ for i=1,#collected do
+ local v = collected[i]
+ local r = v.references
+ if r and (not block or not r.block or block == r.block) then
+ local metadata = v.metadata
+ if metadata then
+ local name = metadata.name or false
+ local sectionnumber = (r.section == 0) or sections[r.section]
+ if forced[name] or (sectionnumber and not metadata.nolist and (all or names[name])) then -- and not sectionnumber.hidenumber then
+ nofresult = nofresult + 1
+ result[nofresult] = v
end
end
end
- elseif criterium == variables.previous then
- if depth == 0 then
- return filtercollected(names,variables.intro,number,collected,forced,false,sortorder)
- else
- for i=1,#collected do
- local v = collected[i]
- local r = v.references
- if r and (not block or not r.block or block == r.block) then
- local sectionnumber = sections.collected[r.section]
- if sectionnumber then -- and not sectionnumber.hidenumber then
- local cnumbers = sectionnumber.numbers
- local metadata = v.metadata
- if cnumbers then
- if metadata and not metadata.nolist and (all or names[metadata.name or false]) and #cnumbers >= depth then
- local ok = true
- for d=1,depth-1 do
- local cnd = cnumbers[d]
- if not (cnd == 0 or cnd == numbers[d]) then
- ok = false
- break
- end
- end
- if ok then
- nofresult = nofresult + 1
- result[nofresult] = v
- end
+ end
+ return result
+end
+
+filters[v_text] = filters[v_all]
+
+filters[v_current] = function(specification)
+ if specification.depth == 0 then
+ specification.nested = false
+ specification.criterium = v_intro
+ return filters[v_intro](specification)
+ end
+ local collected = specification.collected
+ local result = { }
+ local nofresult = #result
+ local depth = specification.depth
+ local block = specification.block
+ local all = specification.all
+ local names = specification.names
+ local numbers = specification.numbers
+ local sections = sections.collected
+ for i=1,#collected do
+ local v = collected[i]
+ local r = v.references
+ if r and (not block or not r.block or block == r.block) then
+ local sectionnumber = sections[r.section]
+ if sectionnumber then -- and not sectionnumber.hidenumber then
+ local cnumbers = sectionnumber.numbers
+ local metadata = v.metadata
+ if cnumbers then
+ if metadata and not metadata.nolist and (all or names[metadata.name or false]) and #cnumbers > depth then
+ local ok = true
+ for d=1,depth do
+ local cnd = cnumbers[d]
+ if not (cnd == 0 or cnd == numbers[d]) then
+ ok = false
+ break
end
end
+ if ok then
+ nofresult = nofresult + 1
+ result[nofresult] = v
+ end
end
end
end
end
- elseif criterium == variables["local"] then -- not yet ok
- local nested = nesting[#nesting]
- if nested then
- return filtercollected(names,nested.name,nested.number,collected,forced,nested,sortorder)
- elseif sections.autodepth(documents.data.numbers) == 0 then
- return filtercollected(names,variables.all,number,collected,forced,false,sortorder)
- else
- return filtercollected(names,variables.current,number,collected,forced,false,sortorder)
- end
- elseif criterium == variables.component then
- -- special case, no structure yet
- local component = resolvers.jobs.currentcomponent() or ""
- if component ~= "" then
- for i=1,#collected do
- local v = collected[i]
- local r = v.references
- local m = v.metadata
- if r and r.component == component and (m and names[m.name] or all) then
- nofresult = nofresult + 1
- result[nofresult] = v
+ end
+ return result
+end
+
+filters[v_here] = function(specification)
+ -- this is quite dirty ... as cnumbers is not sparse we can misuse #cnumbers
+ if specification.depth == 0 then
+ specification.nested = false
+ specification.criterium = v_intro
+ return filters[v_intro](specification)
+ end
+ local collected = specification.collected
+ local result = { }
+ local nofresult = #result
+ local depth = specification.depth
+ local block = specification.block
+ local all = specification.all
+ local names = specification.names
+ local numbers = specification.numbers
+ local sections = sections.collected
+ for i=1,#collected do
+ local v = collected[i]
+ local r = v.references
+ if r then -- and (not block or not r.block or block == r.block) then
+ local sectionnumber = sections[r.section]
+ if sectionnumber then -- and not sectionnumber.hidenumber then
+ local cnumbers = sectionnumber.numbers
+ local metadata = v.metadata
+ if cnumbers then
+ if metadata and not metadata.nolist and (all or names[metadata.name or false]) and #cnumbers >= depth then
+ local ok = true
+ for d=1,depth do
+ local cnd = cnumbers[d]
+ if not (cnd == 0 or cnd == numbers[d]) then
+ ok = false
+ break
+ end
+ end
+ if ok then
+ nofresult = nofresult + 1
+ result[nofresult] = v
+ end
+ end
end
end
end
- else -- sectionname, number
- -- not the same as register
- local depth = sections.getlevel(criterium)
- local number = tonumber(number) or numberatdepth(depth) or 0
- if trace_lists then
- local t = sections.numbers()
- detail = format("depth %s, number %s, numbers %s, startset %s",depth,number,(#t>0 and concat(t,".",1,depth)) or "?",#collected)
- end
- if number > 0 then
- local pnumbers = nil
- local pblock = block
- local parent = nested and nested.parent
- if parent then
- pnumbers = parent.numberdata.numbers or pnumbers -- so local as well as nested
- pblock = parent.references.block or pblock
- end
- for i=1,#collected do
- local v = collected[i]
- local r = v.references
- if r and (not block or not r.block or pblock == r.block) then
- local sectionnumber = sections.collected[r.section]
- if sectionnumber then
- local metadata = v.metadata
- local cnumbers = sectionnumber.numbers
- if cnumbers then
- if (all or names[metadata.name or false]) and #cnumbers >= depth and matchingtilldepth(depth,cnumbers,pnumbers) then
- nofresult = nofresult + 1
- result[nofresult] = v
+ end
+ return result
+end
+
+filters[v_previous] = function(specification)
+ if specification.depth == 0 then
+ specification.nested = false
+ specification.criterium = v_intro
+ return filters[v_intro](specification)
+ end
+ local collected = specification.collected
+ local result = { }
+ local nofresult = #result
+ local block = specification.block
+ local all = specification.all
+ local names = specification.names
+ local numbers = specification.numbers
+ local sections = sections.collected
+ local depth = specification.depth
+ for i=1,#collected do
+ local v = collected[i]
+ local r = v.references
+ if r and (not block or not r.block or block == r.block) then
+ local sectionnumber = sections[r.section]
+ if sectionnumber then -- and not sectionnumber.hidenumber then
+ local cnumbers = sectionnumber.numbers
+ local metadata = v.metadata
+ if cnumbers then
+ if metadata and not metadata.nolist and (all or names[metadata.name or false]) and #cnumbers >= depth then
+ local ok = true
+ for d=1,depth-1 do
+ local cnd = cnumbers[d]
+ if not (cnd == 0 or cnd == numbers[d]) then
+ ok = false
+ break
end
end
+ if ok then
+ nofresult = nofresult + 1
+ result[nofresult] = v
+ end
end
end
end
end
end
- if trace_lists then
- report_lists("criterium %a, block %a, found %a, detail %a",criterium,block or "*",#result,detail)
+ return result
+end
+
+filters[v_local] = function(specification)
+ local numbers = specification.numbers
+ local nested = nesting[#nesting]
+ if nested then
+ return filtercollected {
+ names = specification.names,
+ criterium = nested.name,
+ collected = specification.collected,
+ forced = specification.forced,
+ nested = nested,
+ sortorder = specification.sortorder,
+ }
+ else
+ specification.criterium = autosectiondepth(numbers) == 0 and v_all or v_current
+ specification.nested = false
+ return filtercollected(specification) -- rechecks, so better (for determining all)
end
+end
- if sortorder then -- experiment
- local sorter = sorters[sortorder]
- if sorter then
- if trace_lists then
- report_lists("sorting list using method %a",sortorder)
- end
- for i=1,#result do
- result[i].references.order = i
+
+filters[v_component] = function(specification)
+ -- special case, no structure yet
+ local collected = specification.collected
+ local result = { }
+ local nofresult = #result
+ local all = specification.all
+ local names = specification.names
+ local component = resolvers.jobs.currentcomponent() or ""
+ if component ~= "" then
+ for i=1,#collected do
+ local v = collected[i]
+ local r = v.references
+ local m = v.metadata
+ if r and r.component == component and (m and names[m.name] or all) then
+ nofresult = nofresult + 1
+ result[nofresult] = v
end
- table.sort(result,sorter)
end
end
-
return result
end
-lists.filtercollected = filtercollected
+-- local number = tonumber(number) or numberatdepth(depth) or 0
+-- if number > 0 then
+-- ...
+-- end
-function lists.filter(specification)
- return filtercollected(
- specification.names,
- specification.criterium,
- specification.number,
- lists.collected,
- specification.forced,
- false,
- specification.order
- )
+filters[v_default] = function(specification) -- is named
+ local collected = specification.collected
+ local result = { }
+ local nofresult = #result
+ ----- depth = specification.depth
+ local block = specification.block
+ local criterium = specification.criterium
+ local all = specification.all
+ local names = specification.names
+ local numbers = specification.numbers
+ local sections = sections.collected
+ local reference = specification.reference
+ local nested = specification.nested
+ --
+ if reference then
+ reference = tonumber(reference)
+ end
+ --
+ local depth = getsectionlevel(criterium)
+ local pnumbers = nil
+ local pblock = block
+ local parent = nested and nested.parent
+ --
+ if parent then
+ pnumbers = parent.numberdata.numbers or pnumbers -- so local as well as nested
+ pblock = parent.references.block or pblock
+ if trace_lists then
+ report_lists("filtering by block %a and section %a",pblock,criterium)
+ end
+ end
+ --
+ for i=1,#collected do
+ local v = collected[i]
+ local r = v.references
+ if r and (not block or not r.block or pblock == r.block) then
+ local sectionnumber = sections[r.section]
+ if sectionnumber then
+ local metadata = v.metadata
+ local cnumbers = sectionnumber.numbers
+ if cnumbers then
+ if all or names[metadata.name or false] then
+ if reference then
+ -- filter by number
+ if reference == cnumbers[depth] then
+ nofresult = nofresult + 1
+ result[nofresult] = v
+ end
+ else
+ if #cnumbers >= depth and matchingtilldepth(depth,cnumbers,pnumbers) then
+ nofresult = nofresult + 1
+ result[nofresult] = v
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ return result
end
+-- names, criterium, number, collected, forced, nested, sortorder) -- names is hash or string
+
+lists.filter = filtercollected
+
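-- Sketch of the named fallback, values are examples only: a criterium that is
-- not one of the predefined keys ends up in filters[v_default] and is taken to
-- be a section name (getsectionlevel); a numeric reference then keeps only the
-- entries whose section number at that level matches.

local figuresintwo = lists.filter {
    names     = "figure",
    criterium = "chapter", -- a section name rather than a fixed keyword
    reference = "2",       -- tonumber'd and compared against cnumbers[depth]
}
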
lists.result = { }
+function lists.getresult(r)
+ return lists.result[r]
+end
+
function lists.process(specification)
- lists.result = lists.filter(specification)
- local specials = utilities.parsers.settings_to_hash(specification.extras or "")
+ lists.result = filtercollected(specification)
+ local specials = settings_to_set(specification.extras or "")
specials = next(specials) and specials or nil
for i=1,#lists.result do
local r = lists.result[i]
@@ -544,7 +811,7 @@ function lists.process(specification)
end
function lists.analyze(specification)
- lists.result = lists.filter(specification)
+ lists.result = filtercollected(specification)
end
function lists.userdata(name,r,tag) -- to tex (todo: xml)
@@ -584,7 +851,7 @@ function lists.sectionnumber(name,n,spec)
local data = lists.result[n]
local sectiondata = sections.collected[data.references.section]
-- hm, prefixnumber?
- sections.typesetnumber(sectiondata,"prefix",spec,sectiondata) -- data happens to contain the spec too
+ typesetnumber(sectiondata,"prefix",spec,sectiondata) -- data happens to contain the spec too
end
-- some basics (todo: helpers for pages)
@@ -661,18 +928,18 @@ function lists.number(name,n,spec)
if data then
local numberdata = data.numberdata
if numberdata then
- sections.typesetnumber(numberdata,"number",spec or false,numberdata or false)
+ typesetnumber(numberdata,"number",spec or false,numberdata or false)
end
end
end
-function lists.prefixednumber(name,n,prefixspec,numberspec)
+function lists.prefixednumber(name,n,prefixspec,numberspec,forceddata)
local data = lists.result[n]
if data then
helpers.prefix(data,prefixspec)
- local numberdata = data.numberdata
+ local numberdata = data.numberdata or forceddata
if numberdata then
- sections.typesetnumber(numberdata,"number",numberspec or false,numberdata or false)
+ typesetnumber(numberdata,"number",numberspec or false,numberdata or false)
end
end
end
@@ -701,29 +968,175 @@ end
-- interface (maybe strclistpush etc)
-commands.pushlist = lists.pushnesting
-commands.poplist = lists.popnesting
-commands.enhancelist = lists.enhance
-commands.processlist = lists.process
-commands.analyzelist = lists.analyze
-commands.listtitle = lists.title
-commands.listprefixednumber = lists.prefixednumber
-commands.listprefixedpage = lists.prefixedpage
+if not lists.reordered then
+ function lists.reordered(data)
+ return data.numberdata
+ end
+end
+
+implement { name = "pushlist", actions = lists.pushnesting, arguments = "integer" }
+implement { name = "poplist", actions = lists.popnesting }
+
+implement {
+ name = "addtolist",
+ actions = { lists.addto, context },
+ arguments = {
+ {
+ { "references", {
+ { "internal", "integer" },
+ { "block" },
+ { "section", "integer" },
+ { "location" },
+ { "prefix" },
+ { "reference" },
+ { "order", "integer" },
+ }
+ },
+ { "metadata", {
+ { "kind" },
+ { "name" },
+ { "level", "integer" },
+ { "catcodes", "integer" },
+ { "coding" },
+ { "xmlroot" },
+ { "setup" },
+ }
+ },
+ { "userdata" },
+ { "titledata", {
+ { "label" },
+ { "title" },
+ { "bookmark" },
+ { "marking" },
+ { "list" },
+ }
+ },
+ { "prefixdata", {
+ { "prefix" },
+ { "separatorset" },
+ { "conversionset" },
+ { "conversion" },
+ { "set" },
+ { "segments" },
+ { "connector" },
+ }
+ },
+ { "numberdata", {
+ { "numbers" },
+ { "groupsuffix" },
+ { "group" },
+ { "counter" },
+ { "separatorset" },
+ { "conversionset" },
+ { "conversion" },
+ { "starter" },
+ { "stopper" },
+ { "segments" },
+ }
+ }
+ }
+ }
+}
+
+implement {
+ name = "enhancelist",
+ actions = lists.enhance,
+ arguments = "integer"
+}
+implement {
+ name = "processlist",
+ actions = lists.process,
+ arguments = {
+ {
+ { "names" },
+ { "criterium" },
+ { "reference" },
+ { "extras" },
+ { "order" },
+ }
+ }
+}
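
-- Each implement call registers a Lua function that the TeX end reaches as
-- \clf_<name>; a nested arguments template describes the keyword syntax that
-- is scanned there (compare \clf_processlist names {...} criterium {...} ...
-- \relax in strc-lst.mkvi), and a table of actions is run as a chain with each
-- result fed to the next. A hedged, minimal example with an invented name:

implement {
    name      = "tracelistquery", -- hypothetical, for illustration only
    arguments = { { { "names" }, { "criterium" } } },
    actions   = function(t)
        report_lists("query names %a, criterium %a",t.names,t.criterium)
    end,
}
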
-function commands.addtolist (...) context(lists.addto (...)) end
-function commands.listsize (...) context(lists.size (...)) end
-function commands.listlocation (...) context(lists.location (...)) end
-function commands.listlabel (...) context(lists.label (...)) end
-function commands.listrealpage (...) context(lists.realpage (...)) end
-function commands.listgroupindex (...) context(lists.groupindex(...)) end
+implement {
+ name = "analyzelist",
+ actions = lists.analyze,
+ arguments = {
+ {
+ { "names" },
+ { "criterium" },
+ { "reference" },
+ }
+ }
+}
-function commands.currentsectiontolist()
- context(lists.addto(sections.current()))
-end
+implement {
+ name = "listtitle",
+ actions = lists.title,
+ arguments = { "string", "integer" }
+}
+
+implement {
+ name = "listprefixednumber",
+ actions = lists.prefixednumber,
+ arguments = {
+ "string",
+ "integer",
+ {
+ { "prefix" },
+ { "separatorset" },
+ { "conversionset" },
+ { "starter" },
+ { "stopper" },
+ { "set" },
+ { "segments" },
+ { "connector" },
+ },
+ {
+ { "separatorset" },
+ { "conversionset" },
+ { "starter" },
+ { "stopper" },
+ { "segments" },
+ }
+ }
+}
+
+implement {
+ name = "listprefixedpage",
+ actions = lists.prefixedpage,
+ arguments = {
+ "string",
+ "integer",
+ {
+ { "separatorset" },
+ { "conversionset" },
+ { "set" },
+ { "segments" },
+ { "connector" },
+ },
+ {
+ { "prefix" },
+ { "conversionset" },
+ { "starter" },
+ { "stopper" },
+ }
+ }
+}
+
+implement { name = "listsize", actions = { lists.size, context } }
+implement { name = "listlocation", actions = { lists.location, context }, arguments = "integer" }
+implement { name = "listlabel", actions = { lists.label, context }, arguments = { "integer", "string" } }
+implement { name = "listrealpage", actions = { lists.realpage, context }, arguments = { "string", "integer" } }
+implement { name = "listgroupindex", actions = { lists.groupindex, context }, arguments = { "string", "string" } }
+
+implement {
+ name = "currentsectiontolist",
+ actions = { sections.current, lists.addto, context }
+}
-function commands.listuserdata(...)
- local str, metadata = lists.userdata(...)
+local function userdata(name,r,tag)
+ local str, metadata = lists.userdata(name,r,tag)
if str then
-- local catcodes = metadata and metadata.catcodes
-- if catcodes then
@@ -735,25 +1148,21 @@ function commands.listuserdata(...)
end
end
+implement {
+ name = "listuserdata",
+ actions = userdata,
+ arguments = { "string", "integer", "string" }
+}
+
-- we could also set variables .. names will change (when this module is done)
-- maybe strc_lists_savedtitle etc
-function commands.doiflisthastitleelse (...) commands.doifelse(lists.hastitledata (...)) end
-function commands.doiflisthaspageelse (...) commands.doifelse(lists.haspagedata (...)) end
-function commands.doiflisthasnumberelse(...) commands.doifelse(lists.hasnumberdata(...)) end
-function commands.doiflisthasentry (n) commands.doifelse(lists.iscached (n )) end
+implement { name = "doifelselisthastitle", actions = { lists.hastitledata, commands.doifelse }, arguments = { "string", "integer" } }
+implement { name = "doifelselisthaspage", actions = { lists.haspagedata, commands.doifelse }, arguments = { "string", "integer" } }
+implement { name = "doifelselisthasnumber", actions = { lists.hasnumberdata, commands.doifelse }, arguments = { "string", "integer" } }
+implement { name = "doifelselisthasentry", actions = { lists.iscached, commands.doifelse }, arguments = { "integer" } }
-function commands.savedlistnumber(name,n)
- local data = cached[tonumber(n)]
- if data then
- local numberdata = data.numberdata
- if numberdata then
- sections.typesetnumber(numberdata,"number",numberdata or false)
- end
- end
-end
-
-function commands.savedlisttitle(name,n,tag)
+local function savedlisttitle(name,n,tag)
local data = cached[tonumber(n)]
if data then
local titledata = data.titledata
@@ -763,39 +1172,56 @@ function commands.savedlisttitle(name,n,tag)
end
end
--- function commands.savedlistprefixednumber(name,n)
--- local data = cached[tonumber(n)]
--- if data then
--- local numberdata = data.numberdata
--- if numberdata then
--- helpers.prefix(data,data.prefixdata)
--- sections.typesetnumber(numberdata,"number",numberdata or false)
--- end
--- end
--- end
-
-if not lists.reordered then
- function lists.reordered(data)
- return data.numberdata
+local function savedlistnumber(name,n)
+ local data = cached[tonumber(n)]
+ if data then
+ local numberdata = data.numberdata
+ if numberdata then
+ typesetnumber(numberdata,"number",numberdata or false)
+ end
end
end
-function commands.savedlistprefixednumber(name,n)
+local function savedlistprefixednumber(name,n)
local data = cached[tonumber(n)]
if data then
local numberdata = lists.reordered(data)
if numberdata then
helpers.prefix(data,data.prefixdata)
- sections.typesetnumber(numberdata,"number",numberdata or false)
+ typesetnumber(numberdata,"number",numberdata or false)
end
end
end
-commands.discardfromlist = lists.discard
+lists.savedlisttitle = savedlisttitle
+lists.savedlistnumber = savedlistnumber
+lists.savedlistprefixednumber = savedlistprefixednumber
--- new and experimental and therefore off by default
+implement {
+ name = "savedlistnumber",
+ actions = savedlistnumber,
+ arguments = { "string", "integer" }
+}
-local sort, setmetatableindex = table.sort, table.setmetatableindex
+implement {
+ name = "savedlisttitle",
+ actions = savedlisttitle,
+ arguments = { "string", "integer" }
+}
+
+implement {
+ name = "savedlistprefixednumber",
+ actions = savedlistprefixednumber,
+ arguments = { "string", "integer" }
+}
+
+implement {
+ name = "discardfromlist",
+ actions = lists.discard,
+ arguments = { "integer" }
+}
+
+-- new and experimental and therefore off by default
lists.autoreorder = false -- true
diff --git a/tex/context/base/strc-lst.mkvi b/tex/context/base/strc-lst.mkvi
index 63c3e030a..71fc09829 100644
--- a/tex/context/base/strc-lst.mkvi
+++ b/tex/context/base/strc-lst.mkvi
@@ -47,7 +47,7 @@
\c!state=\v!start,
\c!coupling=\v!off,
\c!criterium=\v!local,
- \c!number=\zerocount,
+ \c!reference=,% was number which was sort of obsolete
\c!width=3\emwidth,
%\c!maxwidth=,
\c!distance=\zeropoint,
@@ -94,6 +94,8 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doiflistelse\doifelselist
+
%D Regular list entries are bound to a specific location in order to
%D get the right pagenumber etc.\ associated. When pushing something
%D in between (in mkiv) it ends up directly in the list. This is the
@@ -125,37 +127,48 @@
\def\strc_lists_inject_nop[#dummya][#dummyb]%
{\endgroup}
-\def\strc_lists_inject_yes[#settings][#userdata]%
+\unexpanded\def\strc_lists_inject_enhance#listindex#internal%
+ {\normalexpanded{\ctxlatecommand{enhancelist(#listindex)}}}
+
+\unexpanded\def\strc_lists_inject_yes[#settings][#userdata]% can be used directly
{\setupcurrentlist[\c!type=userdata,\c!location=\v!none,#settings]% grouped (use \let...
\edef\p_location{\listparameter\c!location}%
\setnextinternalreference
- \edef\currentlistnumber{\ctxcommand{addtolist{
- references = {
- internal = \nextinternalreference,
- block = "\currentsectionblock", % handy for lists, like bibl
- section = structures.sections.currentid(),
- % location = "\p_location",
- },
- metadata = {
- kind = "\listparameter\c!type",
- name = "\currentlist",
- level = structures.sections.currentlevel(),
- catcodes = \the\catcodetable,
- },
- userdata = \!!bs\detokenize{#userdata}\!!es % will be converted to table at the lua end
- }}}%
+ \scratchcounter\clf_addtolist
+ references {
+ internal \nextinternalreference
+ % block {\currentsectionblock}
+ % section structures.sections.currentid()
+ % location {\p_location}
+ }
+ metadata {
+ kind {\listparameter\c!type}
+ name {\currentlist}
+ % level structures.sections.currentlevel()
+ catcodes \catcodetable
+ }
+ userdata {\detokenize\expandafter{\normalexpanded{#userdata}}}
+ \relax
+ \edef\currentlistnumber{\the\scratchcounter}%
\ifx\p_location\v!here
% this branch injects nodes !
- \expanded{\ctxlatecommand{enhancelist(\currentlistnumber)}}%
- \ctxlua{structures.references.setinternalreference(nil,nil,\nextinternalreference)}% will change
- \xdef\currentstructurelistattribute{\number\lastdestinationattribute}%
+ \strc_lists_inject_enhance{\currentlistnumber}{\nextinternalreference}%
+ \clf_setinternalreference internal \nextinternalreference\relax % this will change
+ \xdef\currentstructurelistattribute{\the\lastdestinationattribute}%
\dontleavehmode\hbox attr \destinationattribute \lastdestinationattribute{}% todo
\else
% and this one doesn't
- \ctxcommand{enhancelist(\currentlistnumber)}%
+ \clf_enhancelist\currentlistnumber\relax
\fi
\endgroup}
+% todo: make like \strc_references_direct_full_user ... with {}{}{}
+
+\unexpanded\def\strc_lists_inject_direct[#tag]% [#settings][#userdata]
+ {\begingroup
+ \edef\currentlist{#tag}%
+ \strc_lists_inject_yes} % [#settings][#userdata]
+
\unexpanded\def\writebetweenlist{\dodoubleempty \strc_lists_write_between}
\unexpanded\def\writedatatolist {\dotripleargument\strc_lists_write_data_to}
\unexpanded\def\writetolist {\dodoubleempty \strc_lists_write_to}
@@ -244,7 +257,7 @@
\strc_lists_place_current % maybe inline
{#list}%
{\listparameter\c!criterium}%
- {\listparameter\c!number}%
+ {\listparameter\c!reference}%
{\listparameter\c!extras}%
{\listparameter\c!order}%
% \stoplistreferences
@@ -288,7 +301,7 @@
{\dotripleempty\strc_lists_combined_define}
\def\strc_lists_combined_define[#tag][#list][#settings]%
- {\definelist[#tag][\c!criterium=\v!local,\c!number=0,\c!alternative=,\c!list={#list},#settings]% inherits from root
+ {\definelist[#tag][\c!criterium=\v!local,\c!reference=,\c!alternative=,\c!list={#list},#settings]% inherits from root
\setvalue{\e!setup #tag\e!endsetup}{\dodoubleempty\strc_lists_combined_setup [#tag]}%
\setvalue{\e!place #tag}{\dodoubleempty\strc_lists_combined_place [#tag]}%
\setvalue{\e!complete #tag}{\dodoubleempty\strc_lists_combined_complete[#tag]}}
@@ -324,42 +337,46 @@
\def\currentstructurelistnumber{0} % injection
\def\currentlistmethod {entry} % typesetting
-\def\currentlistindex {0} % typesetting
+\def\currentlistindex {0} % typesetting (maybe also a real counter)
+
+\unexpanded\def\savedlistnumber #1#2{\clf_savedlistnumber {#1}\numexpr#2\relax}
+\unexpanded\def\savedlisttitle #1#2{\clf_savedlisttitle {#1}\numexpr#2\relax}
+\unexpanded\def\savedlistprefixednumber#1#2{\clf_savedlistprefixednumber{#1}\numexpr#2\relax}
\def\structurelistlocation
- {\ctxcommand{listlocation(\currentlistindex)}}
+ {\clf_listlocation\numexpr\currentlistindex\relax}
\def\structurelistrealpagenumber
- {\ctxcommand{listrealpage("\currentlist",\currentlistindex)}}
+ {\clf_listrealpage{\currentlist}\numexpr\currentlistindex\relax}
\unexpanded\def\structurelistpagenumber
{\dostarttagged\t!listpage\empty
- \ctxcommand{listprefixedpage(
- "\currentlist",
- \currentlistindex,
- {
- separatorset = "\listparameter\c!pageprefixseparatorset",
- conversionset = "\listparameter\c!pageprefixconversionset",
- set = "\listparameter\c!pageprefixset",
- segments = "\listparameter\c!pageprefixsegments",
- connector = \!!bs\listparameter\c!pageprefixconnector\!!es,
- },
- {
- prefix = "\listparameter\c!pageprefix",
- conversionset = "\listparameter\c!pageconversionset",
- starter = \!!bs\listparameter\c!pagestarter\!!es,
- stopper = \!!bs\listparameter\c!pagestopper\!!es,
+ \clf_listprefixedpage
+ {\currentlist}
+ \currentlistindex
+ {
+ separatorset {\listparameter\c!pageprefixseparatorset}
+ conversionset {\listparameter\c!pageprefixconversionset}
+ set {\listparameter\c!pageprefixset}
+ segments {\listparameter\c!pageprefixsegments}
+ connector {\listparameter\c!pageprefixconnector}
+ }
+ {
+ prefix {\listparameter\c!pageprefix}
+ conversionset {\listparameter\c!pageconversionset}
+ starter {\listparameter\c!pagestarter}
+ stopper {\listparameter\c!pagestopper}
}
- )}%
+ \relax
\dostoptagged}
\unexpanded\def\structurelistuservariable#name%
{\dostarttagged\t!listdata{#name}%
- \ctxcommand{listuserdata("\currentlist",\currentlistindex,"#name")}%
+ \clf_listuserdata{\currentlist}\currentlistindex{#name}%
\dostoptagged}
\def\rawstructurelistuservariable#name%
- {\ctxcommand{listuserdata("\currentlist",\currentlistindex,"#name")}}
+ {\clf_listuserdata{\currentlist}\currentlistindex{#name}}
\unexpanded\def\structurelistfirst {\structurelistuservariable\s!first } % s!
\unexpanded\def\structurelistsecond{\structurelistuservariable\s!second} % s!
@@ -367,63 +384,70 @@
\def\rawstructurelistfirst {\rawstructurelistuservariable\s!first } % s! % was \unexpanded
\def\rawstructurelistsecond{\rawstructurelistuservariable\s!second} % s! % was \unexpanded
-\unexpanded\def\doifstructurelisthaspageelse
- {\ctxcommand{doiflisthaspageelse("\currentlist",\currentlistindex)}}
+\unexpanded\def\doifelsestructurelisthaspage
+ {\clf_doifelselisthaspage{\currentlist}\numexpr\currentlistindex\relax}
-\unexpanded\def\doifstructurelisthasnumberelse
- {\ctxcommand{doiflisthasnumberelse("\currentlist",\currentlistindex)}}
+\unexpanded\def\doifelsestructurelisthasnumber
+ {\clf_doifelselisthasnumber{\currentlist}\numexpr\currentlistindex\relax}
+
+\let\doifstructurelisthaspageelse \doifelsestructurelisthaspage
+\let\doifstructurelisthasnumberelse\doifelsestructurelisthasnumber
\unexpanded\def\structurelistgenerictitle
{\dostarttagged\t!listcontent\empty
- \ctxcommand{listtitle("\currentlist",\currentlistindex)}%
+ \clf_listtitle{\currentlist}\currentlistindex\relax
\dostoptagged}
\unexpanded\def\structurelistgenericnumber % tricky, we need to delay tagging as we have nested lua calls
{\dostarttagged\t!listtag\empty
- \ctxcommand{listprefixednumber("\currentlist",\currentlistindex, {
- prefix = "\listparameter\c!prefix",
- separatorset = "\listparameter\c!prefixseparatorset",
- conversionset = "\listparameter\c!prefixconversionset",
- starter = \!!bs\listparameter\c!prefixstarter\!!es,
- stopper = \!!bs\listparameter\c!prefixstopper\!!es,
- set = "\listparameter\c!prefixset",
- segments = "\listparameter\c!prefixsegments",
- connector = \!!bs\listparameter\c!prefixconnector\!!es,
- },
- {
- separatorset = "\listparameter\c!numberseparatorset",
- conversionset = "\listparameter\c!numberconversionset",
- starter = \!!bs\listparameter\c!numberstarter\!!es,
- stopper = \!!bs\listparameter\c!numberstopper\!!es,
- segments = "\listparameter\c!numbersegments",
- } )}%
+ \clf_listprefixednumber
+ {\currentlist}%
+ \currentlistindex
+ {%
+ prefix {\listparameter\c!prefix}%
+ separatorset {\listparameter\c!prefixseparatorset}%
+ conversionset {\listparameter\c!prefixconversionset}%
+ starter {\listparameter\c!prefixstarter}%
+ stopper {\listparameter\c!prefixstopper}%
+ set {\listparameter\c!prefixset}%
+ segments {\listparameter\c!prefixsegments}%
+ connector {\listparameter\c!prefixconnector}%
+ }%
+ {%
+ separatorset {\listparameter\c!numberseparatorset}%
+ conversionset {\listparameter\c!numberconversionset}%
+ starter {\listparameter\c!numberstarter}%
+ stopper {\listparameter\c!numberstopper}%
+ segments {\listparameter\c!numbersegments}%
+ }%
+ \relax
\dostoptagged}
% TODO: pass extra tag name (contents, figures, bibliography ...)
-\unexpanded\def\strc_lists_place_current#list#criterium#number#extras#order% beware, not a user command
- {\dostarttagged\t!list\empty
- \ctxcommand{processlist{
- names = "#list",
- criterium = "#criterium",
- number = "#number",
- extras = "#extras",
- order = "#order"
- }}%
+\unexpanded\def\strc_lists_place_current#list#criterium#reference#extras#order% beware, not a user command
+ {\dostarttaggedchained\t!list\empty\??list
+ \clf_processlist
+ names {#list}
+ criterium {#criterium}
+ reference {#reference}
+ extras {#extras}
+ order {#order}
+ \relax
\dostoptagged}
-\unexpanded\def\strc_lists_analyze#list#criterium#number%
- {\ctxcommand{analyzelist{
- names = "#list",
- criterium = "#criterium",
- number = "#number"
- }}}
+\unexpanded\def\strc_lists_analyze#list#criterium#reference%
+ {\clf_analyzelist
+ names {#list}
+ criterium {#criterium}
+ reference {#reference}
+ \relax}
-\def\firststructureelementinlist#list%
- {\ctxcommand{firstinset("#list")}}
+\def\firststructureelementinlist#list% expandable
+ {\clf_firstinset{#list}}
\def\structurelistsize
- {\ctxcommand{listsize()}}
+ {\clf_listsize}
%D Depending on what kind of list we have (e.g.\ a section related one)
%D processors can be defined.
@@ -451,14 +475,12 @@
\def\usestructurelistprocessor#tag%
{\csname\??structurelistprocessor#tag\endcsname}
-\unexpanded\def\strclistsentryprocess#tag#method#index#extra% This one is called at the lua end!
- {\ctxcommand{pushlist(#index)}%
- \edef\currentlist {#tag}%
- \edef\currentlistmethod{#method}%
- \edef\currentlistindex {#index}%
- \edef\currentlistextra {#extra}%
- \listextraparameter\c!before
+\let\dotaglistlocation\relax
+
+\def\strc_lists_entry_process % assume things to be set up
+ {\listextraparameter\c!before
\dostarttagged\t!listitem\currentlist
+ \dotaglistlocation
\csname\??structurelistprocessor
\ifcsname\??structurelistprocessor\currentlist:\currentlistmethod\endcsname\currentlist:\currentlistmethod\else
\ifcsname\??structurelistprocessor\currentlistmethod \endcsname\currentlistmethod \else
@@ -466,8 +488,16 @@
\s!default \fi\fi\fi
\endcsname
\dostoptagged
- \listextraparameter\c!after
- \ctxcommand{poplist()}}
+ \listextraparameter\c!after}
+
+\unexpanded\def\strclistsentryprocess#tag#method#index#extra% This one is called at the lua end!
+ {\clf_pushlist#index\relax
+ \edef\currentlist {#tag}%
+ \edef\currentlistmethod{#method}%
+ \edef\currentlistindex {#index}%
+ \edef\currentlistextra {#extra}%
+ \strc_lists_entry_process
+ \clf_poplist}
% lists that have a number/title are kind of generic and can share code
@@ -481,7 +511,7 @@
\strc_lists_apply_renderingsetup}
\installstructurelistprocessor\s!command
- {\ctxcommand{listuserdata("\currentlist",\currentlistindex,"\s!command")}}
+ {\clf_listuserdata{\currentlist}\currentlistindex{\s!command}}
\installstructurelistprocessor{section}
{\let\currentlistentrynumber \structurelistgenericnumber
@@ -498,7 +528,7 @@
% example of usage elsewhere:
%
% \installstructcurelistprocessor{pubs:userdata}
-% {\ctxcommand{listuserdata("\currentlist",\currentlistindex,"bibref")}}
+% {\clf_listuserdata{\currentlist}\currentlistindex{bibref}}
%D List symbols are used in interactive documents where no numbers
%D are used but nevertheless structure is present. Beware, the list
@@ -520,7 +550,7 @@
\def\strc_lists_assign_dimen#dimension#key#default%
{\edef\m_strc_list_dimen{\listparameter#key}%
- \doifinsetelse\m_strc_list_dimen{\v!fit,\v!broad}{#dimension#default}{#dimension\m_strc_list_dimen}\relax}
+ \doifelseinset\m_strc_list_dimen{\v!fit,\v!broad}{#dimension#default}{#dimension\m_strc_list_dimen}\relax}
\definesymbol[\v!list][\v!none ][\strc_lists_symbol_none]
\definesymbol[\v!list][\v!one ][\strc_lists_symbol_one]
@@ -531,7 +561,7 @@
\unexpanded\def\currentlistsymbol
{\edef\p_symbol{\listparameter\c!symbol}%
- \doifinsymbolsetelse\v!list\p_symbol
+ \doifelseinsymbolset\v!list\p_symbol
{\directsymbol\v!list\p_symbol}
{\directsymbol\v!list\s!default}}
@@ -589,7 +619,7 @@
{\currentlistentrynumber}
\setvalue{\??listsymbollabels\v!yes}% auto (use value stored in tuc file)
- {\edef\currentlistlabel{\ctxcommand{listlabel(\currentlistindex,"\currentlistlabel")}}%
+ {\edef\currentlistlabel{\clf_listlabel\currentlistindex{\currentlistlabel}}%
\leftlabeltext\currentlistlabel
\listparameter\c!starter
\currentlistentrynumber
@@ -700,7 +730,7 @@
\let\p_method\v!command
\fi
\normalexpanded{\definelistalternative[#tag][\p_method]}[\c!command=\strc_lists_placement_command]%
- \doifnextbgroupelse
+ \doifelsenextbgroup
{\strc_lists_define_placement_yes{#tag}}
{\strc_lists_define_placement_nop{#tag}}}
@@ -747,7 +777,7 @@
% better is to use a special list entry but we keep this for compatibility
\let\\=\space
% so expanding this token register has to come *after* the font switch
- \dontconvertfont
+ \dontconvertfont % (**) this has to become an option (see publ)
\to \t_lists_every_renderingtext
\appendtoks
@@ -764,7 +794,7 @@
\settrue\c_lists_has_page
\settrue\c_lists_show_page
\else
- \doifstructurelisthaspageelse\settrue\setfalse\c_lists_has_page
+ \doifelsestructurelisthaspage\settrue\setfalse\c_lists_has_page
\ifx\p_pagenumber\v!yes
\settrue\c_lists_show_page
\else
@@ -777,7 +807,7 @@
\settrue\c_lists_has_number
\settrue\c_lists_show_number
\else
- \doifstructurelisthasnumberelse\settrue\setfalse\c_lists_has_number
+ \doifelsestructurelisthasnumber\settrue\setfalse\c_lists_has_number
\ifx\p_headnumber\v!yes
\settrue\c_lists_show_number
\else
@@ -818,6 +848,8 @@
% todo: provide packager via attributes
+\doinstallinjector\s!list
+
\installcorenamespace{listalternativemethods} % the general wrapper of a rendering
\startsetups[\??listrenderings:none]
@@ -845,12 +877,15 @@
% \stopsetups
\startsetups[\??listrenderings:generic]
+ \typo_injectors_check_list
\listparameter\c!before % can be \hskip
\edef\p_command{\listalternativeparameter\c!command}
\ifx\p_command\empty
\listalternativeparameter\c!before
\vbox {
\forgetall
+ \noindent % otherwise annotations are mirrored up
+ \typo_injectors_mark_list
\hbox \strc_lists_get_reference_attribute\v!all {
\ifconditional\c_lists_show_number
% \ifconditional\c_lists_has_page
@@ -878,6 +913,8 @@
}
\listalternativeparameter\c!after
\else
+ \noindent % otherwise annotations are mirrored up
+\typo_injectors_mark_list
\hbox \strc_lists_get_reference_attribute\v!all \strc_lists_get_destination_attribute {
\p_command\currentlistentrynumber\currentlistentrytitle\currentlistentrypagenumber
}
@@ -889,6 +926,8 @@
\startsetups[\??listrenderings:abc]
\endgraf % are we grouped?
+ \typo_injectors_check_list
+ % \advance % yes or no ... \rightskip is also honored
\leftskip\listparameter\c!margin % after \endgraf !
\listparameter\c!before
\endgraf
@@ -907,7 +946,8 @@
\scratchwidth\p_width
\fi
\fi\fi
- \noindent
+ \noindent % otherwise annotations are mirrored up
+ \typo_injectors_mark_list
\hbox \strc_lists_get_reference_attribute\v!all \strc_lists_get_destination_attribute {
\setlocalhsize
\hsize\localhsize
@@ -917,11 +957,19 @@
\scratchhsize\hsize
\ifconditional\c_lists_has_number
\ifconditional\c_lists_show_number
- \setbox\b_strc_lists_number\hbox \strc_lists_get_reference_attribute\v!number \ifdim\scratchwidth>\zeropoint to \scratchwidth \fi {
- \strc_lists_set_style_color\c!numberstyle\c!numbercolor\v!number
- \listparameter\c!numbercommand\currentlistsymbol
- \hfill
- }
+ \setbox\b_strc_lists_number
+% \hbox
+% \strc_lists_get_reference_attribute\v!number
+% \ifdim\scratchwidth>\zeropoint to \scratchwidth \fi
+ \simplealignedboxplus
+ \scratchwidth
+ {\listparameter\c!numberalign}
+ {\strc_lists_get_reference_attribute\v!number}
+ {
+ \strc_lists_set_style_color\c!numberstyle\c!numbercolor\v!number
+ \listparameter\c!numbercommand\currentlistsymbol
+% \hfill
+ }
\else
\setbox\b_strc_lists_number\emptyhbox
\fi
@@ -951,6 +999,8 @@
\hsize\scratchhsize
\usealignparameter\listparameter
\ifdim\scratchwidth<\hsize
+ % we have leftskip so we'd better just skip back instead of messing
+ % with hang*
\edef\p_hang{\listparameter\c!hang}
\hangindent\dimexpr\wd\b_strc_lists_number+\scratchdistance\relax
\hangafter\ifx\p_hang\v!no\zerocount\else\plusone\fi
@@ -963,7 +1013,7 @@
\scratchdistance\zeropoint
\fi
\parindent\zeropoint
- \dontleavehmode
+ \dontleavehmode % this nils hang: i need to figure out why
% % topaligned
%
% \scratchdimen\wd\b_strc_lists_number
@@ -1049,7 +1099,7 @@
\listparameter\c!numbercommand\currentlistsymbol
\listparameter\c!right
\endgroup
- \kern.5em
+ \kern.5\emwidth\relax
\nobreak
\fi
\fi
@@ -1068,7 +1118,7 @@
\ifconditional\c_lists_has_page
\ifconditional\c_lists_show_page
\nobreak
- \hskip.75em\relax
+ \hskip.75\emwidth\relax
\nobreak
\strc_lists_set_reference_attribute\v!pagenumber
\strc_lists_set_style_color\c!pagestyle\c!pagecolor\v!pagenumber
@@ -1088,7 +1138,9 @@
\stopsetups
\startsetups[\??listrenderings:e]
- \noindent
+ \typo_injectors_check_list
+ \noindent % otherwise annotations are mirrored up
+ \typo_injectors_mark_list
\hbox \strc_lists_get_reference_attribute\v!all \strc_lists_get_destination_attribute {
\letlistparameter\c!depth\zeropoint
\letlistparameter\c!color\empty
@@ -1108,7 +1160,9 @@
\stopsetups
\startsetups[\??listrenderings:f]
- \noindent
+ \typo_injectors_check_list
+ \noindent % otherwise annotations are mirrored up
+ \typo_injectors_mark_list
\hbox \strc_lists_get_reference_attribute\v!all \strc_lists_get_destination_attribute {
\dosetraggedhbox{\listparameter\c!align}%
\raggedbox {
@@ -1126,7 +1180,9 @@
\stopsetups
\startsetups[\??listrenderings:g]
- \noindent
+ \typo_injectors_check_list
+ \noindent % otherwise annotations are mirrored up
+ \typo_injectors_mark_list
\hbox \strc_lists_get_reference_attribute\v!all \strc_lists_get_destination_attribute {
\midaligned {
\strc_lists_set_style_color\c!style\c!color\v!all
@@ -1147,19 +1203,115 @@
\definelistalternative
[\v!interactive]
- [\c!renderingsetup=\??listrenderings:interactive]
+ [\c!renderingsetup=\??listrenderings:interactive,
+ \c!before=\endgraf, % new per 2014-11-08
+ \c!after=\endgraf] % new per 2014-11-08
\startsetups[\??listrenderings:interactive]
- \edef\p_command{\listalternativeparameter\c!command}%
- \ifx\p_command\empty
- [\currentlist: \currentlistentrynumber\space -- \currentlistentrytitle\space -- \currentlistentrypagenumber]%
- \else
- \listparameter\c!before
- \hbox \strc_lists_get_reference_attribute\v!all \strc_lists_get_destination_attribute {
- \p_command\currentlistentrynumber\currentlistentrytitle\currentlistentrypagenumber
- }
- \listparameter\c!after
- \fi
+ \edef\p_command{\listalternativeparameter\c!command}%
+ \typo_injectors_check_list
+ \listparameter\c!before
+ \noindent % otherwise annotations are mirrored up
+ \typo_injectors_mark_list
+ \hbox \strc_lists_get_reference_attribute\v!all \strc_lists_get_destination_attribute {
+ \ifx\p_command\empty
+ [
+ \currentlist:\space
+ \currentlistentrynumber
+ \space\emdash\space
+ \currentlistentrytitle
+ \space\emdash\space
+ \currentlistentrypagenumber
+ ]
+ \else
+ \p_command\currentlistentrynumber\currentlistentrytitle\currentlistentrypagenumber
+ \fi
+ }
+ \listparameter\c!after
+\stopsetups
+
+%D One special for publications (as Alan loves to hangindent). No fonts and
+%D such (for now). No interaction either as that is dealt with elsewhere.
+%D
+%D \currentlistsymbol
+%D \currentlistentry
+%D \currentlistentrypagenumber % not really used
+
+\definelistalternative
+ [\v!paragraph]
+ [\c!filler=\hskip.25\emwidth,
+ \c!renderingsetup=\??listrenderings:\v!paragraph]
+
+\startsetups[\??listrenderings:\v!paragraph]
+ \endgraf % are we grouped?
+ \typo_injectors_check_list % ?
+ \listparameter\c!before
+ \endgraf
+ \begingroup
+ \forgetall
+ \noindent
+ \parindent\zeropoint
+ \edef\p_width{\listparameter\c!width}%
+ \edef\p_distance{\listparameter\c!distance}% we are nice for bib users
+ \edef\p_margin{\listparameter\c!margin}% we are nice for bib users
+ \ifx\p_distance\v!none
+ \scratchdistance\zeropoint
+ \else
+ \scratchdistance\p_distance
+ \fi
+ \ifx\p_margin\v!none
+ \scratchoffset\zeropoint
+ \else
+ \scratchoffset\p_margin
+ \fi
+ \ifx\p_width\v!fit
+ \scratchwidth\zeropoint
+ \leftskip\scratchoffset
+ \else
+ \scratchwidth\p_width
+ \ifdim\scratchoffset=\zeropoint
+ \leftskip\dimexpr\scratchwidth+\scratchdistance\relax
+ \else
+ \leftskip\scratchoffset
+ \fi
+ \fi
+ \usealignparameter\listparameter
+ \hskip-\leftskip
+ \ifconditional\c_lists_has_number
+ \ifconditional\c_lists_show_number
+ \setbox\scratchbox
+ \simplealignedbox\scratchwidth{\listparameter\c!numberalign}
+ \bgroup
+ \useliststyleandcolor\c!numberstyle\c!numbercolor
+ \currentlistsymbol
+ \egroup
+ \ifdim\wd\scratchbox>\zeropoint
+ \box\scratchbox
+ \hskip\scratchdistance\relax
+ \fi
+ \fi
+ \fi
+ \begingroup
+ \useliststyleandcolor\c!textstyle\c!textcolor
+ \setstrut
+ \begstrut
+ \currentlistentrytitle
+ \endstrut
+ \endgroup
+ \ifconditional\c_lists_has_page
+ \ifconditional\c_lists_show_page
+ \nobreak
+ \listalternativeparameter\c!filler\relax
+ \begingroup
+ \useliststyleandcolor\c!pagestyle\c!pagecolor
+ \currentlistentrypagenumber
+ \endgroup
+ \fi
+ \fi
+ \endgraf
+ \endgroup
+ \allowbreak
+ \listparameter\c!after
\stopsetups
%D List elements are packaged in such a way that we can click on them
@@ -1302,7 +1454,7 @@
\let\listlength\!!zerocount
\else
\setupcurrentlist[#settings]%
- \strc_lists_analyze{#list}{\listparameter\c!criterium}{\listparameter\c!number}%
+ \strc_lists_analyze{#list}{\listparameter\c!criterium}{\listparameter\c!reference}%
\normalexpanded{\endgroup\noexpand\edef\noexpand\listlength{\structurelistsize}}%
\fi
\strc_lists_set_mode}
diff --git a/tex/context/base/strc-mar.lua b/tex/context/base/strc-mar.lua
index b3a6e8f35..3af9113bf 100644
--- a/tex/context/base/strc-mar.lua
+++ b/tex/context/base/strc-mar.lua
@@ -10,23 +10,37 @@ if not modules then modules = { } end modules ['strc-mar'] = {
-- todo: only commands.* print to tex, native marks return values
local insert, concat = table.insert, table.concat
-local tostring, next, rawget = tostring, next, rawget
+local tostring, next, rawget, type = tostring, next, rawget, type
local lpegmatch = lpeg.match
local context = context
local commands = commands
+local implement = interfaces.implement
+
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
-local traversenodes = nodes.traverse
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local getbox = nuts.getbox
+
+local traversenodes = nuts.traverse
+
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local texsetattribute = tex.setattribute
-local texgetbox = tex.getbox
local a_marks = attributes.private("structure","marks")
@@ -106,9 +120,9 @@ end
local function sweep(head,first,last)
for n in traversenodes(head) do
- local id = n.id
+ local id = getid(n)
if id == glyph_code then
- local a = n[a_marks]
+ local a = getattr(n,a_marks)
if not a then
-- next
elseif first == 0 then
@@ -118,7 +132,7 @@ local function sweep(head,first,last)
end
elseif id == hlist_code or id == vlist_code then
if boxes_too then
- local a = n[a_marks]
+ local a = getattr(n,a_marks)
if not a then
-- next
elseif first == 0 then
@@ -127,7 +141,7 @@ local function sweep(head,first,last)
last = a
end
end
- local list = n.list
+ local list = getlist(n)
if list then
first, last = sweep(list,first,last)
end
@@ -143,9 +157,9 @@ setmetatableindex(classes, function(t,k) local s = settings_to_array(k) t[k] = s
local lasts = { }
function marks.synchronize(class,n,option)
- local box = texgetbox(n)
+ local box = getbox(n)
if box then
- local first, last = sweep(box.list,0,0)
+ local first, last = sweep(getlist(box),0,0)
if option == v_keep and first == 0 and last == 0 then
if trace_marks_get or trace_marks_set then
report_marks("action %a, class %a, box %a","retain at synchronize",class,n)
@@ -204,7 +218,11 @@ local function resolve(t,k)
end
function marks.define(name,settings)
- settings = settings or { }
+ if not settings then
+ settings = { }
+ elseif type(settings) == "string" then
+ settings = { parent = settings }
+ end
data[name] = settings
local parent = settings.parent
if parent == nil or parent == "" or parent == name then
@@ -699,17 +717,17 @@ end
-- interface
-commands.definemarking = marks.define
-commands.relatemarking = marks.relate
-commands.setmarking = marks.set
-commands.resetmarking = marks.reset
-commands.synchronizemarking = marks.synchronize
-commands.getmarking = marks.fetch
-commands.fetchonemark = marks.fetchonemark
-commands.fetchtwomarks = marks.fetchtwomarks
-commands.fetchallmarks = marks.fetchallmarks
-
-function commands.doifelsemarking(str) -- can be shortcut
- commands.doifelse(marks.exists(str))
-end
+implement { name = "markingtitle", actions = marks.title, arguments = { "string", "string" } }
+implement { name = "markingnumber", actions = marks.number, arguments = { "string", "string" } }
+
+implement { name = "definemarking", actions = marks.define, arguments = { "string", "string" } }
+implement { name = "relatemarking", actions = marks.relate, arguments = { "string", "string" } }
+implement { name = "setmarking", actions = marks.set, arguments = { "string", "string" } }
+implement { name = "resetmarking", actions = marks.reset, arguments = { "string" } }
+implement { name = "synchronizemarking", actions = marks.synchronize, arguments = { "string", "integer", "string" } }
+implement { name = "getmarking", actions = marks.fetch, arguments = { "string", "string", "string" } }
+implement { name = "fetchonemark", actions = marks.fetchonemark, arguments = { "string", "string", "string" } }
+implement { name = "fetchtwomarks", actions = marks.fetchtwomarks, arguments = { "string", "string" } }
+implement { name = "fetchallmarks", actions = marks.fetchallmarks, arguments = { "string", "string" } }
+implement { name = "doifelsemarking", actions = { marks.exists, commands.doifelse }, arguments = "string" }
diff --git a/tex/context/base/strc-mar.mkiv b/tex/context/base/strc-mar.mkiv
index 3685b66a7..8bd8c094e 100644
--- a/tex/context/base/strc-mar.mkiv
+++ b/tex/context/base/strc-mar.mkiv
@@ -56,35 +56,31 @@
\unexpanded\def\synchronizemarking{\dotripleargument\strc_markings_synchronize}
\appendtoks
- \ctxcommand{definemarking("\currentmarking",{ parent = "\currentmarkingparent" })}%
+ \clf_definemarking{\currentmarking}{\currentmarkingparent}%
\to \everydefinemarking
\def\strc_markings_relate[#1][#2]%
- {\ctxcommand{relatemarking("#1","#2")}}
+ {\clf_relatemarking{#1}{#2}}
\def\strc_markings_set[#1]#2%
{\ifconditional\inhibitsetmarking
% nothing
\else
\doifelse{\namedmarkingparameter{#1}\c!expansion}\v!yes
- {\ctxcommand{setmarking("#1",\!!bs#2\!!es)}}
- {\ctxcommand{setmarking("#1",\!!bs\detokenize{#2}\!!es)}}%
+ {\clf_setmarking{#1}{#2}}
+ {\clf_setmarking{#1}{\detokenize{#2}}}%
\fi}
\def\strc_markings_reset[#1]%
- {\ctxcommand{resetmarking("#1")}}
+ {\clf_resetmarking{#1}}
\def\strc_markings_synchronize[#1][#2][#3]% #1=class #2=boxnumber (some day also name) #3=options, maybe second argument table
- {\ifvoid#2\else\ctxcommand{synchronizemarking("#1",\number#2,"#3")}\fi}
+ {\ifvoid#2\else\clf_synchronizemarking{#1}#2{#3}\fi}
-% \def\doifelsemarking#1% why no \unexpanded
-% {\ctxcommand{doifelsemarking("#1")}}
+\def\doifelsemarking#1% no \noexpanded
+ {\clf_doifelsemarking{#1}}
-% \def\doifelsemarking#1%
-% {\normalexpanded{\noexpand\ctxcommand{doifelsemarking("\noexpand\detokenize{#1}")}}}
-
-\def\doifelsemarking#1%
- {\ctxcommand{doifelsemarking(\!!bs#1\!!es)}}
+\let\doifmarkingelse \doifelsemarking
% \appendtoks
% \strc_markings_synchronize[\v!page][\normalpagebox][\v!keep]% keep if no marks
@@ -116,25 +112,25 @@
\setsystemmode\v!marking
\the\everymarking
\ifthirdargument
- \ctxcommand{getmarking("#1","#2","#3")}%
+ \clf_getmarking{#1}{#2}{#3}%
\else
- \ctxcommand{getmarking("#1","\v!page","#2")}%
+ \clf_getmarking{#1}{\v!page}{#2}%
\fi
\endgroup}}
% the fetchers are fully expandable: [name][method]
-\def\fetchonemark[#1]#2[#3]{\ifconditional\inhibitgetmarking\else\ctxcommand{fetchonemark ("#1","\v!page","#2")}\fi}
-\def\fetchtwomarks [#1]{\ifconditional\inhibitgetmarking\else\ctxcommand{fetchtwomarks("#1","\v!page")}\fi}
-\def\fetchallmarks [#1]{\ifconditional\inhibitgetmarking\else\ctxcommand{fetchallmarks("#1","\v!page")}\fi}
+\def\fetchonemark[#1]#2[#3]{\ifconditional\inhibitgetmarking\else\clf_fetchonemark {#1}{\v!page}{#2}\fi}
+\def\fetchtwomarks [#1]{\ifconditional\inhibitgetmarking\else\clf_fetchtwomarks{#1}{\v!page}\fi}
+\def\fetchallmarks [#1]{\ifconditional\inhibitgetmarking\else\clf_fetchallmarks{#1}{\v!page}\fi}
\let\fetchmark\fetchonemark
% also fully expandable but here we have: [name][range][method]
-\def\fetchonemarking[#1]#2[#3]#4[#5]{\ifconditional\inhibitgetmarking\else\ctxcommand{fetchonemark ("#1","#3","#5")}\fi}
-\def\fetchtwomarkings [#1]#2[#3]{\ifconditional\inhibitgetmarking\else\ctxcommand{fetchtwomarks("#1","#3")}\fi}
-\def\fetchallmarkings [#1]#2[#3]{\ifconditional\inhibitgetmarking\else\ctxcommand{fetchallmarks("#1","#3")}\fi}
+\def\fetchonemarking[#1]#2[#3]#4[#5]{\ifconditional\inhibitgetmarking\else\clf_fetchonemark {#1}{#3}{#5}\fi}
+\def\fetchtwomarkings [#1]#2[#3]{\ifconditional\inhibitgetmarking\else\clf_fetchtwomarks{#1}{#3}\fi}
+\def\fetchallmarkings [#1]#2[#3]{\ifconditional\inhibitgetmarking\else\clf_fetchallmarks{#1}{#3}\fi}
\let\fetchmarking\fetchonemarking
diff --git a/tex/context/base/strc-mat.lua b/tex/context/base/strc-mat.lua
index 98b1e996c..87f35ed1d 100644
--- a/tex/context/base/strc-mat.lua
+++ b/tex/context/base/strc-mat.lua
@@ -6,28 +6,35 @@ if not modules then modules = { } end modules ['strc-mat'] = {
license = "see context related readme files"
}
+----- copytable = table.copy
+
local structures = structures
-local lists = structures.lists
-local sections = structures.sections
-local floats = structures.floats
-local helpers = structures.helpers
-local formulas = structures.formulas
+local lists = structures.lists
+local sections = structures.sections
+local floats = structures.floats
+local helpers = structures.helpers
+local formulas = structures.formulas -- not used but reserved
-lists.enhancers = lists.enhancers or { }
+----- context = context
+----- simplify = helpers.simplify
-- maybe we want to do clever things with formulas, the store might go away
-local formuladata = { }
-
-function formulas.store(data)
- formuladata[#formuladata+1] = data
- context(#formuladata)
-end
-
-function formulas.current()
- return formuladata[#formuladata]
-end
+-- local formuladata = { }
+--
+-- function formulas.store(data)
+-- formuladata[#formuladata+1] = data
+-- context(#formuladata)
+-- end
+--
+-- function formulas.current()
+-- return formuladata[#formuladata]
+-- end
+
+-- function formulas.simplify(entry)
+-- return simplify(copytable(entry or formuladata[#formuladata]))
+-- end
function helpers.formulanumber(data,spec)
if data then
@@ -38,10 +45,9 @@ function helpers.formulanumber(data,spec)
end
end
-function formulas.simplify(entry)
- return helpers.simplify(table.copy(entry or formuladata[#formuladata]))
-end
-
function lists.formulanumber(name,n,spec)
- helpers.formulanumber(lists.result[n])
+ local result = lists.result
+ if result then
+ helpers.formulanumber(result[n])
+ end
end
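
The guard added to lists.formulanumber above matters because lists.result is only filled after a list has been analyzed; indexing it while it is still nil would raise a Lua error. A minimal standalone sketch of the same defensive pattern (the names are illustrative, not the ConTeXt API):

-- illustrative only: mimics the nil guard added to lists.formulanumber
local lists = { result = nil }          -- result gets filled elsewhere, possibly not yet

local function formulanumber(n)
    local result = lists.result
    if result then
        return result[n]                -- safe: only index when the table exists
    end
    -- otherwise quietly do nothing, as in the patched function
end

print(formulanumber(1))                 -- nil instead of "attempt to index a nil value"
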
diff --git a/tex/context/base/strc-mat.mkiv b/tex/context/base/strc-mat.mkiv
index b9263cdb0..22fa54889 100644
--- a/tex/context/base/strc-mat.mkiv
+++ b/tex/context/base/strc-mat.mkiv
@@ -360,7 +360,7 @@
\def\strc_formulas_handle_sub_numbering_indeed
{\let\strc_formulas_handle_sub_numbering\relax % else error: see math/numbering-001.tex
- \doiftextelse\currentsubformulasuffix
+ \doifelsetext\currentsubformulasuffix
{\strc_counters_setown_sub\v!formula\plustwo\currentsubformulasuffix}
{\strc_counters_increment_sub\v!formula\plustwo}%
\placecurrentformulanumber}
@@ -528,7 +528,7 @@
\unexpanded\def\strc_formulas_start_formula_indeed[#1][#2]% setting leftskip adaption is slow !
{\bgroup % HERE
\def\currentformula{#1}%
- \dostarttagged\t!formula\currentformula
+ \dostarttaggedchained\t!formula\currentformula\??formula
\the\everybeforedisplayformula
\d_strc_formulas_display_skip_par\parskip\relax
%\formulastrutdp\strutdepth
@@ -564,7 +564,7 @@
\unexpanded\def\strc_formulas_start_formula_nested#1%
{\bgroup
\let\strc_formulas_stop_formula\strc_formulas_stop_formula_nested
- \dostarttagged\t!subformula}
+ \dostarttagged\t!subformula\empty}
\unexpanded\def\strc_formulas_stop_formula_nested
{\dostoptagged
@@ -630,13 +630,13 @@
% \prevdepth-\maxdimen % texbook pagina 79-80
% \fi
% \noindent % else funny hlist with funny baselineskip
-% $$% \Ustartdisplaymath
+% $$% \Ucheckedstartdisplaymath
% \setdisplaydimensions
% \startinnermath}
%
% \unexpanded\def\stopdisplaymath
% {\stopinnermath
-% $$% \Ustopdisplaymath
+% $$% \Ucheckedstopdisplaymath
% \par
% \afterdisplayspace
% \par
@@ -663,13 +663,13 @@
\fi
\fi
\noindent % else funny hlist with funny baselineskip
- $$% \Ustartdisplaymath
+ \Ucheckedstartdisplaymath
\setdisplaydimensions
\startinnermath}
\unexpanded\def\stopdisplaymath
{\stopinnermath
- $$% \Ustopdisplaymath
+ \Ucheckedstopdisplaymath
\par
\ifvmode
\ifcase\c_strc_formulas_space_model
@@ -835,14 +835,14 @@
\def\strc_formulas_place[#1]%
{\def\currentplaceformulareference{#1}%
\let\currentplaceformulasuffix\empty
- \doifnextbgroupelse\strc_formulas_place_yes\strc_formulas_place_nop\strc_formulas_place_nop} % [ref]{}
+ \doifelsenextbgroup\strc_formulas_place_yes\strc_formulas_place_nop\strc_formulas_place_nop} % [ref]{}
\def\strc_formulas_place_yes#1%
{\def\currentplaceformulasuffix{#1}%
\strc_formulas_place_nop}
\def\strc_formulas_place_nop
- {\doifnextcharelse$\strc_formulas_place_pickup\strc_formulas_place_indeed} % [ref]$$ [ref]\start
+ {\doifelsenextchar$\strc_formulas_place_pickup\strc_formulas_place_indeed} % [ref]$$ [ref]\start
\def\strc_formulas_place_indeed
{\strc_formulas_place_numbering}
diff --git a/tex/context/base/strc-not.lua b/tex/context/base/strc-not.lua
index 40b78d59f..71eccf1ce 100644
--- a/tex/context/base/strc-not.lua
+++ b/tex/context/base/strc-not.lua
@@ -38,6 +38,8 @@ local variables = interfaces.variables
local context = context
local commands = commands
+local implement = interfaces.implement
+
-- state: store, insert, postpone
local function store(tag,n)
@@ -69,9 +71,11 @@ end
notes.store = store
-function commands.storenote(tag,n)
- context(store(tag,n))
-end
+implement {
+ name = "storenote",
+ actions = { store, context },
+ arguments = { "string", "integer" }
+}
local function get(tag,n) -- tricky ... only works when defined
local nd = notedata[tag]
@@ -108,9 +112,11 @@ end
notes.listindex = listindex
-function commands.notelistindex(tag,n)
- context(listindex(tag,n))
-end
+implement {
+ name = "notelistindex",
+ actions = { listindex, context },
+ arguments = { "string", "integer" }
+}
local function setstate(tag,newkind)
local state = notestates[tag]
@@ -144,18 +150,28 @@ end
notes.setstate = setstate
notes.getstate = getstate
-commands.setnotestate = setstate
+implement {
+ name = "setnotestate",
+ actions = setstate,
+ arguments = { "string", "string" }
+}
-function commands.getnotestate(tag)
- context(getstate(tag))
-end
+implement {
+ name = "getnotestate",
+ actions = { getstate, context },
+ arguments = "string"
+}
function notes.define(tag,kind,number)
local state = setstate(tag,kind)
state.number = number
end
-commands.definenote = notes.define
+implement {
+ name = "definenote",
+ actions = notes.define,
+ arguments = { "string", "string", "integer" }
+}
function notes.save(tag,newkind)
local state = notestates[tag]
@@ -184,8 +200,8 @@ function notes.restore(tag,forcedstate)
end
end
-commands.savenote = notes.save
-commands.restorenote = notes.restore
+implement { name = "savenote", actions = notes.save, arguments = { "string", "string" } }
+implement { name = "restorenote", actions = notes.restore, arguments = { "string", "string" } }
local function hascontent(tag)
local ok = notestates[tag]
@@ -205,9 +221,11 @@ end
notes.hascontent = hascontent
-function commands.doifnotecontent(tag)
- commands.doif(hascontent(tag))
-end
+implement {
+ name = "doifnotecontent",
+ actions = { hascontent, commands.doif },
+ arguments = "string",
+}
local function internal(tag,n)
local nd = get(tag,n)
@@ -243,9 +261,11 @@ end
notes.doifonsamepageasprevious = onsamepageasprevious
-function commands.doifnoteonsamepageasprevious(tag)
- commands.doifelse(onsamepageasprevious(tag))
-end
+implement {
+ name = "doifnoteonsamepageasprevious",
+ actions = { onsamepageasprevious, commands.doifelse },
+ arguments = "string",
+}
function notes.checkpagechange(tag) -- called before increment !
local nd = notedata[tag] -- can be unset at first entry
@@ -277,7 +297,10 @@ function notes.postpone()
end
end
-commands.postponenotes = notes.postpone
+implement {
+ name = "postponenotes",
+ actions = notes.postpone
+}
function notes.setsymbolpage(tag,n,l)
local l = l or listindex(tag,n)
@@ -297,7 +320,11 @@ function notes.setsymbolpage(tag,n,l)
end
end
-commands.setnotesymbolpage = notes.setsymbolpage
+implement {
+ name = "setnotesymbolpage",
+ actions = notes.setsymbolpage,
+ arguments = { "string", "integer" }
+}
local function getsymbolpage(tag,n)
local li = internal(tag,n)
@@ -351,11 +378,11 @@ notes.getsymbolpage = getsymbolpage
notes.getnumberpage = getnumberpage
notes.getdeltapage = getdeltapage
-function commands.notesymbolpage(tag,n) context(getsymbolpage(tag,n)) end
-function commands.notenumberpage(tag,n) context(getnumberpage(tag,n)) end
-function commands.notedeltapage (tag,n) context(getdeltapage (tag,n)) end
+implement { name = "notesymbolpage", actions = { getsymbolpage, context }, arguments = { "string", "integer" } }
+implement { name = "notenumberpage", actions = { getnumberpage, context }, arguments = { "string", "integer" } }
+implement { name = "notedeltapage", actions = { getdeltapage, context }, arguments = { "string", "integer" } }
-function commands.flushnotes(tag,whatkind,how) -- store and postpone
+local function flushnotes(tag,whatkind,how) -- store and postpone
local state = notestates[tag]
local kind = state.kind
if kind == whatkind then
@@ -411,15 +438,26 @@ function commands.flushnotes(tag,whatkind,how) -- store and postpone
end
end
-function commands.flushpostponednotes()
+local function flushpostponednotes()
if trace_notes then
report_notes("flushing all postponed notes")
end
for tag, _ in next, notestates do
- commands.flushnotes(tag,"postpone")
+ flushnotes(tag,"postpone")
end
end
+implement {
+ name = "flushpostponednotes",
+ actions = flushpostponednotes
+}
+
+implement {
+ name = "flushnotes",
+ actions = flushnotes,
+ arguments = { "string", "string", "string" }
+}
+
function notes.resetpostponed()
if trace_notes then
report_notes("resetting all postponed notes")
@@ -432,13 +470,17 @@ function notes.resetpostponed()
end
end
-function commands.notetitle(tag,n)
- command.savedlisttitle(tag,notedata[tag][n])
-end
+implement {
+ name = "notetitle",
+ actions = function(tag,n) lists.savedlisttitle(tag,notedata[tag][n]) end,
+ arguments = { "string", "integer" }
+}
-function commands.noteprefixednumber(tag,n,spec)
- commands.savedlistprefixednumber(tag,notedata[tag][n])
-end
+implement {
+ name = "noteprefixednumber",
+ actions = function(tag,n) lists.savedlistprefixednumber(tag,notedata[tag][n]) end,
+ arguments = { "string", "integer" }
+}
function notes.internalid(tag,n)
local nd = get(tag,n)
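
The strc-not.lua changes above replace the ad hoc commands.* wrappers with interfaces.implement registrations. As the converted definitions show, an actions list such as { store, context } means: call store with the scanned arguments, then hand its return value to context, which writes it back into the TeX input. A rough standalone sketch of that chaining idea (the real implement also generates the \clf_... argument scanner, which is not reproduced here):

-- illustrative sketch of chained actions, not the actual ConTeXt implementation
local unpack = table.unpack or unpack                -- Lua 5.1/5.2+ compatibility

local function context(s) io.write(tostring(s)) end  -- stand-in for the real context()

local function chain(actions)
    return function(...)
        local result = { ... }
        for i = 1, #actions do
            result = { actions[i](unpack(result)) }  -- pipe each result into the next action
        end
        return unpack(result)
    end
end

local notedata = { footnote = { 10, 20, 30 } }
local function listindex(tag, n) return notedata[tag] and notedata[tag][n] end

-- roughly what implement { actions = { listindex, context }, ... } wires up:
local notelistindex = chain { listindex, context }
notelistindex("footnote", 2)                         -- prints 20
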
diff --git a/tex/context/base/strc-not.mkvi b/tex/context/base/strc-not.mkvi
index a1aecf83a..25a1072a3 100644
--- a/tex/context/base/strc-not.mkvi
+++ b/tex/context/base/strc-not.mkvi
@@ -170,7 +170,7 @@
\ifx\p_counter\empty %
\let\p_counter\currentnotation
\fi
- \doifcounterelse\p_counter\donothing{\strc_notes_define_counter\p_counter}%
+ \doifelsecounter\p_counter\donothing{\strc_notes_define_counter\p_counter}%
\letnotationparameter\s!counter\p_counter
%\strc_notes_setup_counter\currentnotation
\to \everydefinenotation
@@ -181,6 +181,7 @@
\unexpanded\setvalue{\??constructioninitializer\v!notation}%
{\let\currentnotation \currentconstruction
\let\constructionparameter \notationparameter
+ \let\constructionnamespace \??notation
\let\detokenizedconstructionparameter\detokenizednotationparameter
\let\letconstructionparameter \letnotationparameter
\let\useconstructionstyleandcolor \usenotationstyleandcolor
@@ -231,7 +232,7 @@
\ifnotesenabled
\strc_counters_increment_sub\currentconstructionnumber\currentconstructionlevel
\fi
- \doifnextoptionalelse\strc_notations_command_yes\strc_notations_command_nop}
+ \doifelsenextoptionalcs\strc_notations_command_yes\strc_notations_command_nop}
\unexpanded\def\strc_notations_command_nop#title%
{\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference=,\c!title={#title},\c!bookmark=,\c!list=][]%
@@ -240,7 +241,7 @@
\normalexpanded{\endgroup\noteparameter\c!next}}
\unexpanded\def\strc_notations_command_yes[#optional]%
- {\doifassignmentelse{#optional}\strc_notations_command_assignment\strc_notations_command_argument[#optional]}
+ {\doifelseassignment{#optional}\strc_notations_command_assignment\strc_notations_command_argument[#optional]}
\unexpanded\def\strc_notations_command_assignment[#settings]%
{\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference=,\c!title=,\c!bookmark=,\c!list=,#settings][]%
@@ -265,7 +266,7 @@
% \normalexpanded % not that efficient but also not that frequently used (\normaldef for parser)
% {\normaldef\noexpand\strc_pickup_yes[##1]##2\csname\e!stop#stoptag\endcsname{\strc_notations_command_yes[##1]{##2}}%
% \normaldef\noexpand\strc_pickup_nop ##1\csname\e!stop#stoptag\endcsname{\strc_notations_command_nop {##1}}}%
-% \doifnextoptionalelse\strc_pickup_yes\strc_pickup_nop}
+% \doifnextoptionalcselse\strc_pickup_yes\strc_pickup_nop}
\unexpanded\def\strc_notations_start#tag#stoptag%
{\begingroup
@@ -278,7 +279,7 @@
\normalexpanded % not that efficient but also not that frequently used (\normaldef for parser)
{\def\noexpand\strc_pickup_yes[#one]#two\csname\e!stop#stoptag\endcsname{\strc_notations_command_yes[#one]{#two}}%
\def\noexpand\strc_pickup_nop #one\csname\e!stop#stoptag\endcsname{\strc_notations_command_nop {#one}}}%
- \doifnextoptionalelse\strc_pickup_yes\strc_pickup_nop}
+ \doifelsenextoptionalcs\strc_pickup_yes\strc_pickup_nop}
\unexpanded\def\strc_notations_start_yes[#reference]#title%
{\strc_constructions_register[\c!label={\descriptionparameter\c!text},\c!reference={#reference},\c!title={#title},\c!bookmark=,\c!list=][]%
@@ -382,7 +383,7 @@
\appendtoks
\ifx\currentnoteparent\empty
- \doifinsertionelse\currentnote
+ \doifelseinsertion\currentnote
\donothing
{\defineinsertion[\currentnote]% could be an option
\normalexpanded{\t_strc_notes{\the\t_strc_notes\noexpand\strc_notes_process_list{\currentnote}}}}%
@@ -392,7 +393,7 @@
\setexpandednoteparameter\s!insert{\namednoteparameter\currentnoteparent\s!insert}%
\definenotation[\currentnote][\currentnoteparent][\c!type=\v!note]%
\fi
- \ctxcommand{definenote("\currentnote","insert",\number\currentnoteinsertionnumber)}%
+ \clf_definenote{\currentnote}{insert}\currentnoteinsertionnumber\relax
\to \everydefinenote
% maybe we will share this at some point:
@@ -437,7 +438,7 @@
\strc_notes_inject_dummy
\else
\begingroup
- \edef\currentnotenumber{\ctxcommand{storenote("\currentnote",\currentconstructionlistentry)}}%
+ \edef\currentnotenumber{\clf_storenote{\currentnote}\currentconstructionlistentry}%
\settrue\processingnote
\ifconditional\c_strc_notes_skip
\globallet\lastnotesymbol\strc_notes_inject_symbol_nop
@@ -460,7 +461,11 @@
\else\ifconditional\inlocalnotes % todo: per note class
\global\settrue\postponednote
\else
+\ifconditional\c_strc_notes_delayed
+ % probably end notes
+\else
\handlenoteinsert\currentnote\currentnotenumber % either an insert or just delayed
+\fi
\fi\fi
\endgroup
\fi
@@ -515,7 +520,7 @@
\endcsname}
\setvalue{\??noteinteractioninline\v!yes}%
- {\strc_references_get_simple_page_reference{page(\ctxcommand{notenumberpage("\currentnote",\currentnotenumber)})}%
+ {\strc_references_get_simple_page_reference{page(\clf_notenumberpage{\currentnote}\currentnotenumber)}%
\edef\strc_notes_get_reference_attribute_symbol{attr\referenceattribute\currentreferenceattribute}%
\let \strc_notes_set_style_color_inline \strc_notes_set_style_color_inline_yes}
@@ -535,7 +540,7 @@
% page(...) : we could have a dedicated one
\setvalue{\??noteinteractiondisplay\v!yes}%
- {\strc_references_get_simple_page_reference{page(\ctxcommand{notesymbolpage("\currentnote",\currentnotenumber)})}%
+ {\strc_references_get_simple_page_reference{page(\clf_notesymbolpage{\currentnote}\currentnotenumber)}%
\edef\strc_notes_set_reference_attribute_number{\attribute\referenceattribute\currentreferenceattribute}%
\let \strc_notes_set_reference_attribute_text \donothing
\let \strc_notes_set_destination_attribute_text\donothing
@@ -548,7 +553,7 @@
\let\strc_notes_set_style_color_display \strc_notes_set_style_color_display_nop}
\setvalue{\??noteinteractiondisplay\v!all}%
- {\strc_references_get_simple_page_reference{page(\ctxcommand{notesymbolpage("\currentnote",\currentnotenumber)})}%
+ {\strc_references_get_simple_page_reference{page(\clf_notesymbolpage{\currentnote}\currentnotenumber)}%
\edef\strc_notes_set_reference_attribute_text {\attribute\referenceattribute\currentreferenceattribute}%
%\strc_references_set_simple_page_reference{note:\cldcontext{structures.notes.internalid("\currentnote",\currentnotenumber)}}%
\strc_references_set_simple_page_reference{note:\currentnote:\currentnotenumber}%
@@ -557,7 +562,7 @@
\let \strc_notes_set_style_color_display \strc_notes_set_style_color_display_yes}
\setvalue{\??noteinteractiondisplay\v!text}%
- {\strc_references_get_simple_page_reference{page(\ctxcommand{notesymbolpage("\currentnote",\currentnotenumber)})}%
+ {\strc_references_get_simple_page_reference{page(\clf_notesymbolpage{\currentnote}\currentnotenumber)}%
\edef\strc_notes_set_reference_attribute_text {\attribute\referenceattribute\currentreferenceattribute}%
%\strc_references_set_simple_page_reference{note:\cldcontext{structures.notes.internalid("\currentnote",\currentnotenumber)}}%
\strc_references_set_simple_page_reference{note:\currentnote:\currentnotenumber}%
@@ -582,7 +587,7 @@
{\iftrialtypesetting
% keep
\else\ifx\currentcolorparameter\empty
- \scratchcounter\ctxcommand{notedeltapage("\currentnote",\currentnotenumber)}\relax % todo calculate once
+ \scratchcounter\clf_notedeltapage{\currentnote}\currentnotenumber\relax % todo calculate once
\setlocationcolorspecified\scratchcounter
\fi\fi}
@@ -601,7 +606,7 @@
% in mkii the pointer only showed up in pagewise notes
\unexpanded\def\strc_notes_inject_pointer % todo calculate once
- {\ifcase\ctxcommand{notedeltapage("\currentnote",\currentnotenumber)}\relax
+ {\ifcase\clf_notedeltapage{\currentnote}\currentnotenumber\relax\relax
% unknown
\or
% same page
@@ -634,7 +639,7 @@
\unexpanded\def\strc_notes_inject_symbol_indeed#synchronize%
{\removeunwantedspaces
- \doifitalicelse\/\donothing % Charles IV \footnote{the fourth}
+ \doifelseitalic\/\donothing % Charles IV \footnote{the fourth}
\ifdim\lastkern=\notesignal
% \kern\noteparameter\c!distance % yes or no note font? or main text
\strc_notes_inject_separator
@@ -650,18 +655,18 @@
\hbox \strc_notes_get_reference_attribute_symbol \bgroup
\dostarttagged\t!descriptionsymbol\currentnote
\dotagsetnotesymbol
- \noteparameter\c!textcommand{\ctxcommand{noteprefixednumber("\currentnote",\currentnotenumber)}}%
+ \noteparameter\c!textcommand{\clf_noteprefixednumber{\currentnote}\currentnotenumber\relax}%
% the next one can cycle so we need to make sure it has no advance width
\doif{\noteparameter\c!indicator}\v!yes\strc_notes_inject_pointer
\dostoptagged
\egroup
\endgroup
- \dostoptagged % check
+% \dostoptagged % check
\globallet\lastnotesymbol\relax}
\unexpanded\def\strc_notes_inject_dummy % temp hack
{\removeunwantedspaces
- \doifitalicelse\/\donothing % Charles IV \footnote{the fourth}
+ \doifelseitalic\/\donothing % Charles IV \footnote{the fourth}
\ifdim\lastkern=\notesignal
% \kern\noteparameter\c!distance % yes or no note font? or main text
\strc_notes_inject_separator
@@ -756,7 +761,9 @@
%appendtoks \notesenabledfalse \to \everymarking
\appendtoks \notesenabledfalse \to \everybeforepagebody
-\appendtoks \notesenabledfalse \to \everystructurelist % quick hack
+\appendtoks \notesenabledfalse \to \everystructurelist % quick hack
+\appendtoks \notesenabledfalse \to \everysimplifycommands % quick hack
+\appendtoks \notesenabledfalse \to \everypreroll % quick hack
%D Often we need to process the whole set of notes and to make that
%D fast, we use a token register:
@@ -1023,7 +1030,7 @@
\def\strc_notes_set_location_text % we don't use inserts anyway (e.g. endnotes)
{\settrue\c_strc_notes_delayed
- \ctxcommand{setnotestate("\currentnote","store")}%
+ \clf_setnotestate{\currentnote}{store}%
\page_inserts_set_location\currentnoteinsertion\v!text % \setupinsertion[\currentnote][\c!location=\v!text]%
\global\count\currentnoteinsertionnumber\zerocount
\global\dimen\currentnoteinsertionnumber\maxdimen
@@ -1175,6 +1182,7 @@
\insert\currentnoteinsertionnumber\bgroup
\the\everyinsidenoteinsert\relax
\usesetupsparameter\noteparameter % experimental
+ \useinterlinespaceparameter\noteparameter
\doifelse{\noteparameter\c!paragraph}\v!yes
{\nointerlineskip
\startvboxtohboxseparator
@@ -1197,7 +1205,7 @@
\edef\currentnote{#tag}%
\strc_constructions_initialize{#tag}%
\strc_notes_synchronize
- \edef\currentconstructionlistentry{\ctxcommand{notelistindex("\currentnote",#id)}}% index in list cache
+ \edef\currentconstructionlistentry{\clf_notelistindex{\currentnote}#id}% index in list cache
% as we can have collected notes (e.g. in tables) we need to recover
% \currentdescriptionattribute and \currentdescriptionsynchronize
%
@@ -1211,14 +1219,14 @@
\begstrut
\strc_references_flush_destination_nodes
\strc_notes_set_destination_attribute_text
- \strc_notes_inject_text
- \endstrut
+ \strc_notes_inject_text\relax
+ \ifvmode\obeydepth\else\endstrut\fi % \obeydepth is new per 2015-01-10
\strc_constructions_stored_stop
% \endgroup
}
\unexpanded\def\strc_notes_inject_text % hm main?
- {\ctxcommand{savedlisttitle("\currentconstructionmain",\currentconstructionlistentry)}}
+ {\clf_savedlisttitle{\currentconstructionmain}\currentconstructionlistentry\relax}
\let\startpushnote\relax
\let\stoppushnote \relax
@@ -1242,6 +1250,7 @@
\appendtoks
\doif{\noteparameter\c!scope}\v!page{\floatingpenalty\maxdimen}% experiment
\penalty\currentnotepenalty
+ %\interlinepenalty\maxdimen % todo
\forgetall
\strc_notes_set_bodyfont
   \redoconvertfont % to undo \undo calls in headings etc
@@ -1290,7 +1299,7 @@
\noteparameter\c!before
\fi
% \bgroup
- % \setupalign[\noteparameter\c!align]%
+ % \usealignparameter\noteparameter
\placenoterule % alleen in ..mode
% \par
% \egroup
@@ -1298,6 +1307,7 @@
\strc_notes_set_bodyfont
\setbox\scratchbox\hbox
{\strc_notes_flush_inserts}%
+ \page_postprocessors_linenumbers_deepbox\scratchbox
\setbox\scratchbox\hbox
{\setupcurrentnote
[\c!location=,
@@ -1345,11 +1355,14 @@
% idea: tag with attr and then just flush them again
\def\strc_notes_flush_global
- {\doifelse{\noteparameter\c!paragraph}\v!yes
+ {\begingroup
+ \useinterlinespaceparameter\noteparameter
+ \doifelse{\noteparameter\c!paragraph}\v!yes
{\vbox\starthboxestohbox
\iftrialtypesetting\unvcopy\else\unvbox\fi\currentnoteinsertionnumber
\stophboxestohbox}
- {\iftrialtypesetting\unvcopied\else\unvboxed\fi\currentnoteinsertionnumber}}
+ {\iftrialtypesetting\unvcopied\else\unvboxed\fi\currentnoteinsertionnumber}%
+ \endgroup}
%D Supporting end notes is surprisingly easy. Even better, we
%D can combine this feature with solving the common \TEX\
@@ -1387,7 +1400,7 @@
{\ifconditional\postponingnotes\else
\global\settrue\postponingnotes
\global\let\flushnotes\doflushnotes
- \ctxcommand{postponenotes()}%
+ \clf_postponenotes
\fi}
\let\flushnotes\relax
@@ -1397,7 +1410,7 @@
\begingroup
\let\flushnotes \relax
\let\postponenotes\relax
- \ctxcommand{flushpostponednotes()}% this also resets the states !
+ \clf_flushpostponednotes% this also resets the states !
\global\setfalse\postponednote
\global\setfalse\postponingnotes
\global\let\flushnotes\relax
@@ -1444,20 +1457,20 @@
\strc_counters_save{#tag}%
\strc_counters_reset{#tag}%
\fi
- \ctxcommand{savenote("#tag","store")}}
+ \clf_savenote{#tag}{store}}
\def\strc_notes_local_stop_step#tag%
{\p_strc_notes_continue{\noteparameter\c!continue}%
\ifx\p_strc_notes_continue\v!yes \else
\strc_counters_restore{#tag}%
\fi
- \ctxcommand{restorenote("#tag")}}
+ \clf_restorenote{#tag}}
\unexpanded\def\placelocalnotes
{\dodoubleempty\strc_notes_local_place}
\def\strc_notes_local_place[#tag][#settings]%
- {\doif{\ctxcommand{getnotestate("#tag")}}{store}{\strc_notes_local_place_indeed{#settings}{#tag}}}
+ {\doif{\clf_getnotestate{#tag}}{store}{\strc_notes_local_place_indeed{#settings}{#tag}}}
\def\strc_notes_local_place_indeed#settings#tag%
{\begingroup
@@ -1517,7 +1530,7 @@
\def\strc_notes_place_indeed#settings#tag% settings note
{\edef\currentnote{#tag}% grouping ?
- \doifelse{\ctxcommand{getnotestate("#tag")}}{store}
+ \doifelse{\clf_getnotestate{#tag}}{store}
\strc_notes_local_place_indeed
\strc_notes_global_place_indeed
{#settings}{#tag}}
@@ -1537,7 +1550,7 @@
{\setvalue{\??notealternative#alternative}{#command}}
\unexpanded\def\doifnotescollected#tag%
- {\ctxcommand{doifnotecontent("#tag")}}
+ {\clf_doifnotecontent{#tag}}
\def\strc_notes_place_local_alternative % will be a setup (wrapper)
{\doifnotescollected\currentnote
@@ -1564,7 +1577,7 @@
% setups ?
-\def\flushlocalnotes#tag{\ctxcommand{flushnotes("#tag","store","\noteparameter\c!criterium")}}
+\def\flushlocalnotes#tag{\clf_flushnotes{#tag}{store}{\noteparameter\c!criterium}}
\installnotealternative \v!none
{\flushlocalnotes\currentnote}
@@ -1589,7 +1602,7 @@
\setupcurrentnote[\c!location=]%
\inheritednoteframed
{\edef\currentnotewidth{\noteparameter\c!width}%
- \doifdimensionelse\currentnotewidth\donothing
+ \doifelsedimension\currentnotewidth\donothing
{\edef\currentnotewidth{\the\hsize}}%
\startsimplecolumns[\c!distance=\noteparameter\c!columndistance,\c!n=\noteparameter\c!n,\c!width=\currentnotewidth]%
\flushlocalnotes\currentnote
@@ -1714,8 +1727,8 @@
\def\strc_notes_set_bodyfont
{\let\strc_notes_set_bodyfont\relax
\restoreglobalbodyfont
- \switchtobodyfont[\noteparameter\c!bodyfont]%
- \setupalign[\noteparameter\c!align]}
+ \usebodyfontparameter\noteparameter
+ \usealignparameter\noteparameter}
%D The footnote mechanism defaults to a traditional one
%D column way of showing them. By default we precede them by
@@ -1756,7 +1769,9 @@
%D }
%D \stoptyping
-\def\doifnoteonsamepageelse[#tag]{\ctxcommand{doifnoteonsamepageasprevious("#tag")}}
+\def\doifelsenoteonsamepage[#tag]{\clf_doifnoteonsamepageasprevious{#tag}}
+
+\let\doifnoteonsamepageelse\doifelsenoteonsamepage
%D New trickery:
@@ -1778,19 +1793,19 @@
{\dodoubleempty\strc_notes_symbol}
\def\strc_notes_symbol[#tag][#reference]%
- {\dontleavehmode
- \begingroup
- \edef\currentnote{#tag}%
- \usenotestyleandcolor\c!textstyle\c!textcolor
- \ifnotesenabled
+ {\ifnotesenabled
+ \dontleavehmode
+ \begingroup
+ \edef\currentnote{#tag}%
+ \usenotestyleandcolor\c!textstyle\c!textcolor
\ifsecondargument
\unskip
\noteparameter\c!textcommand{\in[#reference]}% command here?
\else
\noteparameter\c!textcommand\lastnotesymbol % check if command double
\fi
- \fi
- \endgroup}
+ \endgroup
+ \fi}
\unexpanded\def\note
{\dodoubleempty\strc_notes_note}
@@ -1821,4 +1836,24 @@
% [ownnote]
% [\ownnotesymbol]
+% tricky:
+%
+% \enabletrackers[nodes.areas]
+% \enabletrackers[nodes.references]
+% \enabletrackers[nodes.destinations]
+%
+% \setupnotes[interaction=all,rule=no]
+% \setupinteraction[state=start,focus=standard]
+%
+% \starttext
+% \goto{\input tufte\relax}[page(2)] \par
+% \ruledhbox{\gotobox{\vtop{\input tufte\relax}}[page(2)]} \par
+% \ruledhbox{\gotobox{\vbox{\input tufte\relax}}[page(2)]} \par
+% % \completecontent
+% % \chapter{Chapter}
+% % \dorecurse{5}{\input knuth}
+% a\footnote{\input tufte\par\input ward\relax}
+% \stoptext
+
\protect \endinput
+
diff --git a/tex/context/base/strc-num.lua b/tex/context/base/strc-num.lua
index 67e9b1734..0203334ff 100644
--- a/tex/context/base/strc-num.lua
+++ b/tex/context/base/strc-num.lua
@@ -20,6 +20,8 @@ local setmetatableindex = table.setmetatableindex
local trace_counters = false trackers.register("structures.counters", function(v) trace_counters = v end)
local report_counters = logs.reporter("structure","counters")
+local implement = interfaces.implement
+
local structures = structures
local helpers = structures.helpers
local sections = structures.sections
@@ -199,6 +201,24 @@ local function allocate(name,i) -- can be metatable
return ci
end
+local pattern = lpeg.P(variables.by)^-1 * lpeg.C(lpeg.P(1)^1)
+local lpegmatch = lpeg.match
+
+function counters.way(way)
+ if not way or way == "" then
+ return ""
+ else
+ return lpegmatch(pattern,way)
+ end
+end
+
+implement {
+ name = "way",
+ actions = { counters.way, context },
+ arguments = "string"
+}
+
+
function counters.record(name,i)
return allocate(name,i or 1)
end
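
The new counters.way helper above strips an optional prefix (variables.by, which is "by" in the English interface) from a way specification such as "bychapter", so that the remainder can be fed to \thenamedheadlevel as a plain head name. A small standalone check of that lpeg pattern, assuming the prefix is the literal string "by":

-- standalone check of the pattern used in counters.way (assumes variables.by == "by")
local lpeg = require("lpeg")
local pattern = lpeg.P("by")^-1 * lpeg.C(lpeg.P(1)^1)

print(lpeg.match(pattern, "bychapter"))  -- chapter
print(lpeg.match(pattern, "chapter"))    -- chapter
print(lpeg.match(pattern, "bysection"))  -- section
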
@@ -376,10 +396,8 @@ local function check(name,data,start,stop)
end
end
-counters.reset = reset
-counters.set = set
-function counters.setown(name,n,value)
+local function setown(name,n,value)
local cd = counterdata[name]
if cd then
local d = allocate(name,n)
@@ -397,14 +415,14 @@ function counters.setown(name,n,value)
end
end
-function counters.restart(name,n,newstart,noreset)
+local function restart(name,n,newstart,noreset)
local cd = counterdata[name]
if cd then
newstart = tonumber(newstart)
if newstart then
local d = allocate(name,n)
d.start = newstart
- if not noreset then
+ if not noreset then -- why / when needed ?
reset(name,n) -- hm
end
end
@@ -425,7 +443,7 @@ function counters.restore(name)
end
end
-function counters.add(name,n,delta)
+local function add(name,n,delta)
local cd = counterdata[name]
if cd and (cd.state == v_start or cd.state == "") then
local data = cd.data
@@ -494,7 +512,12 @@ local function get(name,n,key)
end
end
-counters.get = get
+counters.reset = reset
+counters.set = set
+counters.add = add
+counters.get = get
+counters.setown = setown
+counters.restart = restart
function counters.value(name,n) -- what to do with own
return get(name,n or 1,'number') or 0
@@ -553,24 +576,7 @@ end
-- interfacing
-commands.definecounter = counters.define
-commands.setcounter = counters.set
-commands.setowncounter = counters.setown
-commands.resetcounter = counters.reset
-commands.restartcounter = counters.restart
-commands.savecounter = counters.save
-commands.restorecounter = counters.restore
-commands.addcounter = counters.add
-
-commands.rawcountervalue = function(...) context(counters.raw (...)) end
-commands.countervalue = function(...) context(counters.value (...)) end
-commands.lastcountervalue = function(...) context(counters.last (...)) end
-commands.firstcountervalue = function(...) context(counters.first (...)) end
-commands.nextcountervalue = function(...) context(counters.next (...)) end
-commands.prevcountervalue = function(...) context(counters.previous(...)) end
-commands.subcountervalues = function(...) context(counters.subs (...)) end
-
-function commands.showcounter(name)
+local function showcounter(name)
local cd = counterdata[name]
if cd then
context("[%s:",name)
@@ -583,19 +589,82 @@ function commands.showcounter(name)
end
end
-function commands.doifelsecounter(name) commands.doifelse(counterdata[name]) end
-function commands.doifcounter (name) commands.doif (counterdata[name]) end
-function commands.doifnotcounter (name) commands.doifnot (counterdata[name]) end
-
-function commands.incrementedcounter(...) context(counters.add(...)) end
+-- the noreset is somewhat messy ... always false messes up e.g. itemize, but always true messes up the page numbers
+--
+-- if this fails I'll clean up this still somewhat experimental mechanism (but I need use cases)
-function commands.checkcountersetup(name,level,start,state)
- counters.restart(name,1,start,true) -- no reset
+local function checkcountersetup(name,level,start,state)
+ local noreset = true -- level > 0 -- was true
+ counters.restart(name,1,start,noreset) -- was true
counters.setstate(name,state)
counters.setlevel(name,level)
sections.setchecker(name,level,counters.reset)
end
+--
+
+implement { name = "addcounter", actions = add, arguments = { "string", "integer", "integer" } }
+implement { name = "setcounter", actions = set, arguments = { "string", 1, "integer" } }
+implement { name = "setowncounter", actions = setown, arguments = { "string", 1, "string" } }
+implement { name = "restartcounter", actions = restart, arguments = { "string", 1, "integer" } }
+implement { name = "resetcounter", actions = reset, arguments = { "string", 1 } }
+implement { name = "incrementcounter", actions = add, arguments = { "string", 1, 1 } }
+implement { name = "decrementcounter", actions = add, arguments = { "string", 1, -1 } }
+
+implement { name = "setsubcounter", actions = set, arguments = { "string", "integer", "integer" } }
+implement { name = "setownsubcounter", actions = setown, arguments = { "string", "integer", "string" } }
+implement { name = "restartsubcounter", actions = restart, arguments = { "string", "integer", "integer" } }
+implement { name = "resetsubcounter", actions = reset, arguments = { "string", "integer" } }
+implement { name = "incrementsubcounter", actions = add, arguments = { "string", "integer", 1 } }
+implement { name = "decrementsubcounter", actions = add, arguments = { "string", "integer", -1 } }
+
+implement { name = "rawcountervalue", actions = { counters.raw , context }, arguments = { "string", 1 } }
+implement { name = "countervalue", actions = { counters.value , context }, arguments = { "string", 1 } }
+implement { name = "lastcountervalue", actions = { counters.last , context }, arguments = { "string", 1 } }
+implement { name = "firstcountervalue", actions = { counters.first , context }, arguments = { "string", 1 } }
+implement { name = "nextcountervalue", actions = { counters.next , context }, arguments = { "string", 1 } }
+implement { name = "prevcountervalue", actions = { counters.previous, context }, arguments = { "string", 1 } }
+implement { name = "subcountervalues", actions = { counters.subs , context }, arguments = { "string", 1 } }
+
+implement { name = "rawsubcountervalue", actions = { counters.raw , context }, arguments = { "string", "integer" } }
+implement { name = "subcountervalue", actions = { counters.value , context }, arguments = { "string", "integer" } }
+implement { name = "lastsubcountervalue", actions = { counters.last , context }, arguments = { "string", "integer" } }
+implement { name = "firstsubcountervalue", actions = { counters.first , context }, arguments = { "string", "integer" } }
+implement { name = "nextsubcountervalue", actions = { counters.next , context }, arguments = { "string", "integer" } }
+implement { name = "previoussubcountervalue", actions = { counters.previous, context }, arguments = { "string", "integer" } }
+implement { name = "subsubcountervalues", actions = { counters.subs , context }, arguments = { "string", "integer" } }
+
+implement { name = "savecounter", actions = counters.save, arguments = "string" }
+implement { name = "restorecounter", actions = counters.restore, arguments = "string" }
+
+implement { name = "incrementedcounter", actions = { add, context }, arguments = { "string", 1, 1 } }
+implement { name = "decrementedcounter", actions = { add, context }, arguments = { "string", 1, -1 } }
+
+implement { name = "showcounter", actions = showcounter, arguments = "string" } -- todo
+implement { name = "checkcountersetup", actions = checkcountersetup, arguments = { "string", "integer", "integer", "string" } }
+
+table.setmetatablecall(counterdata,function(t,k) return t[k] end)
+
+implement { name = "doifelsecounter", actions = { counterdata, commands.doifelse }, arguments = "string" }
+implement { name = "doifcounter", actions = { counterdata, commands.doif }, arguments = "string" }
+implement { name = "doifnotcounter", actions = { counterdata, commands.doifnot }, arguments = "string" }
+
+implement {
+ name = "definecounter",
+ actions = counters.define,
+ arguments = {
+ {
+ { "name" } ,
+ { "start", "integer" },
+ { "counter" },
+ { "method" },
+ }
+ }
+}
+
+------------------------------------------------------------------
+------------------------------------------------------------------
+
-- -- move to strc-pag.lua
--
-- function counters.analyze(name,counterspecification)
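
One detail in the interfacing block above deserves a note: table.setmetatablecall(counterdata, ...) makes the counterdata table itself callable, which is what lets it sit as the first step of an actions chain like { counterdata, commands.doifelse }; calling the table with a counter name just performs the lookup, and the (possibly nil) result drives the conditional. A plain-Lua sketch of the same trick, using setmetatable directly since the ConTeXt helper itself is not part of this patch:

-- illustrative: make a data table callable so it can act as the lookup step
-- in an actions chain (lookup -> conditional), as done for counterdata above
local counterdata = { figure = { numbers = { 3 } } }

setmetatable(counterdata, { __call = function(t, k) return t[k] end })

local function doifelse(value)                 -- stand-in for commands.doifelse
    print(value and "true branch" or "false branch")
end

-- roughly what { counterdata, commands.doifelse } evaluates to for one name:
doifelse(counterdata("figure"))                -- true branch
doifelse(counterdata("unknown"))               -- false branch
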
diff --git a/tex/context/base/strc-num.mkiv b/tex/context/base/strc-num.mkiv
index 2fa8b0e9a..58095b8e7 100644
--- a/tex/context/base/strc-num.mkiv
+++ b/tex/context/base/strc-num.mkiv
@@ -17,6 +17,8 @@
\unprotect
+\startcontextdefinitioncode
+
% work in progress
% to be checked: can we use the command handler code here?
% all settings will move to lua
@@ -25,7 +27,9 @@
\installcommandhandler \??counter {counter} \??counter
-\let\setupstructurecounting\setupcounter
+\let\setupcounters\setupcounter
+
+\let\setupstructurecounting\setupcounter % will disappear
\setupcounter
[\c!way=\v!by\v!chapter,
@@ -39,7 +43,7 @@
\c!state=\v!start]
\def\autostructureprefixsegments#1% todo: \c!prefixsegments=\v!auto
- {2:\thenamedheadlevel{\ctxcommand{way("#1\c!way")}}}
+ {2:\thenamedheadlevel{\clf_way{#1\c!way}}}
\appendtoks
\resetcounterparameter\s!counter
@@ -48,21 +52,27 @@
\appendtoks
\ifx\currentcounterparent\empty
\edef\p_start{\counterparameter\c!start}%
- \ctxcommand{definecounter {
- name = "\currentcounter",
- start = \ifx\p_start\empty0\else\number\p_start\fi,
- counter = "\counterparameter\s!counter",
- method = "\counterparameter\c!method",
- }}%
+ \clf_definecounter
+ name {\currentcounter}%
+ start \ifx\p_start\empty\zerocount\else\numexpr\p_start\relax\fi
+ counter {\counterparameter\s!counter}%
+ method {\counterparameter\c!method}%
+ \relax
\letcounterparameter\s!name\currentcounter
\else
- \letcounterparameter\s!name\currentcounterparent
+ % \letcounterparameter\s!name\currentcounterparent % we need a chained clone
+ \setexpandedcounterparameter\s!name{\namedcounterparameter\currentcounterparent\s!name}%
\fi
\strc_counters_check_setup
\to \everydefinecounter
\appendtoks
\ifx\currentcounter\empty \else
+ \edef\p_number{\counterparameter\c!number}%
+ \ifx\p_number\empty \else
+ \clf_setcounter{\counterparameter\s!name}\numexpr\p_number\relax
+ \letcounterparameter\c!number\empty
+ \fi
\edef\p_start{\counterparameter\c!start}%
\setexpandedcounterparameter\c!start{\ifx\p_start\empty0\else\number\p_start\fi}%
\strc_counters_check_setup
@@ -71,40 +81,28 @@
% % % %
-\def\strc_counters_way#1% slow, we need to store it at the tex end
- {\ctxcommand{way("\namedcounterparameter{#1}\c!way")}}
-
-% \def\thenamedcounterlevel#1%
-% {\thenamedheadlevel{\strc_counters_way{#1}}}
+\def\strc_counters_way#1{\clf_way{\namedcounterparameter{#1}\c!way}}
\def\thenamedcounterlevel#1%
- {\xthenamedheadlevel{\strc_counters_way{#1}}}
-
-% \def\strc_counters_check_setup#1% does it have to happen here?
-% {% this can be done at the lua end / a bit messy here ... todo ...
-% \ifcsname\??counter#1\c!number\endcsname
-% \doifelsevalue {\??counter#1\c!number}{#1} {\letbeundefined{\??counter#1\c!number}}%
-% {\doifvaluenothing{\??counter#1\c!number} {\letbeundefined{\??counter#1\c!number}}}%
-% \fi
-% \ifcsname\??counter#1\c!number\endcsname
-% % it's a clone
-% \else
-% \edef\currentcounterlevel{\thenamedcounterlevel{#1}}%
-% \edef\p_start{\counterparameter{#1}\c!start}%
-% \ctxcommand{checkcountersetup("#1",\currentcounterlevel,\ifx\p_start\empty0\else\number\p_start\fi,"\counterparameter{#1}\c!state")}%
-% \fi}
+ {\xthenamedheadlevel{\clf_way{\namedcounterparameter{#1}\c!way}}}
-\def\strc_counters_check_setup
+\unexpanded\def\strc_counters_check_setup
{\edef\p_name{\directcounterparameter\s!name}%
\ifx\currentcounter\p_name
\edef\currentcounterlevel{\thenamedcounterlevel\currentcounter}%
\edef\p_start{\counterparameter\c!start}%
- \ctxcommand{checkcountersetup("\currentcounter",\currentcounterlevel,\ifx\p_start\empty0\else\number\p_start\fi,"\counterparameter\c!state")}%
+ \clf_checkcountersetup
+ {\currentcounter}%
+ \numexpr\currentcounterlevel\relax
+ \numexpr\ifx\p_start\empty\zerocount\else\p_start\fi\relax % bug in scanner
+ {\counterparameter\c!state}%
\fi}
-\unexpanded\def\doifcounterelse #1{\ctxcommand{doifelsecounter("\namedcounterparameter{#1}\s!name")}}
-\unexpanded\def\doifcounter #1{\ctxcommand{doifcounter ("\namedcounterparameter{#1}\s!name")}}
-\unexpanded\def\doifnotcounter #1{\ctxcommand{doifnotcounter ("\namedcounterparameter{#1}\s!name")}}
+\unexpanded\def\doifelsecounter #1{\clf_doifelsecounter{\namedcounterparameter{#1}\s!name}}
+\unexpanded\def\doifcounter #1{\clf_doifcounter {\namedcounterparameter{#1}\s!name}}
+\unexpanded\def\doifnotcounter #1{\clf_doifnotcounter {\namedcounterparameter{#1}\s!name}}
+
+\let\doifcounterelse\doifelsecounter
\unexpanded\def\setcounter {\dotripleempty \strc_counters_set_interfaced}
\unexpanded\def\setcounterown {\dotripleempty \strc_counters_setown_interfaced}
@@ -245,90 +243,90 @@
\expandafter\gobbleoneoptional
\fi}
-\def\strc_counters_set_two [#1][#2][#3]{\strc_counters_set_sub {#1}{#2}{#3}}
-\def\strc_counters_setown_two [#1][#2][#3]{\strc_counters_setown_sub {#1}{#2}{#3}}
-\def\strc_counters_restart_two [#1][#2][#3]{\strc_counters_restart_sub {#1}{#2}{#3}}
-\def\strc_counters_reset_two [#1][#2]{\strc_counters_reset_sub {#1}{#2}}
-\def\strc_counters_increment_two [#1][#2]{\strc_counters_increment_sub{#1}{#2}}
-\def\strc_counters_decrement_two [#1][#2]{\strc_counters_decrement_sub{#1}{#2}}
-
-\def\strc_counters_raw_two [#1][#2]{\strc_counters_raw_sub {#1}{#2}}
-\def\strc_counters_last_two [#1][#2]{\strc_counters_last_sub {#1}{#2}}
-\def\strc_counters_first_two [#1][#2]{\strc_counters_first_sub {#1}{#2}}
-\def\strc_counters_next_two [#1][#2]{\strc_counters_next_sub {#1}{#2}}
-\def\strc_counters_prev_two [#1][#2]{\strc_counters_prev_sub {#1}{#2}}
-\def\strc_counters_subs_two [#1][#2]{\strc_counters_subs_sub {#1}{#2}}
-
-\def\strc_counters_set_one [#1][#2][#3]{\strc_counters_set_sub {#1}\plusone{#2}}
-\def\strc_counters_setown_one [#1][#2][#3]{\strc_counters_setown_sub {#1}\plusone{#2}}
-\def\strc_counters_restart_one [#1][#2][#3]{\strc_counters_restart_sub {#1}\plusone{#2}}
-\def\strc_counters_reset_one [#1][#2]{\strc_counters_reset_sub {#1}\plusone}
-\def\strc_counters_increment_one [#1][#2]{\strc_counters_increment_sub{#1}\plusone}
-\def\strc_counters_decrement_one [#1][#2]{\strc_counters_decrement_sub{#1}\plusone}
-
-\def\strc_counters_raw_one [#1][#2]{\strc_counters_raw_sub {#1}\plusone}
-\def\strc_counters_last_one [#1][#2]{\strc_counters_last_sub {#1}\plusone}
-\def\strc_counters_first_one [#1][#2]{\strc_counters_first_sub {#1}\plusone}
-\def\strc_counters_next_one [#1][#2]{\strc_counters_next_sub {#1}\plusone}
-\def\strc_counters_prev_one [#1][#2]{\strc_counters_prev_sub {#1}\plusone}
-\def\strc_counters_subs_one [#1][#2]{\strc_counters_subs_sub {#1}\plusone}
-
-\def\strc_counters_save_one [#1]{\strc_counters_save {#1}}
-\def\strc_counters_restore_one [#1]{\strc_counters_restore {#1}}
-
-\unexpanded\def\strc_counters_set #1#2{\strc_counters_set_sub {#1}\plusone{#2}}
-\unexpanded\def\strc_counters_setown #1#2{\strc_counters_setown_sub {#1}\plusone{#2}}
-\unexpanded\def\strc_counters_restart #1#2{\strc_counters_restart_sub {#1}\plusone{#2}}
-\unexpanded\def\strc_counters_reset #1{\strc_counters_reset_sub {#1}\plusone}
-\unexpanded\def\strc_counters_increment #1{\strc_counters_increment_sub{#1}\plusone}
-\unexpanded\def\strc_counters_decrement #1{\strc_counters_decrement_sub{#1}\plusone}
-
- \def\strc_counters_raw #1{\strc_counters_raw_sub {#1}\plusone}
- \def\strc_counters_last #1{\strc_counters_last_sub {#1}\plusone}
- \def\strc_counters_first #1{\strc_counters_first_sub {#1}\plusone}
- \def\strc_counters_next #1{\strc_counters_next_sub {#1}\plusone}
- \def\strc_counters_prev #1{\strc_counters_prev_sub {#1}\plusone}
- \def\strc_counters_subs #1{\strc_counters_subs_sub {#1}\plusone}
-
-\unexpanded\def\strc_counters_set_sub #1#2#3{\ctxcommand{setcounter ("\namedcounterparameter{#1}\s!name",\number#2,\number#3)}}
-\unexpanded\def\strc_counters_setown_sub #1#2#3{\ctxcommand{setowncounter ("\namedcounterparameter{#1}\s!name",\number#2,"#3")}}
-\unexpanded\def\strc_counters_restart_sub #1#2#3{\ctxcommand{restartcounter("\namedcounterparameter{#1}\s!name",\number#2,\number#3)}}
-\unexpanded\def\strc_counters_reset_sub #1#2{\ctxcommand{resetcounter ("\namedcounterparameter{#1}\s!name",\number#2)}}
-\unexpanded\def\strc_counters_increment_sub #1#2{\ctxcommand{addcounter ("\namedcounterparameter{#1}\s!name",\number#2,1)}}
-\unexpanded\def\strc_counters_decrement_sub #1#2{\ctxcommand{addcounter ("\namedcounterparameter{#1}\s!name",\number#2,-1)}}
-
- \def\strc_counters_raw_sub #1#2{\ctxcommand{countervalue ("\namedcounterparameter{#1}\s!name",\number#2)}} % maybe raw
- \def\strc_counters_last_sub #1#2{\ctxcommand{lastcountervalue ("\namedcounterparameter{#1}\s!name",\number#2)}}
- \def\strc_counters_first_sub #1#2{\ctxcommand{firstcountervalue ("\namedcounterparameter{#1}\s!name",\number#2)}}
- \def\strc_counters_next_sub #1#2{\ctxcommand{nextcountervalue ("\namedcounterparameter{#1}\s!name",\number#2)}}
- \def\strc_counters_prev_sub #1#2{\ctxcommand{previouscountervalue("\namedcounterparameter{#1}\s!name",\number#2)}}
- \def\strc_counters_subs_sub #1#2{\ctxcommand{subcountervalues ("\namedcounterparameter{#1}\s!name",\number#2)}}
-
-\unexpanded\def\strc_counters_save #1{\ctxcommand{savecounter ("\namedcounterparameter{#1}\s!name")}}
-\unexpanded\def\strc_counters_restore #1{\ctxcommand{restorecounter("\namedcounterparameter{#1}\s!name")}}
-
-\unexpanded\def\strc_counters_incremented #1{\ctxcommand{incrementedcounter("\namedcounterparameter{#1}\s!name",1, 1)}}
-\unexpanded\def\strc_counters_decremented #1{\ctxcommand{incrementedcounter("\namedcounterparameter{#1}\s!name",1,-1)}}
-
-\unexpanded\def\showcounter [#1]{\ctxcommand{tracecounter("\namedcounterparameter{#1}\s!name")}}
-
-\unexpanded\def\incrementedcounter [#1]{\strc_counters_incremented{#1}} % expandable, no \dosingleargument
-\unexpanded\def\decrementedcounter [#1]{\strc_counters_decremented{#1}} % expandable, no \dosingleargument
+\def\strc_counters_set_two [#1][#2][#3]{\clf_setsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax\numexpr#3\relax}
+\def\strc_counters_setown_two [#1][#2][#3]{\clf_setownsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax{#3}}
+\def\strc_counters_restart_two [#1][#2][#3]{\clf_restartsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax\numexpr#3\relax}
+\def\strc_counters_reset_two [#1][#2]{\clf_resetsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\def\strc_counters_increment_two [#1][#2]{\clf_incrementsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\def\strc_counters_decrement_two [#1][#2]{\clf_decrementsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+
+\def\strc_counters_raw_two [#1][#2]{\clf_subcountervalue {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\def\strc_counters_last_two [#1][#2]{\clf_lastsubcountervalue {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\def\strc_counters_first_two [#1][#2]{\clf_firstsubcountervalue {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\def\strc_counters_next_two [#1][#2]{\clf_nextsubcountervalue {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\def\strc_counters_prev_two [#1][#2]{\clf_previoussubcountervalue{\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\def\strc_counters_subs_two [#1][#2]{\clf_subsubcountervalues {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+
+\def\strc_counters_set_one [#1][#2][#3]{\clf_setcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\def\strc_counters_setown_one [#1][#2][#3]{\clf_setowncounter {\namedcounterparameter{#1}\s!name}{#2}}
+\def\strc_counters_restart_one [#1][#2][#3]{\clf_restartcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\def\strc_counters_reset_one [#1][#2]{\clf_resetcounter {\namedcounterparameter{#1}\s!name}}
+\def\strc_counters_increment_one [#1][#2]{\clf_incrementcounter {\namedcounterparameter{#1}\s!name}}
+\def\strc_counters_decrement_one [#1][#2]{\clf_decrementcounter {\namedcounterparameter{#1}\s!name}}
+
+\def\strc_counters_raw_one [#1][#2]{\clf_countervalue {\namedcounterparameter{#1}\s!name}}
+\def\strc_counters_last_one [#1][#2]{\clf_lastcountervalue {\namedcounterparameter{#1}\s!name}}
+\def\strc_counters_first_one [#1][#2]{\clf_firstcountervalue {\namedcounterparameter{#1}\s!name}}
+\def\strc_counters_next_one [#1][#2]{\clf_nextcountervalue {\namedcounterparameter{#1}\s!name}}
+\def\strc_counters_prev_one [#1][#2]{\clf_previouscountervalue {\namedcounterparameter{#1}\s!name}}
+\def\strc_counters_subs_one [#1][#2]{\clf_subcountervalues {\namedcounterparameter{#1}\s!name}}
+
+\def\strc_counters_save_one [#1]{\clf_savecounter {\namedcounterparameter{#1}\s!name}}
+\def\strc_counters_restore_one [#1]{\clf_restorecounter {\namedcounterparameter{#1}\s!name}}
+
+\unexpanded\def\strc_counters_set #1#2{\clf_setcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\unexpanded\def\strc_counters_setown #1#2{\clf_setowncounter {\namedcounterparameter{#1}\s!name}{#2}}
+\unexpanded\def\strc_counters_restart #1#2{\clf_restartcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\unexpanded\def\strc_counters_reset #1{\clf_resetcounter {\namedcounterparameter{#1}\s!name}}
+\unexpanded\def\strc_counters_increment #1{\clf_incrementcounter {\namedcounterparameter{#1}\s!name}}
+\unexpanded\def\strc_counters_decrement #1{\clf_decrementcounter {\namedcounterparameter{#1}\s!name}}
+
+ \def\strc_counters_raw #1{\clf_countervalue {\namedcounterparameter{#1}\s!name}}
+ \def\strc_counters_last #1{\clf_lastcountervalue {\namedcounterparameter{#1}\s!name}}
+ \def\strc_counters_first #1{\clf_firstcountervalue {\namedcounterparameter{#1}\s!name}}
+ \def\strc_counters_next #1{\clf_nextcountervalue {\namedcounterparameter{#1}\s!name}}
+ \def\strc_counters_prev #1{\clf_previouscountervalue {\namedcounterparameter{#1}\s!name}}
+ \def\strc_counters_subs #1{\clf_subcountervalues {\namedcounterparameter{#1}\s!name}}
+
+\unexpanded\def\strc_counters_set_sub #1#2#3{\clf_setsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax\numexpr#3\relax}
+\unexpanded\def\strc_counters_setown_sub #1#2#3{\clf_setownsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax{#3}}
+\unexpanded\def\strc_counters_restart_sub #1#2#3{\clf_restartsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax\numexpr#3\relax}
+\unexpanded\def\strc_counters_reset_sub #1#2{\clf_resetsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\unexpanded\def\strc_counters_increment_sub #1#2{\clf_incrementsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+\unexpanded\def\strc_counters_decrement_sub #1#2{\clf_decrementsubcounter {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+
+ \def\strc_counters_raw_sub #1#2{\clf_subcountervalue {\namedcounterparameter{#1}\s!name}\numexpr#2\relax} % maybe raw
+ \def\strc_counters_last_sub #1#2{\clf_lastsubcountervalue {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+ \def\strc_counters_first_sub #1#2{\clf_firstsubcountervalue {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+ \def\strc_counters_next_sub #1#2{\clf_nextsubcountervalue {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+ \def\strc_counters_prev_sub #1#2{\clf_previoussubcountervalue{\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+ \def\strc_counters_subs_sub #1#2{\clf_subsubcountervalues {\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
+
+\unexpanded\def\strc_counters_save #1{\clf_savecounter {\namedcounterparameter{#1}\s!name}}
+\unexpanded\def\strc_counters_restore #1{\clf_restorecounter {\namedcounterparameter{#1}\s!name}}
+
+\unexpanded\def\strc_counters_incremented #1{\clf_incrementedcounter {\namedcounterparameter{#1}\s!name}}
+\unexpanded\def\strc_counters_decremented #1{\clf_decrementedcounter {\namedcounterparameter{#1}\s!name}}
+
+\unexpanded\def\showcounter [#1]{\clf_showcounter {\namedcounterparameter{#1}\s!name}}
+
+\unexpanded\def\incrementedcounter [#1]{\clf_incrementedcounter {\namedcounterparameter{#1}\s!name}} % no \dosingleargument
+\unexpanded\def\decrementedcounter [#1]{\clf_decrementedcounter {\namedcounterparameter{#1}\s!name}} % no \dosingleargument
% public variants ... beware, for old cases, from now on the value variants are the
% ones that are expandable
-\def\rawcountervalue [#1]{\strc_counters_raw_sub {#1}\plusone}
-\def\lastcountervalue [#1]{\strc_counters_last_sub {#1}\plusone}
-\def\firstcountervalue[#1]{\strc_counters_first_sub{#1}\plusone}
-\def\nextcountervalue [#1]{\strc_counters_next_sub {#1}\plusone}
-\def\prevcountervalue [#1]{\strc_counters_prev_sub {#1}\plusone}
+\def\rawcountervalue [#1]{\clf_countervalue {\namedcounterparameter{#1}\s!name}}
+\def\lastcountervalue [#1]{\clf_lastcountervalue {\namedcounterparameter{#1}\s!name}}
+\def\firstcountervalue [#1]{\clf_firstcountervalue {\namedcounterparameter{#1}\s!name}}
+\def\nextcountervalue [#1]{\clf_nextcountervalue {\namedcounterparameter{#1}\s!name}}
+\def\prevcountervalue [#1]{\clf_previouscountervalue{\namedcounterparameter{#1}\s!name}}
-\let\rawsubcountervalue \strc_counters_raw_two
-\let\lastsubcountervalue \strc_counters_last_two
-\let\firstsubcountervalue\strc_counters_first_two
-\let\nextsubcountervalue \strc_counters_next_two
-\let\prevsubcountervalue \strc_counters_prev_two
+\let\rawsubcountervalue \strc_counters_raw_two
+\let\lastsubcountervalue \strc_counters_last_two
+\let\firstsubcountervalue \strc_counters_first_two
+\let\nextsubcountervalue \strc_counters_next_two
+\let\prevsubcountervalue \strc_counters_prev_two
% The bypage check needs a multipass reference and therefore we only check for it when we increment
% and know that some content will be placed. We could also check for spreads.
@@ -342,7 +340,7 @@
\strc_counters_reset{#1}%
\fi
\fi
- \ctxcommand{addcounter("\namedcounterparameter{#1}\s!name",\number#2,1)}}
+ \clf_incrementsubcounter{\namedcounterparameter{#1}\s!name}\numexpr#2\relax}
\unexpanded\def\convertedcounter
{\dodoubleempty\strc_counters_converted}
@@ -351,58 +349,58 @@
{\begingroup
\edef\currentcounter{#1}%
\ifsecondargument\setupcurrentcounter[#2]\fi
- \ctxlua{structures.sections.prefixedconverted(
- "\counterparameter\s!name",
+ \clf_prefixedconverted
+ {\counterparameter\s!name}
{
- prefix = "\counterparameter\c!prefix",
- separatorset = "\counterparameter\c!prefixseparatorset",
- conversion = "\counterparameter\c!prefixconversion",
- conversionset = "\counterparameter\c!prefixconversionset",
- starter = \!!bs\counterparameter\c!prefixstarter\!!es,
- stopper = \!!bs\counterparameter\c!prefixstopper\!!es,
- set = "\counterparameter\c!prefixset",
- segments = "\counterparameter\c!prefixsegments",
- connector = \!!bs\counterparameter\c!prefixconnector\!!es,
- },
+ prefix {\counterparameter\c!prefix}
+ separatorset {\counterparameter\c!prefixseparatorset}
+ conversion {\counterparameter\c!prefixconversion}
+ conversionset {\counterparameter\c!prefixconversionset}
+ starter {\counterparameter\c!prefixstarter}
+ stopper {\counterparameter\c!prefixstopper}
+ set {\counterparameter\c!prefixset}
+ segments {\counterparameter\c!prefixsegments}
+ connector {\counterparameter\c!prefixconnector}
+ }
{
- order = "\counterparameter\c!numberorder",
- separatorset = "\counterparameter\c!numberseparatorset",
- conversion = \!!bs\counterparameter\c!numberconversion\!!es,
- conversionset = "\counterparameter\c!numberconversionset",
- starter = \!!bs\counterparameter\c!numberstarter\!!es,
- stopper = \!!bs\counterparameter\c!numberstopper\!!es,
- segments = "\counterparameter\c!numbersegments",
- type = "\counterparameter\c!type",
- criterium = "\counterparameter\c!criterium", % might change if we also want this with sectioning
+ order {\counterparameter\c!numberorder}
+ separatorset {\counterparameter\c!numberseparatorset}
+ conversion {\counterparameter\c!numberconversion}
+ conversionset {\counterparameter\c!numberconversionset}
+ starter {\counterparameter\c!numberstarter}
+ stopper {\counterparameter\c!numberstopper}
+ segments {\counterparameter\c!numbersegments}
+ type {\counterparameter\c!type}
+ criterium {\counterparameter\c!criterium}
}
- )}%
+ \relax
\endgroup}
\def\directconvertedcounter#1#2% name, type
- {\ctxlua{structures.sections.prefixedconverted(
- "\namedcounterparameter{#1}\s!name",
+ {\clf_prefixedconverted
+ {\namedcounterparameter{#1}\s!name}
{
- prefix = "\namedcounterparameter{#1}\c!prefix",
- separatorset = "\namedcounterparameter{#1}\c!prefixseparatorset",
- conversion = "\namedcounterparameter{#1}\c!prefixconversion",
- conversionset = "\namedcounterparameter{#1}\c!prefixconversionset",
- % starter = \!!bs\namedcounterparameter{#1}\c!prefixstarter\!!es,
- % stopper = \!!bs\namedcounterparameter{#1}\c!prefixstopper\!!es,
- set = "\namedcounterparameter{#1}\c!prefixset",
- segments = "\namedcounterparameter{#1}\c!prefixsegments",
- connector = \!!bs\namedcounterparameter{#1}\c!prefixconnector\!!es,
- },
+ prefix {\namedcounterparameter{#1}\c!prefix}
+ separatorset {\namedcounterparameter{#1}\c!prefixseparatorset}
+ conversion {\namedcounterparameter{#1}\c!prefixconversion}
+ conversionset {\namedcounterparameter{#1}\c!prefixconversionset}
+ % starter {\namedcounterparameter{#1}\c!prefixstarter}
+ % stopper {\namedcounterparameter{#1}\c!prefixstopper}
+ set {\namedcounterparameter{#1}\c!prefixset}
+ segments {\namedcounterparameter{#1}\c!prefixsegments}
+ connector {\namedcounterparameter{#1}\c!prefixconnector}
+ }
{
- order = "\namedcounterparameter{#1}\c!numberorder",
- separatorset = "\namedcounterparameter{#1}\c!numberseparatorset",
- conversion = \!!bs\namedcounterparameter{#1}\c!numberconversion\!!es,
- conversionset = "\namedcounterparameter{#1}\c!numberconversionset",
- starter = \!!bs\namedcounterparameter{#1}\c!numberstarter\!!es,
- stopper = \!!bs\namedcounterparameter{#1}\c!numberstopper\!!es,
- segments = "\namedcounterparameter{#1}\c!numbersegments",
- type = "#2",
+ order {\namedcounterparameter{#1}\c!numberorder}
+ separatorset {\namedcounterparameter{#1}\c!numberseparatorset}
+ conversion {\namedcounterparameter{#1}\c!numberconversion}
+ conversionset {\namedcounterparameter{#1}\c!numberconversionset}
+ starter {\namedcounterparameter{#1}\c!numberstarter}
+ stopper {\namedcounterparameter{#1}\c!numberstopper}
+ segments {\namedcounterparameter{#1}\c!numbersegments}
+ type {#2}
}
- )}}
+ \relax}
\unexpanded\def\convertedsubcounter
{\dotripleempty\strc_counters_converted_sub}
@@ -437,7 +435,9 @@
\unexpanded\def\doifdefinedcounter {\doifcommandhandler \??counter}
\unexpanded\def\doifundefinedcounter {\doifnotcommandhandler \??counter}
-\unexpanded\def\doifdefinedcounterelse{\doifelsecommandhandler\??counter}
+\unexpanded\def\doifelsedefinedcounter{\doifelsecommandhandler\??counter}
+
+\let\doifdefinedcounterelse\doifelsedefinedcounter
%D What follows is a compatibility layer.
@@ -458,7 +458,8 @@
\let \doifdefinednumber \doifdefinedcounter % {number}{true}
\let \doifundefinednumber \doifnotdefinedcounter % {number}{true}
-\let \doifdefinednumberelse \doifdefinedcounterelse % {number}{true}{false}
+\let \doifelsedefinednumber \doifelsedefinedcounter % {number}{true}{false}
+\let \doifdefinednumberelse \doifelsedefinedcounter % {number}{true}{false}
\let \setupnumbering \setupcounter
@@ -504,119 +505,164 @@
\fi
%
\ifx\p_hascaption\v!yes
- \xdef\currentstructurecomponentname {#3\s!name}%
- \xdef\currentstructurecomponentlevel {#3\c!level}%
- \edef\currentstructurecomponentexpansion {#3\c!expansion}%
- \xdef\currentstructurecomponentxmlsetup {#3\c!xmlsetup}%
- \xdef\currentstructurecomponentcatcodes {#3\s!catcodes}%
- \xdef\currentstructurecomponentlabel {#3\c!label}%
- \xdef\currentstructurecomponentreference {#3\c!reference}%
- \xdef\currentstructurecomponentreferenceprefix{#3\c!referenceprefix}%
- \ifx\currentstructurecomponentexpansion\s!xml
- \xmlstartraw
- \xdef\currentstructurecomponenttitle {#3\c!title}%
- \xdef\currentstructurecomponentbookmark{#3\c!bookmark}%
- \xdef\currentstructurecomponentmarking {#3\c!marking}%
- \xdef\currentstructurecomponentlist {#3\c!list}%
- \xmlstopraw
- \ifx\currentstructurecomponentlist\empty
- \globallet\currentstructurecomponentlist\currentstructurecomponenttitle
- \fi
- \globallet\currentstructurecomponentcoding\s!xml
- \else
- \ifx\currentstructurecomponentexpansion\v!yes
- \xdef\currentstructurecomponenttitle {#3\c!title}%
- \xdef\currentstructurecomponentbookmark{#3\c!bookmark}%
- \xdef\currentstructurecomponentmarking {#3\c!marking}%
- \xdef\currentstructurecomponentlist {#3\c!list}%
- \else
- \xdef\currentstructurecomponenttitle {#4\c!title}%
- \xdef\currentstructurecomponentbookmark{#4\c!bookmark}%
- \xdef\currentstructurecomponentmarking {#4\c!marking}%
- \xdef\currentstructurecomponentlist {#4\c!list}%
- \iflocation \ifx\currentstructurecomponentbookmark\empty
- \begingroup
- \simplifycommands
- \xdef\currentstructurecomponentbookmark{\detokenize\expandafter{\normalexpanded{#3\c!title}}}%
- \endgroup
- \fi \fi
- \fi
- \ifx\currentstructurecomponentlist\empty
- \globallet\currentstructurecomponentlist\currentstructurecomponenttitle
- \fi
- \globallet\currentstructurecomponentcoding\s!tex
+ \strc_counters_register_component_list{#1}{#3}{#4}{#9}%
+ \else\ifx\currentstructurecomponentreference\empty
+ \strc_counters_register_component_none
+ \else
+ \strc_counters_register_component_page{#3}%
+ \fi\fi
+ \endgroup}
+
+\def\strc_counters_register_component_none
+ {\glet\m_strc_counters_last_registered_index \relax
+ \glet\m_strc_counters_last_registered_attribute \attributeunsetvalue
+ \glet\m_strc_counters_last_registered_synchronize\relax}
+
+\def\strc_counters_register_component_check_prefix
+ {\ifx\currentstructurecomponentreferenceprefix\empty
+ \let\currentstructurecomponentreferenceprefix\currentstructurereferenceprefix
+ \fi
+ \ifx\currentstructurecomponentreferenceprefix\empty
+ \let\currentstructurecomponentreferenceprefix\referenceprefix
+ \fi
+ \ifdefined\currentstructurecomponentreferenceprefix\else
+ \let\currentstructurecomponentreferenceprefix\empty
+ \fi}
+
+\def\strc_counters_register_component_page#1%
+ {\xdef\currentstructurecomponentreference {#1\c!reference}%
+ \xdef\currentstructurecomponentreferenceprefix{#1\c!referenceprefix}%
+ \strc_counters_register_component_check_prefix
+ \setnextinternalreference
+ \clf_setreferenceattribute
+ {%
+ references {%
+ internal \nextinternalreference
+ % block {\currentsectionblock}% move to lua
+ view {\interactionparameter\c!focus}%
+ prefix {\currentstructurecomponentreferenceprefix}%
+ reference {\currentstructurecomponentreference}%
+ }%
+ metadata {%
+ kind {\s!page}%
+ }%
+ }%
+ \relax
+ \xdef\m_strc_counters_last_registered_attribute {\the\lastdestinationattribute}%
+ \glet\m_strc_counters_last_registered_index \relax
+ \glet\m_strc_counters_last_registered_synchronize\relax}
+
+\def\strc_counters_register_component_list#1#2#3#4%
+ {\xdef\currentstructurecomponentname {#2\s!name}%
+ \xdef\currentstructurecomponentlevel {#2\c!level}%
+ \edef\currentstructurecomponentexpansion {#2\c!expansion}%
+ \xdef\currentstructurecomponentxmlsetup {#2\c!xmlsetup}%
+ \xdef\currentstructurecomponentcatcodes {#2\s!catcodes}%
+ \xdef\currentstructurecomponentlabel {#2\c!label}%
+ \xdef\currentstructurecomponentreference {#2\c!reference}%
+ \xdef\currentstructurecomponentreferenceprefix{#2\c!referenceprefix}%
+ \strc_counters_register_component_check_prefix
+ \ifx\currentstructurecomponentexpansion\s!xml
+ \xmlstartraw
+ \xdef\currentstructurecomponenttitle {#2\c!title}%
+ \xdef\currentstructurecomponentbookmark{#2\c!bookmark}%
+ \xdef\currentstructurecomponentmarking {#2\c!marking}%
+ \xdef\currentstructurecomponentlist {#2\c!list}%
+ \xmlstopraw
+ \ifx\currentstructurecomponentlist\empty
+ \globallet\currentstructurecomponentlist\currentstructurecomponenttitle
\fi
- %
- \setnextinternalreference
- \xdef\m_strc_counters_last_registered_index{\ctxcommand{addtolist{
- metadata = {
- kind = "#1",
- name = "\currentname",
- level = structures.sections.currentlevel(),
- catcodes = \the\ifx\currentstructurecomponentcatcodes\empty\catcodetable\else\csname\currentstructurecomponentcatcodes\endcsname\fi,
- coding = "\currentstructurecomponentcoding",
- \ifx\currentstructurecomponentcoding\s!xml
- xmlroot = "\xmldocument",
- \fi
- \ifx\currentstructurecomponentxmlsetup\empty \else
- xmlsetup = "\currentstructurexmlsetup",
- \fi
- },
- references = {
- internal = \nextinternalreference,
- block = "\currentsectionblock",
- reference = "\currentstructurecomponentreference",
- referenceprefix = "\currentstructurecomponentreferenceprefix",
- section = structures.sections.currentid(),
- },
- titledata = {
- label = \!!bs\detokenize\expandafter{\currentstructurecomponentlabel }\!!es,
- title = \!!bs\detokenize\expandafter{\currentstructurecomponenttitle }\!!es,
- \ifx\currentstructurecomponentbookmark\currentstructurecomponenttitle \else
- bookmark = \!!bs\detokenize\expandafter{\currentstructurecomponentbookmark }\!!es,
- \fi
- \ifx\currentstructurecomponentmarking\currentstructurecomponenttitle \else
- marking = \!!bs\detokenize\expandafter{\currentstructurecomponentmarking }\!!es,
- \fi
- \ifx\currentstructurecomponentlist\currentstructurecomponenttitle \else
- list = \!!bs\detokenize\expandafter{\currentstructurecomponentlist}\!!es,
- \fi
- },
- \ifx\p_hasnumber\v!yes
- prefixdata = {
- prefix = "#3\c!prefix",
- separatorset = "#3\c!prefixseparatorset",
- conversion = \!!bs#3\c!prefixconversion\!!es,
- conversionset = "#3\c!prefixconversionset",
- set = "#3\c!prefixset",
- % segments = "#3\c!prefixsegments",
- segments = "\p_prefixsegments",
- connector = \!!bs#3\c!prefixconnector\!!es,
- },
- numberdata = { % more helpers here, like compact elsewhere
- numbers = structures.counters.compact("\currentcounter",nil,true),
- group = "#3\c!group",
- groupsuffix = \!!bs#3\c!groupsuffix\!!es,
- counter = "\currentcounter",
- separatorset = "#3\c!numberseparatorset",
- conversion = \!!bs#3\c!numberconversion\!!es,
- conversionset = "#3\c!numberconversionset",
- starter = \!!bs#3\c!numberstarter\!!es,
- stopper = \!!bs#3\c!numberstopper\!!es,
- segments = "#3\c!numbersegments",
- },
- \fi
- userdata = \!!bs\detokenize{#9}\!!es % will be converted to table at the lua end
- }
- }}%
- \xdef\m_strc_counters_last_registered_attribute {\ctxcommand {setinternalreference(nil,nil,\nextinternalreference)}}%
- \xdef\m_strc_counters_last_registered_synchronize{\ctxlatecommand{enhancelist(\m_strc_counters_last_registered_index)}}%
+ \globallet\currentstructurecomponentcoding\s!xml
\else
- \glet\m_strc_counters_last_registered_index \relax
- \glet\m_strc_counters_last_registered_attribute \attributeunsetvalue
- \glet\m_strc_counters_last_registered_synchronize\relax
+ \ifx\currentstructurecomponentexpansion\v!yes
+ \xdef\currentstructurecomponenttitle {#2\c!title}%
+ \xdef\currentstructurecomponentbookmark{#2\c!bookmark}%
+ \xdef\currentstructurecomponentmarking {#2\c!marking}%
+ \xdef\currentstructurecomponentlist {#2\c!list}%
+ \else
+ \xdef\currentstructurecomponenttitle {#3\c!title}%
+ \xdef\currentstructurecomponentbookmark{#3\c!bookmark}%
+ \xdef\currentstructurecomponentmarking {#3\c!marking}%
+ \xdef\currentstructurecomponentlist {#3\c!list}%
+ \iflocation \ifx\currentstructurecomponentbookmark\empty
+ \begingroup
+ \simplifycommands
+ \xdef\currentstructurecomponentbookmark{\detokenize\expandafter{\normalexpanded{#2\c!title}}}%
+ \endgroup
+ \fi \fi
+ \fi
+ \ifx\currentstructurecomponentlist\empty
+ \globallet\currentstructurecomponentlist\currentstructurecomponenttitle
+ \fi
+ \globallet\currentstructurecomponentcoding\s!tex
\fi
- \endgroup}
+ %
+ \setnextinternalreference
+ \scratchcounter\clf_addtolist %{
+ metadata {
+ kind {#1}
+ name {\currentname}
+ % level structures.sections.currentlevel()
+ catcodes \ifx\currentstructurecomponentcatcodes\empty\catcodetable\else\csname\currentstructurecomponentcatcodes\endcsname\fi
+ coding {\currentstructurecomponentcoding}
+ \ifx\currentstructurecomponentcoding\s!xml
+ xmlroot {\xmldocument}
+ \fi
+ \ifx\currentstructurecomponentxmlsetup\empty \else
+ xmlsetup {\currentstructurexmlsetup}
+ \fi
+ }
+ references {
+ internal \nextinternalreference
+ % block {\currentsectionblock}
+ reference {\currentstructurecomponentreference}
+ prefix {\currentstructurecomponentreferenceprefix}
+ % section structures.sections.currentid()
+ }
+ titledata {
+ label {\detokenize\expandafter{\currentstructurecomponentlabel}}
+ title {\detokenize\expandafter{\currentstructurecomponenttitle}}
+ \ifx\currentstructurecomponentbookmark\currentstructurecomponenttitle \else
+ bookmark {\detokenize\expandafter{\currentstructurecomponentbookmark}}
+ \fi
+ \ifx\currentstructurecomponentmarking\currentstructurecomponenttitle \else
+ marking {\detokenize\expandafter{\currentstructurecomponentmarking}}
+ \fi
+ \ifx\currentstructurecomponentlist\currentstructurecomponenttitle \else
+ list {\detokenize\expandafter{\currentstructurecomponentlist}}
+ \fi
+ }
+ \ifx\p_hasnumber\v!yes
+ prefixdata {
+ prefix {#2\c!prefix}
+ separatorset {#2\c!prefixseparatorset}
+ conversion {#2\c!prefixconversion}
+ conversionset {#2\c!prefixconversionset}
+ set {#2\c!prefixset}
+ % segments {#2\c!prefixsegments}
+ segments {\p_prefixsegments}
+ connector {#2\c!prefixconnector}
+ }
+ numberdata { % more helpers here, like compact elsewhere
+ numbers {\currentcounter}
+ group {#2\c!group}
+ groupsuffix {#2\c!groupsuffix}
+ counter {\currentcounter}
+ separatorset {#2\c!numberseparatorset}
+ conversion {#2\c!numberconversion}
+ conversionset {#2\c!numberconversionset}
+ starter {#2\c!numberstarter}
+ stopper {#2\c!numberstopper}
+ segments {#2\c!numbersegments}
+ }
+ \fi
+ userdata {\detokenize{#4}}
+ %}
+ \relax
+ \xdef\m_strc_counters_last_registered_index{\the\scratchcounter}%
+ \clf_setinternalreference internal \nextinternalreference\relax
+ \xdef\m_strc_counters_last_registered_attribute {\the\lastdestinationattribute}%
+ \xdef\m_strc_counters_last_registered_synchronize{\strc_lists_inject_enhance{\m_strc_counters_last_registered_index}{\nextinternalreference}}}
\let\m_strc_counters_last_registered_index \relax
\let\m_strc_counters_last_registered_attribute \relax
@@ -764,4 +810,6 @@
% \fi
% \to \everysetupcounter
+\stopcontextdefinitioncode
+
\protect \endinput
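
The hunks above and below all follow the same pattern: a \ctxlua call that built a Lua table by string concatenation is replaced by a \clf_... macro whose braced key {value} pairs are parsed at the Lua end according to an argument template registered with interfaces.implement. A minimal sketch of that pattern, using a hypothetical "demo" command that is not part of this patch:

local implement = interfaces.implement
local report    = logs.reporter("demo")

local function demo(settings) -- receives one Lua table
    report("prefix %a, conversion %a",settings.prefix,settings.conversion)
end

implement {
    name      = "demo",   -- the TeX end can then call \clf_demo
    actions   = demo,
    arguments = {
        {                 -- one table argument with these keys
            { "prefix" },
            { "conversion" },
        },
    },
}

At the TeX end such a call then looks like \clf_demo { prefix {ABC} conversion {numbers} } \relax, which is the shape used by \clf_prefixedconverted and \clf_savepagedata in this patch.
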
diff --git a/tex/context/base/strc-pag.lua b/tex/context/base/strc-pag.lua
index 02ed5610f..96d26e6f6 100644
--- a/tex/context/base/strc-pag.lua
+++ b/tex/context/base/strc-pag.lua
@@ -25,6 +25,7 @@ local counterdata = counters.data
local variables = interfaces.variables
local context = context
local commands = commands
+local implement = interfaces.implement
local processors = typesetters.processors
local applyprocessor = processors.apply
@@ -34,34 +35,42 @@ local stopapplyprocessor = processors.stopapply
local texsetcount = tex.setcount
local texgetcount = tex.getcount
+local ctx_convertnumber = context.convertnumber
+
-- storage
local collected, tobesaved = allocate(), allocate()
pages.collected = collected
pages.tobesaved = tobesaved
+pages.nofpages = 0
local function initializer()
collected = pages.collected
tobesaved = pages.tobesaved
+ pages.nofpages = #collected
end
job.register('structures.pages.collected', tobesaved, initializer)
local specification = { } -- to be checked
-function pages.save(prefixdata,numberdata)
+function pages.save(prefixdata,numberdata,extradata)
local realpage = texgetcount("realpageno")
local userpage = texgetcount("userpageno")
if realpage > 0 then
if trace_pages then
report_pages("saving page %s.%s",realpage,userpage)
end
+ local viewerprefix = extradata.viewerprefix
+ local state = extradata.state
local data = {
- number = userpage,
- block = sections.currentblock(),
- prefixdata = prefixdata and helpers.simplify(prefixdata),
- numberdata = numberdata and helpers.simplify(numberdata),
+ number = userpage,
+ viewerprefix = viewerprefix ~= "" and viewerprefix or nil,
+ state = state ~= "" and state or nil, -- maybe let "start" be default
+ block = sections.currentblock(),
+ prefixdata = prefixdata and helpers.simplify(prefixdata),
+ numberdata = numberdata and helpers.simplify(numberdata),
}
tobesaved[realpage] = data
if not collected[realpage] then
@@ -97,11 +106,11 @@ function counters.specials.userpage()
end
end
-local f_convert = string.formatters["\\convertnumber{%s}{%s}"]
-
-local function convertnumber(str,n)
- return f_convert(str or "numbers",n)
-end
+-- local f_convert = string.formatters["\\convertnumber{%s}{%s}"]
+--
+-- local function convertnumber(str,n)
+-- return f_convert(str or "numbers",n)
+-- end
function pages.number(realdata,pagespec)
local userpage, block = realdata.number, realdata.block or "" -- sections.currentblock()
@@ -114,12 +123,12 @@ function pages.number(realdata,pagespec)
applyprocessor(starter)
end
if conversion ~= "" then
- context.convertnumber(conversion,userpage)
+ ctx_convertnumber(conversion,userpage)
else
if conversionset == "" then conversionset = "default" end
local theconversion = sets.get("structure:conversions",block,conversionset,1,"numbers") -- to be checked: 1
local data = startapplyprocessor(theconversion)
- context.convertnumber(data or "number",userpage)
+ ctx_convertnumber(data or "number",userpage)
stopapplyprocessor()
end
if stopper ~= "" then
@@ -263,6 +272,24 @@ function pages.is_odd(n)
end
end
+function pages.on_right(n)
+ local pagemode = texgetcount("pageduplexmode")
+ if pagemode == 2 or pagemode == 1 then
+ n = n or texgetcount("realpageno")
+ if texgetcount("pagenoshift") % 2 == 0 then
+ return n % 2 == 0
+ else
+ return n % 2 ~= 0
+ end
+ else
+ return true
+ end
+end
+
+function pages.in_body(n)
+ return texgetcount("pagebodymode") > 0
+end
+
-- move to strc-pag.lua
function counters.analyze(name,counterspecification)
@@ -314,3 +341,61 @@ function sections.prefixedconverted(name,prefixspec,numberspec)
counters.converted(name,numberspec)
end
end
+
+--
+
+implement {
+ name = "savepagedata",
+ actions = pages.save,
+ arguments = {
+ {
+ { "prefix" },
+ { "separatorset" },
+ { "conversionset" },
+ { "conversion" },
+ { "set" },
+ { "segments" },
+ { "connector" },
+ },
+ {
+ { "conversionset" },
+ { "conversion" },
+ { "starter" },
+ { "stopper" },
+ },
+ {
+ { "viewerprefix" },
+ { "state" },
+ }
+ }
+}
+
+implement { -- weird place
+ name = "prefixedconverted",
+ actions = sections.prefixedconverted,
+ arguments = {
+ "string",
+ {
+ { "prefix" },
+ { "separatorset" },
+ { "conversionset" },
+ { "conversion" },
+ { "starter" },
+ { "stopper" },
+ { "set" },
+ { "segments" },
+ { "connector" },
+ },
+ {
+ { "order" },
+ { "separatorset" },
+ { "conversionset" },
+ { "conversion" },
+ { "starter" },
+ { "stopper" },
+ { "segments" },
+ { "type" },
+ { "criterium" },
+ }
+ }
+}
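
Given the savepagedata template registered above, pages.save now receives three tables instead of two: prefixdata, numberdata and the new extradata with the viewerprefix and state fields. A sketch of an equivalent direct call (the values are made up, not taken from this patch):

structures.pages.save (
    { -- prefixdata
        prefix        = "",
        separatorset  = "default",
        conversionset = "",
        conversion    = "",
        set           = "",
        segments      = "",
        connector     = ".",
    },
    { -- numberdata
        conversionset = "",
        conversion    = "numbers",
        starter       = "",
        stopper       = "",
    },
    { -- extradata
        viewerprefix  = "",
        state         = "start",
    }
)

Empty strings for viewerprefix and state are turned into nil before the data is saved, so only pages that actually set them carry the extra fields.
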
diff --git a/tex/context/base/strc-pag.mkiv b/tex/context/base/strc-pag.mkiv
index 85cfeb40f..72f0cf32a 100644
--- a/tex/context/base/strc-pag.mkiv
+++ b/tex/context/base/strc-pag.mkiv
@@ -17,6 +17,8 @@
\unprotect
+\startcontextdefinitioncode
+
% Allocation:
\countdef\realpageno \zerocount \realpageno \plusone
@@ -24,12 +26,14 @@
\countdef\subpageno \plustwo \subpageno \zerocount % !
\countdef\arrangeno \plusthree \arrangeno \zerocount % !
\countdef\pagenoshift\plusfour \pagenoshift\zerocount % !
+\countdef\lastpageno \plusfive \lastpageno \zerocount % !
\let\pageno\userpageno
\def\realfolio{\the\realpageno}
\def\userfolio{\the\userpageno}
\def\subfolio {\the\subpageno }
+\def\lastfolio{\the\lastpageno}
\newtoks\everyinitializepagecounters
@@ -101,27 +105,34 @@
\strc_counters_set\s!realpage\realpageno
\strc_counters_set\s!userpage\userpageno
\strc_counters_set\s!subpage \subpageno
+ \lastpageno\lastcountervalue[\s!realpage]\relax
\to \everyinitializepagecounters
\let\setuppagenumber\setupuserpagenumber
\let\resetpagenumber\resetuserpagenumber
+% invisible =
+
\def\strc_pagenumbers_page_state_save % \normalexpanded?
- {\ctxlua{structures.pages.save({
- prefix = "\namedcounterparameter\s!userpage\c!prefix",
- separatorset = "\namedcounterparameter\s!userpage\c!prefixseparatorset",
- conversion = "\namedcounterparameter\s!userpage\c!prefixconversion",
- conversionset = "\namedcounterparameter\s!userpage\c!prefixconversionset",
- set = "\namedcounterparameter\s!userpage\c!prefixset",
- segments = "\namedcounterparameter\s!userpage\c!prefixsegments",
- connector = \!!bs\namedcounterparameter\s!userpage\c!prefixconnector\!!es,
- },{
- conversion = "\namedcounterparameter\s!userpage\c!numberconversion",
- conversionset = "\namedcounterparameter\s!userpage\c!numberconversionset",
- starter = \!!bs\namedcounterparameter\s!userpage\c!numberstarter\!!es,
- stopper = \!!bs\namedcounterparameter\s!userpage\c!numberstopper\!!es,
- }
- )}}
+ {\clf_savepagedata
+ {
+ prefix {\namedcounterparameter\s!userpage\c!prefix}
+ separatorset {\namedcounterparameter\s!userpage\c!prefixseparatorset}
+ conversion {\namedcounterparameter\s!userpage\c!prefixconversion}
+ conversionset {\namedcounterparameter\s!userpage\c!prefixconversionset}
+ set {\namedcounterparameter\s!userpage\c!prefixset}
+ segments {\namedcounterparameter\s!userpage\c!prefixsegments}
+ connector {\namedcounterparameter\s!userpage\c!prefixconnector}
+ }{
+ conversion {\namedcounterparameter\s!userpage\c!numberconversion}
+ conversionset {\namedcounterparameter\s!userpage\c!numberconversionset}
+ starter {\namedcounterparameter\s!userpage\c!numberstarter}
+ stopper {\namedcounterparameter\s!userpage\c!numberstopper}
+ }{
+ viewerprefix {\namedcounterparameter\s!userpage\c!viewerprefix}
+ state {\namedcounterparameter\s!userpage\c!state}
+ }%
+ \relax}
\prependtoks
\strc_pagenumbers_page_state_save
@@ -296,6 +307,12 @@
\trackingmarginnotesfalse
\fi
\fi
+ \pageduplexmode
+ \ifsinglesided
+ \ifdoublesided\plustwo\else\zerocount\fi
+ \else
+ \ifdoublesided\plusone\else\zerocount\fi
+ \fi
\page_backgrounds_recalculate
\strc_pagenumbers_set_location
\to \everysetuppagenumbering
@@ -457,4 +474,6 @@
\initializepagecounters
+\stopcontextdefinitioncode
+
\protect \endinput
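
In the strc-ref.lua changes below, the defined table gets setmetatableindex(defined,"table"), which lets resolve() write defined[prefix][ri] or defined[""][ri] without any nil checks: looking up an unknown prefix silently creates an empty subtable instead of raising an indexing error. A standalone sketch of the same idea in plain Lua (not the actual ConTeXt helper):

local defined = { }

setmetatable(defined, {
    __index = function(t,k)
        local v = { }   -- auto-create the per-prefix table on first access
        t[k] = v
        return v
    end
})

defined["chap"]["intro"] = "some reference" -- no explicit initialization needed
print(defined["unknown"]["missing"])        -- prints nil, no indexing error

The old code guarded every lookup with "d = defined[prefix] d = d and d[ri]"; the metatable makes those guards unnecessary.
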
diff --git a/tex/context/base/strc-ref.lua b/tex/context/base/strc-ref.lua
index 938af1ad7..2a1d0dd59 100644
--- a/tex/context/base/strc-ref.lua
+++ b/tex/context/base/strc-ref.lua
@@ -16,7 +16,7 @@ if not modules then modules = { } end modules ['strc-ref'] = {
local format, find, gmatch, match, strip = string.format, string.find, string.gmatch, string.match, string.strip
local floor = math.floor
-local rawget, tonumber = rawget, tonumber
+local rawget, tonumber, type = rawget, tonumber, type
local lpegmatch = lpeg.match
local insert, remove, copytable = table.insert, table.remove, table.copy
local formatters = string.formatters
@@ -44,19 +44,21 @@ local report_importing = logs.reporter("references","importing")
local report_empty = logs.reporter("references","empty")
local variables = interfaces.variables
-local constants = interfaces.constants
+local v_default = variables.default
+local v_url = variables.url
+local v_file = variables.file
+local v_unknown = variables.unknown
+local v_page = variables.page
+local v_auto = variables.auto
+
local context = context
local commands = commands
+local implement = interfaces.implement
local texgetcount = tex.getcount
local texsetcount = tex.setcount
local texconditionals = tex.conditionals
-local v_default = variables.default
-local v_url = variables.url
-local v_file = variables.file
-local v_unknown = variables.unknown
-local v_yes = variables.yes
local productcomponent = resolvers.jobs.productcomponent
local justacomponent = resolvers.jobs.justacomponent
@@ -75,6 +77,8 @@ local references = structures.references
local lists = structures.lists
local counters = structures.counters
+local jobpositions = job.positions
+
-- some might become local
references.defined = references.defined or allocate()
@@ -82,6 +86,7 @@ references.defined = references.defined or allocate()
local defined = references.defined
local derived = allocate()
local specials = allocate()
+local functions = allocate()
local runners = allocate()
local internals = allocate()
local filters = allocate()
@@ -91,9 +96,13 @@ local tobesaved = allocate()
local collected = allocate()
local tobereferred = allocate()
local referred = allocate()
+local usedinternals = allocate()
+local flaginternals = allocate()
+local usedviews = allocate()
references.derived = derived
references.specials = specials
+references.functions = functions
references.runners = runners
references.internals = internals
references.filters = filters
@@ -103,6 +112,9 @@ references.tobesaved = tobesaved
references.collected = collected
references.tobereferred = tobereferred
references.referred = referred
+references.usedinternals = usedinternals
+references.flaginternals = flaginternals
+references.usedviews = usedviews
local splitreference = references.splitreference
local splitprefix = references.splitcomponent -- replaces: references.splitprefix
@@ -111,6 +123,22 @@ local componentsplitter = references.componentsplitter
local currentreference = nil
+local txtcatcodes = catcodes.numbers.txtcatcodes -- or just use "txtcatcodes"
+local context_delayed = context.delayed
+
+local ctx_pushcatcodes = context.pushcatcodes
+local ctx_popcatcodes = context.popcatcodes
+local ctx_dofinishreference = context.dofinishreference
+local ctx_dofromurldescription = context.dofromurldescription
+local ctx_dofromurlliteral = context.dofromurlliteral
+local ctx_dofromfiledescription = context.dofromfiledescription
+local ctx_dofromfileliteral = context.dofromfileliteral
+local ctx_expandreferenceoperation = context.expandreferenceoperation
+local ctx_expandreferencearguments = context.expandreferencearguments
+local ctx_getreferencestructureprefix = context.getreferencestructureprefix
+local ctx_convertnumber = context.convertnumber
+local ctx_emptyreference = context.emptyreference
+
storage.register("structures/references/defined", references.defined, "structures.references.defined")
local initializers = { }
@@ -119,6 +147,7 @@ local finalizers = { }
function references.registerinitializer(func) -- we could use a token register instead
initializers[#initializers+1] = func
end
+
function references.registerfinalizer(func) -- we could use a token register instead
finalizers[#finalizers+1] = func
end
@@ -129,12 +158,32 @@ local function initializer() -- can we use a tobesaved as metatable for collecte
for i=1,#initializers do
initializers[i](tobesaved,collected)
end
+ for prefix, list in next, collected do
+ for tag, data in next, list do
+ local r = data.references
+ local i = r.internal
+ if i then
+ internals[i] = data
+ usedinternals[i] = r.used
+ end
+ end
+ end
end
local function finalizer()
for i=1,#finalizers do
finalizers[i](tobesaved)
end
+ for prefix, list in next, tobesaved do
+ for tag, data in next, list do
+ local r = data.references
+ local i = r.internal
+ local f = flaginternals[i]
+ if f then
+ r.used = usedviews[i] or true
+ end
+ end
+ end
end
job.register('structures.references.collected', tobesaved, initializer, finalizer)
@@ -148,6 +197,38 @@ local function initializer() -- can we use a tobesaved as metatable for collecte
nofreferred = #referred
end
+-- no longer done this way
+
+-- references.resolvers = references.resolvers or { }
+-- local resolvers = references.resolvers
+--
+-- function resolvers.section(var)
+-- local vi = lists.collected[var.i[2]]
+-- if vi then
+-- var.i = vi
+-- var.r = (vi.references and vi.references.realpage) or (vi.pagedata and vi.pagedata.realpage) or 1
+-- else
+-- var.i = nil
+-- var.r = 1
+-- end
+-- end
+--
+-- resolvers.float = resolvers.section
+-- resolvers.description = resolvers.section
+-- resolvers.formula = resolvers.section
+-- resolvers.note = resolvers.section
+--
+-- function resolvers.reference(var)
+-- local vi = var.i[2]
+-- if vi then
+-- var.i = vi
+-- var.r = (vi.references and vi.references.realpage) or (vi.pagedata and vi.pagedata.realpage) or 1
+-- else
+-- var.i = nil
+-- var.r = 1
+-- end
+-- end
+
-- We make the array sparse (maybe a finalizer should optionally return a table) because
-- there can be quite some page links involved. We only store one action number per page
-- which is normally good enough for what we want (e.g. see above/below) and we do
@@ -215,8 +296,6 @@ local function referredpage(n)
return texgetcount("realpageno")
end
--- setmetatableindex(referred,function(t,k) return referredpage(k) end )
-
references.referredpage = referredpage
function references.registerpage(n) -- called in the backend code
@@ -246,16 +325,15 @@ local function setnextorder(kind,name)
texsetcount("global","locationorder",lastorder)
end
-references.setnextorder = setnextorder
-function references.setnextinternal(kind,name)
+local function setnextinternal(kind,name)
setnextorder(kind,name) -- always incremented with internal
local n = texgetcount("locationcount") + 1
texsetcount("global","locationcount",n)
return n
end
-function references.currentorder(kind,name)
+local function currentorder(kind,name)
return orders[kind] and orders[kind][name] or lastorder
end
@@ -266,43 +344,52 @@ local function setcomponent(data)
local references = data and data.references
if references then
references.component = component
+ if references.prefix == component then
+ references.prefix = nil
+ end
end
return component
end
-- but for the moment we do it here (experiment)
end
-commands.setnextinternalreference = references.setnextinternal
+references.setnextorder = setnextorder
+references.setnextinternal = setnextinternal
+references.currentorder = currentorder
+references.setcomponent = setcomponent
-function commands.currentreferenceorder(kind,name)
- context(references.currentorder(kind,name))
-end
+implement {
+ name = "setnextreferenceorder",
+ actions = setnextorder,
+ arguments = { "string", "string" }
+}
-references.setcomponent = setcomponent
+implement {
+ name = "setnextinternalreference",
+ actions = setnextinternal,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "currentreferenceorder",
+ actions = { currentorder, context },
+ arguments = { "string", "string" }
+}
-function references.set(kind,prefix,tag,data)
--- setcomponent(data)
- local pd = tobesaved[prefix] -- nicer is a metatable
+function references.set(data)
+ local references = data.references
+ local reference = references.reference
+ if not reference or reference == "" then
+ -- report_references("invalid reference") -- harmless
+ return 0
+ end
+ local prefix = references.prefix or ""
+ local pd = tobesaved[prefix] -- nicer is a metatable
if not pd then
pd = { }
tobesaved[prefix] = pd
end
local n = 0
- -- for ref in gmatch(tag,"[^,]+") do
- -- if ref ~= "" then
- -- if check_duplicates and pd[ref] then
- -- if prefix and prefix ~= "" then
- -- report_references("redundant reference %a in namespace %a",ref,prefix)
- -- else
- -- report_references("redundant reference %a",ref)
- -- end
- -- else
- -- n = n + 1
- -- pd[ref] = data
- -- context.dofinishsomereference(kind,prefix,ref)
- -- end
- -- end
- -- end
local function action(ref)
if ref == "" then
-- skip
@@ -315,145 +402,201 @@ function references.set(kind,prefix,tag,data)
else
n = n + 1
pd[ref] = data
- context.dofinishsomereference(kind,prefix,ref)
+ local r = data.references
+ ctx_dofinishreference(prefix or "",ref or "",r and r.internal or 0)
end
end
- process_settings(tag,action)
+ process_settings(reference,action)
return n > 0
end
+-- function references.enhance(prefix,tag)
+-- local l = tobesaved[prefix][tag]
+-- if l then
+-- l.references.realpage = texgetcount("realpageno")
+-- end
+-- end
+
+local getpos = function() getpos = backends.codeinjections.getpos return getpos () end
+
+local function synchronizepage(reference) -- non public helper
+ reference.realpage = texgetcount("realpageno")
+ if jobpositions.used then
+ reference.x, reference.y = getpos()
+ end
+end
+
+references.synchronizepage = synchronizepage
+
function references.enhance(prefix,tag)
local l = tobesaved[prefix][tag]
if l then
- l.references.realpage = texgetcount("realpageno")
+ synchronizepage(l.references)
end
end
-commands.enhancereference = references.enhance
+implement {
+ name = "enhancereference",
+ actions = references.enhance,
+ arguments = { "string", "string" }
+}
-- -- -- related to strc-ini.lua -- -- --
-references.resolvers = references.resolvers or { }
-local resolvers = references.resolvers
+-- no metatable here .. better be sparse
-local function getfromlist(var)
- local vi = var.i
- if vi then
- vi = vi[3] or lists.collected[vi[2]]
- if vi then
- local r = vi.references and vi.references
- if r then
- r = r.realpage
- end
- if not r then
- r = vi.pagedata and vi.pagedata
- if r then
- r = r.realpage
+local function register_from_lists(collected,derived,pages,sections)
+ local derived_g = derived[""] -- global
+ local derived_p = nil
+ local derived_c = nil
+ local prefix = nil
+ local component = nil
+ local entry = nil
+ if not derived_g then
+ derived_g = { }
+ derived[""] = derived_g
+ end
+ local function action(s)
+ if trace_referencing then
+ report_references("list entry %a provides %a reference %a on realpage %a",i,kind,s,realpage)
+ end
+ if derived_p and not derived_p[s] then
+ derived_p[s] = entry
+ end
+ if derived_c and not derived_c[s] then
+ derived_c[s] = entry
+ end
+ if not derived_g[s] then
+ derived_g[s] = entry -- first wins
+ end
+ end
+ for i=1,#collected do
+ entry = collected[i]
+ local metadata = entry.metadata
+ if metadata then
+ local kind = metadata.kind -- why this check
+ if kind then
+ local references = entry.references
+ if references then
+ local reference = references.reference
+ if reference and reference ~= "" then
+ local realpage = references.realpage
+ if realpage then
+ prefix = references.prefix
+ component = references.component
+ if prefix and prefix ~= "" then
+ derived_p = derived[prefix]
+ if not derived_p then
+ derived_p = { }
+ derived[prefix] = derived_p
+ end
+ end
+ if component and component ~= "" and component ~= prefix then
+ derived_c = derived[component]
+ if not derived_c then
+ derived_c = { }
+ derived[component] = derived_c
+ end
+ end
+ process_settings(reference,action)
+ end
+ end
end
end
- var.i = vi
- var.r = r or 1
- else
- var.i = nil
- var.r = 1
end
- else
- var.i = nil
- var.r = 1
end
+ -- inspect(derived)
end
--- resolvers.section = getfromlist
--- resolvers.float = getfromlist
--- resolvers.description = getfromlist
--- resolvers.formula = getfromlist
--- resolvers.note = getfromlist
-
-setmetatableindex(resolvers,function(t,k)
- local v = getfromlist
- resolvers[k] = v
- return v
-end)
-
-function resolvers.reference(var)
- local vi = var.i[2] -- check
- if vi then
- var.i = vi
- var.r = (vi.references and vi.references.realpage) or (vi.pagedata and vi.pagedata.realpage) or 1
- else
- var.i = nil
- var.r = 1
- end
-end
+references.registerinitializer(function() register_from_lists(lists.collected,derived) end)
-local function register_from_lists(collected,derived,pages,sections)
- local g = derived[""] if not g then g = { } derived[""] = g end -- global
- for i=1,#collected do
- local entry = collected[i]
- local m, r = entry.metadata, entry.references
- if m and r then
- local reference = r.reference or ""
- local prefix = r.referenceprefix or ""
- local component = r.component and r.component or ""
- if reference ~= "" then
- local kind, realpage = m.kind, r.realpage
- if kind and realpage then
- local d = derived[prefix]
- if not d then
- d = { }
- derived[prefix] = d
+-- tracing
+
+local function collectbypage(tracedpages)
+ -- lists
+ do
+ local collected = structures.lists.collected
+ local data = nil
+ local function action(reference)
+ local prefix = data.prefix
+ local component = data.component
+ local realpage = data.realpage
+ if realpage then
+ local pagelist = rawget(tracedpages,realpage)
+ local internal = data.internal or 0
+ local prefix = (prefix ~= "" and prefix) or (component ~= "" and component) or ""
+ local pagedata = { prefix, reference, internal }
+ if pagelist then
+ pagelist[#pagelist+1] = pagedata
+ else
+ tracedpages[realpage] = { pagedata }
+ end
+ if internal > 0 then
+ data.usedprefix = prefix
+ end
+ end
+ end
+ for i=1,#collected do
+ local entry = collected[i]
+ local metadata = entry.metadata
+ if metadata and metadata.kind then
+ data = entry.references
+ if data then
+ local reference = data.reference
+ if reference and reference ~= "" then
+ process_settings(reference,action)
end
- local c = derived[component]
- if not c then
- c = { }
- derived[component] = c
+ end
+ end
+ end
+ end
+ -- references
+ do
+ for prefix, list in next, collected do
+ for reference, entry in next, list do
+ local data = entry.references
+ if data then
+ local realpage = data.realpage
+ local internal = data.internal or 0
+ local pagelist = rawget(tracedpages,realpage)
+ local pagedata = { prefix, reference, internal }
+ if pagelist then
+ pagelist[#pagelist+1] = pagedata
+ else
+ tracedpages[realpage] = { pagedata }
end
- local t = { kind, i, entry }
- -- for s in gmatch(reference,"%s*([^,]+)") do
- -- if trace_referencing then
- -- report_references("list entry %a provides %a reference %a on realpage %a",i,kind,s,realpage)
- -- end
- -- c[s] = c[s] or t -- share them
- -- d[s] = d[s] or t -- share them
- -- g[s] = g[s] or t -- first wins
- -- end
- local function action(s)
- if trace_referencing then
- report_references("list entry %a provides %a reference %a on realpage %a",i,kind,s,realpage)
- end
- c[s] = c[s] or t -- share them
- d[s] = d[s] or t -- share them
- g[s] = g[s] or t -- first wins
+ if internal > 0 then
+ data.usedprefix = prefix
end
- process_settings(reference,action)
end
end
end
end
--- inspect(derived)
end
-references.registerinitializer(function() register_from_lists(lists.collected,derived) end)
+references.tracedpages = table.setmetatableindex(allocate(),function(t,k)
+ if collectbypage then
+ collectbypage(t)
+ collectbypage = nil
+ end
+ return rawget(t,k)
+end)
-- urls
-references.urls = references.urls or { }
-references.urls.data = references.urls.data or { }
-
-local urls = references.urls.data
+local urls = references.urls or { }
+references.urls = urls
+local urldata = urls.data or { }
+urls.data = urldata
-function references.urls.define(name,url,file,description)
+function urls.define(name,url,file,description)
if name and name ~= "" then
- urls[name] = { url or "", file or "", description or url or file or ""}
+ urldata[name] = { url or "", file or "", description or url or file or ""}
end
end
-local pushcatcodes = context.pushcatcodes
-local popcatcodes = context.popcatcodes
-local txtcatcodes = catcodes.numbers.txtcatcodes -- or just use "txtcatcodes"
-
-function references.urls.get(name)
- local u = urls[name]
+function urls.get(name)
+ local u = urldata[name]
if u then
local url, file = u[1], u[2]
if file and file ~= "" then
@@ -464,59 +607,93 @@ function references.urls.get(name)
end
end
-function commands.geturl(name)
- local url = references.urls.get(name)
+function urls.found(name)
+ return urldata[name]
+end
+
+local function geturl(name)
+ local url = urls.get(name)
if url and url ~= "" then
- pushcatcodes(txtcatcodes)
+ ctx_pushcatcodes(txtcatcodes)
context(url)
- popcatcodes()
+ ctx_popcatcodes()
end
end
--- function commands.gethyphenatedurl(name,...)
--- local url = references.urls.get(name)
--- if url and url ~= "" then
--- hyphenatedurl(url,...)
--- end
--- end
+implement {
+ name = "doifelseurldefined",
+ actions = { urls.found, commands.doifelse },
+ arguments = "string"
+}
-function commands.doifurldefinedelse(name)
- commands.doifelse(urls[name])
-end
+implement {
+ name = "useurl",
+ actions = urls.define,
+ arguments = { "string", "string", "string", "string" }
+}
-commands.useurl= references.urls.define
+implement {
+ name = "geturl",
+ actions = geturl,
+ arguments = "string",
+}
-- files
-references.files = references.files or { }
-references.files.data = references.files.data or { }
-
-local files = references.files.data
+local files = references.files or { }
+references.files = files
+local filedata = files.data or { }
+files.data = filedata
-function references.files.define(name,file,description)
+function files.define(name,file,description)
if name and name ~= "" then
- files[name] = { file or "", description or file or "" }
+ filedata[name] = { file or "", description or file or "" }
end
end
-function references.files.get(name,method,space) -- method: none, before, after, both, space: yes/no
- local f = files[name]
+function files.get(name,method,space) -- method: none, before, after, both, space: yes/no
+ local f = filedata[name]
if f then
context(f[1])
end
end
-function commands.doiffiledefinedelse(name)
- commands.doifelse(files[name])
+function files.found(name)
+ return filedata[name]
end
-commands.usefile= references.files.define
+local function getfile(name)
+ local fil = files.get(name)
+ if fil and fil ~= "" then
+ ctx_pushcatcodes(txtcatcodes)
+ context(fil)
+ ctx_popcatcodes()
+ end
+end
+
+implement {
+ name = "doifelsefiledefined",
+ actions = { files.found, commands.doifelse },
+ arguments = "string"
+}
+
+implement {
+ name = "usefile",
+ actions = files.define,
+ arguments = { "string", "string", "string" }
+}
+
+implement {
+ name = "getfile",
+ actions = getfile,
+ arguments = "string"
+}
-- helpers
function references.checkedfile(whatever) -- return whatever if not resolved
if whatever then
- local w = files[whatever]
+ local w = filedata[whatever]
if w then
return w[1]
else
@@ -527,7 +704,7 @@ end
function references.checkedurl(whatever) -- return whatever if not resolved
if whatever then
- local w = urls[whatever]
+ local w = urldata[whatever]
if w then
local u, f = w[1], w[2]
if f and f ~= "" then
@@ -543,11 +720,11 @@ end
function references.checkedfileorurl(whatever,default) -- return nil, nil if not resolved
if whatever then
- local w = files[whatever]
+ local w = filedata[whatever]
if w then
return w[1], nil
else
- local w = urls[whatever]
+ local w = urldata[whatever]
if w then
local u, f = w[1], w[2]
if f and f ~= "" then
@@ -563,25 +740,25 @@ end
-- programs
-references.programs = references.programs or { }
-references.programs.data = references.programs.data or { }
+local programs = references.programs or { }
+references.programs = programs
+local programdata = programs.data or { }
+programs.data = programdata
-local programs = references.programs.data
-
-function references.programs.define(name,file,description)
+function programs.define(name,file,description)
if name and name ~= "" then
- programs[name] = { file or "", description or file or ""}
+ programdata[name] = { file or "", description or file or ""}
end
end
-function references.programs.get(name)
- local f = programs[name]
+function programs.get(name)
+ local f = programdata[name]
return f and f[1]
end
function references.checkedprogram(whatever) -- return whatever if not resolved
if whatever then
- local w = programs[whatever]
+ local w = programdata[whatever]
if w then
return w[1]
else
@@ -590,23 +767,33 @@ function references.checkedprogram(whatever) -- return whatever if not resolved
end
end
-commands.defineprogram = references.programs.define
+implement {
+ name = "defineprogram",
+ actions = programs.define,
+ arguments = { "string", "string", "string" }
+}
-function commands.getprogram(name)
- local f = programs[name]
- if f then
- context(f[1])
+local function getprogram(name)
+ local p = programdata[name]
+ if p then
+ context(p[1])
end
end
+implement {
+ name = "getprogram",
+ actions = getprogram,
+ arguments = "string"
+}
+
-- shared by urls and files
-function references.whatfrom(name)
- context((urls[name] and v_url) or (files[name] and v_file) or v_unknown)
-end
+-- function references.whatfrom(name)
+-- context((urldata[name] and v_url) or (filedata[name] and v_file) or v_unknown)
+-- end
function references.from(name)
- local u = urls[name]
+ local u = urldata[name]
if u then
local url, file, description = u[1], u[2], u[3]
if description ~= "" then
@@ -618,7 +805,7 @@ function references.from(name)
return url
end
else
- local f = files[name]
+ local f = filedata[name]
if f then
local file, description = f[1], f[2]
if description ~= "" then
@@ -630,34 +817,40 @@ function references.from(name)
end
end
-function commands.from(name)
- local u = urls[name]
+local function from(name)
+ local u = urldata[name]
if u then
local url, file, description = u[1], u[2], u[3]
if description ~= "" then
- context.dofromurldescription(description)
+ ctx_dofromurldescription(description)
-- ok
elseif file and file ~= "" then
- context.dofromurlliteral(url .. "/" .. file)
+ ctx_dofromurlliteral(url .. "/" .. file)
else
- context.dofromurlliteral(url)
+ ctx_dofromurlliteral(url)
end
else
- local f = files[name]
+ local f = filedata[name]
if f then
local file, description = f[1], f[2]
if description ~= "" then
- context.dofromfiledescription(description)
+ ctx_dofromfiledescription(description)
else
- context.dofromfileliteral(file)
+ ctx_dofromfileliteral(file)
end
end
end
end
+implement {
+ name = "from",
+ actions = from,
+ arguments = "string"
+}
+
function references.define(prefix,reference,list)
local d = defined[prefix] if not d then d = { } defined[prefix] = d end
- d[reference] = { "defined", list }
+ d[reference] = list
end
function references.reset(prefix,reference)
@@ -667,44 +860,34 @@ function references.reset(prefix,reference)
end
end
-commands.definereference = references.define
-commands.resetreference = references.reset
-
--- \primaryreferencefoundaction
--- \secondaryreferencefoundaction
--- \referenceunknownaction
-
--- t.special t.operation t.arguments t.outer t.inner
+implement {
+ name = "definereference",
+ actions = references.define,
+ arguments = { "string", "string", "string" }
+}
--- to what extend do we check the non prefixed variant
+implement {
+ name = "resetreference",
+ actions = references.reset,
+ arguments = { "string", "string" }
+}
-local strict = false
+setmetatableindex(defined,"table")
local function resolve(prefix,reference,args,set) -- we start with prefix,reference
if reference and reference ~= "" then
if not set then
set = { prefix = prefix, reference = reference }
else
- set.reference = set.reference or reference
- set.prefix = set.prefix or prefix
+ if not set.reference then set.reference = reference end
+ if not set.prefix then set.prefix = prefix end
end
local r = settings_to_array(reference)
for i=1,#r do
local ri = r[i]
- local d
- if strict then
- d = defined[prefix] or defined[""]
- d = d and d[ri]
- else
- d = defined[prefix]
- d = d and d[ri]
- if not d then
- d = defined[""]
- d = d and d[ri]
- end
- end
+ local d = defined[prefix][ri] or defined[""][ri]
if d then
- resolve(prefix,d[2],nil,set)
+ resolve(prefix,d,nil,set)
else
local var = splitreference(ri)
if var then
@@ -712,20 +895,10 @@ local function resolve(prefix,reference,args,set) -- we start with prefix,refere
local vo, vi = var.outer, var.inner
if not vo and vi then
-- to be checked
- if strict then
- d = defined[prefix] or defined[""]
- d = d and d[vi]
- else
- d = defined[prefix]
- d = d and d[vi]
- if not d then
- d = defined[""]
- d = d and d[vi]
- end
- end
+ d = defined[prefix][vi] or defined[""][vi]
--
if d then
- resolve(prefix,d[2],var.arguments,set) -- args can be nil
+ resolve(prefix,d,var.arguments,set) -- args can be nil
else
if args then var.arguments = args end
set[#set+1] = var
@@ -752,35 +925,47 @@ end
references.currentset = nil
-function commands.setreferenceoperation(k,v)
+local function setreferenceoperation(k,v)
references.currentset[k].operation = v
end
-function commands.setreferencearguments(k,v)
+local function setreferencearguments(k,v)
references.currentset[k].arguments = v
end
-local expandreferenceoperation = context.expandreferenceoperation
-local expandreferencearguments = context.expandreferencearguments
-
function references.expandcurrent() -- todo: two booleans: o_has_tex& a_has_tex
local currentset = references.currentset
if currentset and currentset.has_tex then
for i=1,#currentset do
local ci = currentset[i]
local operation = ci.operation
- if operation and find(operation,"\\") then -- if o_has_tex then
- expandreferenceoperation(i,operation)
+ if operation and find(operation,"\\",1,true) then -- if o_has_tex then
+ ctx_expandreferenceoperation(i,operation)
end
local arguments = ci.arguments
- if arguments and find(arguments,"\\") then -- if a_has_tex then
- expandreferencearguments(i,arguments)
+ if arguments and find(arguments,"\\",1,true) then -- if a_has_tex then
+ ctx_expandreferencearguments(i,arguments)
end
end
end
end
-commands.expandcurrentreference = references.expandcurrent -- for the moment the same
+implement {
+ name = "expandcurrentreference",
+ actions = references.expandcurrent
+}
+
+implement {
+ name = "setreferenceoperation",
+ actions = setreferenceoperation,
+ arguments = { "integer", "string" }
+}
+
+implement {
+ name = "setreferencearguments",
+ actions = setreferencearguments,
+ arguments = { "integer", "string" }
+}
local externals = { }
@@ -824,7 +1009,7 @@ local function loadexternalreferences(name,utilitydata)
local realpage = references.realpage
if kind and realpage then
references.pagedata = pages[realpage]
- local prefix = references.referenceprefix or ""
+ local prefix = references.prefix or ""
local target = external[prefix]
if not target then
target = { }
@@ -856,8 +1041,8 @@ end
local externalfiles = { }
-table.setmetatableindex(externalfiles, function(t,k)
- local v = files[k]
+setmetatableindex(externalfiles, function(t,k)
+ local v = filedata[k]
if not v then
v = { k, k }
end
@@ -865,7 +1050,7 @@ table.setmetatableindex(externalfiles, function(t,k)
return v
end)
-table.setmetatableindex(externals,function(t,k) -- either or not automatically
+setmetatableindex(externals, function(t,k) -- either or not automatically
local filename = externalfiles[k][1] -- filename
local fullname = file.replacesuffix(filename,"tuc")
if lfs.isfile(fullname) then -- todo: use other locator
@@ -926,7 +1111,7 @@ local function loadproductreferences(productname,componentname,utilitydata)
local realpage = references.realpage
if kind and realpage then
references.pagedata = pages[realpage]
- local prefix = references.referenceprefix or ""
+ local prefix = references.prefix or ""
local component = references.component
local ctarget, ptarget
if not component or component == componentname then
@@ -952,22 +1137,6 @@ local function loadproductreferences(productname,componentname,utilitydata)
ptarget = { }
productreferences[prefix] = ptarget
end
- -- for s in gmatch(reference,"%s*([^,]+)") do
- -- if ptarget then
- -- if trace_importing then
- -- report_importing("registering %s reference, kind %a, name %a, prefix %a, reference %a",
- -- "product",kind,productname,prefix,s)
- -- end
- -- ptarget[s] = ptarget[s] or entry
- -- end
- -- if ctarget then
- -- if trace_importing then
- -- report_importing("registering %s reference, kind %a, name %a, prefix %a, referenc %a",
- -- "component",kind,productname,prefix,s)
- -- end
- -- ctarget[s] = ctarget[s] or entry
- -- end
- -- end
local function action(s)
if ptarget then
if trace_importing then
@@ -1062,7 +1231,7 @@ references.registerinitializer(function(tobesaved,collected)
productdata.components = componentlist(job.structure.collected) or { }
end)
-function structures.references.loadpresets(product,component) -- we can consider a special components hash
+function references.loadpresets(product,component) -- we can consider a special components hash
if product and component and product~= "" and component ~= "" and not productdata.product then -- maybe: productdata.filename ~= filename
productdata.product = product
productdata.component = component
@@ -1082,13 +1251,13 @@ function structures.references.loadpresets(product,component) -- we can consider
end
end
-structures.references.productdata = productdata
+references.productdata = productdata
local useproduct = commands.useproduct
if useproduct then
- function commands.useproduct(product)
+ local function newuseproduct(product)
useproduct(product)
if texconditionals.autocrossfilereferences then
local component = justacomponent()
@@ -1096,11 +1265,18 @@ if useproduct then
if trace_referencing or trace_importing then
report_references("loading presets for component %a of product %a",component,product)
end
- structures.references.loadpresets(product,component)
+ references.loadpresets(product,component)
end
end
end
+ implement {
+ name = "useproduct",
+ actions = newuseproduct,
+ arguments = "string",
+ overload = true,
+ }
+
end
-- productdata.firstsection.numberdata.numbers
@@ -1194,7 +1370,7 @@ local function identify_arguments(set,var,i)
local s = specials[var.inner]
if s then
-- inner{argument}
- var.kind = "special with arguments"
+ var.kind = "special operation with arguments"
else
var.error = "unknown inner or special"
end
@@ -1204,114 +1380,105 @@ local function identify_arguments(set,var,i)
return var
end
-local function identify_inner(set,var,prefix,collected,derived,tobesaved)
+-- needs checking: if we don't do too much (redundant) checking now
+-- inner ... we could move the prefix logic into the parser so that we have 'm for each entry
+-- foo:bar -> foo == prefix (first we try the global one)
+-- -:bar -> ignore prefix
+
+local function finish_inner(var,p,i)
+ var.kind = "inner"
+ var.i = i
+ var.p = p
+ var.r = (i.references and i.references.realpage) or (i.pagedata and i.pagedata.realpage) or 1
+ return var
+end
+
+local function identify_inner(set,var,prefix,collected,derived)
local inner = var.inner
- local outer = var.outer
- -- inner ... we could move the prefix logic into the parser so that we have 'm for each entry
- -- foo:bar -> foo == prefix (first we try the global one)
- -- -:bar -> ignore prefix
- local p, i = prefix, nil
- local splitprefix, splitinner
-- the next test is a safeguard when references are auto loaded from outer
- if inner then
- splitprefix, splitinner = lpegmatch(prefixsplitter,inner)
+ if not inner or inner == "" then
+ return false
end
- -- these are taken from other anonymous references
+ local splitprefix, splitinner = lpegmatch(prefixsplitter,inner)
if splitprefix and splitinner then
+ -- we check for a prefix:reference instance in the regular set of collected
+ -- references; a special case is -: which forces a lookup in the global list
if splitprefix == "-" then
- i = collected[""]
- i = i and i[splitinner]
+ local i = collected[""]
if i then
- p = ""
- end
- else
- i = collected[splitprefix]
- i = i and i[splitinner]
- if i then
- p = splitprefix
+ i = i[splitinner]
+ if i then
+ return finish_inner(var,"",i)
+ end
end
end
- end
- -- todo: strict here
- if not i then
- i = collected[prefix]
- i = i and i[inner]
- if i then
- p = prefix
- end
- end
- if not i and prefix ~= "" then
- i = collected[""]
- i = i and i[inner]
+ local i = collected[splitprefix]
if i then
- p = ""
+ i = i[splitinner]
+ if i then
+ return finish_inner(var,splitprefix,i)
+ end
end
- end
- if i then
- var.i = { "reference", i }
- resolvers.reference(var)
- var.kind = "inner"
- var.p = p
- elseif derived then
- -- these are taken from other data structures (like lists)
- if splitprefix and splitinner then
+ if derived then
+ -- next we look for a reference in the regular set of collected references
+ -- using the prefix that is active at this moment (so we overload the given
+ -- these are taken from other data structures (like lists)
if splitprefix == "-" then
- i = derived[""]
- i = i and i[splitinner]
+ local i = derived[""]
if i then
- p = ""
+ i = i[splitinner]
+ if i then
+ return finish_inner(var,"",i)
+ end
end
- else
- i = derived[splitprefix]
- i = i and i[splitinner]
+ end
+ local i = derived[splitprefix]
+ if i then
+ i = i[splitinner]
if i then
- p = splitprefix
+ return finish_inner(var,splitprefix,i)
end
end
end
- if not i then
- i = derived[prefix]
- i = i and i[inner]
- if i then
- p = prefix
- end
+ end
+ -- we now ignore the split prefix and treat the whole inner as a potential
+  -- reference into the global list
+ local i = collected[prefix]
+ if i then
+ i = i[inner]
+ if i then
+ return finish_inner(var,prefix,i)
end
- if not i and prefix ~= "" then
- i = derived[""]
- i = i and i[inner]
+ end
+ if not i and derived then
+ -- and if not found we look in the derived references
+ local i = derived[prefix]
+ if i then
+ i = i[inner]
if i then
- p = ""
+ return finish_inner(var,prefix,i)
end
end
+ end
+ return false
+end
+
+local function unprefixed_inner(set,var,prefix,collected,derived,tobesaved)
+ local inner = var.inner
+ local s = specials[inner]
+ if s then
+ var.kind = "special"
+ else
+ local i = (collected and collected[""] and collected[""][inner]) or
+ (derived and derived [""] and derived [""][inner]) or
+ (tobesaved and tobesaved[""] and tobesaved[""][inner])
if i then
var.kind = "inner"
- var.i = i
- var.p = p
- local ri = resolvers[i[1]]
- if ri then
- ri(var)
- else
- -- can't happen as we catch it with a metatable now
- report_references("unknown inner resolver for %a",i[1])
- end
+ var.p = ""
+ var.i = i
+ var.r = (i.references and i.references.realpage) or (i.pagedata and i.pagedata.realpage) or 1
else
- -- no prefixes here
- local s = specials[inner]
- if s then
- var.kind = "special"
- else
- i = (collected and collected[""] and collected[""][inner]) or
- (derived and derived [""] and derived [""][inner]) or
- (tobesaved and tobesaved[""] and tobesaved[""][inner])
- if i then
- var.kind = "inner"
- var.i = { "reference", i }
- resolvers.reference(var)
- var.p = ""
- else
- var.error = "unknown inner or special"
- end
- end
+ var.error = "unknown inner or special"
end
end
return var
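
The identify_inner code above relies on prefixsplitter to take a reference like fig:oeps apart into a prefix and an inner part before the prefixed, derived and global lookups are tried. A rough standalone lpeg sketch of that split (the actual pattern in strc-ref.lua is part of the full reference parser and handles more syntax):

local P, C, lpegmatch = lpeg.P, lpeg.C, lpeg.match

-- one or more non-colon characters, a colon, then the rest
local prefixsplitter = C((1 - P(":"))^1) * P(":") * C(P(1)^1)

print(lpegmatch(prefixsplitter,"fig:oeps")) -- fig   oeps
print(lpegmatch(prefixsplitter,"-:oeps"))   -- -     oeps  (the "ignore prefix" case)
print(lpegmatch(prefixsplitter,"oeps"))     -- nil         (no prefix present)

When no colon is present the match fails and both split parts stay nil, which is why the code can fall back to looking up the whole inner under the active prefix.
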
@@ -1322,9 +1489,8 @@ local function identify_outer(set,var,i)
local inner = var.inner
local external = externals[outer]
if external then
- local v = copytable(var)
- v = identify_inner(set,v,nil,external)
- if v.i and not v.error then
+ local v = identify_inner(set,var,nil,external)
+ if v then
v.kind = "outer with inner"
set.external = true
if trace_identifying then
@@ -1332,9 +1498,8 @@ local function identify_outer(set,var,i)
end
return v
end
- v = copytable(var)
- local v = identify_inner(set,v,v.outer,external)
- if v.i and not v.error then
+ local v = identify_inner(set,var,var.outer,external)
+ if v then
v.kind = "outer with inner"
set.external = true
if trace_identifying then
@@ -1345,8 +1510,8 @@ local function identify_outer(set,var,i)
end
local external = productdata.componentreferences[outer]
if external then
- local v = identify_inner(set,copytable(var),nil,external)
- if v.i and not v.error then
+ local v = identify_inner(set,var,nil,external)
+ if v then
v.kind = "outer with inner"
set.external = true
if trace_identifying then
@@ -1373,6 +1538,8 @@ local function identify_outer(set,var,i)
local arguments = var.arguments
local operation = var.operation
if inner then
+ -- tricky: in this case we can only use views when we're sure that all inners
+ -- are flushed in the outer document so that should become an option
if arguments then
-- outer::inner{argument}
var.kind = "outer with inner with arguments"
@@ -1380,9 +1547,9 @@ local function identify_outer(set,var,i)
-- outer::inner
var.kind = "outer with inner"
end
- var.i = { "reference", inner }
- resolvers.reference(var)
+ var.i = inner
var.f = outer
+ var.r = (inner.references and inner.references.realpage) or (inner.pagedata and inner.pagedata.realpage) or 1
if trace_identifying then
report_identify_outer(set,var,i,"2e")
end
@@ -1419,57 +1586,62 @@ local function identify_outer(set,var,i)
return var
end
+-- todo: avoid copy
+
local function identify_inner_or_outer(set,var,i)
-- here we fall back on product data
local inner = var.inner
if inner and inner ~= "" then
- local v = identify_inner(set,copytable(var),set.prefix,collected,derived,tobesaved)
- if v.i and not v.error then
- v.kind = "inner" -- check this
+
+ -- first we look up in collected and derived using the current prefix
+
+ local prefix = set.prefix
+
+ local v = identify_inner(set,var,set.prefix,collected,derived)
+ if v then
if trace_identifying then
report_identify_outer(set,v,i,"4a")
end
return v
end
- -- these get auto prefixes but are loaded in the document so they are
- -- internal .. we also set the realpage (for samepage analysis)
+ -- next we look at each component (but we can omit the one already consulted)
local components = job.structure.components
if components then
- for i=1,#components do
- local component = components[i]
- local data = collected[component]
- local vi = data and data[inner]
- if vi then
--- var = copytable(var)
--- var.kind = "inner"
--- var.i = vi
--- var.p = component
--- runners.inner(var.r = vi.references.realpage
--- if trace_identifying then
--- report_identify_outer(set,var,i,"4x")
--- end
--- return var
-local v = identify_inner(set,copytable(var),component,collected) -- is copy needed ?
-if v.i and not v.error then
- v.kind = "inner"
- if trace_identifying then
- report_identify_outer(set,var,i,"4x")
- end
- return v
-end
+ for c=1,#components do
+ local component = components[c]
+ if component ~= prefix then
+ local v = identify_inner(set,var,component,collected,derived)
+ if v then
+ if trace_identifying then
+ report_identify_outer(set,var,i,"4b")
+ end
+ return v
+ end
end
end
end
+ -- as a last resort we will consult the global lists
+
+ local v = unprefixed_inner(set,var,"",collected,derived,tobesaved)
+ if v then
+ if trace_identifying then
+ report_identify_outer(set,v,i,"4c")
+ end
+ return v
+ end
+
+ -- now it gets bad ... we need to look in external files ... keep in mind that
+ -- we can best use explicit references for this ... we might issue a warning
+
local componentreferences = productdata.componentreferences
local productreferences = productdata.productreferences
local components = productdata.components
if components and componentreferences then
- -- for component, data in next, productdata.componentreferences do -- better do this in order of processing:
- for i=1,#components do
- local component = components[i]
+ for c=1,#components do
+ local component = components[c]
local data = componentreferences[component]
if data then
local d = data[""]
@@ -1480,7 +1652,7 @@ end
var.kind = "outer with inner"
set.external = true
if trace_identifying then
- report_identify_outer(set,var,i,"4b")
+ report_identify_outer(set,var,i,"4d")
end
return var
end
@@ -1500,7 +1672,7 @@ end
var.kind = "outer with inner"
set.external = true
if trace_identifying then
- report_identify_outer(set,var,i,"4c")
+ report_identify_outer(set,var,i,"4e")
end
return var
end
@@ -1515,7 +1687,7 @@ end
var.kind = "outer with inner"
set.external = true
if trace_identifying then
- report_identify_outer(set,var,i,"4d")
+ report_identify_outer(set,var,i,"4f")
end
return var
end
@@ -1526,30 +1698,18 @@ end
var.error = "no inner"
end
if trace_identifying then
- report_identify_outer(set,var,i,"4e")
+ report_identify_outer(set,var,i,"4g")
end
return var
end
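+
+-- Summarizing the cascade above (the letters match the trace labels): 4a uses the
+-- current prefix, 4b tries the collected data of the other components, 4c falls
+-- back on the unprefixed global lists, 4d..4f consult external component and
+-- product references, and 4g is the final fall through (including the error case).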
--- local function identify_inner_or_outer(set,var,i)
--- -- we might consider first checking with a prefix prepended and then without
--- -- which is better for fig:oeps
--- local var = do_identify_inner_or_outer(set,var,i)
--- if var.error then
--- local prefix = set.prefix
--- if prefix and prefix ~= "" then
--- var.inner = prefix .. ':' .. var.inner
--- var.error = nil
--- return do_identify_inner_or_outer(set,var,i)
--- end
--- end
--- return var
--- end
-
local function identify_inner_component(set,var,i)
-- we're in a product (maybe ignore when same as component)
local component = var.component
- identify_inner(set,var,component,collected,derived,tobesaved)
+ local v = identify_inner(set,var,component,collected,derived)
+ if not v then
+ var.error = "unknown inner in component"
+ end
if trace_identifying then
report_identify_outer(set,var,i,"5a")
end
@@ -1611,7 +1771,11 @@ local function identify(prefix,reference)
set.n = nofidentified
for i=1,#set do
local var = set[i]
- if var.special then
+ local spe = var.special
+ local fnc = functions[spe]
+ if fnc then
+ var = fnc(var) or { error = "invalid special function" }
+ elseif spe then
var = identify_special(set,var,i)
elseif var.outer then
var = identify_outer(set,var,i)
@@ -1638,7 +1802,7 @@ references.identify = identify
local unknowns, nofunknowns, f_valid = { }, 0, formatters["[%s][%s]"]
-function references.valid(prefix,reference,highlight,newwindow,layer)
+function references.valid(prefix,reference,specification)
local set, bug = identify(prefix,reference)
local unknown = bug or #set == 0
if unknown then
@@ -1653,16 +1817,28 @@ function references.valid(prefix,reference,highlight,newwindow,layer)
unknowns[str] = u + 1
end
else
- set.highlight, set.newwindow, set.layer = highlight, newwindow, layer
+ set.highlight = specification.highlight
+ set.newwindow = specification.newwindow
+ set.layer = specification.layer
currentreference = set[1]
end
-- we can do the expansion here which saves a call
return not unknown
end
-function commands.doifelsereference(prefix,reference,highlight,newwindow,layer)
- commands.doifelse(references.valid(prefix,reference,highlight,newwindow,layer))
-end
+implement {
+ name = "doifelsereference",
+ actions = { references.valid, commands.doifelse },
+ arguments = {
+ "string",
+ "string",
+ {
+ { "highlight", "boolean" },
+ { "newwindow", "boolean" },
+ { "layer" },
+ }
+ }
+}
function references.reportproblems() -- might become local
if nofunknowns > 0 then
@@ -1685,92 +1861,199 @@ end
luatex.registerstopactions(references.reportproblems)
-local innermethod = "names"
+-- The auto method will try to avoid named internals in a clever way which
+-- can make files smaller without sacrificing external references. Some of
+-- the housekeeping happens at the backend side.
+
+local innermethod = v_auto -- only page|auto now
+local defaultinnermethod = defaultinnermethod
+references.innermethod = innermethod -- don't mess with this one directly
function references.setinnermethod(m)
- if m then
- if m == "page" or m == "mixed" or m == "names" then
- innermethod = m
- elseif m == true or m == v_yes then
- innermethod = "page"
- end
+ if toboolean(m) or m == v_page then
+ innermethod = v_page
+ else
+ innermethod = v_auto
end
+ references.innermethod = innermethod
function references.setinnermethod()
report_references("inner method is already set and frozen to %a",innermethod)
end
end
+implement {
+ name = "setinnerreferencemethod",
+ actions = references.setinnermethod,
+ arguments = "string",
+ onlyonce = true
+}
+
function references.getinnermethod()
- return innermethod or "names"
+ return innermethod or defaultinnermethod
end
-directives.register("references.linkmethod", function(v) -- page mixed names
+directives.register("references.linkmethod", function(v) -- page auto
references.setinnermethod(v)
end)
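+
+-- A small usage sketch (not part of the patch): the first call decides and freezes
+-- the method, later calls only report that fact.
+--
+-- references.setinnermethod("page")   -- or a true-ish value like "yes"
+-- references.setinnermethod("auto")   -- reported: inner method is already set ...
+-- print(references.getinnermethod())  -- "page"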
-- this is inconsistent
-function references.setinternalreference(prefix,tag,internal,view) -- needs checking
- if innermethod == "page" then
- return unsetvalue
- else
- local t, tn = { }, 0 -- maybe add to current
- if tag then
+local destinationattributes = { }
+
+local function setinternalreference(specification)
+ local internal = specification.internal
+ local destination = unsetvalue
+ if innermethod == v_auto then
+ local t, tn = { }, 0 -- maybe add to current (now only used for tracing)
+ local reference = specification.reference
+ if reference then
+ local prefix = specification.prefix
if prefix and prefix ~= "" then
prefix = prefix .. ":" -- watch out, : here
- -- for ref in gmatch(tag,"[^,]+") do
- -- tn = tn + 1
- -- t[tn] = prefix .. ref
- -- end
local function action(ref)
tn = tn + 1
t[tn] = prefix .. ref
end
- process_settings(tag,action)
+ process_settings(reference,action)
else
- -- for ref in gmatch(tag,"[^,]+") do
- -- tn = tn + 1
- -- t[tn] = ref
- -- end
local function action(ref)
tn = tn + 1
t[tn] = ref
end
- process_settings(tag,action)
+ process_settings(reference,action)
end
end
- if internal and innermethod == "names" then -- mixed or page
+ -- ugly .. later we decide to ignore it when we have a real one
+ -- but for testing we might want to see them all
+ if internal then
tn = tn + 1
- t[tn] = "aut:" .. internal
+ t[tn] = internal -- when number it's internal
end
- local destination = references.mark(t,nil,nil,view) -- returns an attribute
- texsetcount("lastdestinationattribute",destination)
- return destination
+ destination = references.mark(t,nil,nil,specification.view) -- returns an attribute
end
+ if internal then -- new
+ destinationattributes[internal] = destination
+ end
+ texsetcount("lastdestinationattribute",destination)
+ return destination
end
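+
+-- Example (sketch): with prefix "alpha", reference "one,two" and internal 123 the
+-- auto method marks { "alpha:one", "alpha:two", 123 }; the resulting attribute is
+-- remembered in destinationattributes[123] and in \lastdestinationattribute.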
-function references.setandgetattribute(kind,prefix,tag,data,view) -- maybe do internal automatically here
- local attr = references.set(kind,prefix,tag,data) and references.setinternalreference(prefix,tag,nil,view) or unsetvalue
- texsetcount("lastdestinationattribute",attr)
- return attr
+local function getinternalreference(internal)
+ return destinationattributes[internal] or 0
end
-commands.setreferenceattribute = references.setandgetattribute
+references.setinternalreference = setinternalreference
+references.getinternalreference = getinternalreference
-function references.getinternalreference(n) -- n points into list (todo: registers)
- local l = lists.collected[n]
- return l and l.references.internal or n
-end
+implement {
+ name = "setinternalreference",
+ actions = setinternalreference,
+ arguments = {
+ {
+ { "prefix" },
+ { "reference" },
+ { "internal", "integer" },
+ { "view" }
+ }
+ }
+}
-function commands.setinternalreference(prefix,tag,internal,view) -- needs checking
- context(references.setinternalreference(prefix,tag,internal,view))
+-- implement {
+-- name = "getinternalreference",
+-- actions = { getinternalreference, context },
+-- arguments = "integer",
+-- }
+
+function references.setandgetattribute(data) -- maybe do internal automatically here
+ local attr = unsetvalue
+ local mdat = data.metadata
+ local rdat = data.references
+ if mdat and rdat then
+ if not rdat.section then
+ rdat.section = structures.sections.currentid()
+ end
+ local ndat = data.numberdata
+ if ndat then
+ local numbers = ndat.numbers
+ if type(numbers) == "string" then
+ ndat.numbers = counters.compact(numbers,nil,true)
+ end
+ data.numberdata = helpers.simplify(ndat)
+ end
+ local pdat = data.prefixdata
+ if pdat then
+ data.prefixdata = helpers.simplify(pdat)
+ end
+ local udat = data.userdata
+ if type(udat) == "string" then
+ data.userdata = helpers.touserdata(udat)
+ end
+ if not rdat.block then
+ rdat.block = structures.sections.currentblock()
+ end
+ local done = references.set(data) -- we had kind i.e .item -> full
+ if done then
+ attr = setinternalreference {
+ prefix = prefix,
+ reference = tag,
+ internal = rdat.internal,
+ view = rdat.view
+ } or unsetvalue
+ end
+ end
+ texsetcount("lastdestinationattribute",attr)
+ return attr
end
-function commands.getinternalreference(n) -- this will also be a texcount
+implement {
+ name = "setreferenceattribute",
+ actions = references.setandgetattribute,
+ arguments = {
+ {
+ {
+ "references", {
+ { "internal", "integer" },
+ { "block" },
+ { "view" },
+ { "prefix" },
+ { "reference" },
+ },
+ },
+ {
+ "metadata", {
+ { "kind" },
+ { "xmlroot" },
+ { "catcodes", "integer" },
+ },
+ },
+ {
+ "prefixdata", { "*" }
+ },
+ {
+ "numberdata", { "*" }
+ },
+ {
+ "entries", { "*" }
+ },
+ {
+ "userdata"
+ }
+ }
+ }
+}
+
+function references.getinternallistreference(n) -- n points into list (todo: registers)
local l = lists.collected[n]
- context(l and l.references.internal or n)
+ local i = l and l.references.internal
+ return i and destinationattributes[i] or 0
end
+implement {
+ name = "getinternallistreference",
+ actions = { references.getinternallistreference, context },
+ arguments = "integer"
+}
+
--
function references.getcurrentmetadata(tag)
@@ -1778,12 +2061,11 @@ function references.getcurrentmetadata(tag)
return data and data.metadata and data.metadata[tag]
end
-function commands.getcurrentreferencemetadata(tag)
- local data = references.getcurrentmetadata(tag)
- if data then
- context(data)
- end
-end
+implement {
+ name = "getcurrentreferencemetadata",
+ actions = { references.getcurrentmetadata, context },
+ arguments = "string",
+}
local function currentmetadata(tag)
local data = currentreference and currentreference.i
@@ -1793,32 +2075,58 @@ end
references.currentmetadata = currentmetadata
local function getcurrentprefixspec(default)
- -- todo: message
- return currentmetadata("kind") or "?", currentmetadata("name") or "?", default or "?"
+ local data = currentreference and currentreference.i
+ local metadata = data and data.metadata
+ return
+ metadata and metadata.kind or "?",
+ metadata and metadata.name or "?",
+ default or "?"
end
references.getcurrentprefixspec = getcurrentprefixspec
-function commands.getcurrentprefixspec(default)
- context.getreferencestructureprefix(getcurrentprefixspec(default))
-end
+-- implement {
+-- name = "getcurrentprefixspec",
+-- actions = { getcurrentprefixspec, context }, -- returns 3 arguments
+-- arguments = "string",
+-- }
+
+implement {
+ name = "getcurrentprefixspec",
+ actions = function(tag)
+ context("{%s}{%s}{%s}",getcurrentprefixspec(tag))
+ end,
+ arguments = "string",
+}
-function references.filter(name,...) -- number page title ...
+local genericfilters = { }
+local userfilters = { }
+local textfilters = { }
+local fullfilters = { }
+local sectionfilters = { }
+
+filters.generic = genericfilters
+filters.user = userfilters
+filters.text = textfilters
+filters.full = fullfilters
+filters.section = sectionfilters
+
+local function filterreference(name,prefixspec,numberspec) -- number page title ...
local data = currentreference and currentreference.i -- maybe we should take realpage from here
if data then
if name == "realpage" then
local cs = references.analyze() -- normally already analyzed but also sets state
- context(tonumber(cs.realpage) or 0) -- todo, return and in command namespace
+ context(tonumber(cs.realpage) or 0)
else -- assumes data is table
local kind = type(data) == "table" and data.metadata and data.metadata.kind
if kind then
- local filter = filters[kind] or filters.generic
- filter = filter and (filter[name] or filter.unknown or filters.generic[name] or filters.generic.unknown)
+ local filter = filters[kind] or genericfilters
+ filter = filter and (filter[name] or filter.unknown or genericfilters[name] or genericfilters.unknown)
if filter then
if trace_referencing then
report_references("name %a, kind %a, using dedicated filter",name,kind)
end
- filter(data,name,...)
+ filter(data,name,prefixspec,numberspec)
elseif trace_referencing then
report_references("name %a, kind %a, using generic filter",name,kind)
end
@@ -1833,18 +2141,30 @@ function references.filter(name,...) -- number page title ...
end
end
-function references.filterdefault()
- return references.filter("default",getcurrentprefixspec(v_default))
+local function filterreferencedefault()
+ return filterreference("default",getcurrentprefixspec("default"))
end
-function commands.currentreferencedefault(tag)
- if not tag then tag = "default" end
- references.filter(tag,context.delayed(getcurrentprefixspec(tag)))
-end
+references.filter = filterreference
+references.filterdefault = filterreferencedefault
+
+implement {
+ name = "filterreference",
+ actions = filterreference,
+ arguments = "string",
+}
-filters.generic = { }
+implement {
+ name = "filterdefaultreference",
+ actions = filterreference,
+ arguments = {
+ "string", -- 'default'
+ { { "*" } }, -- prefixspec
+ { { "*" } }, -- numberspec
+ }
+}
-function filters.generic.title(data)
+function genericfilters.title(data)
if data then
local titledata = data.titledata or data.useddata
if titledata then
@@ -1853,7 +2173,7 @@ function filters.generic.title(data)
end
end
-function filters.generic.text(data)
+function genericfilters.text(data)
if data then
local entries = data.entries or data.useddata
if entries then
@@ -1862,12 +2182,12 @@ function filters.generic.text(data)
end
end
-function filters.generic.number(data,what,prefixspec) -- todo: spec and then no stopper
+function genericfilters.number(data,what,prefixspec,numberspec)
if data then
numberdata = lists.reordered(data) -- data.numberdata
if numberdata then
helpers.prefix(data,prefixspec)
- sections.typesetnumber(numberdata,"number",numberdata)
+ sections.typesetnumber(numberdata,"number",numberspec,numberdata)
else
local useddata = data.useddata
if useddata and useddata.number then
@@ -1877,16 +2197,16 @@ function filters.generic.number(data,what,prefixspec) -- todo: spec and then no
end
end
-filters.generic.default = filters.generic.text
+genericfilters.default = genericfilters.text
-function filters.generic.page(data,prefixspec,pagespec)
+function genericfilters.page(data,prefixspec,pagespec)
local pagedata = data.pagedata
if pagedata then
local number, conversion = pagedata.number, pagedata.conversion
if not number then
-- error
elseif conversion then
- context.convertnumber(conversion,number)
+ ctx_convertnumber(conversion,number)
else
context(number)
end
@@ -1895,14 +2215,12 @@ function filters.generic.page(data,prefixspec,pagespec)
end
end
-filters.user = { }
-
-function filters.user.unknown(data,name)
+function userfilters.unknown(data,name)
if data then
local userdata = data.userdata
local userkind = userdata and userdata.kind
if userkind then
- local filter = filters[userkind] or filters.generic
+ local filter = filters[userkind] or genericfilters
filter = filter and (filter[name] or filter.unknown)
if filter then
filter(data,name)
@@ -1916,9 +2234,7 @@ function filters.user.unknown(data,name)
end
end
-filters.text = { }
-
-function filters.text.title(data)
+function textfilters.title(data)
helpers.title(data.entries.text or "?",data.metadata)
end
@@ -1928,18 +2244,14 @@ end
-- helpers.title(data.entries.text or "?",data.metadata)
-- end
-function filters.text.page(data,prefixspec,pagespec)
+function textfilters.page(data,prefixspec,pagespec)
helpers.prefixpage(data,prefixspec,pagespec)
end
-filters.full = { }
-
-filters.full.title = filters.text.title
-filters.full.page = filters.text.page
-
-filters.section = { }
+fullfilters.title = textfilters.title
+fullfilters.page = textfilters.page
-function filters.section.number(data,what,prefixspec)
+function sectionfilters.number(data,what,prefixspec)
if data then
local numberdata = data.numberdata
if not numberdata then
@@ -1951,7 +2263,7 @@ function filters.section.number(data,what,prefixspec)
local references = data.references
if trace_empty then
report_empty("reference %a has a hidden number",references.reference)
- context.emptyreference() -- maybe an option
+ ctx_emptyreference() -- maybe an option
end
else
sections.typesetnumber(numberdata,"number",prefixspec,numberdata)
@@ -1959,18 +2271,18 @@ function filters.section.number(data,what,prefixspec)
end
end
-filters.section.title = filters.generic.title
-filters.section.page = filters.generic.page
-filters.section.default = filters.section.number
+sectionfilters.title = genericfilters.title
+sectionfilters.page = genericfilters.page
+sectionfilters.default = sectionfilters.number
--- filters.note = { default = filters.generic.number }
--- filters.formula = { default = filters.generic.number }
--- filters.float = { default = filters.generic.number }
--- filters.description = { default = filters.generic.number }
--- filters.item = { default = filters.generic.number }
+-- filters.note = { default = genericfilters.number }
+-- filters.formula = { default = genericfilters.number }
+-- filters.float = { default = genericfilters.number }
+-- filters.description = { default = genericfilters.number }
+-- filters.item = { default = genericfilters.number }
setmetatableindex(filters, function(t,k) -- beware, test with rawget
- local v = { default = filters.generic.number } -- not copy as it might be extended differently
+ local v = { default = genericfilters.number } -- not copy as it might be extended differently
t[k] = v
return v
end)
@@ -1999,12 +2311,71 @@ local specials = references.testspecials
-- pretty slow (progressively). In the pagebody one can best check the reference
-- real page to determine if we need contrastlocation as that is more lightweight.
-local function checkedpagestate(n,page)
- local r = referredpage(n)
+local function checkedpagestate(n,page,actions,position,spread)
local p = tonumber(page)
if not p then
return 0
- elseif p > r then
+ end
+ if position and #actions > 0 then
+ local i = actions[1].i -- brrr
+ if i then
+ local a = i.references
+ if a then
+ local x = a.x
+ local y = a.y
+ if x and y then
+ local jp = jobpositions.collected[position]
+ if jp then
+ local px = jp.x
+ local py = jp.y
+ local pp = jp.p
+ if p == pp then
+ -- same page
+ if py > y then
+ return 5 -- above
+ elseif py < y then
+ return 4 -- below
+ elseif px > x then
+ return 4 -- below
+ elseif px < x then
+ return 5 -- above
+ else
+ return 1 -- same
+ end
+ elseif spread then
+ if pp % 2 == 0 then
+ -- left page
+ if pp > p then
+ return 2 -- before
+ elseif pp + 1 == p then
+-- return 4 -- below (on right page)
+ return 5 -- above (on left page)
+ else
+ return 3 -- after
+ end
+ else
+ -- right page
+ if pp < p then
+ return 3 -- after
+ elseif pp - 1 == p then
+-- return 5 -- above (on left page)
+ return 4 -- below (on right page)
+ else
+ return 2 -- before
+ end
+ end
+ elseif pp > p then
+ return 2 -- before
+ else
+ return 3 -- after
+ end
+ end
+ end
+ end
+ end
+ end
+ local r = referredpage(n) -- sort of obsolete
+ if p > r then
return 3 -- after
elseif p < r then
return 2 -- before
@@ -2043,11 +2414,13 @@ local function setreferencerealpage(actions)
end
end
+references.setreferencerealpage = setreferencerealpage
+
-- we store some analysis data alongside the indexed array
-- at this moment only the real reference page is analyzed
-- normally such an analysis happens in the backend code
-function references.analyze(actions)
+function references.analyze(actions,position,spread)
if not actions then
actions = references.currentset
end
@@ -2062,32 +2435,56 @@ function references.analyze(actions)
elseif actions.external then
actions.pagestate = 0
else
- actions.pagestate = checkedpagestate(actions.n,realpage)
+ actions.pagestate = checkedpagestate(actions.n,realpage,actions,position,spread)
end
end
return actions
end
-function commands.referencepagestate(actions)
- if not actions then
- actions = references.currentset
- end
+local function referencepagestate(position,detail,spread)
+ local actions = references.currentset
if not actions then
- context(0)
+ return 0
else
if not actions.pagestate then
- references.analyze(actions) -- delayed unless explicitly asked for
--- print("NO STATE",actions.reference,actions.pagestate)
+ references.analyze(actions,position,spread) -- delayed unless explicitly asked for
+ end
+ local pagestate = actions.pagestate
+ if detail then
+ return pagestate
+ elseif pagestate == 4 then
+ return 2 -- compatible
+ elseif pagestate == 5 then
+ return 3 -- compatible
+ else
+ return pagestate
end
- context(actions.pagestate)
end
end
-function commands.referencerealpage(actions)
+implement {
+ name = "referencepagestate",
+ actions = { referencepagestate, context },
+ arguments = "string"
+}
+
+implement {
+ name = "referencepagedetail",
+ actions = { referencepagestate, context },
+ arguments = { "string", "boolean", "boolean" }
+}
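+
+-- Without the detail flag the same page states collapse to the classic values: 4
+-- becomes 2 (before) and 5 becomes 3 (after), so existing callers keep working.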
+
+local function referencerealpage(actions)
actions = actions or references.currentset
- context(not actions and 0 or actions.realpage or setreferencerealpage(actions))
+ return not actions and 0 or actions.realpage or setreferencerealpage(actions)
end
+implement {
+ name = "referencerealpage",
+ actions = { referencerealpage, context },
+ arguments = "string"
+}
+
local plist, nofrealpages
local function realpageofpage(p) -- the last one counts !
@@ -2164,7 +2561,7 @@ runners["special operation with arguments"] = runners["special"]
-- check the validity.
function specials.internal(var,actions)
- local v = references.internals[tonumber(var.operation)]
+ local v = internals[tonumber(var.operation)]
local r = v and v.references.realpage
if r then
actions.realpage = r
@@ -2224,10 +2621,103 @@ function specials.section(var,actions)
end
end
--- needs a better split ^^^
+-- experimental:
+
+local p_splitter = lpeg.splitat(":")
+local p_lower = lpeg.patterns.utf8lower
+
+-- We can cache lowercased titles, which saves a lot of time, but then we had
+-- better use a global cache with weak keys.
+
+-- local lowercache = table.setmetatableindex(function(t,k)
+-- local v = lpegmatch(p_lower,k)
+-- t[k] = v
+-- return v
+-- end)
-commands.filterreference = references.filter
-commands.filterdefaultreference = references.filterdefault
+local lowercache = false
+
+local function locate(list,askedkind,askedname,pattern)
+ local kinds = lists.kinds
+ local names = lists.names
+ if askedkind and not kinds[askedkind] then
+ return false
+ end
+ if askedname and not names[askedname] then
+ return false
+ end
+ for i=1,#list do
+ local entry = list[i]
+ local metadata = entry.metadata
+ if metadata then
+ local found = false
+ if askedname then
+ local name = metadata.name
+ if name then
+ found = name == askedname
+ end
+ elseif askedkind then
+ local kind = metadata.kind
+ if kind then
+ found = kind == askedkind
+ end
+ end
+ if found then
+ local titledata = entry.titledata
+ if titledata then
+ local title = titledata.title
+ if title then
+ if lowercache then
+ found = lpegmatch(pattern,lowercache[title])
+ else
+ found = lpegmatch(pattern,lpegmatch(p_lower,title))
+ end
+ if found then
+ return {
+ inner = pattern,
+ kind = "inner",
+ reference = pattern,
+ i = entry,
+ p = "",
+ r = entry.references.realpage,
+ }
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+function functions.match(var,actions)
+ if not var.outer then
+ local operation = var.operation
+ if operation and operation ~= "" then
+ local operation = lpegmatch(p_lower,operation)
+ local list = lists.collected
+ local names = false
+ local kinds = false
+ local where, what = lpegmatch(p_splitter,operation)
+ if where and what then
+ local pattern = lpeg.finder(what)
+ return
+ locate(list,false,where,pattern)
+ or locate(list,where,false,pattern)
+ or { error = "no match" }
+ else
+ local pattern = lpeg.finder(operation)
+ -- todo: don't look at section and float in last pass
+ return
+ locate(list,"section",false,pattern)
+ or locate(list,"float",false,pattern)
+ or locate(list,false,false,pattern)
+ or { error = "no match" }
+ end
+ end
+ end
+end
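+
+-- A hedged usage sketch with hypothetical labels: assuming this special is reached
+-- through the usual "name(operation)" reference syntax, something like
+--
+--   \goto{results}[match(results)]       % sections first, then floats, then the rest
+--   \goto{results}[match(float:results)] % restrict the search to the float lists
+--
+-- resolves to the first list entry whose lowercased title contains the given
+-- string, or yields the error "no match".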
+
+-- needs a better split ^^^
-- done differently now:
@@ -2235,24 +2725,36 @@ function references.export(usedname) end
function references.import(usedname) end
function references.load (usedname) end
-commands.exportreferences = references.export
+implement { name = "exportreferences", actions = references.export }
-- better done here .... we don't insert/remove, just use a pointer
local prefixstack = { "" }
local prefixlevel = 1
-function commands.pushreferenceprefix(prefix)
+local function pushreferenceprefix(prefix)
prefixlevel = prefixlevel + 1
prefixstack[prefixlevel] = prefix
- context(prefix)
+ return prefix
end
-function commands.popreferenceprefix()
+local function popreferenceprefix()
prefixlevel = prefixlevel - 1
if prefixlevel > 0 then
- context(prefixstack[prefixlevel])
+ return prefixstack[prefixlevel]
else
report_references("unable to pop referenceprefix")
+ return ""
end
end
+
+implement {
+ name = "pushreferenceprefix",
+ actions = { pushreferenceprefix, context }, -- we can use setmacro
+ arguments = "string",
+}
+
+implement {
+ name = "popreferenceprefix",
+ actions = { popreferenceprefix, context }, -- we can use setmacro
+}
diff --git a/tex/context/base/strc-ref.mkvi b/tex/context/base/strc-ref.mkvi
index 85c6a0729..f5d0d1d78 100644
--- a/tex/context/base/strc-ref.mkvi
+++ b/tex/context/base/strc-ref.mkvi
@@ -118,6 +118,11 @@
\unexpanded\def\reference {\dosingleargument\strc_references_full_reference} % never forgotten
\unexpanded\def\setreference {\dodoubleargument\strc_references_set_reference } %
+% maybe: \let\reference\textreference
+
+\unexpanded\def\showreferences
+ {\enabletrackers[nodes.references.show,nodes.destinations.show]}
+
%D These are implemented in a low level form as:
\unexpanded\def\strc_references_text_reference [#labels]{\strc_references_set_named_reference\s!text{#labels}{}}
@@ -140,13 +145,29 @@
\newcount\lastreferenceattribute
\newcount\lastdestinationattribute
-\def\dofinishfullreference#prefix#label{\normalexpanded{\ctxlatecommand{enhancereference("#prefix","#label")}}}
-\def\dofinishtextreference#prefix#label{\normalexpanded{\ctxlatecommand{enhancereference("#prefix","#label",{})}}}
+\def\strc_references_finish#prefix#reference#internal%
+ {\normalexpanded{\ctxlatecommand{enhancereference("#prefix","#reference")}}}
+
+\let\dofinishreference\strc_references_finish % used at lua end
+
+% This is somewhat tricky: we want to keep the reference with the following word but
+% that word should also hyphenate. We need to find a better way.
-\let\dofinishpagereference\dofinishfullreference
-\let\dofinishuserreference\dofinishfullreference
+% 0 = nothing
+% 1 = bind to following word
+
+\setnewconstant\c_strc_references_bind_state\plusone
+
+\def\strc_references_inject_before
+ {}
+
+\def\strc_references_inject_after
+ {\ifcase\c_strc_references_bind_state
+ % nothing
+ \or
+ \prewordbreak % to be tested: \removeunwantedspaces\permithyphenation
+ \fi}
-\def\dofinishsomereference#kind{\executeifdefined{dofinish#{kind}reference}\gobbletwoarguments}
\unexpanded\def\strc_references_set_named_reference
{\ifreferencing
@@ -193,32 +214,36 @@
\globallet\currentreferencecoding\s!tex
\fi
% beware, the structures.references.set writes a
- % \setnextinternalreference
+ \setnextinternalreference
\strc_references_start_destination_nodes
- \ctxcommand{setreferenceattribute("\currentreferencekind", "\referenceprefix","\currentreferencelabels",
- {
- references = {
- % internal = \nextinternalreference, % no need for an internal as we have an explicit
- block = "\currentsectionblock",
- section = structures.sections.currentid(),
- },
- metadata = { % we could assume page to have no metadata
- kind = "#kind", % \currentreferencekind
- \ifx\currentreferencekind\s!page\else
- catcodes = \the\catcodetable,
- xmlroot = \ifx\currentreferencecoding\s!xml "\xmldocument"\else nil\fi, % only useful when text
- \fi
- },
+ \clf_setreferenceattribute
+ {%
+ references {%
+ internal \nextinternalreference
+ % block {\currentsectionblock}%
+ view {\interactionparameter\c!focus}%
+ prefix {\referenceprefix}%
+ reference {\currentreferencelabels}%
+ }%
+ metadata {%
+ kind {\currentreferencekind}%
+ \ifx\currentreferencekind\s!page\else
+ \ifx\currentreferencecoding\s!xml
+ xmlroot {\xmldocument}%
+ \fi
+ catcodes \catcodetable
+ \fi
+ }%
\ifx\currentreferencedata\empty\else
- entries = {
- text = \!!bs\currentreferencedata\!!es
- },
+ entries {%
+ text {\currentreferencedata}%
+ }%
\fi
\ifx\currentreferenceuserdata\empty\else
- userdata = structures.helpers.touserdata(\!!bs\detokenize{#userdata}\!!es)
+ userdata {\detokenize{#userdata}}%
\fi
- },"\interactionparameter\c!focus")
- }%
+ }%
+ \relax
\strc_references_stop_destination_nodes
\fi
\else
@@ -229,10 +254,11 @@
\xdef\currentdestinationattribute{\number\lastdestinationattribute}%
% will become an option:
\ifnum\lastdestinationattribute>\zerocount
+ \strc_references_inject_before % new
\dontleavehmode\hbox attr \destinationattribute\lastdestinationattribute\bgroup
\strc_references_flush_destination_nodes
\egroup
- \prewordbreak % new
+ \strc_references_inject_after % new
\fi}
\def\strc_references_set_page_only_destination_attribute#labels% could in fact be fully expandable
@@ -243,17 +269,21 @@
\lastdestinationattribute\attributeunsetvalue
\else
\strc_references_start_destination_nodes
- \ctxcommand{setreferenceattribute("\s!page", "\referenceprefix","\currentreferencelabels",
- {
- references = {
- block = "\currentsectionblock",
- section = structures.sections.currentid(),
- },
- metadata = {
- kind = "page",
- },
- },"\interactionparameter\c!focus")
- }%
+ \setnextinternalreference
+ \clf_setreferenceattribute
+ {%
+ references {%
+ internal \nextinternalreference
+ % block {\currentsectionblock}%
+ view {\interactionparameter\c!focus}%
+ prefix {\referenceprefix}%
+ reference {\currentreferencelabels}%
+ }%
+ metadata {%
+ kind {page}%
+ }%
+ }%
+ \relax
\strc_references_stop_destination_nodes
\fi
\else
@@ -261,23 +291,34 @@
\lastdestinationattribute\attributeunsetvalue
\fi}
-\unexpanded\def\strc_references_direct_full#labels#text%
+\unexpanded\def\strc_references_direct_full_user#user#labels#text%
{\ifreferencing
\strc_references_start_destination_nodes
- \ctxcommand{setreferenceattribute("\s!full", "\referenceprefix","#labels",
- {
- references = {
- block = "\currentsectionblock",
- section = structures.sections.currentid(),
- },
- metadata = {
- kind = "\s!full",
- },
- entries = {
- text = \!!bs#text\!!es
- },
- },"\interactionparameter\c!focus")
- }%
+ \setnextinternalreference
+ \edef\m_strc_references_user{#user}%
+ \edef\m_strc_references_text{#text}%
+ \clf_setreferenceattribute
+ {%
+ references {%
+ internal \nextinternalreference
+ % block {\currentsectionblock}%
+ view {\interactionparameter\c!focus}%
+ prefix {\referenceprefix}%
+ reference {#labels}%
+ }%
+ metadata {%
+ kind {\s!full}%
+ }%
+ \ifx\m_strc_references_text\empty \else
+ entries {%
+ text {\m_strc_references_text}%
+ }%
+ \fi
+ \ifx\m_strc_references_user\empty \else
+ userdata {\m_strc_references_user}% \detokenize\expandafter{\normalexpanded{...}}
+ \fi
+ }%
+ \relax
\strc_references_stop_destination_nodes
\else
\setbox\b_strc_destination_nodes\emptyhbox
@@ -287,13 +328,17 @@
\xdef\currentdestinationattribute{\number\lastdestinationattribute}%
% will become an option:
\ifnum\lastdestinationattribute>\zerocount
+ \strc_references_inject_before % new
\dontleavehmode\hbox attr \destinationattribute\lastdestinationattribute\bgroup
\strc_references_flush_destination_nodes
\egroup
- \prewordbreak % new
+ \strc_references_inject_after % new
\fi}
-\let\dodirectfullreference\strc_references_direct_full % for at lua end
+\unexpanded\def\strc_references_direct_full
+ {\strc_references_direct_full_user\empty}
+
+\let\dodirectfullreference\strc_references_direct_full % for use at the lua end (no longer the case)
\def\strc_references_set_page_only_destination_box_attribute#cs#labels%
{\strc_references_set_page_only_destination_attribute{#labels}%
@@ -329,17 +374,19 @@
\def\strc_references_set_simple_page_reference#label%
{\iflocation
\strc_references_start_destination_nodes
- \ctxcommand{setreferenceattribute("\s!page", "\referenceprefix","#label",
- {
- references = {
- % block = "\currentsectionblock",
- % section = structures.sections.currentid(),
- },
- metadata = { % we could assume page to have no metadata
- kind = "\s!page",
- },
- },"\interactionparameter\c!focus")
- }%
+ \clf_setreferenceattribute
+ {%
+ references {%
+ % block {\currentsectionblock}%
+ view {\interactionparameter\c!focus}%
+ prefix {\referenceprefix}%
+ reference {#label}%
+ }%
+ metadata {%
+ kind {\s!page}%
+ }%
+ }%
+ \relax
\strc_references_stop_destination_nodes
\xdef\currentdestinationattribute{\number\lastdestinationattribute}%
\else
@@ -349,7 +396,15 @@
\def\strc_references_get_simple_page_reference#label%
{\iflocation
- \ctxcommand{injectreference("\referenceprefix","#label",\number\ht\strutbox,\number\dp\strutbox,\extrareferencearguments)}%
+ \clf_injectreference
+ {\referenceprefix}%
+ {#label}%
+ {%
+ height \ht\strutbox
+ depth \dp\strutbox
+ \extrareferencearguments
+ }%
+ \relax
\xdef\currentreferenceattribute{\number\lastreferenceattribute}%
\else
\xdef\currentreferenceattribute{\number\attributeunsetvalue}%
@@ -489,9 +544,6 @@
\newcount\referencehastexstate % set in backend
-\def\referencepagestate{\ctxcommand{referencepagestate()}}
-\def\referencerealpage {\ctxcommand{referencerealpage ()}}
-
% referencepagestate:
%
% 0 = no page ref, 1=same page, 2=before, 3=after
@@ -579,13 +631,13 @@
{\dodoubleempty\strc_references_define_reference}
\def\strc_references_define_reference[#name][#specification]%
- {\ctxcommand{definereference("\referenceprefix","#name",\!!bs\detokenize{#specification}\!!es)}}
+ {\clf_definereference{\referenceprefix}{#name}{\detokenize{#specification}}}
\unexpanded\def\resetreference[#name]%
- {\ctxcommand{resetreference("\referenceprefix","#name")}}
+ {\clf_resetreference{\referenceprefix}{#name}}
\def\setpagereference#name#specification% hm,. low level ?
- {\ctxcommand{definereference("","#name",\!!bs\v!page(\luaescapestring{#specification})\!!es)}}
+ {\clf_definereference{}{#name}{\v!page(\detokenize{#specification})}} % is detokenize needed here?
%D Chained references are defined as:
%D
@@ -615,17 +667,19 @@
\newconditional\gotonewwindow \setfalse\gotonewwindow
\def\expandtexincurrentreference % will happen in lua some time
- {\ifcase\referencehastexstate\else\ctxcommand{expandcurrentreference()}\fi}
+ {\ifcase\referencehastexstate\else\clf_expandcurrentreference\fi}
-\def\expandreferenceoperation#tag#content{\ctxcommand{setreferenceoperation(#tag,\!!bs#content\!!es)}}
-\def\expandreferencearguments#tag#content{\ctxcommand{setreferencearguments(#tag,\!!bs#content\!!es)}}
+\def\expandreferenceoperation#tag#content{\clf_setreferenceoperation#tag{#content}}
+\def\expandreferencearguments#tag#content{\clf_setreferencearguments#tag{#content}}
-\def\doifreferencefoundelse#labels#yes#nop%
- {\ctxcommand{doifelsereference("\referenceprefix",\!!bs#labels\!!es,\luaconditional\highlighthyperlinks,\luaconditional\gotonewwindow)}%
+\def\doifelsereferencefound#label#yes#nop%
+ {\clf_doifelsereference{\referenceprefix}{#label}{\extrareferencearguments}%
{\expandtexincurrentreference
#yes}%
{#nop}}
+\let\doifreferencefoundelse \doifelsereferencefound
+
%D The tester only splits the reference in components but does
%D not look into them. The following macro does a preroll and
%D determines for instance the current real reference pagenumber.
@@ -731,14 +785,14 @@
{\global\advance\locationcount\plusone}
\def\setnextinternalreferences#kind#name% plural
- {\ctxcommand{setnextinternalreference("#kind","#name")}}
+ {\clf_setnextinternalreference{#kind}{#name}}
\def\getinternalorderreference#kind#name%
- {\ctxcommand{currentreferenceorder("#kind","#name")}}
+ {\clf_currentreferenceorder{#kind}{#name}}
\def\thisissomeinternal#kind#name% only for old time sake
{\begingroup
- \ctxcommand{setinternalreference("","#kind:#name")}%
+ \clf_setinternalreference reference {#kind:#name}\relax
\hbox attr \destinationattribute\lastdestinationattribute{}%
\endgroup}
@@ -801,7 +855,7 @@
\appendtoks
\edef\p_export{\referencingparameter\c!export}%
\ifx\p_export\v!yes
- \ctxcommand{exportreferences()}%
+ \clf_exportreferences
\fi
\to \everygoodbye
@@ -816,16 +870,16 @@
% {\popmacro\referenceprefix}
\unexpanded\def\globalpushreferenceprefix#prefix%
- {\xdef\referenceprefix{\ctxcommand{pushreferenceprefix("#prefix")}}}
+ {\xdef\referenceprefix{\clf_pushreferenceprefix{#prefix}}}
\unexpanded\def\globalpopreferenceprefix
- {\xdef\referenceprefix{\ctxcommand{popreferenceprefix()}}}
+ {\xdef\referenceprefix{\clf_popreferenceprefix}}
\unexpanded\def\pushreferenceprefix#prefix%
- {\edef\referenceprefix{\ctxcommand{pushreferenceprefix("#prefix")}}}
+ {\edef\referenceprefix{\clf_pushreferenceprefix{#prefix}}}
\unexpanded\def\popreferenceprefix
- {\edef\referenceprefix{\ctxcommand{popreferenceprefix()}}}
+ {\edef\referenceprefix{\clf_popreferenceprefix}}
\def\m_strc_references_prefix_yes{+}
\def\m_strc_references_prefix_nop{-}
@@ -915,15 +969,15 @@
\ifdefined\over \let\normalmathover \over \unexpanded\def\over {\mathortext\normalmathover \strc_references_about} \else \let\over \strc_references_about \fi
\to \everydump
- \def\filterreference #key{\ctxcommand{filterreference("#key")}} % no checking, expanded
-\unexpanded\def\getreferenceentry#key{\ctxcommand{filterreference("#key")}} % no checking, unexpanded
+ \def\filterreference #key{\clf_filterreference{#key}} % no checking, expanded
+\unexpanded\def\getreferenceentry#key{\clf_filterreference{#key}} % no checking, unexpanded
-\def\currentreferencenumber {\filterreference{number}}
-\def\currentreferencepage {\filterreference{page}}
-\def\currentreferencetitle {\filterreference{title}}
-\def\currentreferencetext {\filterreference{text}}
-\def\currentreferencedefault {\filterreference{default}}
-\def\currentreferencerealpage{\filterreference{realpage}}
+\def\currentreferencenumber {\clf_filterreference{number}}
+\def\currentreferencepage {\clf_filterreference{page}}
+\def\currentreferencetitle {\clf_filterreference{title}}
+\def\currentreferencetext {\clf_filterreference{text}}
+\def\currentreferencedefault {\clf_filterreference{default}}
+\def\currentreferencerealpage{\clf_filterreference{realpage}}
%D The most straightforward way of retrieving references is
%D using \type{\ref}.
@@ -933,7 +987,7 @@
\def\strc_references_get_reference[#key][#label]% #key = number page title text default realpage ...
{\ifsecondargument
- \doifreferencefoundelse{#label}{\ctxcommand{filterreference("#key")}}\dummyreference
+ \doifelsereferencefound{#label}{\clf_filterreference{#key}}\dummyreference
\else
\dummyreference
\fi}
@@ -947,11 +1001,12 @@
\begingroup
\let\crlf\space
\let\\\space
- \postponenotes
+ \postponenotes % might go
\referencingparameter\c!left
- \doifreferencefoundelse{#label}
+ \doifelsereferencefound{#label}
{\goto{\limitatetext\currentreferencetitle{\referencingparameter\c!width}\unknown}[#label]}% not so efficient (dup lookup)
{}% todo
+ \flushnotes % might go
\referencingparameter\c!right
\endgroup}
@@ -967,44 +1022,151 @@
%D ... \atpage[someref] ...
%D \stoptyping
-% 0 = unknown
-% 1 = same
-% 2 = before
-% 3 = after
+% standard detail
+%
+% 0 = unknown unknown
+% 1 = same on same page
+% 2 = before preceding page
+% 3 = after following page
+%
+% 4 = above above on same page
+% 5 = below below on same page
-\def\strc_references_by_reference_page_state#unknown#before#current#after%
- {\ifcase\referencepagestate\space#unknown\or#current\or#before\or#after\fi}
+% todo: optimize for use in pagebody
+% todo: maybe make it optional
-% \unexpanded\def\somewhere#backward#foreward#dummy[#label]% #dummy gobbles space around #foreward
-% {\doifreferencefoundelse{#label}% usage needs checking (useless)
-% {\goto{\strc_references_by_reference_page_state{#label}{#backward}{}{#foreward}}[#label]}
-% {[#label]}}
+% \setuppagenumbering[alternative=doublesided]
+% \setupreferencing [doublesided=no] % yes is default
%
-% better:
+% \somewhere{backward}{foreward}[label]
+% \someplace{preceding}{backward}{current}{foreward}{following}[label]
+% \atpage[#label]
+% \doifcheckedpagestate{label}{preceding}{backward}{current}{foreward}{following}{otherwise}
+%
+% \dorecurse {20} {
+% \placefigure[here][fig:#1]{}{\externalfigure[dummy]}
+% \dorecurse {20} {
+% ##1: \atpage[fig:##1] /
+% \doifcheckedpagestate
+% {fig:##1}
+% {preceding}{backward}{current}{foreward}{following}
+% {otherwise}
+% }
+% }
+
+\newcount \nofreferencestates
+\newconditional\pagestatespread
-\unexpanded\def\somewhere#backward#foreward#dummy[#label]% #dummy gobbles space around #foreward
- {\doifreferencefoundelse{#label}%
- {\strc_references_by_reference_page_state{#label}{\goto{#backward}[#label]}{}{\goto{#foreward}[#label]}}
- {[#label]}}
+\appendtoks
+ \doifelse{\referencingparameter\c!doublesided}\v!yes\settrue\setfalse\pagestatespread
+\to \everysetupreferencing
-\unexpanded\def\atpage[#label]% todo
- {\doifreferencefoundelse{#label}% kind of inefficient as \goto also analyzes
- {\goto
- {\strc_references_by_reference_page_state
- {\labeltexts\v!page\dummyreference}%
- {\labeltext\v!hencefore}%
- {\labeltexts\v!atpage\currentreferencepage}%
- {\labeltext\v!hereafter}}%
- [#label]}
- {[#label]}}
+\setupreferencing
+ [\c!doublesided=\v!yes]
+
+\def\referencepagestate
+ {\numexpr\clf_referencepagestate
+ {rst::\number\nofreferencestates}%
+ \relax}
+
+\def\referencepagedetail
+ {\numexpr\clf_referencepagestate
+ {rst::\number\nofreferencestates}%
+ true %
+ \ifconditional\pagestatespread\ifdoublesided true\else false\fi\else false\fi
+ \relax}
+
+\def\referencerealpage
+ {\clf_referencerealpage}
+
+\unexpanded\def\tracedpagestate
+ {{\blue\tttf(\ifcase\referencepagedetail unknown\or same\or previous\or next\or above\or below\else unknown\fi)}}
+
+\unexpanded\def\markreferencepage
+ {\dontleavehmode\begingroup
+ \iftrialtypesetting
+ % issue warning that not stable
+ \else
+ % needs checking ... but probably never in trialmode
+ \global\advance\nofreferencestates\plusone
+ \xypos{rst::\number\nofreferencestates}%
+ % \tracedpagestate
+ \fi
+ \endgroup}
+
+\unexpanded\def\doifcheckedpagestate#label% #preceding#backward#current#foreward#following#otherwise%
+ {\doifelsereferencefound{#label}\strc_references_handle_page_state_yes\strc_references_handle_page_state_nop}
+
+\let\strc_references_handle_page_state_nop\sixthofsixarguments
+
+\def\strc_references_handle_page_state_yes
+ {\markreferencepage
+ \ifcase\referencepagedetail
+ \expandafter\sixthofsixarguments \or
+ \expandafter\thirdofsixarguments \or
+ \expandafter\firstofsixarguments \or
+ \expandafter\fifthofsixarguments \or
+ \expandafter\secondofsixarguments\or
+ \expandafter\fourthofsixarguments\else
+ \expandafter\sixthofsixarguments \fi}
\unexpanded\def\referencesymbol
- {\hbox{\strut\high
- {\setupsymbolset[\interactionparameter\c!symbolset]%
- \symbol[\strc_references_by_reference_page_state\v!somewhere\v!nowhere\v!previous\v!next]}}}
+ {\hbox\bgroup
+ \strut
+ \markreferencepage
+ \high
+ {\setupsymbolset[\interactionparameter\c!symbolset]%
+ \symbol[\ifcase\referencepagedetail\v!somewhere\or\v!nowhere\or\v!previous\or\v!next\or\v!previous\or\v!next\else\v!somewhere\fi]}%
+ \egroup}
+
+%D Hereafter the \type {\ignorespaces} binds the state node to the next character (more likely
+%D than a preceding one) and one can always add an explicit space.
+
+\unexpanded\def\somewhere#backward#foreward#dummy[#label]% #dummy gobbles space around #foreward
+ {\doifcheckedpagestate{#label}%
+ {\goto{#backward}[#label]}%
+ {\goto{#backward}[#label]}%
+ {\ignorespaces}%
+ {\goto{#foreward}[#label]}%
+ {\goto{#foreward}[#label]}%
+ {#label}}%
+
+\unexpanded\def\someplace#preceding#backward#current#foreward#following#dummy[#label]% #dummy gobbles space around #foreward
+ {\doifcheckedpagestate{#label}%
+ {\doifelsenothing{#preceding}{\goto{#preceding}[#label]}\ignorespaces}%
+ {\doifelsenothing {#backward}{\goto {#backward}[#label]}\ignorespaces}%
+ {\doifelsenothing {#current}{\goto {#current}[#label]}\ignorespaces}%
+ {\doifelsenothing {#foreward}{\goto {#foreward}[#label]}\ignorespaces}%
+ {\doifelsenothing{#following}{\goto{#following}[#label]}\ignorespaces}%
+ {#label}}
+
+\unexpanded\def\atpage[#label]% todo
+ {\doifcheckedpagestate{#label}%
+ {\goto{\labeltext \v!precedingpage }[#label]}%
+ {\goto{\labeltext \v!hencefore }[#label]}%
+ {\ignorespaces}%
+ {\goto{\labeltext \v!hereafter }[#label]}%
+ {\goto{\labeltext \v!followingpage }[#label]}%
+ {\goto{\labeltexts\v!page\dummyreference}[#label]}}
+
+% Someone requested this but in retrospect didn't need it, so we keep it as an example.
+% Beware: a node is injected which is why we add ignorespaces!
+%
+% \unexpanded\def\strc_references_conditional#action#text[#condition]#dummy[#label]%
+% {\doifcheckedpagestate{#label}%
+% {\doifelse{#condition}\v!precedingpage{#action{#text}[#label]}\ignorespaces}%
+% {\doifelse{#condition}\v!hencefore {#action{#text}[#label]}\ignorespaces}%
+% {\doifelse{#condition}\v!current {#action{#text}[#label]}\ignorespaces}%
+% {\doifelse{#condition}\v!hereafter {#action{#text}[#label]}\ignorespaces}%
+% {\doifelse{#condition}\v!followingpage{#action{#text}[#label]}\ignorespaces}%
+% {#label}}
+%
+% \unexpanded\def\conditionalat {\strc_references_conditional\at}
+% \unexpanded\def\conditionalin {\strc_references_conditional\in}
+% \unexpanded\def\conditionalabout{\strc_references_conditional\about}
-%D The other alternatives just conform their names: only the
-%D label, only the text, or the label and the text.
+%D The other alternatives just conform their names: only the label, only the text, or the
+%D label and the text.
% \dounknownreference -> \dummyreference
@@ -1027,11 +1189,11 @@
\setvalue{\??referencinginteraction\v!all}%
{\the\leftreferencetoks
- \doifsometokselse\leftreferencetoks \leftofreferencecontent \donothing
+ \doifelsesometoks\leftreferencetoks \leftofreferencecontent \donothing
\leftofreference
\currentreferencecontent
\rightofreference
- \doifsometokselse\rightreferencetoks\rightofreferencecontent\donothing
+ \doifelsesometoks\rightreferencetoks\rightofreferencecontent\donothing
\the\rightreferencetoks}
\setvalue{\??referencinginteraction\v!label}%
@@ -1091,14 +1253,21 @@
\let\rightofreferencecontent\empty
\fi
% inefficient: double resolve
- \doifreferencefoundelse{#label} % we need to resolve the text
+ \doifelsereferencefound{#label} % we need to resolve the text
{\goto{\referencesequence}[#label]}
{\let\currentreferencecontent\dummyreference
\goto{\referencesequence}[#label]}%
\strc_references_stop_goto}
-\unexpanded\def\strc_references_in{\strc_references_start_goto\let\currentreferencecontent\currentreferencedefault\strc_references_pickup_goto}
-\unexpanded\def\strc_references_at{\strc_references_start_goto\let\currentreferencecontent\currentreferencepage \strc_references_pickup_goto}
+\unexpanded\def\strc_references_in
+ {\strc_references_start_goto
+ \let\currentreferencecontent\currentreferencedefault
+ \strc_references_pickup_goto}
+
+\unexpanded\def\strc_references_at
+ {\strc_references_start_goto
+ \let\currentreferencecontent\currentreferencepage
+ \strc_references_pickup_goto}
%D \macros
%D {definereferenceformat}
@@ -1209,7 +1378,7 @@
\def\autoreferencelabeltextflag{*} % a proper key like 'auto' or 'name' can clash with a label key
\unexpanded\def\autoreferencelabeltext
- {\ctxcommand{getcurrentreferencemetadata("name")}}
+ {\clf_getcurrentreferencemetadata{name}}
% \starttext
% \definereferenceformat[inxx] [left=(,right=),text=txt]
@@ -1254,7 +1423,9 @@
\newconditional\uselocationstrut \settrue\uselocationstrut
\def\extrareferencearguments
- {\luaconditional\highlighthyperlinks,\luaconditional\gotonewwindow,"\currentviewerlayer"}
+ {highlight \luaconditional\highlighthyperlinks\space
+ newwindow \luaconditional\gotonewwindow\space
+ layer {\currentviewerlayer}}
\unexpanded\def\directgoto
{\ifconditional\uselocationstrut
@@ -1286,7 +1457,15 @@
\attribute\referenceattribute\attributeunsetvalue
\global\lastsavedreferenceattribute\attributeunsetvalue
\iflocation
- \ctxcommand{injectreference("\referenceprefix","#label",\number\ht\strutbox,\number\dp\strutbox,\extrareferencearguments)}%
+ \clf_injectreference
+ {\referenceprefix}%
+ {#label}%
+ {%
+ height \ht\strutbox
+ depth \dp\strutbox
+ \extrareferencearguments
+ }%
+ \relax
\setlocationattributes
\setstrut % can be option
\global\lastsavedreferenceattribute\lastreferenceattribute
@@ -1305,7 +1484,15 @@
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxcommand{injectreference("\referenceprefix","#label",\number\dimexpr\interactionparameter\c!height\relax,\number\dimexpr\interactionparameter\c!depth\relax,\extrareferencearguments)}%
+ \clf_injectreference
+ {\referenceprefix}%
+ {#label}%
+ {%
+ height \dimexpr\interactionparameter\c!height\relax
+ depth \dimexpr\interactionparameter\c!depth \relax
+ \extrareferencearguments
+ }%
+ \relax
\setlocationattributes
\attribute\referenceattribute\lastreferenceattribute
\global\lastsavedreferenceattribute\lastreferenceattribute
@@ -1324,9 +1511,12 @@
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxcommand{doifelsereference("\referenceprefix","#label",\extrareferencearguments)}%
+ \clf_doifelsereference{\referenceprefix}{#label}{\extrareferencearguments}%
{\expandtexincurrentreference
- \ctxcommand{injectcurrentreference(\number\ht\strutbox,\number\dp\strutbox)}%
+ \clf_injectcurrentreferencehtdp
+ \ht\strutbox
+ \dp\strutbox
+ \relax
\setlocationattributes
\setstrut % can be option
\global\lastsavedreferenceattribute\lastreferenceattribute
@@ -1341,15 +1531,52 @@
%\egroup\unhbox\referencebox}
\endgroup}
+\unexpanded\def\startgoto[#label]%
+ {\dontleavehmode
+ \begingroup
+ \iflocation
+ \clf_doifelsereference{\referenceprefix}{#label}{\extrareferencearguments}%
+ {\expandafter\startgoto_yes}%
+ {\expandafter\startgoto_nop}%
+ \else
+ \expandafter\startgoto_nop
+ \fi}
+
+\unexpanded\def\startgoto_nop
+ {\let\stopgoto\stopgoto_nop}
+
+\unexpanded\def\stopgoto_nop
+ {\endgroup}
+
+\unexpanded\def\startgoto_yes
+ {\expandtexincurrentreference
+ \clf_injectcurrentreferencehtdp
+ \ht\strutbox
+ \dp\strutbox
+ \relax
+ \setlocationattributes
+ \setstrut % can be option
+ \global\lastsavedreferenceattribute\lastreferenceattribute
+ \attribute\referenceattribute\lastreferenceattribute
+ \dostarttagged\t!link\empty
+ \let\stopgoto\stopgoto_yes}
+
+\unexpanded\def\stopgoto_yes
+ {\dostoptagged
+ \endgroup}
+
\def\dogotohtdp#content#dummy[#label]% dummy gobbles spaces
{\dontleavehmode
\begingroup
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxcommand{doifelsereference("\referenceprefix","#label",\extrareferencearguments)}%
+ \clf_doifelsereference{\referenceprefix}{#label}{\extrareferencearguments}%
{\expandtexincurrentreference
- \ctxcommand{injectcurrentreference(\number\dimexpr\interactionparameter\c!height\relax,\number\dimexpr\interactionparameter\c!depth\relax)}%
+ \clf_injectcurrentreferencehtdp
+ \dimexpr\interactionparameter\c!height\relax
+ \dimexpr\interactionparameter\c!depth \relax
+ \relax
\setlocationattributes
\global\lastsavedreferenceattribute\lastreferenceattribute
\attribute\referenceattribute\lastreferenceattribute
@@ -1368,7 +1595,11 @@
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxcommand{injectreference("\referenceprefix","#label",nil,nil,\extrareferencearguments)}%
+ \clf_injectreference
+ {\referenceprefix}%
+ {#label}%
+ {\extrareferencearguments}%
+ \relax
\setlocationattributes
\global\lastsavedreferenceattribute\lastreferenceattribute
\dostarttagged\t!link\empty
@@ -1385,7 +1616,11 @@
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxcommand{injectreference("\referenceprefix","#label",nil,nil,\extrareferencearguments)}%
+ \clf_injectreference
+ {\referenceprefix}%
+ {#label}%
+ {\extrareferencearguments}%
+ \relax
\setlocationcolorspec{#resolver}% no consequence for strut
\global\lastsavedreferenceattribute\lastreferenceattribute
\dostarttagged\t!link\empty
@@ -1402,7 +1637,11 @@
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxcommand{injectreference("\referenceprefix","#label",nil,nil,\extrareferencearguments)}%
+ \clf_injectreference
+ {\referenceprefix}%
+ {#label}%
+ {\extrareferencearguments}%
+ \relax
\global\lastsavedreferenceattribute\lastreferenceattribute
\dostarttagged\t!link\empty
\hbox attr \referenceattribute \lastreferenceattribute {#content}%
@@ -1418,9 +1657,9 @@
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
\iflocation
- \ctxcommand{doifelsereference("\referenceprefix","#label",\extrareferencearguments)}%
+ \clf_doifelsereference{\referenceprefix}{#label}{\extrareferencearguments}%
{\expandtexincurrentreference
- \ctxcommand{injectcurrentreference(nil,nil)}%
+ \clf_injectcurrentreference
\setlocationattributes
\global\lastsavedreferenceattribute\lastreferenceattribute
\dostarttagged\t!link\empty
@@ -1440,8 +1679,8 @@
\ht\scratchbox#height%
\global\lastsavedreferenceattribute\attributeunsetvalue
\attribute\referenceattribute\attributeunsetvalue
- \ctxcommand{doifelsereference("\referenceprefix","#label",\extrareferencearguments)}%
- {\ctxcommand{injectcurrentreference(nil,nil)}%
+ \clf_doifelsereference{\referenceprefix}{#label}{\extrareferencearguments}%
+ {\clf_injectcurrentreference
\global\lastsavedreferenceattribute\lastreferenceattribute
\hbox attr \referenceattribute \lastreferenceattribute {\box\scratchbox}}
{\box\scratchbox}%
@@ -1503,13 +1742,16 @@
\let\useexternaldocument\usefile
\def\strc_references_use_url[#label][#url][#file][#description]%
- {\ctxcommand{useurl("#label",\!!bs\detokenize{#url}\!!es,\!!bs\detokenize{#file}\!!es,\!!bs\detokenize{#description}\!!es)}}
+ {\clf_useurl{#label}{\detokenize{#url}}{\detokenize{#file}}{\detokenize{#description}}}
\def\strc_references_use_file[#label][#file][#description]%
- {\ctxcommand{usefile("#label",\!!bs\detokenize{#file}\!!es,\!!bs\detokenize{#description}\!!es)}}
+ {\clf_usefile{#label}{\detokenize{#file}}{\detokenize{#description}}}
-\def\doifurldefinedelse #label{\ctxcommand{doifurldefinedelse ("#label")}}
-\def\doiffiledefinedelse#label{\ctxcommand{doiffiledefinedelse("#label")}}
+\def\doifelseurldefined #label{\clf_doifelseurldefined {#label}}
+\def\doifelsefiledefined#label{\clf_doifelsefiledefined{#label}}
+
+\let\doifurldefinedelse \doifelseurldefined
+\let\doiffiledefinedelse\doifelsefiledefined
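+
+%D A quick usage sketch (the label and url below are made up):
+%D
+%D \starttyping
+%D \useURL[wiki][http://contextgarden.net]
+%D \doifelseurldefined{wiki}{known}{unknown}
+%D \stoptyping
+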
%D \macros
%D {url,setupurl}
@@ -1540,7 +1782,7 @@
{\dontleavehmode
\begingroup
\useurlstyleandcolor\c!style\c!color
- \hyphenatedurl{\ctxcommand{geturl("#label")}}%
+ \hyphenatedurl{\clf_geturl{#label}}%
\endgroup}
%D This macro is hooked into a support macro, and thereby
@@ -1571,7 +1813,7 @@
\def\strc_references_do_special_from[#label]%
{\dontleavehmode
- \goto{\ctxcommand{from("#label")}}[fileorurl(#label)]}
+ \goto{\clf_from{#label}}[fileorurl(#label)]}
\def\dofromurldescription#content% called at the lua end
{#content}
@@ -1616,13 +1858,13 @@
{\dotripleargument\strc_references_define_program}
\def\strc_references_define_program[#name][#program][#description]%
- {\ctxcommand{defineprogram("#name",\!!bs#program\!!es,\!!bs#description\!!es)}}
+ {\clf_defineprogram{#name}{#program}{#description}}
\def\program[#name]% incompatible, more consistent, hardly used anyway
{\dontleavehmode
\begingroup
\useprogramsstyleandcolor\c!style\c!color
- \ctxcommand{getprogram("#name","\directprogramsparameter\c!alternative","\directprogramsparameter\c!space")}%
+ \clf_getprogram{#name}%
\endgroup}
%D As we can see, we directly use the special reference
@@ -1711,7 +1953,7 @@
%D
%D Only when \type {text} is not empty, a space is inserted.
-\def\dotextprefix#text%
+\unexpanded\def\dotextprefix#text%
{\begingroup
\setbox\scratchbox\hbox{#text}% to be solved some day
\ifdim\wd\scratchbox>\zeropoint
@@ -1847,14 +2089,24 @@
\installcorenamespace{referencingprefix}
\def\getreferencestructureprefix#kind#name#category% name will change
- {{
- prefix = "\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefix",
- separatorset = "\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixseparatorset",
- conversion = "\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixconversion",
- conversionset = "\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixconversionset",
- set = "\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixset",
- segments = "\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixsegments",
- connector = \!!bs\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixconnector\!!es,
+ {{%
+ prefix {\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefix}%
+ separatorset {\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixseparatorset}%
+ conversion {\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixconversion}%
+ conversionset {\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixconversionset}%
+ starter {\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixstarter}%
+ stopper {\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixstopper}%
+ set {\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixset}%
+ segments {\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixsegments}%
+ connector {\referencestructureprefixparameter{#kind}{#name}{#category}\c!prefixconnector}%
+ }%
+ {%
+ separatorset {\referencestructureprefixparameter{#kind}{#name}{#category}\c!numberseparatorset}%
+ conversion {\referencestructureprefixparameter{#kind}{#name}{#category}\c!numberconversion}%
+ conversionset {\referencestructureprefixparameter{#kind}{#name}{#category}\c!numberconversionset}%
+ starter {\referencestructureprefixparameter{#kind}{#name}{#category}\c!numberstarter}%
+ stopper {\referencestructureprefixparameter{#kind}{#name}{#category}\c!numberstopper}%
+ segments {\referencestructureprefixparameter{#kind}{#name}{#category}\c!numbersegments}%
}}
\unexpanded\def\setupreferencestructureprefix
@@ -1876,16 +2128,11 @@
\csname \??referencingprefix:#category#parameter\endcsname
\fi\fi\fi}
-% \def\currentreferencedefault
-% {\ctxcommand{filterdefaultreference()}}
-
-\def\currentreferencedefault
- {\ctxcommand{filterreference("\s!default",\ctxcommand{getcurrentprefixspec("\s!default")})}}
-
-% needs testing
-%
-% \def\currentreferencedefault
-% {\ctxcommand{currentreferencedefault()}}
+\def\currentreferencedefault % for some reason we need to explicitly expand
+ {\normalexpanded{\noexpand\clf_filterdefaultreference
+ {\s!default}%
+ \noexpand\getreferencestructureprefix\clf_getcurrentprefixspec{\s!default}% returns #kind#name#category
+ \relax}}
%D Not all support is visible by looking at the \TEX\ code; here is one such case:
%D
@@ -1903,6 +2150,55 @@
%D \stopinteractionmenu
%D \stoptyping
+%D Relatively new:
+%D
+%D \starttyping
+%D \chapter{The never ending story}
+%D
+%D \section{An ending story}
+%D
+%D \in{chapter}[match(complex bibliographies)]
+%D \in{chapter}[match(never ending)]
+%D \in{chapter}[match(ending)]
+%D \in{chapter}[match(chapter:never ending)]
+%D \in{chapter}[match(chapter:ending)]
+%D \in{section}[match(section:ending)]
+%D \in{figure}[match(float:mess)]
+%D \in{figure}[match(figure:mess)]
+%D \in{figure (not found)}[match(section:mess)]
+%D \in{figure (not found)}[match(section:xxxx)]
+%D \in{figure}[match(mess)]
+%D
+%D \placefigure{What a mess}{}
+%D
+%D \chapter{About complex bibliographies}
+%D
+%D \in{chapter}[match(complex bibliographies)]
+%D \in{chapter}[match(never ending)]
+%D \in{figure}[match(mess)]
+%D \stoptyping
+
+%D Tracing:
+
+\unexpanded\def\strc_references_tracer#1#2% \csleft csright
+ {\hbox to \zeropoint \bgroup
+ \hss
+ \infofont
+ \darkblue
+ \ifx#1\empty\else
+ \raise\strutht \hbox \s!to \zeropoint \bgroup
+ \hss#1\hskip.2\emwidth
+ \egroup
+ \fi
+ \vrule \s!height 1.5\strutht \s!depth \strutdp \s!width .1\emwidth
+ \ifx#2\empty\else
+ \raise\strutht \hbox \s!to \zeropoint \bgroup
+ \hskip.2\emwidth#2\hss
+ \egroup
+ \fi
+ \hss
+ \egroup}%
+
\protect \endinput
% tricky:
diff --git a/tex/context/base/strc-reg.lua b/tex/context/base/strc-reg.lua
index b0d8a8a25..ed3292195 100644
--- a/tex/context/base/strc-reg.lua
+++ b/tex/context/base/strc-reg.lua
@@ -13,50 +13,203 @@ local utfchar = utf.char
local lpegmatch = lpeg.match
local allocate = utilities.storage.allocate
-local trace_registers = false trackers.register("structures.registers", function(v) trace_registers = v end)
+local trace_registers = false trackers.register("structures.registers", function(v) trace_registers = v end)
-local report_registers = logs.reporter("structure","registers")
+local report_registers = logs.reporter("structure","registers")
-local structures = structures
-local registers = structures.registers
-local helpers = structures.helpers
-local sections = structures.sections
-local documents = structures.documents
-local pages = structures.pages
-local references = structures.references
+local structures = structures
+local registers = structures.registers
+local helpers = structures.helpers
+local sections = structures.sections
+local documents = structures.documents
+local pages = structures.pages
+local references = structures.references
-local mappings = sorters.mappings
-local entries = sorters.entries
-local replacements = sorters.replacements
+local usedinternals = references.usedinternals
-local processors = typesetters.processors
-local splitprocessor = processors.split
+local mappings = sorters.mappings
+local entries = sorters.entries
+local replacements = sorters.replacements
-local texgetcount = tex.getcount
+local processors = typesetters.processors
+local splitprocessor = processors.split
-local variables = interfaces.variables
-local context = context
-local commands = commands
+local texgetcount = tex.getcount
-local matchingtilldepth = sections.matchingtilldepth
-local numberatdepth = sections.numberatdepth
+local variables = interfaces.variables
+local v_forward = variables.forward
+local v_all = variables.all
+local v_yes = variables.yes
+local v_current = variables.current
+local v_previous = variables.previous
+local v_text = variables.text
-local absmaxlevel = 5 -- \c_strc_registers_maxlevel
+local context = context
+local commands = commands
+
+local implement = interfaces.implement
+
+local matchingtilldepth = sections.matchingtilldepth
+local numberatdepth = sections.numberatdepth
+local currentlevel = sections.currentlevel
+local currentid = sections.currentid
+
+local touserdata = helpers.touserdata
+
+local internalreferences = references.internals
+local setinternalreference = references.setinternalreference
+
+local setmetatableindex = table.setmetatableindex
+local texsetattribute = tex.setattribute
+
+local a_destination = attributes.private('destination')
+
+local absmaxlevel = 5 -- \c_strc_registers_maxlevel
+
+local h_prefixpage = helpers.prefixpage
+local h_prefixlastpage = helpers.prefixlastpage
+local h_title = helpers.title
+
+local ctx_startregisteroutput = context.startregisteroutput
+local ctx_stopregisteroutput = context.stopregisteroutput
+local ctx_startregistersection = context.startregistersection
+local ctx_stopregistersection = context.stopregistersection
+local ctx_startregisterentries = context.startregisterentries
+local ctx_stopregisterentries = context.stopregisterentries
+local ctx_startregisterentry = context.startregisterentry
+local ctx_stopregisterentry = context.stopregisterentry
+local ctx_startregisterpages = context.startregisterpages
+local ctx_stopregisterpages = context.stopregisterpages
+local ctx_startregisterseewords = context.startregisterseewords
+local ctx_stopregisterseewords = context.stopregisterseewords
+local ctx_registerentry = context.registerentry
+local ctx_registerseeword = context.registerseeword
+local ctx_registerpagerange = context.registerpagerange
+local ctx_registeronepage = context.registeronepage
+
+-- possible export, but ugly code (overloads)
+--
+-- local output, section, entries, nofentries, pages, words, rawtext
+--
+-- h_title = function(a,b) rawtext = a end
+--
+-- local function ctx_startregisteroutput()
+-- output = { }
+-- section = nil
+-- entries = nil
+-- nofentries = nil
+-- pages = nil
+-- words = nil
+-- rawtext = nil
+-- end
+-- local function ctx_stopregisteroutput()
+-- inspect(output)
+-- output = nil
+-- section = nil
+-- entries = nil
+-- nofentries = nil
+-- pages = nil
+-- words = nil
+-- rawtext = nil
+-- end
+-- local function ctx_startregistersection(tag)
+-- section = { }
+-- output[#output+1] = {
+-- section = section,
+-- tag = tag,
+-- }
+-- end
+-- local function ctx_stopregistersection()
+-- end
+-- local function ctx_startregisterentries(n)
+-- entries = { }
+-- nofentries = 0
+-- section[#section+1] = entries
+-- end
+-- local function ctx_stopregisterentries()
+-- end
+-- local function ctx_startregisterentry(n) -- or subentries (nested?)
+-- nofentries = nofentries + 1
+-- entry = { }
+-- entries[nofentries] = entry
+-- end
+-- local function ctx_stopregisterentry()
+-- nofentries = nofentries - 1
+-- entry = entries[nofentries]
+-- end
+-- local function ctx_startregisterpages()
+-- pages = { }
+-- entry.pages = pages
+-- end
+-- local function ctx_stopregisterpages()
+-- end
+-- local function ctx_startregisterseewords()
+-- words = { }
+-- entry.words = words
+-- end
+-- local function ctx_stopregisterseewords()
+-- end
+-- local function ctx_registerentry(processor,internal,seeparent,text)
+-- text()
+-- entry.text = {
+-- processor = processor,
+-- internal = internal,
+-- seeparent = seeparent,
+-- text = rawtext,
+-- }
+-- end
+-- local function ctx_registerseeword(i,n,processor,internal,seeindex,seetext)
+-- seetext()
+-- entry.words[i] = {
+-- processor = processor,
+-- internal = internal,
+-- seeparent = seeparent,
+-- seetext = rawtext,
+-- }
+-- end
+-- local function ctx_registerpagerange(fprocessor,finternal,frealpage,lprocessor,linternal,lrealpage)
+-- pages[#pages+1] = {
+-- first = {
+-- processor = fprocessor,
+-- internal = finternal,
+-- realpage = frealpage,
+-- },
+-- last = {
+-- processor = lprocessor,
+-- internal = linternal,
+-- realpage = lrealpage,
+-- },
+-- }
+-- end
+-- local function ctx_registeronepage(processor,internal,realpage)
+-- pages[#pages+1] = {
+-- processor = processor,
+-- internal = internal,
+-- realpage = realpage,
+-- }
+-- end
-- some day we will share registers and lists (although there are some conceptual
-- differences in the application of keywords)
local function filtercollected(names,criterium,number,collected,prevmode)
- if not criterium or criterium == "" then criterium = variables.all end
- local data = documents.data
- local numbers, depth = data.numbers, data.depth
- local hash, result, nofresult, all, detail = { }, { }, 0, not names or names == "" or names == variables.all, nil
+ if not criterium or criterium == "" then
+ criterium = v_all
+ end
+ local data = documents.data
+ local numbers = data.numbers
+ local depth = data.depth
+ local hash = { }
+ local result = { }
+ local nofresult = 0
+ local all = not names or names == "" or names == v_all
+ local detail = nil
if not all then
for s in gmatch(names,"[^, ]+") do
hash[s] = true
end
end
- if criterium == variables.all or criterium == variables.text then
+ if criterium == v_all or criterium == v_text then
for i=1,#collected do
local v = collected[i]
if all then
@@ -70,10 +223,11 @@ local function filtercollected(names,criterium,number,collected,prevmode)
end
end
end
- elseif criterium == variables.current then
+ elseif criterium == v_current then
+ local collectedsections = sections.collected
for i=1,#collected do
local v = collected[i]
- local sectionnumber = sections.collected[v.references.section]
+ local sectionnumber = collectedsections[v.references.section]
if sectionnumber then
local cnumbers = sectionnumber.numbers
if prevmode then
@@ -108,10 +262,11 @@ local function filtercollected(names,criterium,number,collected,prevmode)
end
end
end
- elseif criterium == variables.previous then
+ elseif criterium == v_previous then
+ local collectedsections = sections.collected
for i=1,#collected do
local v = collected[i]
- local sectionnumber = sections.collected[v.references.section]
+ local sectionnumber = collectedsections[v.references.section]
if sectionnumber then
local cnumbers = sectionnumber.numbers
if (all or hash[v.metadata.name]) and #cnumbers >= depth then
@@ -141,12 +296,13 @@ local function filtercollected(names,criterium,number,collected,prevmode)
end
elseif criterium == variables["local"] then
if sections.autodepth(data.numbers) == 0 then
- return filtercollected(names,variables.all,number,collected,prevmode)
+ return filtercollected(names,v_all,number,collected,prevmode)
else
- return filtercollected(names,variables.current,number,collected,prevmode)
+ return filtercollected(names,v_current,number,collected,prevmode)
end
else -- sectionname, number
-- beware, this works ok for registers
+ -- to be redone with reference instead
local depth = sections.getlevel(criterium)
local number = tonumber(number) or numberatdepth(depth) or 0
if trace_registers then
@@ -193,81 +349,155 @@ registers.filtercollected = filtercollected
-- result table; we might do that here as well but since sorting code is
-- older we delay that decision
+-- maybe store the specification in the format (although we predefine only
+-- saved registers)
+
+local function checker(t,k)
+ local v = {
+ metadata = {
+ language = 'en',
+ sorted = false,
+ class = class,
+ },
+ entries = { },
+ }
+ t[k] = v
+ return v
+end
+
local function initializer()
tobesaved = registers.tobesaved
collected = registers.collected
- local internals = references.internals
+ setmetatableindex(tobesaved,checker)
+ setmetatableindex(collected,checker)
+ local usedinternals = references.usedinternals
for name, list in next, collected do
local entries = list.entries
- for e=1,#entries do
- local entry = entries[e]
- local r = entry.references
- if r then
- local internal = r and r.internal
- if internal then
- internals[internal] = entry
+ if not list.metadata.notsaved then
+ for e=1,#entries do
+ local entry = entries[e]
+ local r = entry.references
+ if r then
+ local internal = r and r.internal
+ if internal then
+ internalreferences[internal] = entry
+ usedinternals[internal] = r.used
+ end
end
end
end
end
end
-job.register('structures.registers.collected', tobesaved, initializer)
+local function finalizer()
+ local flaginternals = references.flaginternals
+ for k, v in next, tobesaved do
+ local entries = v.entries
+ if entries then
+ for i=1,#entries do
+ local r = entries[i].references
+ if r and flaginternals[r.internal] then
+ r.used = true
+ end
+ end
+ end
+ end
+end
-local function allocate(class)
+job.register('structures.registers.collected', tobesaved, initializer, finalizer)
+
+setmetatableindex(tobesaved,checker)
+setmetatableindex(collected,checker)
+
+local function defineregister(class,method)
local d = tobesaved[class]
- if not d then
- d = {
- metadata = {
- language = 'en',
- sorted = false,
- class = class
- },
- entries = { },
- }
- tobesaved[class] = d
+ if method == v_forward then
+ d.metadata.notsaved = true
end
- return d
end
-registers.define = allocate
+registers.define = defineregister -- 4 times is somewhat over the top but we want consistency
+registers.setmethod = defineregister -- and we might have a difference some day
+
+implement {
+ name = "defineregister",
+ actions = defineregister,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "setregistermethod",
+ actions = defineregister, -- duplicate use
+ arguments = { "string", "string" }
+}
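+
+-- At the TeX end these two are called as \clf_defineregister{name}{method} and
+-- \clf_setregistermethod{name}{method}; see \everydefineregister and
+-- \everysetupregister in strc-reg.mkiv further down.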
local entrysplitter = lpeg.tsplitat('+') -- & obsolete in mkiv
local tagged = { }
+-- this whole splitting is an inheritance of mkii
+
local function preprocessentries(rawdata)
local entries = rawdata.entries
if entries then
---~ table.print(rawdata)
- local e, k = entries[1] or "", entries[2] or ""
- local et, kt, entryproc, pageproc
- if type(e) == "table" then
- et = e
- else
- entryproc, e = splitprocessor(e)
+ --
+ -- local e = entries[1] or ""
+ -- local k = entries[2] or ""
+ -- local et, kt, entryproc, pageproc
+ -- if type(e) == "table" then
+ -- et = e
+ -- else
+ -- entryproc, e = splitprocessor(e)
+ -- et = lpegmatch(entrysplitter,e)
+ -- end
+ -- if type(k) == "table" then
+ -- kt = k
+ -- else
+ -- pageproc, k = splitprocessor(k)
+ -- kt = lpegmatch(entrysplitter,k)
+ -- end
+ --
+ local processors = rawdata.processors
+ local et = entries.entries
+ local kt = entries.keys
+ local entryproc = processors and processors.entry
+ local pageproc = processors and processors.page
+ if entryproc == "" then
+ entryproc = nil
+ end
+ if pageproc == "" then
+ pageproc = nil
+ end
+ if not et then
+ local p, e = splitprocessor(entries.entry or "")
+ if p then
+ entryproc = p
+ end
et = lpegmatch(entrysplitter,e)
end
- if type(k) == "table" then
- kt = k
- else
- pageproc, k = splitprocessor(k)
+ if not kt then
+ local p, k = splitprocessor(entries.key or "")
+ if p then
+ pageproc = p
+ end
kt = lpegmatch(entrysplitter,k)
end
+ --
entries = { }
- for k=1,#et do
- entries[k] = { et[k] or "", kt[k] or "" }
- end
+ local ok = false
for k=#et,1,-1 do
- if entries[k][1] ~= "" then
- break
- else
+ local etk = et[k]
+ local ktk = kt[k]
+ if not ok and etk == "" then
entries[k] = nil
+ else
+ entries[k] = { etk or "", ktk ~= "" and ktk or nil }
+ ok = true
end
end
rawdata.list = entries
if pageproc or entryproc then
- rawdata.processors = { entryproc, pageproc }
+ rawdata.processors = { entryproc, pageproc } -- old way: indexed .. will be keys
end
rawdata.entries = nil
end
@@ -277,21 +507,74 @@ local function preprocessentries(rawdata)
end
end
-function registers.store(rawdata) -- metadata, references, entries
- local data = allocate(rawdata.metadata.name).entries
+local function storeregister(rawdata) -- metadata, references, entries
local references = rawdata.references
- references.realpage = references.realpage or 0 -- just to be sure as it can be refered to
+ local metadata = rawdata.metadata
+ -- checking
+ if not metadata then
+ metadata = { }
+ rawdata.metadata = metadata
+ end
+ --
+ if not metadata.kind then
+ metadata.kind = "entry"
+ end
+ --
+ if not metadata.catcodes then
+ metadata.catcodes = tex.catcodetable -- get
+ end
+ --
+ local name = metadata.name
+ local notsaved = tobesaved[name].metadata.notsaved
+ --
+ if not references then
+ references = { }
+ rawdata.references = references
+ end
+ --
+ local internal = references.internal
+ if not internal then
+ internal = texgetcount("locationcount") -- we assume that it has been set
+ references.internal = internal
+ end
+ --
+ if notsaved then
+ usedinternals[internal] = true -- todo view (we assume that forward references index entries are used)
+ end
+ --
+ if not references.realpage then
+        references.realpage = 0 -- just to be sure as it can be referred to
+ end
+ --
+ local userdata = rawdata.userdata
+ if userdata then
+ rawdata.userdata = touserdata(userdata)
+ end
+ --
+ references.section = currentid()
+ metadata.level = currentlevel()
+ --
+ local data = notsaved and collected[name] or tobesaved[name]
+ local entries = data.entries
+ internalreferences[internal] = rawdata
preprocessentries(rawdata)
- data[#data+1] = rawdata
+ entries[#entries+1] = rawdata
local label = references.label
- if label and label ~= "" then tagged[label] = #data end
- context(#data)
+ if label and label ~= "" then
+ tagged[label] = #entries
+ else
+ references.label = nil
+ end
+ return #entries
end
+registers.store = storeregister
+
function registers.enhance(name,n)
- local r = tobesaved[name].entries[n]
- if r then
- r.references.realpage = texgetcount("realpageno")
+ local data = tobesaved[name].metadata.notsaved and collected[name] or tobesaved[name]
+ local entry = data.entries[n]
+ if entry then
+ entry.references.realpage = texgetcount("realpageno")
end
end
@@ -300,21 +583,30 @@ function registers.extend(name,tag,rawdata) -- maybe do lastsection internally
tag = tagged[tag]
end
if tag then
- local r = tobesaved[name].entries[tag]
- if r then
- local rr = r.references
- rr.lastrealpage = texgetcount("realpageno")
- rr.lastsection = sections.currentid()
+ local data = tobesaved[name].metadata.notsaved and collected[name] or tobesaved[name]
+ local entry = data.entries[tag]
+ if entry then
+ local references = entry.references
+ references.lastrealpage = texgetcount("realpageno")
+ references.lastsection = currentid()
if rawdata then
+ local userdata = rawdata.userdata
+ if userdata then
+ rawdata.userdata = touserdata(userdata)
+ end
if rawdata.entries then
preprocessentries(rawdata)
end
- for k,v in next, rawdata do
- if not r[k] then
- r[k] = v
+ local metadata = rawdata.metadata
+ if metadata and not metadata.catcodes then
+ metadata.catcodes = tex.catcodetable -- get
+ end
+ for k, v in next, rawdata do
+ local rk = references[k]
+ if not rk then
+ references[k] = v
else
- local rk = r[k]
- for kk,vv in next, v do
+ for kk, vv in next, v do
if type(vv) == "table" then
if next(vv) then
rk[kk] = vv
@@ -330,6 +622,71 @@ function registers.extend(name,tag,rawdata) -- maybe do lastsection internally
end
end
+function registers.get(tag,n)
+ local list = tobesaved[tag]
+ return list and list.entries[n]
+end
+
+implement {
+ name = "enhanceregister",
+ actions = registers.enhance,
+ arguments = { "string", "integer" }
+}
+
+implement {
+ name = "extendregister",
+ actions = registers.extend,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "storeregister",
+ actions = function(rawdata)
+ local nofentries = storeregister(rawdata)
+ setinternalreference { internal = rawdata.references.internal }
+ context(nofentries)
+ end,
+ arguments = {
+ {
+ { "metadata", {
+ { "kind" },
+ { "name" },
+ { "coding" },
+ { "level", "integer" },
+ { "catcodes", "integer" },
+ { "own" },
+ { "xmlroot" },
+ { "xmlsetup" }
+ }
+ },
+ { "entries", {
+ { "entries", "list" },
+ { "keys", "list" },
+ { "entry" },
+ { "key" }
+ }
+ },
+ { "references", {
+ { "internal", "integer" },
+ { "section", "integer" },
+ { "label" }
+ }
+ },
+ { "seeword", {
+ { "text" }
+ }
+ },
+ { "processors", {
+ { "entry" },
+ { "key" },
+ { "page" }
+ }
+ },
+ { "userdata" },
+ }
+ }
+}
+
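+-- For illustration only (made-up values), the rawdata table that reaches
+-- storeregister after the keyword scan roughly looks like this:
+--
+-- {
+--     metadata = { kind = "entry", name = "index", coding = "tex" },
+--     entries  = { entry = "alpha+beta", key = "a+b" },
+--     userdata = "",
+-- }
+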
-- sorting and rendering
local compare = sorters.comparers.basic
@@ -339,7 +696,8 @@ function registers.compare(a,b)
if result ~= 0 then
return result
else
- local ka, kb = a.metadata.kind, b.metadata.kind
+ local ka = a.metadata.kind
+ local kb = b.metadata.kind
if ka == kb then
local page_a, page_b = a.references.realpage, b.references.realpage
if not page_a or not page_b then
@@ -364,7 +722,7 @@ end
local seeindex = 0
--- meerdere loops, seewords, dan words, an seewords
+-- meerdere loops, seewords, dan words, anders seewords
local function crosslinkseewords(result) -- all words
-- collect all seewords
@@ -453,17 +811,19 @@ end
function registers.prepare(data)
-- data has 'list' table
- local strip = sorters.strip
+ local strip = sorters.strip
local splitter = sorters.splitters.utf
- local result = data.result
+ local result = data.result
if result then
for i=1, #result do
- local entry, split = result[i], { }
- local list = entry.list
+ local entry = result[i]
+ local split = { }
+ local list = entry.list
if list then
for l=1,#list do
- local ll = list[l]
- local word, key = ll[1], ll[2]
+ local ll = list[l]
+ local word = ll[1]
+ local key = ll[2]
if not key or key == "" then
key = word
end
@@ -478,7 +838,11 @@ function registers.prepare(data)
end
function registers.sort(data,options)
- sorters.sort(data.result,registers.compare)
+ -- if options.pagenumber == false then
+ -- sorters.sort(data.result,compare)
+ -- else
+ sorters.sort(data.result,registers.compare)
+ -- end
end
function registers.unique(data,options)
@@ -487,7 +851,8 @@ function registers.unique(data,options)
for k=1,#dataresult do
local v = dataresult[k]
if prev then
- local pr, vr = prev.references, v.references
+ local vr = v.references
+ local pr = prev.references
if not equal(prev.list,v.list) then
-- ok
elseif pr.realpage ~= vr.realpage then
@@ -530,10 +895,11 @@ function registers.finalize(data,options) -- maps character to index (order)
if trace_registers then
report_registers("splitting at %a",tag)
end
- done, nofdone = { }, 0
+ done = { }
+ nofdone = 0
nofsplit = nofsplit + 1
+ lasttag = tag
split[nofsplit] = { tag = tag, data = done }
- lasttag = tag
end
nofdone = nofdone + 1
done[nofdone] = v
@@ -541,7 +907,7 @@ function registers.finalize(data,options) -- maps character to index (order)
data.result = split
end
-function registers.analyzed(class,options)
+local function analyzeregister(class,options)
local data = collected[class]
if data and data.entries then
options = options or { }
@@ -558,34 +924,55 @@ function registers.analyzed(class,options)
end
end
+registers.analyze = analyzeregister
+
+implement {
+ name = "analyzeregister",
+ actions = { analyzeregister, context },
+ arguments = {
+ "string",
+ {
+ { "language" },
+ { "method" },
+ { "numberorder" },
+ { "compress" },
+ { "criterium" },
+ { "pagenumber", "boolean" },
+ }
+ }
+}
+
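+-- The context action pipes the return value of analyzeregister back to TeX,
+-- where \clf_analyzeregister ends up in \utilityregisterlength (see
+-- strc-reg.mkiv); zero signals that there is nothing to typeset.
+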
-- todo take conversion from index
function registers.userdata(index,name)
local data = references.internals[tonumber(index)]
- data = data and data.userdata and data.userdata[name]
- if data then
- context(data)
- end
+ return data and data.userdata and data.userdata[name] or nil
end
+implement {
+ name = "registeruserdata",
+ actions = { registers.userdata, context },
+ arguments = { "integer", "string" }
+}
+
-- todo: ownnumber
local function pagerange(f_entry,t_entry,is_last,prefixspec,pagespec)
local fer, ter = f_entry.references, t_entry.references
- context.registerpagerange(
+ ctx_registerpagerange(
f_entry.processors and f_entry.processors[2] or "",
fer.internal or 0,
fer.realpage or 0,
function()
- helpers.prefixpage(f_entry,prefixspec,pagespec)
+ h_prefixpage(f_entry,prefixspec,pagespec)
end,
ter.internal or 0,
ter.lastrealpage or ter.realpage or 0,
function()
if is_last then
- helpers.prefixlastpage(t_entry,prefixspec,pagespec) -- swaps page and realpage keys
+ h_prefixlastpage(t_entry,prefixspec,pagespec) -- swaps page and realpage keys
else
- helpers.prefixpage (t_entry,prefixspec,pagespec)
+ h_prefixpage (t_entry,prefixspec,pagespec)
end
end
)
@@ -593,11 +980,11 @@ end
local function pagenumber(entry,prefixspec,pagespec)
local er = entry.references
- context.registeronepage(
+ ctx_registeronepage(
entry.processors and entry.processors[2] or "",
er.internal or 0,
er.realpage or 0,
- function() helpers.prefixpage(entry,prefixspec,pagespec) end
+ function() h_prefixpage(entry,prefixspec,pagespec) end
)
end
@@ -665,8 +1052,9 @@ local function collapsepages(pages)
end
function registers.flush(data,options,prefixspec,pagespec)
- local collapse_singles = options.compress == variables.yes
- local collapse_ranges = options.compress == variables.all
+ local collapse_singles = options.compress == v_yes
+ local collapse_ranges = options.compress == v_all
+ local show_page_number = options.pagenumber ~= false -- true or false
local result = data.result
local maxlevel = 0
--
@@ -684,18 +1072,19 @@ function registers.flush(data,options,prefixspec,pagespec)
report_registers("limiting level to %a",maxlevel)
end
--
- context.startregisteroutput()
-local done = { }
+ ctx_startregisteroutput()
+ local done = { }
+ local started = false
for i=1,#result do
-- ranges need checking !
local sublist = result[i]
-- local done = { false, false, false, false }
-for i=1,maxlevel do
- done[i] = false
-end
+ for i=1,maxlevel do
+ done[i] = false
+ end
local data = sublist.data
local d, n = 0, 0
- context.startregistersection(sublist.tag)
+ ctx_startregistersection(sublist.tag)
for d=1,#data do
local entry = data[d]
if entry.metadata.kind == "see" then
@@ -703,8 +1092,8 @@ end
if #list > 1 then
list[#list] = nil
else
- -- we have an \seeindex{Foo}{Bar} without Foo being defined anywhere
- report_registers("invalid see entry in register %a, reference %a",entry.metadata.name,list[1][1])
+ -- we have an \seeindex{Foo}{Bar} without Foo being defined anywhere .. somehow this message is wrong
+ -- report_registers("invalid see entry in register %a, reference %a",entry.metadata.name,list[1][1])
end
end
end
@@ -712,140 +1101,158 @@ end
-- but we don't want to allocate too many entries so there we go
while d < #data do
d = d + 1
- local entry = data[d]
- local e = { false, false, false }
-for i=3,maxlevel do
- e[i] = false
-end
+ local entry = data[d]
local metadata = entry.metadata
- local kind = metadata.kind
- local list = entry.list
+ local kind = metadata.kind
+ local list = entry.list
+ local e = { false, false, false }
+ for i=3,maxlevel do
+ e[i] = false
+ end
for i=1,maxlevel do
if list[i] then
e[i] = list[i][1]
end
- if e[i] ~= done[i] then
- if e[i] and e[i] ~= "" then
- done[i] = e[i]
-for j=i+1,maxlevel do
- done[j] = false
-end
- if n == i then
- context.stopregisterentries()
- context.startregisterentries(n)
- else
- while n > i do
- n = n - 1
- context.stopregisterentries()
- end
- while n < i do
- n = n + 1
- context.startregisterentries(n)
- end
+ if e[i] == done[i] then
+ -- skip
+ elseif not e[i] then
+ -- see ends up here
+ -- can't happen any more
+ done[i] = false
+ for j=i+1,maxlevel do
+ done[j] = false
+ end
+ elseif e[i] == "" then
+ done[i] = false
+ for j=i+1,maxlevel do
+ done[j] = false
+ end
+ else
+ done[i] = e[i]
+ for j=i+1,maxlevel do
+ done[j] = false
+ end
+ if started then
+ ctx_stopregisterentry()
+ started = false
+ end
+ if n == i then
+-- ctx_stopregisterentries()
+-- ctx_startregisterentries(n)
+ else
+ while n > i do
+ n = n - 1
+ ctx_stopregisterentries()
end
- local internal = entry.references.internal or 0
- local seeparent = entry.references.seeparent or ""
- local processor = entry.processors and entry.processors[1] or ""
- -- so, we need to keep e as is (local), or we need local title = e[i] ... which might be
- -- more of a problem
- if metadata then
- context.registerentry(processor,internal,seeparent,function() helpers.title(e[i],metadata) end)
- else -- ?
- context.registerentry(processor,internal,seeindex,e[i])
+ while n < i do
+ n = n + 1
+ ctx_startregisterentries(n)
end
+ end
+ local references = entry.references
+ local processors = entry.processors
+ local internal = references.internal or 0
+ local seeparent = references.seeparent or ""
+ local processor = processors and processors[1] or ""
+ -- so, we need to keep e as is (local), or we need local title = e[i] ... which might be
+ -- more of a problem
+ ctx_startregisterentry(0) -- will become a counter
+ started = true
+ if metadata then
+ ctx_registerentry(processor,internal,seeparent,function() h_title(e[i],metadata) end)
else
- done[i] = false
-for j=i+1,maxlevel do
- done[j] = false
-end
+ -- can this happen?
+ ctx_registerentry(processor,internal,seeindex,e[i])
end
end
end
if kind == 'entry' then
- context.startregisterpages()
- if collapse_singles or collapse_ranges then
- -- we collapse ranges and keep existing ranges as they are
- -- so we get prebuilt as well as built ranges
- local first, last, prev, pages, dd, nofpages = entry, nil, entry, { }, d, 0
- while dd < #data do
- dd = dd + 1
- local next = data[dd]
- if next and next.metadata.kind == "see" then
- dd = dd - 1
- break
- else
- local el, nl = entry.list, next.list
- if not equal(el,nl) then
+ if show_page_number then
+ ctx_startregisterpages()
+ if collapse_singles or collapse_ranges then
+ -- we collapse ranges and keep existing ranges as they are
+ -- so we get prebuilt as well as built ranges
+ local first, last, prev, pages, dd, nofpages = entry, nil, entry, { }, d, 0
+ while dd < #data do
+ dd = dd + 1
+ local next = data[dd]
+ if next and next.metadata.kind == "see" then
dd = dd - 1
- --~ first = nil
break
- elseif next.references.lastrealpage then
- nofpages = nofpages + 1
- pages[nofpages] = first and { first, last or first } or { entry, entry }
- nofpages = nofpages + 1
- pages[nofpages] = { next, next }
- first, last, prev = nil, nil, nil
- elseif not first then
- first, prev = next, next
- elseif next.references.realpage - prev.references.realpage == 1 then -- 1 ?
- last, prev = next, next
else
- nofpages = nofpages + 1
- pages[nofpages] = { first, last or first }
- first, last, prev = next, nil, next
+ local el, nl = entry.list, next.list
+ if not equal(el,nl) then
+ dd = dd - 1
+ --~ first = nil
+ break
+ elseif next.references.lastrealpage then
+ nofpages = nofpages + 1
+ pages[nofpages] = first and { first, last or first } or { entry, entry }
+ nofpages = nofpages + 1
+ pages[nofpages] = { next, next }
+ first, last, prev = nil, nil, nil
+ elseif not first then
+ first, prev = next, next
+ elseif next.references.realpage - prev.references.realpage == 1 then -- 1 ?
+ last, prev = next, next
+ else
+ nofpages = nofpages + 1
+ pages[nofpages] = { first, last or first }
+ first, last, prev = next, nil, next
+ end
end
end
- end
- if first then
- nofpages = nofpages + 1
- pages[nofpages] = { first, last or first }
- end
- if collapse_ranges and nofpages > 1 then
- nofpages = collapsepages(pages)
- end
- if nofpages > 0 then -- or 0
- d = dd
- for p=1,nofpages do
- local first, last = pages[p][1], pages[p][2]
- if first == last then
- if first.references.lastrealpage then
- pagerange(first,first,true,prefixspec,pagespec)
+ if first then
+ nofpages = nofpages + 1
+ pages[nofpages] = { first, last or first }
+ end
+ if collapse_ranges and nofpages > 1 then
+ nofpages = collapsepages(pages)
+ end
+ if nofpages > 0 then -- or 0
+ d = dd
+ for p=1,nofpages do
+ local first, last = pages[p][1], pages[p][2]
+ if first == last then
+ if first.references.lastrealpage then
+ pagerange(first,first,true,prefixspec,pagespec)
+ else
+ pagenumber(first,prefixspec,pagespec)
+ end
+ elseif last.references.lastrealpage then
+ pagerange(first,last,true,prefixspec,pagespec)
else
- pagenumber(first,prefixspec,pagespec)
+ pagerange(first,last,false,prefixspec,pagespec)
end
- elseif last.references.lastrealpage then
- pagerange(first,last,true,prefixspec,pagespec)
- else
- pagerange(first,last,false,prefixspec,pagespec)
end
- end
- elseif entry.references.lastrealpage then
- pagerange(entry,entry,true,prefixspec,pagespec)
- else
- pagenumber(entry,prefixspec,pagespec)
- end
- else
- while true do
- if entry.references.lastrealpage then
+ elseif entry.references.lastrealpage then
pagerange(entry,entry,true,prefixspec,pagespec)
else
pagenumber(entry,prefixspec,pagespec)
end
- if d == #data then
- break
- else
- d = d + 1
- local next = data[d]
- if next.metadata.kind == "see" or not equal(entry.list,next.list) then
- d = d - 1
+ else
+ while true do
+ if entry.references.lastrealpage then
+ pagerange(entry,entry,true,prefixspec,pagespec)
+ else
+ pagenumber(entry,prefixspec,pagespec)
+ end
+ if d == #data then
break
else
- entry = next
+ d = d + 1
+ local next = data[d]
+ if next.metadata.kind == "see" or not equal(entry.list,next.list) then
+ d = d - 1
+ break
+ else
+ entry = next
+ end
end
end
end
+ ctx_stopregisterpages()
end
- context.stopregisterpages()
elseif kind == 'see' then
local t, nt = { }, 0
while true do
@@ -864,38 +1271,77 @@ end
end
end
end
- context.startregisterseewords()
+ ctx_startregisterseewords()
for i=1,nt do
local entry = t[i]
local seeword = entry.seeword
local seetext = seeword.text or ""
local processor = seeword.processor or (entry.processors and entry.processors[1]) or ""
local seeindex = entry.references.seeindex or ""
- context.registerseeword(i,n,processor,0,seeindex,seetext)
+ -- ctx_registerseeword(i,nt,processor,0,seeindex,seetext)
+ ctx_registerseeword(i,nt,processor,0,seeindex,function() h_title(seetext,metadata) end)
end
- context.stopregisterseewords()
+ ctx_stopregisterseewords()
end
end
+ if started then
+ ctx_stopregisterentry()
+ started = false
+ end
while n > 0 do
- context.stopregisterentries()
+ ctx_stopregisterentries()
n = n - 1
end
- context.stopregistersection()
+ ctx_stopregistersection()
end
- context.stopregisteroutput()
+ ctx_stopregisteroutput()
-- for now, maybe at some point we will do a multipass or so
data.result = nil
data.metadata.sorted = false
-end
-
-
-function registers.analyze(class,options)
- context(registers.analyzed(class,options))
+ -- temp hack for luajittex :
+ local entries = data.entries
+ for i=1,#entries do
+ entries[i].split = nil
+ end
+ -- collectgarbage("collect")
end
function registers.process(class,...)
- if registers.analyzed(class,...) > 0 then
- registers.flush(collected[class],...)
+ if analyzeregister(class,...) > 0 then
+ local data = collected[class]
+ registers.flush(data,...)
end
end
+implement {
+ name = "processregister",
+ actions = registers.process,
+ arguments = {
+ "string",
+ {
+ { "language" },
+ { "method" },
+ { "numberorder" },
+ { "compress" },
+ { "criterium" },
+ { "pagenumber", "boolean" },
+ },
+ {
+ { "separatorset" },
+ { "conversionset" },
+ { "starter" },
+ { "stopper" },
+ { "set" },
+ { "segments" },
+ { "connector" },
+ },
+ {
+ { "prefix" },
+ { "separatorset" },
+ { "conversionset" },
+ { "starter" },
+ { "stopper" },
+ { "segments" },
+ }
+ }
+}
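+
+-- The two trailing keyword tables mirror the pageprefix... and page... register
+-- parameters; \strc_registers_place_indeed in strc-reg.mkiv (below) fills them
+-- when a register is placed.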
diff --git a/tex/context/base/strc-reg.mkiv b/tex/context/base/strc-reg.mkiv
index 2d28114c3..138a1486f 100644
--- a/tex/context/base/strc-reg.mkiv
+++ b/tex/context/base/strc-reg.mkiv
@@ -17,6 +17,8 @@
\unprotect
+\startcontextdefinitioncode
+
% todo: tag:: becomes rendering
% todo: language, character, linked, location
% todo: fonts etc at sublevels (already defined)
@@ -106,6 +108,14 @@
\c!entries=,
\c!alternative=]
+
+\definemixedcolumns
+ [\v!register]
+ [\c!n=\registerparameter\c!n,
+ \c!balance=\registerparameter\c!balance,
+ \c!align=\registerparameter\c!align,
+ \c!tolerance=\registerparameter\c!tolerance]
+
%D \starttyping
%D \setupregister[index][1][textcolor=darkred]
%D \setupregister[index][2][textcolor=darkgreen,textstyle=bold]
@@ -123,7 +133,8 @@
\appendtoks
\ifconditional\c_strc_registers_defining \else % todo: dosingle ...
\settrue\c_strc_registers_defining
- \ctxlua{structures.registers.define('\currentregister')}%
+ \definemixedcolumns[\currentregister][\v!register]% first as otherwise it overloads start/stop
+ \clf_defineregister{\currentregister}{\registerparameter\c!referencemethod}%
\normalexpanded{\presetheadtext[\currentregister=\Word{\currentregister}]}%
\setuevalue{\currentregister}{\dodoubleempty\strc_registers_insert_entry[\currentregister]}%
\setuevalue{\e!see\currentregister}{\dodoubleempty\strc_registers_insert_see[\currentregister]}%
@@ -143,6 +154,10 @@
\fi
\to \everydefineregister
+\appendtoks
+ \clf_setregistermethod{\currentregister}{\registerparameter\c!referencemethod}%
+\to \everysetupregister
+
%D Registering:
\def\strc_registers_register_page_entry
@@ -152,6 +167,52 @@
\expandafter\strc_registers_register_page_entry_indeed
\fi}
+\def\strc_registers_register_page_expand_xml_entries
+ {\xmlstartraw
+ \xdef\currentregisterentriesa{\registerparameter{\c!entries:1}}%
+ \xdef\currentregisterentriesb{\registerparameter{\c!entries:2}}%
+ \xdef\currentregisterentriesc{\registerparameter{\c!entries:3}}%
+ \xmlstopraw
+ \globallet\currentregistercoding\s!xml}
+
+\def\strc_registers_register_page_expand_yes_entries
+ {\xdef\currentregisterentriesa{\registerparameter{\c!entries:1}}%
+ \xdef\currentregisterentriesb{\registerparameter{\c!entries:2}}%
+ \xdef\currentregisterentriesc{\registerparameter{\c!entries:3}}%
+ \globallet\currentregistercoding\s!tex}
+
+\def\strc_registers_register_page_expand_nop_entries
+ {\xdef\currentregisterentriesa{\detokenizedregisterparameter{\c!entries:1}}%
+ \xdef\currentregisterentriesb{\detokenizedregisterparameter{\c!entries:2}}%
+ \xdef\currentregisterentriesc{\detokenizedregisterparameter{\c!entries:3}}%
+ \globallet\currentregistercoding\s!tex}
+
+\def\strc_registers_register_page_expand_xml
+ {\xmlstartraw
+ \xdef\currentregisterentries{\registerparameter\c!entries}%
+ \xmlstopraw
+ \globallet\currentregistercoding\s!xml}
+
+\def\strc_registers_register_page_expand_yes
+ {\xdef\currentregisterentries{\registerparameter\c!entries}%
+ \globallet\currentregistercoding\s!tex}
+
+\def\strc_registers_register_page_expand_nop
+ {\xdef\currentregisterentries{\detokenizedregisterparameter\c!entries}%
+ \globallet\currentregistercoding\s!tex}
+
+\def\strc_registers_register_page_expand_xml_keys
+ {\xmlstartraw
+ \xdef\currentregisterkeysa{\registerparameter{\c!keys:1}}%
+ \xdef\currentregisterkeysb{\registerparameter{\c!keys:2}}%
+ \xdef\currentregisterkeysc{\registerparameter{\c!keys:3}}%
+ \xmlstopraw}
+
+\def\strc_registers_register_page_expand_yes_keys
+ {\xdef\currentregisterkeysa{\registerparameter{\c!keys:1}}%
+ \xdef\currentregisterkeysb{\registerparameter{\c!keys:2}}%
+ \xdef\currentregisterkeysc{\registerparameter{\c!keys:3}}}
+
\def\strc_registers_register_page_entry_indeed#1#2#3% register data userdata
{\begingroup
\edef\currentregister{#1}%
@@ -165,106 +226,118 @@
\xdef\currentregisterxmlsetup {\registerparameter\c!xmlsetup}%
\ifx\currentregisterentries\empty
\ifx\currentregisterexpansion\s!xml
- \xmlstartraw
- \xdef\currentregisterentriesa{\registerparameter{\c!entries:1}}%
- \xdef\currentregisterentriesb{\registerparameter{\c!entries:2}}%
- \xdef\currentregisterentriesc{\registerparameter{\c!entries:3}}%
- \xmlstopraw
- \globallet\currentregistercoding\s!xml
+ \strc_registers_register_page_expand_xml_entries
+ \else\ifx\currentregisterexpansion\v!yes
+ \strc_registers_register_page_expand_yes_entries
\else
- \ifx\currentregisterexpansion\v!yes
- \xdef\currentregisterentriesa{\registerparameter{\c!entries:1}}%
- \xdef\currentregisterentriesb{\registerparameter{\c!entries:2}}%
- \xdef\currentregisterentriesc{\registerparameter{\c!entries:3}}%
- \else
- \xdef\currentregisterentriesa{\detokenizedregisterparameter{\c!entries:1}}%
- \xdef\currentregisterentriesb{\detokenizedregisterparameter{\c!entries:2}}%
- \xdef\currentregisterentriesc{\detokenizedregisterparameter{\c!entries:3}}%
- \fi
- \globallet\currentregistercoding\s!tex
- \fi
+ \strc_registers_register_page_expand_nop_entries
+ \fi\fi
\else
\ifx\currentregisterexpansion\s!xml
- \xmlstartraw
- \xdef\currentregisterentries{\registerparameter\c!entries}%
- \xmlstopraw
- \globallet\currentregistercoding\s!xml
+ \strc_registers_register_page_expand_xml
+ \else\ifx\currentregisterexpansion\v!yes
+ \strc_registers_register_page_expand_yes
\else
- \ifx\currentregisterexpansion\v!yes
- \xdef\currentregisterentries{\registerparameter\c!entries}%
- \else
- \xdef\currentregisterentries{\detokenizedregisterparameter\c!entries}%
- \fi
- \globallet\currentregistercoding\s!tex
- \fi
+ \strc_registers_register_page_expand_nop
+ \fi\fi
\fi
\ifx\currentregisterkeys\empty
\ifx\currentregistercoding\s!xml
- \xmlstartraw
- \xdef\currentregisterkeysa{\registerparameter{\c!keys:1}}%
- \xdef\currentregisterkeysb{\registerparameter{\c!keys:2}}%
- \xdef\currentregisterkeysc{\registerparameter{\c!keys:3}}%
- \xmlstopraw
+ \strc_registers_register_page_expand_xml_keys
\else
- \xdef\currentregisterkeysa{\registerparameter{\c!keys:1}}%
- \xdef\currentregisterkeysb{\registerparameter{\c!keys:2}}%
- \xdef\currentregisterkeysc{\registerparameter{\c!keys:3}}%
+ \strc_registers_register_page_expand_yes_keys
\fi
\fi
\setnextinternalreference
% we could consider storing register entries in a list which we
% could then sort
- \xdef\currentregisternumber{\ctxlua{
- structures.registers.store { % 'own' should not be in metadata
- metadata = {
- kind = "entry",
- name = "\currentregister",
- level = structures.sections.currentlevel(),
- coding = "\currentregistercoding",
- catcodes = \the\catcodetable,
+ \xdef\currentregisternumber{\clf_storeregister % 'own' should not be in metadata
+ metadata {%
+ name {\currentregister}%
+ coding {\currentregistercoding}%
\ifx\currentregisterownnumber\v!yes
- own = "\registerparameter\c!alternative", % can be used instead of pagenumber
+ own {\registerparameter\c!alternative}% can be used instead of pagenumber
+ \fi
+ \ifx\currentreferencecoding\s!xml
+ xmlroot {\xmldocument} % only useful when text
\fi
- xmlroot = \ifx\currentreferencecoding\s!xml "\xmldocument" \else nil \fi, % only useful when text
\ifx\currentregisterxmlsetup\empty \else
- xmlsetup = "\currentregisterxmlsetup",
+ xmlsetup {\currentregisterxmlsetup}%
\fi
- },
- references = {
- internal = \nextinternalreference,
- section = structures.sections.currentid(), % hm, why then not also lastsection the same way
- label = "\currentregisterlabel",
- },
- % \ifx\currentregisterentries\empty \else
- entries = {
+ }%
+ references {%
+ \ifx\currentregisterlabel\empty \else
+ label {\currentregisterlabel}%
+ \fi
+ }%
+ entries {%
% we need a special one for xml, this is just a single one
\ifx\currentregisterentries\empty
- { \!!bs\currentregisterentriesa\!!es, \!!bs\currentregisterentriesb\!!es, \!!bs\currentregisterentriesc\!!es },
+ entries {
+ {\currentregisterentriesa}%
+ {\currentregisterentriesb}%
+ {\currentregisterentriesc}%
+ }
\else
- \!!bs\currentregisterentries\!!es,
+ entry {\currentregisterentries}%
\fi
\ifx\currentregisterkeys\empty
- { \!!bs\currentregisterkeysa\!!es, \!!bs\currentregisterkeysb\!!es, \!!bs\currentregisterkeysc\!!es },
+ keys {
+ {\currentregisterkeysa}%
+ {\currentregisterkeysb}%
+ {\currentregisterkeysc}%
+ }
\else
- \!!bs\currentregisterkeys\!!es,
+ key {\currentregisterkeys}%
\fi
- },
- % \fi
- userdata = structures.helpers.touserdata(\!!bs\detokenize{#3}\!!es)
- }
- }}%
- \ctxlua{structures.references.setinternalreference(nil,nil,\nextinternalreference)}%
+ }%
+ userdata {\detokenize\expandafter{\normalexpanded{#3}}}
+ }%
+ % \clf_setinternalreference internal \nextinternalreference\relax % in previous
\ifx\currentregisterownnumber\v!yes
\glet\currentregistersynchronize\relax
\else
- \xdef\currentregistersynchronize{\ctxlatelua{structures.registers.enhance("\currentregister",\currentregisternumber)}}%
+ \xdef\currentregistersynchronize{\ctxlatecommand{enhanceregister("\currentregister",\currentregisternumber)}}%
\fi
\currentregistersynchronize % here?
% needs thinking ... bla\index{bla}. will break before the . but adding a
% penalty is also no solution
+ \dostarttagged\t!registerlocation\currentregister
\attribute\destinationattribute\lastdestinationattribute \signalcharacter % no \strut as it will be removed during cleanup
+ \dotagregisterlocation
+ \dostoptagged
+ \endgroup}
+
+\unexpanded\def\dosetfastregisterentry#1#2#3#4#5% register entry key processor processor
+ {\begingroup
+ \edef\currentregister{#1}%
+ \setnextinternalreference
+ \xdef\currentregisternumber{\clf_storeregister
+ {%
+ metadata {%
+ name {\currentregister}%
+ }
+ entries {%
+ entry {#2}%
+ key {#3}%
+ }%
+ processors {%
+ entry {#4}%
+ page {#5}%
+ }%
+ }%
+ }%
+ % overlap with the above
+ \xdef\currentregistersynchronize{\ctxlatecommand{enhanceregister("\currentregister",\currentregisternumber)}}%
+ \currentregistersynchronize % here?
+ \dostarttagged\t!registerlocation\currentregister
+ \attribute\destinationattribute\lastdestinationattribute \signalcharacter % no \strut as it will be removed during cleanup
+ \dotagregisterlocation
+ \dostoptagged
\endgroup}
+\let\dotagregisterlocation\relax % experiment
+
\unexpanded\def\strc_registers_insert_entry[#1][#2]%
{\def\currentregister{#1}%
\doifelse{\registerparameter\c!ownnumber}\v!yes
@@ -296,7 +369,7 @@
\fi}
\def\strc_registers_stop_entry[#1][#2]%
- {\normalexpanded{\ctxlatelua{structures.registers.extend("#1","#2")}}}
+ {\normalexpanded{\ctxlatecommand{extendregister("#1","#2")}}}
\def\setregisterentry {\dotripleempty\strc_registers_set_entry}
\def\finishregisterentry{\dotripleempty\strc_registers_finish_entry}
@@ -309,7 +382,7 @@
\def\strc_registers_finish_entry_indeed#1#2#3% register data userdata
{\begingroup
\edef\currentregister{#1}%
- \setupcurrentregister[\c!entries=,\c!label=,\c!keys=,\c!alternative=,#2]%
+ \setupcurrentregister[\c!entries=,\c!label=,\c!keys=,\c!alternative=,#2]% todo: fast setter
\edef\currentregisterlabel {\registerparameter\c!label}%
\edef\currentregisterexpansion{\registerparameter\c!expansion}%
\edef\currentregisterownnumber{\registerparameter\c!ownnumber}%
@@ -329,19 +402,19 @@
\fi
% I hate this kind of mess ... but it's a user request.
\ifx\currentregisterentries\empty
- \normalexpanded{\ctxlua{structures.registers.extend("\currentregister","\currentregisterlabel", {
+ \normalexpanded{\ctxcommand{extendregister("\currentregister","\currentregisterlabel", {
metadata = {
\ifx\currentregisterownnumber\v!yes
own = "\registerparameter\c!alternative", % can be used instead of pagenumber
\fi
},
- userdata = structures.helpers.touserdata(\!!bs\detokenize{#3}\!!es)
+ userdata = \!!bs\detokenize{#3}\!!es
})%
}}%
\else
- \normalexpanded{\ctxlua{structures.registers.extend("\currentregister","\currentregisterlabel", {
+ \normalexpanded{\ctxcommand{extendregister("\currentregister","\currentregisterlabel", {
metadata = {
- catcodes = \the\catcodetable,
+ % catcodes = \the\catcodetable,
coding = "\currentregistercoding",
\ifx\currentregisterownnumber\v!yes
own = "\registerparameter\c!alternative", % can be used instead of pagenumber
@@ -352,7 +425,7 @@
\!!bs\currentregisterentries\!!es,
\!!bs\currentregisterkeys\!!es
},
- userdata = structures.helpers.touserdata(\!!bs\detokenize{#3}\!!es)
+ userdata = \!!bs\detokenize{#3}\!!es
})
}}%
\fi
@@ -374,7 +447,7 @@
% \placeregister[index][n=1]
% \stoptext
-% some overlap wit previous
+% some overlap with previous
\unexpanded\def\setstructurepageregister
{\dotripleempty\strc_registers_set}
@@ -421,27 +494,28 @@
\fi
\setnextinternalreference
    % we could consider storing register entries in a list
- \edef\temp{\ctxlua{ structures.registers.store {
- metadata = {
- kind = "see",
- name = "\currentregister",
- level = structures.sections.currentlevel(),
- catcodes = \the\catcodetable,
- },
- references = {
- internal = \nextinternalreference,
- section = structures.sections.currentid(),
- },
- entries = {
- % we need a special one for xml, this is just a single one
- \!!bs\currentregisterentries\!!es,
- \!!bs#2\!!es
- },
- seeword = {
- text = \!!bs\currentregisterseeword\!!es
- },
- }
+ \edef\temp{\clf_storeregister{
+ metadata {%
+ kind {see}%
+ name {\currentregister}%
+ }%
+ references {%
+ % internal = \nextinternalreference,
+ % section = structures.sections.currentid(),
+ }%
+ entries {%
+ % we need a special one for xml, this is just a single one
+ entry {\currentregisterentries}%
+ key {#2}%
+ }%
+ seeword {%
+ text {\currentregisterseeword}%
+ }%
}}%
+ \dostarttagged\t!registerlocation\currentregister
+ \attribute\destinationattribute\lastdestinationattribute \signalcharacter % no \strut as it will be removed during cleanup
+ \dotagregisterlocation
+ \dostoptagged
\endgroup}
%D Rendering:
@@ -457,13 +531,17 @@
{\begingroup
\edef\currentregister{#1}%
\setupregister[\currentregister][#2]%
- \normalexpanded{\endgroup\noexpand\xdef\noexpand\utilityregisterlength{\ctxlua{structures.registers.analyze('\currentregister',{
- language = "\registerparameter\s!language",
- method = "\registerparameter\c!method",
- numberorder = "\registerparameter\c!numberorder",
- compress = "\registerparameter\c!compress",
- criterium = "\registerparameter\c!criterium",
- })}}}% brrr
+ \normalexpanded{\endgroup\noexpand\xdef\noexpand\utilityregisterlength{\clf_analyzeregister
+ {\currentregister}%
+ {%
+ language {\registerparameter\s!language}%
+ method {\registerparameter\c!method}%
+ numberorder {\registerparameter\c!numberorder}%
+ compress {\registerparameter\c!compress}%
+ criterium {\registerparameter\c!criterium}%
+ pagenumber \ifx\registerpageseparatorsymbol\empty false\else true\fi
+ }%
+ }}%
\ifcase\utilityregisterlength\relax
\resetsystemmode\v!register
\else
@@ -486,59 +564,43 @@
\edef\currentregister{#1}%
\setupregister[\currentregister][#2]%
\the\everyplaceregister
- \ifnum\registerparameter\c!n>\plusone
- \startcolumns
- [\c!n=\registerparameter\c!n,
- \c!balance=\registerparameter\c!balance,
- \c!align=\registerparameter\c!align,
- \c!tolerance=\registerparameter\c!tolerance]%
- \strc_registers_place_indeed
- \stopcolumns
+ \ifnum\namedmixedcolumnsparameter\currentregister\c!n>\plusone
+ \startmixedcolumns[\currentregister]
+ \strc_registers_place_indeed
+ \stopmixedcolumns
\else
\strc_registers_place_indeed
\fi
\endgroup
\fi}
-\def\strc_registers_place_columns
- {\startcolumns
- [\c!n=\registerparameter\c!n,
- \c!balance=\registerparameter\c!balance,
- \c!align=\registerparameter\c!align,
- \c!tolerance=\registerparameter\c!tolerance]%
- \startpacked[\v!blank]%
- \strc_registers_place_indeed
- \stoppacked
- \stopcolumns}
-
-\def\strc_registers_place_normal
- {\startpacked[\v!blank]%
- \strc_registers_place_indeed
- \stoppacked}
-
\def\strc_registers_place_indeed
- {\ctxlua{structures.registers.process('\currentregister',{
- language = "\registerparameter\s!language",
- method = "\registerparameter\c!method",
- numberorder = "\registerparameter\c!numberorder",
- compress = "\registerparameter\c!compress",
- criterium = "\registerparameter\c!criterium",
- },{
- separatorset = "\registerparameter\c!pageprefixseparatorset",
- conversionset = "\registerparameter\c!pageprefixconversionset",
- starter = \!!bs\registerparameter\c!pageprefixstarter\!!es,
- stopper = \!!bs\registerparameter\c!pageprefixstopper\!!es,
- set = "\registerparameter\c!pageprefixset",
- segments = "\registerparameter\c!pageprefixsegments",
- connector = \!!bs\registerparameter\c!pageprefixconnector\!!es,
- },{
- prefix = "\registerparameter\c!pageprefix",
- separatorset = "\registerparameter\c!pageseparatorset",
- conversionset = "\registerparameter\c!pageconversionset",
- starter = \!!bs\registerparameter\c!pagestarter\!!es,
- stopper = \!!bs\registerparameter\c!pagestopper\!!es,
- segments = "\registerparameter\c!pagesegments",
- })}}
+ {\clf_processregister
+ {\currentregister}%
+ {%
+ language {\registerparameter\s!language}%
+ method {\registerparameter\c!method}%
+ numberorder {\registerparameter\c!numberorder}%
+ compress {\registerparameter\c!compress}%
+ criterium {\registerparameter\c!criterium}%
+ pagenumber \ifx\registerpageseparatorsymbol\empty false\else true\fi
+ }{%
+ separatorset {\registerparameter\c!pageprefixseparatorset}%
+ conversionset {\registerparameter\c!pageprefixconversionset}%
+ starter {\registerparameter\c!pageprefixstarter}%
+ stopper {\registerparameter\c!pageprefixstopper}%
+ set {\registerparameter\c!pageprefixset}%
+ segments {\registerparameter\c!pageprefixsegments}%
+ connector {\registerparameter\c!pageprefixconnector}%
+ }{%
+ prefix {\registerparameter\c!pageprefix}%
+ separatorset {\registerparameter\c!pageseparatorset}%
+ conversionset {\registerparameter\c!pageconversionset}%
+ starter {\registerparameter\c!pagestarter}%
+ stopper {\registerparameter\c!pagestopper}%
+ segments {\registerparameter\c!pagesegments}%
+ }%
+ \relax}
\def\strc_registers_limited_entry#1%
{\limitatetext{#1}\currentregistermaxwidth\unknown}%
@@ -613,17 +675,19 @@
% a =
-\setvalue{\??registerindicator a}#1%
+\def\strc_registers_indicator_a#1#2%
{\registerparameter\c!before
% bugged, why does leftskip get set: \vskip\lineheight\goodbreak\vskip-\lineheight
+ \typo_injectors_check_register
\begingroup
\useregisterstyleandcolor\c!style\c!color
\dontleavehmode
+ \typo_injectors_mark_register
\strut
\iflocation
\dosetdirectpagereference{\currentregister:\v!section:#1}%
\fi
- \registerparameter\c!command{#1}%
+ \registerparameter\c!command{#2}%
\endgroup
\blank[\v!samepage]%
\registerparameter\c!after
@@ -632,38 +696,55 @@
% b =
-\setvalue{\??registerindicator b}#1% will be shared with a
+\def\strc_registers_indicator_b#1#2%
{\registerparameter\c!before
+ \typo_injectors_check_register
\begingroup
\useregisterstyleandcolor\c!style\c!color
\dontleavehmode
+ \typo_injectors_mark_register
\strut
\iflocation
\dosetdirectpagereference{\currentregister:\v!section:#1}%
\fi
- \registerparameter\c!command{#1}%
+ \registerparameter\c!command{#2}%
\endgroup
\registerparameter\c!after
\nobreak}
-\setvalue{\??registerindicator A}#1{\getvalue{\??registerindicator a}{\WORD{#1}}}
-\setvalue{\??registerindicator B}#1{\getvalue{\??registerindicator b}{\WORD{#1}}}
+\setvalue{\??registerindicator a}#1{\strc_registers_indicator_a{#1}{#1}}
+\setvalue{\??registerindicator A}#1{\strc_registers_indicator_a{#1}{\WORD{#1}}}
+\setvalue{\??registerindicator b}#1{\strc_registers_indicator_b{#1}{#1}}
+\setvalue{\??registerindicator B}#1{\strc_registers_indicator_b{#1}{\WORD{#1}}}
%D The following macros are the interface to the rendering. These are
%D generated by \LUA. This might change.
+% \showinjector
+% \setinjector[register][2][\column]
+%
+% \starttext
+% first \index{first}
+% second \index{second}
+% third \index{third}
+% fourth \index{fourth}
+% \placeregister[index]
+% \stoptext
+
+\doinstallinjector\s!register
+
%D Beware, we get funny side effects when a dangling \index precedes a
%D placeindex, as then flushing takes place inside the index. It took me hours
%D to notice that.
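%D
%D A hypothetical minimal input that runs into this (for illustration only):
%D
%D \starttyping
%D last word \index{last word}
%D \placeregister[index]
%D \stoptyping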
-\newconditional\c_strc_registers_page_done
+\newconstant \c_strc_registers_page_state % 0=nothing 1=page 2=see
\newdimen \d_strc_registers_distance
\unexpanded\def\startregisteroutput
{\endgraf
\begingroup
\d_strc_registers_distance\registerparameter\c!distance\relax
- \dostarttagged\t!register\currentregister
+ \dostarttaggedchained\t!register\currentregister\??register
\forgeteverypar
\forgetparindent
\forgetparskip}
@@ -673,17 +754,8 @@
\dostoptagged
\endgroup}
-% \unexpanded\def\startregisterentries#1% depth
-% {\endgraf
-% \begingroup
-% \dostarttagged\t!registerentries\empty
-% \let\savedcurrentregister\currentregister
-% \edef\currentregister{\currentregister:#1}%
-% \useregisterstyleandcolor\c!textstyle\c!textcolor
-% \advance\leftskip\numexpr#1-\plusone\relax\dimexpr\d_strc_registers_distance\relax
-% \hangindent\registerparameter\c!distance\relax
-% \hangafter\plusone
-% \let\currentregister\savedcurrentregister}
+\newdimen\d_strc_registers_hangindent
+\newcount\c_strc_registers_hangafter
\unexpanded\def\startregisterentries#1% depth
{\endgraf
@@ -696,8 +768,9 @@
\ifnum\scratchcounter>\plusone
\advance\leftskip\d_strc_registers_distance\relax
\fi
- \hangindent\registerparameter\c!distance\relax
- \hangafter\plusone
+ \d_strc_registers_hangindent\registerparameter\c!distance\relax
+ \c_strc_registers_hangafter \plusone
+ \blank[\v!samepage]%
\let\currentregister\savedcurrentregister}
\unexpanded\def\stopregisterentries
@@ -705,6 +778,21 @@
\dostoptagged
\endgroup}
+\unexpanded\def\startregisterentry#1% todo: level
+ {\typo_injectors_check_register
+ \begingroup
+ \dostarttagged\t!registerentry\empty
+ \global\setconstant\c_strc_registers_page_state\zerocount
+ \hangindent\d_strc_registers_hangindent
+ \hangafter \c_strc_registers_hangafter
+ \typo_injectors_mark_register}
+
+\unexpanded\def\stopregisterentry
+ {\endgraf
+ \global\setconstant\c_strc_registers_page_state\zerocount
+ \dostoptagged
+ \endgroup}
+
\unexpanded\def\startregistersection#1% title
{\dostarttagged\t!registersection\empty
\dostarttagged\t!registertag\empty
@@ -718,7 +806,6 @@
\unexpanded\def\startregisterpages
{\begingroup
\dostarttagged\t!registerpages\empty
- \setfalse\c_strc_registers_page_done
\useregisterstyleandcolor\c!pagestyle\c!pagecolor}
\unexpanded\def\stopregisterpages
@@ -726,9 +813,8 @@
\endgroup}
\unexpanded\def\startregisterseewords
- {\ifhmode\crlf\fi
+ {%\par % \ifhmode\crlf\fi % otherwise wrong level
\begingroup
- \setfalse\c_strc_registers_page_done
\dostarttagged\t!registerpage\empty
\useregisterstyleandcolor\c!pagestyle\c!pagecolor}
@@ -736,16 +822,21 @@
{\dostoptagged
\endgroup}
-\unexpanded\def\registerpageseparator% todo: , configurable
- {\ifconditional\c_strc_registers_page_done
- \registerpageseparatorsymbol
- \else
+\unexpanded\def\registerpageseparator % todo: , configurable
+ {\ifcase\c_strc_registers_page_state
\hskip\d_strc_registers_distance\relax
- \settrue\c_strc_registers_page_done
+ \or
+ \dostarttagged\t!registerseparator\empty
+ \registerpageseparatorsymbol % page
+ \dostoptagged
+ \or
+ \dostarttagged\t!registerseparator\empty
+ \registerpageseparatorsymbol % see
+ \dostoptagged
\fi}
\unexpanded\def\registeronepagerangeseparator
- {|\endash|}
+ {|\endash|} % todo use \prewordbreak
\def\withregisterpagecommand#1#2#3#4%
{\def\currentregisterpageindex{#2}%
@@ -757,12 +848,14 @@
\unexpanded\def\registeronepage#1#2#3#4% #1:processor content
{\registerpageseparator
+ \global\setconstant\c_strc_registers_page_state\plusone
\dostarttagged\t!registerpage\empty
\withregisterpagecommand{#1}{#2}{#3}{#4}%
\dostoptagged}
\unexpanded\def\registerpagerange#1#2#3#4#5#6#7% #1:processor content, content todo: -- configurable
{\registerpageseparator
+ \global\setconstant\c_strc_registers_page_state\plusone
\dostarttagged\t!registerpagerange\empty
\dostarttagged\t!registerfrompage\empty
\withregisterpagecommand{#1}{#2}{#3}{#4}%
@@ -773,12 +866,8 @@
\dostoptagged
\dostoptagged}
-\let\strc_register_injector_process\relax
-\let\strc_register_injector_show \relax
-
\unexpanded\def\defaultregisterentry#1#2#3#4% #1:processor #2:internal #3:seeindex #4:word
{\def\currentregisterpageindex{#2}%
- \strc_register_injector_process
\iflocation
\def\currentregisterseeindex{#3}%
\doifelse{\registerparameter\c!interaction}\v!text
@@ -790,10 +879,9 @@
\fi}
\unexpanded\def\doapplyregisterentrycommand#1#2% processor text
- {\dostarttagged\t!registerentry\empty
+ {\dostarttagged\t!registercontent\empty
\ifx\currentregisterseeindex\empty \else
\dontleavehmode
- \strc_register_injector_show
\dosetdirectpagereference{seeindex:\currentregisterseeindex}% maybe some day we will support an area
\fi
\applyprocessor{#1}{\registerparameter\c!textcommand{\limitedregisterentry{\registerparameter\c!deeptextcommand{#2}}}}%
@@ -808,7 +896,8 @@
\fi}
\unexpanded\def\defaultregisterseeword#1#2#3#4#5#6% i n #3:processor #4:internal #5:seeindex #6:word
- {%\registerpageseparator
+ {\registerpageseparator
+ \global\setconstant\c_strc_registers_page_state\plustwo
\def\currentregisterpageindex{#4}%
\dostarttagged\t!registersee\empty
\settrue\c_strc_registers_page_done
@@ -846,7 +935,7 @@
% \placeregister[index][n=1,pagecommand=\MyRegisterPageCommand]
% \stoptext
-\def\registerpageuserdata #1#2{\ctxlua{structures.registers.userdata(#1,"#2")}}
+\def\registerpageuserdata #1#2{\clf_registeruserdata#1{#2}}
\def\currentregisterpageuserdata {\registerpageuserdata\currentregisterpageindex} % {#1}
% not yet ok : new internal handler names
@@ -857,10 +946,10 @@
\installcorenamespace{registersymbol}
\setvalue{\??registersymbol n}%
- {\def\registerpageseparatorsymbol{, }}
+ {\def\registerpageseparatorsymbol{,\space}}
\setvalue{\??registersymbol a}%
- {\def\registerpageseparatorsymbol{, }} % now done via conversion
+ {\def\registerpageseparatorsymbol{,\space}} % now done via conversion
\setvalue{\??registersymbol\v!none}%
{\let\registerpageseparatorsymbol\empty
@@ -877,7 +966,7 @@
\def\registeronepage {\registerpagebuttonsymbol\gobblefourarguments}%
\def\registerpagerange{\registerpagebuttonsymbol\gobblesevenarguments}}
-\def\setregisterpagerendering
+\unexpanded\def\setregisterpagerendering
{\doifelse{\registerparameter\c!pagenumber}\v!no
{\let \currentregisterpagesymbol\v!none}
{\edef\currentregisterpagesymbol{\registerparameter\c!symbol}}%
@@ -904,4 +993,6 @@
[\v!index]
% [\v!indices]
+\stopcontextdefinitioncode
+
\protect \endinput
diff --git a/tex/context/base/strc-ren.mkiv b/tex/context/base/strc-ren.mkiv
index fdf8fb7f4..34903dfa0 100644
--- a/tex/context/base/strc-ren.mkiv
+++ b/tex/context/base/strc-ren.mkiv
@@ -135,7 +135,7 @@
\unexpanded\def\strc_rendering_place_head_number_and_text
{\strc_rendering_start_placement
\setheadmarking
- \doiftextelse\getheadnumber
+ \doifelsetext\getheadnumber
\dosettructureheadnumbercontent
\doresettructureheadnumbercontent
\ifconditional\c_strc_sectioning_empty
@@ -422,7 +422,7 @@
{\dodoubleargument\strc_rendering_define_placement}
\def\strc_rendering_define_placement[#1][#2]%
- {\doifnextbgroupelse
+ {\doifelsenextbgroup
{\strc_rendering_define_placement_yes[#1][#2]}%
{\strc_rendering_define_placement_nop[#1][#2]}}
@@ -516,6 +516,11 @@
\fi
\endgroup}
+\def\fakedheadnumber{\vphantom{0}} % needed for mathplus
+
+\unexpanded\def\fakeheadnumbercontent
+ {\hbox to \zeropoint{\let\getheadnumber\fakedheadnumber\headnumbercontent}}
+
\unexpanded\def\strc_rendering_inject_number_and_text
{\edef\p_command{\headparameter\c!command}% assumes \unexpanded definition
\ifx\p_command\empty
@@ -608,6 +613,7 @@
\hsize\headtextwidth
\fi
\noindent
+ \fakeheadnumbercontent % will also be done in the other ones (force consistency with numbered)
\fi
\headtextcontent
}
@@ -629,7 +635,7 @@
\hbox {
\hfill
\headnumbercontent
- \doifrightpageelse{
+ \doifelserightpage{
\scratchdistance\leftmargindistance
} {
\scratchdistance\rightmargindistance
@@ -637,6 +643,8 @@
\hskip\dimexpr\d_strc_rendering_local_leftoffset+\scratchdistance\relax
}
}
+ \else
+ \fakeheadnumbercontent % will also be done in the other ones (force consistency with numbered)
\fi
\headtextcontent
}
@@ -650,6 +658,30 @@
% \directsetup{\??headrenderings:\v!vertical:\v!inmargin}
% \stopsetups
+%D This one is for head based numbering usage: foo 1.2 and so on:
+
+\defineheadalternative
+ [\v!reverse]
+ [\c!alternative=\v!vertical,
+ \c!renderingsetup=\??headrenderings:\v!reverse]
+
+\startsetups[\??headrenderings:\v!reverse]
+ \vbox {
+ \headsetupspacing
+ \noindent
+ \begstrut
+ \setfalse\headisdisplay % so a kind of mix
+ \headtextcontent
+ \ifconditional\headshownumber
+ \kern\headnumberdistance
+ \headnumbercontent
+ \else
+ \fakeheadnumbercontent
+ \fi
+ \endstrut
+ }
+\stopsetups
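+
+% A minimal usage sketch (the head name is just an example), assuming the usual
+% \setuphead interface:
+%
+% \setuphead[subsection][alternative=reverse]
+%
+% \startsubsection[title=Whatever]
+%   ...
+% \stopsubsection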
+
\defineheadalternative
[\v!middle]
[\c!alternative=\v!vertical,
@@ -665,6 +697,8 @@
\strut
\headnumbercontent
\par
+ \else
+ \fakeheadnumbercontent % will also be done in the other ones (force consistency with numbered)
\fi
\begstrut
\headtextcontent
diff --git a/tex/context/base/strc-rsc.lua b/tex/context/base/strc-rsc.lua
index a90f577e3..627e443b2 100644
--- a/tex/context/base/strc-rsc.lua
+++ b/tex/context/base/strc-rsc.lua
@@ -12,7 +12,8 @@ if not modules then modules = { } end modules ['strc-rsc'] = {
-- The scanner accepts nested outer, but we don't care too much, maybe
-- some day we will have both but currently the innermost wins.
-local lpegmatch, lpegP, lpegS, lpegCs, lpegCt, lpegCf, lpegCc, lpegC, lpegCg = lpeg.match, lpeg.P, lpeg.S, lpeg.Cs, lpeg.Ct, lpeg.Cf, lpeg.Cc, lpeg.C, lpeg.Cg
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local lpegP, lpegS, lpegCs, lpegCt, lpegCf, lpegCc, lpegC, lpegCg = lpeg.P, lpeg.S, lpeg.Cs, lpeg.Ct, lpeg.Cf, lpeg.Cc, lpeg.C, lpeg.Cg
local find = string.find
local spaces = lpegP(" ")^0
@@ -34,18 +35,28 @@ local backslash = lpegP("\\")
local endofall = spaces * lpegP(-1)
-local o_token = 1 - rparent - rbrace - lparent - lbrace -- can be made more efficient
-local a_token = 1 - rbrace
+----- o_token = 1 - rparent - rbrace - lparent - lbrace -- can be made more efficient
+----- a_token = 1 - rbrace
local s_token = 1 - lparent - lbrace
local i_token = 1 - lparent - lbrace - endofall
local f_token = 1 - lparent - lbrace - dcolon
local c_token = 1 - lparent - lbrace - tcolon
+-- experimental
+
+local o_token = lpegpatterns.nestedparents
+ + (1 - rparent - lbrace)
+local a_token = lpegpatterns.nestedbraces
+ + (1 - rbrace)
+local q_token = lpegpatterns.unsingle
+ + lpegpatterns.undouble
+
local hastexcode = lpegCg(lpegCc("has_tex") * lpegCc(true)) -- cannot be made to work
local component = lpegCg(lpegCc("component") * lpegCs(c_token^1))
local outer = lpegCg(lpegCc("outer") * lpegCs(f_token^1))
-local operation = lpegCg(lpegCc("operation") * lpegCs(o_token^1))
-local arguments = lpegCg(lpegCc("arguments") * lpegCs(a_token^0))
+----- operation = lpegCg(lpegCc("operation") * lpegCs(o_token^1))
+local operation = lpegCg(lpegCc("operation") * lpegCs(q_token + o_token^1))
+local arguments = lpegCg(lpegCc("arguments") * lpegCs(q_token + a_token^0))
local special = lpegCg(lpegCc("special") * lpegCs(s_token^1))
local inner = lpegCg(lpegCc("inner") * lpegCs(i_token^1))
@@ -56,9 +67,16 @@ local inner = lpegCg(lpegCc("inner") * lpegCs(i_token^1))
inner = inner * arguments
special = special * lparent * (operation * arguments)^-1 * rparent
-local referencesplitter = spaces * lpegCf (lpegCt("") * (component + outer)^-1 * (special + inner)^-1 * endofall, rawset)
-local prefixsplitter = lpegCs(lpegP((1-scolon)^1 * scolon)) * #-scolon * lpegCs(lpegP(1)^1)
-local componentsplitter = lpegCs(lpegP((1-scolon)^1)) * scolon * #-scolon * lpegCs(lpegP(1)^1)
+local referencesplitter = spaces
+ * lpegCf (lpegCt("") * (component + outer)^-1 * (special + inner)^-1 * endofall, rawset)
+
+local prefixsplitter = lpegCs(lpegP((1-scolon)^1 * scolon))
+ * #-scolon
+ * lpegCs(lpegP(1)^1)
+
+local componentsplitter = lpegCs(lpegP((1-scolon)^1))
+ * scolon * #-scolon
+ * lpegCs(lpegP(1)^1)
prefixsplitter = componentsplitter
@@ -67,11 +85,11 @@ local function splitreference(str)
local t = lpegmatch(referencesplitter,str)
if t then
local a = t.arguments
- if a and find(a,"\\") then
+ if a and find(a,"\\",1,true) then
t.has_tex = true
else
local o = t.arguments
- if o and find(o,"\\") then
+ if o and find(o,"\\",1,true) then
t.has_tex = true
end
end
@@ -135,6 +153,8 @@ references.splitcomponent = splitcomponent
-- inspect(splitreference([[ outer :: inner { argument } ]]))
-- inspect(splitreference([[ special ( outer :: operation ) ]]))
+-- inspect(splitreference([[inner(foo,bar)]]))
+
-- inspect(splitreference([[]]))
-- inspect(splitreference([[inner]]))
-- inspect(splitreference([[special(operation{argument,argument})]]))
@@ -152,3 +172,14 @@ references.splitcomponent = splitcomponent
-- inspect(splitreference([[outer::special()]]))
-- inspect(splitreference([[outer::inner{argument}]]))
-- inspect(splitreference([[special(outer::operation)]]))
+
+-- inspect(splitreference([[special(operation)]]))
+-- inspect(splitreference([[special(operation(whatever))]]))
+-- inspect(splitreference([[special(operation{argument,argument{whatever}})]]))
+-- inspect(splitreference([[special(operation{argument{whatever}})]]))
+
+-- inspect(splitreference([[special("operation(")]]))
+-- inspect(splitreference([[special("operation(whatever")]]))
+-- inspect(splitreference([[special(operation{"argument,argument{whatever"})]]))
+-- inspect(splitreference([[special(operation{"argument{whatever"})]]))
+
diff --git a/tex/context/base/strc-sbe.mkiv b/tex/context/base/strc-sbe.mkiv
index fc48307ec..9f1d214cf 100644
--- a/tex/context/base/strc-sbe.mkiv
+++ b/tex/context/base/strc-sbe.mkiv
@@ -65,7 +65,7 @@
\resetallstructuremarks
\strc_sectionblock_get_environment\currentsectionblock
\sectionblockparameter\c!before % don't move
- \dostarttagged\t!division\currentsectionblock
+ \dostarttagged\t!division\currentsectionblock % no parents
\to \everybeforesectionblock
\appendtoks
@@ -81,7 +81,9 @@
\def\strc_sectionblock_set[#1][#2]% used to set the default
{\edef\currentsectionblock{#1}% from now on we assume a value
\setupcurrentsectionblock[#2]%
- \ctxcommand{setsectionblock("#1", { bookmark = "\sectionblockparameter\c!bookmark" })}}
+ \clf_setsectionblock{#1}
+ bookmark {\sectionblockparameter\c!bookmark}%
+ \relax}
\let\currentsectionblock\empty % was \s!unknown
@@ -93,7 +95,9 @@
\begingroup
\edef\currentsectionblock{#1}% from now on we assume a value
\setupcurrentsectionblock[#2]%
- \ctxcommand{pushsectionblock("#1", { bookmark = "\sectionblockparameter\c!bookmark" })}%
+ \clf_pushsectionblock{#1}
+ bookmark {\sectionblockparameter\c!bookmark}%
+ \relax
\csname #1true\endcsname % obsolete
\setsystemmode\currentsectionblock
\the\everybeforesectionblock\relax
@@ -102,7 +106,7 @@
\unexpanded\def\stopsectionblock
{\showmessage\m!structures2\currentsectionblock
\the\everyaftersectionblock\relax
- \ctxcommand{popsectionblock()}%
+ \clf_popsectionblock
\endgroup}
%D \starttyping
diff --git a/tex/context/base/strc-sec.mkiv b/tex/context/base/strc-sec.mkiv
index 2962e2c49..b5a1a5ba0 100644
--- a/tex/context/base/strc-sec.mkiv
+++ b/tex/context/base/strc-sec.mkiv
@@ -15,6 +15,8 @@
\unprotect
+\startcontextdefinitioncode
+
\installcorenamespace{structure}
\installdirectcommandhandler \??structure {structure} % unchecked, so we need to initialize used parameters
@@ -101,8 +103,11 @@
{\setfalse\c_strc_bookmarks_preroll}
\def\strc_sectioning_autobookmark#1%
- {\nodestostring\tempstring{#1}%
- \globallet\currentstructurebookmark\tempstring}
+ {\begingroup
+ \the\everypreroll
+ \nodestostring\tempstring{#1}%
+ \globallet\currentstructurebookmark\tempstring
+ \endgroup}
% so it's an experiment
@@ -130,9 +135,9 @@
\xdef\currentstructuremarking {\structureparameter\c!marking}%
\xdef\currentstructurelist {\structureparameter\c!list}%
\xmlstopraw
-\iflocation \ifx\currentstructurebookmark\empty \ifconditional\c_strc_bookmarks_preroll
- \strc_sectioning_autobookmark\currentstructuretitle
-\fi \fi \fi
+ \iflocation \ifx\currentstructurebookmark\empty \ifconditional\c_strc_bookmarks_preroll
+ \strc_sectioning_autobookmark\currentstructuretitle
+ \fi \fi \fi
\ifx\currentstructurelist\empty
\globallet\currentstructurelist\currentstructuretitle
\fi
@@ -143,23 +148,23 @@
\xdef\currentstructurebookmark{\structureparameter\c!bookmark}%
\xdef\currentstructuremarking {\structureparameter\c!marking}%
\xdef\currentstructurelist {\structureparameter\c!list}%
-\iflocation \ifx\currentstructurebookmark\empty \ifconditional\c_strc_bookmarks_preroll
- \strc_sectioning_autobookmark\currentstructuretitle
-\fi \fi \fi
+ \iflocation \ifx\currentstructurebookmark\empty \ifconditional\c_strc_bookmarks_preroll
+ \strc_sectioning_autobookmark\currentstructuretitle
+ \fi \fi \fi
\else
\xdef\currentstructuretitle {\detokenizedstructureparameter\c!title}%
\xdef\currentstructurebookmark{\detokenizedstructureparameter\c!bookmark}%
\xdef\currentstructuremarking {\detokenizedstructureparameter\c!marking}%
\xdef\currentstructurelist {\detokenizedstructureparameter\c!list}%
\iflocation \ifx\currentstructurebookmark\empty
-\ifconditional\c_strc_bookmarks_preroll
- \strc_sectioning_autobookmark{\structureparameter\c!title}%
-\else
- \begingroup
- \simplifycommands
- \xdef\currentstructurebookmark{\detokenize\expandafter{\normalexpanded{\structureparameter\c!title}}}%
- \endgroup
-\fi
+ \ifconditional\c_strc_bookmarks_preroll
+ \strc_sectioning_autobookmark{\structureparameter\c!title}%
+ \else
+ \begingroup
+ \simplifycommands
+ \xdef\currentstructurebookmark{\detokenize\expandafter{\normalexpanded{\structureparameter\c!title}}}%
+ \endgroup
+ \fi
\fi \fi
\fi
\ifx\currentstructurelist\empty
@@ -170,75 +175,72 @@
\setnextinternalreference
\storeinternalreference\currentstructurename\nextinternalreference %
\strc_sectioning_set_reference_prefix
- \xdef\currentstructurenumber{\ctxlua{ % todo: combine with next call, adapt marks accordingly
- structures.sections.somelevel {
- references = {
- internal = \nextinternalreference,
- block = "\currentsectionblock",
- reference = "\currentstructurereference",
- referenceprefix = "\currentstructurereferenceprefix",
- backreference = "\currentstructurebackreference",
- },
- directives = {
- resetset = "\structureparameter\c!sectionresetset",
- },
- metadata = {
- kind = "section",
- name = "\currentstructurename",
- catcodes = \the\ifx\currentstructurecatcodes\empty\catcodetable\else\csname\currentstructurecatcodes\endcsname\fi,
- coding = "\currentstructurecoding",
- \ifx\currentstructurecoding\s!xml
- xmlroot = "\xmldocument",
- \fi
- \ifx\currentstructurexmlsetup\empty \else
- xmlsetup = "\currentstructurexmlsetup",
- \fi
- \ifx\currentstructuresaveinlist\v!no
- nolist = true,
- \fi
- \ifx\currentstructureincrementnumber\v!yes
- increment = "\currentstructureincrementnumber",
- \fi
- },
- titledata = { % we can add mark and reference
- label = \!!bs\detokenize\expandafter{\currentstructurelabel }\!!es,
- title = \!!bs\detokenize\expandafter{\currentstructuretitle }\!!es,
- \ifx\currentstructurebookmark\currentstructuretitle \else
- bookmark = \!!bs\detokenize\expandafter{\currentstructurebookmark }\!!es,
- \fi
- \ifx\currentstructuremarking\currentstructuretitle \else
- marking = \!!bs\detokenize\expandafter{\currentstructuremarking }\!!es,
- \fi
- \ifx\currentstructuresaveinlist\v!no \else
- \ifx\currentstructurelist\currentstructuretitle \else
- list = \!!bs\detokenize\expandafter{\currentstructurelist}\!!es,
- \fi
- \fi
- },
- numberdata = {
- % needed ?
- block = "\currentsectionblock",
- hidenumber = \ifx\currentstructureshownumber\v!no true\else nil\fi, % titles
- % so far
- separatorset = "\structureparameter\c!sectionseparatorset",
- conversion = "\structureparameter\c!sectionconversion", % for good old times sake
- conversionset = "\structureparameter\c!sectionconversionset",
- starter = \!!bs\structureparameter\c!sectionstarter\!!es,
- stopper = \!!bs\structureparameter\c!sectionstopper\!!es,
- set = "\structureparameter\c!sectionset",
- segments = "\structureparameter\c!sectionsegments",
- ownnumber = "\currentstructureownnumber",
- },
- userdata = \!!bs\detokenize{#3}\!!es % will be converted to table at the lua end
+ \clf_setsectionentry
+ references {
+ internal \nextinternalreference\space
+ % block {\currentsectionblock}
+ prefix {\currentstructurereferenceprefix}
+ reference {\currentstructurereference}
+ backreference {\currentstructurebackreference}
+ }
+ directives {
+ resetset {\structureparameter\c!sectionresetset}
+ }
+ metadata {
+ kind {section}
+ name {\currentstructurename}
+ catcodes \ifx\currentstructurecatcodes\empty\catcodetable\else\csname\currentstructurecatcodes\endcsname\fi\space
+ coding {\currentstructurecoding}
+ \ifx\currentstructurecoding\s!xml
+ xmlroot {\xmldocument}
+ \fi
+ \ifx\currentstructurexmlsetup\empty \else
+ xmlsetup {\currentstructurexmlsetup}
+ \fi
+ \ifx\currentstructuresaveinlist\v!no
+ nolist \space true\space
+ \fi
+ \ifx\currentstructureincrementnumber\v!yes
+ increment {\currentstructureincrementnumber}
+ \fi
+ }
+ titledata {
+ label {\detokenize\expandafter{\currentstructurelabel}}
+ title {\detokenize\expandafter{\currentstructuretitle}}
+ \ifx\currentstructurebookmark\currentstructuretitle \else
+ bookmark {\detokenize\expandafter{\currentstructurebookmark}}
+ \fi
+ \ifx\currentstructuremarking\currentstructuretitle \else
+ marking {\detokenize\expandafter{\currentstructuremarking}}
+ \fi
+ \ifx\currentstructuresaveinlist\v!no \else
+ \ifx\currentstructurelist\currentstructuretitle \else
+ list {\detokenize\expandafter{\currentstructurelist}}
+ \fi
+ \fi
}
- }}%
- % \xdef\currentstructurelistnumber{\ctxcommand{addtolist(structures.sections.current())}}%
- \xdef\currentstructurelistnumber{\ctxcommand{currentsectiontolist()}}%
+ numberdata {
+ % block {\currentsectionblock}
+ \ifx\currentstructureshownumber\v!no
+ hidenumber \space true\space
+ \fi
+ separatorset {\structureparameter\c!sectionseparatorset}
+ conversionset {\structureparameter\c!sectionconversionset}
+ conversion {\structureparameter\c!sectionconversion}
+ starter {\structureparameter\c!sectionstarter}
+ stopper {\structureparameter\c!sectionstopper}
+ set {\structureparameter\c!sectionset}
+ segments {\structureparameter\c!sectionsegments}
+ ownnumber {\currentstructureownnumber}
+ language {\currentlanguage}% for the moment, needed for bookmarks conversion
+ }
+ userdata {\detokenize{#3}}% will be converted to table at the lua end
+ \relax
+ \xdef\currentstructurelistnumber{\clf_currentsectiontolist}%
% \currentstructuresynchronize has to be called someplace, since it introduces a node
\setstructuresynchronization\currentstructurelistnumber
\endgroup}
-\let\currentstructurenumber \!!zerocount
\let\currentsectioncountervalue \!!zerocount % redefined later
\let\previoussectioncountervalue\!!zerocount % redefined later
@@ -261,16 +263,15 @@
% todo: #1 => "#1" ... adapt lua code for name and number
-\def\structurenumber {\ctxcommand{structurenumber()}}
-\def\structuretitle {\ctxcommand{structuretitle()}}
-\def\structurevariable #1{\ctxcommand{structurevariable("#1")}}
-\def\structureuservariable #1{\ctxcommand{structureuservariable("#1")}}
-\def\structurecatcodedget #1{\ctxcommand{structurecatcodedget("#1")}} % bad name
-\def\structuregivencatcodedget #1#2{\ctxcommand{structuregivencatcodedget("#1",\number#2)}} % bad name
-\def\structureautocatcodedget #1#2{\ctxcommand{structureautocatcodedget ("#1","#2")}}
-
-\def\namedstructurevariable #1#2{\ctxcommand{namedstructurevariable ("#1","#2")}}
-\def\namedstructureuservariable#1#2{\ctxcommand{namedstructureuservariable("#1","#2")}}
+\def\structurenumber {\clf_structurenumber}
+\def\structuretitle {\clf_structuretitle}
+\def\structurevariable #1{\clf_structurevariable {#1}}
+\def\structureuservariable #1{\clf_structureuservariable {#1}}
+\def\structurecatcodedget #1{\clf_structurecatcodedget {#1}} % bad name
+\def\structuregivencatcodedget #1#2{\clf_structuregivencatcodedget {#1}#2 } % bad name
+\def\structureautocatcodedget #1#2{\clf_structureautocatcodedget {#1}{#2}}
+\def\namedstructurevariable #1#2{\clf_namedstructurevariable {#1}{#2}}
+\def\namedstructureuservariable#1#2{\clf_namedstructureuservariable{#1}{#2}}
% compatibility issue:
%
@@ -300,18 +301,14 @@
\newconditional\c_strc_rendering_continuous % not used (mkii ?)
-\def\setstructurelevel #1#2{\ctxlua{structures.sections.setlevel("#1","#2")}} % name, level|parent
-\def\getstructurelevel #1{\ctxlua{structures.sections.getcurrentlevel("#1")}}% name
-\def\setstructurenumber #1#2{\ctxlua{structures.sections.setnumber(#1,"#2")}} % level, number (+/-)
-\def\getstructurenumber #1{\ctxlua{structures.sections.getnumber(#1)}} % level
-\def\getsomestructurenumber #1#2{\ctxlua{structures.sections.getnumber(#1,"#2")}} % level, what
-\def\getfullstructurenumber #1{\ctxlua{structures.sections.fullnumber(#1)}} % level
-\def\getsomefullstructurenumber#1#2{\ctxlua{structures.sections.fullnumber(#1,"#2")}}
-\def\getspecificstructuretitle #1{\ctxlua{structures.sections.structuredata("#1","titledata.title",nil,"\headparameter\s!catcodes")}}%
-
-% will be:
-%
-% \def\getfullstructurenumber #1{\ctxcommand{structurenumber(\thenamedheadlevel{#1})}}
+\def\setstructurelevel #1#2{\clf_setstructurelevel {#1}{#2}} % name, level|parent
+\def\getstructurelevel #1{\clf_getstructurelevel {#1}} % name
+\def\setstructurenumber #1#2{\clf_setstructurenumber #1{#2}} % level, number (+/-)
+\def\getstructurenumber #1{\clf_getstructurenumber \numexpr#1\relax} % level
+\def\getsomestructurenumber #1#2{\clf_getsomestructurenumber #1{#2}} % level, what
+\def\getfullstructurenumber #1{\clf_getfullstructurenumber \numexpr#1\relax} % level
+\def\getsomefullstructurenumber#1#2{\clf_getsomefullstructurenumber #1{#2}} % level, what
+\def\getspecificstructuretitle #1{\clf_getspecificstructuretitle {#1}{\headparameter\s!catcodes}}
% structure heads (like \startchapter)
@@ -407,7 +404,7 @@
\definemarking[\currenthead] [\currentheadsection]%
\definemarking[\currenthead\v!number][\currentheadsection]%
\setupmarking [\currenthead] [\c!filtercommand=\noexpand\sectionheadmarkingtitle {\currenthead}]%
- \setupmarking [\currenthead\c!number][\c!filtercommand=\noexpand\sectionheadmarkingnumber{\currenthead}]%
+ \setupmarking [\currenthead\v!number][\c!filtercommand=\noexpand\sectionheadmarkingnumber{\currenthead}]%
}%
\doifelselist\currenthead\donothing
{\definelist[\currenthead][\c!prefix=\v!no]}%
@@ -425,9 +422,15 @@
\the\everysetuphead
\to \everydefinehead
+\newtoks\everyredefinehead
+
+\appendtoks
+ \the\everyredefinehead
+\to \everydefinehead
+
\appendtoks
\setstructurelevel\currenthead{\thenamedheadlevel\currenthead}%
-\to \everydefinehead
+\to \everyredefinehead
\appendtoks
% beware, this is a global register
@@ -435,13 +438,14 @@
\edef\currentsectionheadcoupling{\sectionheadcoupling\currenthead}%
\edef\currentsectionheadsection {\sectionheadsection \currentsectionheadcoupling}%
\edef\currentsectionlevel {\sectionlevel \currentsectionheadsection}%
- \ctxlua{structures.sections.register("\currenthead",{
- coupling = "\currentsectionheadcoupling",
- section = "\currentsectionheadsection",
- level = \currentsectionlevel,
- })}%
+ \clf_registersection {\currenthead} {
+ coupling {\currentsectionheadcoupling}
+ section {\currentsectionheadsection}
+ level \currentsectionlevel
+ parent {\currentheadparent}
+ }%
\endgroup
-\to \everydefinehead
+\to \everyredefinehead
\appendtoks
% \setevalue{\e!next \currenthead}{\donexthead [\currenthead]}%
@@ -455,6 +459,15 @@
{\setuevalue\currenthead{\strc_sectioning_handle_nop[\currenthead]}}%
\to \everysetuphead
+\unexpanded\def\doredefinehead#1#2% called at lua end
+ {\pushmacro\currenthead
+ \pushmacro\currentheadparent
+ \edef\currenthead{#1}%
+ \edef\currentheadparent{#2}%
+ \the\everyredefinehead\relax
+ \popmacro\currentheadparent
+ \popmacro\currenthead}
+
\let\currentnamedsection\empty
\unexpanded\def\startnamedsection
@@ -578,8 +591,8 @@
% head -> head
-\def\sectionheadmarkingtitle #1#2{\ctxlua{structures.marks.title("#1","#2")}}
-\def\sectionheadmarkingnumber#1#2{\ctxlua{structures.marks.number("#1","#2")}}
+\def\sectionheadmarkingtitle #1#2{\clf_markingtitle {#1}{#2}}
+\def\sectionheadmarkingnumber#1#2{\clf_markingnumber{#1}{#2}}
\def\sectionheadcoupling#1{\namedheadparameter{#1}\c!coupling}
\def\sectionheadsection #1{\namedheadparameter{#1}\c!section}
@@ -603,7 +616,7 @@
\unexpanded\def\strc_sectioning_handle_nop_indeed[#1][#2]% for taco: [key=value] variant
{\setfalse\currentstructureown
\triggerautostructurelevel
- \doifassignmentelse{#2}\strc_sectioning_handle_nop_indeed_yes\strc_sectioning_handle_nop_indeed_nop{#1}{#2}}
+ \doifelseassignment{#2}\strc_sectioning_handle_nop_indeed_yes\strc_sectioning_handle_nop_indeed_nop{#1}{#2}}
\unexpanded\def\strc_sectioning_handle_nop_indeed_yes#1#2%
{\strc_sectioning_handle{#1}{#2}{}}
@@ -763,7 +776,7 @@
\unexpanded\def\placeheadtext {\dosingleempty\strc_sectioning_place_head_text } % use with care
\unexpanded\def\placeheadnumber{\dosingleempty\strc_sectioning_place_head_number} % use with care
-\unexpanded\def\strc_sectioning_report{\ctxlua{structures.sections.reportstructure()}}
+\unexpanded\def\strc_sectioning_report{\clf_reportstructure}
\ifdefined\strc_rendering_initialize_style_and_color \else
@@ -985,15 +998,19 @@
\hskip\s_strc_sectioniong_continuous_signal\relax
\fi}
+% \let\dotagsectionlevel\relax
+
\def\strc_sectioning_before_yes
{\strc_sectioning_check_before\strc_sectioning_handle_page_yes
\headparameter\c!inbetween
- \dostarttagged\t!section\currenthead}
+ \dostarttaggedchained\t!section\currenthead\??head
+% \dotagsectionlevel
+ }
\def\strc_sectioning_before_nop
{\strc_sectioning_check_before\strc_sectioning_handle_page_nop
\headparameter\c!inbetween
- \dostarttagged\currenthead\empty}
+ \dostarttagged\currenthead\empty} % this is a weird one .. needs checking
\def\strc_sectioning_empty_correction
{\ifconditional\c_strc_sectioning_empty
@@ -1007,7 +1024,8 @@
\def\strc_sectioning_after_yes
{\ifconditional\headisdisplay
\ifconditional\c_strc_sectioning_auto_break
- \vspacing[\v!samepage-\currentheadlevel]%
+ % \vspacing[\v!samepage-\currentheadlevel]%
+\vspacing[\v!samepage]%
\fi
\strc_sectioning_empty_correction
\headparameter\c!after
@@ -1039,8 +1057,8 @@
#1%
\fi}
-\def\currentsectioncountervalue {\ctxlua{structures.sections.depthnumber(\thenamedheadlevel\currenthead)}}
-\def\previoussectioncountervalue{\ctxlua{structures.sections.depthnumber(\thenamedheadlevel\currenthead-1)}}
+\def\currentsectioncountervalue {\clf_depthnumber\numexpr\thenamedheadlevel\currenthead\relax}
+\def\previoussectioncountervalue{\clf_depthnumber\numexpr\thenamedheadlevel\currenthead+\minusone\relax}
\def\strc_sectioning_handle_page_nop
{\edef\p_continue{\headparameter\c!continue}%
@@ -1063,10 +1081,16 @@
\strc_sectioning_handle_page_nop
\edef\p_aligntitle{\headparameter\c!aligntitle}%
\ifx\p_aligntitle\v!float
+\ifconditional\c_strc_sectioning_auto_break
+ \vspacing[\v!samepage-\currentheadlevel]%
+\fi
\headparameter\c!before\relax
\indent
\else
\page_otr_command_flush_side_floats
+\ifconditional\c_strc_sectioning_auto_break
+ \vspacing[\v!samepage-\currentheadlevel]%
+\fi
\headparameter\c!before\relax
\fi
\global\c_strc_sectioniong_preceding_level\currentheadlevel
@@ -1089,7 +1113,7 @@
{\dodoubleargument\strc_sectioning_setup_number}
\def\strc_sectioning_setup_number[#1][#2]% todo: reset if at other level
- {\setstructurenumber{\thenamedheadlevel{#1}}{#2}}
+ {\setstructurenumber{\thenamedheadlevel{#1}}{\number#2}}
\def\currentheadnumber{0} % ==> \currentheadnumber
@@ -1119,7 +1143,7 @@
\let\sectioncountervalue\structurevalue
-\def\currentheadtext{obsolete, use marks}
+\def\currentheadtext{obsolete,\space use marks}
% list references, will be redone in lua when we need it
@@ -1136,7 +1160,7 @@
\unexpanded\def\strc_sectioning_initialize_autolevel
{\ifconditional\c_strc_sectioning_auto_levels
- \ctxcommand{autonextstructurelevel(\number\currentheadlevel)}%
+ \clf_autonextstructurelevel\currentheadlevel\relax
\global\setfalse\c_strc_sectioning_auto_levels
\fi}
@@ -1144,7 +1168,7 @@
{\global\settrue\c_strc_sectioning_auto_levels}
\unexpanded\def\finalizeautostructurelevels
- {\ctxcommand{autofinishstructurelevels()}}
+ {\clf_autofinishstructurelevels}
\unexpanded\def\finalizeautostructurelevel
{\dostoptagged
@@ -1154,4 +1178,6 @@
\finalizeautostructurelevels
\to \everystoptext
+\stopcontextdefinitioncode
+
\protect \endinput
diff --git a/tex/context/base/strc-syn.lua b/tex/context/base/strc-syn.lua
index ca4b3ac18..5f3557a69 100644
--- a/tex/context/base/strc-syn.lua
+++ b/tex/context/base/strc-syn.lua
@@ -7,10 +7,13 @@ if not modules then modules = { } end modules ['strc-syn'] = {
}
local next, type = next, type
-local format = string.format
-local allocate = utilities.storage.allocate
--- interface to tex end
+local context = context
+local implement = interfaces.implement
+
+local allocate = utilities.storage.allocate
+
+local sorters = sorters
local structures = structures
local synonyms = structures.synonyms
@@ -19,6 +22,10 @@ local tags = structures.tags
local collected = allocate()
local tobesaved = allocate()
+local firstofsplit = sorters.firstofsplit
+local strip = sorters.strip
+local splitter = sorters.splitters.utf
+
synonyms.collected = collected
synonyms.tobesaved = tobesaved
@@ -37,94 +44,144 @@ job.register('structures.synonyms.collected', tobesaved, initializer, finalizer)
-- todo: allocate becomes metatable
-local function allocate(class)
- local d = tobesaved[class]
- if not d then
- d = {
- metadata = {
- language = 'en',
- sorted = false,
- class = class
- },
- entries = {
- },
- hash = {
- }
+table.setmetatableindex(tobesaved,function(t,k)
+ local v = {
+ metadata = {
+ language = 'en',
+ sorted = false,
+ class = k
+ },
+ entries = {
+ },
+ hash = {
}
- tobesaved[class] = d
- end
- return d
-end
+ }
+ t[k] = v
+ return v
+end)
function synonyms.define(class,kind)
- local data = allocate(class)
+ local data = tobesaved[class]
data.metadata.kind = kind
end
function synonyms.register(class,kind,spec)
- local data = allocate(class)
+ local data = tobesaved[class]
+ local hash = data.hash
+ local definition = spec.definition
+ local tag = definition.tag or ""
data.metadata.kind = kind -- runtime, not saved in format (yet)
- if not data.hash[spec.definition.tag or ""] then
- data.entries[#data.entries+1] = spec
- data.hash[spec.definition.tag or ""] = spec
+ if not hash[tag] then
+ if definition.used == nil then
+ definition.used = false
+ end
+ if definition.shown == nil then
+ definition.shown = false
+ end
+ local entries = data.entries
+ entries[#entries+1] = spec
+ hash[tag] = spec
end
end
function synonyms.registerused(class,tag)
- local data = allocate(class)
- local dht = data.hash[tag]
- if dht then
- dht.definition.used = true
+ local data = tobesaved[class]
+ local okay = data.hash[tag]
+ if okay then
+ local definition = okay.definition
+ definition.used = true
+ definition.list = true
+ end
+end
+
+function synonyms.registershown(class,tag)
+ local data = tobesaved[class]
+ local okay = data.hash[tag]
+ if okay then
+ local definition = okay.definition
+ definition.shown = true
+ definition.list = true
+ end
+end
+
+function synonyms.isused(class,tag)
+ local data = tobesaved[class]
+ local okay = data.hash[tag]
+ return okay and okay.definition.used
+end
+
+function synonyms.isshown(class,tag)
+ local data = tobesaved[class]
+ local okay = data.hash[tag]
+ return okay and okay.definition.shown
+end
+
+function synonyms.resetused(class)
+ for tag, data in next, tobesaved[class].hash do
+ data.definition.used = false
+ end
+end
+
+function synonyms.resetshown(class)
+ for tag, data in next, tobesaved[class].hash do
+ data.definition.shown = false
end
end
function synonyms.synonym(class,tag)
- local data = allocate(class).hash
- local d = data[tag]
- if d then
- local de = d.definition
- de.used = true
- context(de.synonym)
+ local data = tobesaved[class]
+ local okay = data.hash[tag]
+ if okay then
+ local definition = okay.definition
+ definition.used = true
+ definition.list = true
+ context(definition.synonym)
end
end
function synonyms.meaning(class,tag)
- local data = allocate(class).hash
- local d = data[tag]
- if d then
- local de = d.definition
- de.used = true
- context(de.meaning)
+ local data = tobesaved[class]
+ local okay = data.hash[tag]
+ if okay then
+ local definition = okay.definition
+ definition.shown = true
+ definition.list = true
+ context(definition.meaning)
end
end
synonyms.compare = sorters.comparers.basic -- (a,b)
function synonyms.filter(data,options)
- local result = { }
+ local result = { }
local entries = data.entries
- local all = options and options.criterium == interfaces.variables.all
- for i=1,#entries do
- local entry = entries[i]
- if all or entry.definition.used then
- result[#result+1] = entry
+ local all = options and options.criterium == interfaces.variables.all
+ if all then
+ for i=1,#entries do
+ result[i] = entries[i]
+ end
+ else
+ for i=1,#entries do
+ local entry = entries[i]
+ local definition = entry.definition
+ if definition.list then
+ result[#result+1] = entry
+ end
end
end
data.result = result
end
function synonyms.prepare(data)
- local strip = sorters.strip
- local splitter = sorters.splitters.utf
local result = data.result
if result then
for i=1, #result do
- local r = result[i]
- local rd = r.definition
- if rd then
- local rt = rd.tag
- local sortkey = (rt and rt ~= "" and rt) or rd.synonym
- r.split = splitter(strip(sortkey))
+ local entry = result[i]
+ local definition = entry.definition
+ if definition then
+ local tag = definition.tag
+ local key = tag ~= "" and tag or definition.synonym
+ entry.split = splitter(strip(key))
end
end
end
@@ -132,21 +189,31 @@ end
function synonyms.sort(data,options)
sorters.sort(data.result,synonyms.compare)
+ data.metadata.sorted = true
end
-function synonyms.finalize(data,options)
- local result = data.result
- data.metadata.nofsorted = #result
- local split = { }
+function synonyms.finalize(data,options) -- mostly the same as registers so we will generalize it: sorters.split
+ local result = data.result
+ local split = { }
+ local nofsplit = 0
+ local lasttag = nil
+ local done = nil
+ local nofdone = 0
for k=1,#result do
- local v = result[k]
- local entry, tag = sorters.firstofsplit(v)
- local s = split[entry] -- keeps track of change
- if not s then
- s = { tag = tag, data = { } }
- split[entry] = s
+ local entry = result[k]
+ local first, tag = firstofsplit(entry)
+ if tag ~= lasttag then
+ -- if trace_registers then
+ -- report_registers("splitting at %a",tag)
+ -- end
+ done = { }
+ nofdone = 0
+ nofsplit = nofsplit + 1
+ lasttag = tag
+ split[nofsplit] = { tag = tag, data = done }
end
- s.data[#s.data+1] = v
+ nofdone = nofdone + 1
+ done[nofdone] = entry
end
data.result = split
end
@@ -154,33 +221,28 @@ end
-- for now, maybe at some point we will do a multipass or so
-- maybe pass the settings differently
+local ctx_synonymentry = context.synonymentry
+
function synonyms.flush(data,options)
- local kind = data.metadata.kind -- hack, will be done better
- -- context[format("\\start%soutput",kind)]()
local result = data.result
- local sorted = table.sortedkeys(result)
- for k=1,#sorted do
- local letter = sorted[k]
- local sublist = result[letter]
- local data = sublist.data
- -- context[format("\\start%ssection",kind)](sublist.tag)
+ for i=1,#result do
+ local sublist = result[i]
+ local letter = sublist.tag
+ local data = sublist.data
for d=1,#data do
local entry = data[d].definition
- -- context[format("\\%sentry",kind)](d,entry.tag,entry.synonym,entry.meaning or "")
- context("\\%sentry{%s}{%s}{%s}{%s}",kind,d,entry.tag,entry.synonym,entry.meaning or "")
+ ctx_synonymentry(d,entry.tag,entry.synonym,entry.meaning or "")
end
- -- context[format("\\stop%ssection",kind)]()
end
- -- context[format("\\stop%soutput",kind)]()
- data.result = nil
+ data.result = nil
data.metadata.sorted = false
end
function synonyms.analyzed(class,options)
- local data = synonyms.collected[class]
+ local data = collected[class]
if data and data.entries then
options = options or { }
- sorters.setlanguage(options.language)
+ sorters.setlanguage(options.language,options.method)
synonyms.filter(data,options) -- filters entries to result
synonyms.prepare(data,options) -- adds split table parallel to list table
synonyms.sort(data,options) -- sorts entries in result
@@ -192,7 +254,65 @@ end
function synonyms.process(class,options)
if synonyms.analyzed(class,options) then
- synonyms.flush(synonyms.collected[class],options)
+ synonyms.flush(collected[class],options)
end
end
+-- todo: local higher up
+
+implement { name = "registerusedsynonym", actions = synonyms.registerused, arguments = { "string", "string" } }
+implement { name = "registershownsynonym", actions = synonyms.registershown, arguments = { "string", "string" } }
+implement { name = "synonymmeaning", actions = synonyms.meaning, arguments = { "string", "string" } }
+implement { name = "synonymname", actions = synonyms.synonym, arguments = { "string", "string" } }
+implement { name = "resetusedsynonyms", actions = synonyms.resetused, arguments = "string" }
+implement { name = "resetshownsynonyms", actions = synonyms.resetshown, arguments = "string" }
+
+implement {
+ name = "doifelsesynonymused",
+ actions = { synonyms.isused, commands.doifelse },
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "doifelsesynonymshown",
+ actions = { synonyms.isshown, commands.doifelse },
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "registersynonym",
+ actions = synonyms.register,
+ arguments = {
+ "string",
+ "string",
+ {
+ { "metadata", {
+ { "catcodes", "integer" },
+ { "coding" },
+ { "xmlroot" }
+ }
+ },
+ {
+ "definition", {
+ { "tag" },
+ { "synonym" },
+ { "meaning" },
+ { "used", "boolean" }
+ }
+ }
+ }
+ }
+}
+
+implement {
+ name = "processsynonyms",
+ actions = synonyms.process,
+ arguments = {
+ "string",
+ {
+ { "criterium" },
+ { "language" },
+ { "method" }
+ }
+ }
+}
diff --git a/tex/context/base/strc-syn.mkiv b/tex/context/base/strc-syn.mkiv
index e0087d450..b206f8069 100644
--- a/tex/context/base/strc-syn.mkiv
+++ b/tex/context/base/strc-syn.mkiv
@@ -15,28 +15,101 @@
\registerctxluafile{strc-syn}{1.001}
+%D Although we could nowadays build this on top of regular lists, we keep this
+%D more efficient variant around. Eventually we can add some options to lists
+%D that also provide such functionality, but at the cost of much more overhead.
+%D
+%D We show a usage of both synonyms and sorts, which are deep down variants of
+%D so-called simple lists. A definition looks like this:
+%D
+%D \startbuffer
+%D \definesynonyms
+%D [myabbreviation]
+%D
+%D \setupsynonyms
+%D [myabbreviation]
+%D [headstyle=bold,
+%D headcolor=darkred,
+%D synonymstyle=boldslanted,
+%D synonymcolor=darkblue,
+%D textstyle=slanted,
+%D textcolor=darkgreen,
+%D style=normal,
+%D color=darkyellow]
+%D
+%D \definesorting
+%D [mylogo]
+%D
+%D \setupsorting
+%D [mylogo]
+%D [style=bold,
+%D color=darkmagenta]
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+%D
+%D More complex definitions involve commands to call up meanings and such. The
+%D use of the defined commands is as follows:
+%D
+%D \startbuffer
+%D \myabbreviation [FIRST] {TheFirst} {The First Words}
+%D \myabbreviation [SECOND] {TheSecond} {The Second Words}
+%D \myabbreviation [THIRD] {TheThird} {The Third Words}
+%D
+%D \mylogo [FOURTH] {TheFourth}
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+%D
+%D By default a synonym is just typeset and flagged as being used, so that in
+%D a list it will be shown with its meaning. You can however also expand the
+%D meaning automatically at first use:
+%D
+%D \startbuffer
+%D \setupsynonyms[myabbreviation][alternative=first]
+%D
+%D We have \FIRST, \SECOND\ and also \THIRD\ but no \FOURTH.
+%D
+%D We have \FIRST, \SECOND\ and also \THIRD\ but no \FOURTH.
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+%D
+%D We can change the order, as demonstrated in:
+%D
+%D \startbuffer
+%D \resetshownsynonyms[myabbreviation]
+%D
+%D \setupsynonyms[myabbreviation][alternative=last]
+%D
+%D We have \FIRST\ and \THIRD\ or \FOURTH.
+%D
+%D We have \FIRST\ and \THIRD\ or \FOURTH.
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+%D
+%D A list is called up with:
+%D
+%D \startbuffer
+%D \placelistofsynonyms[myabbreviation]
+%D
+%D \placelistofsorts[mylogo]
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+%D
+%D The lists are constructions (like descriptions are) and can be set up
+%D likewise.
+
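+%D A sketch of such a setup, using keys that also show up in the simplelist
+%D defaults below (the values are only an example):
+%D
+%D \starttyping
+%D \setupsynonyms
+%D   [myabbreviation]
+%D   [width=8em,
+%D    distance=1em,
+%D    before=\blank,
+%D    after=\blank]
+%D \stoptyping
+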
% todo: add 'define only' option to descriptions, then add sorting (also based on key)
% and call to definition -> replaces this module
\unprotect
-\ifdefined\dotagsynonym \else \let\dotagsynonym\relax \fi
-\ifdefined\dotagsorting \else \let\dotagsorting\relax \fi
-
-% general help, can be shared
-
-% simplifiedcommands -> flag in lua
-%
-% expansion
-% criterium -> when start, then flag in list
-% command-> wanneer?
-% state -> flagging enabled
-% conversion ?
-% todo: register xml mode etc
-
% split but common in lua
-\def\preprocessexpansion#1#2#3#4%
+\def\preprocessexpansion#1#2#3#4% do this at the lua end if still needed
{\ifx#1\s!xml
\xmlstartraw
\xdef#2{#4}%
@@ -51,13 +124,94 @@
\globallet#3\s!tex
\fi}
-\installcorenamespace{synonym}
+%D We now use a simple list variant:
+
+\installcorenamespace {simplelist}
+
+\installcommandhandler \??simplelist {simplelist} \??simplelist
+
+\let\setupsimplelists\setupsimplelist
+
+\setupsimplelists[%
+ %c!title=,
+ %c!text=,
+ %
+ %c!style=,
+ %c!color=,
+ %c!command=,
+ %c!align=,
+ %
+ %c!headstyle=,
+ %c!headcolor=,
+ %c!headalign=,
+ %
+ %c!titlestyle=,
+ %c!titlecolor=,
+ %c!titlecommand=,
+ %c!titleleft=,
+ %c!titleright=,
+ %
+ %c!closesymbol=,
+ %c!closecommand=,
+ %
+ \c!alternative=\v!left,
+ \c!display=\v!yes,
+ \c!width=7\emwidth,
+ \c!distance=\emwidth,
+ \c!titledistance=.5\emwidth,
+ %c!hang=,
+ %c!sample=,
+ \c!margin=\v!no,
+ \c!before=\blank,
+ \c!inbetween=\blank,
+ \c!after=\blank,
+ %c!indentnext=,
+ %c!indenting=,
+ %
+ \c!expansion=\v!no,
+ %c!xmlsetup=,
+ %s!catcodes=,
+ \s!language=\currentmainlanguage,
+]
+
+\appendtoks
+ \setfalse\c_strc_constructions_define_commands
+ \ifx\currentsimplelistparent\empty
+ \defineconstruction[\currentsimplelist][\s!handler=\v!simplelist,\c!level=1]%
+ \else
+ \defineconstruction[\currentsimplelist][\currentsimplelistparent][\s!handler=\v!simplelist,\c!level=1]%
+ \fi
+ \settrue\c_strc_constructions_define_commands
+\to \everydefinesimplelist
+
+\setuvalue{\??constructioninitializer\v!simplelist}%
+ {\let\currentsimplelist \currentconstruction
+ \let\constructionparameter \simplelistparameter
+ \let\constructionnamespace \??simplelist
+ \let\detokenizedconstructionparameter\detokenizedsimplelistparameter
+ \let\letconstructionparameter \letsimplelistparameter
+ \let\useconstructionstyleandcolor \usesimpleliststyleandcolor
+ \let\setupcurrentconstruction \setupcurrentsimplelist}
+
+\setuvalue{\??constructionfinalizer\v!simplelist}%
+ {}
+
+\setuvalue{\??constructiontexthandler\v!simplelist}%
+ {\begingroup
+ \useconstructionstyleandcolor\c!headstyle\c!headcolor
+ \the\everyconstruction
+ \constructionparameter\c!headcommand
+ {\strut
+ \currentsimplelistentry}%
+ \endgroup}
-\installsimplecommandhandler \??synonym {synonym} \??synonym
+% And we build on top of this.
-\let\setupsynonyms\setupsynonym
+\ifdefined\dotagsynonym \else \let\dotagsynonym\relax \fi
+\ifdefined\dotagsorting \else \let\dotagsorting\relax \fi
-\setupsynonyms
+\definesimplelist
+ [\v!synonym]
[\c!state=\v!start,
%\c!synonymstyle=,
%\c!textstyle=,
@@ -75,147 +229,231 @@
%\c!after=,
\c!indentnext=\v!no,
%\c!expansion=,
- \c!method=,
- \s!language=\currentmainlanguage]
+ \c!method=]
+
+\let\setupsynonyms\setupsimplelist
\unexpanded\def\definesynonyms
- {\doquadrupleempty\dodefinesynonyms}
+ {\doquadrupleempty\strc_synonyms_define}
-\def\dodefinesynonyms[#1][#2][#3][#4]% name plural \meaning \use
+\def\strc_synonyms_define[#1][#2][#3][#4]% name plural \meaning \use
{\edef\currentsynonym{#1}%
\iffourthargument
- \unexpanded\def#4##1{\doinsertsynonym{#1}{##1}}% name tag
+ \unexpanded\def#4##1{\strc_synonyms_insert{#1}{##1}}% name tag
\ifthirdargument
- \unexpanded\def#3##1{\doinsertsynonymmeaning{#1}{##1}}% \meaning
+ \unexpanded\def#3##1{\strc_synonyms_insert_meaning{#1}{##1}}% \meaning
\fi
\setuvalue{#1}{\definesynonym[\v!no][#1]}% \name
\else
\ifthirdargument
- \unexpanded\def#3##1{\doinsertsynonymmeaning{#1}{##1}}% \meaning
+ \unexpanded\def#3##1{\strc_synonyms_insert_meaning{#1}{##1}}% \meaning
\fi
\setuvalue{#1}{\definesynonym[\v!yes][#1]}% \name
\fi
- \checksynonymparent
- \setupcurrentsynonym[\s!single={#1},\s!multi={#2}]%
+ %
+% \checksynonymparent
+% \setupcurrentsynonym[\s!single={#1},\s!multi={#2}]%
+ \setfalse\c_strc_constructions_define_commands
+ \definesimplelist
+ [\currentsynonym]%
+ [\v!sorting]
+ [\s!single={#1},%
+ \s!multi={#2}]%
+ \settrue\c_strc_constructions_define_commands
+ %
\presetheadtext[#2=\Word{#2}]% changes the \if...argument
- \setvalue{\e!setup #2\e!endsetup}{\setupsynonym[#1]}% obsolete definition
+ %
+ \setvalue{\e!setup #2\e!endsetup}{\setupsynonyms[#1]}% obsolete definition
\setvalue{\e!place \e!listof#2}{\placelistofsynonyms[#1]}% accepts extra argument
\setvalue{\e!complete\e!listof#2}{\completelistofsynonyms[#1]}}
\unexpanded\def\definesynonym
- {\dotripleempty\dodefinesynonym}
+ {\dotripleempty\strc_synonyms_define_entry}
-\def\dodefinesynonym[#1][#2][#3]#4#5%
+\def\strc_synonyms_define_entry[#1][#2][#3]#4#5%
{\begingroup
\edef\currentsynonym{#2}%
\edef\currentsynonymtag{#3}%
+ \let\currentsimplelist\currentsimplelist
\ifx\currentsynonymtag\empty
\edef\currentsynonymtag{#4}%
\fi
\ifx\currentsynonymtag\empty
% todo: error message
\else
- \edef\currentsynonymexpansion{\synonymparameter\c!expansion}%
- \preprocessexpansion\currentsynonymexpansion\currentsynonymtext \currentsynonymcoding{#4}%
- \preprocessexpansion\currentsynonymexpansion\currentsynonymmeaning\currentsynonymcoding{#5}%
- \ctxlua{structures.synonyms.register("\currentsynonym", "synonym", {
- metadata = {
- catcodes = \the\catcodetable,
- coding = "\currentsynonymcoding",
- xmlroot = \ifx\currentsynonymcoding\s!xml "\xmldocument" \else nil \fi,
- },
- definition = {
- tag = "\currentsynonymtag",
- synonym = \!!bs\currentsynonymtext\!!es,
- meaning = \!!bs\currentsynonymmeaning\!!es,
- used = false,
- }
- })}%
- \doif{#1}\v!yes{\setuxvalue\currentsynonymtag{\noexpand\doinsertsynonym{\currentsynonym}{\currentsynonymtag}}}%
+ \edef\currentsynonymexpansion{\simplelistparameter\c!expansion}%
+ \preprocessexpansion\currentsynonymexpansion\m_synonyms_text \currentsynonymcoding{#4}%
+ \preprocessexpansion\currentsynonymexpansion\m_synonyms_meaning\currentsynonymcoding{#5}%
+ \clf_registersynonym
+ {\currentsynonym}%
+ {synonym}%
+ {%
+ metadata {%
+ catcodes \catcodetable
+ coding {\currentsynonymcoding}%
+ \ifx\currentsynonymcoding\s!xml
+ xmlroot {\xmldocument}%
+ \fi
+ }%
+ definition {%
+ tag {\currentsynonymtag}%
+ synonym {\m_synonyms_text}%
+ meaning {\m_synonyms_meaning}%
+ % used false
+ }%
+ }%
+ \relax
+ \doif{#1}\v!yes{\setuxvalue\currentsynonymtag{\strc_synonyms_insert{\currentsynonym}{\currentsynonymtag}}}%
\fi
\endgroup}
\unexpanded\def\registersynonym
- {\dodoubleargument\doregistersynonym}
+ {\dodoubleargument\strc_synonyms_register}
+
+\def\strc_synonyms_register[#1][#2]%
+ {\clf_registerusedsynonym{#1}{#2}}
+
+\unexpanded\def\currentsynonymname {\clf_synonymname {\currentsimplelist}{\currentsynonymtag}}
+\unexpanded\def\currentsynonymmeaning {\clf_synonymmeaning {\currentsimplelist}{\currentsynonymtag}}
+\unexpanded\def\doifelsecurrentsynonymused {\clf_doifelsesynonymused {\currentsimplelist}{\currentsynonymtag}}
+\unexpanded\def\doifelsecurrentsynonymshown{\clf_doifelsesynonymshown{\currentsimplelist}{\currentsynonymtag}}
+\unexpanded\def\resetusedsynonyms [#1]{\clf_resetusedsynonyms {#1}}
+\unexpanded\def\resetshownsynonyms [#1]{\clf_resetshownsynonyms {#1}}
+
+\installcorenamespace{simplelistalternative} % specific ways of rendering a list
+\installcorenamespace{simplelistrenderings} % a namespace for setups (rather local)
-\def\doregistersynonym[#1][#2]%
- {\ctxlua{structures.synonyms.registerused("#1","#2")}}
+\installcommandhandler \??simplelistalternative {simplelistalternative} \??simplelistalternative
-\unexpanded\def\doinsertsynonymmeaning#1#2% name tag
+\setupsimplelist
+ [\v!synonym]
+ [\c!alternative=\v!normal]
+
+\unexpanded\def\strc_synonyms_insert_meaning#1#2% name tag
{\begingroup
- \def\currentsynonym{#1}%
- \usesynonymstyleandcolor\c!textstyle\c!textcolor
- \synonymparameter\c!textcommand{\ctxlua{structures.synonyms.meaning("#1","#2")}}%
+ \def\currentsimplelist{#1}%
+ \def\currentsynonymtag{#2}%
+ \fastsetup{\??simplelistrenderings::\v!text}%
\endgroup}
-\unexpanded\def\doinsertsynonym#1#2% name tag
+\unexpanded\def\strc_synonyms_insert#1#2% name tag
{\begingroup
- \def\currentsynonym{#1}%
- \def\currentsynonymtag{#2}%
- \dostarttagged\t!synonym\currentsynonym
- \dotagsynonym
- \usesynonymstyleandcolor\c!synonymstyle\c!synonymcolor
- \synonymparameter\c!synonymcommand{\ctxlua{structures.synonyms.synonym("#1","#2")}}%
- \dostoptagged
- \normalexpanded{\endgroup\synonymparameter\c!next}}
+ \edef\currentsimplelist{#1}%
+ \let \currentsynonym\currentsimplelist % for a while
+ \def \currentsynonymtag{#2}%
+ \edef\currentsimplelistalternative{\simplelistparameter\c!alternative}%
+ \fastsetup{\??simplelistrenderings:\v!synonym:\currentsimplelistalternative}%
+ \normalexpanded{\endgroup\simplelistparameter\c!next}}
+
+% \setupsimplelistalternative
+% [\c!command=\strictsimplelistparameter\c!command]
+
+\definesimplelistalternative
+ [\v!normal]
+ [\c!inbetween=\space,
+ \c!left=(,
+ \c!right=)]
+
+\definesimplelistalternative
+ [\v!first]
+ [\v!normal]
+
+\definesimplelistalternative
+ [\v!last]
+ [\v!normal]
+
+\startsetups[\??simplelistrenderings::\v!synonym]
+ \begingroup
+ \dostarttaggedchained\t!synonym\currentsynonym\??simplelist
+ \dotagsynonym
+ \usesimpleliststyleandcolor\c!synonymstyle\c!synonymcolor
+ \simplelistparameter\c!synonymcommand{\currentsynonymname}%
+ \dostoptagged
+ \endgroup
+\stopsetups
+
+\startsetups[\??simplelistrenderings::\v!text]
+ \begingroup
+ \usesimpleliststyleandcolor\c!textstyle\c!textcolor
+ \simplelistparameter\c!textcommand{\currentsynonymmeaning}%
+ \endgroup
+\stopsetups
+
+\startsetups[\??simplelistrenderings:\v!synonym:\v!normal]
+ \fastsetup{\??simplelistrenderings::\v!synonym}
+\stopsetups
+
+\startsetups[\??simplelistrenderings:\v!synonym:\v!first]
+ \fastsetup{\??simplelistrenderings::\v!synonym}
+ \doifelsecurrentsynonymshown \donothing {
+ \simplelistalternativeparameter\c!inbetween
+ \simplelistalternativeparameter\c!left
+ \fastsetup{\??simplelistrenderings::\v!text}
+ \simplelistalternativeparameter\c!right
+ }
+\stopsetups
+
+\startsetups[\??simplelistrenderings:\v!synonym:\v!last]
+ \doifelsecurrentsynonymshown {
+ \fastsetup{\??simplelistrenderings::\v!synonym}
+ } {
+ \fastsetup{\??simplelistrenderings::\v!text}
+ \simplelistalternativeparameter\c!inbetween
+ \simplelistalternativeparameter\c!left
+ \fastsetup{\??simplelistrenderings::\v!synonym}
+ \simplelistalternativeparameter\c!right
+ }
+\stopsetups
\unexpanded\def\placelistofsynonyms
- {\dodoubleempty\doplacelistofsynonyms}
+ {\dodoubleempty\strc_synonyms_place_list}
-\def\doplacelistofsynonyms[#1][#2]%
+\def\strc_synonyms_place_list[#1][#2]%
{\begingroup
- \def\currentsynonym{#1}%
- \definedescription % todo, per class
- [syndef]
- [\c!location=\synonymparameter\c!location,
- \c!width=\synonymparameter\c!width,
- \c!distance=\synonymparameter\c!distance,
- \c!sample=\synonymparameter\c!sample,
- \c!hang=\synonymparameter\c!hang,
- \c!align=\synonymparameter\c!align,
- \c!before=\synonymparameter\c!before,
- \c!inbetween=\synonymparameter\c!inbetween,
- \c!after=\synonymparameter\c!after,
- \c!indentnext=\synonymparameter\c!indentnext,
- \c!headstyle=\synonymparameter\c!textstyle,
- \c!headcolor=\synonymparameter\c!textcolor,
- \c!style=,
- \c!color=.
- #2]%
- \startpacked
- \ctxlua{structures.synonyms.process('#1',{
- criterium = "\synonymparameter\c!criterium",
- language = "\synonymparameter\s!language",
- method = "\synonymparameter\c!method",
- })}%
- \stoppacked
+ \edef\currentsimplelist{#1}%
+ \doifelsecommandhandler\??simplelist\currentsimplelist
+ {\strc_constructions_initialize{#1}%
+ \setupcurrentsimplelist[#2]%
+ \let\synonymentry\strc_synonym_normal
+ \startpacked
+ \clf_processsynonyms
+ {#1}%
+ {%
+ criterium {\simplelistparameter\c!criterium}%
+ language {\simplelistparameter\s!language}%
+ method {\simplelistparameter\c!method}%
+ }%
+ \relax
+ \stoppacked}%
+ {}% todo: message that invalid
\endgroup}
\def\completelistofsynonyms
- {\dodoubleempty\docompletelistofsynonyms}
+ {\dodoubleempty\strc_synonyms_complete_list}
-\def\docompletelistofsynonyms[#1][#2]%
- {\edef\currentsynonym{#1}%
- \normalexpanded{\startnamedsection[\v!chapter][\c!title={\headtext{\synonymparameter\s!multi}},\c!reference=#1]}%
- \doplacelistofsynonyms[#1][#2]%
- \page
- \stopnamedsection}
-
-\let\startsynonymoutput \relax
-\let\stopsynonymoutput \relax
-\let\startsynonymsection\gobbleoneargument
-\let\stopsynonymsection \relax
+\def\strc_synonyms_complete_list[#1][#2]%
+ {\begingroup
+ \edef\currentsimplelist{#1}%
+ \doifelsecommandhandler\??simplelist\currentsimplelist
+ {\normalexpanded{\startnamedsection[\v!chapter][\c!title={\headtext{\simplelistparameter\s!multi}},\c!reference=#1]}%
+ \strc_synonyms_place_list[#1][#2]%
+ \page
+ \stopnamedsection}%
+ {}% todo: message that invalid
+ \endgroup}
-\unexpanded\def\synonymentry#1#2#3#4%
- {\syndef{#3}#4\par}
+\unexpanded\def\strc_synonym_normal#1#2#3#4%
+ {\begingroup
+ \def\currentsimplelistentry{#3}%
+ \csname\??constructionstarthandler\v!construction\endcsname
+ #4%
+ \csname\??constructionstophandler\v!construction\endcsname
+ \endgroup}
%D Sorting (a simplified version of synonym).
-\installcorenamespace{sorting}
-
-\installsimplecommandhandler \??sorting {sorting} \??sorting
-
-\setupsorting
+\definesimplelist
+ [\v!sorting]
[\c!state=\v!start,
%\c!command=, % we test for defined !
%\c!criterium=,
@@ -223,123 +461,179 @@
%\c!before=,
\c!after=\endgraf,
%\c!expansion=,
- \c!method=,
- \s!language=\currentmainlanguage]
+ \c!method=]
+
+\let\setupsorting\setupsimplelist
\unexpanded\def\definesorting
- {\dotripleempty\dodefinesorting}
+ {\dotripleempty\strc_sorting_define}
% if #3=\relax or \v!none, then no command but still protected
-\def\dodefinesorting[#1][#2][#3]%
+\def\strc_sorting_define[#1][#2][#3]%
{\edef\currentsorting{#1}%
\ifthirdargument
\doifnot{#3}\v!none
{\ifx#3\relax \else
- \unexpanded\def#3##1{\doinsertsort{#1}{##1}}%
+ \unexpanded\def#3##1{\strc_sorting_insert{#1}{##1}}%
\fi}%
\setuvalue{#1}{\definesort[\v!no][#1]}%
\else
\setuvalue{#1}{\definesort[\v!yes][#1]}%
\fi
- \checksortingparent
- \setupcurrentsorting[\s!multi={#2}]%
+ \setfalse\c_strc_constructions_define_commands
+ \definesimplelist
+ [\currentsorting]%
+ [\v!sorting]
+ [\s!single={#1},%
+ \s!multi={#2}]%
+ \settrue\c_strc_constructions_define_commands
+ %
\presetheadtext[#2=\Word{#2}]% after \ifthirdargument -)
+ %
\setvalue{\e!setup #2\e!endsetup}{\setupsorting[#1]}% obsolete definition
\setvalue{\e!place \e!listof#2}{\placelistofsorts[#1]}%
\setvalue{\e!complete\e!listof#2}{\completelistofsorts[#1]}}
\unexpanded\def\definesort
- {\dotripleempty\dodefinesort}
+ {\dotripleempty\strc_sorting_define_entry}
-\def\dodefinesort[#1][#2][#3]#4%
+\def\strc_sorting_define_entry[#1][#2][#3]#4%
{\begingroup
\edef\currentsorting{#2}%
\edef\currentsortingtag{#3}%
+   \let\currentsimplelist\currentsorting
\ifx\currentsortingtag\empty
\edef\currentsortingtag{#4}%
\fi
\ifx\currentsortingtag\empty
% todo: error message
\else
- \edef\currentsortingexpansion{\sortingparameter\c!expansion}%
+ \edef\currentsortingexpansion{\simplelistparameter\c!expansion}%
\preprocessexpansion\currentsortingexpansion\currentsortingtext\currentsortingcoding{#4}%
- \ctxlua{structures.synonyms.register("\currentsorting", "sorting", {
- metadata = {
- catcodes = \the\catcodetable,
- coding = "\currentsortingcoding",
- xmlroot = \ifx\currentsortingcoding\s!xml "\xmldocument" \else nil \fi,
- },
- definition = {
- tag = "\currentsortingtag",
- synonym = \!!bs\currentsortingtext\!!es,
- % used = false,
- }
- })}%
- \doif{#1}\v!yes{\setuxvalue\currentsortingtag{\noexpand\doinsertsort{\currentsorting}{\currentsortingtag}}}%
+ \clf_registersynonym
+ {\currentsorting}%
+ {sorting}%
+ {%
+ metadata {%
+ catcodes \catcodetable
+ coding {\currentsortingcoding}%
+ \ifx\currentsortingcoding\s!xml
+ xmlroot {\xmldocument}%
+ \fi
+ }%
+ definition {%
+ tag {\currentsortingtag}%
+ synonym {\currentsortingtext}%
+ % used false
+ }%
+ }%
+ \relax
+ \doif{#1}\v!yes{\setuxvalue\currentsortingtag{\strc_sorting_insert{\currentsorting}{\currentsortingtag}}}%
\fi
\endgroup}
-\unexpanded\def\doinsertsort#1#2% name tag
+\unexpanded\def\currentsortingname {\clf_synonymname {\currentsimplelist}{\currentsortingtag}}
+\unexpanded\def\doifelsecurrentsortingused {\clf_doifelsesynonymused {\currentsimplelist}{\currentsortingtag}}
+\unexpanded\def\resetusedsortings [#1]{\clf_resetusedsynonyms {#1}}
+
+\setupsimplelist
+ [\v!sorting]
+ [\c!alternative=\v!normal]
+
+\unexpanded\def\strc_sorting_insert#1#2% name tag
{\begingroup
    % no kap currently, or .. we need to map cap onto WORD
\edef\currentsorting{#1}%
- \def\currentsortingtag{#2}%
- \dostarttagged\t!sorting\currentsorting
- \dotagsorting
- \usesortingstyleandcolor\c!style\c!color
- \ctxlua{structures.synonyms.synonym("#1","#2")}%
- \dostoptagged
- \normalexpanded{\endgroup\sortingparameter\c!next}}
+ \def \currentsortingtag{#2}%
+ \let \currentsimplelist\currentsorting
+ \edef\currentsimplelistalternative{\simplelistparameter\c!alternative}%
+ \fastsetup{\??simplelistrenderings:\v!sorting:\currentsimplelistalternative}%
+ \normalexpanded{\endgroup\simplelistparameter\c!next}}
+
+\startsetups [\??simplelistrenderings:\v!sorting:\v!normal]
+ \fastsetup{\??simplelistrenderings::\v!sorting}%
+\stopsetups
+
+\startsetups [\??simplelistrenderings::\v!sorting]
+ \begingroup
+ \dostarttaggedchained\t!sorting\currentsorting\??simplelist
+ \dotagsorting
+ \usesimpleliststyleandcolor\c!style\c!color
+ \currentsortingname
+ \dostoptagged
+ \endgroup
+\stopsetups
\unexpanded\def\registersort
- {\dodoubleargument\doregistersort}
+ {\dodoubleargument\strc_sorting_register}
-\def\doregistersort[#1][#2]%
- {\ctxlua{structures.synonyms.registerused("#1","#2")}}
+\def\strc_sorting_register[#1][#2]%
+ {\clf_registerusedsynonym{#1}{#2}}
% before after
%
% maybe just 'commandset' and then combine
\unexpanded\def\placelistofsorts
- {\dodoubleempty\doplacelistofsorts}
+ {\dodoubleempty\strc_sorting_place_list}
-\def\doplacelistofsorts[#1][#2]% STILL TO MAKE A ROUGH VERSION WITHOUT WHITESPACE ETC ETC
+\def\strc_sorting_place_list[#1][#2]%
{\begingroup
- \def\currentsorting{#1}%
- \setupcurrentsorting[#2]%
+ \edef\currentsimplelist{#1}%
+ \strc_constructions_initialize{#1}%
+ \setupcurrentsimplelist[#2]%
+ \edef\p_simplelist_command{\simplelistparameter\c!command}%
+ \ifx\p_simplelist_command\empty
+ \let\synonymentry\strc_sorting_normal
+ \else
+ \let\synonymentry\strc_sorting_command
+ \fi
\startpacked
- \ctxlua{structures.synonyms.process('#1',{
- criterium = "\sortingparameter\c!criterium",
- language = "\sortingparameter\s!language",
- method = "\sortingparameter\c!method",
- })}%
+ \clf_processsynonyms
+ {#1}%
+ {%
+ criterium {\simplelistparameter\c!criterium}%
+ language {\simplelistparameter\s!language}%
+ method {\simplelistparameter\c!method}%
+ }%
+ \relax
\stoppacked
\endgroup}
\unexpanded\def\completelistofsorts
- {\dodoubleempty\docompletelistofsorts}
+ {\dodoubleempty\strc_sorting_complete_list}
-\def\docompletelistofsorts[#1][#2]%
- {\edef\currentsorting{#1}%
- \normalexpanded{\startnamedsection[\v!chapter][\c!title={\headtext{\sortingparameter\s!multi}},\c!reference=#1]}%
- \doplacelistofsorts[#1][#2]%
+\def\strc_sorting_complete_list[#1][#2]%
+ {\begingroup
+ \edef\currentsimplelist{#1}%
+ \normalexpanded{\startnamedsection[\v!chapter][\c!title={\headtext{\simplelistparameter\s!multi}},\c!reference=#1]}%
+ \strc_sorting_place_list[#1][#2]%
\page
- \stopnamedsection}
+ \stopnamedsection
+ \endgroup}
-\let\startsortingoutput \relax
-\let\stopsortingoutput \relax
-\let\startsortingsection\gobbleoneargument
-\let\stopsortingsection \relax
+\def\strc_sorting_command#1#2#3#4% #4 is meaning but empty here
+ {\p_simplelist_command{#1}{#2}{#3}}
-\def\sortingentry#1#2#3#4% #4 is meaning but empty here
- {\doifelsenothing{\sortingparameter\c!command}
- {\begingroup\usesortingstyleandcolor\c!style\c!color#3\endgroup\par} % todo
- {\sortingparameter\c!command{#1}{#2}{#3}}}
+\def\strc_sorting_normal#1#2#3#4% #4 is meaning but empty here
+ {\begingroup
+ \usesimpleliststyleandcolor\c!style\c!color
+ #3%
+ \endgroup
+ \par}
%D Presets.
+% To be considered:
+%
+% \setupsimplelist
+% [\v!sorting]
+% [\c!headstyle=\simplelistparameter\c!synonymstyle,
+% \c!headcolor=\simplelistparameter\c!synonymcolor,
+% \c!style=\simplelistparameter\c!textstyle,
+% \c!color=\simplelistparameter\c!textcolor]
+
\definesynonyms
[\v!abbreviation]
[\v!abbreviations]
diff --git a/tex/context/base/strc-tag.lua b/tex/context/base/strc-tag.lua
index 7e5c6f993..637d74e8c 100644
--- a/tex/context/base/strc-tag.lua
+++ b/tex/context/base/strc-tag.lua
@@ -6,189 +6,219 @@ if not modules then modules = { } end modules ['strc-tag'] = {
license = "see context related readme files"
}
--- This is rather experimental code.
+-- This is rather experimental code. Tagging happens on the fly and there are two analysers
+-- involved: the pdf backend tagger and the exporter. They share data but there are subtle
+-- differences. Each tag carries a specification; these can be accessed by attribute (the tag
+-- at the end of the chain) or by a so-called fulltag: a tag name combined with a number.
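--
-- A rough sketch, not part of the patch itself, of the two access paths just mentioned;
-- the attribute value 3 and the fulltag "section>1" are invented for illustration:
--
--   local tags = structures.tags
--   local spec = tags.taglist[3]                  -- access by attribute (innermost element)
--   local same = tags.specifications["section>1"] -- access by fulltag: tagname>index
--   -- both refer to the same specification table, roughly:
--   -- { tagname = "section", tagindex = 1, attribute = 3,
--   --   taglist = { "document>1", "section>1" } }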
+local type, next = type, next
local insert, remove, unpack, concat = table.insert, table.remove, table.unpack, table.concat
-local gsub, find, topattern, format = string.gsub, string.find, string.topattern, string.format
-local lpegmatch = lpeg.match
+local find, topattern, format = string.find, string.topattern, string.format
+local lpegmatch, P, S, C, Cc = lpeg.match, lpeg.P, lpeg.S, lpeg.C, lpeg.Cc
local texattribute = tex.attribute
local allocate = utilities.storage.allocate
local settings_to_hash = utilities.parsers.settings_to_hash
+local setmetatableindex = table.setmetatableindex
local trace_tags = false trackers.register("structures.tags", function(v) trace_tags = v end)
local report_tags = logs.reporter("structure","tags")
-local attributes, structures = attributes, structures
+local attributes = attributes
+local structures = structures
+local implement = interfaces.implement
local a_tagged = attributes.private('tagged')
local unsetvalue = attributes.unsetvalue
local codeinjections = backends.codeinjections
-local taglist = allocate()
-local properties = allocate()
-local labels = allocate()
-local stack = { }
-local chain = { }
-local ids = { }
-local enabled = false
-local tagdata = { } -- used in export
-local tagmetadata = { } -- used in export
-
-local tags = structures.tags
-tags.taglist = taglist -- can best be hidden
-tags.labels = labels
-tags.data = tagdata
-tags.metadata = tagmetadata
-
-local properties = allocate {
-
- document = { pdf = "Div", nature = "display" },
-
- division = { pdf = "Div", nature = "display" },
- paragraph = { pdf = "P", nature = "mixed" },
- p = { pdf = "P", nature = "mixed" },
- construct = { pdf = "Span", nature = "inline" },
- highlight = { pdf = "Span", nature = "inline" },
-
- section = { pdf = "Sect", nature = "display" },
- sectiontitle = { pdf = "H", nature = "mixed" },
- sectionnumber = { pdf = "H", nature = "mixed" },
- sectioncontent = { pdf = "Div", nature = "display" },
-
- itemgroup = { pdf = "L", nature = "display" },
- item = { pdf = "Li", nature = "display" },
- itemtag = { pdf = "Lbl", nature = "mixed" },
- itemcontent = { pdf = "LBody", nature = "mixed" },
-
- description = { pdf = "Div", nature = "display" },
- descriptiontag = { pdf = "Div", nature = "mixed" },
- descriptioncontent = { pdf = "Div", nature = "mixed" },
- descriptionsymbol = { pdf = "Span", nature = "inline" }, -- note reference
-
- verbatimblock = { pdf = "Code", nature = "display" },
- verbatimlines = { pdf = "Code", nature = "display" },
- verbatimline = { pdf = "Code", nature = "mixed" },
- verbatim = { pdf = "Code", nature = "inline" },
-
- lines = { pdf = "Code", nature = "display" },
- line = { pdf = "Code", nature = "mixed" },
-
- synonym = { pdf = "Span", nature = "inline" },
- sorting = { pdf = "Span", nature = "inline" },
-
- register = { pdf = "Div", nature = "display" },
- registersection = { pdf = "Div", nature = "display" },
- registertag = { pdf = "Span", nature = "mixed" },
- registerentries = { pdf = "Div", nature = "display" },
- registerentry = { pdf = "Span", nature = "mixed" },
- registersee = { pdf = "Span", nature = "mixed" },
- registerpages = { pdf = "Span", nature = "mixed" },
- registerpage = { pdf = "Span", nature = "inline" },
- registerpagerange = { pdf = "Span", nature = "mixed" },
-
- table = { pdf = "Table", nature = "display" },
- tablerow = { pdf = "TR", nature = "display" },
- tablecell = { pdf = "TD", nature = "mixed" },
-
- tabulate = { pdf = "Table", nature = "display" },
- tabulaterow = { pdf = "TR", nature = "display" },
- tabulatecell = { pdf = "TD", nature = "mixed" },
-
- list = { pdf = "TOC", nature = "display" },
- listitem = { pdf = "TOCI", nature = "display" },
- listtag = { pdf = "Lbl", nature = "mixed" },
- listcontent = { pdf = "P", nature = "mixed" },
- listdata = { pdf = "P", nature = "mixed" },
- listpage = { pdf = "Reference", nature = "mixed" },
-
- delimitedblock = { pdf = "BlockQuote", nature = "display" },
- delimited = { pdf = "Quote", nature = "inline" },
- subsentence = { pdf = "Span", nature = "inline" },
-
- label = { pdf = "Span", nature = "mixed" },
- number = { pdf = "Span", nature = "mixed" },
-
- float = { pdf = "Div", nature = "display" }, -- Figure
- floatcaption = { pdf = "Caption", nature = "mixed" },
- floatlabel = { pdf = "Span", nature = "inline" },
- floatnumber = { pdf = "Span", nature = "inline" },
- floattext = { pdf = "Span", nature = "mixed" },
- floatcontent = { pdf = "P", nature = "mixed" },
-
- image = { pdf = "P", nature = "mixed" },
- mpgraphic = { pdf = "P", nature = "mixed" },
-
- formulaset = { pdf = "Div", nature = "display" },
- formula = { pdf = "Div", nature = "display" }, -- Formula
- formulacaption = { pdf = "Span", nature = "mixed" },
- formulalabel = { pdf = "Span", nature = "mixed" },
- formulanumber = { pdf = "Span", nature = "mixed" },
- formulacontent = { pdf = "P", nature = "display" },
- subformula = { pdf = "Div", nature = "display" },
-
- link = { pdf = "Link", nature = "inline" },
-
- margintextblock = { pdf = "Span", nature = "inline" },
- margintext = { pdf = "Span", nature = "inline" },
-
- math = { pdf = "Div", nature = "inline" }, -- no display
- mn = { pdf = "Span", nature = "mixed" },
- mi = { pdf = "Span", nature = "mixed" },
- mo = { pdf = "Span", nature = "mixed" },
- ms = { pdf = "Span", nature = "mixed" },
- mrow = { pdf = "Span", nature = "display" },
- msubsup = { pdf = "Span", nature = "display" },
- msub = { pdf = "Span", nature = "display" },
- msup = { pdf = "Span", nature = "display" },
- merror = { pdf = "Span", nature = "mixed" },
- munderover = { pdf = "Span", nature = "display" },
- munder = { pdf = "Span", nature = "display" },
- mover = { pdf = "Span", nature = "display" },
- mtext = { pdf = "Span", nature = "mixed" },
- mfrac = { pdf = "Span", nature = "display" },
- mroot = { pdf = "Span", nature = "display" },
- msqrt = { pdf = "Span", nature = "display" },
- mfenced = { pdf = "Span", nature = "display" },
- maction = { pdf = "Span", nature = "display" },
-
- mtable = { pdf = "Table", nature = "display" }, -- might change
- mtr = { pdf = "TR", nature = "display" }, -- might change
- mtd = { pdf = "TD", nature = "display" }, -- might change
-
- ignore = { pdf = "Span", nature = "mixed" },
- metadata = { pdf = "Div", nature = "display" },
- metavariable = { pdf = "Span", nature = "mixed" },
-
- mid = { pdf = "Span", nature = "inline" },
- sub = { pdf = "Span", nature = "inline" },
- sup = { pdf = "Span", nature = "inline" },
- subsup = { pdf = "Span", nature = "inline" },
-
- combination = { pdf = "Span", nature = "display" },
- combinationpair = { pdf = "Span", nature = "display" },
- combinationcontent = { pdf = "Span", nature = "mixed" },
- combinationcaption = { pdf = "Span", nature = "mixed" },
+local taglist = allocate() -- access by attribute
+local specifications = allocate() -- access by fulltag
+local labels = allocate()
+local stack = { }
+local chain = { }
+local ids = { }
+local enabled = false
+local tagcontext = { }
+local tagpatterns = { }
+local lasttags = { }
+local stacksize = 0
+local metadata = nil -- applied to the next element
+
+local tags = structures.tags
+tags.taglist = taglist -- can best be hidden
+tags.labels = labels
+tags.patterns = tagpatterns
+tags.specifications = specifications
+
+-- Tags are internally stored as:
+--
+-- tag>number tag>number tag>number
+
+local p_splitter = C((1-S(">"))^1) * P(">") * C(P(1)^1)
+tagpatterns.splitter = p_splitter
+
+local properties = allocate {
+
+ document = { pdf = "Div", nature = "display" },
+
+ division = { pdf = "Div", nature = "display" },
+ paragraph = { pdf = "P", nature = "mixed" },
+ p = { pdf = "P", nature = "mixed" },
+ construct = { pdf = "Span", nature = "inline" },
+ highlight = { pdf = "Span", nature = "inline" },
+
+ section = { pdf = "Sect", nature = "display" },
+ sectiontitle = { pdf = "H", nature = "mixed" },
+ sectionnumber = { pdf = "H", nature = "mixed" },
+ sectioncontent = { pdf = "Div", nature = "display" },
+
+ itemgroup = { pdf = "L", nature = "display" },
+ item = { pdf = "LI", nature = "display" },
+ itemtag = { pdf = "Lbl", nature = "mixed" },
+ itemcontent = { pdf = "LBody", nature = "mixed" },
+ itemhead = { pdf = "Div", nature = "display" },
+ itembody = { pdf = "Div", nature = "display" },
+
+ description = { pdf = "Div", nature = "display" },
+ descriptiontag = { pdf = "Div", nature = "mixed" },
+ descriptioncontent = { pdf = "Div", nature = "mixed" },
+ descriptionsymbol = { pdf = "Span", nature = "inline" }, -- note reference
+
+ verbatimblock = { pdf = "Code", nature = "display" },
+ verbatimlines = { pdf = "Code", nature = "display" },
+ verbatimline = { pdf = "Code", nature = "mixed" },
+ verbatim = { pdf = "Code", nature = "inline" },
+
+ lines = { pdf = "Code", nature = "display" },
+ line = { pdf = "Code", nature = "mixed" },
+
+ synonym = { pdf = "Span", nature = "inline" },
+ sorting = { pdf = "Span", nature = "inline" },
+
+ register = { pdf = "Div", nature = "display" },
+ registerlocation = { pdf = "Span", nature = "inline" },
+ registersection = { pdf = "Div", nature = "display" },
+ registertag = { pdf = "Span", nature = "mixed" },
+ registerentries = { pdf = "Div", nature = "display" },
+ registerentry = { pdf = "Div", nature = "display" },
+ registercontent = { pdf = "Span", nature = "mixed" },
+ registersee = { pdf = "Span", nature = "mixed" },
+ registerpages = { pdf = "Span", nature = "mixed" },
+ registerpage = { pdf = "Span", nature = "mixed" },
+ registerseparator = { pdf = "Span", nature = "inline" },
+ registerpagerange = { pdf = "Span", nature = "mixed" },
+
+ table = { pdf = "Table", nature = "display" },
+ tablerow = { pdf = "TR", nature = "display" },
+ tablecell = { pdf = "TD", nature = "mixed" },
+
+ tabulate = { pdf = "Table", nature = "display" },
+ tabulaterow = { pdf = "TR", nature = "display" },
+ tabulatecell = { pdf = "TD", nature = "mixed" },
+
+ list = { pdf = "TOC", nature = "display" },
+ listitem = { pdf = "TOCI", nature = "display" },
+ listtag = { pdf = "Lbl", nature = "mixed" },
+ listcontent = { pdf = "P", nature = "mixed" },
+ listdata = { pdf = "P", nature = "mixed" },
+ listpage = { pdf = "Reference", nature = "mixed" },
+
+ delimitedblock = { pdf = "BlockQuote", nature = "display" },
+ delimited = { pdf = "Quote", nature = "inline" },
+ subsentence = { pdf = "Span", nature = "inline" },
+
+ label = { pdf = "Span", nature = "mixed" },
+ number = { pdf = "Span", nature = "mixed" },
+
+ float = { pdf = "Div", nature = "display" }, -- Figure
+ floatcaption = { pdf = "Caption", nature = "mixed" },
+ floatlabel = { pdf = "Span", nature = "inline" },
+ floatnumber = { pdf = "Span", nature = "inline" },
+ floattext = { pdf = "Span", nature = "mixed" },
+ floatcontent = { pdf = "P", nature = "mixed" },
+
+ image = { pdf = "P", nature = "mixed" },
+ mpgraphic = { pdf = "P", nature = "mixed" },
+
+ formulaset = { pdf = "Div", nature = "display" },
+ formula = { pdf = "Div", nature = "display" }, -- Formula
+ formulacaption = { pdf = "Span", nature = "mixed" },
+ formulalabel = { pdf = "Span", nature = "mixed" },
+ formulanumber = { pdf = "Span", nature = "mixed" },
+ formulacontent = { pdf = "P", nature = "display" },
+ subformula = { pdf = "Div", nature = "display" },
+
+ link = { pdf = "Link", nature = "inline" },
+
+ margintextblock = { pdf = "Span", nature = "inline" },
+ margintext = { pdf = "Span", nature = "inline" },
+
+ math = { pdf = "Div", nature = "inline" }, -- no display
+ mn = { pdf = "Span", nature = "mixed" },
+ mi = { pdf = "Span", nature = "mixed" },
+ mo = { pdf = "Span", nature = "mixed" },
+ ms = { pdf = "Span", nature = "mixed" },
+ mrow = { pdf = "Span", nature = "display" },
+ msubsup = { pdf = "Span", nature = "display" },
+ msub = { pdf = "Span", nature = "display" },
+ msup = { pdf = "Span", nature = "display" },
+ merror = { pdf = "Span", nature = "mixed" },
+ munderover = { pdf = "Span", nature = "display" },
+ munder = { pdf = "Span", nature = "display" },
+ mover = { pdf = "Span", nature = "display" },
+ mtext = { pdf = "Span", nature = "mixed" },
+ mfrac = { pdf = "Span", nature = "display" },
+ mroot = { pdf = "Span", nature = "display" },
+ msqrt = { pdf = "Span", nature = "display" },
+ mfenced = { pdf = "Span", nature = "display" },
+ maction = { pdf = "Span", nature = "display" },
+
+ mstacker = { pdf = "Span", nature = "display" }, -- these are only internally used
+ mstackertop = { pdf = "Span", nature = "display" }, -- these are only internally used
+ mstackerbot = { pdf = "Span", nature = "display" }, -- these are only internally used
+ mstackermid = { pdf = "Span", nature = "display" }, -- these are only internally used
+
+ mtable = { pdf = "Table", nature = "display" }, -- might change
+ mtr = { pdf = "TR", nature = "display" }, -- might change
+ mtd = { pdf = "TD", nature = "display" }, -- might change
+
+ ignore = { pdf = "Span", nature = "mixed" }, -- used internally
+ private = { pdf = "Span", nature = "mixed" }, -- for users (like LS) when they need it
+ metadata = { pdf = "Div", nature = "display" },
+ metavariable = { pdf = "Span", nature = "mixed" },
+
+ mid = { pdf = "Span", nature = "inline" },
+ sub = { pdf = "Span", nature = "inline" },
+ sup = { pdf = "Span", nature = "inline" },
+ subsup = { pdf = "Span", nature = "inline" },
+
+ combination = { pdf = "Span", nature = "display" },
+ combinationpair = { pdf = "Span", nature = "display" },
+ combinationcontent = { pdf = "Span", nature = "mixed" },
+ combinationcaption = { pdf = "Span", nature = "mixed" },
}
-function tags.detailedtag(tag,detail,attribute)
- if not attribute then
- attribute = texattribute[a_tagged]
- end
+tags.properties = properties
+
+local patterns = setmetatableindex(function(t,tag)
+ local v = topattern("^" .. tag .. ">")
+ t[tag] = v
+ return v
+end)
+
+function tags.locatedtag(tag)
+ local attribute = texattribute[a_tagged]
if attribute >= 0 then
- local tl = taglist[attribute]
- if tl then
- local pattern
- if detail and detail ~= "" then
- pattern = "^" .. tag .. ":".. detail .. "%-"
- else
- pattern = "^" .. tag .. "%-"
- end
- for i=#tl,1,-1 do
- local tli = tl[i]
- if find(tli,pattern) then
- return tli
+ local specification = taglist[attribute]
+ if specification then
+ local taglist = specification.taglist
+ local pattern = patterns[tag]
+ for i=#taglist,1,-1 do
+ local t = taglist[i]
+ if find(t,pattern) then
+ return t
end
end
end
@@ -198,12 +228,20 @@ function tags.detailedtag(tag,detail,attribute)
return false -- handy as bogus index
end
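-- A hedged usage sketch (not part of the patch): locatedtag walks the current chain from
-- the innermost element outwards and returns the first fulltag whose name matches, using
-- the cached anchored patterns built above; the chain below is made up:
--
--   -- with taglist = { "document>1", "float>2", "floatcaption>2" } active:
--   -- tags.locatedtag("floatcaption") --> "floatcaption>2"
--   -- tags.locatedtag("itemgroup")    --> false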
-tags.properties = properties
-
-local lasttags = { }
-local userdata = { }
-
-tags.userdata = userdata
+function structures.atlocation(str)
+    local specification = taglist[texattribute[a_tagged]]
+    if specification then
+        local list = specification.taglist
+        if list then
+            local pattern = patterns[str]
+            for i=#list,1,-1 do
+                if find(list[i],pattern) then
+                    return true
+                end
+            end
+        end
+    end
+end
function tags.setproperty(tag,key,value)
local p = properties[tag]
@@ -214,15 +252,18 @@ function tags.setproperty(tag,key,value)
end
end
-function tags.registerdata(data)
- local fulltag = chain[nstack]
- if fulltag then
- tagdata[fulltag] = data
+function tags.setaspect(key,value)
+ local tag = chain[stacksize]
+ if tag then
+ local p = properties[tag]
+ if p then
+ p[key] = value
+ else
+ properties[tag] = { [key] = value }
+ end
end
end
-local metadata
-
function tags.registermetadata(data)
local d = settings_to_hash(data)
if metadata then
@@ -232,75 +273,92 @@ function tags.registermetadata(data)
end
end
-local nstack = 0
-
function tags.start(tag,specification)
- local label, detail, user
- if specification then
- label, detail, user = specification.label, specification.detail, specification.userdata
- end
if not enabled then
codeinjections.enabletags()
enabled = true
end
--
---~ labels[tag] = label ~= "" and label or tag
---~ local fulltag
---~ if detail and detail ~= "" then
---~ fulltag = tag .. ":" .. detail
---~ else
---~ fulltag = tag
---~ end
+ labels[tag] = tag -- can go away
--
- local fulltag = label ~= "" and label or tag
- labels[tag] = fulltag
- if detail and detail ~= "" then
- fulltag = fulltag .. ":" .. detail
- end
+ local attribute = #taglist + 1
+ local tagindex = (ids[tag] or 0) + 1
--
- local t = #taglist + 1
- local n = (ids[fulltag] or 0) + 1
- ids[fulltag] = n
- lasttags[tag] = n
- local completetag = fulltag .. "-" .. n
- nstack = nstack + 1
- chain[nstack] = completetag
- stack[nstack] = t
- -- a copy as we can add key values for alt and actualtext if needed:
- taglist[t] = { unpack(chain,1,nstack) }
+ local completetag = tag .. ">" .. tagindex
--
- if user and user ~= "" then
- -- maybe we should merge this into taglist or whatever ... anyway there is room to optimize
- -- taglist.userdata = settings_to_hash(user)
- userdata[completetag] = settings_to_hash(user)
- end
- if metadata then
- tagmetadata[completetag] = metadata
+ ids[tag] = tagindex
+ lasttags[tag] = tagindex
+ stacksize = stacksize + 1
+ --
+ chain[stacksize] = completetag
+ stack[stacksize] = attribute
+ tagcontext[tag] = completetag
+ --
+ local tagnesting = { unpack(chain,1,stacksize) } -- a copy so we can add actualtext
+ --
+ if specification then
+ specification.attribute = attribute
+ specification.tagindex = tagindex
+ specification.taglist = tagnesting
+ specification.tagname = tag
+ if metadata then
+ specification.metadata = metadata
+ metadata = nil
+ end
+ local userdata = specification.userdata
+        if userdata ~= "" and type(userdata) == "string" then
+ specification.userdata = settings_to_hash(userdata)
+ end
+ local detail = specification.detail
+ if detail == "" then
+ specification.detail = nil
+ end
+ local parents = specification.parents
+ if parents == "" then
+ specification.parents = nil
+ end
+ else
+ specification = {
+ attribute = attribute,
+ tagindex = tagindex,
+ taglist = tagnesting,
+ tagname = tag,
+ metadata = metadata,
+ }
metadata = nil
end
- texattribute[a_tagged] = t
- return t
+ --
+ taglist[attribute] = specification
+ specifications[completetag] = specification
+ --
+ texattribute[a_tagged] = attribute
+ return attribute
end
-function tags.restart(completetag)
- local t = #taglist + 1
- nstack = nstack + 1
- chain[nstack] = completetag
- stack[nstack] = t
- taglist[t] = { unpack(chain,1,nstack) }
- texattribute[a_tagged] = t
- return t
+function tags.restart(attribute)
+ stacksize = stacksize + 1
+ if type(attribute) == "number" then
+ local taglist = taglist[attribute].taglist
+ chain[stacksize] = taglist[#taglist]
+ else
+ chain[stacksize] = attribute -- a string
+ attribute = #taglist + 1
+ taglist[attribute] = { taglist = { unpack(chain,1,stacksize) } }
+ end
+ stack[stacksize] = attribute
+ texattribute[a_tagged] = attribute
+ return attribute
end
function tags.stop()
- if nstack > 0 then
- nstack = nstack -1
+ if stacksize > 0 then
+ stacksize = stacksize - 1
end
- local t = stack[nstack]
+ local t = stack[stacksize]
if not t then
- if trace_tags then
- report_tags("ignoring end tag, previous chain: %s",nstack > 0 and concat(chain[nstack],"",1,nstack) or "none")
- end
+ -- if trace_tags then
+            report_tags("ignoring end tag, previous chain: %s",stacksize > 0 and concat(chain," ",1,stacksize) or "none")
+ -- end
t = unsetvalue
end
texattribute[a_tagged] = t
@@ -308,24 +366,56 @@ function tags.stop()
end
function tags.getid(tag,detail)
- if detail and detail ~= "" then
- return ids[tag .. ":" .. detail] or "?"
- else
- return ids[tag] or "?"
- end
+ return ids[tag] or "?"
end
function tags.last(tag)
return lasttags[tag] -- or false
end
-function tags.lastinchain()
- return chain[nstack]
+function tags.lastinchain(tag)
+ if tag and tag ~= "" then
+ return tagcontext[tag]
+ else
+ return chain[stacksize]
+ end
end
-function structures.atlocation(str)
- local location = gsub(concat(taglist[texattribute[a_tagged]],"-"),"%-%d+","")
- return find(location,topattern(str)) ~= nil
+local strip = C((1-S(">"))^1)
+
+function tags.elementtag()
+ local fulltag = chain[stacksize]
+ if fulltag then
+ return lpegmatch(strip,fulltag)
+ end
+end
+
+function tags.strip(fulltag)
+ return lpegmatch(strip,fulltag)
+end
+
+function tags.setuserproperties(tag,list)
+ if not list or list == "" then
+ tag, list = chain[stacksize], tag
+ else
+ tag = tagcontext[tag]
+ end
+ if tag then -- an attribute now
+ local l = settings_to_hash(list)
+ local s = specifications[tag]
+ if s then
+ local u = s.userdata
+ if u then
+ for k, v in next, l do
+ u[k] = v
+ end
+ else
+ s.userdata = l
+ end
+ else
+ -- error
+ end
+ end
end
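-- Sketch of the two calling conventions handled above (the key/value pairs are invented):
--
--   tags.setuserproperties("status=draft")           -- applies to the current element
--   tags.setuserproperties("float","kind=chemical")  -- applies to the most recently
--                                                    -- started float element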
function tags.handler(head) -- we need a dummy
@@ -334,8 +424,8 @@ end
statistics.register("structure elements", function()
if enabled then
- if nstack > 0 then
- return format("%s element chains identified, open chain: %s ",#taglist,concat(chain," => ",1,nstack))
+ if stacksize > 0 then
+ return format("%s element chains identified, open chain: %s ",#taglist,concat(chain," => ",1,stacksize))
else
return format("%s element chains identified",#taglist)
end
@@ -349,6 +439,65 @@ directives.register("backend.addtags", function(v)
end
end)
-commands.starttag = tags.start
-commands.stoptag = tags.stop
-commands.settagproperty = tags.setproperty
+-- interface
+
+local starttag = tags.start
+
+implement {
+ name = "starttag",
+ actions = starttag,
+ arguments = { "string" }
+}
+
+implement {
+ name = "stoptag",
+ actions = tags.stop,
+}
+
+implement {
+ name = "starttag_u",
+ scope = "private",
+ actions = function(tag,userdata) starttag(tag,{ userdata = userdata }) end,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "starttag_d",
+ scope = "private",
+ actions = function(tag,detail) starttag(tag,{ detail = detail }) end,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "starttag_c",
+ scope = "private",
+ actions = function(tag,detail,parents) starttag(tag,{ detail = detail, parents = parents }) end,
+ arguments = { "string", "string", "string" }
+}
+
+implement { name = "settagaspect", actions = tags.setaspect, arguments = { "string", "string" } }
+
+implement { name = "settagproperty", actions = tags.setproperty, arguments = { "string", "string", "string" } }
+implement { name = "settagproperty_b", actions = tags.setproperty, arguments = { "string", "'backend'", "string" }, scope = "private" }
+implement { name = "settagproperty_n", actions = tags.setproperty, arguments = { "string", "'nature'", "string" }, scope = "private" }
+
+implement { name = "getelementtag", actions = { tags.elementtag, context } }
+
+implement {
+ name = "setelementuserproperties",
+ scope = "private",
+ actions = tags.setuserproperties,
+ arguments = { "string", "string" }
+}
+
+implement {
+ name = "doifelseinelement",
+ actions = { structures.atlocation, commands.testcase },
+ arguments = "string",
+}
+
+implement {
+ name = "settaggedmetadata",
+ actions = structures.tags.registermetadata,
+ arguments = "string"
+}
diff --git a/tex/context/base/strc-tag.mkiv b/tex/context/base/strc-tag.mkiv
index 6e792fd3f..f2b59c29c 100644
--- a/tex/context/base/strc-tag.mkiv
+++ b/tex/context/base/strc-tag.mkiv
@@ -11,6 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+% labels: no language needed
% key/values and other names might change (and probably will)
\writestatus{loading}{ConTeXt Structure Macros / Tags}
@@ -22,123 +23,139 @@
%D Eventually these labels will either move to the modules
%D where they're used, or they will en dup in mult-tag.
-\def\t!document {document} % Div
-
-\def\t!division {division} % Div
-\def\t!paragraph {paragraph} % P
-\def\t!p {p} % P
-\def\t!construct {construct} % Span
-\def\t!highlight {highlight} % Span
-
-\def\t!section {section} % Sect
-\def\t!sectiontitle {sectiontitle} % H
-\def\t!sectionnumber {sectionnumber} % H
-\def\t!sectioncontent {sectioncontent} % Div
-
-\def\t!itemgroup {itemgroup} % L
-\def\t!item {item} % Li
-\def\t!itemtag {itemtag} % Lbl
-\def\t!itemcontent {itemcontent} % LBody
-
-\def\t!description {description} % Li
-\def\t!descriptiontag {descriptiontag} % Lbl
-\def\t!descriptioncontent {descriptioncontent} % LBody
-\def\t!descriptionsymbol {descriptionsymbol} % Span
-
-\let\t!construction \t!description
-\let\t!constructiontag \t!descriptiontag
-\let\t!constructioncontent\t!descriptioncontent
-\let\t!constructionsymbol \t!descriptionsymbol
-
-\def\t!verbatimblock {verbatimblock} % Code
-\def\t!verbatimlines {verbatimlines} % Code
-\def\t!verbatimline {verbatimline} % Code
-\def\t!verbatim {verbatim} % Code
-
-\def\t!lines {lines} % Code
-\def\t!line {line} % Code
-
-\def\t!sorting {sorting} % Span
-\def\t!synonym {synonym} % Span
-
-\def\t!register {register} % Div
-\def\t!registersection {registersection} % Div
-\def\t!registertag {registertag} % Span
-\def\t!registerentries {registerentries} % Div
-\def\t!registerentry {registerentry} % Span
-\def\t!registersee {registersee} % Span
-\def\t!registerpages {registerpages} % Span
-\def\t!registerpage {registerpage} % Span
-\def\t!registerpagerange {registerpagerange} % Span
-
-\def\t!table {table} % Table
-\def\t!tablerow {tablerow} % TR
-\def\t!tablecell {tablecell} % TD
-\def\t!tabulate {tabulate} % Table
-\def\t!tabulaterow {tabulaterow} % TR
-\def\t!tabulatecell {tabulatecell} % TD
-
-\def\t!math {math} % math
-\def\t!mathtable {mtable} % Table
-\def\t!mathtablerow {mtr} % TR
-\def\t!mathtablecell {mtd} % TD
-\def\t!mathaction {maction} %
-
-\def\t!list {list} % TOC
-\def\t!listitem {listitem} % TOCI
-\def\t!listtag {listtag} % Lbl
-\def\t!listcontent {listcontent} % P
-\def\t!listdata {listdata} % P
-\def\t!listpage {listpage} % Reference
-
-\def\t!delimitedblock {delimited} % BlockQuote
-\def\t!delimited {delimited} % Quote
-\def\t!subsentence {subsentence} % Span
-
-\def\t!float {float} % Div
-\def\t!floatcaption {floatcaption} % Caption
-\def\t!floatlabel {floatlabel} % Span
-\def\t!floattext {floattext} % Span
-\def\t!floatnumber {floatnumber} % Span
-\def\t!floatcontent {floatcontent} % P
-
-\def\t!image {image} % P
-
-\def\t!mpgraphic {mpgraphic} % P
-
-\def\t!formulaset {formulaset} % Div
-\def\t!formula {formula} % Div
-\def\t!formulacaption {formulacaption} % Span
-\def\t!formulalabel {formulalabel} % Span
-\def\t!formulanumber {formulanumber} % P
-\def\t!formulacontent {formulacontent} % P
-\def\t!subformula {subformula} % Div
-
-\def\t!link {link} % Link
-
-\def\t!margintext {margintext} % Span
-\def\t!margintextblock {margintextblock} % Div
+\def\t!document {document} % Div
+
+\def\t!division {division} % Div
+\def\t!paragraph {paragraph} % P
+\def\t!p {p} % P
+\def\t!construct {construct} % Span
+\def\t!highlight {highlight} % Span
+
+\def\t!section {section} % Sect
+\def\t!sectiontitle {sectiontitle} % H
+\def\t!sectionnumber {sectionnumber} % H
+\def\t!sectioncontent {sectioncontent} % Div
+
+\def\t!itemgroup {itemgroup} % L
+\def\t!item {item} % Li
+\def\t!itemtag {itemtag} % Lbl
+\def\t!itemcontent {itemcontent} % LBody
+\def\t!itemhead {itemhead} % Div
+\def\t!itembody {itembody} % Div
+
+\def\t!description {description} % Li
+\def\t!descriptiontag {descriptiontag} % Lbl
+\def\t!descriptioncontent {descriptioncontent} % LBody
+\def\t!descriptionsymbol {descriptionsymbol} % Span
+
+\let\t!construction \t!description
+\let\t!constructiontag \t!descriptiontag
+\let\t!constructioncontent \t!descriptioncontent
+\let\t!constructionsymbol \t!descriptionsymbol
+
+\def\t!verbatimblock {verbatimblock} % Code
+\def\t!verbatimlines {verbatimlines} % Code
+\def\t!verbatimline {verbatimline} % Code
+\def\t!verbatim {verbatim} % Code
+
+\def\t!lines {lines} % Code
+\def\t!line {line} % Code
+
+\def\t!sorting {sorting} % Span
+\def\t!synonym {synonym} % Span
+
+\def\t!register {register} % Div
+\def\t!registerlocation {registerlocation} % Span
+\def\t!registersection {registersection} % Div
+\def\t!registertag {registertag} % Span
+\def\t!registerentries {registerentries} % Div
+\def\t!registerentry {registerentry} % Span
+\def\t!registercontent {registercontent} % Span
+\def\t!registersee {registersee} % Span
+\def\t!registerpages {registerpages} % Span
+\def\t!registerpage {registerpage} % Span
+\def\t!registerpagerange {registerpagerange} % Span
+\def\t!registerfrompage {registerfrompage} % Span
+\def\t!registertopage {registertopage} % Span
+\def\t!registerseparator {registerseparator} % Span
+
+\def\t!table {table} % Table
+\def\t!tablerow {tablerow} % TR
+\def\t!tablecell {tablecell} % TD
+\def\t!tabulate {tabulate} % Table
+\def\t!tabulaterow {tabulaterow} % TR
+\def\t!tabulatecell {tabulatecell} % TD
+
+\def\t!math {math} % math
+\def\t!mathtable {mtable} % Table
+\def\t!mathtablerow {mtr} % TR
+\def\t!mathtablecell {mtd} % TD
+\def\t!mathaction {maction} %
+\def\t!mathstacker {mstacker}
+\def\t!mathstackertop {mstackertop}
+\def\t!mathstackermid {mstackermid}
+\def\t!mathstackerbot {mstackerbot}
+
+\def\t!munderover {munderover} % special cases
+\def\t!munder {munder} % special cases
+\def\t!mover {mover} % special cases
+
+\def\t!list {list} % TOC
+\def\t!listitem {listitem} % TOCI
+\def\t!listtag {listtag} % Lbl
+\def\t!listcontent {listcontent} % P
+\def\t!listdata {listdata} % P
+\def\t!listpage {listpage} % Reference
+
+\def\t!delimitedblock {delimited} % BlockQuote
+\def\t!delimited {delimited} % Quote
+\def\t!subsentence {subsentence} % Span
+
+\def\t!float {float} % Div
+\def\t!floatcaption {floatcaption} % Caption
+\def\t!floatlabel {floatlabel} % Span
+\def\t!floattext {floattext} % Span
+\def\t!floatnumber {floatnumber} % Span
+\def\t!floatcontent {floatcontent} % P
+
+\def\t!image {image} % P
+
+\def\t!mpgraphic {mpgraphic} % P
+
+\def\t!formulaset {formulaset} % Div
+\def\t!formula {formula} % Div
+\def\t!formulacaption {formulacaption} % Span
+\def\t!formulalabel {formulalabel} % Span
+\def\t!formulanumber {formulanumber} % P
+\def\t!formulacontent {formulacontent} % P
+\def\t!subformula {subformula} % Div
+
+\def\t!link {link} % Link
+
+\def\t!margintext {margintext} % Span
+\def\t!margintextblock {margintextblock} % Div
% we might opt for verbose variants so this is experimental:
-\def\t!label {label} % Span
-\def\t!number {number} % Span
+\def\t!label {label} % Span
+\def\t!number {number} % Span
-\def\t!ignore {ignore} % Span
+\def\t!ignore {ignore} % Span
+\def\t!private {private} % Span
-\def\t!mid {mid} % Span
-\def\t!sub {sub} % Span
-\def\t!sup {sup} % Span
-\def\t!subsup {subsup} % Span
+\def\t!mid {mid} % Span
+\def\t!sub {sub} % Span
+\def\t!sup {sup} % Span
+\def\t!subsup {subsup} % Span
-\def\t!unit {unit} % Span
-\def\t!quantity {quantity} % Span
-\def\t!number {number} % Span
+\def\t!unit {unit} % Span
+\def\t!quantity {quantity} % Span
+\def\t!number {number} % Span
-\def\t!combination {combination} % Span
-\def\t!combinationpair {combinationpair} % Span
-\def\t!combinationcontent {combinationcontent} % Span
-\def\t!combinationcaption {combinationcaption} % Span
+\def\t!combination {combination} % Span
+\def\t!combinationpair {combinationpair} % Span
+\def\t!combinationcontent {combinationcontent} % Span
+\def\t!combinationcaption {combinationcaption} % Span
% \setuptaglabeltext
% [en]
@@ -152,8 +169,11 @@
\unexpanded\def\setelementbackendtag{\dodoubleargument\strc_tags_set_backend}
\unexpanded\def\setelementnature {\dodoubleargument\strc_tags_set_nature}
-\def\strc_tags_set_backend[#1][#2]{\ctxcommand{settagproperty("#1","backend","#2")}}
-\def\strc_tags_set_nature [#1][#2]{\ctxcommand{settagproperty("#1","nature", "#2")}}
+\def\strc_tags_set_backend[#1][#2]{\clf_settagproperty_b{#1}{#2}} % todo: ignore when no export
+\def\strc_tags_set_nature [#1][#2]{\clf_settagproperty_n{#1}{#2}} % todo: ignore when no export
+
+\unexpanded\def\strc_tags_set_aspect_nop#1#2{}
+\unexpanded\def\strc_tags_set_aspect_yes#1#2{\clf_settagaspect{#1}{#2}} % todo: ignore when no export / also \let
\installcorenamespace{tagging}
@@ -176,11 +196,14 @@
\expandafter\strc_tags_element_stop_yes
\fi}
+% it makes no sense to have labels ... maybe some day as a last 'replace' in the export
+% which might be more efficient than ... okay, we now cannot overload but who cares
+
\unexpanded\def\strc_tags_element_start_yes_indeed_yes[#1][#2]%
- {\ctxcommand{starttag("#1",{label="\dogetupsometaglabeltext{#1}",userdata=\!!bs#2\!!es})}}
+ {\clf_starttag_u{#1}{#2}}
\unexpanded\def\strc_tags_element_stop_yes
- {\ctxcommand{stoptag()}}
+ {\clf_stoptag}
\unexpanded\def\strc_tags_element_start_nop_indeed[#1][#2]%
{}
@@ -191,19 +214,35 @@
\unexpanded\def\strc_tags_enable_elements
{\setuplanguage[\s!default][\s!righthyphenchar="AD]% for the moment here
\let\startelement\strc_tags_element_start_yes
- \let\stopelement \strc_tags_element_stop_yes}
+ \let\stopelement \strc_tags_element_stop_yes
+ \let\dosettagproperty\strc_tags_set_aspect_yes}
\unexpanded\def\strc_tags_disable_elements
{\let\startelement\strc_tags_element_start_nop
- \let\stopelement \strc_tags_element_stop_nop}
+ \let\stopelement \strc_tags_element_stop_nop
+ \let\dosettagproperty\strc_tags_set_aspect_nop}
% beware: making these unexpanded spoils tables (noalign problem)
-\def\strc_tags_enabled_start
+\def\strc_tags_enabled_start_no_detail
+ {\iftrialtypesetting
+ \expandafter\strc_tags_start_nop_no_detail
+ \else
+ \expandafter\strc_tags_start_yes_no_detail
+ \fi}
+
+\def\strc_tags_enabled_start_detail
+ {\iftrialtypesetting
+ \expandafter\strc_tags_start_nop_detail
+ \else
+ \expandafter\strc_tags_start_yes_detail
+ \fi}
+
+\def\strc_tags_enabled_start_chained
{\iftrialtypesetting
- \expandafter\strc_tags_start_nop
+ \expandafter\strc_tags_start_nop_chained
\else
- \expandafter\strc_tags_start_yes
+ \expandafter\strc_tags_start_yes_chained
\fi}
\def\strc_tags_enabled_stop
@@ -213,25 +252,66 @@
\expandafter\strc_tags_stop_yes
\fi}
-\def\strc_tags_start_yes#1#2% we could have a fast labeltext resolver
- {\ctxcommand{starttag("#1",{label="\dogetupsometaglabeltext{#1}",detail="#2"})}}
+\def\strc_tags_start_yes_no_detail #1{\clf_starttag{#1}}
+\def\strc_tags_start_yes_detail #1#2{\clf_starttag_d{#1}{#2}}
+\def\strc_tags_start_yes_chained #1#2#3{\clf_starttag_c{#1}{#2}{\getcurrentparentchain#3{#2}}}
+\def\strc_tags_stop_yes {\clf_stoptag}
-\def\strc_tags_stop_yes
- {\ctxcommand{stoptag()}}
+\let\strc_tags_start_nop_no_detail\gobbleoneargument
+\let\strc_tags_start_nop_detail \gobbletwoarguments
+\let\strc_tags_start_nop_chained \gobblethreearguments
+\let\strc_tags_stop_nop \donothing
-\def\strc_tags_start_nop#1#2%
- {}
+% more efficient:
-\def\strc_tags_stop_nop
- {}
+% \dostarttagged % {tag} {detail}
+% \dostarttaggedchained % {tag} {detail} \??hash
+% \dostarttaggednodetail % {tag}
+
+% \unexpanded\def\strc_tags_enable
+% {\let\dostarttagged\strc_tags_start_yes
+% \let\dostoptagged \strc_tags_stop_yes}
\unexpanded\def\strc_tags_enable
- {\let\dostarttagged\strc_tags_start_yes
- \let\dostoptagged \strc_tags_stop_yes}
+ {\let\dostarttagged \strc_tags_enabled_start_detail
+ \let\dostarttaggednodetail\strc_tags_enabled_start_no_detail
+ \let\dostarttaggedchained \strc_tags_enabled_start_chained
+ \let\dostoptagged \strc_tags_enabled_stop}
\unexpanded\def\strc_tags_disable
- {\let\dostarttagged\strc_tags_start_nop
- \let\dostoptagged \strc_tags_stop_nop}
+ {\let\dostarttagged \strc_tags_start_nop_detail
+ \let\dostarttaggednodetail\strc_tags_start_nop_no_detail
+ \let\dostarttaggedchained \strc_tags_start_nop_chained
+ \let\dostoptagged \strc_tags_stop_nop}
+
+% for luigi (beware: fully expandable):
+
+\def\strc_tags_get_element_tag_yes{\clf_getelementtag}
+\let\strc_tags_get_element_tag_nop\donothing
+
+\unexpanded\def\strc_tags_setup_element_user_properties_yes
+ {\dodoubleempty\strc_tags_setup_element_user_properties_indeed}
+
+\unexpanded\def\strc_tags_setup_element_user_properties_nop
+ {\dodoubleempty\strc_tags_setup_element_user_properties_indeed_nop}
+
+\def\strc_tags_setup_element_user_properties_indeed
+ {\iftrialtypesetting
+ \expandafter\strc_tags_setup_element_user_properties_indeed_nop
+ \else
+ \expandafter\strc_tags_setup_element_user_properties_indeed_yes
+ \fi}
+
+\def\strc_tags_setup_element_user_properties_indeed_nop[#1][#2]{}
+\def\strc_tags_setup_element_user_properties_indeed_yes[#1][#2]{\clf_setelementuserproperties{#1}{#2}}
+
+\unexpanded\def\strc_tags_enable_properties
+ {\let\getelementtag \strc_tags_get_element_tag_yes
+ \let\setupelementuserproperties\strc_tags_setup_element_user_properties_yes}
+
+\unexpanded\def\strc_tags_disable_properties
+ {\let\getelementtag \strc_tags_get_element_tag_nop
+ \let\setupelementuserproperties\strc_tags_setup_element_user_properties_nop}
%D The triggers:
@@ -240,11 +320,13 @@
\appendtoks
\strc_tags_enable_elements
+ \strc_tags_enable_properties
\doifelse{\taggingparameter\c!method}\v!auto\strc_tags_enable\strc_tags_disable
\to \everyenableelements
\appendtoks
\strc_tags_disable_elements
+ \strc_tags_disable_properties
\strc_tags_disable
\to \everydisableelements
@@ -277,41 +359,41 @@
\installcorenamespace {paragraph}
\installcommandhandler \??paragraph {paragraph} \??paragraph
-\setupparagraph % someday maybe also strut (beg/end)
+\setupparagraph % someday maybe also strut (beg/end) and align
[\c!color=,
\c!style=]
\unexpanded\def\startparagraph
- {\dodoubleempty\paragraph_start}
+ {\dodoubleempty\paragraph_start}
\def\paragraph_start
- {\iffirstargument
- \ifsecondargument
- \doubleexpandafter\paragraph_start_two
- \else
- \doubleexpandafter\paragraph_start_one
- \fi
- \else
- \expandafter\paragraph_start_zero
- \fi}
+ {\endgraf % we end before the group
+ \begingroup
+ \iffirstargument
+ \ifsecondargument
+ \doubleexpandafter\paragraph_start_two
+ \else
+ \doubleexpandafter\paragraph_start_one
+ \fi
+ \else
+ \expandafter\paragraph_start_zero
+ \fi}
\def\paragraph_start_two[#1][#2]%
- {\endgraf % we end before the group
- \begingroup
- \let\stopparagraph\paragraph_stop_indeed
- \edef\currentparagraph{#1}%
- \setupcurrentparagraph[#2]%
- \paragraph_start_indeed}
+ {\edef\currentparagraph{#1}%
+ \setupcurrentparagraph[#2]%
+ \paragraph_start_indeed}
\def\paragraph_start_one[#1][#2]%
- {\endgraf % we end before the group
- \begingroup
- \let\stopparagraph\paragraph_stop_indeed
- \doifassignmentelse{#1}
- {\let\currentparagraph\empty
- \setupcurrentparagraph[#1]}
- {\edef\currentparagraph{#1}}%
- \paragraph_start_indeed}
+ {\doifelseassignment{#1}
+ {\let\currentparagraph\empty
+ \setupcurrentparagraph[#1]}
+ {\edef\currentparagraph{#1}}%
+ \paragraph_start_indeed}
+
+\def\paragraph_start_zero[#1][#2]%
+ {\let\currentparagraph\empty
+ \paragraph_start_indeed}
\def\paragraph_start_indeed
{\useparagraphstyleandcolor\c!style\c!color
@@ -319,27 +401,32 @@
\usesetupsparameter\paragraphparameter
\dostarttagged\t!paragraph\currentparagraph}
-\def\paragraph_start_zero[#1][#2]%
- {\let\currentparagraph\empty
- \paragraph_start_indeed}
-
-\unexpanded\def\paragraph_stop_indeed
- {\dostoptagged
- \endgraf % we end inside the group
- \endgroup}
-
\unexpanded\def\stopparagraph
- {\dostoptagged}
+ {\dostoptagged
+ \endgraf % we end inside the group
+ \endgroup}
\let\startpar\startparagraph
\let\stoppar \stopparagraph
+\def\strc_tags_document_start_indeed
+ {\glet\strc_tags_document_start_indeed\relax
+ \dostarttagged\t!document\empty}
+
+\def\strc_tags_document_stop_indeed
+ {\glet\strc_tags_document_stop_indeed\relax
+ \dostoptagged}
+
\appendtoks
- \dostarttagged\t!document\empty
-\to \everystarttext
+ \strc_tags_document_start_indeed % here because otherwise products don't get a root (starttext before env)
+\to \everyenableelements
+
+% \appendtoks
+% \strc_tags_document_start_indeed
+% \to \everystarttext
\appendtoks
- \dostoptagged
+ \strc_tags_document_stop_indeed
\to \everystoptext
\appendtoks
@@ -347,12 +434,14 @@
\strc_tags_disable
\to \everybeforepagebody
-% \doifinelementelse{structure:section} {yes} {no}
-% \doifinelementelse{structure:chapter} {yes} {no}
-% \doifinelementelse{division:*-structure:chapter} {yes} {no}
+% \doifelseinelement{structure:section} {yes} {no}
+% \doifelseinelement{structure:chapter} {yes} {no}
+% \doifelseinelement{division:*-structure:chapter} {yes} {no}
+
+\unexpanded\def\doifelseinelement#1%
+ {\clf_doifelseinelement{#1}}
-\unexpanded\def\doifinelementelse#1%
- {\ctxcommand{testcase(structures.atlocation("#1"))}}
+\let\doifinelementelse\doifelseinelement
\unexpanded\def\taggedlabeltexts#1#2#3% experimental: label, numberdetail, numbercontent
{\begingroup
@@ -398,7 +487,7 @@
%D \stoptyping
\unexpanded\def\settaggedmetadata[#1]%
- {\ctxlua{structures.tags.registermetadata(\!!bs#1\!!es)}}
+ {\clf_settaggedmetadata{#1}}
%D An overload:
diff --git a/tex/context/base/supp-box.lua b/tex/context/base/supp-box.lua
index 27078f46f..7cc71a891 100644
--- a/tex/context/base/supp-box.lua
+++ b/tex/context/base/supp-box.lua
@@ -8,13 +8,16 @@ if not modules then modules = { } end modules ['supp-box'] = {
-- this is preliminary code, use insert_before etc
+local lpegmatch = lpeg.match
+
local report_hyphenation = logs.reporter("languages","hyphenation")
local tex = tex
local context = context
-local commands = commands
local nodes = nodes
+local implement = interfaces.implement
+
local splitstring = string.split
local nodecodes = nodes.nodecodes
@@ -26,110 +29,217 @@ local glue_code = nodecodes.glue
local kern_code = nodecodes.kern
local glyph_code = nodecodes.glyph
-local new_penalty = nodes.pool.penalty
-local new_hlist = nodes.pool.hlist
-local new_glue = nodes.pool.glue
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
-local free_node = nodes.free
-local copy_list = nodes.copy_list
-local copy_node = nodes.copy
-local find_tail = nodes.tail
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getattribute = nuts.getattribute
+local getbox = nuts.getbox
+
+local setfield = nuts.setfield
+local setbox = nuts.setbox
+
+local free_node = nuts.free
+local flush_list = nuts.flush_list
+local copy_node = nuts.copy
+local copy_list = nuts.copy_list
+local find_tail = nuts.tail
+local traverse_id = nuts.traverse_id
+local link_nodes = nuts.linked
+
+local listtoutf = nodes.listtoutf
+
+local nodepool = nuts.pool
+local new_penalty = nodepool.penalty
+local new_hlist = nodepool.hlist
+local new_glue = nodepool.glue
+local new_rule = nodepool.rule
+local new_kern = nodepool.kern
+
+local setlistcolor = nodes.tracers.colors.setlist
-local texsetbox = tex.setbox
-local texgetbox = tex.getbox
local texget = tex.get
+local texgetbox = tex.getbox
-local function hyphenatedlist(list)
- while list do
- local id, next, prev = list.id, list.next, list.prev
+local function hyphenatedlist(head,usecolor)
+ local current = head and tonut(head)
+ while current do
+ local id = getid(current)
+ local next = getnext(current)
+ local prev = getprev(current)
if id == disc_code then
- local hyphen = list.pre
- if hyphen then
- local penalty = new_penalty(-500)
- hyphen.next, penalty.prev = penalty, hyphen
- prev.next, next.prev = hyphen, penalty
- penalty.next, hyphen.prev = next, prev
- list.pre = nil
- free_node(list)
+ local pre = getfield(current,"pre")
+ local post = getfield(current,"post")
+ local replace = getfield(current,"replace")
+ if pre then
+ setfield(current,"pre",nil)
+ end
+ if post then
+ setfield(current,"post",nil)
end
+ if not usecolor then
+ -- nothing fancy done
+ elseif pre and post then
+ setlistcolor(pre,"darkmagenta")
+ setlistcolor(post,"darkcyan")
+ elseif pre then
+ setlistcolor(pre,"darkyellow")
+ elseif post then
+ setlistcolor(post,"darkyellow")
+ end
+ if replace then
+ flush_list(replace)
+ setfield(current,"replace",nil)
+ end
+ -- setfield(current,"replace",new_rule(65536)) -- new_kern(65536*2))
+ setfield(current,"next",nil)
+ setfield(current,"prev",nil)
+ local list = link_nodes (
+ pre and new_penalty(10000),
+ pre,
+ current,
+ post,
+ post and new_penalty(10000)
+ )
+ local tail = find_tail(list)
+ if prev then
+ setfield(prev,"next",list)
+ setfield(list,"prev",prev)
+ end
+ if next then
+ setfield(tail,"next",next)
+ setfield(next,"prev",tail)
+ end
+ -- free_node(current)
elseif id == vlist_code or id == hlist_code then
- hyphenatedlist(list.list)
+ hyphenatedlist(getlist(current))
end
- list = next
+ current = next
end
end
-commands.hyphenatedlist = hyphenatedlist
+implement {
+ name = "hyphenatedlist",
+ arguments = { "integer", "boolean" },
+ actions = function(n,color)
+ local b = texgetbox(n)
+ if b then
+ hyphenatedlist(b.list,color)
+ end
+ end
+}
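-- A rough sketch, not part of the patch, of what the rewrite above does to a single
-- discretionary; the node lists are shown symbolically and the word is invented:
--
--   -- before:  c o n [disc pre = "-"] t e x t
--   -- after:   c o n [penalty 10000] - [empty disc] t e x t
--
-- that is, the pre (and, when present, post) material becomes visible inline, guarded by
-- penalties, while any replace text is flushed; with the second (boolean) argument set,
-- the pre/post parts are additionally colored via setlistcolor.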
-function commands.showhyphenatedinlist(list)
- report_hyphenation("show: %s",nodes.listtoutf(list,false,true))
-end
+-- local function hyphenatedhack(head,pre)
+-- pre = tonut(pre)
+-- for n in traverse_id(disc_code,tonut(head)) do
+-- local hyphen = getfield(n,"pre")
+-- if hyphen then
+-- flush_list(hyphen)
+-- end
+-- setfield(n,"pre",copy_list(pre))
+-- end
+-- end
+--
+-- commands.hyphenatedhack = hyphenatedhack
local function checkedlist(list)
if type(list) == "number" then
- return texgetbox(list).list
+ return getlist(getbox(tonut(list)))
else
- return list
+ return tonut(list)
end
end
-local function applytochars(list,what,nested)
- local doaction = context[what or "ruledhbox"]
- local noaction = context
- local current = checkedlist(list)
+implement {
+ name = "showhyphenatedinlist",
+ arguments = "integer",
+ actions = function(box)
+        report_hyphenation("show: %s",listtoutf(checkedlist(box),false,true))
+ end
+}
+
+local function applytochars(current,doaction,noaction,nested)
while current do
- local id = current.id
+ local id = getid(current)
if nested and (id == hlist_code or id == vlist_code) then
context.beginhbox()
- applytochars(current.list,what,nested)
+ applytochars(getlist(current),doaction,noaction,nested)
context.endhbox()
elseif id ~= glyph_code then
- noaction(copy_node(current))
+ noaction(tonode(copy_node(current)))
else
- doaction(copy_node(current))
+ doaction(tonode(copy_node(current)))
end
- current = current.next
+ current = getnext(current)
end
end
-local function applytowords(list,what,nested)
- local doaction = context[what or "ruledhbox"]
- local noaction = context
- local current = checkedlist(list)
+local function applytowords(current,doaction,noaction,nested)
local start
while current do
- local id = current.id
+ local id = getid(current)
if id == glue_code then
if start then
- doaction(copy_list(start,current))
+ doaction(tonode(copy_list(start,current)))
start = nil
end
- noaction(copy_node(current))
+ noaction(tonode(copy_node(current)))
elseif nested and (id == hlist_code or id == vlist_code) then
context.beginhbox()
- applytowords(current.list,what,nested)
+ applytowords(getlist(current),doaction,noaction,nested)
context.egroup()
elseif not start then
start = current
end
- current = current.next
+ current = getnext(current)
end
if start then
- doaction(copy_list(start))
+ doaction(tonode(copy_list(start)))
end
end
-commands.applytochars = applytochars
-commands.applytowords = applytowords
+local methods = {
+ char = applytochars,
+ characters = applytochars,
+ word = applytowords,
+ words = applytowords,
+}
+
+implement {
+ name = "applytobox",
+ arguments = {
+ {
+ { "box", "integer" },
+ { "command" },
+ { "method" },
+ { "nested", "boolean" },
+ }
+ },
+ actions = function(specification)
+ local list = checkedlist(specification.box)
+ local action = methods[specification.method or "char"]
+ if list and action then
+ action(list,context[specification.command or "ruledhbox"],context,specification.nested)
+ end
+ end
+}
local split_char = lpeg.Ct(lpeg.C(1)^0)
local split_word = lpeg.tsplitat(lpeg.patterns.space)
local split_line = lpeg.tsplitat(lpeg.patterns.eol)
-function commands.processsplit(str,command,how,spaced)
- how = how or "word"
- if how == "char" then
- local words = lpeg.match(split_char,str)
+local function processsplit(specification)
+ local str = specification.data or ""
+ local command = specification.command or "ruledhbox"
+ local method = specification.method or "word"
+ local spaced = specification.spaced
+ if method == "char" or method == "character" then
+ local words = lpegmatch(split_char,str)
for i=1,#words do
local word = words[i]
if word == " " then
@@ -142,8 +252,8 @@ function commands.processsplit(str,command,how,spaced)
context(word)
end
end
- elseif how == "word" then
- local words = lpeg.match(split_word,str)
+ elseif method == "word" then
+ local words = lpegmatch(split_word,str)
for i=1,#words do
local word = words[i]
if spaced and i > 1 then
@@ -155,8 +265,8 @@ function commands.processsplit(str,command,how,spaced)
context(word)
end
end
- elseif how == "line" then
- local words = lpeg.match(split_line,str)
+ elseif method == "line" then
+ local words = lpegmatch(split_line,str)
for i=1,#words do
local word = words[i]
if spaced and i > 1 then
@@ -173,63 +283,88 @@ function commands.processsplit(str,command,how,spaced)
end
end
+implement {
+ name = "processsplit",
+ actions = processsplit,
+ arguments = {
+ {
+ { "data" },
+ { "command" },
+ { "method" },
+ { "spaced", "boolean" },
+ }
+ }
+}
+
local a_vboxtohboxseparator = attributes.private("vboxtohboxseparator")
-function commands.vboxlisttohbox(original,target,inbetween)
- local current = texgetbox(original).list
- local head = nil
- local tail = nil
- while current do
- local id = current.id
- local next = current.next
- if id == hlist_code then
- local list = current.list
- if head then
- if inbetween > 0 then
- local n = new_glue(0,0,inbetween)
- tail.next = n
- n.prev = tail
- tail = n
- end
- tail.next = list
- list.prev = tail
- else
- head = list
- end
- tail = find_tail(list)
- -- remove last separator
- if tail.id == hlist_code and tail[a_vboxtohboxseparator] == 1 then
- local temp = tail
- local prev = tail.prev
- if next then
- local list = tail.list
- prev.next = list
- list.prev = prev
- tail.list = nil
- tail = find_tail(list)
+implement {
+ name = "vboxlisttohbox",
+ arguments = { "integer", "integer", "dimen" },
+ actions = function(original,target,inbetween)
+ local current = getlist(getbox(original))
+ local head = nil
+ local tail = nil
+ while current do
+ local id = getid(current)
+ local next = getnext(current)
+ if id == hlist_code then
+ local list = getlist(current)
+ if head then
+ if inbetween > 0 then
+ local n = new_glue(0,0,inbetween)
+ setfield(tail,"next",n)
+ setfield(n,"prev",tail)
+ tail = n
+ end
+ setfield(tail,"next",list)
+ setfield(list,"prev",tail)
else
- tail = prev
+ head = list
end
- free_node(temp)
+ tail = find_tail(list)
+ -- remove last separator
+ if getid(tail) == hlist_code and getattribute(tail,a_vboxtohboxseparator) == 1 then
+ local temp = tail
+ local prev = getprev(tail)
+ if next then
+ local list = getlist(tail)
+ setfield(prev,"next",list)
+ setfield(list,"prev",prev)
+ setfield(tail,"list",nil)
+ tail = find_tail(list)
+ else
+ tail = prev
+ end
+ free_node(temp)
+ end
+ -- done
+ setfield(tail,"next",nil)
+ setfield(current,"list",nil)
end
- -- done
- tail.next = nil
- current.list = nil
+ current = next
end
- current = next
+ local result = new_hlist()
+ setfield(result,"list",head)
+ setbox(target,result)
end
- local result = new_hlist()
- result.list = head
- texsetbox(target,result)
-end
+}
-function commands.hboxtovbox(original)
- local b = texgetbox(original)
- local factor = texget("baselineskip").width / texget("hsize")
- b.depth = 0
- b.height = b.width * factor
-end
+implement {
+ name = "hboxtovbox",
+ arguments = "integer",
+ actions = function(n)
+ local b = getbox(n)
+ local factor = texget("baselineskip").width / texget("hsize")
+ setfield(b,"depth",0)
+ setfield(b,"height",getfield(b,"width") * factor)
+ end
+}
-function commands.boxtostring(n)
- context.puretext(nodes.toutf(tex.box[n].list)) -- helper is defined later
-end
+implement {
+ name = "boxtostring",
+ arguments = "integer",
+ actions = function(n)
+ context.puretext(nodes.toutf(texgetbox(n).list)) -- helper is defined later
+ end
+}
diff --git a/tex/context/base/supp-box.mkiv b/tex/context/base/supp-box.mkiv
index 66f373b72..54b0e2860 100644
--- a/tex/context/base/supp-box.mkiv
+++ b/tex/context/base/supp-box.mkiv
@@ -570,7 +570,7 @@
%D \doiftext {data} {then branch}
%D \stoptyping
-\unexpanded\def\doiftextelse#1%
+\unexpanded\def\doifelsetext#1%
{\begingroup
\setbox\scratchbox\hbox
{\settrialtypesetting
@@ -581,6 +581,8 @@
\endgroup\expandafter\firstoftwoarguments
\fi}
+\let\doiftextelse\doifelsetext
+
\unexpanded\def\doiftext#1%
{\begingroup
\setbox\scratchbox\hbox
@@ -658,8 +660,8 @@
%D A slower but more versatile implementation is:
%D
%D \starttyping
-%D \long\def\dowithnextbox#1#2%
-%D {\long\def\syst_boxes_with_next_box{#1}%
+%D \unexpanded\def\dowithnextbox#1#2%
+%D {\def\syst_boxes_with_next_box{#1}%
%D \ifx#2\hbox
%D \afterassignment\syst_boxes_with_next_box_indeed
%D \else\ifx#2\vbox
@@ -1063,10 +1065,11 @@
%D \showhyphens{dohyphenatedword}
%D \stoptyping
-\def\doshowhyphenatednextbox
- {\ctxcommand{showhyphenatedinlist(tex.box[\number\nextbox].list)}}
+\unexpanded\def\doshowhyphenatednextbox
+ {\clf_showhyphenatedinlist\nextbox}
-\unexpanded\def\showhyphens{\dowithnextboxcs\doshowhyphenatednextbox\hbox}
+\unexpanded\def\showhyphens
+ {\dowithnextboxcs\doshowhyphenatednextbox\hbox}
%D The following macros are seldom used but handy for tracing.
%D
@@ -1076,14 +1079,20 @@
%D \hyphenatedfile{tufte}
%D \stoptyping
-\def\dohyphenatednextbox
- {\ctxcommand{hyphenatedlist(tex.box[\number\nextbox].list)}%
+\unexpanded\def\dohyphenatednextbox
+ {\clf_hyphenatedlist\nextbox false\relax
\unhbox\nextbox}
\unexpanded\def\hyphenatedword {\dowithnextboxcs\dohyphenatednextbox\hbox}
\unexpanded\def\hyphenatedpar {\dowithnextboxcs\dohyphenatednextbox\hbox}
\unexpanded\def\hyphenatedfile#1{\dowithnextboxcs\dohyphenatednextbox\hbox{\readfile{#1}\donothing\donothing}}
+\unexpanded\def\dohyphenatednextboxcolor
+ {\clf_hyphenatedlist\nextbox true\relax
+ \unhbox\nextbox}
+
+\unexpanded\def\hyphenatedcoloredword{\dowithnextboxcs\dohyphenatednextboxcolor\hbox}
+
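+%D A minimal usage sketch of the colored variant (the word itself is arbitrary):
+%D
+%D \starttyping
+%D \hyphenatedcoloredword{hyphenation}
+%D \stoptyping
+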
%D \macros
%D {processtokens}
%D
@@ -1363,7 +1372,12 @@
\begingroup
\setbox\scratchbox\hbox{\settrialtypesetting#2{\savecurrentattributes{pic}}}%
\setbox\scratchbox\hbox{\restorecurrentattributes{pic}#1}%
- \ctxcommand{applytochars(\number\scratchbox,"\strippedcsname#2",true)}%
+ \clf_applytobox
+ method {char}%
+ box \scratchbox
+ command {\checkedstrippedcsname#2}%
+ nested true%
+ \relax
\endgroup}
\unexpanded\def\processisolatedwords#1#2%
@@ -1371,34 +1385,88 @@
\begingroup
\setbox\scratchbox\hbox{\settrialtypesetting#2{\savecurrentattributes{pic}}}%
\setbox\scratchbox\hbox{\restorecurrentattributes{pic}#1}%
- \ctxcommand{applytowords(\number\scratchbox,"\strippedcsname#2",true)}%
+ \clf_applytobox
+ method {word}%
+ box \scratchbox
+ command {\checkedstrippedcsname#2}%
+ nested true%
+ \relax
\endgroup}
-\unexpanded\def\processwords#1%
- {\processisolatedwords{#1}\processword}
-
-\let\processword\relax
-
-\unexpanded\def\applytosplitstringchar#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","char")}}
-\unexpanded\def\applytosplitstringword#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","word")}}
-\unexpanded\def\applytosplitstringline#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","line")}}
-
-\unexpanded\def\applytosplitstringcharspaced#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","char",true)}}
-\unexpanded\def\applytosplitstringwordspaced#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","word",true)}}
-\unexpanded\def\applytosplitstringlinespaced#1#2{\dontleavehmode\ctxcommand{processsplit(\!!bs#2\!!es,"\strippedcsname#1","line",true)}}
-
%D A variant:
\unexpanded\def\applytocharacters#1%
{\dontleavehmode
- \dowithnextbox{\ctxcommand{applytochars(\number\nextbox,"\strippedcsname#1",true)}}%
+ \dowithnextbox{\clf_applytobox
+ method {char}%
+ box \nextbox
+ command {\checkedstrippedcsname#1}%
+ nested true%
+ \relax}%
\hbox}
\unexpanded\def\applytowords#1%
{\dontleavehmode
- \dowithnextbox{\ctxcommand{applytowords(\number\nextbox,"\strippedcsname#1",true)}}%
+ \dowithnextbox{\clf_applytobox
+ method {word}%
+ box \nextbox
+ command {\checkedstrippedcsname#1}%
+ nested true%
+ \relax}%
\hbox}
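+
+%D A minimal usage sketch; \type {\ruledhbox} is just one possible command that
+%D takes one argument and visualizes the pieces:
+%D
+%D \starttyping
+%D \applytocharacters\ruledhbox{box each character}
+%D \applytowords     \ruledhbox{box each word}
+%D \stoptyping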
+%D The old call:
+
+\unexpanded\def\processwords#1%
+ {\processisolatedwords{#1}\processword}
+
+\let\processword\relax
+
+\unexpanded\def\applytosplitstringchar#1#2%
+ {\dontleavehmode\clf_processsplit
+ data {#2}%
+ command {\checkedstrippedcsname#1}%
+ method {char}%
+ \relax}
+
+\unexpanded\def\applytosplitstringword#1#2%
+ {\dontleavehmode\clf_processsplit
+ data {#2}%
+ command {\checkedstrippedcsname#1}%
+ method {word}%
+ \relax}
+
+\unexpanded\def\applytosplitstringline#1#2%
+ {\dontleavehmode\clf_processsplit
+ data {#2}%
+ command {\checkedstrippedcsname#1}%
+ method {line}%
+ \relax}
+
+\unexpanded\def\applytosplitstringcharspaced#1#2%
+ {\dontleavehmode\clf_processsplit
+ data {#2}%
+ command {\checkedstrippedcsname#1}%
+ method {char}%
+ spaced true%
+ \relax}
+
+\unexpanded\def\applytosplitstringwordspaced#1#2%
+ {\dontleavehmode\clf_processsplit
+ data {#2}%
+ command {\checkedstrippedcsname#1}%
+ method {word}%
+ spaced true%
+ \relax}
+
+\unexpanded\def\applytosplitstringlinespaced#1#2%
+ {\dontleavehmode\clf_processsplit
+ data {#2}%
+ command {\checkedstrippedcsname#1}%
+ method {line}%
+ spaced true%
+ \relax}
+
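+%D A minimal usage sketch; \type {\MyWord} is a made-up helper, any command that
+%D takes one argument will do:
+%D
+%D \starttyping
+%D \def\MyWord#1{[#1]} % made-up helper
+%D \applytosplitstringwordspaced\MyWord{some more words}
+%D \stoptyping
+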
%D \macros
%D {sbox}
%D
@@ -1795,7 +1863,7 @@
\box\d_syst_boxes_separator
\fi
\egroup
- \ctxcommand{hboxtovbox(\number\scratchbox)}%
+ \clf_hboxtovbox\scratchbox
\box\scratchbox
\endgroup}
@@ -1849,7 +1917,7 @@
\unexpanded\def\stophboxestohbox
{\egroup
- \ctxcommand{vboxlisttohbox(\number\scratchbox,\number\nextbox,\number\dimexpr\hboxestohboxslack)}%
+ \clf_vboxlisttohbox\scratchbox\nextbox\dimexpr\hboxestohboxslack\relax
\dontleavehmode
\unhbox\nextbox
\removeunwantedspaces
@@ -2024,7 +2092,7 @@
{\bgroup
\def\syst_boxes_process_indeed{#1}% #1 can be redefined halfway
\setbox\processbox\emptybox
- \doifnextbgroupelse\syst_boxes_process_yes\syst_boxes_process_nop}
+ \doifelsenextbgroup\syst_boxes_process_yes\syst_boxes_process_nop}
\def\syst_boxes_process_yes
{\dowithnextboxcs\syst_boxes_process_content\hbox}
@@ -2032,7 +2100,7 @@
\def\syst_boxes_process_content
{\removeunwantedspaces
\syst_boxes_process_indeed % takes \nextbox makes \processbox
- \doifnextbgroupelse\syst_boxes_process_yes\syst_boxes_process_nop}
+ \doifelsenextbgroup\syst_boxes_process_yes\syst_boxes_process_nop}
\unexpanded\def\syst_boxes_process_nop
{\removeunwantedspaces
@@ -2489,13 +2557,15 @@
\copy\csname\??stackbox#1:#2\endcsname
\fi}}
-\unexpanded\def\doifboxelse#1#2#3#4%
+\unexpanded\def\doifelsebox#1#2#3#4%
{\ifcsname\??stackbox#1:#2\endcsname
\ifvoid\csname\??stackbox#1:#2\endcsname#4\else#3\fi
\else
#4%
\fi}
+\let\doifboxelse\doifelsebox
+
%D This one is cheaper (the above is no longer used that much):
\installcorenamespace {boxstack}
@@ -2525,9 +2595,6 @@
\unexpanded\def\globalpushbox{\syst_boxes_push\global}
\unexpanded\def\globalpopbox {\syst_boxes_pop \global}
-% \unexpanded\def\pushbox#1{\ctxcommand{pushbox(\number#1)}}
-% \unexpanded\def\popbox #1{\ctxcommand{popbox(\number#1)}}
-
%D \macros
%D {removedepth, obeydepth}
%D
@@ -2776,12 +2843,16 @@
% {\dowithnextbox{\edef#1{\syst_boxes_nodestostring}}\hbox}
%
% \def\syst_boxes_nodestostring
-% {\ctxcommand{boxtostring(\number\nextbox)}}
+% {\clf_boxtostring\nextbox}
\unexpanded\def\nodestostring#1#2% more tolerant for #2=\cs
{\begingroup
\setbox\nextbox\hbox{#2}%
- \normalexpanded{\endgroup\edef\noexpand#1{\ctxcommand{boxtostring(\number\nextbox)}}}}
+ \normalexpanded{\endgroup\edef\noexpand#1{\clf_boxtostring\nextbox}}}
+
+%D Even more dirty:
+
+\let\hyphenatedhbox\hbox
\protect \endinput
diff --git a/tex/context/base/supp-mat.mkiv b/tex/context/base/supp-mat.mkiv
index f77ee3454..925f25cc4 100644
--- a/tex/context/base/supp-mat.mkiv
+++ b/tex/context/base/supp-mat.mkiv
@@ -53,6 +53,36 @@
\let\normalstartdmath \Ustartdisplaymath
\let\normalstopdmath \Ustopdisplaymath
+% \unexpanded\def\Ustartdisplaymath
+% {\ifinner
+% \ifhmode
+% \normalUstartmath
+% \let\Ustopdisplaymath\normalUstopmath
+% \else
+% \normalUstartdisplaymath
+% \let\Ustopdisplaymath\normalUstopdisplaymath
+% \fi
+% \else
+% \normalUstartdisplaymath
+% \let\Ustopdisplaymath\normalUstopdisplaymath
+% \fi}
+
+\unexpanded\def\Ucheckedstartdisplaymath
+ {\ifinner
+ \ifhmode
+ \normalUstartmath
+ \let\Ucheckedstopdisplaymath\normalUstopmath
+ \else
+ \normalUstartdisplaymath
+ \let\Ucheckedstopdisplaymath\normalUstopdisplaymath
+ \fi
+ \else
+ \normalUstartdisplaymath
+ \let\Ucheckedstopdisplaymath\normalUstopdisplaymath
+ \fi}
+
+\let\Ucheckedstopdisplaymath\relax
+
\def\normalmathaligntab{&} % \let\normalmathaligntab\aligntab does not work well in a let to & (a def works ok)
\let\normalsuper \Usuperscript % obsolete
@@ -60,8 +90,8 @@
\let\startimath \Ustartmath
\let\stopimath \Ustopmath
-\let\startdmath \Ustartdisplaymath
-\let\stopdmath \Ustopmath
+\let\startdmath \Ustartdisplaymath % \Ucheckedstartdisplaymath
+\let\stopdmath \Ustopdisplaymath % \Ucheckedstopdisplaymath
\unexpanded\def\mathematics#1{\relax \ifmmode#1\else\normalstartimath#1\normalstopimath\fi}
\unexpanded\def\displaymath#1{\noindent \ifmmode#1\else\normalstartdmath#1\normalstopdmath\fi}
diff --git a/tex/context/base/supp-ran.lua b/tex/context/base/supp-ran.lua
index 7997db8f6..4968e8cfc 100644
--- a/tex/context/base/supp-ran.lua
+++ b/tex/context/base/supp-ran.lua
@@ -10,23 +10,31 @@ if not modules then modules = { } end modules ['supp-ran'] = {
local report_system = logs.reporter("system","randomizer")
-local math = math
-local context, commands = context, commands
+local trace_random = false trackers.register("system.randomizer", function(v) trace_random = v end)
+local trace_random_mp = false trackers.register("system.randomizer.mp",function(v) trace_random_mp = v end)
-local random, randomseed, round, seed, last = math.random, math.randomseed, math.round, false, 1
+local insert, remove = table.insert, table.remove
-local maxcount = 2^30-1 -- 1073741823
+local math = math
+local context = context
+local implement = interfaces.implement
-local function setrandomseedi(n,comment)
- if not n then
- -- n = 0.5 -- hack
- end
+local random = math.random
+local randomseed = math.randomseed
+local round = math.round
+local stack = { }
+local last = 1
+local maxcount = 2^30-1 -- 1073741823
+
+local function setrandomseedi(n)
if n <= 1 then
n = n * maxcount
+ elseif n < 1000 then
+ n = n * 1000
end
n = round(n)
- if false then
- report_system("setting seed to %s (%s)",n,comment or "normal")
+ if trace_random then
+ report_system("setting seed to %s",n)
end
randomseed(n)
last = random(0,maxcount) -- we need an initial value
@@ -34,40 +42,66 @@ end
math.setrandomseedi = setrandomseedi
-function commands.getrandomcounta(min,max)
+local function getrandomnumber(min,max)
last = random(min,max)
- context(last)
-end
-
-function commands.getrandomcountb(min,max)
- last = random(min,max)/65536
- context(last)
+ return last
end
-function commands.setrandomseed(n)
+local function setrandomseed(n)
last = n
setrandomseedi(n)
end
-function commands.getrandomseed(n)
- context(last)
+local function getrandomseed()
+ return last
+end
+
+local function getmprandomnumber()
+ last = random(0,4095)
+ if trace_random_mp then
+ report_system("using mp seed %s",last)
+ end
+ return last
end
-- maybe stack
-function commands.freezerandomseed(n)
- if seed == false or seed == nil then
- seed = last
- setrandomseedi(seed,"freeze",seed)
+local function pushrandomseed()
+ insert(stack,last)
+ if trace_random then
+ report_system("pushing seed %s",last)
end
- if n then
- randomseed(n)
+end
+
+local function reuserandomseed(n)
+ local seed = stack[#stack]
+ if seed then
+ if trace_random then
+            report_system("reusing seed %s",seed)
+ end
+ randomseed(seed)
end
end
-function commands.defrostrandomseed()
- if seed ~= false then
- setrandomseedi(seed,"defrost",seed) -- was last (bug)
- seed = false
+local function poprandomseed()
+ local seed = remove(stack)
+ if seed then
+ if trace_random then
+ report_system("popping seed %s",seed)
+ end
+ randomseed(seed)
end
end
+
+-- todo: also open up in utilities.randomizer.*
+
+implement { name = "getrandomnumber", actions = { getrandomnumber, context }, arguments = { "integer", "integer" } }
+implement { name = "getrandomdimen", actions = { getrandomnumber, context }, arguments = { "dimen", "dimen" } }
+implement { name = "getrandomfloat", actions = { getrandomnumber, context }, arguments = { "number", "number" } }
+implement { name = "getmprandomnumber", actions = { getmprandomnumber, context } }
+implement { name = "setrandomseed", actions = { setrandomseed }, arguments = { "integer" } }
+implement { name = "getrandomseed", actions = { getrandomseed, context } }
+implement { name = "pushrandomseed", actions = { pushrandomseed } }
+implement { name = "poprandomseed", actions = { poprandomseed } }
+implement { name = "reuserandomseed", actions = { reuserandomseed } }
+
diff --git a/tex/context/base/supp-ran.mkiv b/tex/context/base/supp-ran.mkiv
index f5466a0e1..f7cfd6e73 100644
--- a/tex/context/base/supp-ran.mkiv
+++ b/tex/context/base/supp-ran.mkiv
@@ -18,13 +18,22 @@
\registerctxluafile{supp-ran}{1.001}
-\unexpanded\def\getrandomcount #1#2#3{#1=\ctxcommand{getrandomcounta(\number#2,\number#3)}\relax}
-\unexpanded\def\getrandomdimen #1#2#3{#1=\ctxcommand{getrandomcounta(\number\dimexpr#2,\number\dimexpr#3)}\scaledpoint\relax}
-\unexpanded\def\getrandomnumber#1#2#3{\edef#1{\ctxcommand{getrandomcounta(\number#2,\number#3)}}}
-\unexpanded\def\getrandomfloat #1#2#3{\edef#1{\ctxcommand{getrandomcountb(\number\dimexpr#2\points,\number\dimexpr#3\points)}}}
-\unexpanded\def\setrandomseed #1{\ctxcommand{setrandomseed(\number#1)}}
-\unexpanded\def\getrandomseed #1{\edef#1{\ctxcommand{getrandomseed()}}}
-\unexpanded\def\freezerandomseed {\ctxcommand{freezerandomseed()}}
-\unexpanded\def\defrostrandomseed {\ctxcommand{defrostrandomseed()}}
-
-\endinput
+\unprotect
+
+\unexpanded\def\getrandomcount #1#2#3{#1=\clf_getrandomnumber#2 #3\relax}
+\unexpanded\def\getrandomdimen #1#2#3{#1=\clf_getrandomdimen#2 #3 \scaledpoint\relax}
+\unexpanded\def\getrandomnumber#1#2#3{\edef#1{\clf_getrandomnumber#2 #3}}
+\unexpanded\def\getrandomfloat #1#2#3{\edef#1{\clf_getrandomfloat#2 #3}}
+\unexpanded\def\setrandomseed #1{\clf_setrandomseed#1\relax}
+\unexpanded\def\getrandomseed #1{\edef#1{\clf_getrandomseed}}
+\unexpanded\def\pushrandomseed {\clf_pushrandomseed}
+\unexpanded\def\poprandomseed {\clf_poprandomseed}
+\unexpanded\def\reuserandomseed {\clf_reuserandomseed} % within push/pop
+
+\let\freezerandomseed \pushrandomseed
+\let\defrostrandomseed\poprandomseed
+
+\def\randomnumber #1#2{\clf_getrandomnumber\numexpr#1\relax\numexpr#2\relax}
+\let\mprandomnumber \clf_getmprandomnumber
+
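+%D A minimal usage sketch of the seed stack; the macro names \type {\FirstPick}
+%D and \type {\SecondPick} are made up:
+%D
+%D \starttyping
+%D \pushrandomseed
+%D \getrandomnumber\FirstPick {1}{100}
+%D \reuserandomseed % reseed with the pushed value
+%D \getrandomnumber\SecondPick{1}{100}
+%D \poprandomseed
+%D \stoptyping
+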
+\protect \endinput
diff --git a/tex/context/base/symb-imp-cc.mkiv b/tex/context/base/symb-imp-cc.mkiv
index 45ea97732..e753d695f 100644
--- a/tex/context/base/symb-imp-cc.mkiv
+++ b/tex/context/base/symb-imp-cc.mkiv
@@ -11,8 +11,10 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+%D LS/HH: This font has changed over time so there might be differences
+%D in rendering. The otf, ttf and type1 versions can differ too.
-\doiffontpresentelse{ccicons.otf} { % redone ctan version
+\doifelsefontpresent{ccicons.otf} { % redone ctan version
\definefontfeature
[creativecommons]
[mode=base,
diff --git a/tex/context/base/symb-ini.lua b/tex/context/base/symb-ini.lua
index 9586338be..6a4a18825 100644
--- a/tex/context/base/symb-ini.lua
+++ b/tex/context/base/symb-ini.lua
@@ -6,20 +6,27 @@ if not modules then modules = { } end modules ['symb-ini'] = {
license = "see context related readme files"
}
-local context, commands = context, commands
+local context = context
+local variables = interfaces.variables
-local variables = interfaces.variables
+fonts = fonts or { } -- brrrr
-fonts = fonts or { } -- brrrr
+local symbols = fonts.symbols or { }
+fonts.symbols = symbols
-local symbols = fonts.symbols or { }
-fonts.symbols = symbols
+local listitem = utilities.parsers.listitem
+local uselibrary = resolvers.uselibrary
local report_symbols = logs.reporter ("fonts","symbols")
local status_symbols = logs.messenger("fonts","symbols")
-local patterns = { "symb-imp-%s.mkiv", "symb-imp-%s.tex", "symb-%s.mkiv", "symb-%s.tex" }
-local listitem = utilities.parsers.listitem
+local patterns = {
+ "symb-imp-%s.mkiv",
+ "symb-imp-%s.tex",
+ -- obsolete:
+ "symb-%s.mkiv",
+ "symb-%s.tex"
+}
local function action(name,foundname)
-- context.startnointerference()
@@ -37,7 +44,7 @@ end
function symbols.uselibrary(name)
if name ~= variables.reset then
for name in listitem(name) do
- commands.uselibrary {
+ uselibrary {
name = name,
patterns = patterns,
action = action,
@@ -48,4 +55,8 @@ function symbols.uselibrary(name)
end
end
-commands.usesymbols = symbols.uselibrary
+interfaces.implement {
+ name = "usesymbols",
+ actions = symbols.uselibrary,
+ arguments = "string",
+}
diff --git a/tex/context/base/symb-ini.mkiv b/tex/context/base/symb-ini.mkiv
index e957e68c1..40a0af6d6 100644
--- a/tex/context/base/symb-ini.mkiv
+++ b/tex/context/base/symb-ini.mkiv
@@ -74,9 +74,12 @@
\letvalue{\??symbolset}\empty
-\unexpanded\def\doifinsymbolsetelse#1#2{\ifcsname\??symbol#1:#2\endcsname\expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments\fi}
+\unexpanded\def\doifelseinsymbolset#1#2{\ifcsname\??symbol#1:#2\endcsname\expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments\fi}
\unexpanded\def\doifinsymbolset #1#2{\ifcsname\??symbol#1:#2\endcsname\expandafter\firstofoneargument \else\expandafter\gobbleoneargument \fi}
-\unexpanded\def\doifsymbolsetelse #1{\ifcsname\??symbolset#1\endcsname\expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments\fi}
+\unexpanded\def\doifelsesymbolset #1{\ifcsname\??symbolset#1\endcsname\expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments\fi}
+
+\let\doifinsymbolsetelse\doifelseinsymbolset
+\let\doifsymbolsetelse \doifelsesymbolset
\def\symbolset#1{\csname\??symbolset\ifcsname\??symbolset#1\endcsname#1\fi\endcsname} % no [#1], to be used in commalists etc
@@ -216,7 +219,7 @@
\def\symb_fetch_second#1%
{\doifinsymbolset\empty\currentsymbol{\settrue\c_symb_found}}
-\def\doifsymboldefinedelse#1%
+\unexpanded\def\doifelsesymboldefined#1%
{\begingroup
\edef\currentsymbol{#1}%
\let\symb_fetch\symb_fetch_first
@@ -233,6 +236,8 @@
\fi
\fi}
+\let\doifsymboldefinedelse\doifelsesymboldefined
+
%D \macros
%D {setupsymbolset,startsymbolset}
%D
@@ -300,7 +305,7 @@
%D
%D \showsetup{usesymbols}
-\unexpanded\def\usesymbols[#1]{\ctxcommand{usesymbols("#1")}}
+\unexpanded\def\usesymbols[#1]{\clf_usesymbols{#1}}
%D As long as symbols are linked to levels or numbers, we can
%D also use the conversion mechanism, but in for instance the
diff --git a/tex/context/base/syst-aux.lua b/tex/context/base/syst-aux.lua
index 6b5e18d16..fff9bbb4c 100644
--- a/tex/context/base/syst-aux.lua
+++ b/tex/context/base/syst-aux.lua
@@ -11,42 +11,66 @@ if not modules then modules = { } end modules ['syst-aux'] = {
-- utfmatch(str,"(.?)(.*)$")
-- utf.sub(str,1,1)
-local commands, context = commands, context
-
-local settings_to_array = utilities.parsers.settings_to_array
-local format = string.format
+local tonumber = tonumber
local utfsub = utf.sub
-local P, S, C, Cc, Cs, Carg, lpegmatch, utf8char = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Carg, lpeg.match, lpeg.patterns.utf8char
-
+local P, S, R, C, Cc, Cs, Carg, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Carg, lpeg.match
-local setvalue = context.setvalue
-
-local pattern = C(utf8char^-1) * C(P(1)^0)
+local context = context
+local implement = interfaces.implement
+local formatters = string.formatters
+local setcatcode = tex.setcatcode
+local utf8character = lpeg.patterns.utf8character
+local settings_to_array = utilities.parsers.settings_to_array
+local setmacro = interfaces.setmacro
+
+local pattern = C(utf8character^-1) * C(P(1)^0)
+
+implement {
+ name = "getfirstcharacter",
+ arguments = "string",
+ actions = function(str)
+ local first, rest = lpegmatch(pattern,str)
+ setmacro("firstcharacter",first)
+ setmacro("remainingcharacters",rest)
+ end
+}
-function commands.getfirstcharacter(str)
- local first, rest = lpegmatch(pattern,str)
- setvalue("firstcharacter",first)
- setvalue("remainingcharacters",rest)
-end
+implement {
+ name = "thefirstcharacter",
+ arguments = "string",
+ actions = function(str)
+ local first, rest = lpegmatch(pattern,str)
+ context(first)
+ end
+}
-function commands.thefirstcharacter(str)
- local first, rest = lpegmatch(pattern,str)
- context(first)
-end
-function commands.theremainingcharacters(str)
- local first, rest = lpegmatch(pattern,str)
- context(rest)
-end
+implement {
+ name = "theremainingcharacters",
+ arguments = "string",
+ actions = function(str)
+ local first, rest = lpegmatch(pattern,str)
+ context(rest)
+ end
+}
-local pattern = C(utf8char^-1)
+local pattern = C(utf8character^-1)
+local ctx_doifelse = commands.doifelse
-function commands.doiffirstcharelse(chr,str)
- commands.doifelse(lpegmatch(pattern,str) == chr)
-end
+implement {
+ name = "doifelsefirstchar",
+ arguments = { "string", "string" },
+ actions = function(str,chr)
+ ctx_doifelse(lpegmatch(pattern,str) == chr)
+ end
+}
-function commands.getsubstring(str,first,last)
- context(utfsub(str,tonumber(first),tonumber(last)))
-end
+implement {
+ name = "getsubstring",
+ arguments = { "string", "string", "string" },
+ actions = function(str,first,last)
+ context(utfsub(str,tonumber(first),tonumber(last)))
+ end
+}
-- function commands.addtocommalist(list,item)
-- if list == "" then
@@ -78,21 +102,40 @@ end
-- end
-- end
-local pattern = (C((1-P("%"))^1) * Carg(1)) /function(n,d) return format("%.0fsp",d * tonumber(n)/100) end * P("%") * P(-1)
+local pattern = (C((1-P("%"))^1) * Carg(1)) / function(n,d)
+    return formatters["%.0fsp"](d * tonumber(n)/100) -- .0 ?
+end * P("%") * P(-1)
--- commands.percentageof("10%",65536*10)
+-- percentageof("10%",65536*10)
-function commands.percentageof(str,dim)
- context(lpegmatch(pattern,str,1,dim) or str)
-end
+implement {
+ name = "percentageof",
+ arguments = { "string", "dimen" },
+ actions = function(str,dim)
+ context(lpegmatch(pattern,str,1,dim) or str)
+ end
+}
-- \gdef\setpercentdimen#1#2%
-- {#1=\ctxcommand{percentageof("#2",\number#1)}\relax}
-local spaces = P(" ")^0/""
+local spaces = P(" ")^0 / ""
+local nohash = 1 - P("#")
+local digit = R("09")
+local double = P("##") / "#"
+local single = P("#")
+local sentinel = spaces * (nohash^1 / "\\%0")
+local sargument = (single * digit)^1
+local dargument = (double * digit)^1
-local pattern = Cs(
- ( P("global") / "\\global" )^0
+local usespaces = nil
+local texpreamble = nil
+
+local pattern = Cs( -- ^-1
+ ( P("spaces") / function() usespaces = true return "" end )^0
+ * spaces
+ * ( P("nospaces") / function() usespaces = false return "" end )^0
+ * spaces
+ * ( P("global") / "\\global" )^0
* spaces
* ( P("unexpanded") / "\\unexpanded" )^0
* spaces
@@ -102,14 +145,85 @@ local pattern = Cs(
* spaces
* ( P((1-S(" #"))^1) / "def\\csname %0\\endcsname" )
* spaces
- * Cs( (P("##")/"#" + P(1))^0 )
+ * (
+ -- (double * digit)^1 * sentinel^-1 * double^-1
+ -- + (single * digit)^1 * sentinel^-1 * single^-1
+ ( P("[") * dargument * P("]") + dargument)^1 * sentinel^-1 * double^-1
+ + ( P("[") * sargument * P("]") + sargument)^1 * sentinel^-1 * single^-1
+ + sentinel^-1 * (double+single)^-1
+ )
)
-function commands.thetexdefinition(str)
- context(lpegmatch(pattern,str))
+local ctx_dostarttexdefinition = context.dostarttexdefinition
+
+local function texdefinition_one(str)
+ usespaces = nil
+ texpreamble = lpegmatch(pattern,str)
+ if usespaces == true then
+ setcatcode(32,10) -- space
+ setcatcode(13, 5) -- endofline
+ elseif usespaces == false then
+ setcatcode(32, 9) -- ignore
+ setcatcode(13, 9) -- ignore
+ else
+ -- this is default
+ -- setcatcode(32,10) -- space
+ -- setcatcode(13, 9) -- ignore
+ end
+ ctx_dostarttexdefinition()
+end
+
+local function texdefinition_two()
+ context(texpreamble)
end
-local upper, lower = utf.upper, utf.lower
+implement { name = "texdefinition_one", actions = texdefinition_one, scope = "private", arguments = "string" }
+implement { name = "texdefinition_two", actions = texdefinition_two, scope = "private" }
+
+implement { name = "upper", arguments = "string", actions = { utf.upper, context } }
+implement { name = "lower", arguments = "string", actions = { utf.lower, context } }
+implement { name = "strip", arguments = "string", actions = { string.strip, context } } -- or utf.strip
+
+implement {
+ name = "converteddimen",
+ arguments = { "dimen", "string" },
+ actions = function(dimen,unit)
+ context(number.todimen(dimen,unit or "pt","%0.5f")) -- no unit appended (%F)
+ end
+}
+
+-- where, not really the best spot for this:
-function commands.upper(s) context(upper(s)) end
-function commands.lower(s) context(lower(s)) end
+implement {
+ name = "immediatemessage",
+ arguments = { "'message'", "string" },
+ actions = logs.status
+}
+
+implement {
+ name = "resettimer",
+ actions = function()
+ statistics.resettiming("whatever")
+ statistics.starttiming("whatever")
+ end
+}
+
+implement {
+ name = "elapsedtime",
+ actions = function()
+ statistics.stoptiming("whatever")
+ context(statistics.elapsedtime("whatever"))
+ end
+}
+
+local accuracy = table.setmetatableindex(function(t,k)
+ local v = formatters["%0." ..k .. "f"]
+ t[k] = v
+ return v
+end)
+
+implement {
+ name = "rounded",
+ arguments = "integer",
+ actions = function(n,m) context(accuracy[n](m)) end
+}
diff --git a/tex/context/base/syst-aux.mkiv b/tex/context/base/syst-aux.mkiv
index c7be461a3..bd9b1630e 100644
--- a/tex/context/base/syst-aux.mkiv
+++ b/tex/context/base/syst-aux.mkiv
@@ -11,13 +11,17 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D There are some references to \LUA\ variants here but these concern
-%D (often old) experiments, moved from local test modules to here,
-%D cleaned up, but not really used. After all it's not that urgent
-%D and replacing helpers is a delicate process. Don't depend on it.
+%D There are some references to \LUA\ variants here but these concern (often old)
+%D experiments, moved from local test modules to here, cleaned up, but not really
+%D used. After all it's not that urgent and replacing helpers is a delicate process.
+%D Don't depend on it.
\registerctxluafile{syst-aux}{1.001}
+% A dedicated primitive \ifvoidmacro\cs == \ifx\cs\empty is some 10% faster but
+% probably not that noticeable in practice. An \ifvoidtoks might make sense but we
+% don't test that often for it (and it's more work to implement in the engine).
+
%D This is a stripped down combination of:
%D
%D \startitemize
@@ -26,20 +30,18 @@
%D \item \type {syst-new.tex}
%D \stopitemize
%D
-%D We keep them around (for \MKII) so you can find comments,
-%D experiences, intermediate versions and cleaner variants
-%D there (and also non-\ETEX\ variants).
+%D We keep them around (for \MKII) so you can find comments, experiences,
+%D intermediate versions and cleaner variants there (and also non-\ETEX\ variants).
%D
-%D Contrary to the older files, we now assume that this one
-%D is used in \CONTEXT\ and therefore we might also assume that
-%D some basic functionality is available.
+%D Contrary to the older files, we now assume that this one is used in \CONTEXT\ and
+%D therefore we might also assume that some basic functionality is available.
%D
-%D The original files contain previous implementations and notes about
-%D performance. This file will be stripped down in due time.
-
-%D Some of the macros here were only used in the bibliography module. They
-%D have been be moved to a separate syst module since the bib module is no
-%D longer using them. Some more will go away.
+%D The original files contain previous implementations and notes about performance.
+%D This file will be stripped down in due time.
+%D
+%D Some of the macros here were only used in the bibliography module. They have
+%D been moved to a separate syst module since the bib module is no longer using them.
+%D Some more will go away.
\unprotect
@@ -72,13 +74,20 @@
% %def\expunded#1{\normalexpanded\expandafter{#1}}
% \def\expunded#1{\expandafter\empty#1} % used within an edef anyway
-
%D As we don't have namespace definers yet, we use a special one:
-\newcount\c_syst_helpers_n_of_namespaces \c_syst_helpers_n_of_namespaces\pluseight % 1-8 reserved for catcodes
+\ifdefined\c_syst_helpers_n_of_namespaces
+
+ % lets plug in a better error message
+
+\else
+
+ \newcount\c_syst_helpers_n_of_namespaces \c_syst_helpers_n_of_namespaces\pluseight % 1-8 reserved for catcodes
-\def\v_interfaces_prefix_template_system{\number \c_syst_helpers_n_of_namespaces>>}
-%def\v_interfaces_prefix_template_system{\characters\c_syst_helpers_n_of_namespaces>>} % no \characters yet
+ \def\v_interfaces_prefix_template_system{\number \c_syst_helpers_n_of_namespaces>>}
+ %def\v_interfaces_prefix_template_system{\characters\c_syst_helpers_n_of_namespaces>>} % no \characters yet
+
+\fi
\unexpanded\def\installsystemnamespace#1% maybe move this to syst-ini
{\ifcsname ??#1\endcsname
@@ -128,10 +137,16 @@
\newif\if!!doned \newif\if!!donee \newif\if!!donef
\def\!!zerocount {0} % alongside \zerocount
-\def\!!minusone {-1} % alongside \minusone
-\def\!!plusone {1} % alongside \plusone
-\def\!!plustwo {2} % alongside \plustwo
-\def\!!plusthree {3} % alongside \plusthree
+\def\!!minusone {-1} % ...
+\def\!!plusone {1} % ...
+\def\!!plustwo {2} % ...
+\def\!!plusthree {3} % ...
+\def\!!plusfour {4} % ...
+\def\!!plusfive {5} % ...
+\def\!!plussix {6} % ...
+\def\!!plusseven {7} % ...
+\def\!!pluseight {8} % ...
+\def\!!plusnine {9} % alongside \plusnine
\setnewconstant \uprotationangle 0
\setnewconstant\rightrotationangle 90
@@ -272,12 +287,14 @@
%D {\localnext} because we don't want clashes with \type
%D {\next}.
-\unexpanded\def\doifnextcharelse#1#2#3% #1 should not be {} !
+\unexpanded\def\doifelsenextchar#1#2#3% #1 should not be {} !
{\let\charactertoken=#1% = needed here
\def\m_syst_action_yes{#2}%
\def\m_syst_action_nop{#3}%
\futurelet\nexttoken\syst_helpers_inspect_next_character}
+\let\doifnextcharelse\doifelsenextchar
+
\def\syst_helpers_inspect_next_character
{\ifx\nexttoken\blankspace
\expandafter\syst_helpers_reinspect_next_character
@@ -312,18 +329,21 @@
\let\syst_helpers_next_optional_character_token=[
-\unexpanded\def\doifnextoptionalelse#1#2%
+\unexpanded\def\doifelsenextoptional#1#2%
{\def\m_syst_action_yes{#1}%
\def\m_syst_action_nop{#2}%
\let\if_next_blank_space_token\iffalse
\futurelet\nexttoken\syst_helpers_inspect_next_optional_character}
-\unexpanded\def\doifnextoptionalcselse#1#2% \cs \cs (upto 10% faster)
+\unexpanded\def\doifelsenextoptionalcs#1#2% \cs \cs (upto 10% faster)
{\let\m_syst_action_yes#1%
\let\m_syst_action_nop#2%
\let\if_next_blank_space_token\iffalse
\futurelet\nexttoken\syst_helpers_inspect_next_optional_character}
+\let\doifnextoptionalelse \doifelsenextoptional
+\let\doifnextoptionalcselse\doifelsenextoptionalcs
+
\def\syst_helpers_inspect_next_optional_character
{\ifx\nexttoken\blankspace
\expandafter\syst_helpers_reinspect_next_optional_character
@@ -340,12 +360,21 @@
\let\syst_helpers_next_bgroup_character_token\bgroup
-\unexpanded\def\doifnextbgroupelse#1#2%
+\unexpanded\def\doifelsenextbgroup#1#2%
{\def\m_syst_action_yes{#1}%
\def\m_syst_action_nop{#2}%
\let\if_next_blank_space_token\iffalse
\futurelet\nexttoken\syst_helpers_inspect_next_bgroup_character}
+\unexpanded\def\doifelsenextbgroupcs#1#2%
+ {\let\m_syst_action_yes#1%
+ \let\m_syst_action_nop#2%
+ \let\if_next_blank_space_token\iffalse
+ \futurelet\nexttoken\syst_helpers_inspect_next_bgroup_character}
+
+\let\doifnextbgroupelse \doifelsenextbgroup
+\let\doifnextbgroupcselse\doifelsenextbgroupcs
+
\def\syst_helpers_inspect_next_bgroup_character
{\ifx\nexttoken\blankspace
\expandafter\syst_helpers_reinspect_next_bgroup_character
@@ -362,12 +391,14 @@
\let\syst_helpers_next_parenthesis_character_token(
-\unexpanded\def\doifnextparenthesiselse#1#2%
+\unexpanded\def\doifelsenextparenthesis#1#2%
{\def\m_syst_action_yes{#1}%
\def\m_syst_action_nop{#2}%
\let\if_next_blank_space_token\iffalse
\futurelet\nexttoken\syst_helpers_inspect_next_parenthesis_character}
+\let\doifnextparenthesiselse\doifelsenextparenthesis
+
\def\syst_helpers_inspect_next_parenthesis_character
{\ifx\nexttoken\blankspace
\expandafter\syst_helpers_reinspect_next_parenthesis_character
@@ -384,16 +415,19 @@
%D The next one is handy in predictable situations:
-\unexpanded\def\doiffastoptionalcheckelse#1#2%
+\unexpanded\def\doifelsefastoptionalcheck#1#2%
{\def\m_syst_action_yes{#1}%
\def\m_syst_action_nop{#2}%
\futurelet\nexttoken\syst_helpers_do_if_fast_optional_check_else}
-\unexpanded\def\doiffastoptionalcheckcselse#1#2% \cs \cs
+\unexpanded\def\doifelsefastoptionalcheckcs#1#2% \cs \cs
{\let\m_syst_action_yes#1%
\let\m_syst_action_nop#2%
\futurelet\nexttoken\syst_helpers_do_if_fast_optional_check_else}
+\let\doiffastoptionalcheckelse \doifelsefastoptionalcheck
+\let\doiffastoptionalcheckcselse\doifelsefastoptionalcheckcs
+
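+%D A minimal sketch of the intended use; the \type {\my_command_...} helpers are
+%D made up:
+%D
+%D \starttyping
+%D \unexpanded\def\MyCommand
+%D   {\doifelsefastoptionalcheckcs\my_command_yes\my_command_nop}
+%D
+%D \def\my_command_yes[#1]{...}
+%D \def\my_command_nop    {...}
+%D \stoptyping
+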
\def\syst_helpers_do_if_fast_optional_check_else
{\ifx\nexttoken\syst_helpers_next_optional_character_token
\expandafter\m_syst_action_yes
@@ -562,12 +596,12 @@
\suppressifcsnameerror\plusone
-\def\doifundefinedelse#1%
+\def\doifelseundefined#1%
{\ifcsname#1\endcsname
\expandafter\secondoftwoarguments\else\expandafter\firstoftwoarguments
\fi}
-\def\doifdefinedelse#1%
+\def\doifelsedefined#1%
{\ifcsname#1\endcsname
\expandafter\firstoftwoarguments\else\expandafter\secondoftwoarguments
\fi}
@@ -582,6 +616,9 @@
\expandafter\firstofoneargument\else\expandafter\gobbleoneargument
\fi}
+\let\doifundefinedelse\doifelseundefined
+\let\doifdefinedelse \doifelsedefined
+
%D \macros
%D {letbeundefined}
%D
@@ -605,7 +642,7 @@
%D behavior in text and math mode, which was due to these grouping subtleties. We
%D therefore decided to use \type{\begingroup} instead of \type{\bgroup}.
-\unexpanded\def\doifalldefinedelse#1%
+\unexpanded\def\doifelsealldefined#1%
{\begingroup
\donetrue % we could use a reserved one and avoid the group
\processcommalist[#1]\syst_helpers_do_if_all_defined_else
@@ -615,6 +652,8 @@
\endgroup\expandafter\secondoftwoarguments
\fi}
+\let\doifalldefinedelse\doifelsealldefined
+
\def\syst_helpers_do_if_all_defined_else#1%
{\ifcsname#1\endcsname\else
\donefalse
@@ -678,7 +717,7 @@
%D
%D This time, the string is not expanded.
-\unexpanded\def\doifemptyelse#1%
+\unexpanded\def\doifelseempty#1%
{\def\m_syst_string_one{#1}%
\ifx\m_syst_string_one\empty
\expandafter\firstoftwoarguments
@@ -686,6 +725,8 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doifemptyelse\doifelseempty
+
\unexpanded\def\doifempty#1%
{\def\m_syst_string_one{#1}%
\ifx\m_syst_string_one\empty
@@ -751,7 +792,7 @@
\def\syst_helpers_re_do_if_in_set {\expandafter\syst_helpers_do_check_if_item_in_set \m_syst_string_two,],\relax}
\def\syst_helpers_re_do_if_not_in_set {\expandafter\syst_helpers_do_check_if_item_not_in_set \m_syst_string_two,],\relax}
-\unexpanded\def\doifinsetelse#1% make this two step too
+\unexpanded\def\doifelseinset#1% make this two step too
{\edef\m_syst_string_one{#1}%
\ifx\m_syst_string_one\empty
\expandafter\thirdofthreearguments
@@ -759,6 +800,8 @@
\expandafter\syst_helpers_do_if_in_set_else
\fi}
+\let\doifinsetelse\doifelseinset
+
\def\syst_helpers_do_if_in_set_else#1%
{\edef\m_syst_string_two{#1}%
\ifx\m_syst_string_two\empty
@@ -949,10 +992,12 @@
\fi\fi
#1#2}
-\unexpanded\def\doifcommonelse{\syst_helpers_do_do_if_common_else\firstoftwoarguments\secondoftwoarguments}
+\unexpanded\def\doifelsecommon{\syst_helpers_do_do_if_common_else\firstoftwoarguments\secondoftwoarguments}
\unexpanded\def\doifcommon {\syst_helpers_do_do_if_common_else\firstofoneargument \gobbleoneargument }
\unexpanded\def\doifnotcommon {\syst_helpers_do_do_if_common_else\gobbleoneargument \firstofoneargument }
+\let\doifcommonelse\doifelsecommon
+
%D \macros
%D {processcommalist,processcommacommand,quitcommalist,
%D processcommalistwithparameters}
@@ -1294,10 +1339,12 @@
\let\firstcharacter \empty
\let\remainingcharacters\empty
-\unexpanded\def\getfirstcharacter #1{\ctxcommand{getfirstcharacter(\!!bs#1\!!es)}}
-\unexpanded\def\doiffirstcharelse #1#2{\ctxcommand{doiffirstcharelse(\!!bs#1\!!es,\!!bs#2\!!es)}} % chr str
-\unexpanded\def\thefirstcharacter #1{\ctxcommand{thefirstcharacter(\!!bs#1\!!es)}}
-\unexpanded\def\theremainingcharacters#1{\ctxcommand{theremainingcharacters(\!!bs#1\!!es)}}
+\unexpanded\def\getfirstcharacter #1{\clf_getfirstcharacter{#1}}
+\unexpanded\def\doifelsefirstchar #1#2{\clf_doifelsefirstchar{#1}{#2}}
+\unexpanded\def\thefirstcharacter #1{\clf_thefirstcharacter{#1}}
+\unexpanded\def\theremainingcharacters#1{\clf_theremainingcharacters{#1}}
+
+\let\doiffirstcharelse\doifelsefirstchar
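+
+%D A minimal usage sketch (expected result shown in the comment):
+%D
+%D \starttyping
+%D \getfirstcharacter{hello}
+%D [\firstcharacter] [\remainingcharacters] % gives [h] [ello]
+%D \stoptyping
+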
%D \macros
%D {doifinstringelse, doifincsnameelse}
@@ -1311,7 +1358,7 @@
\let\m_syst_sub_string\empty
-\unexpanded\def\doifinstringelse#1%
+\unexpanded\def\doifelseinstring#1%
{\edef\m_syst_sub_string{#1}% expand #1 here
\ifx\m_syst_sub_string\empty
\expandafter\thirdofthreearguments
@@ -1319,6 +1366,8 @@
\expandafter\syst_helpers_do_if_in_string_else_indeed
\fi}
+\let\doifinstringelse\doifelseinstring
+
\unexpanded\def\syst_helpers_do_if_in_string_else_indeed#1%
{\syst_helpers_do_if_in_string_else\m_syst_sub_string{#1}%
\expandafter\firstoftwoarguments
@@ -1381,13 +1430,15 @@
{\unless\if##2@}%
\expandafter\syst_helpers_do_do_if_in_csname_else#2#1@@\_e_o_s_}
-\unexpanded\def\doifincsnameelse#1#2%
+\unexpanded\def\doifelseincsname#1#2%
{\normalexpanded{\syst_helpers_do_if_in_csname_else{#1}}{#2}%
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
+\let\doifincsnameelse\doifelseincsname
+
%D \macros
%D {doifnumberelse,doifnumber,doifnotnumber}
%D
@@ -1403,13 +1454,15 @@
%D The macro accepts \type {123}, \type {abc}, \type {{}}, \type {\getal} and
%D \type {\the\count...}. This macro is a rather dirty one.
-\def\doifnumberelse#1% does not accept counters (fully expandable)
+\def\doifelsenumber#1% does not accept counters (fully expandable)
{\ifcase0\ifcase1#1\or\or\or\or\or\or\or\or\or\else1\fi\space
\expandafter\secondoftwoarguments
\else
\expandafter\firstoftwoarguments
\fi}
+\let\doifnumberelse\doifelsenumber
+
\def\doifnumber#1%
{\ifcase0\ifcase1#1\or\or\or\or\or\or\or\or\or\else1\fi\space
\expandafter\firstofoneargument
@@ -1434,10 +1487,10 @@
%D \stoptyping
\def\percentdimen#1#2% dimen percentage (with %)
- {\dimexpr\ctxcommand{percentageof("#2",\number#1)}\relax}
+ {\dimexpr\clf_percentageof{#2}\dimexpr#1\relax}
\unexpanded\def\setpercentdimen#1#2% dimen percentage (with %)
- {#1=\ctxcommand{percentageof("#2",\number#1)}\relax}
+ {#1=\clf_percentageof{#2}\dimexpr#1\relax}
%D \macros
%D {makerawcommalist,
@@ -1457,7 +1510,7 @@
%D
%D \starttyping
%D \makerawcommalist[string,string,...]\stringlist
-%D \rawdoifinsetelse{string}{string,...}{...}{...}
+%D \rawdoifelseinset{string}{string,...}{...}{...}
%D \rawprocesscommalist[string,string,...]\commando
%D \rawprocessaction[x][a=>\a,b=>\b,c=>\c]
%D \stoptyping
@@ -1492,12 +1545,12 @@
\unexpanded\def\rawprocesscommacommand[#1]% not really needed
{\normalexpanded{\rawprocesscommalist[#1]}}
-% \def\rawdoifinsetelse#1#2{\doifinstringelse{,#1,}{,#2,}}
+% \def\rawdoifelseinset#1#2{\doifinstringelse{,#1,}{,#2,}}
% \def\rawdoifinset #1#2{\doifinstring {,#1,}{,#2,}}
\def\m_syst_two_commas{,,}
-\unexpanded\def\rawdoifinsetelse#1%
+\unexpanded\def\rawdoifelseinset#1%
{\edef\m_syst_sub_string{,#1,}% expand #1 here
\ifx\m_syst_sub_string\m_syst_two_commas
\expandafter\thirdofthreearguments
@@ -1505,6 +1558,8 @@
\expandafter\syst_helpers_raw_do_if_in_set_else
\fi}
+\let\rawdoifinsetelse\rawdoifelseinset
+
\unexpanded\def\syst_helpers_raw_do_if_in_set_else#1%
{\syst_helpers_do_if_in_string_else\m_syst_sub_string{,#1,}%
\expandafter\firstoftwoarguments
@@ -2170,7 +2225,7 @@
\unexpanded\def\dosingleempty#1%
{\syst_helpers_argument_reset
- \doifnextoptionalelse
+ \doifelsenextoptional
{\firstargumenttrue#1}%
{\syst_helpers_single_empty_one_nop#1}}
@@ -2182,13 +2237,13 @@
\unexpanded\def\dodoubleempty#1%
{\syst_helpers_argument_reset
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_double_empty_one_yes#1}%
{\syst_helpers_double_empty_one_nop#1}}
\def\syst_helpers_double_empty_one_yes#1[#2]%
{\firstargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\secondargumenttrue#1[{#2}]}%
{\syst_helpers_double_empty_two_nop#1{#2}}}
@@ -2212,19 +2267,19 @@
\unexpanded\def\dotripleempty#1%
{\syst_helpers_argument_reset
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_triple_empty_one_yes#1}%
{\syst_helpers_triple_empty_one_nop#1}}
\def\syst_helpers_triple_empty_one_yes#1[#2]%
{\firstargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_triple_empty_two_yes#1{#2}}%
{\syst_helpers_triple_empty_two_nop#1{#2}}}
\def\syst_helpers_triple_empty_two_yes#1#2[#3]%
{\secondargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\thirdargumenttrue#1[{#2}][{#3}]}%
{\syst_helpers_triple_empty_three_nop#1{#2}{#3}}}
@@ -2260,25 +2315,25 @@
\unexpanded\def\doquadrupleempty#1%
{\syst_helpers_argument_reset
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_quadruple_empty_one_yes#1}%
{\syst_helpers_quadruple_empty_one_nop#1}}
\def\syst_helpers_quadruple_empty_one_yes#1[#2]%
{\firstargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_quadruple_empty_two_yes#1{#2}}%
{\syst_helpers_quadruple_empty_two_nop#1{#2}}}
\def\syst_helpers_quadruple_empty_two_yes#1#2[#3]%
{\secondargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_quadruple_empty_three_yes#1{#2}{#3}}%
{\syst_helpers_quadruple_empty_three_nop#1{#2}{#3}}}
\def\syst_helpers_quadruple_empty_three_yes#1#2#3[#4]%
{\thirdargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\fourthargumenttrue#1[{#2}][{#3}][{#4}]}%
{\syst_helpers_quadruple_empty_four_nop#1{#2}{#3}{#4}}}
@@ -2327,31 +2382,31 @@
\unexpanded\def\doquintupleempty#1%
{\syst_helpers_argument_reset
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_quintuple_empty_one_yes#1}%
{\syst_helpers_quintuple_empty_one_nop#1}}
\def\syst_helpers_quintuple_empty_one_yes#1[#2]%
{\firstargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_quintuple_empty_two_yes#1{#2}}%
{\syst_helpers_quintuple_empty_two_nop#1{#2}}}
\def\syst_helpers_quintuple_empty_two_yes#1#2[#3]%
{\secondargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_quintuple_empty_three_yes#1{#2}{#3}}%
{\syst_helpers_quintuple_empty_three_nop#1{#2}{#3}}}
\def\syst_helpers_quintuple_empty_three_yes#1#2#3[#4]%
{\thirdargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_quintuple_empty_four_yes#1{#2}{#3}{#4}}%
{\syst_helpers_quintuple_empty_four_nop#1{#2}{#3}{#4}}}
\def\syst_helpers_quintuple_empty_four_yes#1#2#3#4[#5]%
{\fourthargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\fifthargumenttrue#1[{#2}][{#3}][{#4}][{#5}]}%
{\syst_helpers_quintuple_empty_five_nop#1{#2}{#3}{#4}{#5}}}
@@ -2414,37 +2469,37 @@
\unexpanded\def\dosixtupleempty#1%
{\syst_helpers_argument_reset
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_sixtuple_empty_one_yes#1}
{\syst_helpers_sixtuple_empty_one_nop#1}}
\def\syst_helpers_sixtuple_empty_one_yes#1[#2]%
{\firstargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_sixtuple_empty_two_yes#1{#2}}%
{\syst_helpers_sixtuple_empty_two_nop#1{#2}}}
\def\syst_helpers_sixtuple_empty_two_yes#1#2[#3]%
{\secondargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_sixtuple_empty_three_yes#1{#2}{#3}}%
{\syst_helpers_sixtuple_empty_three_nop#1{#2}{#3}}}
\def\syst_helpers_sixtuple_empty_three_yes#1#2#3[#4]%
{\thirdargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_sixtuple_empty_four_yes#1{#2}{#3}{#4}}%
{\syst_helpers_sixtuple_empty_four_nop#1{#2}{#3}{#4}}}
\def\syst_helpers_sixtuple_empty_four_yes#1#2#3#4[#5]%
{\fourthargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_sixtuple_empty_five_yes#1{#2}{#3}{#4}{#5}}%
{\syst_helpers_sixtuple_empty_five_nop#1{#2}{#3}{#4}{#5}}}
\def\syst_helpers_sixtuple_empty_five_yes#1#2#3#4#5[#6]%
{\fifthargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\sixthargumenttrue#1[{#2}][{#3}][{#4}][{#5}][{#6}]}%
{\syst_helpers_sixtuple_empty_six_nop#1{#2}{#3}{#4}{#5}{#6}}}
@@ -2522,43 +2577,43 @@
\unexpanded\def\doseventupleempty#1%
{\syst_helpers_argument_reset
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_seventuple_empty_one_yes#1}%
{\syst_helpers_seventuple_empty_one_nop#1}}
\def\syst_helpers_seventuple_empty_one_yes#1[#2]%
{\firstargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_seventuple_empty_two_yes#1{#2}}%
{\syst_helpers_seventuple_empty_two_nop#1{#2}}}
\def\syst_helpers_seventuple_empty_two_yes#1#2[#3]%
{\secondargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_seventuple_empty_three_yes#1{#2}{#3}}%
{\syst_helpers_seventuple_empty_three_nop#1{#2}{#3}}}
\def\syst_helpers_seventuple_empty_three_yes#1#2#3[#4]%
{\thirdargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_seventuple_empty_four_yes#1{#2}{#3}{#4}}%
{\syst_helpers_seventuple_empty_four_nop#1{#2}{#3}{#4}}}
\def\syst_helpers_seventuple_empty_four_yes#1#2#3#4[#5]%
{\fourthargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_seventuple_empty_five_yes#1{#2}{#3}{#4}{#5}}%
{\syst_helpers_seventuple_empty_five_nop#1{#2}{#3}{#4}{#5}}}
\def\syst_helpers_seventuple_empty_five_yes#1#2#3#4#5[#6]%
{\fifthargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\syst_helpers_seventuple_empty_six_yes#1{#2}{#3}{#4}{#5}{#6}}%
{\syst_helpers_seventuple_empty_six_nop#1{#2}{#3}{#4}{#5}{#6}}}
\def\syst_helpers_seventuple_empty_six_yes#1#2#3#4#5#6[#7]%
{\sixthargumenttrue
- \doifnextoptionalelse
+ \doifelsenextoptional
{\seventhargumenttrue#1[{#2}][{#3}][{#4}][{#5}][{#6}][{#7}]}%
{\syst_helpers_seventuple_empty_seven_nop#1{#2}{#3}{#4}{#5}{#6}{#7}}}
@@ -2714,13 +2769,13 @@
\unexpanded\def\complexorsimple#1%
{% \relax % prevents lookahead, brrr
- \doifnextoptionalelse
+ \doifelsenextoptional
{\firstargumenttrue \csname\s!complex\strippedcsname#1\endcsname}
{\firstargumentfalse\csname\s!simple \strippedcsname#1\endcsname}}
\unexpanded\def\complexorsimpleempty#1%
{% \relax % prevents lookahead, brrr
- \doifnextoptionalelse
+ \doifelsenextoptional
{\firstargumenttrue \csname\s!complex\strippedcsname#1\endcsname}
{\firstargumentfalse\csname\s!complex\strippedcsname#1\endcsname[]}}
@@ -2732,10 +2787,10 @@
%D in protection.
\unexpanded\def\syst_helpers_complex_or_simple#1#2%
- {\doifnextoptionalelse{\firstargumenttrue#1}{\firstargumentfalse#2}}
+ {\doifelsenextoptional{\firstargumenttrue#1}{\firstargumentfalse#2}}
\unexpanded\def\syst_helpers_complex_or_simple_empty#1%
- {\doifnextoptionalelse{\firstargumenttrue#1}{\firstargumentfalse#1[]}}
+ {\doifelsenextoptional{\firstargumenttrue#1}{\firstargumentfalse#1[]}}
\unexpanded\def\definecomplexorsimple#1%
{\unexpanded\edef#1{\syst_helpers_complex_or_simple
@@ -2936,7 +2991,14 @@
\def\fifthofsixarguments #1#2#3#4#5#6{#5}
\def\sixthofsixarguments #1#2#3#4#5#6{#6}
-\unexpanded\def\firstofoneunexpanded#1{#1}
+\unexpanded\def\firstofoneunexpanded #1{#1}
+
+\unexpanded\def\firstoftwounexpanded #1#2{#1}
+\unexpanded\def\secondoftwounexpanded #1#2{#2}
+
+\unexpanded\def\firstofthreeunexpanded #1#2#3{#1}
+\unexpanded\def\secondofthreeunexpanded#1#2#3{#2}
+\unexpanded\def\thirdofthreeunexpanded #1#2#3{#3}
%D \macros
%D {globalletempty,letempty,
@@ -3021,7 +3083,7 @@
%D
%D A fully expandable message:
-\def\immediatemessage#1{\ctxlua{logs.status("message","#1")}}
+\let\immediatemessage\clf_immediatemessage % {} mandate
%D \macros
%D {debuggerinfo}
@@ -3237,6 +3299,25 @@
% [here #oeps: \the\scratchcounter]
% \stoptexdefinition
+% \bgroup \obeylines
+%
+% \global\let\stoptexdefinition\relax
+%
+% \unexpanded\gdef\starttexdefinition%
+% {\bgroup%
+% \obeylines%
+% \syst_helpers_start_tex_definition_one}
+%
+% \gdef\syst_helpers_start_tex_definition_one#1
+% {\catcode\endoflineasciicode\ignorecatcode%
+% \syst_helpers_start_tex_definition_two{#1}}
+%
+% \gdef\syst_helpers_start_tex_definition_two#1#2\stoptexdefinition%
+% {\egroup%
+% \ctxcommand{thetexdefinition("#1")}{#2}}
+%
+% \egroup
+
\bgroup \obeylines
\global\let\stoptexdefinition\relax
@@ -3244,19 +3325,19 @@
\unexpanded\gdef\starttexdefinition%
{\bgroup%
\obeylines%
- \syst_helpers_start_tex_definition_one}
+ \syst_helpers_start_tex_definition}
-\gdef\syst_helpers_start_tex_definition_one#1
+\gdef\syst_helpers_start_tex_definition#1
{\catcode\endoflineasciicode\ignorecatcode%
- \syst_helpers_start_tex_definition_two{#1}}
+ \clf_texdefinition_one{#1}}
-\gdef\syst_helpers_start_tex_definition_two#1#2\stoptexdefinition%
+\gdef\dostarttexdefinition#1\stoptexdefinition%
{\egroup%
- \ctxcommand{thetexdefinition("#1")}{#2}}
+ \clf_texdefinition_two{#1}}
\egroup
-\unexpanded\def\texdefinition#1{\csname\ifcsname#1\endcsname#1\else donothing\fi\endcsname}
+\unexpanded\def\texdefinition#1{\csname\ifcsname#1\endcsname#1\else donothing\fi\endcsname} % todo: a nop cs: char 0 or some corenamespace
% This is a first variant, more might be added:
@@ -3333,14 +3414,14 @@
\def\syst_helpers_do_do_do_increment#1,#2){\dodoglobal\edef#1{\the\numexpr\ifdefined#1\ifx#1\relax\else#1\fi\fi+#2\relax}}
\def\syst_helpers_do_do_do_decrement#1,#2){\dodoglobal\edef#1{\the\numexpr\ifdefined#1\ifx#1\relax\else#1\fi\fi-#2\relax}}
-\def\syst_helpers_do_do_increment(#1{\doifnextcharelse,{\syst_helpers_do_do_do_increment#1}{\syst_helpers_do_do_do_increment#1,\plusone}}
-\def\syst_helpers_do_do_decrement(#1{\doifnextcharelse,{\syst_helpers_do_do_do_decrement#1}{\syst_helpers_do_do_do_decrement#1,\plusone}}
+\def\syst_helpers_do_do_increment(#1{\doifelsenextchar,{\syst_helpers_do_do_do_increment#1}{\syst_helpers_do_do_do_increment#1,\plusone}}
+\def\syst_helpers_do_do_decrement(#1{\doifelsenextchar,{\syst_helpers_do_do_do_decrement#1}{\syst_helpers_do_do_do_decrement#1,\plusone}}
\unexpanded\def\fastincrement#1{\dodoglobal\edef#1{\the\numexpr#1+\plusone \relax}}
\unexpanded\def\fastdecrement#1{\dodoglobal\edef#1{\the\numexpr#1+\minusone\relax}}
-\unexpanded\def\increment{\doifnextcharelse(\syst_helpers_do_do_increment\syst_helpers_do_increment}
-\unexpanded\def\decrement{\doifnextcharelse(\syst_helpers_do_do_decrement\syst_helpers_do_decrement}
+\unexpanded\def\increment{\doifelsenextchar(\syst_helpers_do_do_increment\syst_helpers_do_increment}
+\unexpanded\def\decrement{\doifelsenextchar(\syst_helpers_do_do_decrement\syst_helpers_do_decrement}
\unexpanded\def\incrementvalue#1{\expandafter\increment\csname#1\endcsname}
\unexpanded\def\decrementvalue#1{\expandafter\decrement\csname#1\endcsname}
@@ -3384,6 +3465,7 @@
{\expandafter\syst_helpers_checked_stripped_csname\string#1}
\def\syst_helpers_checked_stripped_csname#1%
+ %{\ifx#1\letterbackslash\else#1\fi}
{\if\noexpand#1\letterbackslash\else#1\fi}
%D \macros
@@ -4016,13 +4098,15 @@
\def\syst_helpers_check_if_assignment_else#1=#2#3\_e_o_p_{\if#2@}%
-\unexpanded\def\doifassignmentelse#1% expandable
+\unexpanded\def\doifelseassignment#1% expandable
{\expandafter\syst_helpers_check_if_assignment_else\detokenize{#1}=@@\_e_o_p_
\expandafter\secondoftwoarguments
\else
\expandafter\firstoftwoarguments
\fi}
+\let\doifassignmentelse\doifelseassignment
+
\newif\ifassignment
\unexpanded\def\docheckassignment#1%
@@ -4095,7 +4179,7 @@
%D
%D Watch the one level expansion of the second argument.
-\unexpanded\def\doifmeaningelse#1#2%
+\unexpanded\def\doifelsemeaning#1#2%
{\edef\m_syst_string_one{\meaning#1}%
\def \m_syst_string_two{#2}%
\edef\m_syst_string_two{\meaning\m_syst_string_two}%
@@ -4105,6 +4189,8 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doifmeaningelse\doifelsemeaning
+
%D \macros
%D {doifsamestringselse,doifsamestring,doifnotsamestring}
%D
@@ -4122,10 +4208,12 @@
\edef\m_syst_string_two{\detokenize\expandafter{\normalexpanded{#4}}}%
\ifx\m_syst_string_one\m_syst_string_two\expandafter#1\else\expandafter#2\fi}
-\unexpanded\def\doifsamestringelse{\syst_helpers_if_samestring_else\firstoftwoarguments\secondoftwoarguments}
+\unexpanded\def\doifelsesamestring{\syst_helpers_if_samestring_else\firstoftwoarguments\secondoftwoarguments}
\unexpanded\def\doifsamestring {\syst_helpers_if_samestring_else\firstofoneargument \gobbleoneargument }
\unexpanded\def\doifnotsamestring {\syst_helpers_if_samestring_else\gobbleoneargument \firstofoneargument }
+\let\doifsamestringelse\doifelsesamestring
+
%D \macros
%D {ConvertToConstant,ConvertConstantAfter}
%D
@@ -4187,7 +4275,7 @@
\unexpanded\def\CheckConstantAfter#1#2%
{\expandafter\convertargument\v!prefix!\to\ascii
\convertargument#1\to#2\relax
- \doifinstringelse\ascii{#2}
+ \doifelseinstring\ascii{#2}
{\expandafter\convertargument#1\to#2}
{}}
@@ -4426,6 +4514,18 @@
{\m_syst_helpers_handle_group_after
\egroup}%
+\unexpanded\def\syst_helpers_handle_group_pickup#1#2#3% no inner group (so no kerning interference)
+ {\bgroup
+ \def\m_syst_helpers_handle_group_before{#1}%
+ \def\m_syst_helpers_handle_group_after {#2\egroup#3}%
+ \afterassignment\m_syst_helpers_handle_group_pickup_before
+ \let\next=}
+
+\def\m_syst_helpers_handle_group_pickup_before
+ {\bgroup
+ \aftergroup\m_syst_helpers_handle_group_after
+ \m_syst_helpers_handle_group_before}
+
\unexpanded\def\syst_helpers_handle_group_nop
{\ifnum\currentgrouptype=\semisimplegroupcode
\expandafter\syst_helpers_handle_group_nop_a
@@ -4458,10 +4558,13 @@
%D implementation became:
\unexpanded\def\groupedcommand#1#2%
- {\doifnextbgroupelse{\syst_helpers_handle_group_normal{#1}{#2}}{\syst_helpers_handle_group_nop{#1}{#2}}}
+ {\doifelsenextbgroup{\syst_helpers_handle_group_normal{#1}{#2}}{\syst_helpers_handle_group_nop{#1}{#2}}}
\unexpanded\def\simplegroupedcommand#1#2%
- {\doifnextbgroupelse{\syst_helpers_handle_group_simple{#1}{#2}}{\syst_helpers_handle_group_nop{#1}{#2}}}
+ {\doifelsenextbgroup{\syst_helpers_handle_group_simple{#1}{#2}}{\syst_helpers_handle_group_nop{#1}{#2}}}
+
+\unexpanded\def\pickupgroupedcommand#1#2#3%
+ {\doifelsenextbgroup{\syst_helpers_handle_group_pickup{#1}{#2}{#3}}{\syst_helpers_handle_group_nop{#1}{#2}}}
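+
+%D A made||up usage sketch (not taken from the source): the first two arguments
+%D wrap the grouped text, the third one is injected after the group has been
+%D closed.
+%D
+%D \starttyping
+%D \pickupgroupedcommand{\bf}{}{!}{important} % bold text, then "!" after the group
+%D \stoptyping
+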
%D Users should be aware of the fact that grouping can
%D interfere with ones paragraph settings that are executed
@@ -4555,14 +4658,14 @@
\newtoks\AfterPar
\def\redowithpar\par
- {\doifnextcharelse\par\redowithpar\dodowithpar}%
+ {\doifelsenextchar\par\redowithpar\dodowithpar}%
\def\dowithpar#1#2%
{\def\dodowithpar##1\par{#1##1#2}%
\redowithpar\par}
\def\redogotopar\par
- {\doifnextcharelse\par\redogotopar\dodogotopar}%
+ {\doifelsenextchar\par\redogotopar\dodogotopar}%
\def\dogotopar#1%
{\def\dodogotopar{#1}%
@@ -4626,7 +4729,7 @@
\unexpanded\def\dowithpargument#1%
{\def\syst_helpers_next_par##1 \par{#1{##1}}%
\def\syst_helpers_next_arg##1{#1{##1}}%
- \doifnextbgroupelse\syst_helpers_next_arg{\doifnextcharelse\par{#1{}}\syst_helpers_next_par}}
+ \doifelsenextbgroup\syst_helpers_next_arg{\doifelsenextchar\par{#1{}}\syst_helpers_next_par}}
%D The \type{p} in the previous command stands for paragraph.
%D When we want to act upon words we can use the \type{w}
@@ -4656,7 +4759,7 @@
\unexpanded\def\dowithwargument#1%
{\def\syst_helpers_next_war##1 {#1{##1}}%
\def\syst_helpers_next_arg##1{#1{##1}}%
- \doifnextbgroupelse\syst_helpers_next_arg\syst_helpers_next_war}
+ \doifelsenextbgroup\syst_helpers_next_arg\syst_helpers_next_war}
%D \macros
%D {dorepeat,dorepeatwithcommand}
@@ -4751,11 +4854,13 @@
\expandafter\firstoftwoarguments
\fi}
-\def\doifstringinstringelse#1#2%
+\def\doifelsestringinstring#1#2%
{\expandafter\def\expandafter\syst_helpers_if_instring_else\expandafter##\expandafter1#1##2##3\_e_o_s_
{\syst_helpers_if_instring_else_indeed##2}%
\expandafter\expandafter\expandafter\syst_helpers_if_instring_else\expandafter#2#1@@\_e_o_s_}
+\let\doifstringinstringelse\doifelsestringinstring
+
%D \macros
%D {appendtoks,prependtoks,appendtoksonce,prependtoksonce,
%D doifintokselse,flushtoks,dotoks}
@@ -4801,19 +4906,21 @@
\def\syst_helpers_append_toks_once#1\to#2%
{\let\m_syst_helpers_scratch#2%
\t_syst_helpers_scratch\expandafter{\gobbleoneargument#1}%
- \doifintokselse\t_syst_helpers_scratch\m_syst_helpers_scratch\donothing\syst_helpers_append_toks_indeed}
+ \doifelseintoks\t_syst_helpers_scratch\m_syst_helpers_scratch\donothing\syst_helpers_append_toks_indeed}
\def\syst_helpers_prepend_toks_once#1\to#2%
{\let\m_syst_helpers_scratch#2%
\t_syst_helpers_scratch\expandafter{\gobbleoneargument#1}%
- \doifintokselse\t_syst_helpers_scratch\m_syst_helpers_scratch\donothing\syst_helpers_prepend_toks_indeed}
+ \doifelseintoks\t_syst_helpers_scratch\m_syst_helpers_scratch\donothing\syst_helpers_prepend_toks_indeed}
%D The test macro:
-\unexpanded\def\doifintokselse#1#2% #1 en #2 zijn toks
+\unexpanded\def\doifelseintoks#1#2% #1 en #2 zijn toks
{\edef\asciia{\detokenize\expandafter{\the#1}}%
\edef\asciib{\detokenize\expandafter{\the#2}}%
- \doifstringinstringelse\asciia\asciib}
+ \doifelsestringinstring\asciia\asciib}
+
+\let\doifintokselse\doifelseintoks
%D A nice one too:
@@ -5007,24 +5114,26 @@
{\dodoglobal\edef#2{#1\ifx#2\empty\else,#2\fi}}
\unexpanded\def\addtocommalist#1#2% {item} \cs
- {\rawdoifinsetelse{#1}#2\resetglobal
+ {\rawdoifelseinset{#1}#2\resetglobal
{\dodoglobal\edef#2{\ifx#2\empty\else#2,\fi#1}}}
\unexpanded\def\pretocommalist#1#2% {item} \cs
- {\rawdoifinsetelse{#1}#2\resetglobal
+ {\rawdoifelseinset{#1}#2\resetglobal
{\dodoglobal\edef#2{#1\ifx#2\empty\else,#2\fi}}}
-\unexpanded\def\robustdoifinsetelse#1#2%
+\unexpanded\def\robustdoifelseinset#1#2%
{\edef\m_syst_string_one{\detokenize\expandafter{\normalexpanded{#1}}}%
\edef\m_syst_string_two{\detokenize\expandafter{\normalexpanded{#2}}}%
- \rawdoifinsetelse\m_syst_string_one\m_syst_string_two}
+ \rawdoifelseinset\m_syst_string_one\m_syst_string_two}
+
+\let\robustdoifinsetelse\robustdoifelseinset
\unexpanded\def\robustaddtocommalist#1#2% {item} \cs
- {\robustdoifinsetelse{#1}#2\resetglobal
+ {\robustdoifelseinset{#1}#2\resetglobal
{\dodoglobal\edef#2{\ifx#2\empty\else#2,\fi#1}}}
\unexpanded\def\robustpretocommalist#1#2% {item} \cs
- {\robustdoifinsetelse{#1}#2\resetglobal
+ {\robustdoifelseinset{#1}#2\resetglobal
{\dodoglobal\edef#2{#1\ifx#2\empty\else,#2\fi}}}
\unexpanded\def\xsplitstring#1#2% \cs {str}
@@ -5038,7 +5147,7 @@
\def\acleanedupcommalist#1,,#2\relax{#1}
\unexpanded\def\removefromcommalist#1#2% to be sped up
- {\rawdoifinsetelse{#1}#2%
+ {\rawdoifelseinset{#1}#2%
{\normalexpanded{\xsplitstring\noexpand#2{#1}}%
\dodoglobal\edef#2%
{\ifx\m_syst_string_one\empty
@@ -5276,7 +5385,7 @@
\let\popmacro \localpopmacro
%D \macros
-%D {setlocalhsize}
+%D {setlocalhsize,distributedhsize}
%D
%D Sometimes we need to work with the \type{\hsize} that is
%D corrected for indentation and left and right skips. The
@@ -5295,7 +5404,7 @@
\newdimen\localhsize
\unexpanded\def\setlocalhsize % don't change !
- {\doifnextoptionalelse
+ {\doifelsenextoptional
\syst_helpers_set_local_hsize_yes
\syst_helpers_set_local_hsize_nop}
@@ -5314,6 +5423,12 @@
\fi
\relax}
+\def\distributedhsize#1#2#3%
+ {\dimexpr(#1-\numexpr#3-1\relax\dimexpr#2\relax)/#3\relax}
+
+\def\hsizefraction#1#2%
+ {\dimexpr#1/#2\relax}
+
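+%D A minimal sketch of intended use (the three||column split is only an
+%D assumption for the example): \type {\distributedhsize} divides a total width
+%D over a number of columns after subtracting the gaps between them.
+%D
+%D \starttyping
+%D \scratchdimen\distributedhsize{\hsize}{1em}{3} % (\hsize - 2em)/3
+%D \scratchdimen\hsizefraction{\hsize}{4}         % \hsize/4
+%D \stoptyping
+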
%D \macros
%D {doifvalue,doifnotvalue,doifelsevalue,
%D doifnothing,doifsomething,doifelsenothing,
@@ -5376,7 +5491,7 @@
\expandafter\secondoftwoarguments
\fi}
-\unexpanded\def\doifsomethingelse#1%
+\unexpanded\def\doifelsesomething#1%
{\edef\m_syst_string_one{#1}%
\ifx\m_syst_string_one\empty
\expandafter\secondoftwoarguments
@@ -5408,18 +5523,25 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doifvalueelse \doifelsevalue
+\let\doifnothingelse \doifelsenothing
+\let\doifsomethingelse \doifelsesomething
+\let\doifvaluenothingelse\doifelsevaluenothing
+
%D \macros
%D {doifemptyelsevalue, doifemptyvalue, doifnotemptyvalue}
%D
%D Also handy:
-\def\doifemptyelsevalue#1%
+\def\doifelseemptyvalue#1%
{\expandafter\ifx\csname#1\endcsname\empty
\expandafter\firstoftwoarguments
\else
\expandafter\secondoftwoarguments
\fi}
+\let\doifemptyvalueelse\doifelseemptyvalue
+
\def\doifemptyvalue#1%
{\expandafter\ifx\csname#1\endcsname\empty
\expandafter\firstofoneargument
@@ -5449,10 +5571,12 @@
\processcommalist[#3]\syst_helpers_do_common_check_all
\ifdone\expandafter#1\else\expandafter#2\fi}
-\unexpanded\def\doifallcommonelse{\syst_helpers_do_if_all_common_else\firstoftwoarguments\secondoftwoarguments}
+\unexpanded\def\doifelseallcommon{\syst_helpers_do_if_all_common_else\firstoftwoarguments\secondoftwoarguments}
\unexpanded\def\doifallcommon {\syst_helpers_do_if_all_common_else\firstofonearguments\gobbleoneargument }
\unexpanded\def\doifnotallcommon {\syst_helpers_do_if_all_common_else\gobbleoneargument \firstofonearguments }
+\let\doifallcommonelse\doifelseallcommon
+
%D \macros
%D {DOIF,DOIFELSE,DOIFNOT}
%D
@@ -5656,13 +5780,15 @@
\def\syst_helpers_if_some_space_else#1 #2#3\_e_o_s_{\if\noexpand#2@}
-\def\doifsomespaceelse#1% % #2#3%
+\def\doifelsesomespace#1% % #2#3%
{\syst_helpers_if_some_space_else#1 @ @\_e_o_s_ % #3\else#2\fi}
\expandafter\secondoftwoarguments
\else
\expandafter\firstoftwoarguments
\fi}
+\let\doifsomespaceelse\doifelsesomespace
+
%D \macros
%D {processseparatedlist}
%D
@@ -5912,11 +6038,11 @@
%D These macros are sort of obsolete as we never use uppercase this
%D way. But nevertheless we provide them:
-\def\utfupper#1{\ctxcommand{upper(\!!bs#1\!!es)}} % expandable
-\def\utflower#1{\ctxcommand{lower(\!!bs#1\!!es)}} % expandable
+\def\utfupper#1{\clf_upper{#1}} % expandable
+\def\utflower#1{\clf_lower{#1}} % expandable
-\unexpanded\def\uppercasestring#1\to#2{\dodoglobal\edef#2{\ctxcommand{upper(\!!bs#1\!!es)}}}
-\unexpanded\def\lowercasestring#1\to#2{\dodoglobal\edef#2{\ctxcommand{lower(\!!bs#1\!!es)}}}
+\unexpanded\def\uppercasestring#1\to#2{\dodoglobal\edef#2{\clf_upper{#1}}}
+\unexpanded\def\lowercasestring#1\to#2{\dodoglobal\edef#2{\clf_lower{#1}}}
%D \macros
%D {handletokens}
@@ -6112,24 +6238,20 @@
% \def\threedigitrounding#1%
% {\expandafter\expandafter\expandafter\dothreedigitrounding\expandafter\WITHOUTPT\the\dimexpr#1\points+.0005\points\relax0000.*00\relax}
-\def\integerrounding #1{\cldcontext{"\letterpercent 0.0f",#1}}
-\def\onedigitrounding #1{\cldcontext{"\letterpercent 0.1f",#1}}
-\def\twodigitrounding #1{\cldcontext{"\letterpercent 0.2f",#1}}
-\def\threedigitrounding#1{\cldcontext{"\letterpercent 0.3f",#1}}
+\def\integerrounding #1{\clf_rounded\zerocount\numexpr#1\relax}
+\def\onedigitrounding #1{\clf_rounded\plusone \numexpr#1\relax}
+\def\twodigitrounding #1{\clf_rounded\plustwo \numexpr#1\relax}
+\def\threedigitrounding#1{\clf_rounded\plusthree\numexpr#1\relax}
%D \macros
%D {processcontent}
%D
-%D This is the first occasion where \TEX\ and \ETEX\ are no
-%D longer compatible, although in many cases things go ok.
-%D Beware of verbatim, i.e. catcode changes.
+%D This macro is first used in the tabulation macros.
%D
%D \starttyping
%D \unexpanded\def\starthans%
%D {\processcontent{stophans}\test{\message{\test}\wait}}
%D \stoptyping
-%D
-%D This macro is first used in the tabulation macros.
\unexpanded\def\processcontent#1%
{\begingroup\expandafter\syst_helpers_process_content\csname#1\endcsname}
@@ -6195,7 +6317,7 @@
%D Not that fast I guess, but here's a way to test for token
%D registers being empty.
-\unexpanded\def\doifsometokselse#1%
+\unexpanded\def\doifelsesometoks#1%
{\edef\m_syst_string_one{\the#1}% one level expansion so quite ok
\ifx\m_syst_string_one\empty
\expandafter\secondoftwoarguments
@@ -6203,6 +6325,8 @@
\expandafter\firstoftwoarguments
\fi}
+\let\doifsometokselse\doifelsesometoks
+
\unexpanded\def\doifsometoks#1%
{\edef\m_syst_string_one{\the#1}% one level expansion so quite ok
\ifx\m_syst_string_one\empty
@@ -6241,9 +6365,11 @@
\unexpanded\def\stopstrictinspectnextcharacter
{\let\syst_helpers_inspect_next_character\syst_helpers_normal_inspect_next_character}
-\def\strictdoifnextoptionalelse#1#2%
+\unexpanded\def\strictdoifelsenextoptional#1#2%
{\startstrictinspectnextcharacter
- \doifnextcharelse[{\stopstrictinspectnextcharacter#1}{\stopstrictinspectnextcharacter#2}}
+ \doifelsenextchar[{\stopstrictinspectnextcharacter#1}{\stopstrictinspectnextcharacter#2}}
+
+\let\strictdoifnextoptionalelse\strictdoifelsenextoptional
%D \macros
%D {gobblespacetokens}
@@ -6436,9 +6562,11 @@
%D
%D This is a dirty one: we simply append a unit and discard it when needed.
-\def\doifdimensionelse#1%
+\def\doifelsedimension#1%
{\afterassignment\syst_helpers_if_dimension_else\scratchdimen#1pt\relax}
+\let\doifdimensionelse\doifelsedimension
+
\def\syst_helpers_if_dimension_else#1%
{\ifx#1\relax
\expandafter\secondoftwoarguments
@@ -6467,7 +6595,9 @@
\installsystemnamespace{dimencheckb}
\installsystemnamespace{dimencheckc}
-\def\doifdimenstringelse#1{\normalexpanded{\noexpand\dodimenteststageone#1}\empty\empty]}
+\def\doifelsedimenstring#1{\normalexpanded{\noexpand\dodimenteststageone#1}\empty\empty]}
+
+\let\doifdimenstringelse\doifelsedimenstring
\def\dodimenteststageone #1#2{\csname \??dimenchecka\ifcsname \??dimenchecka#2\endcsname#2\else x\fi\endcsname#2}
\def\dodimenteststagetwo #1#2{\csname \??dimencheckb\ifcsname \??dimencheckb#2\endcsname#2\else x\fi\endcsname#2}
@@ -6590,10 +6720,12 @@
% sometimes handy:
-\unexpanded\def\doifhasspaceelse#1%
+\unexpanded\def\doifelsehasspace#1%
{\edef\m_syst_string_one{#1}%
\normalexpanded{\syst_helpers_if_has_space_else#1\space}\empty\relax}
+\let\doifhasspaceelse\doifelsehasspace
+
\unexpanded\def\syst_helpers_if_has_space_else#1 #2#3\relax % \space\empty\relax
{\ifx\m_syst_string_one\space
\expandafter\firstoftwoarguments
@@ -6602,7 +6734,7 @@
\else
\doubleexpandafter\firstoftwoarguments
\fi\fi}
-
+
% this will replace loadfile once and alike !!! todo
\installsystemnamespace{flag}
@@ -6621,6 +6753,8 @@
\doubleexpandafter\secondoftwoarguments
\fi\fi}
+\let\doifflaggedelse\doifelseflagged
+
\def\doifnotflagged#1%
{\expandafter\ifx\csname\??flag#1\endcsname\relax
\expandafter\firstofoneargument
@@ -6629,10 +6763,10 @@
\else
\doubleexpandafter\firstofoneargument
\fi\fi}
-
+
\unexpanded\def\inheritparameter[#1]#2[#3]#4[#5]% tag tokey fromkey
{\expandafter\def\csname#1#3\expandafter\endcsname\expandafter{\csname#1#5\endcsname}}
-
+
\def\syst_helpers_if_non_zero_positive_else#1#2\end % #3#4%
{\ifx#1\relax
\ifcase\scratchcounter
@@ -6647,9 +6781,11 @@
\expandafter\secondoftwoarguments
\fi}
-\def\doifnonzeropositiveelse#1%
+\def\doifelsenonzeropositive#1%
{\begingroup\afterassignment\syst_helpers_if_non_zero_positive_else\scratchcounter=0#1\relax\empty\end}
-
+
+\let\doifnonzeropositiveelse\doifelsenonzeropositive
+
% here ?
\unexpanded\def\dosetrawvalue #1#2#3{\expandafter \def\csname#1#2\endcsname{#3}}
@@ -6693,8 +6829,8 @@
% \fi
% \def\elapsedseconds{\expandafter\withoutpt\the\dimexpr\elapsedtime sp\relax}
-\def\resettimer {\ctxcommand{resettimer()}}
-\def\elapsedtime {\ctxcommand{elapsedtime()}}
+\let\resettimer \clf_resettimer
+\let\elapsedtime \clf_elapsedtime
\let\elapsedseconds \elapsedtime
\newcount\c_syst_helpers_test_feature_n
@@ -6808,7 +6944,7 @@
%D \typebuffer \getbuffer
\unexpanded\def\ignoreimplicitspaces
- {\doifnextcharelse\relax\relax\relax}
+ {\doifelsenextchar\relax\relax\relax}
%D \macros
%D {processwords}
@@ -6871,7 +7007,7 @@
\def\syst_helpers_direct_double_empty_one_nop#1{#1[][]}
\def\syst_helpers_direct_double_empty_two_nop#1[#2]{#1[#2][]}
-%D Used in math definitions (in an type {\edef}):
+%D Used in math definitions (in an \type {\edef}):
%D \startbuffer
%D [\docheckedpair{}]
@@ -6985,8 +7121,80 @@
% expandable:
-%def\getsubstring#1#2#3{\cldcontext{utf.sub([[#3]],tonumber("#1"),tonumber("#2"))}}
-\def\getsubstring#1#2#3{\ctxcommand{getsubstring(\!!bs#3\!!es,"#1","#2")}}
+\def\getsubstring#1#2#3{\clf_getsubstring{#3}{#1}{#2}}
+
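+%D For instance (a made||up string):
+%D
+%D \starttyping
+%D \getsubstring{2}{4}{characters} % gives: har
+%D \stoptyping
+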
+%D Other dimensions than pt (used in mb-mp)
+
+\def\converteddimen#1#2{\clf_converteddimen\dimexpr#1\relax{#2}}
+
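+%D A small sketch of intended use (the target unit is only an example):
+%D
+%D \starttyping
+%D \converteddimen{2cm}{mm} % the dimension expressed in millimeters
+%D \stoptyping
+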
+%D Maybe (looks ugly):
+%D
+%D \starttyping
+%D \doifcase {foo}
+%D {bar} {BAR}
+%D {foo} {FOO}
+%D {default} {DEFAULT}
+%D
+%D \doifcase {foo}
+%D {bar} {BAR}
+%D {foo} {\doifcase {bar}
+%D {bar} {BAR}
+%D {foo} {FOO}
+%D {default} {DEFAULT}
+%D }
+%D {default} {DEFAULT}
+%D \stoptyping
+
+% \doifcase {\btxfoundname{author}}
+% {author} {\btxflush{author}}
+% {editor} {\texdefinition{btx:apa:editor-or-editors}}
+% {title} {\texdefinition{btx:apa:title-subtitle-type}}
+% {default} {\btxflush{author}}
+
+% \unexpanded\def\doifcase#1%
+% {\edef\m_case_asked{#1}%
+% \syst_aux_case}
+%
+% \def\syst_aux_case#1%
+% {\edef\m_case_temp{#1}%
+% \ifx\m_case_temp\m_case_asked
+% \expandafter\syst_aux_case_yes
+% \else\ifx\m_case_temp\s!default
+% \doubleexpandafter\firstofoneargument
+% \else
+% \doubleexpandafter\syst_aux_case_nop
+% \fi\fi}
+%
+% \def\syst_aux_skip#1#2%
+% {\edef\m_case_temp{#1}%
+% \ifx\m_case_temp\s!default
+% \expandafter\syst_aux_done
+% \else
+% \expandafter\syst_aux_skip
+% \fi}
+%
+% \def\syst_aux_case_yes#1%
+% {\def\syst_aux_done{#1}%
+% \syst_aux_skip}
+%
+% \def\syst_aux_case_nop#1%
+% {\syst_aux_case}
+
+%D \macros
+%D {ntimes}
+%D
+%D some repetition:
+%D
+%D \startbuffer
+%D \ntimes{*}{20}
+%D \stopbuffer
+%D
+%D \typebuffer \blank gives: \getbuffer \blank
+%D
+%D This is not really fast but quite okay:
+
+%def\ntimes#1#2{\ifnum#2>\zerocount#1\ntimes{#1}{\numexpr#2-\plusone\relax}\fi} % 1.72
+\def\ntimes#1#2{\clf_ntimes{#1}\numexpr#2\relax} % 0.33
\protect \endinput
diff --git a/tex/context/base/syst-con.lua b/tex/context/base/syst-con.lua
index 48f02da3a..0fa685b2d 100644
--- a/tex/context/base/syst-con.lua
+++ b/tex/context/base/syst-con.lua
@@ -6,57 +6,63 @@ if not modules then modules = { } end modules ['syst-con'] = {
license = "see context related readme files"
}
-converters = converters or { }
+local tonumber = tonumber
+local math = math
+local utfchar = utf.char
+local gsub = string.gsub
+
+converters = converters or { }
+local converters = converters
+
+local context = context
+local commands = commands
+local implement = interfaces.implement
+
+local formatters = string.formatters
--[[ldx--
For raw 8 bit characters, the offset is 0x110000 (bottom of plane 18) at
the top of 's char range but outside the unicode range.
--ldx]]--
-local tonumber = tonumber
-local utfchar = utf.char
-local gsub, format = string.gsub, string.format
+function converters.hexstringtonumber(n) return tonumber(n,16) end
+function converters.octstringtonumber(n) return tonumber(n, 8) end
-function converters.hexstringtonumber(n) tonumber(n,16) end
-function converters.octstringtonumber(n) tonumber(n, 8) end
function converters.rawcharacter (n) utfchar(0x110000+n) end
-function converters.lchexnumber (n) format("%x" ,n) end
-function converters.uchexnumber (n) format("%X" ,n) end
-function converters.lchexnumbers (n) format("%02x",n) end
-function converters.uchexnumbers (n) format("%02X",n) end
-function converters.octnumber (n) format("%03o",n) end
-
-function commands.hexstringtonumber(n) context(tonumber(n,16)) end
-function commands.octstringtonumber(n) context(tonumber(n, 8)) end
-function commands.rawcharacter (n) context(utfchar(0x110000+n)) end
-function commands.lchexnumber (n) context("%x" ,n) end
-function commands.uchexnumber (n) context("%X" ,n) end
-function commands.lchexnumbers (n) context("%02x",n) end
-function commands.uchexnumbers (n) context("%02X",n) end
-function commands.octnumber (n) context("%03o",n) end
-
-function commands.format(fmt,...) -- used ?
- fmt = gsub(fmt,"@","%%")
- context(fmt,...)
-end
-
-local cosd, sind, tand = math.cosd, math.sind, math.tand
-local cos, sin, tan = math.cos, math.sin, math.tan
-
--- unfortunately %s spits out: 6.1230317691119e-017
---
--- function commands.sind(n) context(sind(n)) end
--- function commands.cosd(n) context(cosd(n)) end
--- function commands.tand(n) context(tand(n)) end
---
--- function commands.sin (n) context(sin (n)) end
--- function commands.cos (n) context(cos (n)) end
--- function commands.tan (n) context(tan (n)) end
-
-function commands.sind(n) context("%0.6f",sind(n)) end
-function commands.cosd(n) context("%0.6f",cosd(n)) end
-function commands.tand(n) context("%0.6f",tand(n)) end
-
-function commands.sin (n) context("%0.6f",sin (n)) end
-function commands.cos (n) context("%0.6f",cos (n)) end
-function commands.tan (n) context("%0.6f",tan (n)) end
+
+local lchexnumber = formatters["%x"]
+local uchexnumber = formatters["%X"]
+local lchexnumbers = formatters["%02x"]
+local uchexnumbers = formatters["%02X"]
+local octnumber = formatters["%03o"]
+local nicenumber = formatters["%0.6F"]
+
+converters.lchexnumber = lchexnumber
+converters.uchexnumber = uchexnumber
+converters.lchexnumbers = lchexnumbers
+converters.uchexnumbers = uchexnumbers
+converters.octnumber = octnumber
+converters.nicenumber = nicenumber
+
+implement { name = "hexstringtonumber", actions = { tonumber, context }, arguments = { "integer", 16 } }
+implement { name = "octstringtonumber", actions = { tonumber, context }, arguments = { "integer", 8 } }
+
+implement { name = "rawcharacter", actions = function(n) context(utfchar(0x110000+n)) end, arguments = "integer" }
+
+implement { name = "lchexnumber", actions = { lchexnumber, context }, arguments = "integer" }
+implement { name = "uchexnumber", actions = { uchexnumber, context }, arguments = "integer" }
+implement { name = "lchexnumbers", actions = { lchexnumbers, context }, arguments = "integer" }
+implement { name = "uchexnumbers", actions = { uchexnumbers, context }, arguments = "integer" }
+implement { name = "octnumber", actions = { octnumber, context }, arguments = "integer" }
+
+implement { name = "sin", actions = { math.sin, nicenumber, context }, arguments = "number" }
+implement { name = "cos", actions = { math.cos, nicenumber, context }, arguments = "number" }
+implement { name = "tan", actions = { math.tan, nicenumber, context }, arguments = "number" }
+
+implement { name = "sind", actions = { math.sind, nicenumber, context }, arguments = "number" }
+implement { name = "cosd", actions = { math.cosd, nicenumber, context }, arguments = "number" }
+implement { name = "tand", actions = { math.tand, nicenumber, context }, arguments = "number" }
+
+-- only as commands
+
+function commands.format(fmt,...) context((gsub(fmt,"@","%%")),...) end
diff --git a/tex/context/base/syst-con.mkiv b/tex/context/base/syst-con.mkiv
index de8ed597e..17c407819 100644
--- a/tex/context/base/syst-con.mkiv
+++ b/tex/context/base/syst-con.mkiv
@@ -46,10 +46,10 @@
%D [\expandafter\uchexnumber\expandafter{\the\zerocount}]
%D \stoptyping
-\def\lchexnumber #1{\ctxcommand{lchexnumber(\number#1)}}
-\def\uchexnumber #1{\ctxcommand{uchexnumber(\number#1)}}
-\def\lchexnumbers#1{\ctxcommand{lchexnumbers(\number#1)}}
-\def\uchexnumbers#1{\ctxcommand{uchexnumbers(\number#1)}}
+\def\lchexnumber #1{\clf_lchexnumber \numexpr#1\relax}
+\def\uchexnumber #1{\clf_uchexnumber \numexpr#1\relax}
+\def\lchexnumbers#1{\clf_lchexnumbers\numexpr#1\relax}
+\def\uchexnumbers#1{\clf_uchexnumbers\numexpr#1\relax}
\let\hexnumber\uchexnumber
@@ -58,7 +58,7 @@
%D
%D For unicode remapping purposes, we need octal numbers.
-\def\octnumber#1{\ctxcommand{octnumber(\number#1)}}
+\def\octnumber#1{\clf_octnumber\numexpr#1\relax}
%D \macros
%D {hexstringtonumber,octstringtonumber}
@@ -67,8 +67,8 @@
%D a decimal number, thereby taking care of lowercase characters
%D as well.
-\def\hexstringtonumber#1{\ctxcommand{hexstringtonumber("#1")}}
-\def\octstringtonumber#1{\ctxcommand{octstringtonumber("#1")}}
+\def\hexstringtonumber#1{\clf_hexstringtonumber\numexpr#1\relax}
+\def\octstringtonumber#1{\clf_octstringtonumber\numexpr#1\relax}
%D \macros
%D {rawcharacter}
@@ -76,7 +76,7 @@
%D This macro can be used to produce proper 8 bit characters
%D that we sometimes need in backends and round||trips.
-\def\rawcharacter#1{\ctxcommand{rawcharacter(\number#1)}}
+\def\rawcharacter#1{\clf_rawcharacter\numexpr#1\relax}
%D \macros
%D {twodigits, threedigits}
@@ -126,23 +126,12 @@
%D \macros
%D {setcalculatedsin,setcalculatedcos,setcalculatedtan}
-%D
-%D This saves some 2K in the format. At some point we will redo the
-%D code that calls this. Beware: in \MKII\ this is a separate module.
-
-% \let\calculatesin\gobbleoneargument
-% \let\calculatecos\gobbleoneargument
-% \let\calculatetan\gobbleoneargument
-
-% \def\setcalculatedsin#1#2{\edef#1{\cldcontext{math.sind(#2)}}} % jit-unsafe
-% \def\setcalculatedcos#1#2{\edef#1{\cldcontext{math.cosd(#2)}}} % jit-unsafe
-% \def\setcalculatedtan#1#2{\edef#1{\cldcontext{math.tand(#2)}}} % jit-unsafe
-\def\setcalculatedsin#1#2{\edef#1{\ctxcommand{sind(#2)}}}
-\def\setcalculatedcos#1#2{\edef#1{\ctxcommand{cosd(#2)}}}
-\def\setcalculatedtan#1#2{\edef#1{\ctxcommand{tand(#2)}}}
+\def\setcalculatedsin#1#2{\edef#1{\clf_sind#2}}
+\def\setcalculatedcos#1#2{\edef#1{\clf_cosd#2}}
+\def\setcalculatedtan#1#2{\edef#1{\clf_tand#2}}
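+
+%D A quick sketch; the macro name \type {\MySine} is made up for the example:
+%D
+%D \starttyping
+%D \setcalculatedsin\MySine{30} % using \MySine then gives 0.500000
+%D \stoptyping
+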
- \def\formatted#1{\ctxcommand{format(#1)}}
-\unexpanded\def\format #1{\ctxcommand{format(#1)}}
+ \def\formatted#1{\ctxcommand{format(#1)}} % not clf
+\unexpanded\def\format #1{\ctxcommand{format(#1)}} % not clf
\protect \endinput
diff --git a/tex/context/base/syst-ini.mkiv b/tex/context/base/syst-ini.mkiv
index ab1c53131..8631aed90 100644
--- a/tex/context/base/syst-ini.mkiv
+++ b/tex/context/base/syst-ini.mkiv
@@ -114,11 +114,6 @@
end
}
-%D Handy.
-
-\suppresslongerror = 1
-\suppressoutererror = 1
-
%D \ETEX\ has a not so handy way of telling you the version number, i.e. the revision
%D number has a period in it:
@@ -198,7 +193,7 @@
\countdef \c_syst_last_allocated_language = 41 \c_syst_last_allocated_language = \c_syst_min_allocated_language % not used in context
\countdef \c_syst_last_allocated_insertion = 42 \c_syst_last_allocated_insertion = \c_syst_min_allocated_insert
\countdef \c_syst_last_allocated_family = 43 \c_syst_last_allocated_family = \c_syst_min_allocated_family % not used in context
-\countdef \c_syst_last_allocated_attribute = 44 \c_syst_last_allocated_attribute = \c_syst_min_allocated_attribute
+\countdef \c_syst_last_allocated_attribute = 44 \c_syst_last_allocated_attribute = \c_syst_min_allocated_attribute % not used in context
\countdef \c_syst_min_counter_value = 125 \c_syst_min_counter_value = -"7FFFFFFF % beware, we use index 125 at the lua end
\countdef \c_syst_max_counter_value = 126 \c_syst_max_counter_value = "7FFFFFFF % beware, we use index 126 at the lua end
@@ -246,9 +241,10 @@
% Watch out, for the moment we disable the check for already being defined
% later we will revert this but first all chardefs must be replaced.
-\normalprotected\def\newconstant #1{\ifdefined#1\let#1\undefined\fi\newcount#1}
-\normalprotected\def\setnewconstant#1{\ifdefined#1\let#1\undefined\fi\newcount#1#1} % just a number
-\normalprotected\def\setconstant {} % dummy, no checking, so it warns
+\normalprotected\def\newconstant #1{\ifdefined#1\let#1\undefined\fi\newcount#1}
+\normalprotected\def\setnewconstant #1{\ifdefined#1\let#1\undefined\fi\newcount#1#1} % just a number
+\normalprotected\def\setconstant {} % dummy, no checking, so it warns
+\normalprotected\def\setconstantvalue#1#2{\csname#1\endcsname\numexpr#2\relax}
% maybe setconstant with check
@@ -300,7 +296,7 @@
%D 128-1023 are private and should not be touched.
\let\attributeunsetvalue\c_syst_min_counter_value % used to be \minusone
-\normalprotected\def\newattribute{\syst_basics_allocate\c_syst_min_allocated_attribute\attribute\attributedef\c_syst_max_allocated_register}
+\normalprotected\def\newattribute{\syst_basics_allocate\c_syst_last_allocated_attribute\attribute\attributedef\c_syst_max_allocated_register}
%D Not used by \CONTEXT\ but for instance \PICTEX\ needs it. It's a trick to force
%D strings instead of tokens that take more memory. It's a trick to trick to force
@@ -326,8 +322,8 @@
\newcount\scratchcounterone \newcount\scratchcountertwo \newcount\scratchcounterthree
\newdimen \scratchdimenone \newdimen \scratchdimentwo \newdimen \scratchdimenthree
-\newdimen \scratchskipone \newdimen \scratchskiptwo \newdimen \scratchskipthree
-\newbox \scratchmuskipone \newbox \scratchmuskiptwo \newbox \scratchmuskipthree
+\newskip \scratchskipone \newskip \scratchskiptwo \newskip \scratchskipthree
+\newmuskip\scratchmuskipone \newmuskip\scratchmuskiptwo \newmuskip\scratchmuskipthree
\newtoks \scratchtoksone \newtoks \scratchtokstwo \newtoks \scratchtoksthree
\newbox \scratchboxone \newbox \scratchboxtwo \newbox \scratchboxthree
@@ -385,6 +381,8 @@
\newmuskip\zeromuskip \zeromuskip = 0mu
\newmuskip\onemuskip \onemuskip = 1mu
+\newmuskip\muquad \muquad = 18mu
+
\let\points \onepoint
\let\halfpoint\halfapoint
@@ -416,6 +414,7 @@
\chardef \plustwentythousand = 20000
\chardef \medcard = 32768
\chardef \maxcard = 65536 % pdftex has less mathchars
+\chardef \maxcardminusone = 65535
%D \macros
%D {doubleexpandafter,tripleexpandafter,expanded,startexpanded}
@@ -630,6 +629,8 @@
%D
%D In \LUATEX\ we have ways around this.
+% no longer \errorstopmode cf. plain tex 3.141592653
+
\normalprotected\def\tracingall
{\tracingonline \plusone
\tracingcommands \plusthree
@@ -646,8 +647,7 @@
\tracingifs \plusone
\tracingscantokens\plusone
\tracingnesting \plusone
- \tracingassigns \plustwo
- \errorstopmode}
+ \tracingassigns \plustwo}
\normalprotected\def\loggingall
{\tracingall
@@ -744,6 +744,9 @@
\normalprotected\def\settrue #1{\let#1\conditionaltrue }
\normalprotected\def\setfalse#1{\let#1\conditionalfalse}
+\normalprotected\def\settruevalue #1{\expandafter\let\csname#1\endcsname\conditionaltrue }
+\normalprotected\def\setfalsevalue#1{\expandafter\let\csname#1\endcsname\conditionalfalse}
+
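+%D These take the name of a conditional instead of the conditional itself, for
+%D instance (made||up name):
+%D
+%D \starttyping
+%D \settruevalue{c_my_mode} % equivalent to \settrue\c_my_mode
+%D \stoptyping
+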
\let\newconditional\setfalse
\let\ifconditional \ifcase
@@ -762,8 +765,8 @@
\normalprotected\def\newfraction#1{\let#1\!!plusone}
-%D It would be handy to have a primitive \unless\ifcase because then we could
-%D use nicer values. Anyhow, this conditional code used to be in the \type
+%D It would be handy to have a primitive \type {\unless\ifcase} because then we
+%D could use nicer values. Anyhow, this conditional code used to be in the \type
%D {syst-aux} module but is now promoted to here.
%D \macros
@@ -1012,12 +1015,18 @@
% \bindprimitive ifabsnum ifpdfabsnum
%D We need to make sure that we start up in \DVI\ mode, so, after testing for running
-%D \PDFTEX, we default to \DVI.
-
-\pdfoutput \zerocount
-\pdfminorversion \plusfive
-\pdfgentounicode \plusone
-\pdfinclusioncopyfonts\plusone
+%D \PDFTEX, we default to \DVI. Why?
+
+\pdfoutput \zerocount
+\pdfminorversion \plusseven
+\pdfgentounicode \plusone
+\pdfinclusioncopyfonts \plusone
+\pdfinclusionerrorlevel \zerocount
+\pdfdecimaldigits \plusten
+\pdfhorigin 1 true in
+\pdfvorigin \pdfhorigin
+\pdfimageresolution 300
+\pdfpkresolution 600
\normalprotected\def\nopdfcompression {\pdfobjcompresslevel\zerocount \pdfcompresslevel\zerocount}
\normalprotected\def\maximumpdfcompression{\pdfobjcompresslevel\plusnine \pdfcompresslevel\plusnine }
@@ -1080,6 +1089,12 @@
\voffset\zeropoint \let\voffset\relax \newdimen\voffset % prevent messing up
\hoffset\zeropoint \let\hoffset\relax \newdimen\hoffset % prevent messing up
+%D Handy.
+
+\suppresslongerror \plusone
+\suppressoutererror \plusone
+\suppressmathparerror \plusone
+
%D While cleaning this code up a bit I was listening to Heather Nova's \CD\ Redbird.
%D The first song on that \CD\ ends with a few lines suitable for ending this
%D initialization module:
@@ -1097,4 +1112,21 @@
%D
%D So let's see what \TEX\ can do now that we've opened up the basic machinery.
+%D Now we define a few helpers that we need in a very early stage. We have no
+%D message system yet, but redundant definitions are fatal anyway.
+
+\newcount\c_syst_helpers_n_of_namespaces \c_syst_helpers_n_of_namespaces\pluseight % 1-8 reserved for catcodes
+
+\def\v_interfaces_prefix_template_system{\number \c_syst_helpers_n_of_namespaces>>}
+%def\v_interfaces_prefix_template_system{\characters\c_syst_helpers_n_of_namespaces>>} % no \characters yet
+
+\normalprotected\def\installsystemnamespace#1%
+ {\ifcsname ??#1\endcsname
+ \immediate\write16{fatal error: duplicate system namespace '#1'}%
+ \expandafter\normalend
+ \else
+ \global\advance\c_syst_helpers_n_of_namespaces\plusone
+ \expandafter\edef\csname ??#1\endcsname{\v_interfaces_prefix_template_system}%
+ \fi}
+
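+%D A minimal sketch of its use (the namespace name is made up):
+%D
+%D \starttyping
+%D \installsystemnamespace{mystuff}
+%D % \??mystuff now expands to a unique numeric prefix; installing the
+%D % same name twice aborts the run with a fatal error
+%D \stoptyping
+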
\protect \endinput
diff --git a/tex/context/base/syst-lua.lua b/tex/context/base/syst-lua.lua
index e47041444..a6665f410 100644
--- a/tex/context/base/syst-lua.lua
+++ b/tex/context/base/syst-lua.lua
@@ -6,48 +6,96 @@ if not modules then modules = { } end modules ['syst-lua'] = {
license = "see context related readme files"
}
-local format, find, match, rep = string.format, string.find, string.match, string.rep
+local find, match = string.find, string.match
local tonumber = tonumber
-local S, lpegmatch, lpegtsplitat = lpeg.S, lpeg.match, lpeg.tsplitat
+local S, C, P, lpegmatch, lpegtsplitat = lpeg.S, lpeg.C, lpeg.P, lpeg.match, lpeg.tsplitat
-commands = commands or { }
-local commands = commands
-local context = context
+commands = commands or { }
+local commands = commands
-function commands.writestatus(...) logs.status(...) end -- overloaded later
+local implement = interfaces.implement
-local firstoftwoarguments = context.firstoftwoarguments -- context.constructcsonly("firstoftwoarguments" )
-local secondoftwoarguments = context.secondoftwoarguments -- context.constructcsonly("secondoftwoarguments")
-local firstofoneargument = context.firstofoneargument -- context.constructcsonly("firstofoneargument" )
-local gobbleoneargument = context.gobbleoneargument -- context.constructcsonly("gobbleoneargument" )
+local two_strings = interfaces.strings[2]
--- contextsprint(prtcatcodes,[[\ui_fo]]) -- firstofonearguments
--- contextsprint(prtcatcodes,[[\ui_go]]) -- gobbleonearguments
--- contextsprint(prtcatcodes,[[\ui_ft]]) -- firstoftwoarguments
--- contextsprint(prtcatcodes,[[\ui_st]]) -- secondoftwoarguments
+local context = context
+local csprint = context.sprint
+
+local prtcatcodes = tex.prtcatcodes
+
+implement { -- will be overloaded later
+ name = "writestatus",
+ arguments = two_strings,
+ actions = logs.status,
+}
+
+local ctx_firstoftwoarguments = context.firstoftwoarguments -- context.constructcsonly("firstoftwoarguments" )
+local ctx_secondoftwoarguments = context.secondoftwoarguments -- context.constructcsonly("secondoftwoarguments")
+local ctx_firstofoneargument = context.firstofoneargument -- context.constructcsonly("firstofoneargument" )
+local ctx_gobbleoneargument = context.gobbleoneargument -- context.constructcsonly("gobbleoneargument" )
+
+-- contextsprint(prtcatcodes,[[\ui_fo]]) -- ctx_firstofonearguments
+-- contextsprint(prtcatcodes,[[\ui_go]]) -- ctx_gobbleonearguments
+-- contextsprint(prtcatcodes,[[\ui_ft]]) -- ctx_firstoftwoarguments
+-- contextsprint(prtcatcodes,[[\ui_st]]) -- ctx_secondoftwoarguments
function commands.doifelse(b)
if b then
- firstoftwoarguments()
+ ctx_firstoftwoarguments()
+-- csprint(prtcatcodes,[[\ui_ft]]) -- ctx_firstoftwoarguments
else
- secondoftwoarguments()
+ ctx_secondoftwoarguments()
+-- csprint(prtcatcodes,[[\ui_st]]) -- ctx_secondoftwoarguments
+ end
+end
+
+function commands.doifelsesomething(b)
+ if b and b ~= "" then
+ ctx_firstoftwoarguments()
+-- csprint(prtcatcodes,[[\ui_ft]]) -- ctx_firstoftwoarguments
+ else
+ ctx_secondoftwoarguments()
+-- csprint(prtcatcodes,[[\ui_st]]) -- ctx_secondoftwoarguments
end
end
function commands.doif(b)
if b then
- firstofoneargument()
+ ctx_firstofoneargument()
+-- context.__flushdirect(prtcatcodes,[[\ui_fo]]) -- ctx_firstofonearguments
+ else
+ ctx_gobbleoneargument()
+-- context.__flushdirect(prtcatcodes,[[\ui_go]]) -- ctx_gobbleonearguments
+ end
+end
+
+function commands.doifsomething(b)
+ if b and b ~= "" then
+ ctx_firstofoneargument()
+-- context.__flushdirect(prtcatcodes,[[\ui_fo]]) -- ctx_firstofonearguments
else
- gobbleoneargument()
+ ctx_gobbleoneargument()
+-- context.__flushdirect(prtcatcodes,[[\ui_go]]) -- ctx_gobbleonearguments
end
end
function commands.doifnot(b)
if b then
- gobbleoneargument()
+ ctx_gobbleoneargument()
+-- csprint(prtcatcodes,[[\ui_go]]) -- ctx_gobbleonearguments
+ else
+ ctx_firstofoneargument()
+-- csprint(prtcatcodes,[[\ui_fo]]) -- ctx_firstofonearguments
+ end
+end
+
+function commands.doifnotthing(b)
+ if b and b ~= "" then
+ ctx_gobbleoneargument()
+-- csprint(prtcatcodes,[[\ui_go]]) -- ctx_gobbleonearguments
else
- firstofoneargument()
+ ctx_firstofoneargument()
+-- csprint(prtcatcodes,[[\ui_fo]]) -- ctx_firstofonearguments
end
end
@@ -59,16 +107,16 @@ end
function commands.doifelsespaces(str)
if find(str,"^ +$") then
- firstoftwoarguments()
+ ctx_firstoftwoarguments()
else
- secondoftwoarguments()
+ ctx_secondoftwoarguments()
end
end
local s = lpegtsplitat(",")
local h = { }
-function commands.doifcommonelse(a,b) -- often the same test
+local function doifelsecommon(a,b) -- often the same test
local ha = h[a]
local hb = h[b]
if not ha then
@@ -84,41 +132,112 @@ function commands.doifcommonelse(a,b) -- often the same test
for i=1,na do
for j=1,nb do
if ha[i] == hb[j] then
- firstoftwoarguments()
+ ctx_firstoftwoarguments()
return
end
end
end
- secondoftwoarguments()
+ ctx_secondoftwoarguments()
end
-function commands.doifinsetelse(a,b)
+local function doifelseinset(a,b)
local hb = h[b]
if not hb then hb = lpegmatch(s,b) h[b] = hb end
for i=1,#hb do
if a == hb[i] then
- firstoftwoarguments()
+ ctx_firstoftwoarguments()
return
end
end
- secondoftwoarguments()
+ ctx_secondoftwoarguments()
end
+implement {
+ name = "doifelsecommon",
+ arguments = two_strings,
+ actions = doifelsecommon
+}
+
+implement {
+ name = "doifelseinset",
+ arguments = two_strings,
+ actions = doifelseinset
+}
+
local pattern = lpeg.patterns.validdimen
-function commands.doifdimenstringelse(str)
+function commands.doifelsedimenstring(str)
if lpegmatch(pattern,str) then
- firstoftwoarguments()
+ ctx_firstoftwoarguments()
else
- secondoftwoarguments()
+ ctx_secondoftwoarguments()
end
end
-function commands.firstinset(str)
- local first = match(str,"^([^,]+),")
- context(first or str)
-end
+local p_first = C((1-P(",")-P(-1))^0)
-function commands.ntimes(str,n)
- context(rep(str,n or 1))
-end
+implement {
+ name = "firstinset",
+ arguments = "string",
+ actions = function(str) context(lpegmatch(p_first,str or "")) end
+}
+
+implement {
+ name = "ntimes",
+ arguments = { "string", "integer" },
+ actions = { string.rep, context }
+}
+
+implement {
+ name = "execute",
+ arguments = "string",
+ actions = os.execute -- wrapped in sandbox
+}
+
+-- function commands.write(n,str)
+-- if n == 18 then
+-- os.execute(str)
+-- elseif n == 16 then
+-- -- immediate
+-- logs.report(str)
+-- else
+-- -- at the tex end we can still drop the write / also delayed vs immediate
+-- context.writeviatex(n,str)
+-- end
+-- end
+
+implement {
+ name = "doifelsesame",
+ arguments = two_strings,
+ actions = function(a,b)
+ if a == b then
+ ctx_firstoftwoarguments()
+ else
+ ctx_secondoftwoarguments()
+ end
+ end
+}
+
+implement {
+ name = "doifsame",
+ arguments = two_strings,
+ actions = function(a,b)
+ if a == b then
+ ctx_firstofoneargument()
+ else
+ ctx_gobbleoneargument()
+ end
+ end
+}
+
+implement {
+ name = "doifnotsame",
+ arguments = two_strings,
+ actions = function(a,b)
+ if a == b then
+ ctx_gobbleoneargument()
+ else
+ ctx_firstofoneargument()
+ end
+ end
+}
diff --git a/tex/context/base/syst-lua.mkiv b/tex/context/base/syst-lua.mkiv
index 88a8c246e..5e82a9ea9 100644
--- a/tex/context/base/syst-lua.mkiv
+++ b/tex/context/base/syst-lua.mkiv
@@ -15,15 +15,18 @@
\unprotect
-\def\expdoifelse#1#2{\ctxcommand{doifelse(\!!bs#1\!!es==\!!bs#2\!!es)}}
-\def\expdoif #1#2{\ctxcommand{doif (\!!bs#1\!!es==\!!bs#2\!!es)}}
-\def\expdoifnot #1#2{\ctxcommand{doifnot (\!!bs#1\!!es==\!!bs#2\!!es)}}
+\def\expdoifelse#1#2{\clf_doifelsesame{#1}{#2}}
+\def\expdoif #1#2{\clf_doifsame {#1}{#2}}
+\def\expdoifnot #1#2{\clf_doifnotsame {#1}{#2}}
% \testfeatureonce{100000}{\doifelse{hello world}{here i am}{}} % 0.3
% \testfeatureonce{100000}{\expandabledoifelse{hello world}{here i am}{}} % 1.5
-\def\expdoifcommonelse#1#2{\ctxcommand{doifcommonelse("#1","#2")}}
-\def\expdoifinsetelse #1#2{\ctxcommand{doifinsetelse("#1","#2")}}
+\def\expdoifelsecommon#1#2{\clf_doifelsecommon{#1}{#2}}
+\def\expdoifelseinset #1#2{\clf_doifelseinset {#1}{#2}}
+
+\let\expdoifcommonelse\expdoifelsecommon
+\let\expdoifinsetelse \expdoifelseinset
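+
+%D A couple of usage sketches:
+%D
+%D \starttyping
+%D \expdoifelseinset {b}{a,b,c}{yes}{no} % yes
+%D \expdoifelsecommon{a,x}{x,y}{yes}{no} % yes
+%D \stoptyping
+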
% we define these here, just in case ...
@@ -32,7 +35,7 @@
\edef\!!bs{[\luastringsep[}
\edef\!!es{]\luastringsep]}
-\unexpanded\def\writestatus#1#2{\ctxcommand{writestatus(\!!bs#1\!!es,\!!bs#2\!!es)}}
+\unexpanded\def\writestatus#1#2{\clf_writestatus{#1}{#2}}
% A handy helper:
@@ -41,7 +44,7 @@
% We can omit the tex.ctxcatcodes here as nowadays we seldom
% change the regime at the \TEX\ end:
-\def\luaexpr#1{\directlua{tex.print(tostring(#1))}}
+\def\luaexpr#1{\ctxlua{tex.print(tostring(#1))}} % no use in shortcutting as it has to be compiled
% helpers:
@@ -50,4 +53,49 @@
\def\ui_ft#1#2{#1}
\def\ui_st#1#2{#2}
+%D Let's bring this under \LUA\ (and therefore \MKIV\ sandbox) control:
+
+% \setnewconstant\c_syst_write 18
+%
+% \unexpanded\def\write#1#% so we can handle \immediate
+% {\ifnum#1=\c_syst_write
+% \expandafter\syst_execute
+% \else
+% \normalwrite#1%
+% \fi}
+%
+% \unexpanded\def\syst_execute#1%
+% {\ctxlua{os.execute(\!!bs#1\!!es)}}
+
+%D But as we only use write 16 we could just as well do it all in \LUA\ and
+%D ignore the rest. Okay, we can still do writes here, but only when not
+%D blocked.
+
+% Nicer would be if we could just disable write 18 and keep os.execute,
+% which in fact we can do by defining write18 as a macro instead of a
+% primitive ... todo.
+
+% \unexpanded\def\write#1#%
+% {\syst_write{#1}}
+%
+% \def\syst_write#1#2%
+% {\ctxcommand{write(\number#1,\!!bs\normalunexpanded{#2}\!!es)}}
+%
+% \unexpanded\def\writeviatex#1#2%
+% {\ifx\normalwrite\relax\else
+% \normalwrite#1{#2}%
+% \fi}
+
+% we have no way yet to pickup \immediate unless we redefine it
+% we assume immediate execution
+
+\let\syst_write_execute\clf_execute % always {...}
+
+\unexpanded\def\write#1#%
+ {\ifnum#1=18
+ \expandafter\syst_write_execute
+ \else
+ \normalwrite#1%
+ \fi}
+
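+%D So a shell||escape write now ends up in the \LUA\ sandbox while the other
+%D streams are passed on to \type {\normalwrite} (the command below is only an
+%D example):
+%D
+%D \starttyping
+%D \write18{mtxrun --generate} % handed over to \clf_execute
+%D \write16{some status line}  % still a normal write
+%D \stoptyping
+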
\protect \endinput
diff --git a/tex/context/base/syst-rtp.mkiv b/tex/context/base/syst-rtp.mkiv
index f65e599ec..82c0778b4 100644
--- a/tex/context/base/syst-rtp.mkiv
+++ b/tex/context/base/syst-rtp.mkiv
@@ -13,6 +13,6 @@
\unprotect
-\def\executesystemcommand#1{\ctxlua{os.execute([[#1]])}}
+\let\executesystemcommand\clf_execute
\protect \endinput
diff --git a/tex/context/base/tabl-ltb.mkiv b/tex/context/base/tabl-ltb.mkiv
index f7fbc0390..4c892138e 100644
--- a/tex/context/base/tabl-ltb.mkiv
+++ b/tex/context/base/tabl-ltb.mkiv
@@ -138,6 +138,8 @@
\expandafter\secondoftwoarguments
\fi}
+\let\doiflinetablecparameterelse\doifelselinetablecparameter
+
\def\linetablecparameter#1%
{\csname
\ifcsname\??lec\number\linetablecolumn#1\endcsname
@@ -614,7 +616,7 @@
{\global\linetablepage\zerocount
\ifcase\linetablerepeat \else \ifcase\linetablepage
\doif{\linetableparameter\c!repeat}\v!no
- {\global\linetablepage\doifoddpageelse\plusone\plustwo}%
+ {\global\linetablepage\doifelseoddpage\plusone\plustwo}%
\fi \fi}
\def\flushlinetablehead
diff --git a/tex/context/base/tabl-ntb.mkii b/tex/context/base/tabl-ntb.mkii
index 465ed44b2..fbf43eb1e 100644
--- a/tex/context/base/tabl-ntb.mkii
+++ b/tex/context/base/tabl-ntb.mkii
@@ -423,7 +423,9 @@
\ifcsname\@@tblprefix\c!x\positivecol\c!y\positiverow\endcsname\csname\@@tblprefix\c!x\positivecol\c!y\positiverow\endcsname\fi
\ifcsname\@@tblprefix\c!x\negativecol\c!y\negativerow\endcsname\csname\@@tblprefix\c!x\negativecol\c!y\negativerow\endcsname\fi
% done
- \global\letcscsname\@@tblsplitafter\csname\@@tbl\@@tbl\c!after\endcsname
+ \global\letcscsname\@@tblsplitafter \csname\@@tbl\@@tbl\c!after \endcsname
+ \global\letcscsname\@@tblsplitbefore \csname\@@tbl\@@tbl\c!before \endcsname
+ \global\letcscsname\@@tblsplitsamepage\csname\@@tbl\@@tbl\c!samepage\endcsname
\relax}
% we cannot use +n (checking on number/last/first would slow down too much)
@@ -439,7 +441,9 @@
% \dorecurse{10}{\bTR \dorecurse{6}{\bTD xxx \eTD} \eTR}
% \eTABLE
-\globallet\@@tblsplitafter\relax
+\globallet\@@tblsplitafter \relax
+\globallet\@@tblsplitbefore \relax
+\globallet\@@tblsplitsamepage\relax
% split + page:
%
@@ -851,7 +855,17 @@
{\noalign
{\global\advance\tblrow\plusone
\global\tblcol\zerocount
- \global\tblspn\zerocount}%
+ \global\tblspn\zerocount
+ \bgroup % protect local vars
+ \@@tblsplitbefore
+ \egroup
+ \ifx\@@tblsplitsamepage\v!before
+ \unpenalty
+ \nobreak
+ \else\ifx\@@tblsplitsamepage\v!both
+ \unpenalty
+ \nobreak
+ \fi\fi}%
\nexttblcol
\kern\dimexpr\tbltblleftmargindistance-\tbltblcolumndistance\relax}
@@ -862,7 +876,14 @@
{\nointerlineskip
\ifnum\tblrow>\noftblheadlines
\ifnum\gettblnob\tblrow=\zerocount
- \allowbreak
+ \unpenalty
+ \ifx\@@tblsplitsamepage\v!after
+ \nobreak
+ \else\ifx\@@tblsplitsamepage\v!both
+ \nobreak
+ \else
+ \allowbreak
+ \fi\fi
\fi
\else
\allowbreak % else no proper head split off
diff --git a/tex/context/base/tabl-ntb.mkiv b/tex/context/base/tabl-ntb.mkiv
index a1ae94712..fcf8ac312 100644
--- a/tex/context/base/tabl-ntb.mkiv
+++ b/tex/context/base/tabl-ntb.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D This module as a more modern variant in xtables but as we follow a bit
+%D This module has a more modern variant in xtables but as we follow a bit
%D different approach with settings there, this mechanism will stay. In fact
%D each of them has its advantages. This module could be sped up a bit and made
%D more efficient by delegating some housekeeping to \LUA\ but it's not worth
@@ -19,8 +19,14 @@
%D real purpose in it. If needed I can squeeze out a few more percentages
%D runtime.
+% columndistance 'optimized' ... needs checking
+%
+% we don't need the alignment mechanism .. we can just pack the row in a box
+
\writestatus{loading}{ConTeXt Table Macros / Natural Tables}
+% sometimes this helps (with nc going wild): \setupTABLE[maxwidth=100cm]
+%
% bug: width 3cm is not honored and column becomes too wide
% as given width is added to distributed width
%
@@ -98,7 +104,7 @@
\to \t_tabl_ntb_cell_start
\unexpanded\def\tabl_ntb_cell_start
- {\inhibitblank
+ {% \inhibitblank
\dotagTABLEcell
%\tabl_ntb_next_level
\usenaturaltablelocalstyleandcolor\c!style\c!color
@@ -145,6 +151,9 @@
\newcount\c_tabl_ntb_maximum_row_span
\newcount\c_tabl_ntb_maximum_col_span
+\newcount\c_tabl_ntb_encountered_col
+\newcount\c_tabl_ntb_encountered_max
+
\newtoks\t_tabl_ntb
\newtoks\t_tabl_ntb_row
@@ -162,6 +171,11 @@
\newdimen\d_tabl_ntb_height
\newdimen\d_tabl_ntb_width
+\newdimen\d_tabl_ntb_leftmargindistance
+\newdimen\d_tabl_ntb_rightmargindistance
+\newdimen\d_tabl_ntb_columndistance
+\newdimen\d_tabl_ntb_maxwidth
+
\newtoks\everyTABLEpass % public
\newcount\tablecellrows % public (needs checking)
@@ -208,6 +222,8 @@
\installdirectcommandhandler \??naturaltable {naturaltable} % \??naturaltable
\installsimpleframedcommandhandler \??naturaltablelocal {naturaltablelocal} \??naturaltablelocal
+\installcorenamespace{naturaltabletal}
+\installcorenamespace{naturaltablegal}
\installcorenamespace{naturaltablenob}
\installcorenamespace{naturaltabletag}
\installcorenamespace{naturaltablecol}
@@ -219,11 +235,26 @@
\installcorenamespace{naturaltablehei}
\installcorenamespace{naturaltabledis}
\installcorenamespace{naturaltableaut}
+%installcorenamespace{naturaltablefwd} % forcedwidth
\installcorenamespace{naturaltabletxt}
\installcorenamespace{naturaltablespn}
\installcorenamespace{naturaltableref}
\installcorenamespace{naturaltableset}
\installcorenamespace{naturaltablecell}
+\installcorenamespace{naturaltablesqueeze}
+\installcorenamespace{naturaltabletok}
+
+\letvalue{\??naturaltablesqueeze }\donefalse
+\letvalue{\??naturaltablesqueeze\v!fit }\donetrue
+\letvalue{\??naturaltablesqueeze\v!fixed}\donetrue
+\letvalue{\??naturaltablesqueeze\v!broad}\donetrue
+\letvalue{\??naturaltablesqueeze\v!local}\donetrue
+
+\def\tabl_ntb_let_gal{\global\expandafter\let\csname\??naturaltablegal\m_tabl_tbl_level\endcsname}
+\def\tabl_ntb_get_gal{\csname\??naturaltablegal\m_tabl_tbl_level\endcsname}
+
+\def\tabl_ntb_let_tal#1{\global\expandafter\let\csname\??naturaltabletal\m_tabl_tbl_level:\number#1\endcsname}
+\def\tabl_ntb_get_tal#1{\csname\??naturaltabletal\m_tabl_tbl_level:\number#1\endcsname}
\def\tabl_ntb_set_nob#1{\expandafter\let\csname\??naturaltablenob\m_tabl_tbl_level:\number#1\endcsname\plusone}
\def\tabl_ntb_get_nob#1{\ifcsname\??naturaltablenob\m_tabl_tbl_level:\number#1\endcsname\plusone\else\zerocount\fi}
@@ -281,15 +312,22 @@
% \def\tabl_ntb_row_state#1#2{\ifcsname\??naturaltablerow\m_tabl_tbl_level:\number#1:\number#2\endcsname\zerocount\else\plusone\fi}
% \def\tabl_ntb_col_state#1#2{\ifcsname\??naturaltablecol\m_tabl_tbl_level:\number#1:\number#2\endcsname\zerocount\else\plusone\fi}
-\def\tabl_ntb_set_spn #1{\expandafter\let\csname\??naturaltablespn\m_tabl_tbl_level:\number#1\endcsname \!!plusone}
-\def\tabl_ntb_spn_doifelse#1{\doifelse {\csname\??naturaltablespn\m_tabl_tbl_level:\number#1\endcsname}\!!plusone}
+%def\tabl_ntb_set_spn #1{\expandafter\let\csname\??naturaltablespn\m_tabl_tbl_level:\number#1\endcsname \!!plusone}
+%def\tabl_ntb_spn_doifelse#1{\doifelse {\csname\??naturaltablespn\m_tabl_tbl_level:\number#1\endcsname}\!!plusone}
-\def\tabl_ntb_set_spn #1{\setvalue {\??naturaltablespn\m_tabl_tbl_level:\number#1}{1}}
-\def\tabl_ntb_spn_doifelse#1{\doifelsevalue{\??naturaltablespn\m_tabl_tbl_level:\number#1}{1}}
+%def\tabl_ntb_set_spn #1{\setvalue {\??naturaltablespn\m_tabl_tbl_level:\number#1}{1}}
+%def\tabl_ntb_spn_doifelse#1{\doifelsevalue{\??naturaltablespn\m_tabl_tbl_level:\number#1}{1}}
-\def\tabl_ntb_let_ref#1#2{\expandafter\glet\csname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname}
-\def\tabl_ntb_set_ref#1#2{\expandafter\xdef\csname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname}
-\def\tabl_ntb_get_ref#1#2{\ifcsname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname\csname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname\fi}
+\def\tabl_ntb_let_ref #1#2{\expandafter\glet\csname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname}
+\def\tabl_ntb_set_ref #1#2{\expandafter\xdef\csname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname}
+\def\tabl_ntb_get_ref #1#2{\ifcsname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname\csname\??naturaltableref\m_tabl_tbl_level:\number#1:\number#2\endcsname\fi}
+
+\def\tabl_ntb_set_spn #1{\expandafter\let\csname\??naturaltablespn\m_tabl_tbl_level:\number#1\endcsname \!!plusone}
+\def\tabl_ntb_spn_doifelse#1{\ifcase0\csname\??naturaltablespn\m_tabl_tbl_level:\number#1\endcsname\relax % could be inlined
+ \expandafter\secondoftwoarguments % unset
+ \else
+ \expandafter\firstoftwoarguments % a span
+ \fi}
% keep for a while:
%
@@ -317,6 +355,7 @@
\newif\ifenableTBLbreak \enableTBLbreakfalse
\newif\ifmultipleTBLheads \multipleTBLheadsfalse
\newif\iftightTBLrowspan \tightTBLrowspantrue
+\newif\iftightTBLcolspan \tightTBLcolspanfalse
\newif\iftraceTABLE \traceTABLEfalse
@@ -549,7 +588,6 @@
% local
\ifcsname\m_tabl_ntb_prefix\c!y++\m_tabl_ntb_positive_row\endcsname\csname\m_tabl_ntb_prefix\c!y++\m_tabl_ntb_positive_row\endcsname\fi
% done
- \xdef\m_tabl_ntb_after_split{\naturaltablelocalparameter\c!after}% to be checked
\relax}
% we cannot use +n (checking on number/last/first would slow down too much)
@@ -565,7 +603,9 @@
% \dorecurse{10}{\bTR \dorecurse{6}{\bTD xxx \eTD} \eTR}
% \eTABLE
-\let\m_tabl_ntb_after_split\relax
+\let\m_tabl_ntb_before_split\empty
+\let\m_tabl_ntb_after_split \empty
+\let\m_tabl_ntb_same_page \empty
% split + page:
%
@@ -577,6 +617,7 @@
\unexpanded\def\tabl_ntb_tr
{\c_tabl_ntb_running_col\zerocount
+ \c_tabl_ntb_encountered_col\zerocount
\advance\c_tabl_ntb_maximum_row\plusone
\iffirstargument
\expandafter\tabl_ntb_tr_yes
@@ -591,7 +632,8 @@
\def\m_tabl_ntb_default_nc{\naturaltableparameter\c!nc}
\unexpanded\def\tabl_ntb_td
- {\iffirstargument
+ {\advance\c_tabl_ntb_encountered_col\plusone
+ \iffirstargument
\expandafter\tabl_ntb_td_yes
\else
\expandafter\tabl_ntb_td_nop
@@ -651,7 +693,10 @@
% save text
\normalexpanded
{\tabl_ntb_set_txt_process\c_tabl_ntb_maximum_row\c_tabl_ntb_running_col{\the\c_tabl_ntb_maximum_row}{\the\c_tabl_ntb_running_col}}%
- {#1}{#2}}
+ {#1}{#2}%
+ \ifnum\c_tabl_ntb_encountered_col>\c_tabl_ntb_encountered_max
+ \c_tabl_ntb_encountered_max\c_tabl_ntb_encountered_col
+ \fi}
\def\tabl_ntb_td_nop[#1]#2\eTD
{\global\advance\c_tabl_ntb_spn\plusone\relax
@@ -671,7 +716,10 @@
\tabl_ntb_let_ref\c_tabl_ntb_maximum_row\c_tabl_ntb_running_col\empty
\normalexpanded
{\tabl_ntb_set_txt_process\c_tabl_ntb_maximum_row\c_tabl_ntb_running_col{\the\c_tabl_ntb_maximum_row}{\the\c_tabl_ntb_running_col}}%
- {#1}{#2}}
+ {#1}{#2}%
+ \ifnum\c_tabl_ntb_encountered_col>\c_tabl_ntb_encountered_max
+ \c_tabl_ntb_encountered_max\c_tabl_ntb_encountered_col
+ \fi}
\def\tabl_ntb_td_pass_n#1%
{\scratchcounter\numexpr\m_tabl_ntb_n-\c_tabl_ntb_running_col+\minusone-\c_tabl_ntb_spn\relax
@@ -850,10 +898,20 @@
\ifhmode\kern\zeropoint\fi % blocks \removeunwantedspaces: check this on icare handelingsschema
\resetcharacteralign % new
\setupcurrentnaturaltablelocal[\c!align={\v!right,\v!broad,\v!high},#1]%
+ %
+ \d_tabl_ntb_leftmargindistance \naturaltablelocalparameter\c!leftmargindistance\relax
+ \d_tabl_ntb_rightmargindistance\naturaltablelocalparameter\c!rightmargindistance\relax
+ \d_tabl_ntb_columndistance \naturaltablelocalparameter\c!columndistance\relax
+ \d_tabl_ntb_maxwidth \naturaltablelocalparameter\c!maxwidth\relax
+ %
\usesetupsparameter\naturaltablelocalparameter
\doifelse{\naturaltablelocalparameter\c!textwidth}\v!local
{\hsize\availablehsize}
{\hsize\naturaltablelocalparameter\c!textwidth}%
+ \enableTBLbreakfalse
+ \multipleTBLheadsfalse
+ \autoTBLspreadfalse
+ \tightTBLcolspanfalse
\processaction
[\naturaltablelocalparameter\c!split]
[ \v!yes=>\enableTBLbreaktrue,
@@ -865,7 +923,8 @@
\tabl_ntb_preset_parameters
\processallactionsinset
[\naturaltablelocalparameter\c!option]
- [\v!stretch=>\autoTBLspreadtrue]%
+ [\v!stretch=>\autoTBLspreadtrue,%
+ \v!tight=>\tightTBLcolspantrue]%
\linewidth\naturaltablelocalparameter\c!rulethickness % needs to be frozen
\dontcomplain
\c_tabl_ntb_running_col \zerocount
@@ -905,10 +964,10 @@
\unexpanded\def\eTABLE % beware, we need to get rid of spurious spaces when in hmode
{% tricky and dirty order -)
- \doifsometokselse\t_tabl_ntb_head % slow, better a flag
+ \doifelsesometoks\t_tabl_ntb_head % slow, better a flag
{\the\t_tabl_ntb_head
\c_tabl_ntb_n_of_head_lines\c_tabl_ntb_maximum_row\relax
- \doifsometokselse\t_tabl_ntb_next
+ \doifelsesometoks\t_tabl_ntb_next
{\the\t_tabl_ntb_next
\c_tabl_ntb_n_of_next_lines\numexpr\c_tabl_ntb_maximum_row-\c_tabl_ntb_n_of_head_lines\relax}%
{\c_tabl_ntb_n_of_next_lines\zerocount}% was 1
@@ -1013,7 +1072,7 @@
\ifautoTBLemptycell
\normalexpanded
{\tabl_ntb_set_txt_process\c_tabl_ntb_current_row\c_tabl_ntb_current_col{\the\c_tabl_ntb_current_row}{\the\c_tabl_ntb_current_col}}%
- {}{\strut}%
+ {\c!option=\v!tight}{\strut\kern\scaledpoint}% the kern forces the tight
\fi}
\def\tabl_ntb_loop_two
@@ -1045,79 +1104,179 @@
\newcount\c_tabl_prelocated_rows % \prelocateTBLrows{1000} may speed up large tables
-\def\tabl_ntb_row_start{\t_tabl_ntb_row\emptytoks}
-\def\tabl_ntb_row_stop {\normalexpanded{\t_tabl_ntb{\the\t_tabl_ntb\noexpand\tabl_ntb_row_align_start\the\t_tabl_ntb_row\tabl_ntb_row_align_stop}}}
+% \def\tabl_ntb_row_start{\t_tabl_ntb_row\emptytoks}
+% \def\tabl_ntb_row_stop {\normalexpanded{\t_tabl_ntb{\the\t_tabl_ntb\noexpand\tabl_ntb_row_align_start\the\t_tabl_ntb_row\tabl_ntb_row_align_stop}}}
+
+\def\tabl_ntb_row_start
+ {\t_tabl_ntb_row\emptytoks}
+
+\def\tabl_ntb_row_stop
+ {\ifenableTBLbreak
+ \tabl_ntb_row_stop_split
+ \else
+ \tabl_ntb_row_stop_boxed
+ \fi}
+
+\def\tabl_ntb_row_stop_boxed
+ {% \noindent % no, else double leftskip in narrower
+ \normalexpanded
+ {\t_tabl_ntb
+ {\the\t_tabl_ntb
+ % no need for init
+ \tabl_ntb_row_align_start
+ \the\t_tabl_ntb_row
+ \tabl_ntb_row_align_stop}}}
+
+\def\tabl_ntb_row_stop_split
+ {\ifcsname\??naturaltableset\m_tabl_tbl_level:\c!y++\the\c_tabl_ntb_current_row\endcsname
+ \tabl_ntb_row_stop_split_yes
+ \else
+ \tabl_ntb_row_stop_split_nop
+ \fi}
+
+\def\tabl_ntb_row_stop_split_nop
+ {\normalexpanded
+ {\t_tabl_ntb
+ {\the\t_tabl_ntb
+ \tabl_ntb_row_align_reset
+ \tabl_ntb_row_align_start
+ \the\t_tabl_ntb_row
+ \tabl_ntb_row_align_stop}}}
+
+\def\tabl_ntb_row_stop_split_yes
+ {\begingroup
+ \csname\??naturaltableset\m_tabl_tbl_level:\c!y++\the\c_tabl_ntb_current_row\endcsname
+ \xdef\m_tabl_ntb_before_split{\naturaltablelocalparameter\c!before}% to be checked
+ \xdef\m_tabl_ntb_after_split {\naturaltablelocalparameter\c!after}% to be checked
+ \xdef\m_tabl_ntb_same_page {\naturaltablelocalparameter\c!samepage}%
+ \endgroup
+ \normalexpanded
+ {\t_tabl_ntb
+ {\the\t_tabl_ntb
+ \tabl_ntb_row_align_set{\m_tabl_ntb_before_split}{\m_tabl_ntb_after_split}{\m_tabl_ntb_same_page}%
+ \tabl_ntb_row_align_start
+ \the\t_tabl_ntb_row
+ \tabl_ntb_row_align_stop}}}
+
+\unexpanded\def\tabl_ntb_row_align_set#1#2#3%
+ {\xdef\m_tabl_ntb_before_split{#1}%
+ \xdef\m_tabl_ntb_after_split {#2}%
+ \xdef\m_tabl_ntb_same_page {#3}}
+
+\unexpanded\def\tabl_ntb_row_align_reset
+ {\global\let\m_tabl_ntb_before_split\empty
+ \global\let\m_tabl_ntb_after_split \empty
+ \global\let\m_tabl_ntb_same_page \empty}
\def\tabl_ntb_prelocate_error
{\writestatus\m!system{fatal error: use \string\prelocateTBLrows\space to increase table memory (now: \the\c_tabl_prelocated_rows)}}
% \prelocateTBLrows{1000} % may speed up large tables
-\installcorenamespace{naturaltabletok}
-
\def\prelocateTBLrows#1% we start at zero so we have one too many, better play safe anyway
- {\dostepwiserecurse\c_tabl_prelocated_rows{#1}\plusone{\expandafter\newtoks\csname\??naturaltabletok\recurselevel\endcsname}%
+ {\dostepwiserecurse\c_tabl_prelocated_rows{#1}\plusone
+ {\expandafter\newtoks\csname\??naturaltabletok\recurselevel\endcsname}%
\def\tabl_ntb_row_start
{\ifnum\c_tabl_ntb_row<\c_tabl_prelocated_rows\relax
- \expandafter\let\expandafter\t_tabl_ntb_row\csname\??naturaltabletok\the\c_tabl_ntb_row\endcsname\t_tabl_ntb_row\emptytoks
+ \tabl_ntb_prelocate_okay
\else
\tabl_ntb_prelocate_error
\fi}%
\def\tabl_ntb_row_stop
- {\normalexpanded{\t_tabl_ntb{\the\t_tabl_ntb\noexpand\tabl_ntb_row_align_start\the\csname\??naturaltabletok\the\c_tabl_ntb_row\endcsname\tabl_ntb_row_align_stop}}}%
+ {\normalexpanded
+ {\t_tabl_ntb
+ {\the\t_tabl_ntb
+ \tabl_ntb_row_align_start
+ \the\csname\??naturaltabletok\the\c_tabl_ntb_row\endcsname
+ \tabl_ntb_row_align_stop}}}%
\global\c_tabl_prelocated_rows#1\relax}
+\def\tabl_ntb_prelocate_okay
+ {\expandafter\let\expandafter\t_tabl_ntb_row\csname\??naturaltabletok\the\c_tabl_ntb_row\endcsname\t_tabl_ntb_row\emptytoks}
+
% We use alignments to handle the empty (skipped) columns, so
% that we don't have to (re|)|calculate these.
-\def\tabl_ntb_column_skip
- {\global\advance\c_tabl_ntb_col\plusone}
-
-\def\tabl_ntb_column_next
- {\global\advance\c_tabl_ntb_col\plusone
- \kern\naturaltablelocalparameter\c!columndistance
- \aligntab}
-
-\def\tabl_ntb_column_span
- {\span}
-
\let\m_tabl_ntb_saved_row\!!zerocount
\let\m_tabl_ntb_saved_col\!!zerocount
-\def\tabl_ntb_row_align_start
- {\noalign{\tabl_ntb_row_align_reset}%
- \tabl_ntb_column_next
- \kern\dimexpr\naturaltablelocalparameter\c!leftmargindistance-\naturaltablelocalparameter\c!columndistance\relax}
-
-\unexpanded\def\tabl_ntb_row_align_reset
+\unexpanded\def\tabl_ntb_row_align_start
{\global\advance\c_tabl_ntb_row\plusone
- \global\c_tabl_ntb_col\zerocount
- \global\c_tabl_ntb_spn\zerocount}
+ \global\c_tabl_ntb_col\plusone
+ \global\c_tabl_ntb_spn\zerocount
+ \tabl_ntb_row_align_start_inject
+ \dostarttagged\t!tablerow\empty
+ \hbox\bgroup
+ \kern\dimexpr\d_tabl_ntb_leftmargindistance\relax}
\unexpanded\def\tabl_ntb_row_align_stop
- {\kern\dimexpr\naturaltablelocalparameter\c!rightmargindistance-\naturaltablelocalparameter\c!columndistance\relax
- \crcr
- \noalign
- {\nointerlineskip
- \ifnum\c_tabl_ntb_row>\c_tabl_ntb_n_of_head_lines
- \ifnum\tabl_ntb_get_nob\c_tabl_ntb_row=\zerocount
- \allowbreak
- \fi
- \else
- \allowbreak % else no proper head split off
- \fi
- \bgroup % protect local vars
- \m_tabl_ntb_after_split
- \egroup
- \bgroup % protect local vars
- \scratchcounter\numexpr\c_tabl_ntb_row+\plusone\relax
- \ifnum\scratchcounter>\c_tabl_ntb_n_of_hdnx_lines\relax
- \ifnum\scratchcounter<\c_tabl_ntb_maximum_row\relax
- \doifsomething{\naturaltablelocalparameter\c!spaceinbetween}
- {\blank[\naturaltablelocalparameter\c!spaceinbetween]}%
- \fi
- \fi
- \egroup}}
+ {\kern\dimexpr\d_tabl_ntb_rightmargindistance-\d_tabl_ntb_columndistance\relax
+ \egroup
+ \dostoptagged
+ \tabl_ntb_row_align_stop_inject}
+
+\unexpanded\def\tabl_ntb_before_page
+ {\ifx\m_tabl_ntb_same_page\v!before
+ % \blank[\v!samepage,\v!strong]%
+ \unpenalty
+ \nobreak
+ \else\ifx\m_tabl_ntb_same_page\v!both
+ % \blank[\v!samepage,\v!strong]%
+ \unpenalty
+ \nobreak
+ \fi\fi}
+
+\unexpanded\def\tabl_ntb_after_page
+ {\ifnum\c_tabl_ntb_row>\c_tabl_ntb_n_of_head_lines
+ \ifnum\tabl_ntb_get_nob\c_tabl_ntb_row=\zerocount
+ \unpenalty
+ \ifx\m_tabl_ntb_same_page\v!after
+ % \blank[\v!samepage,\v!strong]%
+ \nobreak
+ \else\ifx\m_tabl_ntb_same_page\v!both
+ % \blank[\v!samepage,\v!strong]%
+ \nobreak
+ \else
+ % \blank[\v!preference,\v!weak]%
+ \allowbreak
+ \fi\fi
+ \fi
+ \else
+ % \blank[\v!preference,\v!weak]%
+ \allowbreak % else no proper head split off
+ \fi}
+
+\unexpanded\def\tabl_ntb_inbetween
+ {\scratchcounter\numexpr\c_tabl_ntb_row+\plusone\relax
+ \ifnum\scratchcounter>\c_tabl_ntb_n_of_hdnx_lines\relax
+ \ifnum\scratchcounter<\c_tabl_ntb_maximum_row\relax
+ \edef\p_spaceinbetween{\naturaltablelocalparameter\c!spaceinbetween}%
+ \ifx\p_spaceinbetween\empty\else
+ \blank[\p_spaceinbetween]%
+ \fi
+ \fi
+ \fi}
+
+\unexpanded\def\tabl_ntb_row_align_start_inject
+ {\bgroup % protect local vars
+ \m_tabl_ntb_before_split
+ \egroup
+ \ifenableTBLbreak
+ \tabl_ntb_before_page
+ \fi}
+
+\unexpanded\def\tabl_ntb_row_align_stop_inject
+ {\par
+ \nointerlineskip
+ \ifenableTBLbreak
+ \tabl_ntb_after_page
+ \fi
+ \bgroup % protect local vars
+ \m_tabl_ntb_after_split
+ \egroup
+ \bgroup % protect local vars
+ \tabl_ntb_inbetween
+ \egroup}
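% So a row that sets samepage=before (or both) gets a \nobreak in front of it, a
% row that sets samepage=after (or both) gets one after it, and otherwise an
% \allowbreak after the row permits a split there; within the head lines a break
% is always allowed, else no proper head can be split off.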
\def\tabl_ntb_flush_content
{\the\everyTABLEpass
@@ -1125,47 +1284,56 @@
\global\c_tabl_ntb_col\zerocount
\global\c_tabl_ntb_row\zerocount
\global\advance\c_tabl_ntb_row\minusone
- \tabskip\zeropoint
- \dostarttagged\t!table\empty
- \dostarttagged\t!tablerow\empty
- \appendtoks\dostoptagged\dostarttagged\t!tablerow\empty\to\everycr
- \halign\bgroup
- \registerparoptions
- % watch out: tagging the cell happens at the outer level (faster)
- \ignorespaces\alignmark\alignmark\unskip
- \aligntab\aligntab
- \ignorespaces\alignmark\alignmark\unskip
- \cr % one too many
- \the\t_tabl_ntb
- \dostoptagged
- \egroup
+ \dostarttaggedchained\t!table\empty\??naturaltable
+ %\registerparoptions % (*) triggers max hsize
+ \the\t_tabl_ntb
\dostoptagged}
+\unexpanded\def\tabl_ntb_span#1%
+ {\dorecurse{#1}
+ {\hskip\tabl_ntb_get_wid\c_tabl_ntb_col\relax
+ \global\advance\c_tabl_ntb_col\plusone}}
+
+\unexpanded\def\tabl_ntb_skip#1%
+ {\global\advance\c_tabl_ntb_col#1\relax}
+
+\unexpanded\def\tabl_ntb_plus
+ {\global\advance\c_tabl_ntb_col\plusone
+ \kern\d_tabl_ntb_columndistance}
+
+% \setvalue{\??naturaltablecell\the\c_tabl_ntb_none}#1#2%
+% {\scratchcounter\tabl_ntb_get_col{#1}{#2}\relax
+% \ifnum\scratchcounter>\zerocount
+% \normalexpanded
+% {\t_tabl_ntb_row
+% {\the\t_tabl_ntb_row
+% \tabl_ntb_span{\the\scratchcounter}%
+% \tabl_ntb_plus}}%
+% \fi}
+
\setvalue{\??naturaltablecell\the\c_tabl_ntb_none}#1#2%
{\scratchcounter\tabl_ntb_get_col{#1}{#2}\relax
\ifnum\scratchcounter>\zerocount
- \advance\scratchcounter\minusone
- \ifnum\scratchcounter>\zerocount
- \tabl_ntb_span
- \fi
- \t_tabl_ntb_row\expandafter{\the\t_tabl_ntb_row\tabl_ntb_column_next}
+ \normalexpanded
+ {\t_tabl_ntb_row
+ {\the\t_tabl_ntb_row
+ \tabl_ntb_span{\the\scratchcounter}}}%
\fi}
\setvalue{\??naturaltablecell\the\c_tabl_ntb_cell}#1#2%
{\t_tabl_ntb_row\expandafter{\the\t_tabl_ntb_row\tabl_ntb_pass #1 #2 }% space delimited -> less tokens
\scratchcounter\tabl_ntb_get_col{#1}{#2}\relax
\ifnum\scratchcounter>\zerocount
- \advance\scratchcounter\minusone
- \ifnum\scratchcounter>\zerocount
- \tabl_ntb_span
- \fi
- \t_tabl_ntb_row\expandafter{\the\t_tabl_ntb_row\tabl_ntb_column_next}
+ \normalexpanded
+ {\t_tabl_ntb_row
+ {\the\t_tabl_ntb_row
+ \ifnum\scratchcounter=\plusone
+ \tabl_ntb_plus
+ \else
+ \tabl_ntb_skip{\the\scratchcounter}%
+ \fi}}%
\fi}
-\def\tabl_ntb_span
- {\dorecurse\scratchcounter{\t_tabl_ntb_row\expandafter{\the\t_tabl_ntb_row\tabl_ntb_column_span}}%
- \dorecurse\scratchcounter{\t_tabl_ntb_row\expandafter{\the\t_tabl_ntb_row\tabl_ntb_column_skip}}}
-
\unexpanded\def\tabl_ntb_cell#1#2%
{\csname\??naturaltablecell\the\tabl_ntb_get_tag{#1}{#2}\endcsname{#1}{#2}}
@@ -1183,16 +1351,22 @@
{\d_tabl_ntb_width\zeropoint
\scratchcounter\c_tabl_ntb_col
\!!counta\tabl_ntb_get_col{#1}{#2}\relax
- \dorecurse\!!counta
- {\advance\d_tabl_ntb_width\dimexpr
- \tabl_ntb_get_wid\scratchcounter
- +\naturaltablelocalparameter\c!columndistance
- \ifnum\recurselevel<\!!counta
- +\tabl_ntb_get_dis\scratchcounter
- \fi
- \relax
- \advance\scratchcounter\plusone}%
- \advance\d_tabl_ntb_width-\naturaltablelocalparameter\c!columndistance\relax
+ \ifcase\!!counta\or
+ \advance\d_tabl_ntb_width\dimexpr
+ \tabl_ntb_get_wid\scratchcounter
+ \relax
+ \advance\scratchcounter\plusone
+ \else
+ \dorecurse\!!counta
+ {\advance\d_tabl_ntb_width\dimexpr
+ \tabl_ntb_get_wid\scratchcounter
+ \ifnum\recurselevel<\!!counta
+ +\d_tabl_ntb_columndistance
+ +\tabl_ntb_get_dis\scratchcounter
+ \fi
+ \relax
+ \advance\scratchcounter\plusone}%
+ \fi
\setbox\scratchbox\hbox{\tabl_ntb_get_txt{#1}{#2}}%
\tabl_ntb_set_ht{#1}{#2}{\the\ht\scratchbox}%
\tabl_ntb_set_wd{#1}{#2}{\the\wd\scratchbox}%
@@ -1223,16 +1397,22 @@
% width
\d_tabl_ntb_width\zeropoint
\scratchcounter\c_tabl_ntb_col
- \dorecurse\!!counta
- {\advance\d_tabl_ntb_width\dimexpr
- \tabl_ntb_get_wid\scratchcounter
- +\naturaltablelocalparameter\c!columndistance
- \ifnum\recurselevel<\!!counta
- +\tabl_ntb_get_dis\scratchcounter
- \fi
- \relax
- \advance\scratchcounter\plusone}%
- \advance\d_tabl_ntb_width-\naturaltablelocalparameter\c!columndistance\relax
+ \ifcase\!!counta\or
+ \advance\d_tabl_ntb_width\dimexpr
+ \tabl_ntb_get_wid\scratchcounter
+ \relax
+ \advance\scratchcounter\plusone
+ \else
+ \dorecurse\!!counta
+ {\advance\d_tabl_ntb_width\dimexpr
+ \tabl_ntb_get_wid\scratchcounter
+ \ifnum\recurselevel<\!!counta
+ +\d_tabl_ntb_columndistance
+ +\tabl_ntb_get_dis\scratchcounter
+ \fi
+ \relax
+ \advance\scratchcounter\plusone}%
+ \fi
% cell
\setbox\scratchbox\hbox attr \taggedattribute \attribute\taggedattribute \bgroup
\dotagTABLEsignal % maybe we need to add some packaging in this case
@@ -1255,18 +1435,40 @@
\fi
\dostoptagged} % right spot
+% \def\tabl_ntb_cell_finalize
+% {\doifnotinset\localwidth{\v!fit,\v!broad}% user set
+% {\scratchdimen\tabl_ntb_get_aut\c_tabl_ntb_col\relax
+% \ifdim\localwidth>\scratchdimen
+% \tabl_ntb_set_aut\c_tabl_ntb_col{\the\dimexpr\localwidth}%
+% \fi}}
+
\def\tabl_ntb_cell_finalize
- {\doifnotinset\localwidth{\v!fit,\v!broad}% user set
- {\scratchdimen\tabl_ntb_get_aut\c_tabl_ntb_col\relax
- \ifdim\localwidth>\scratchdimen
- \tabl_ntb_set_aut\c_tabl_ntb_col{\the\dimexpr\localwidth}%
- \fi}}
+ {\ifx\localwidth\v!fit
+ % nothing
+ \else\ifx\localwidth\v!broad
+ % nothing
+ \else\ifx\localwidth\empty
+ % nothing (safeguard)
+ \else
+ \tabl_ntb_cell_finalize_indeed
+ \fi\fi\fi}
+
+\def\tabl_ntb_cell_finalize_indeed
+ {\scratchdimen\tabl_ntb_get_aut\c_tabl_ntb_col\relax
+ \ifdim\localwidth>\scratchdimen
+ \tabl_ntb_set_aut\c_tabl_ntb_col{\the\dimexpr\localwidth}%
+ \fi}
+
+\let\tabl_ntb_preroll\relax
\def\tabl_ntb_table_stop
- {\setbox\scratchbox\hbox
- {\setupcurrentnaturaltablelocal[\c!frame=\v!off,\c!background=,\c!align=\v!no]%
- \inheritednaturaltablelocalframed{\strut}}%
- \edef\minimalcellheight{\the\ht\scratchbox}% not used
+ {\forgetall % new, here see narrower-004.tex
+ %\setbox\scratchbox\hbox
+ % {\letnaturaltablelocalparameter\c!frame\v!off
+ % \letnaturaltablelocalparameter\c!background\empty
+ % \letnaturaltablelocalparameter\c!align\v!no
+ % \inheritednaturaltablelocalframed{\strut}}%
+ %\edef\minimalcellheight{\the\ht\scratchbox}% not used
\dorecurse\c_tabl_ntb_maximum_col
{\tabl_ntb_let_aut\recurselevel\zeropoint
% new
@@ -1275,14 +1477,23 @@
{\tabl_ntb_let_wd\recurselevel\c_tabl_ntb_current_col_one\zeropoint
\tabl_ntb_let_ht\recurselevel\c_tabl_ntb_current_col_one\zeropoint}%
% till here
+ \tabl_ntb_let_tal\recurselevel\zerocount
\tabl_ntb_let_wid\recurselevel\zeropoint
\tabl_ntb_let_dis\recurselevel\zeropoint}%
\dorecurse\c_tabl_ntb_maximum_row
{\tabl_ntb_let_hei\recurselevel\maxdimen}%
+ \tabl_ntb_let_gal\zerocount
+\tabl_ntb_preroll\relax
\c_tabl_tbl_pass\plusone
\let\tabl_ntb_pass\tabl_ntb_pass_one
\let\tabl_ntb_cell_process\tabl_ntb_cell_process_a
- \setbox0\vbox{\settrialtypesetting \tabl_ntb_flush_content}%
+ \setbox\scratchbox\vbox{\settrialtypesetting \tabl_ntb_flush_content}%
+ \ifcase\tabl_ntb_get_gal\or
+ % \c_tabl_tbl_pass\plusone
+ % \let\tabl_ntb_pass\tabl_ntb_pass_one
+ \let\tabl_ntb_cell_process\tabl_ntb_cell_process_a_extra
+ \setbox\scratchbox\vbox{\settrialtypesetting \tabl_ntb_flush_content}%
+ \fi
\tabl_ntb_let_dis\c_tabl_ntb_maximum_col\zeropoint
\ifautoTBLspread
% experimental, stretch non fixed cells to \hsize
@@ -1291,21 +1502,24 @@
\tabl_ntb_stretch_widths
\let\tabl_ntb_cell_process\tabl_ntb_cell_process_b
\setbox\scratchbox\vbox{\settrialtypesetting \tabl_ntb_flush_content}%
- \else\ifdim\wd0>\hsize
+ \else\ifdim\wd\scratchbox>\hsize
\ifautoTBLhsize
\tabl_ntb_check_widths_one % trial run
\tabl_ntb_check_widths_two % real run
\let\tabl_ntb_cell_process\tabl_ntb_cell_process_b
\setbox\scratchbox\vbox{\settrialtypesetting \tabl_ntb_flush_content}%
\fi
- \else\ifautoTBLrowspan\ifnum\c_tabl_ntb_maximum_row_span>1 % max ?
+ \else\ifautoTBLrowspan\ifnum\c_tabl_ntb_maximum_row_span>\plusone % max ?
% added jan 2002 because nx=* did no longer work
+ \ifnum\c_tabl_ntb_encountered_max<\c_tabl_ntb_maximum_col
+ % added jun 2014 because someone had fewer columns than nx .. sigh / see *nx*
+ \writestatus\m!TABLE{missing\space\number\numexpr\c_tabl_ntb_maximum_col-\c_tabl_ntb_encountered_max\relax\space column(s), guessing widths}%
+ \fi
\edef\savedhsize{\the\hsize}%
- \hsize\wd0\relax % new per 17/04/2006
+ \hsize\wd\scratchbox\relax % new per 17/04/2006
\tabl_ntb_check_widths_one % trial run
\tabl_ntb_check_widths_two % real run
\hsize\savedhsize
- %
\let\tabl_ntb_cell_process\tabl_ntb_cell_process_c
\setbox\scratchbox\vbox{\settrialtypesetting \tabl_ntb_flush_content}%
\fi\fi\fi\fi
@@ -1319,39 +1533,56 @@
\c_tabl_tbl_pass\plusthree
\let\tabl_ntb_pass\tabl_ntb_pass_three
\ifnum\m_tabl_tbl_level>\plusone
- \expandafter\tabl_tbl_split_nop
+ \tabl_tbl_split_nop
\else\ifenableTBLbreak
- \doubleexpandafter\tabl_tbl_split_yes
+ \tabl_tbl_split_yes
\else
- \doubleexpandafter\tabl_tbl_split_nop
- \fi\fi{\tabl_ntb_flush_content}}
+ \tabl_tbl_split_nop
+ \fi\fi}
\def\tabl_ntb_stretch_widths % more variants, e.g. a max to \dimend
{\ifcase\c_tabl_ntb_maximum_col\else % else division by zero
\!!dimend\zeropoint
- \!!dimene\hsize
+ \!!dimene\dimexpr
+ \hsize
+ -\d_tabl_ntb_leftmargindistance
+ -\d_tabl_ntb_rightmargindistance
+ +\d_tabl_ntb_columndistance
+ \relax
\dorecurse\c_tabl_ntb_maximum_col
- {\advance\!!dimend\dimexpr\tabl_ntb_get_wid\recurselevel+\naturaltablelocalparameter\c!columndistance\relax
- \advance\!!dimene-\tabl_ntb_get_dis\recurselevel}%
- \advance\!!dimend\dimexpr-\naturaltablelocalparameter\c!columndistance+\naturaltablelocalparameter\c!leftmargindistance+\naturaltablelocalparameter\c!rightmargindistance\relax
+ {\advance\!!dimend\dimexpr
+ \tabl_ntb_get_wid\recurselevel
+ \relax
+ \advance\!!dimene\dimexpr
+ -\tabl_ntb_get_dis\recurselevel
+ -\d_tabl_ntb_columndistance
+ \relax}%
+ \relax
% distribute width (stretch)
\ifdim\!!dimend<\!!dimene
\advance\!!dimend-\!!dimene
- \!!dimend-\!!dimend
\divide\!!dimend\c_tabl_ntb_maximum_col
\dorecurse\c_tabl_ntb_maximum_col
- {\tabl_ntb_set_wid\recurselevel{\the\dimexpr\tabl_ntb_get_wid\recurselevel+\!!dimend\relax}}%
+ {\tabl_ntb_set_wid\recurselevel{\the\dimexpr\tabl_ntb_get_wid\recurselevel-\!!dimend\relax}}%
\fi
\fi}
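% In other words: the slack between the available width and the sum of the
% natural column widths is divided evenly over the columns, so with 3 columns
% and 12pt of slack each column becomes 4pt wider (\!!dimend is negative at
% that point, hence the subtraction).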
-\def\tabl_tbl_split_nop#1%
- {\setbox\b_tabl_ntb_final\vbox{#1}%
+\def\tabl_tbl_split_nop
+ {\setbox\b_tabl_ntb_final\vbox{\tabl_ntb_flush_content}%
\postprocessTABLEbox\b_tabl_ntb_final
\beforeTABLEbox
+ % packaging prevents max hsized box
+ % \hbox{\registerparoptions\box\b_tabl_ntb_final}% (*) better here
+ % better :
+ \ifinsidefloat
+ % nothing, else we get a \hsized box
+ \else
+ \registerparoptions % (*) better here
+ \fi
\box\b_tabl_ntb_final
\afterTABLEbox}
-\def\tabl_tbl_split_yes % #1
+\def\tabl_tbl_split_yes
{\ifinsidesplitfloat
\donetrue
\else\ifinsidefloat
@@ -1369,14 +1600,15 @@
\let\extratblsplitheight\zeropoint % additional space taken by before/afterTABLEsplitbox
-\def\tabl_ntb_split_box#1%
+\def\tabl_ntb_split_box
{\resettsplit
\def\tsplitminimumfreelines{2}%
\def\tsplitminimumfreespace{\dimexpr\extratblsplitheight+\naturaltablelocalparameter\c!splitoffset\relax}%
\def\tsplitbeforeresult {\beforeTABLEsplitbox}%
\def\tsplitafterresult {\afterTABLEsplitbox}%
\def\tsplitafter {\m_tabl_ntb_after_split}%
- \setbox\tsplitcontent\vbox{#1}%
+ \def\tsplitbefore {\m_tabl_ntb_before_split}% supported ?
+ \setbox\tsplitcontent\vbox{\tabl_ntb_flush_content}%
\ifmultipleTBLheads
\dorecurse\c_tabl_ntb_n_of_head_lines
{\setbox\scratchbox\vsplit\tsplitcontent to \lineheight
@@ -1385,8 +1617,10 @@
{\setbox\scratchbox\vsplit\tsplitcontent to \lineheight
\setbox\tsplitnext\vbox{\unvcopy\tsplitnext\unvcopy\scratchbox}}%
\fi
- \doifsomething{\naturaltablelocalparameter\c!spaceinbetween}
- {\def\tsplitinbetween{\blank[\naturaltablelocalparameter\c!spaceinbetween]}}%
+ \edef\p_spaceinbetween{\naturaltablelocalparameter\c!spaceinbetween}%
+ \ifx\p_spaceinbetween\empty\else
+ \def\tsplitinbetween{\blank[\p_spaceinbetween]}%
+ \fi
\def\postprocesstsplit{\postprocessTABLEsplitbox{\box\tsplitresult}}%
\handletsplit}
@@ -1406,15 +1640,23 @@
\def\tabl_ntb_check_widths_indeed#1%
{\iftraceTABLE\tabl_ntb_show_widths{B#1}\fi
\!!counta\zerocount
- \!!dimena\dimexpr\hsize-\naturaltablelocalparameter\c!leftmargindistance-\naturaltablelocalparameter\c!rightmargindistance-\naturaltablelocalparameter\c!columndistance\relax
+ \!!dimena\dimexpr
+ \hsize
+ -\d_tabl_ntb_leftmargindistance
+ -\d_tabl_ntb_rightmargindistance
+ -\d_tabl_ntb_columndistance
+ \relax
\dorecurse\c_tabl_ntb_maximum_col
{\scratchdimen\tabl_ntb_get_aut\recurselevel\relax
- \advance\!!dimena-\tabl_ntb_get_dis\recurselevel\relax
+ \advance\!!dimena\dimexpr
+ -\tabl_ntb_get_dis\recurselevel
+ -\d_tabl_ntb_columndistance
+ \relax
\ifdim\scratchdimen>\zeropoint\relax
\advance\!!dimena -\scratchdimen
\else
\scratchdimen\tabl_ntb_get_wid\recurselevel\relax
- \ifdim\scratchdimen>\naturaltablelocalparameter\c!maxwidth\relax
+ \ifdim\scratchdimen>\d_tabl_ntb_maxwidth\relax
\ifcase#1\else\tabl_ntb_let_wid\recurselevel\zeropoint\fi
\advance\!!counta \plusone
\else
@@ -1423,7 +1665,9 @@
\else
% actually this should only be done when the column is spanned by a previous
% one, i.e. an extra double loop and a status variable would be needed
- \advance\!!counta \plusone
+ \ifnum\c_tabl_ntb_encountered_max=\c_tabl_ntb_maximum_col % *nx* bah
+ \advance\!!counta \plusone % setting maxwidth to a large value also works
+ \fi
\fi
\fi
\fi}%
@@ -1500,7 +1744,6 @@
\fi
\fi}
-
\def\tabl_ntb_check_heights_one
{\dorecurse\c_tabl_ntb_maximum_row
{\c_tabl_ntb_current_row_three\recurselevel\relax
@@ -1515,7 +1758,9 @@
\def\tabl_ntb_show_widths#1%
{\vbox
- {\forgetall\tttf[#1]\dorecurse\c_tabl_ntb_maximum_col
+ {\forgetall
+ \tttf[#1]%
+ \dorecurse\c_tabl_ntb_maximum_col
{\scratchdimen\tabl_ntb_get_wid\recurselevel\relax
[\recurselevel:\the\scratchdimen]}}}
@@ -1532,46 +1777,124 @@
% \setsecondpasscharacteralign \checkalignment{#3}% {\strut#2\unskip}%
% \ignorespaces}
-\def\tabl_ntb_char_align
- {\doifelse{\naturaltablelocalparameter\c!aligncharacter}\v!yes
- \tabl_ntb_char_align_indeed
- \gobbletwoarguments}
+\def\tabl_ntb_char_align % called often
+ {\edef\p_characteralign{\naturaltablelocalparameter\c!aligncharacter}%
+ \ifx\p_characteralign\v!yes
+ \ifcase\c_tabl_tbl_pass\or
+ \tabl_ntb_let_tal\currentTABLEcolumn\plusone
+ \tabl_ntb_let_gal\plusone
+ \fi
+ \expandafter\tabl_ntb_char_align_indeed
+ \else
+ \expandafter\gobbletwoarguments
+ \fi}
\def\tabl_ntb_char_align_indeed#1#2% row column
{\ifcase\c_tabl_tbl_pass \or
- \setcharacteralign{#2}{\naturaltablelocalparameter\c!alignmentcharacter}%
+ \setcharacteralign{#2}{\naturaltablelocalparameter\c!alignmentcharacter}% we could store the character in tal
\fi
\typo_charalign_adapt_font
\signalcharacteralign{#2}{#1}}
+\unexpanded\def\tabl_ntb_cell_process_a_extra#1#2%
+ {\ifcase\tabl_ntb_get_tal{#2}\relax
+ \expandafter\tabl_ntb_cell_process_x
+ \else
+ \expandafter\tabl_ntb_cell_process_a
+ \fi{#1}{#2}}
+
+\unexpanded\def\tabl_ntb_cell_process_x#1#2[#3]#4%
+ {}
+
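% So when character alignment is seen in the first pass (gal gets set) an extra
% trial pass is done in which only cells in character aligned columns (tal set)
% are remeasured; all other cells are gobbled by the empty _x variant above.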
+% problem: when a span doesn't break we can have a span that is the sum of its
+% cells but still too small .. a chicken-and-egg problem ... for that we should
+% also have a smallest-width run
+%
+% nilling the background makes a run up to 25% faster
+
+\def\tabl_ntb_cell_process_a_check_span_one
+ {\ifautosqueezeTBLspan
+ \edef\p_width{\naturaltablelocalparameter\c!width}%
+ \csname\??naturaltablesqueeze\ifcsname\??naturaltablesqueeze\p_width\endcsname\p_width\fi\endcsname
+ \else
+ \donetrue
+ \fi
+ \ifdone % brr, 0
+ \ifnum\scratchcounter>\plusone
+ \tabl_ntb_set_spn\c_tabl_ntb_col
+ \fi
+ \fi}
+
+% \def\tabl_ntb_cell_process_a_check_span_two_yes
+% {\iftightTBLcolspan
+% \donefalse
+% \else
+% \ifnum\scratchcounter>\plusone
+% \begingroup
+% \edef\p_width{\naturaltablelocalparameter\c!width}%
+% \csname\??naturaltablesqueeze\ifcsname\??naturaltablesqueeze\p_width\endcsname\p_width\fi\endcsname
+% \ifdone
+% \endgroup
+% \edef\p_option{\naturaltablelocalparameter\c!option}%
+% \ifx\p_option\v!tight\donefalse\else\donetrue\fi
+% \else
+% % a dimension
+% \endgroup
+% \donefalse
+% \fi
+% \else
+% \edef\p_option{\naturaltablelocalparameter\c!option}%
+% \ifx\p_option\v!tight\donefalse\else\donetrue\fi
+% \fi
+% \fi
+% \ifdone
+% \ifdim\tabl_ntb_get_wid\c_tabl_ntb_col<\wd\scratchbox
+% \tabl_ntb_set_wid\c_tabl_ntb_col{\the\wd\scratchbox}%
+% \fi
+% \fi}
+
+% \def\tabl_ntb_cell_process_a_check_span_two_nop
+% {\ifnum\scratchcounter>\plusone
+% \edef\p_width{\naturaltablelocalparameter\c!width}%
+% \csname\??naturaltablesqueeze\ifcsname\??naturaltablesqueeze\p_width\endcsname\p_width\fi\endcsname
+% \else
+% \donetrue
+% \fi
+% \ifdone
+% \ifdim\tabl_ntb_get_wid\c_tabl_ntb_col<\wd\scratchbox
+% \tabl_ntb_set_wid\c_tabl_ntb_col{\the\wd\scratchbox}%
+% \fi
+% \fi}
+
+\let\tabl_ntb_cell_process_a_check_span_two_yes\relax
+
+\def\tabl_ntb_cell_process_a_check_span_two_nop
+ {\ifdim\tabl_ntb_get_wid\c_tabl_ntb_col<\wd\scratchbox
+ \tabl_ntb_set_wid\c_tabl_ntb_col{\the\wd\scratchbox}%
+ \fi}
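% Net effect: a column that takes part in a span no longer gets its width bumped
% by wide content (the yes branch is just \relax); only regular columns adapt
% their registered width in the nop branch.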
+
\unexpanded\def\tabl_ntb_cell_process_a#1#2[#3]#4% grouping added ! ! !
{\bgroup
+ \letnaturaltablelocalparameter\c!option\empty
\tabl_ntb_setup_cell{#1}{#2}%
+ \setupcurrentnaturaltablelocal[#3]%
+ \letnaturaltablelocalparameter\c!background\empty
+ \letnaturaltablelocalparameter\c!frame\v!off
+ \scratchcounter\tabl_ntb_get_col{#1}{#2}\relax
\setbox\scratchbox\hbox
{\scratchdimen\naturaltablelocalparameter\c!distance\relax
\ifdim\scratchdimen>\tabl_ntb_get_dis{#2}\relax
\tabl_ntb_set_dis{#2}{\the\scratchdimen}%
\fi
- \setupcurrentnaturaltablelocal[#3,\c!background=,\c!frame=\v!off]% 25% faster
\inheritednaturaltablelocalframed{\tabl_ntb_cell_start\tabl_ntb_char_align{#1}{#2}#4\tabl_ntb_cell_stop\tabl_ntb_cell_finalize}}%
\scratchdimen\tabl_ntb_get_wid\c_tabl_ntb_col\relax
\ifdim\wd\scratchbox>\scratchdimen
\ifsqueezeTBLspan
- \ifautosqueezeTBLspan
- \doifinsetelse{\naturaltablelocalparameter\c!width}{\v!fit,\v!fixed,\v!broad,\v!local}
- \donetrue \donefalse
- \else
- \donetrue
- \fi
- \ifdone % brr, 0
- \ifnum\tabl_ntb_get_col{#1}{#2}>\plusone \tabl_ntb_set_spn\c_tabl_ntb_col\fi
- \fi
+ \tabl_ntb_cell_process_a_check_span_one
\fi
\tabl_ntb_spn_doifelse\c_tabl_ntb_col
- \donothing
- {\ifdim\tabl_ntb_get_wid\c_tabl_ntb_col<\wd\scratchbox
- \tabl_ntb_set_wid\c_tabl_ntb_col{\the\wd\scratchbox}%
- \fi}% auto set
+ \tabl_ntb_cell_process_a_check_span_two_yes
+ \tabl_ntb_cell_process_a_check_span_two_nop
\fi
\scratchcounter\numexpr\c_tabl_ntb_row+\plusone\relax
\scratchdimen\tabl_ntb_get_hei\scratchcounter\relax
@@ -1592,8 +1915,7 @@
% unless span
\tabl_ntb_set_aut\c_tabl_ntb_col{\the\wd\scratchbox}%
% to be translated
- \writestatus\m!TABLE
- {no auto width in (\number#1,\number#2)\space\the\wd\scratchbox/\the\hsize}%
+ \writestatus\m!TABLE{no auto width in (\number#1,\number#2)\space\the\wd\scratchbox/\the\hsize}%
\fi\fi
\fi
\fi
@@ -1608,7 +1930,9 @@
\unexpanded\def\tabl_ntb_cell_process_b_c#1#2#3[#4]#5%
{\setbox\scratchbox\hbox
{\tabl_ntb_setup_cell{#2}{#3}%
- \setupcurrentnaturaltablelocal[#4,#1,\c!frame=\v!off,\c!background=]%
+ \setupcurrentnaturaltablelocal[#4,#1]%
+ \letnaturaltablelocalparameter\c!background\empty
+ \letnaturaltablelocalparameter\c!frame\v!off
\inheritednaturaltablelocalframed{\tabl_ntb_cell_start#5\tabl_ntb_cell_stop}}%
\setbox2\emptyhbox
\wd2\wd\scratchbox
@@ -1647,7 +1971,10 @@
\unexpanded\def\tabl_ntb_cell_process_d#1#2[#3]#4%
{\tabl_ntb_setup_cell{#1}{#2}%
\bgroup
- \setupcurrentnaturaltablelocal[#3,\c!width=\d_tabl_ntb_width,\c!background=,\c!frame=\v!off]% 25% faster
+ \setupcurrentnaturaltablelocal[#3]%
+ \letnaturaltablelocalparameter\c!background\empty
+ \letnaturaltablelocalparameter\c!frame\v!off
+ \setnaturaltablelocalparameter\c!width{\d_tabl_ntb_width}%
\inheritednaturaltablelocalframed{\tabl_ntb_cell_start\tabl_ntb_char_align{#1}{#2}#4\tabl_ntb_cell_stop}%
\egroup}
@@ -1656,10 +1983,11 @@
\setupcurrentnaturaltablelocal[#3]% to get the color right, the way we
\color % handle color here prevents interference due to whatsit nodes
[\naturaltablelocalparameter\c!color] % as well as permits local colors to take precedence
- {\ifdim\d_tabl_ntb_height=\zeropoint\relax % case: nc=maxcolumns
- \setupcurrentnaturaltablelocal[\c!color=,\c!width=\d_tabl_ntb_width]%
+ {\letnaturaltablelocalparameter\c!color\empty
+ \setnaturaltablelocalparameter\c!width{\d_tabl_ntb_width}%
+ \ifdim\d_tabl_ntb_height=\zeropoint\relax % case: nc=maxcolumns
\else
- \setupcurrentnaturaltablelocal[\c!color=,\c!width=\d_tabl_ntb_width,\c!height=\d_tabl_ntb_height]%
+ \setnaturaltablelocalparameter\c!height{\d_tabl_ntb_height}%
\fi
\inheritednaturaltablelocalframed{\tabl_ntb_cell_start\tabl_ntb_char_align{#1}{#2}#4\tabl_ntb_cell_stop}}%
\hskip\tabl_ntb_get_dis{#2}}
@@ -1667,7 +1995,7 @@
\setupTABLE
[\c!frameoffset=.5\linewidth,
\c!backgroundoffset=\v!frame,
- \c!framecolor=\s!black,
+ % \c!framecolor=\s!black,
\c!width=\v!fit,
\c!height=\v!fit,
\c!autowidth=\v!yes,
@@ -1712,14 +2040,14 @@
\newconditional\resetTABLEmode \settrue\resetTABLEmode
-\def\tabl_ntb_parameters_reset
+\def\tabl_ntb_parameters_reset % we can use setters instead
{\ifnum\m_tabl_tbl_level>\plusone % in ieder geval
\ifconditional\resetTABLEmode
% not ok yet
\setupTABLE
[\c!frameoffset=.5\linewidth,
\c!backgroundoffset=\v!frame,
- \c!framecolor=\s!black,
+ % \c!framecolor=\s!black,
\c!width=\v!fit,
\c!height=\v!fit,
\c!autowidth=\v!yes,
diff --git a/tex/context/base/tabl-tab.mkiv b/tex/context/base/tabl-tab.mkiv
index f9ac27e70..5add1c684 100644
--- a/tex/context/base/tabl-tab.mkiv
+++ b/tex/context/base/tabl-tab.mkiv
@@ -1503,9 +1503,9 @@
\global\setfalse\tableactionstatepermitted
\global\setfalse\hassometablehead
\global\setfalse\hassometabletail
- \expanded{\doifinstringelse{|}{#1}}
+ \expanded{\doifelseinstring{|}{#1}}
{\xdef\restarttable{\noexpand\dorestarttable{\noexpand\thirdstagestarttable{#1}}}}
- {\doifdefinedelse{\??tabletemplate#1}
+ {\doifelsedefined{\??tabletemplate#1}
{\gdef\restarttable{\getvalue{\??tabletemplate#1}}}
{\gdef\restarttable{\dorestarttable{\getvalue{#1}}}}}%
\egroup
@@ -1921,7 +1921,7 @@
{\global\currenttablecolumn\zerocount}
\def\dotablevrulecommand#1% global assignments
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\global\tablevrulethicknessfactor#1\relax
\global\multiply\tablevrulethicknessfactor\m_tabl_table_VLwidth\relax}
{\xdef\tablecurrentvrulecolor{#1}}}
@@ -1955,7 +1955,7 @@
\unexpanded\def\dotableVN#1{\global\noftablevrules#1\relax\VL}
\def\dotablehrulecommand#1% global assignments
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\global\tablehrulethicknessfactor#1\relax
\global\multiply\tablehrulethicknessfactor\m_tabl_table_HLheight\relax}
{\xdef\tablecurrenthrulecolor{#1}}}
@@ -2075,7 +2075,7 @@
\fi}
\def\dotabledrulecommand#1% global assignments
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\ifcase\tabledrulespan
\global\tabledrulespan#1\relax
\else
@@ -2203,7 +2203,7 @@
\def\dolocaltablesetup
{\directtablesparameter\c!commands\relax
- \doifsomething{\directtablesparameter\c!bodyfont}{\switchtobodyfont[\directtablesparameter\c!bodyfont]}%
+ \usebodyfontparameter\directtablesparameter
\tablelinethicknessunit\dimexpr\directtablesparameter\c!rulethickness/\tablelinethicknessfactor\relax
\edef\p_tabl_table_height{\directtablesparameter\c!height}%
\edef\p_tabl_table_depth{\directtablesparameter\c!depth}%
@@ -2288,8 +2288,7 @@
\c!distance=\v!medium,
\c!bodyfont=,
\c!commands=,
- \c!background=\v!screen, % huh?
- \c!backgroundscreen=\defaultbackgroundscreen,
+ \c!background=,
\c!backgroundcolor=,
\c!split=\v!auto]
diff --git a/tex/context/base/tabl-tbl.lua b/tex/context/base/tabl-tbl.lua
index 21564a472..13d899de6 100644
--- a/tex/context/base/tabl-tbl.lua
+++ b/tex/context/base/tabl-tbl.lua
@@ -9,21 +9,25 @@ if not modules then modules = { } end modules ['tabl-tbl'] = {
-- A couple of hacks ... easier to do in Lua than in regular TeX. More will
-- follow.
-local context, commands = context, commands
-
local tonumber = tonumber
local gsub, rep, sub, find = string.gsub, string.rep, string.sub, string.find
local P, C, Cc, Ct, lpegmatch = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Ct, lpeg.match
+local context = context
+local commands = commands
+
local texsetcount = tex.setcount
-local separator = P("|")
-local nested = lpeg.patterns.nested
-local pattern = Ct((separator * (C(nested) + Cc("")) * C((1-separator)^0))^0)
+local separator = P("|")
+local nested = lpeg.patterns.nested
+local pattern = Ct((separator * (C(nested) + Cc("")) * C((1-separator)^0))^0)
+
+local ctx_settabulatelastentry = context.settabulatelastentry
+local ctx_settabulateentry = context.settabulateentry
-function commands.presettabulate(preamble)
+local function presettabulate(preamble)
preamble = gsub(preamble,"~","d") -- let's get rid of ~ mess here
- if find(preamble,"%*") then
+ if find(preamble,"*",1,true) then
-- todo: lpeg but not now
preamble = gsub(preamble, "%*(%b{})(%b{})", function(n,p)
return rep(sub(p,2,-2),tonumber(sub(n,2,-2)) or 1)
@@ -35,7 +39,14 @@ function commands.presettabulate(preamble)
texsetcount("global","c_tabl_tabulate_has_rule_spec_first", t[1] == "" and 0 or 1)
texsetcount("global","c_tabl_tabulate_has_rule_spec_last", t[m+1] == "" and 0 or 1)
for i=1,m,2 do
- context.settabulateentry(t[i],t[i+1])
+ ctx_settabulateentry(t[i],t[i+1])
end
- context.settabulatelastentry(t[m+1])
+ ctx_settabulatelastentry(t[m+1])
end
+
+interfaces.implement {
+ name = "presettabulate",
+ actions = presettabulate,
+ arguments = "string",
+ scope = "private",
+}
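-- A small illustration of the preprocessing above, assuming the same gsub and
-- splitter: the "*{n}{...}" shorthand is expanded before the format is split,
-- so for instance
--
--   presettabulate("*{3}{|c}|p|")
--
-- first rewrites the preamble to "|c|c|c|p|" and then pushes the resulting
-- rule-spec/entry pairs to TeX via settabulateentry and settabulatelastentry.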
diff --git a/tex/context/base/tabl-tbl.mkiv b/tex/context/base/tabl-tbl.mkiv
index cd5efa7f7..ae4f03825 100644
--- a/tex/context/base/tabl-tbl.mkiv
+++ b/tex/context/base/tabl-tbl.mkiv
@@ -331,7 +331,7 @@
{\dowithnextboxcs\tabl_tabulate_shaped_par_finish\vbox\bgroup}
\def\tabl_tabulate_shaped_par_finish
- {\ctxcommand{doreshapeframedbox(\number\nextbox)}%
+ {\clf_doreshapeframedbox\nextbox\relax
\ifvmode\unvbox\else\box\fi\nextbox}
\let\tabl_tabulate_shaped_par_end\egroup
@@ -429,7 +429,8 @@
\aligntab
\tabl_tabulate_column_vrule_inject
\tabl_tabulate_color_side_left
- \tabl_tabulate_inject_pre_skip{\the\s_tabl_tabulate_pre}%
+% \tabl_tabulate_inject_pre_skip{\the\s_tabl_tabulate_pre}%
+ \tabl_tabulate_inject_pre_skip{\the\dimexpr\s_tabl_tabulate_pre}% get rid of plus
\alignmark\alignmark
\aligntab
\tabl_tabulate_color_side_both
@@ -496,7 +497,7 @@
\egroup
\aligntab
\noexpand\dostoptagged
- \tabl_tabulate_inject_post_skip{\the\s_tabl_tabulate_post}%
+ \tabl_tabulate_inject_post_skip{\the\dimexpr\s_tabl_tabulate_post}% get rid of plus
\alignmark\alignmark
}}%
\t_tabl_tabulate_dummy\expandafter{\the\t_tabl_tabulate_dummy\NC}%
@@ -598,17 +599,17 @@
\tabl_tabulate_set_preamble}
\def\tabl_tabulate_set_preskip#1%
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\s_tabl_tabulate_pre#1\d_tabl_tabulate_unit\tabl_tabulate_set_preamble }
{\s_tabl_tabulate_pre.5\d_tabl_tabulate_unit\tabl_tabulate_set_preamble#1}}
\def\tabl_tabulate_set_posskip#1%
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\s_tabl_tabulate_post#1\d_tabl_tabulate_unit\tabl_tabulate_set_preamble }
{\s_tabl_tabulate_post.5\d_tabl_tabulate_unit\tabl_tabulate_set_preamble#1}}
\def\tabl_tabulate_set_preposskip#1%
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\s_tabl_tabulate_pre#1\d_tabl_tabulate_unit\s_tabl_tabulate_post\s_tabl_tabulate_pre\tabl_tabulate_set_preamble }
{\s_tabl_tabulate_pre.5\d_tabl_tabulate_unit\s_tabl_tabulate_post\s_tabl_tabulate_pre\tabl_tabulate_set_preamble#1}}
@@ -650,7 +651,7 @@
\tabl_tabulate_set_preamble}
\def\tabl_tabulate_pickup_width
- {\doifnextparenthesiselse\tabl_tabulate_set_width_indeed\tabl_tabulate_set_preamble}
+ {\doifelsenextparenthesis\tabl_tabulate_set_width_indeed\tabl_tabulate_set_preamble}
\def\tabl_tabulate_set_width
{\setfalse\c_tabl_tabulate_pwidth_set
@@ -663,7 +664,7 @@
\tabl_tabulate_set_preamble}
\def\tabl_tabulate_set_paragraph
- {\doifnextparenthesiselse
+ {\doifelsenextparenthesis
{\c_tabl_tabulate_modus\plusone
\settrue\c_tabl_tabulate_pwidth_set
\tabl_tabulate_pickup_width}
@@ -748,7 +749,7 @@
\tabl_tabulate_set_preamble}
\def\tabl_tabulate_set_vrule_command#1%
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\global\d_tabl_tabulate_vrulethickness#1\d_tabl_tabulate_vrulethickness_default}
{\xdef\m_tabl_tabulate_vrule_color{#1}}}
@@ -1048,23 +1049,34 @@
\def\tabl_tabulate_insert_foot_content
{\tabulatenoalign{\global\settrue\c_tabl_tabulate_someamble}%
- \csname\??tabulatehead\currenttabulation\endcsname
+ \csname\??tabulatefoot\currenttabulation\endcsname
\tabulatenoalign{\global\setfalse\c_tabl_tabulate_someamble}}%
\def\tabl_tabulate_check_full_content % - needed, else confusion with \c!header
{\ifcsname\??tabulatehead\currenttabulation\endcsname
- \let\tabl_tabulate_insert_head\tabl_tabulate_insert_head_content
+ \expandafter\ifx\csname\??tabulatehead\currenttabulation\endcsname\empty
+ \let\tabl_tabulate_insert_head\empty
+ \else
+ \let\tabl_tabulate_insert_head\tabl_tabulate_insert_head_content
+ \fi
\else
\let\tabl_tabulate_insert_head\empty
\fi
\ifcsname\??tabulatefoot\currenttabulation\endcsname
- \let\tabl_tabulate_insert_foot\tabl_tabulate_insert_foot_content
+ \expandafter\ifx\csname\??tabulatefoot\currenttabulation\endcsname\empty
+ \let\tabl_tabulate_insert_foot\empty
+ \else
+ \let\tabl_tabulate_insert_foot\tabl_tabulate_insert_foot_content
+ \fi
\else
\let\tabl_tabulate_insert_foot\empty
\fi}
\def\tabl_tabulate_insert_content
{\tabl_tabulate_insert_head
+ \ifcase\c_tabl_tabulate_repeathead \else
+ \tabulatenoalign{\penalty\zerocount}% added 7/5/2014 WS mail
+ \fi
\tabl_tabulate_insert_body
\tabl_tabulate_insert_foot
\tabl_tabulate_remove_funny_line}
@@ -1075,8 +1087,10 @@
\tabulatenoalign{\kern-\lineheight}%
\fi}
-\setuvalue{\e!start\v!tabulatehead}{\doifnextoptionalelse\tabl_tabulate_start_head_yes\tabl_tabulate_start_head_nop}
-\setuvalue{\e!start\v!tabulatetail}{\doifnextoptionalelse\tabl_tabulate_start_foot_yes\tabl_tabulate_start_foot_nop}
+% todo: make footer a synonym for tail
+
+\setuvalue{\e!start\v!tabulatehead}{\doifelsenextoptionalcs\tabl_tabulate_start_head_yes\tabl_tabulate_start_head_nop}
+\setuvalue{\e!start\v!tabulatetail}{\doifelsenextoptionalcs\tabl_tabulate_start_foot_yes\tabl_tabulate_start_foot_nop}
\let\m_tabl_tabulate_data\empty
@@ -1086,30 +1100,30 @@
\def\tabl_tabulate_start_foot_yes[#1]%
{\processcontent{\e!stop\v!tabulatetail}\m_tabl_tabulate_data{\letvalue{\??tabulatefoot#1}\m_tabl_tabulate_data}}
-% \def\tabl_tabulate_start_head_nop{\tabl_tabulate_start_head_yes[\v!tabulate]}
-% \def\tabl_tabulate_start_foot_nop{\tabl_tabulate_start_foot_yes[\v!tabulate]}
+\def\tabl_tabulate_start_head_nop{\tabl_tabulate_start_head_yes[\v!tabulate]}
+\def\tabl_tabulate_start_foot_nop{\tabl_tabulate_start_foot_yes[\v!tabulate]}
-\def\tabl_tabulate_start_head_nop{\tabl_tabulate_start_head_yes[]}
-\def\tabl_tabulate_start_foot_nop{\tabl_tabulate_start_foot_yes[]}
+% \def\tabl_tabulate_start_head_nop{\tabl_tabulate_start_head_yes[]}
+% \def\tabl_tabulate_start_foot_nop{\tabl_tabulate_start_foot_yes[]}
% \unexpanded\def\tabl_start_defined[#1]%
% {\bgroup
% \edef\currenttabulationparent{#1}%
% \let\currenttabulation\currenttabulationparent
-% \doifnextoptionalelse\tabl_start_defined_yes\tabl_start_defined_nop}
-%
-% \def\tabl_start_defined_yes[#1]%
-% {\edef\currenttabulation{\currenttabulation:#1}%
-% \tabl_tabulate_start_building}
-%
-% \def\tabl_start_defined_nop
-% {\tabl_tabulate_start_building}
+% \dodoubleargument\tabl_start_defined_indeed}
\unexpanded\def\tabl_start_defined[#1]%
{\bgroup
\edef\currenttabulationparent{#1}%
\let\currenttabulation\currenttabulationparent
- \dodoubleargument\tabl_start_defined_indeed}
+ \edef\p_format{\tabulationparameter\c!format}%
+ \ifx\p_format\v!none
+ % this is a special case: we need to define the generic english
+ % \starttabulate in other interfaces as well
+ \expandafter\dodoubleempty \expandafter\tabl_start_regular
+ \else
+ \expandafter\dodoubleargument\expandafter\tabl_start_defined_indeed
+ \fi}
\def\tabl_start_defined_indeed
{\iffirstargument
@@ -1123,7 +1137,7 @@
\fi}
\def\tabl_start_defined_one[#1][#2]%
- {\doifassignmentelse{#1}
+ {\doifelseassignment{#1}
{\setuptabulation[\currenttabulation][#1]}%
{\edef\currenttabulation{\currenttabulation:#1}}%
\tabl_tabulate_start_building}
@@ -1140,26 +1154,11 @@
\unexpanded\setuvalue{\e!start\v!tabulate}%
{\bgroup % whole thing
+ \let\currenttabulationparent\empty
\dodoubleempty\tabl_start_regular}
-% \def\tabl_start_regular[#1][#2]%
-% {%\let\currenttabulationparent\v!tabulate
-% \let\currenttabulationparent\empty
-% \let\currenttabulation\currenttabulationparent
-% \def\p_format{#1}%
-% \ifx\p_format\empty
-% \def\p_format{|l|p|}%
-% \fi
-% \lettabulationparameter\c!format\p_format
-% \ifsecondargument
-% \setupcurrenttabulation[#2]%
-% \fi
-% \tabl_tabulate_start_building}
-
\def\tabl_start_regular
- {%\let\currenttabulationparent\v!tabulate
- \let\currenttabulationparent\empty
- \let\currenttabulation\currenttabulationparent
+ {\let\currenttabulation\currenttabulationparent
\ifsecondargument
\expandafter\tabl_start_regular_two
\else
@@ -1167,7 +1166,7 @@
\fi}
\def\tabl_start_regular_one[#1][#2]%
- {\doifassignmentelse{#1}
+ {\doifelseassignment{#1}
{\setupcurrenttabulation[\c!format={|l|p|},#1]}
{\def\p_format{#1}%
\ifx\p_format\empty
@@ -1317,7 +1316,7 @@
\def\tabl_tabulate_set_color_column#1% overloaded
{\unskip
- \doiffastoptionalcheckelse{\tabl_tabulate_set_color_column_yes#1}{\tabl_tabulate_set_color_column_nop#1}}
+ \doifelsefastoptionalcheck{\tabl_tabulate_set_color_column_yes#1}{\tabl_tabulate_set_color_column_nop#1}}
\def\tabl_tabulate_set_color_column_nop
{\tabl_tabulate_column_normal}
@@ -1356,13 +1355,13 @@
% \fi\fi
\global\let\m_tabl_tabulate_vrule_color_local\m_tabl_tabulate_vrule_color_default
\global\d_tabl_tabulate_vrulethickness_local\d_tabl_tabulate_vrulethickness_default
- \doiffastoptionalcheckelse{\tabl_tabulate_column_vruled_yes#1}{\tabl_tabulate_column_vruled_nop#1}}
+ \doifelsefastoptionalcheck{\tabl_tabulate_column_vruled_yes#1}{\tabl_tabulate_column_vruled_nop#1}}
\def\tabl_tabulate_column_vruled_nop
{\tabl_tabulate_column_normal}
\def\tabl_tabulate_column_vruled_step#1%
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\global\d_tabl_tabulate_vrulethickness_local#1\d_tabl_tabulate_vrulethickness_default}
{\xdef\m_tabl_tabulate_vrule_color_local{#1}}}
@@ -1450,15 +1449,15 @@
\def\tabl_tabulate_hrule_spec_ignore#1%
{%\global\let\currenttabulationlocalhrulecolor\empty
%\global\d_tabl_tabulate_hrulethickness_local\d_tabl_tabulate_hrulethickness_default
- \doiffastoptionalcheckelse#1#1}
+ \doifelsefastoptionalcheck#1#1}
\def\tabl_tabulate_hrule_spec_pickup#1%
{\global\let\currenttabulationlocalhrulecolor\m_tabl_tabulate_hrule_color_default
\global\d_tabl_tabulate_hrulethickness_local\d_tabl_tabulate_hrulethickness_default
- \doiffastoptionalcheckelse{\tabl_tabulate_hrule_preset#1}#1}
+ \doifelsefastoptionalcheck{\tabl_tabulate_hrule_preset#1}#1}
\def\tabl_tabulate_hrule_preset_step#1%
- {\doifnumberelse{#1}
+ {\doifelsenumber{#1}
{\global\d_tabl_tabulate_hrulethickness_local#1\d_tabl_tabulate_hrulethickness_default}
{\xdef\currenttabulationlocalhrulecolor{#1}}}
@@ -1527,7 +1526,7 @@
\unexpanded\def\tabl_tabulate_color_set#1% we could store the attributes at the cost of a lua call
{\begingroup
- \node_backgrounds_align_initialize % name might change
+ \clf_enablebackgroundalign % was \node_backgrounds_align_initialize
\global\let\tabl_tabulate_color_repeat\tabl_tabulate_color_repeat_second
\global\settrue\c_tabl_tabulate_has_colors
\ifnum\c_tabl_tabulate_column>\c_tabl_tabulate_max_colorcolumn
@@ -2066,9 +2065,9 @@
\donetrue
\fi\fi
\ifdone
- \c_tabl_tabulate_repeathead\executeifdefined{\??tabulateheader\tabulationparameter\c!header}\zerocount
+ \global\c_tabl_tabulate_repeathead\executeifdefined{\??tabulateheader\tabulationparameter\c!header}\zerocount
\else
- \c_tabl_tabulate_repeathead\zerocount
+ \global\c_tabl_tabulate_repeathead\zerocount
\fi
%
\the\t_tabl_tabulate_initializers_first % collect more here
@@ -2102,7 +2101,7 @@
\c_tabl_tabulate_nofcolumns \zerocount
\c_tabl_tabulate_has_rule_spec_first\zerocount
\c_tabl_tabulate_has_rule_spec_last \zerocount
- \ctxcommand{presettabulate(\!!bs\detokenizedtabulationparameter\c!format\!!es)}%
+ \clf_presettabulate{\detokenizedtabulationparameter\c!format}%
%
% \edef\totaltabulatecolumns{\the\numexpr3*\c_tabl_tabulate_columns+\plusfour}%
\d_tabl_tabulate_width\zeropoint
@@ -2168,7 +2167,7 @@
\global\setbox\b_tabl_tabulate\vbox \bgroup
\fi
%
- \dostarttagged\t!tabulate\empty
+ \dostarttaggedchained\t!tabulate\empty\??tabulation
\dostarttagged\t!tabulaterow\empty
\setfalse\inhibitmargindata % new per 2012.06.13 ... really needed
\everycr\expandafter{\the\everycr\dostoptagged\dostarttagged\t!tabulaterow\empty}%
@@ -2448,4 +2447,10 @@
%D \stopwhatever
%D \stoptyping
+%D This is needed because we sometimes use the english command in tracing macros. In
+%D fact, most detailed tracing macros that are done with \LUA\ only work in the
+%D english interface anyway.
+
+\definetabulate[tabulate] \setuptabulate[tabulate][\c!format=\v!none] % so no \v! here
+
\protect \endinput
diff --git a/tex/context/base/tabl-tsp.mkiv b/tex/context/base/tabl-tsp.mkiv
index 0138697af..64ab94a67 100644
--- a/tex/context/base/tabl-tsp.mkiv
+++ b/tex/context/base/tabl-tsp.mkiv
@@ -195,7 +195,7 @@
\normalexpanded{\egroup\noexpand\edef\noexpand\extrasplitfloatlines{\the\noflines}}%
\global\settrue\usesamefloatnumber
\else
- \doifnumberelse\extrasplitfloatlines\donothing{\def\extrasplitfloatlines{1}}%
+ \doifelsenumber\extrasplitfloatlines\donothing{\def\extrasplitfloatlines{1}}%
\fi}
\unexpanded\def\doifnotinsidesplitfloat
diff --git a/tex/context/base/tabl-xnt.mkvi b/tex/context/base/tabl-xnt.mkvi
index ffa1f501e..fd2de5a13 100644
--- a/tex/context/base/tabl-xnt.mkvi
+++ b/tex/context/base/tabl-xnt.mkvi
@@ -130,6 +130,6 @@
{\bgroup
\tabl_x_prepare{#settings}%
\edef\tabl_x_current_buffer{\tabl_x_default_buffer}%
- \buff_pickup\tabl_x_current_buffer{bTABLE}{eTABLE}\relax\tabl_x_process}
+ \buff_pickup\tabl_x_current_buffer{bTABLE}{eTABLE}\relax\tabl_x_process\zerocount}
\protect \endinput
diff --git a/tex/context/base/tabl-xtb.lua b/tex/context/base/tabl-xtb.lua
index 488ef5b78..4bf8e3107 100644
--- a/tex/context/base/tabl-xtb.lua
+++ b/tex/context/base/tabl-xtb.lua
@@ -24,38 +24,57 @@ this mechanism will be improved so that it can replace its older cousin.
]]--
-- todo: use linked list instead of r/c array
+-- todo: we can use the sum of previously forced widths for column spans
-local commands, context, tex, node = commands, context, tex, node
+local tonumber, next = tonumber, next
-local texgetcount = tex.getcount
-local texsetcount = tex.setcount
-local texgetbox = tex.getbox
-local texgetdimen = tex.getdimen
-local texsetdimen = tex.setdimen
-local texget = tex.get
+local commands = commands
+local context = context
+local tex = tex
-local format = string.format
-local concat = table.concat
-local points = number.points
+local implement = interfaces.implement
+
+local texgetcount = tex.getcount
+local texsetcount = tex.setcount
+local texgetdimen = tex.getdimen
+local texsetdimen = tex.setdimen
+local texget = tex.get
+
+local format = string.format
+local concat = table.concat
+local points = number.points
+
+local todimen = string.todimen
-local context = context
local context_beginvbox = context.beginvbox
local context_endvbox = context.endvbox
local context_blank = context.blank
local context_nointerlineskip = context.nointerlineskip
+local context_dummyxcell = context.dummyxcell
local variables = interfaces.variables
local setmetatableindex = table.setmetatableindex
local settings_to_hash = utilities.parsers.settings_to_hash
-local copy_node_list = node.copy_list
-local hpack_node_list = node.hpack
-local vpack_node_list = node.vpack
-local slide_node_list = node.slide
-local flush_node_list = node.flush_list
+local nuts = nodes.nuts -- here nuts gain us hardly anything
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getlist = nuts.getlist
+local getfield = nuts.getfield
+local getbox = nuts.getbox
-local nodepool = nodes.pool
+local setfield = nuts.setfield
+
+local copy_node_list = nuts.copy_list
+local hpack_node_list = nuts.hpack
+local flush_node_list = nuts.flush_list
+local takebox = nuts.takebox
+
+local nodepool = nuts.pool
local new_glue = nodepool.glue
local new_kern = nodepool.kern
@@ -69,6 +88,12 @@ local v_height = variables.height
local v_repeat = variables["repeat"]
local v_max = variables.max
local v_fixed = variables.fixed
+local v_auto = variables.auto
+local v_before = variables.before
+local v_after = variables.after
+local v_both = variables.both
+local v_samepage = variables.samepage
+local v_tight = variables.tight
local xtables = { }
typesetters.xtables = xtables
@@ -96,36 +121,40 @@ local stack, data = { }, nil
function xtables.create(settings)
table.insert(stack,data)
- local rows = { }
- local widths = { }
- local heights = { }
- local depths = { }
- local spans = { }
- local distances = { }
- local autowidths = { }
- local modes = { }
- local fixedrows = { }
- local fixedcolumns = { }
- local frozencolumns = { }
- local options = { }
+ local rows = { }
+ local widths = { }
+ local heights = { }
+ local depths = { }
+ local spans = { }
+ local distances = { }
+ local autowidths = { }
+ local modes = { }
+ local fixedrows = { }
+ local fixedcolumns = { }
+ -- local fixedcspans = { }
+ local frozencolumns = { }
+ local options = { }
+ local rowproperties = { }
data = {
- rows = rows,
- widths = widths,
- heights = heights,
- depths = depths,
- spans = spans,
- distances = distances,
- modes = modes,
- autowidths = autowidths,
- fixedrows = fixedrows,
- fixedcolumns = fixedcolumns,
- frozencolumns = frozencolumns,
- options = options,
- nofrows = 0,
- nofcolumns = 0,
- currentrow = 0,
- currentcolumn = 0,
- settings = settings or { },
+ rows = rows,
+ widths = widths,
+ heights = heights,
+ depths = depths,
+ spans = spans,
+ distances = distances,
+ modes = modes,
+ autowidths = autowidths,
+ fixedrows = fixedrows,
+ fixedcolumns = fixedcolumns,
+ -- fixedcspans = fixedcspans,
+ frozencolumns = frozencolumns,
+ options = options,
+ nofrows = 0,
+ nofcolumns = 0,
+ currentrow = 0,
+ currentcolumn = 0,
+ settings = settings or { },
+ rowproperties = rowproperties,
}
local function add_zero(t,k)
t[k] = 0
@@ -141,6 +170,9 @@ function xtables.create(settings)
nx = 0,
ny = 0,
list = false,
+ wd = 0,
+ ht = 0,
+ dp = 0,
}
row[c] = cell
if c > data.nofcolumns then
@@ -166,24 +198,28 @@ function xtables.create(settings)
setmetatableindex(fixedrows,add_zero)
setmetatableindex(fixedcolumns,add_zero)
setmetatableindex(options,add_table)
+ -- setmetatableindex(fixedcspans,add_table)
+ --
+ local globaloptions = settings_to_hash(settings.option)
--
- settings.columndistance = tonumber(settings.columndistance) or 0
- settings.rowdistance = tonumber(settings.rowdistance) or 0
- settings.leftmargindistance = tonumber(settings.leftmargindistance) or 0
+ settings.columndistance = tonumber(settings.columndistance) or 0
+ settings.rowdistance = tonumber(settings.rowdistance) or 0
+ settings.leftmargindistance = tonumber(settings.leftmargindistance) or 0
settings.rightmargindistance = tonumber(settings.rightmargindistance) or 0
- settings.options = settings_to_hash(settings.option)
- settings.textwidth = tonumber(settings.textwidth) or texget("hsize")
- settings.lineheight = tonumber(settings.lineheight) or texgetdimen("lineheight")
- settings.maxwidth = tonumber(settings.maxwidth) or settings.textwidth/8
+ settings.options = globaloptions
+ settings.textwidth = tonumber(settings.textwidth) or texget("hsize")
+ settings.lineheight = tonumber(settings.lineheight) or texgetdimen("lineheight")
+ settings.maxwidth = tonumber(settings.maxwidth) or settings.textwidth/8
-- if #stack > 0 then
-- settings.textwidth = texget("hsize")
-- end
data.criterium_v = 2 * data.settings.lineheight
data.criterium_h = .75 * data.settings.textwidth
-
+ --
+ data.tight = globaloptions[v_tight] and true or false
end
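-- data.tight mirrors the global "tight" option so that set_reflow_width below
-- can decide, without inspecting the per-cell options first, that a column span
-- should never bump the widths of the columns it covers.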
-function xtables.initialize_reflow_width(option)
+function xtables.initialize_reflow_width(option,width)
local r = data.currentrow
local c = data.currentcolumn + 1
local drc = data.rows[r][c]
@@ -204,8 +240,7 @@ function xtables.initialize_reflow_width(option)
data.currentcolumn = c
end
--- local function rather_fixed(n)
--- for n in node.
+-- todo: it would be better to set the cell values in one go
function xtables.set_reflow_width()
local r = data.currentrow
@@ -215,43 +250,111 @@ function xtables.set_reflow_width()
while row[c].span do -- can also be previous row ones
c = c + 1
end
- local tb = texgetbox("b_tabl_x")
+ local tb = getbox("b_tabl_x")
local drc = row[c]
--
drc.list = true -- we don't need to keep the content around as we're in trial mode (no: copy_node_list(tb))
--
- local widths, width = data.widths, tb.width
- if width > widths[c] then
- widths[c] = width
- end
- local heights, height = data.heights, tb.height
- if height > heights[r] then
- heights[r] = height
+ local width = getfield(tb,"width")
+ local height = getfield(tb,"height")
+ local depth = getfield(tb,"depth")
+ --
+ local widths = data.widths
+ local heights = data.heights
+ local depths = data.depths
+ local cspan = drc.nx
+ if cspan < 2 then
+ if width > widths[c] then
+ widths[c] = width
+ end
+ else
+ local options = data.options[r][c]
+ if data.tight then
+ -- no check
+ elseif not options then
+ if width > widths[c] then
+ widths[c] = width
+ end
+ elseif not options[v_tight] then
+ if width > widths[c] then
+ widths[c] = width
+ end
+ end
end
- local depths, depth = data.depths, tb.depth
- if depth > depths[r] then
- depths[r] = depth
+ -- if cspan > 1 then
+ -- local f = data.fixedcspans[c]
+ -- local w = f[cspan] or 0
+ -- if width > w then
+ -- f[cspan] = width -- maybe some day a solution for autospanmax and so
+ -- end
+ -- end
+ if drc.ny < 2 then
+ if height > heights[r] then
+ heights[r] = height
+ end
+ if depth > depths[r] then
+ depths[r] = depth
+ end
end
--
+ drc.wd = width
+ drc.ht = height
+ drc.dp = depth
+ --
local dimensionstate = texgetcount("frameddimensionstate")
local fixedcolumns = data.fixedcolumns
local fixedrows = data.fixedrows
if dimensionstate == 1 then
- if width > fixedcolumns[c] then -- how about a span here?
- fixedcolumns[c] = width
- end
+ if cspan > 1 then
+ -- ignore width
+ elseif width > fixedcolumns[c] then -- how about a span here?
+ fixedcolumns[c] = width
+ end
elseif dimensionstate == 2 then
fixedrows[r] = height
elseif dimensionstate == 3 then
fixedrows[r] = height -- width
fixedcolumns[c] = width -- height
- else -- probably something frozen, like an image -- we could parse the list
- if width <= data.criterium_h and height >= data.criterium_v then
- if width > fixedcolumns[c] then -- how about a span here?
- fixedcolumns[c] = width
- end
+ elseif width <= data.criterium_h and height >= data.criterium_v then
+ -- somewhat tricky branch
+ if width > fixedcolumns[c] then -- how about a span here?
+ -- maybe an image, so let's fix
+ fixedcolumns[c] = width
end
end
+--
+-- -- this fails so not good enough predictor
+--
+-- -- \startxtable
+-- -- \startxrow
+-- -- \startxcell knuth \stopxcell
+-- -- \startxcell \input knuth \stopxcell
+-- -- \stopxrow
+--
+-- else
+-- local o = data.options[r][c]
+-- if o and o[v_auto] then -- new per 5/5/2014 - removed per 15/07/2014
+-- data.autowidths[c] = true
+-- else
+-- -- no dimensions are set in the cell
+-- if width <= data.criterium_h and height >= data.criterium_v then
+-- -- somewhat tricky branch
+-- if width > fixedcolumns[c] then -- how about a span here?
+-- -- maybe an image, so let's fix
+-- fixedcolumns[c] = width
+-- end
+-- else
+-- -- safeguard as it could be text that can be recalculated
+-- -- and the previous branch could have happened in a previous
+-- -- row and then forces a wrong one-liner in a multiliner
+-- if width > fixedcolumns[c] then
+-- data.autowidths[c] = true -- new per 5/5/2014 - removed per 15/07/2014
+-- end
+-- end
+-- end
+-- end
+--
+ --
drc.dimensionstate = dimensionstate
--
local nx, ny = drc.nx, drc.ny
@@ -306,6 +409,8 @@ function xtables.initialize_reflow_height()
elseif data.autowidths[c] then
-- width has changed so we need to recalculate the height
texsetcount("c_tabl_x_skip_mode",0)
+ elseif data.fixedcolumns[c] then
+ texsetcount("c_tabl_x_skip_mode",0) -- new
else
texsetcount("c_tabl_x_skip_mode",1)
end
@@ -319,18 +424,30 @@ function xtables.set_reflow_height()
-- while row[c].span do -- we could adapt drc.nx instead
-- c = c + 1
-- end
- local tb = texgetbox("b_tabl_x")
+ local tb = getbox("b_tabl_x")
local drc = row[c]
- if data.fixedrows[r] == 0 then -- and drc.dimensionstate < 2
- local heights, height = data.heights, tb.height
- if height > heights[r] then
- heights[r] = height
- end
- local depths, depth = data.depths, tb.depth
- if depth > depths[r] then
- depths[r] = depth
+ --
+ local width = getfield(tb,"width")
+ local height = getfield(tb,"height")
+ local depth = getfield(tb,"depth")
+ --
+ if drc.ny < 2 then
+ if data.fixedrows[r] == 0 then -- and drc.dimensionstate < 2
+ local heights = data.heights
+ local depths = data.depths
+ if height > heights[r] then
+ heights[r] = height
+ end
+ if depth > depths[r] then
+ depths[r] = depth
+ end
end
end
+ --
+ drc.wd = width
+ drc.ht = height
+ drc.dp = depth
+ --
-- c = c + drc.nx - 1
-- data.currentcolumn = c
end
@@ -344,23 +461,35 @@ function xtables.initialize_construct()
c = c + 1
end
data.currentcolumn = c
- local widths = data.widths
+ local widths = data.widths
local heights = data.heights
- local depths = data.depths
- local w = widths[c]
- local h = heights[r]
- local d = depths[r]
+ local depths = data.depths
+ --
local drc = row[c]
+ local wd = drc.wd
+ local ht = drc.ht
+ local dp = drc.dp
+ --
+ local width = widths[c]
+ local height = heights[r]
+ local depth = depths[r]
+ --
for x=1,drc.nx-1 do
- w = w + widths[c+x]
+ width = width + widths[c+x]
end
- for y=1,drc.ny-1 do
- h = h + heights[r+y]
- d = d + depths[r+y]
+ --
+ local total = height + depth
+ local ny = drc.ny
+ if ny > 1 then
+ for y=1,ny-1 do
+ local nxt = r + y
+ total = total + heights[nxt] + depths[nxt]
+ end
end
- texsetdimen("d_tabl_x_width",w)
- texsetdimen("d_tabl_x_height",h + d)
- texsetdimen("d_tabl_x_depth",0)
+ --
+ texsetdimen("d_tabl_x_width",width)
+ texsetdimen("d_tabl_x_height",total)
+ texsetdimen("d_tabl_x_depth",0) -- for now
end
function xtables.set_construct()
@@ -373,7 +502,7 @@ function xtables.set_construct()
-- end
local drc = row[c]
-- this will change as soon as in luatex we can reset a box list without freeing
- drc.list = copy_node_list(texgetbox("b_tabl_x"))
+ drc.list = takebox("b_tabl_x")
-- c = c + drc.nx - 1
-- data.currentcolumn = c
end
@@ -383,7 +512,7 @@ local function showwidths(where,widths,autowidths)
for i=1,#widths do
result[#result+1] = format("%12s%s",points(widths[i]),autowidths[i] and "*" or " ")
end
- return report_xtable("%s : %s",where,concat(result," "))
+ return report_xtable("%s widths: %s",where,concat(result," "))
end
function xtables.reflow_width()
@@ -406,6 +535,8 @@ function xtables.reflow_width()
local maxwidth = settings.maxwidth
-- calculate width
local widths = data.widths
+ local heights = data.heights
+ local depths = data.depths
local distances = data.distances
local autowidths = data.autowidths
local fixedcolumns = data.fixedcolumns
@@ -419,6 +550,7 @@ function xtables.reflow_width()
showwidths("stage 1",widths,autowidths)
end
local noffrozen = 0
+ -- inspect(data.fixedcspans)
if options[v_max] then
for c=1,nofcolumns do
width = width + widths[c]
@@ -541,16 +673,51 @@ function xtables.reflow_width()
--
data.currentrow = 0
data.currentcolumn = 0
+ --
+-- inspect(data)
end
function xtables.reflow_height()
data.currentrow = 0
data.currentcolumn = 0
local settings = data.settings
+ --
+ -- analyze ny
+ --
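+    -- a cell that spans ny rows (a rowspan) can be taller than the rows it covers;
+    -- when that happens the excess height is distributed evenly over the spanned rows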
+ local nofrows = data.nofrows
+ local nofcolumns = data.nofcolumns
+ local widths = data.widths
+ local heights = data.heights
+ local depths = data.depths
+ --
+ for r=1,nofrows do
+ for c=1,nofcolumns do
+ local drc = data.rows[r][c]
+ if drc then
+ local ny = drc.ny
+ if ny > 1 then
+ local height = heights[r]
+ local depth = depths[r]
+ local total = height + depth
+ local htdp = drc.ht + drc.dp
+ for y=1,ny-1 do
+ local nxt = r + y
+ total = total + heights[nxt] + depths[nxt]
+ end
+ local delta = htdp - total
+ if delta > 0 then
+ delta = delta / ny
+ for y=0,ny-1 do
+ local nxt = r + y
+ heights[nxt] = heights[nxt] + delta
+ end
+ end
+ end
+ end
+ end
+ end
+ --
if settings.options[v_height] then
- local heights = data.heights
- local depths = data.depths
- local nofrows = data.nofrows
local totalheight = 0
local totaldepth = 0
for i=1,nofrows do
@@ -560,14 +727,16 @@ function xtables.reflow_height()
local total = totalheight + totaldepth
local leftover = settings.textheight - total
if leftover > 0 then
- local leftheight = (totalheight / total ) * leftover / #heights
- local leftdepth = (totaldepth / total ) * leftover / #depths
+ local leftheight = (totalheight / total) * leftover / #heights
+ local leftdepth = (totaldepth / total) * leftover / #depths
for i=1,nofrows do
heights[i] = heights[i] + leftheight
depths [i] = depths [i] + leftdepth
end
end
end
+ --
+-- inspect(data)
end
local function showspans(data)
@@ -607,6 +776,7 @@ function xtables.construct()
local rowdistance = settings.rowdistance
local leftmargindistance = settings.leftmargindistance
local rightmargindistance = settings.rightmargindistance
+ local rowproperties = data.rowproperties
-- ranges can be mixes so we collect
if trace_xtable then
@@ -646,23 +816,23 @@ function xtables.construct()
end
local list = drc.list
if list then
- list.shift = list.height + list.depth
+ setfield(list,"shift",getfield(list,"height") + getfield(list,"depth"))
-- list = hpack_node_list(list) -- is somehow needed
- -- list.width = 0
- -- list.height = 0
- -- list.depth = 0
+ -- setfield(list,"width",0)
+ -- setfield(list,"height",0)
+ -- setfield(list,"depth",0)
-- faster:
local h = new_hlist()
- h.list = list
+ setfield(h,"list",list)
list = h
--
if start then
- stop.next = list
- list.prev = stop
+ setfield(stop,"next",list)
+ setfield(list,"prev",stop)
else
start = list
end
- stop = list -- one node anyway, so not needed: slide_node_list(list)
+ stop = list
end
local step = widths[c]
if c < nofcolumns then
@@ -670,8 +840,8 @@ function xtables.construct()
end
local kern = new_kern(step)
if stop then
- stop.next = kern
- kern.prev = stop
+ setfield(stop,"next",kern)
+ setfield(kern,"prev",stop)
else -- can be first spanning next row (ny=...)
start = kern
end
@@ -680,8 +850,8 @@ function xtables.construct()
if start then
if rightmargindistance > 0 then
local kern = new_kern(rightmargindistance)
- stop.next = kern
- kern.prev = stop
+ setfield(stop,"next",kern)
+ setfield(kern,"prev",stop)
-- stop = kern
end
return start, heights[r] + depths[r], hasspan
@@ -699,14 +869,33 @@ function xtables.construct()
result[nofr][4] = true
end
nofr = nofr + 1
+ local rp = rowproperties[r]
+        -- there is a direction issue here, but hpack_node_list(list,0,"exactly","TLT") cannot be
+        -- used because we need the width
+ local hbox = hpack_node_list(list)
+ setfield(hbox,"dir","TLT")
result[nofr] = {
- hpack_node_list(list),
+ hbox,
size,
i < nofrange and rowdistance > 0 and rowdistance or false, -- might move
- false
+ false,
+ rp and rp.samepage or false,
}
end
end
+ if nofr > 0 then
+ -- the [5] slot gets the after break
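+        -- the per-row samepage keyword stored in slot 5 (before/after/both) is turned
+        -- into a boolean: true means "no page break after this row"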
+ result[1] [5] = false
+ result[nofr][5] = false
+ for i=2,nofr-1 do
+            local r = result[i][5]
+ if r == v_both or r == v_before then
+ result[i-1][5] = true
+ elseif r == v_after then
+ result[i][5] = true
+ end
+ end
+ end
return result
end
local body = collect_range(ranges[body_mode])
@@ -721,7 +910,7 @@ function xtables.construct()
texsetdimen("global","d_tabl_x_final_width",0)
else
texsetcount("global","c_tabl_x_state",1)
- texsetdimen("global","d_tabl_x_final_width",body[1][1].width)
+ texsetdimen("global","d_tabl_x_final_width",getfield(body[1][1],"width"))
end
end
@@ -734,22 +923,28 @@ local function inject(row,copy,package)
end
if package then
context_beginvbox()
- context(list)
- context(new_kern(row[2]))
+ context(tonode(list))
+ context(tonode(new_kern(row[2])))
context_endvbox()
context_nointerlineskip() -- figure out a better way
if row[4] then
-- nothing as we have a span
+ elseif row[5] then
+ if row[3] then
+ context_blank { v_samepage, row[3] .. "sp" }
+ else
+ context_blank { v_samepage }
+ end
elseif row[3] then
- context_blank(row[3] .. "sp") -- why blank ?
+ context_blank { row[3] .. "sp" } -- why blank ?
else
- context(new_glue(0))
+ context(tonode(new_glue(0)))
end
else
- context(list)
- context(new_kern(row[2]))
+ context(tonode(list))
+ context(tonode(new_kern(row[2])))
if row[3] then
- context(new_glue(row[3]))
+ context(tonode(new_glue(row[3])))
end
end
end
@@ -794,23 +989,23 @@ local function spanheight(body,i)
end
function xtables.flush(directives) -- todo split by size / no inbetween then .. glue list kern blank
- local vsize = directives.vsize
- local method = directives.method or v_normal
- local settings = data.settings
- local results = data.results
- local rowdistance = settings.rowdistance
- local head = results[head_mode]
- local foot = results[foot_mode]
- local more = results[more_mode]
- local body = results[body_mode]
+ local height = directives.height
+ local method = directives.method or v_normal
+ local settings = data.settings
+ local results = data.results
+ local rowdistance = settings.rowdistance
+ local head = results[head_mode]
+ local foot = results[foot_mode]
+ local more = results[more_mode]
+ local body = results[body_mode]
local repeatheader = settings.header == v_repeat
local repeatfooter = settings.footer == v_repeat
- if vsize and vsize > 0 then
+ if height and height > 0 then
context_beginvbox()
local bodystart = data.bodystart or 1
local bodystop = data.bodystop or #body
if bodystart > 0 and bodystart <= bodystop then
- local bodysize = vsize
+ local bodysize = height
local footsize = total(foot,rowdistance)
local headsize = total(head,rowdistance)
local moresize = total(more,rowdistance)
@@ -822,7 +1017,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(head[i],repeatheader)
end
if rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
if not repeatheader then
results[head_mode] = { }
@@ -835,7 +1030,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(more[i],true)
end
if rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
end
elseif headsize > 0 and repeatheader then -- following chunk gets head
@@ -845,7 +1040,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(head[i],true)
end
if rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
end
else -- following chunk gets nothing
@@ -872,7 +1067,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
-- all is flushed and footer fits
if footsize > 0 then
if rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
for i=1,#foot do
inject(foot[i])
@@ -886,7 +1081,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
-- todo: try to flush a few more lines
if repeatfooter and footsize > 0 then
if rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
for i=1,#foot do
inject(foot[i],true)
@@ -897,7 +1092,7 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
texsetcount("global","c_tabl_x_state",2)
end
else
- if firstsize > vsize then
+ if firstsize > height then
-- get rid of the too large cell
for s=1,firstspans do
inject(body[bodystart])
@@ -921,13 +1116,13 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(head[i],false,true)
end
if #head > 0 and rowdistance > 0 then
- context_blank(rowdistance .. "sp")
+ context_blank { rowdistance .. "sp" }
end
for i=1,#body do
inject(body[i],false,true)
end
if #foot > 0 and rowdistance > 0 then
- context_blank(rowdistance .. "sp")
+ context_blank { rowdistance .. "sp" }
end
for i=1,#foot do
inject(foot[i],false,true)
@@ -938,13 +1133,13 @@ function xtables.flush(directives) -- todo split by size / no inbetween then ..
inject(head[i])
end
if #head > 0 and rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
for i=1,#body do
inject(body[i])
end
if #foot > 0 and rowdistance > 0 then
- context(new_glue(rowdistance))
+ context(tonode(new_glue(rowdistance)))
end
for i=1,#foot do
inject(foot[i])
@@ -964,31 +1159,92 @@ function xtables.cleanup()
flush_node_list(r[1])
end
end
+
+ -- local rows = data.rows
+ -- for i=1,#rows do
+ -- local row = rows[i]
+ -- for i=1,#row do
+ -- local cell = row[i]
+ -- local list = cell.list
+ -- if list then
+ -- cell.width = getfield(list,"width")
+ -- cell.height = getfield(list,"height")
+ -- cell.depth = getfield(list,"depth")
+ -- cell.list = true
+ -- end
+ -- end
+ -- end
+ -- data.result = nil
+ -- inspect(data)
+
data = table.remove(stack)
end
-function xtables.next_row()
+function xtables.next_row(specification)
local r = data.currentrow + 1
data.modes[r] = texgetcount("c_tabl_x_mode")
data.currentrow = r
data.currentcolumn = 0
+ data.rowproperties[r] = specification
+end
+
+function xtables.finish_row()
+ local n = data.nofcolumns - data.currentcolumn
+ if n > 0 then
+ -- message
+ for i=1,n do
+ context_dummyxcell()
+ end
+ end
end
-- eventually we might only have commands
-commands.x_table_create = xtables.create
-commands.x_table_reflow_width = xtables.reflow_width
-commands.x_table_reflow_height = xtables.reflow_height
-commands.x_table_construct = xtables.construct
-commands.x_table_flush = xtables.flush
-commands.x_table_cleanup = xtables.cleanup
-commands.x_table_next_row = xtables.next_row
-commands.x_table_init_reflow_width = xtables.initialize_reflow_width
-commands.x_table_init_reflow_height = xtables.initialize_reflow_height
-commands.x_table_init_construct = xtables.initialize_construct
-commands.x_table_set_reflow_width = xtables.set_reflow_width
-commands.x_table_set_reflow_height = xtables.set_reflow_height
-commands.x_table_set_construct = xtables.set_construct
-
-commands.x_table_r = function() context(data.currentrow or 0) end
-commands.x_table_c = function() context(data.currentcolumn or 0) end
+implement {
+ name = "x_table_create",
+ actions = xtables.create,
+ arguments = {
+ {
+ { "option" },
+ { "textwidth", "dimen" },
+ { "textheight", "dimen" },
+ { "maxwidth", "dimen" },
+ { "lineheight", "dimen" },
+ { "columndistance", "dimen" },
+ { "leftmargindistance", "dimen" },
+ { "rightmargindistance", "dimen" },
+ { "rowdistance", "dimen" },
+ { "header" },
+ { "footer" },
+ }
+ }
+}
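+
+-- A sketch of how the TeX end feeds this scanner based interface (the real call sits
+-- in tabl-xtb.mkvi, later in this patch):
+--
+--   \clf_x_table_create
+--       option {...}%
+--       textwidth \d_tabl_x_textwidth
+--       ...
+--   \relax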
+
+implement {
+ name = "x_table_flush",
+ actions = xtables.flush,
+ arguments = {
+ {
+ { "method" },
+ { "height", "dimen" }
+ }
+ }
+}
+
+implement { name = "x_table_reflow_width", actions = xtables.reflow_width }
+implement { name = "x_table_reflow_height", actions = xtables.reflow_height }
+implement { name = "x_table_construct", actions = xtables.construct }
+implement { name = "x_table_cleanup", actions = xtables.cleanup }
+implement { name = "x_table_next_row", actions = xtables.next_row }
+implement { name = "x_table_next_row_option", actions = xtables.next_row, arguments = "string" }
+implement { name = "x_table_finish_row", actions = xtables.finish_row }
+implement { name = "x_table_init_reflow_width", actions = xtables.initialize_reflow_width }
+implement { name = "x_table_init_reflow_height", actions = xtables.initialize_reflow_height }
+implement { name = "x_table_init_reflow_width_option", actions = xtables.initialize_reflow_width, arguments = "string" }
+implement { name = "x_table_init_reflow_height_option", actions = xtables.initialize_reflow_height, arguments = "string" }
+implement { name = "x_table_init_construct", actions = xtables.initialize_construct }
+implement { name = "x_table_set_reflow_width", actions = xtables.set_reflow_width }
+implement { name = "x_table_set_reflow_height", actions = xtables.set_reflow_height }
+implement { name = "x_table_set_construct", actions = xtables.set_construct }
+implement { name = "x_table_r", actions = function() context(data.currentrow or 0) end }
+implement { name = "x_table_c", actions = function() context(data.currentcolumn or 0) end }
diff --git a/tex/context/base/tabl-xtb.mkvi b/tex/context/base/tabl-xtb.mkvi
index 556bec5ce..73ba91045 100644
--- a/tex/context/base/tabl-xtb.mkvi
+++ b/tex/context/base/tabl-xtb.mkvi
@@ -29,6 +29,8 @@
% - maybe only tag the box
% - scale to fit
%
+% - buffers permit verbatim but are not always handy
+
%D This module started as an afternoon experiment and surprisingly could be
%D mostly finished the same evening. Of course it builds upon existing
%D functionality. The main reason for writing it is that we occasionally
@@ -78,8 +80,13 @@
\let\dotagxtablesignal\relax % names will change
\appendtoks
- \def\dotagxtablecell {\taggedctxcommand{settagtablecell(\number\tablecellrows,\number\tablecellcolumns,\number\raggedstatus)}}%
- \def\dotagxtablesignal{\char\zerocount}% not used
+ \def\dotagxtablecell
+ {\clf_settagtablecell
+ \numexpr\tablecellrows\relax
+ \numexpr\tablecellcolumns\relax
+ \numexpr\raggedstatus\relax}%
+ \def\dotagxtablesignal
+ {\char\zerocount}% not used
\to \everyenableelements
\newdimen\d_tabl_x_width
@@ -96,8 +103,8 @@
\newcount\c_tabl_x_skip_mode % 1 = skip
\newdimen\d_tabl_x_textwidth
-\def\currentxtablerow {\ctxcommand{x_table_r()}}
-\def\currentxtablecolumn{\ctxcommand{x_table_c()}}
+\let\currentxtablerow \clf_x_table_r
+\let\currentxtablecolumn\clf_x_table_c
% \setupxtable[one][parent][a=b,c=d]
% \setupxtable[one] [a=b,c=d]
@@ -159,7 +166,7 @@
{\bgroup
\tabl_x_prepare{#settings}%
\edef\tabl_x_current_buffer{\tabl_x_default_buffer}%
- \buff_pickup{\tabl_x_current_buffer}{startxtable}{stopxtable}\relax\tabl_x_process}
+ \buff_pickup{\tabl_x_current_buffer}{startxtable}{stopxtable}\relax\tabl_x_process\zerocount}
\unexpanded\def\processxtablebuffer
{\dosingleempty\tabl_x_process_buffer_directly}
@@ -175,7 +182,7 @@
{\bgroup
\let\tabl_x_start_table\tabl_x_process_buffer
\edef\tabl_x_current_buffer{#name}%
- \tabl_x_get_buffer % pickup settings
+ \tabl_x_get_buffer % settings
\tabl_x_process}
\unexpanded\def\tabl_x_start_ignore[#settings]%
@@ -194,7 +201,7 @@
\unexpanded\def\tabl_x_embedded_start[#settings]#content\stopembeddedxtable
{\tabl_x_prepare{#settings}%
- \ctxcommand{assignbuffer("embedded_x_table",\!!bs\detokenize{#content}\!!es)}%
+ \clf_assignbuffer{embedded_x_table}{\detokenize{#content}}\catcodetable\relax
\bgroup
\let\tabl_x_start_table\tabl_x_process_buffer
\edef\tabl_x_current_buffer{embedded_x_table}%
@@ -216,14 +223,14 @@
\unexpanded\def\tabl_x_start_named_indeed[#settings]%
{\advance\c_tabl_x_nesting\plusone
- \dostarttagged\t!table\empty
+ \dostarttaggedchained\t!table\empty\??xtable
\iffirstargument
\setupcurrentxtable[#settings]%
\fi
\tabl_x_check_textwidth
- \forgetall
+ %\forgetall % else whitespace mess
\edef\tabl_x_current_buffer{\tabl_x_default_buffer}%
- \normalexpanded{\buff_pickup{\tabl_x_current_buffer}{\e!start\currentxtable}{\e!stop\currentxtable}\relax\tabl_x_process}}
+ \normalexpanded{\buff_pickup{\tabl_x_current_buffer}{\e!start\currentxtable}{\e!stop\currentxtable}\relax\tabl_x_process\zerocount}}
\unexpanded\def\tabl_x_stop_named
{}
@@ -240,15 +247,15 @@
\unexpanded\def\tabl_x_prepare#settings% assumes \iffirstargument to be set
{\advance\c_tabl_x_nesting\plusone
- \dostarttagged\t!table\empty
+ \dostarttaggedchained\t!table\empty\??xtable
\iffirstargument
\tabl_x_set_checked{#settings}%
\fi
\tabl_x_check_textwidth
- \forgetall}
+ }% else whitespace mess
\def\tabl_x_get_buffer
- {\ctxcommand{gettexbuffer("\tabl_x_current_buffer")}}
+ {\clf_gettexbuffer{\tabl_x_current_buffer}}
\let\tabl_x_start_row_yes \relax
\let\tabl_x_start_row_nop \relax
@@ -259,23 +266,24 @@
\unexpanded\def\tabl_x_process
{\begingroup % *
- \doifsomething{\xtableparameter\c!bodyfont}
- {\setupbodyfont[\xtableparameter\c!bodyfont]}%
+ \forgetall % moved here
+ \dontcomplain % for the moment here till we figure out where we get the overflow
+ \usebodyfontparameter\xtableparameter
\setbox\scratchbox\vbox
- {\xtableparameter\c!spaceinbetween}%
- \ctxcommand{x_table_create {
- option = "\xtableparameter\c!option",
- textwidth = \number\d_tabl_x_textwidth,
- textheight = \number\dimexpr\xtableparameter\c!textheight,
- maxwidth = \number\dimexpr\xtableparameter\c!maxwidth,
- lineheight = \number\openlineheight,
- columndistance = \number\dimexpr\xtableparameter\c!columndistance,
- leftmargindistance = \number\dimexpr\xtableparameter\c!leftmargindistance,
- rightmargindistance = \number\dimexpr\xtableparameter\c!rightmargindistance,
- rowdistance = \number\ht\scratchbox,
- header = "\xtableparameter\c!header",
- footer = "\xtableparameter\c!footer",
- } }%
+ {\doifsomething{\xtableparameter\c!spaceinbetween}{\blank[\xtableparameter\c!spaceinbetween]}}%
+ \clf_x_table_create
+ option {\xtableparameter\c!option}%
+ textwidth \d_tabl_x_textwidth
+ textheight \dimexpr\xtableparameter\c!textheight\relax
+ maxwidth \dimexpr\xtableparameter\c!maxwidth\relax
+ lineheight \openlineheight
+ columndistance \dimexpr\xtableparameter\c!columndistance\relax
+ leftmargindistance \dimexpr\xtableparameter\c!leftmargindistance\relax
+ rightmargindistance \dimexpr\xtableparameter\c!rightmargindistance\relax
+ rowdistance \ht\scratchbox
+ header {\xtableparameter\c!header}%
+ footer {\xtableparameter\c!footer}%
+ \relax
%
\letxtableparameter\c!option\empty
% not so nice but needed as we use this in the setup
@@ -290,7 +298,7 @@
\let\tabl_x_stop_cell \tabl_x_stop_cell_reflow_width
\settrialtypesetting
\tabl_x_get_buffer
- \ctxcommand{x_table_reflow_width()}\relax
+ \clf_x_table_reflow_width
\endgroup
\begingroup
\let\tabl_x_start_row_yes \tabl_x_start_row_reflow_height_yes
@@ -301,7 +309,7 @@
\let\tabl_x_stop_cell \tabl_x_stop_cell_reflow_height
\settrialtypesetting
\tabl_x_get_buffer
- \ctxcommand{x_table_reflow_height()}\relax
+ \clf_x_table_reflow_height
\endgroup
\begingroup
\let\tabl_x_start_row_yes \tabl_x_start_row_construct_yes
@@ -311,7 +319,7 @@
\let\tabl_x_start_cell_nop\tabl_x_start_cell_construct_nop
\let\tabl_x_stop_cell \tabl_x_stop_cell_construct
\tabl_x_get_buffer
- \ctxcommand{x_table_construct()}\relax
+ \clf_x_table_construct
\endgroup
\endgroup % *
\ifinsidesplitfloat
@@ -321,7 +329,7 @@
\else
\tabl_x_flush_text_checked
\fi\fi
- \ctxcommand{x_table_cleanup()}%
+ \clf_x_table_cleanup
\dostoptagged
\resetbuffer[\tabl_x_current_buffer]%
\egroup}
@@ -336,19 +344,31 @@
% in text flow: headers and footers only once
\setvalue{\??xtableflushsplit\v!yes}%
- {\ctxcommand{x_table_flush{ method = "\v!split" }}}
+ {\clf_x_table_flush
+ method {\v!split}%
+ \relax}
% in text flow: headers and footers only once
\setvalue{\??xtableflushsplit\v!no}%
- {\dontleavehmode % else no leftskip etc
- \ctxcommand{x_table_flush{ method = "\v!normal" }}}
+ {% \noindent % gives extra line after table
+ % \noindentation % messes up the next indentation
+ % \dontleavehmode % no leftskip
+ \kern\zeropoint % yet another guess
+ \ignorespaces
+ \clf_x_table_flush
+ method {\v!normal}%
+ \relax
+ \removeunwantedspaces}
% in text flow: headers and footers get repeated
\setvalue{\??xtableflushsplit\v!repeat}%
{\doloop
- {\ctxcommand{x_table_flush{ method = "\v!split", vsize = \number\ifdim\pagegoal=\maxdimen\textheight\else\pagegoal\fi }}%
+ {\clf_x_table_flush
+ method {\v!split}%
+ height \ifdim\pagegoal=\maxdimen\textheight\else\pagegoal\fi
+ \relax
\ifcase\c_tabl_x_state
\exitloop
\else
@@ -370,7 +390,10 @@
% \stopsetups
%
% \unexpanded\def\xtablesplitflush
-% {\ctxcommand{x_table_flush{ method = "\v!split", vsize = \number\dimexpr\xtablesplitvsize\relax}}\relax}
+% {\clf_x_table_flush
+% method {\v!split}%
+% height \dimexpr\xtablesplitvsize\relax
+% \relax}
%
% \def\xtablesplitvsize
% {\ifdim\pagegoal=\maxdimen\textheight\else\pagegoal\fi}
@@ -380,7 +403,9 @@
\let\extratxtablesplitheight\zeropoint % might disappear so don't depend on it
\unexpanded\def\tabl_x_flush_float_normal
- {\ctxcommand{x_table_flush{ method = "\v!normal" }}}
+ {\clf_x_table_flush
+ method {\v!normal}%
+ \relax}
\unexpanded\def\tabl_x_flush_float_split
{\resetdirecttsplit
@@ -391,9 +416,12 @@
\let\tsplitdirectwidth \d_tabl_x_final_width
\handledirecttsplit}
-\unexpanded\def\tabl_x_split_splitter#vsize%
+\unexpanded\def\tabl_x_split_splitter#height%
{\setbox\tsplitresult\vbox
- {\ctxcommand{x_table_flush{ method = "\v!split", vsize = \number\dimexpr#vsize }}}%
+ {\clf_x_table_flush
+ method {\v!split}%
+ height \dimexpr#height\relax
+ \relax}%
\ifcase\c_tabl_x_state
\global\setfalse\somenextsplitofffloat
\else
@@ -402,14 +430,14 @@
\unexpanded\def\startxrow
{\begingroup
- \doifnextoptionalelse\tabl_x_start_row_yes\tabl_x_start_row_nop}
+ \doifelsenextoptionalcs\tabl_x_start_row_yes\tabl_x_start_row_nop}
\unexpanded\def\tabl_x_start_row_reflow_width_yes[#settings]%
{\setupcurrentxtable[#settings]%
- \ctxcommand{x_table_next_row()}}
+ \clf_x_table_next_row}
\unexpanded\def\tabl_x_start_row_reflow_width_nop
- {\ctxcommand{x_table_next_row()}}
+ {\clf_x_table_next_row}
\unexpanded\def\tabl_x_stop_row_reflow_width
{}
@@ -421,27 +449,36 @@
\unexpanded\def\tabl_x_start_row_construct_yes[#settings]%
{\setupcurrentxtable[#settings]%
\dostarttagged\t!tablerow\empty
- \ctxcommand{x_table_next_row()}}
+ \clf_x_table_next_row_option{\xtableparameter\c!samepage}}
\unexpanded\def\tabl_x_start_row_construct_nop
{\dostarttagged\t!tablerow\empty
- \ctxcommand{x_table_next_row()}}
+ \clf_x_table_next_row}
\unexpanded\def\tabl_x_stop_row_construct
- {\dostoptagged}
+ {\clf_x_table_finish_row
+ \dostoptagged}
\unexpanded\def\stopxrow
{\tabl_x_stop_row
\endgroup}
\unexpanded\def\startxcell
- {\doifnextoptionalelse\tabl_x_start_cell_yes\tabl_x_start_cell_nop}
+ {\doifelsenextoptionalcs\tabl_x_start_cell_yes\tabl_x_start_cell_nop}
\unexpanded\def\stopxcell
{\tabl_x_stop_cell}
+\unexpanded\def\dummyxcell#1%
+ {\tabl_x_start_cell_nop
+ \tabl_x_stop_cell}
+
+\unexpanded\def\dummyxcell
+ {\tabl_x_start_cell_nop
+ \tabl_x_stop_cell}
+
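+% The Lua end (xtables.finish_row) uses this one to pad a row that has fewer cells
+% than the table has columns.
+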
\unexpanded\def\tabl_x_begin_of_cell
- {\inhibitblank
+ {%\inhibitblank % already in framed
\everypar{\delayedbegstrut}}
\unexpanded\def\tabl_x_end_of_cell
@@ -467,8 +504,16 @@
% Although this becomes kind of messy. It saves already time that we only check
% for it when we have settings.
-\def\tabl_x_set_hsize
- {\hsize.25\maxdimen} % let's be reasonable
+% \def\tabl_x_set_hsize
+% {\hsize.25\maxdimen} % let's be reasonable
+
+% \def\tabl_x_set_hsize
+% {\edef\p_width{\xtableparameter\c!width}%
+% \ifx\p_width\empty
+% \hsize.25\maxdimen % is this really needed
+% \fi}
+
+\let\tabl_x_set_hsize\relax
\unexpanded\def\tabl_x_start_cell_reflow_width_yes[#settings]%
{\setbox\b_tabl_x\hbox\bgroup
@@ -494,7 +539,7 @@
\fi
%
\d_tabl_x_distance\xtableparameter\c!distance\relax
- \ctxcommand{x_table_init_reflow_width("\xtableparameter\c!option")}%
+ \clf_x_table_init_reflow_width_option{\xtableparameter\c!option}%
\inheritedxtableframed\bgroup
\tabl_x_begin_of_cell
\tabl_x_set_hsize}
@@ -508,7 +553,7 @@
\c_tabl_x_nx\plusone
\c_tabl_x_ny\plusone
\d_tabl_x_distance\xtableparameter\c!distance\relax
- \ctxcommand{x_table_init_reflow_width()}%
+ \clf_x_table_init_reflow_width
\inheritedxtableframed\bgroup
\tabl_x_begin_of_cell
\tabl_x_set_hsize}
@@ -517,11 +562,11 @@
{\tabl_x_end_of_cell
\egroup
\egroup
- \ctxcommand{x_table_set_reflow_width()}}
+ \clf_x_table_set_reflow_width}
\unexpanded\def\tabl_x_start_cell_reflow_height_yes[#settings]%
{\setbox\b_tabl_x\hbox\bgroup
- \ctxcommand{x_table_init_reflow_height()}%
+ \clf_x_table_init_reflow_height
\ifcase\c_tabl_x_skip_mode % can be sped up
\ifnum\c_tabl_x_nesting>\plusone
\letxtableparameter\c!height\v!fit % overloads given height
@@ -535,7 +580,7 @@
\unexpanded\def\tabl_x_start_cell_reflow_height_nop
{\setbox\b_tabl_x\hbox\bgroup
- \ctxcommand{x_table_init_reflow_height()}%
+ \clf_x_table_init_reflow_height
\ifcase\c_tabl_x_skip_mode % can be sped up
\ifnum\c_tabl_x_nesting>\plusone
\letxtableparameter\c!height\v!fit % overloads given height
@@ -552,7 +597,7 @@
\egroup
\fi
\egroup
- \ctxcommand{x_table_set_reflow_height()}}
+ \clf_x_table_set_reflow_height}
\unexpanded\def\tabl_x_start_cell_construct_yes[#settings]%
{\dostarttagged\t!tablecell\empty % can't we just tag the box
@@ -560,7 +605,7 @@
\setupcurrentxtable[#settings]%
\letxtableparameter\c!width \d_tabl_x_width % overloads given width
\letxtableparameter\c!height\d_tabl_x_height % overloads given height
- \ctxcommand{x_table_init_construct()}%
+ \clf_x_table_init_construct
\inheritedxtableframed\bgroup
\tabl_x_begin_of_cell
\dotagxtablecell}
@@ -569,8 +614,8 @@
{\dostarttagged\t!tablecell\empty % can't we just tag the box
\setbox\b_tabl_x\hbox\bgroup
\letxtableparameter\c!width \d_tabl_x_width % overloads given width
- \letxtableparameter\c!height\d_tabl_x_height % overloads given height
- \ctxcommand{x_table_init_construct()}%
+ \letxtableparameter\c!height\d_tabl_x_height % overloads given height (commenting it ... nice option)
+ \clf_x_table_init_construct
\inheritedxtableframed\bgroup
\tabl_x_begin_of_cell
\dotagxtablecell}
@@ -580,7 +625,7 @@
\egroup
\dotagxtablesignal % harmless spot
\egroup
- \ctxcommand{x_table_set_construct()}%
+ \clf_x_table_set_construct
\dostoptagged}
\unexpanded\def\startxcellgroup
@@ -677,7 +722,7 @@
\unexpanded\def\startxgroup
{\begingroup
- \doifnextoptionalelse\tabl_x_start_group_delayed_one\relax}
+ \doifelsenextoptionalcs\tabl_x_start_group_delayed_one\relax}
\unexpanded\def\stopxgroup
{\endgroup}
@@ -695,7 +740,7 @@
\chaintocurrentxtable{#tag}%
\fi
\edef\currentxtable{#tag}%
- \doifnextoptionalelse\setupcurrentxtable\relax}
+ \doifelsenextoptionalcs\setupcurrentxtable\relax}
\let\startxrowgroup \startxgroup
\let\stopxrowgroup \stopxgroup
@@ -706,7 +751,7 @@
\unexpanded\def\startxcell
{\begingroup
- \doifnextoptionalelse\tabl_x_start_cell_delayed_one\tabl_x_start_cell_nop}
+ \doifelsenextoptionalcs\tabl_x_start_cell_delayed_one\tabl_x_start_cell_nop}
\unexpanded\def\tabl_x_start_cell_delayed_one[#tag]%
% {\ifcsname\namedxtablehash{#tag}\s!parent\endcsname
@@ -721,7 +766,7 @@
\chaintocurrentxtable{#tag}%
\fi
\edef\currentxtable{#tag}%
- \doifnextoptionalelse\tabl_x_start_cell_yes\tabl_x_start_cell_nop}
+ \doifelsenextoptionalcs\tabl_x_start_cell_yes\tabl_x_start_cell_nop}
\unexpanded\def\stopxcell
{\tabl_x_stop_cell
@@ -731,7 +776,7 @@
\unexpanded\def\startxrow
{\begingroup
- \doifnextoptionalelse\tabl_x_start_row_delayed_one\tabl_x_start_row_nop}
+ \doifelsenextoptionalcs\tabl_x_start_row_delayed_one\tabl_x_start_row_nop}
\unexpanded\def\tabl_x_start_row_delayed_one[#tag]%
% {\ifcsname\namedxtablehash{#tag}\s!parent\endcsname
@@ -746,7 +791,7 @@
\chaintocurrentxtable{#tag}%
\fi
\edef\currentxtable{#tag}%
- \doifnextoptionalelse\tabl_x_start_row_yes\tabl_x_start_row_nop}
+ \doifelsenextoptionalcs\tabl_x_start_row_yes\tabl_x_start_row_nop}
\unexpanded\def\stopxrow
{\tabl_x_stop_row
diff --git a/tex/context/base/task-ini.lua b/tex/context/base/task-ini.lua
index 3447214bd..062f0208f 100644
--- a/tex/context/base/task-ini.lua
+++ b/tex/context/base/task-ini.lua
@@ -18,11 +18,14 @@ if not modules then modules = { } end modules ['task-ini'] = {
-- not apply the font handler, we can remove all checks for subtypes 255
local tasks = nodes.tasks
+local prependaction = tasks.prependaction
local appendaction = tasks.appendaction
local disableaction = tasks.disableaction
+local enableaction = tasks.enableaction
local freezegroup = tasks.freezegroup
local freezecallbacks = callbacks.freeze
+appendaction("processors", "normalizers", "typesetters.wrappers.handler") -- disabled
appendaction("processors", "normalizers", "typesetters.characters.handler") -- always on
appendaction("processors", "normalizers", "fonts.collections.process") -- disabled
appendaction("processors", "normalizers", "fonts.checkers.missing") -- disabled
@@ -35,11 +38,14 @@ appendaction("processors", "characters", "typesetters.cases.handler")
appendaction("processors", "characters", "typesetters.breakpoints.handler") -- disabled
appendaction("processors", "characters", "scripts.injectors.handler") -- disabled
-appendaction("processors", "words", "builders.kernel.hyphenation") -- always on
+appendaction("processors", "words", "languages.replacements.handler") -- disabled
+
+appendaction("processors", "words", "languages.hyphenators.handler") -- always on
+
appendaction("processors", "words", "languages.words.check") -- disabled -- might move up, no disc check needed then
appendaction("processors", "words", "typesetters.initials.handler") -- disabled -- might move up
-appendaction("processors", "words", "typesetters.firstlines.handler") -- disabled -- might move up
+appendaction("processors", "words", "typesetters.firstlines.handler") -- disabled -- might move down
appendaction("processors", "fonts", "builders.paragraphs.solutions.splitters.split") -- experimental
appendaction("processors", "fonts", "nodes.handlers.characters") -- maybe todo
@@ -55,9 +61,12 @@ appendaction("processors", "lists", "typesetters.spacings.handler")
appendaction("processors", "lists", "typesetters.kerns.handler") -- disabled
appendaction("processors", "lists", "typesetters.digits.handler") -- disabled (after otf handling)
appendaction("processors", "lists", "typesetters.italics.handler") -- disabled (after otf/kern handling)
-------------("processors", "lists", "typesetters.initials.handler") -- disabled
+appendaction("processors", "lists", "languages.visualizediscretionaries") -- disabled
+
+-- appendaction("processors", "lists", "typesetters.initials.handler") -- disabled
appendaction("shipouts", "normalizers", "nodes.handlers.cleanuppage") -- disabled
+appendaction("shipouts", "normalizers", "builders.paragraphs.expansion.trace") -- disabled
appendaction("shipouts", "normalizers", "typesetters.alignments.handler")
appendaction("shipouts", "normalizers", "nodes.references.handler") -- disabled
appendaction("shipouts", "normalizers", "nodes.destinations.handler") -- disabled
@@ -99,27 +108,35 @@ appendaction("math", "builders", "builders.kernel.mlist_to_hlist")
------------("math", "builders", "noads.handlers.italics", nil, "nohead") -- disabled
appendaction("math", "builders", "typesetters.directions.processmath") -- disabled (has to happen pretty late)
--- quite experimental (nodes.handlers.graphicvadjust might go away)
-
appendaction("finalizers", "lists", "builders.paragraphs.keeptogether")
-appendaction("finalizers", "lists", "nodes.handlers.graphicvadjust") -- todo
+------------("finalizers", "lists", "nodes.handlers.graphicvadjust") -- todo
appendaction("finalizers", "fonts", "builders.paragraphs.solutions.splitters.optimize") -- experimental
appendaction("finalizers", "lists", "builders.paragraphs.tag")
-- still experimental
-appendaction("mvlbuilders", "normalizers", "nodes.handlers.migrate") --
+appendaction("mvlbuilders", "normalizers", "nodes.handlers.migrate")
+
appendaction("mvlbuilders", "normalizers", "builders.vspacing.pagehandler") -- last !
+appendaction("mvlbuilders", "normalizers", "builders.profiling.pagehandler") -- here !
-appendaction("vboxbuilders", "normalizers", "builders.vspacing.vboxhandler") --
+appendaction("vboxbuilders", "normalizers", "builders.vspacing.vboxhandler")
+appendaction("vboxbuilders", "normalizers", "builders.profiling.vboxhandler") -- here !
-- experimental too
-appendaction("mvlbuilders", "normalizers","typesetters.checkers.handler")
-appendaction("vboxbuilders","normalizers","typesetters.checkers.handler")
+appendaction("mvlbuilders", "normalizers", "typesetters.checkers.handler")
+appendaction("vboxbuilders", "normalizers", "typesetters.checkers.handler")
+
+-- rather special (this might get hardcoded):
+
+prependaction("processors", "before", "nodes.properties.attach") -- enabled but optimized for quick abort
+appendaction ("shipouts", "normalizers", "nodes.properties.delayed") -- enabled but optimized for quick abort
-- speedup: only kick in when used
+disableaction("processors", "typesetters.wrappers.handler")
+disableaction("processors", "languages.replacements.handler")
disableaction("processors", "typesetters.characteralign.handler")
disableaction("processors", "scripts.autofontfeature.handler")
disableaction("processors", "scripts.splitters.handler")
@@ -138,8 +155,10 @@ disableaction("processors", "typesetters.firstlines.handler")
disableaction("processors", "typesetters.spacings.handler")
disableaction("processors", "typesetters.kerns.handler")
disableaction("processors", "typesetters.italics.handler")
+disableaction("processors", "languages.visualizediscretionaries")
disableaction("processors", "nodes.handlers.stripping")
+disableaction("shipouts", "builders.paragraphs.expansion.trace")
disableaction("shipouts", "typesetters.alignments.handler")
disableaction("shipouts", "nodes.rules.handler")
disableaction("shipouts", "nodes.shifts.handler")
@@ -154,7 +173,6 @@ disableaction("shipouts", "nodes.visualizers.handler")
disableaction("shipouts", "nodes.handlers.accessibility")
disableaction("shipouts", "nodes.handlers.backgrounds")
disableaction("shipouts", "nodes.handlers.alignbackgrounds")
-disableaction("shipouts", "nodes.handlers.cleanuppage")
disableaction("shipouts", "nodes.references.handler")
disableaction("shipouts", "nodes.destinations.handler")
@@ -167,7 +185,7 @@ disableaction("processors", "builders.paragraphs.solutions.splitters.split")
disableaction("finalizers", "builders.paragraphs.keeptogether")
disableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize")
-disableaction("finalizers", "nodes.handlers.graphicvadjust") -- sort of obsolete
+-------------("finalizers", "nodes.handlers.graphicvadjust") -- sort of obsolete
disableaction("finalizers", "builders.paragraphs.tag")
disableaction("math", "noads.handlers.showtree")
@@ -177,7 +195,10 @@ disableaction("math", "noads.handlers.classes")
disableaction("math", "typesetters.directions.processmath")
disableaction("mvlbuilders", "typesetters.checkers.handler")
+disableaction("mvlbuilders", "builders.profiling.pagehandler")
+
disableaction("vboxbuilders","typesetters.checkers.handler")
+disableaction("vboxbuilders","builders.profiling.vboxhandler")
freezecallbacks("find_.*_file", "find file using resolver")
freezecallbacks("read_.*_file", "read file at once")
@@ -206,3 +227,18 @@ freezegroup("vboxbuilders", "normalizers")
freezegroup("math", "normalizers")
freezegroup("math", "builders")
+
+-- new: disabled here
+
+disableaction("processors", "builders.kernel.ligaturing")
+disableaction("processors", "builders.kernel.kerning")
+
+directives.register("nodes.basepass", function(v)
+ if v then
+ disableaction("processors", "builders.kernel.ligaturing")
+ disableaction("processors", "builders.kernel.kerning")
+ else
+ enableaction("processors", "builders.kernel.ligaturing")
+ enableaction("processors", "builders.kernel.kerning")
+ end
+end)
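+
+-- An illustrative sketch (not part of this patch, assuming the usual trac-set helpers):
+-- setting the directive keeps the separate ligaturing and kerning passes disabled,
+-- unsetting it enables them again:
+--
+-- directives.enable ("nodes.basepass")
+-- directives.disable("nodes.basepass")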
diff --git a/tex/context/base/toks-ini.lua b/tex/context/base/toks-ini.lua
index 0f0c016f8..f8b945890 100644
--- a/tex/context/base/toks-ini.lua
+++ b/tex/context/base/toks-ini.lua
@@ -5,338 +5,252 @@ if not modules then modules = { } end modules ['toks-ini'] = {
license = "see context related readme files"
}
-local context, commands = context, commands
-local utfbyte, utfchar, utfvalues = utf.byte, utf.char, utf.values
-local format, gsub = string.format, string.gsub
-
---[[ldx--
-This code is experimental and needs a cleanup. The visualizers will move to
-a module.
---ldx]]--
-
--- 1 = command, 2 = modifier (char), 3 = controlsequence id
---
--- callback.register('token_filter', token.get_next)
---
--- token.get_next()
--- token.expand()
--- token.create()
--- token.csname_id()
--- token.csname_name(v)
--- token.command_id()
--- token.command_name(v)
--- token.is_expandable()
--- token.is_activechar()
--- token.lookup(v)
-
--- actually, we can use token registers to store tokens
-
-local token, tex = token, tex
-
-local createtoken = token.create
-local csname_id = token.csname_id
-local command_id = token.command_id
-local command_name = token.command_name
-local get_next = token.get_next
-local expand = token.expand
-local is_activechar = token.is_activechar
-local csname_name = token.csname_name
-
-tokens = tokens or { }
-local tokens = tokens
-
-tokens.vbox = createtoken("vbox")
-tokens.hbox = createtoken("hbox")
-tokens.vtop = createtoken("vtop")
-tokens.bgroup = createtoken(utfbyte("{"), 1)
-tokens.egroup = createtoken(utfbyte("}"), 2)
-
-tokens.letter = function(chr) return createtoken(utfbyte(chr), 11) end
-tokens.other = function(chr) return createtoken(utfbyte(chr), 12) end
-
-tokens.letters = function(str)
- local t, n = { }, 0
- for chr in utfvalues(str) do
- n = n + 1
- t[n] = createtoken(chr, 11)
- end
- return t
-end
-
-tokens.collectors = tokens.collectors or { }
-local collectors = tokens.collectors
-
-collectors.data = collectors.data or { }
-local collectordata = collectors.data
+tokens = tokens or { }
+
+local tokens = tokens
+local tostring = tostring
+local utfchar = utf.char
+local char = string.char
+local printtable = table.print
+local concat = table.concat
+
+if newtoken then
+
+ if setinspector then
+
+ local istoken = newtoken.is_token
+ local simple = { letter = "letter", other_char = "other" }
+
+ local function astable(t)
+ if t and istoken(t) then
+ local cmdname = t.cmdname
+ local simple = simple[cmdname]
+ if simple then
+ return {
+ category = simple,
+ character = utfchar(t.mode) or nil,
+ }
+ else
+ return {
+ command = t.command,
+ id = t.id,
+ tok = t.tok,
+ csname = t.csname,
+ active = t.active,
+ expandable = t.expandable,
+ protected = t.protected,
+ mode = t.mode,
+ cmdname = cmdname,
+ }
+ end
+ end
+ end
-collectors.registered = collectors.registered or { }
-local registered = collectors.registered
+ tokens.istoken = istoken
+ tokens.astable = astable
-local function printlist(data)
- callbacks.push('token_filter', function ()
- callbacks.pop('token_filter') -- tricky but the nil assignment helps
- return data
- end)
-end
+ setinspector(function(v) if istoken(v) then printtable(astable(v),tostring(v)) return true end end)
-tex.printlist = printlist -- will change to another namespace
+ end
-function collectors.flush(tag)
- printlist(collectordata[tag])
-end
+ local scan_toks = newtoken.scan_toks
+ local scan_string = newtoken.scan_string
+ local scan_int = newtoken.scan_int
+ local scan_code = newtoken.scan_code
+ local scan_dimen = newtoken.scan_dimen
+ local scan_glue = newtoken.scan_glue
+ local scan_keyword = newtoken.scan_keyword
+ local scan_token = newtoken.scan_token
+ local scan_word = newtoken.scan_word
+ local scan_number = newtoken.scan_number
-function collectors.test(tag)
- printlist(collectordata[tag])
-end
+ local get_next = newtoken.get_next
-function collectors.register(name)
- registered[csname_id(name)] = name
-end
+ local set_macro = newtoken.set_macro
-local call = command_id("call")
-local letter = command_id("letter")
-local other = command_id("other_char")
-
-function collectors.install(tag,end_cs)
- local data, d = { }, 0
- collectordata[tag] = data
- local endcs = csname_id(end_cs)
- while true do
- local t = get_next()
- local a, b = t[1], t[3]
- if b == endcs then
- context["end_cs"]()
- return
- elseif a == call and registered[b] then
- expand()
+ set_macro = function(k,v,g)
+ if g == "global" then
+ context.setgvalue(k,v or '')
else
- d = d + 1
- data[d] = t
+ context.setvalue(k,v or '')
end
end
-end
-
-function collectors.handle(tag,handle,flush)
- collectordata[tag] = handle(collectordata[tag])
- if flush then
- collectors.flush(tag)
- end
-end
-local show_methods = { }
-collectors.show_methods = show_methods
+ local bits = {
+ escape = 2^ 0,
+ begingroup = 2^ 1,
+ endgroup = 2^ 2,
+ mathshift = 2^ 3,
+ alignment = 2^ 4,
+ endofline = 2^ 5,
+ parameter = 2^ 6,
+ superscript = 2^ 7,
+ subscript = 2^ 8,
+ ignore = 2^ 9,
+ space = 2^10, -- 1024
+ letter = 2^11,
+ other = 2^12,
+ active = 2^13,
+ comment = 2^14,
+ invalid = 2^15,
+ --
+ character = 2^11 + 2^12,
+ whitespace = 2^13 + 2^10, -- / needs more checking
+ --
+ open = 2^10 + 2^1, -- space + begingroup
+ close = 2^10 + 2^2, -- space + endgroup
+ }
+
+ -- for k, v in next, bits do bits[v] = k end
+
+ tokens.bits = bits
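+
+    -- An illustrative sketch (not part of this patch): scan_code takes a bit mask of
+    -- acceptable catcodes and returns the character code when the next token matches,
+    -- so accepting a letter or an other character looks like this:
+    --
+    -- local c = scan_code(bits.letter + bits.other) -- the same mask as bits.character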
+
+ local space_bits = bits.space
+
+ -- words are space or \relax terminated and the trailing space is gobbled; a word
+ -- can contain any non-space letter/other
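+    --
+    -- for example (sketch): with "123 456 " pending in the input, two successive calls
+    -- to scan_word return "123" and "456", each gobbling the trailing space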
+
+ local t = { } -- small optimization, a shared variable that is not reset
+
+ if scan_word then
+
+ scan_number = function(base)
+ local s = scan_word()
+ if not s then
+ return nil
+ elseif base then
+ return tonumber(s,base)
+ else
+ return tonumber(s)
+ end
+ end
-function collectors.show(tag, method)
- if type(tag) == "table" then
- show_methods[method or 'a'](tag)
else
- show_methods[method or 'a'](collectordata[tag])
- end
-end
-
-function collectors.defaultwords(t,str)
- local n = #t
- n = n + 1
- t[n] = tokens.bgroup
- n = n + 1
- t[n] = createtoken("red")
- for i=1,#str do
- n = n + 1
- t[n] = tokens.other('*')
- end
- n = n + 1
- t[n] = tokens.egroup
-end
-function collectors.dowithwords(tag,handle)
- local t, w, tn, wn = { }, { }, 0, 0
- handle = handle or collectors.defaultwords
- local tagdata = collectordata[tag]
- for k=1,#tagdata do
- local v = tagdata[k]
- if v[1] == letter then
- wn = wn + 1
- w[wn] = v[2]
- else
- if wn > 0 then
- handle(t,w)
- wn = 0
+ scan_word = function()
+ local n = 0
+ while true do
+ local c = scan_code()
+ if c then
+ n = n + 1
+ t[n] = utfchar(c)
+ elseif scan_code(space_bits) then
+ if n > 0 then
+ break
+ end
+ elseif n > 0 then
+ break
+ else
+ return
+ end
end
- tn = tn + 1
- t[tn] = v
+ return concat(t,"",1,n)
end
- end
- if wn > 0 then
- handle(t,w)
- end
- collectordata[tag] = t
-end
-local function showtoken(t)
- if t then
- local cmd, chr, id, cs, name = t[1], t[2], t[3], nil, command_name(t) or ""
- if cmd == letter or cmd == other then
- return format("%s-> %s -> %s", name, chr, utfchar(chr))
- elseif id > 0 then
- cs = csname_name(t) or nil
- if cs then
- return format("%s-> %s", name, cs)
- elseif tonumber(chr) < 0 then
- return format("%s-> %s", name, id)
+        -- so we gobble the space (like scan_int); a number has to be space or non-char
+        -- terminated because we accept 0xabcd and such, so there is no clear separator for a keyword
+
+ scan_number = function(base)
+ local n = 0
+ while true do
+ local c = scan_code()
+ if c then
+ n = n + 1
+ t[n] = char(c)
+ elseif scan_code(space_bits) then
+ if n > 0 then
+ break
+ end
+ elseif n > 0 then
+ break
+ else
+ return
+ end
+ end
+ local s = concat(t,"",1,n)
+ if base then
+ return tonumber(s,base)
else
- return format("%s-> (%s,%s)", name, chr, id)
+ return tonumber(s)
end
- else
- return format("%s", name)
end
- else
- return "no node"
- end
-end
-
-collectors.showtoken = showtoken
-
-function collectors.trace()
- local t = get_next()
- logs.report("tokenlist",showtoken(t))
- return t
-end
-
--- these might move to a runtime module
-show_methods.a = function(data) -- no need to store the table, just pass directly
- local function row(one,two,three,four,five)
- context.NC() context(one)
- context.NC() context(two)
- context.NC() context(three)
- context.NC() context(four)
- context.NC() context(five)
- context.NC() context.NR()
end
- context.starttabulate { "|T|Tr|cT|Tr|T|" }
- row("cmd","chr","","id","name")
- context.HL()
- for _,v in next, data do
- local cmd, chr, id, cs, sym = v[1], v[2], v[3], "", ""
- local name = gsub(command_name(v) or "","_","\\_")
- if id > 0 then
- cs = csname_name(v) or ""
- if cs ~= "" then cs = "\\string " .. cs end
- else
- id = ""
- end
- if cmd == letter or cmd == other then
- sym = "\\char " .. chr
- end
- if tonumber(chr) < 0 then
- row(name,"",sym,id,cs)
- else
- row(name,chr,sym,id,cs)
- end
- end
- context.stoptabulate()
-end
-local function show_b_c(data,swap) -- no need to store the table, just pass directly
- local function row(one,two,three)
- context.NC() context(one)
- context.NC() context(two)
- context.NC() context(three)
- context.NC() context.NR()
- end
- if swap then
- context.starttabulate { "|Tl|Tl|Tr|" }
- else
- context.starttabulate { "|Tl|Tr|Tl|" }
- end
- row("cmd","chr","name")
- context.HL()
- for _,v in next, data do
- local cmd, chr, id, cs, sym = v[1], v[2], v[3], "", ""
- local name = gsub(command_name(v) or "","_","\\_")
- if id > 0 then
- cs = csname_name(v) or ""
- end
- if cmd == letter or cmd == other then
- sym = "\\char " .. chr
- elseif cs == "" then
- -- okay
- elseif is_activechar(v) then
- sym = "\\string " .. cs
+ -- -- the next one cannot handle \iftrue true\else false\fi
+ --
+ -- local function scan_boolean()
+ -- if scan_keyword("true") then
+ -- return true
+ -- elseif scan_keyword("false") then
+ -- return false
+ -- else
+ -- return nil
+ -- end
+ -- end
+
+ local function scan_boolean()
+ local kw = scan_word()
+ if kw == "true" then
+ return true
+ elseif kw == "false" then
+ return false
else
- sym = "\\string\\" .. cs
- end
- if swap then
- row(name,sym,chr)
- elseif tonumber(chr) < 0 then
- row(name,"",sym)
- else
- row(name,chr,sym)
+ return nil
end
end
- context.stoptabulate()
-end
--- Even more experimental ...
+ tokens.scanners = { -- these expand
+ token = scan_token or get_next,
+ toks = scan_toks,
+ tokens = scan_toks,
+ dimen = scan_dimen,
+ dimension = scan_dimen,
+ glue = scan_glue,
+ skip = scan_glue,
+ integer = scan_int,
+ count = scan_int,
+ string = scan_string,
+ code = scan_code,
+ word = scan_word,
+ number = scan_number,
+ boolean = scan_boolean,
+ keyword = scan_keyword,
+ }
+
+ tokens.getters = { -- these don't expand
+ token = get_next,
+ count = tex.getcount,
+ dimen = tex.getdimen,
+ box = tex.getbox,
+ }
+
+ tokens.setters = {
+ macro = set_macro,
+ count = tex.setcount,
+ dimen = tex.setdimen,
+ box = tex.setbox,
+ }
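+
+    -- A usage sketch (not part of this patch; the function name is made up): the
+    -- scanners parse whatever follows the macro that triggers the Lua call:
+    --
+    -- function document.demo_scan()
+    --     local n = tokens.scanners.integer() -- expands and reads an integer
+    --     local w = tokens.scanners.word   () -- reads the next space delimited word
+    --     context("%s and %s",n,w)
+    -- end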
-show_methods.b = function(data) show_b_c(data,false) end
-show_methods.c = function(data) show_b_c(data,true ) end
-
-local remapper = { } -- namespace
-collectors.remapper = remapper
-
-local remapperdata = { } -- user mappings
-remapper.data = remapperdata
-
-function remapper.store(tag,class,key)
- local s = remapperdata[class]
- if not s then
- s = { }
- remapperdata[class] = s
- end
- s[key] = collectordata[tag]
- collectordata[tag] = nil
end
-function remapper.convert(tag,toks)
- local data = remapperdata[tag]
- local leftbracket, rightbracket = utfbyte('['), utfbyte(']')
- local skipping = 0
- -- todo: math
- if data then
- local t, n = { }, 0
- for s=1,#toks do
- local tok = toks[s]
- local one, two = tok[1], tok[2]
- if one == 11 or one == 12 then
- if two == leftbracket then
- skipping = skipping + 1
- n = n + 1 ; t[n] = tok
- elseif two == rightbracket then
- skipping = skipping - 1
- n = n + 1 ; t[n] = tok
- elseif skipping == 0 then
- local new = data[two]
- if new then
- if #new > 1 then
- for n=1,#new do
- n = n + 1 ; t[n] = new[n]
- end
- else
- n = n + 1 ; t[n] = new[1]
- end
- else
- n = n + 1 ; t[n] = tok
- end
- else
- n = n + 1 ; t[n] = tok
- end
- else
- n = n + 1 ; t[n] = tok
- end
- end
- return t
- else
- return toks
- end
-end
+-- static int run_scan_token(lua_State * L)
+-- {
+-- saved_tex_scanner texstate;
+-- save_tex_scanner(texstate);
+-- get_x_token();
+-- make_new_token(L, cur_cmd, cur_chr, cur_cs);
+-- unsave_tex_scanner(texstate);
+-- return 1;
+-- }
+--
+-- static int run_get_future(lua_State * L)
+-- {
+-- /* saved_tex_scanner texstate; */
+-- /* save_tex_scanner(texstate); */
+-- get_token();
+-- make_new_token(L, cur_cmd, cur_chr, cur_cs);
+-- back_input();
+-- /* unsave_tex_scanner(texstate); */
+-- return 1;
+-- }
diff --git a/tex/context/base/toks-ini.mkiv b/tex/context/base/toks-ini.mkiv
index 4eb756b75..49625a939 100644
--- a/tex/context/base/toks-ini.mkiv
+++ b/tex/context/base/toks-ini.mkiv
@@ -14,57 +14,11 @@
\writestatus{loading}{ConTeXt Token Support / Initialization}
\registerctxluafile{toks-ini}{1.001}
+\registerctxluafile{toks-scn}{1.001}
+\registerctxluafile{cldf-scn}{1.001}
\unprotect
-%D Handy for manuals \unknown
-
-\unexpanded\def\starttokens [#1]{\ctxlua{tokens.collectors.install("#1","stoptokens")}}
- \let\stoptokens \relax
- \def\flushtokens [#1]{\ctxlua{tokens.collectors.flush("#1")}}
- \def\showtokens [#1]{\ctxlua{tokens.collectors.show("#1")}}
- \def\testtokens [#1]{\ctxlua{tokens.collectors.with_words("#1")}}
- \def\registertoken #1{\ctxlua{tokens.collectors.register("#1")}}
-
-%D Inspired by a prototype by Taco for Thomas cum suis.
-
-% \defineremapper[babelgreek]
-%
-% \remapcharacter[babelgreek][`a]{\alpha}
-% \remapcharacter[babelgreek][`b]{\beta}
-% \remapcharacter[babelgreek][`c]{\gamma}
-% \remapcharacter[babelgreek][`d]{OEPS}
-%
-% \starttext
-%
-% [\startbabelgreek
-% a b c some stuff here \blank[big] oeps b d
-% \stopbabelgreek]
-%
-% [\babelgreek{some stuff here}]
-%
-% \stoptext
-
-\unexpanded\def\defineremapper[#1]%
- {\setuevalue{\e!start#1}{\toks_start_remapper{#1}}%
- \setuevalue{\e!stop #1}{\toks_stop_remapper {#1}}%
- \letvalue{#1}\relax
- \normalexpanded{\expandafter\def\csname#1\endcsname##1{\csname\e!start#1\endcsname##1\csname\e!stop#1\endcsname}}}
-
-\unexpanded\def\toks_start_remapper#1%
- {\ctxlua{tokens.collectors.install("#1", "\e!stop#1")}}
-
-\unexpanded\def\toks_stop_remapper#1%
- {\ctxlua{tokens.collectors.handle("#1",function(str) return tokens.collectors.remapper.convert("#1",str) end, true)}}
-
-\unexpanded\def\remaptokens#1%
- {\ctxlua{tokens.collectors.handle("#1",function(str) return tokens.collectors.remapper.convert("#1",str) end)}}
-
-\unexpanded\def\remapcharacter
- {\dodoubleempty\toks_remap_character}
-
-\def\toks_remap_character[#1][#2]#3%
- {\ctxlua{tokens.collectors.install("store", "ctxlua")}#3%
- \ctxlua{tokens.collectors.remapper.store("store","#1",\number#2)}}
+% nothing yet
\protect \endinput
diff --git a/tex/context/base/toks-map.lua b/tex/context/base/toks-map.lua
new file mode 100644
index 000000000..9120c2084
--- /dev/null
+++ b/tex/context/base/toks-map.lua
@@ -0,0 +1,70 @@
+if not modules then modules = { } end modules ['toks-map'] = {
+ version = 1.001,
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Even more experimental ... this used to be part of toks-ini but as this kind of
+-- remapping has little use it is not loaded in the core. We just keep it here for
+-- old times' sake.
+
+-- local remapper = { } -- namespace
+-- collectors.remapper = remapper
+--
+-- local remapperdata = { } -- user mappings
+-- remapper.data = remapperdata
+--
+-- function remapper.store(tag,class,key)
+-- local s = remapperdata[class]
+-- if not s then
+-- s = { }
+-- remapperdata[class] = s
+-- end
+-- s[key] = collectordata[tag]
+-- collectordata[tag] = nil
+-- end
+--
+-- function remapper.convert(tag,toks)
+-- local data = remapperdata[tag]
+-- local leftbracket = utfbyte('[')
+-- local rightbracket = utfbyte(']')
+-- local skipping = 0
+-- -- todo: math
+-- if data then
+-- local t, n = { }, 0
+-- for s=1,#toks do
+-- local tok = toks[s]
+-- local one, two = tok[1], tok[2]
+-- if one == 11 or one == 12 then
+-- if two == leftbracket then
+-- skipping = skipping + 1
+-- n = n + 1 ; t[n] = tok
+-- elseif two == rightbracket then
+-- skipping = skipping - 1
+-- n = n + 1 ; t[n] = tok
+-- elseif skipping == 0 then
+-- local new = data[two]
+-- if new then
+-- if #new > 1 then
+-- for n=1,#new do
+-- n = n + 1 ; t[n] = new[n]
+-- end
+-- else
+-- n = n + 1 ; t[n] = new[1]
+-- end
+-- else
+-- n = n + 1 ; t[n] = tok
+-- end
+-- else
+-- n = n + 1 ; t[n] = tok
+-- end
+-- else
+-- n = n + 1 ; t[n] = tok
+-- end
+-- end
+-- return t
+-- else
+-- return toks
+-- end
+-- end
diff --git a/tex/context/base/toks-map.mkiv b/tex/context/base/toks-map.mkiv
new file mode 100644
index 000000000..f1b63a68b
--- /dev/null
+++ b/tex/context/base/toks-map.mkiv
@@ -0,0 +1,63 @@
+%D \module
+%D [ file=toks-map, % experimental moved from toks-ini
+%D version=2007.03.03,
+%D title=\CONTEXT\ Token Support,
+%D subtitle=Initialization,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% \writestatus{loading}{ConTeXt Token Support / Remapping}
+%
+% \registerctxluafile{toks-map}{1.001}
+%
+% \unprotect
+%
+% %D Inspired by a prototype by Taco for Thomas cum suis.
+% %D
+% %D \starttyping
+% %D \defineremapper[babelgreek]
+% %D
+% %D \remapcharacter[babelgreek][`a]{\alpha}
+% %D \remapcharacter[babelgreek][`b]{\beta}
+% %D \remapcharacter[babelgreek][`c]{\gamma}
+% %D \remapcharacter[babelgreek][`d]{OEPS}
+% %D
+% %D \starttext
+% %D
+% %D [\startbabelgreek
+% %D a b c some stuff here \blank[big] oeps b d
+% %D \stopbabelgreek]
+% %D
+% %D [\babelgreek{some stuff here}]
+% %D
+% %D \stoptext
+% %D \stoptyping
+%
+% \unexpanded\def\defineremapper[#1]%
+% {\setuevalue{\e!start#1}{\toks_start_remapper{#1}}%
+% \setuevalue{\e!stop #1}{\toks_stop_remapper {#1}}%
+% \letvalue{#1}\relax
+% \normalexpanded{\expandafter\def\csname#1\endcsname##1{\csname\e!start#1\endcsname##1\csname\e!stop#1\endcsname}}}
+%
+% \unexpanded\def\toks_start_remapper#1%
+% {\ctxlua{tokens.collectors.install("#1", "\e!stop#1")}}
+%
+% \unexpanded\def\toks_stop_remapper#1%
+% {\ctxlua{tokens.collectors.handle("#1",function(str) return tokens.collectors.remapper.convert("#1",str) end, true)}}
+%
+% \unexpanded\def\remaptokens#1%
+% {\ctxlua{tokens.collectors.handle("#1",function(str) return tokens.collectors.remapper.convert("#1",str) end)}}
+%
+% \unexpanded\def\remapcharacter
+% {\dodoubleempty\toks_remap_character}
+%
+% \def\toks_remap_character[#1][#2]#3%
+% {\ctxlua{tokens.collectors.install("store", "ctxlua")}#3%
+% \ctxlua{tokens.collectors.remapper.store("store","#1",\number#2)}}
+%
+% \protect \endinput
diff --git a/tex/context/base/toks-scn.lua b/tex/context/base/toks-scn.lua
new file mode 100644
index 000000000..84924c694
--- /dev/null
+++ b/tex/context/base/toks-scn.lua
@@ -0,0 +1,437 @@
+if not modules then modules = { } end modules ['toks-scn'] = {
+ version = 1.001,
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Writing this kind of code (and completing the newtoken code base) is fun. I did
+-- so with the brilliant film music from The Girl with the Dragon Tattoo running in a
+-- loop in the background (three CDs by Trent Reznor and Atticus Ross). An alien
+-- feeling helps with alien code.
+
+-- todo: more \let's at the tex end
+
+local type, next, tostring, tonumber = type, next, tostring, tonumber
+
+local formatters = string.formatters
+local concat = table.concat
+
+local scanners = tokens.scanners
+local tokenbits = tokens.bits
+
+local scanstring = scanners.string
+local scaninteger = scanners.integer
+local scannumber = scanners.number
+local scankeyword = scanners.keyword
+local scanword = scanners.word
+local scancode = scanners.code
+local scanboolean = scanners.boolean
+local scandimen = scanners.dimen
+
+local todimen = number.todimen
+local toboolean = toboolean
+
+local lpegmatch = lpeg.match
+local p_unquoted = lpeg.Cs(lpeg.patterns.unquoted)
+
+local trace_compile = false trackers.register("tokens.compile", function(v) trace_compile = v end)
+local report_compile = logs.reporter("tokens","compile")
+local report_scan = logs.reporter("tokens","scan")
+
+local open = tokenbits.open
+local close = tokenbits.close
+
+local function scanopen()
+ while true do
+ local c = scancode(open)
+ if c == 123 then
+ return true
+ -- elseif c ~= 32 then
+ elseif not c then
+ return
+ end
+ end
+end
+
+local function scanclose()
+ while true do
+ local c = scancode(close)
+ if c == 125 then
+ return true
+ -- elseif c ~= 32 then
+ elseif not c then
+ return
+ end
+ end
+end
+
+scanners.scanopen = scanopen
+scanners.scanclose = scanclose
+
+local function scanlist()
+ local wrapped = scanopen()
+ local list = { }
+ local size = 0
+ while true do
+ local entry = scanstring()
+ if entry then
+ size = size + 1
+ list[size] = entry
+ else
+ break
+ end
+ end
+ if wrapped then
+ scanclose()
+ end
+ return list
+end
+
+local function scanconditional()
+ local kw = scanword()
+ if kw == "true" then
+ return true
+ end
+ if kw == "false" then
+ return false
+ end
+ local c = scaninteger()
+ if c then
+ return c == 0 -- with a conditional 0=true
+ end
+ return nil
+end
+
+scanners.list = scanlist
+scanners.conditional = scanconditional
+
+local shortcuts = {
+ tokens = tokens,
+ bits = tokenbits,
+ open = open,
+ close = close,
+ scanners = scanners,
+ scanstring = scanstring,
+ scaninteger = scaninteger,
+ scannumber = scannumber,
+ scankeyword = scankeyword,
+ scanword = scanword,
+ scancode = scancode,
+ scanboolean = scanboolean,
+ scandimen = scandimen,
+ scandimension = scandimen,
+ scanconditional = scanconditional,
+ scanopen = scanopen,
+ scanclose = scanclose,
+ scanlist = scanlist,
+ todimen = todimen,
+ tonumber = tonumber,
+ tostring = tostring,
+ toboolean = toboolean,
+ inspect = inspect,
+ report = report_scan,
+}
+
+tokens.shortcuts = shortcuts
+
+local load = load
+local dump = string.dump
+
+local function loadstripped(code)
+ return load(code,nil,nil,shortcuts)
+ -- return load(dump(load(code),true),nil,nil,shortcuts)
+end
+
+tokens.converters = {
+ tonumber = "tonumber",
+ tostring = "tostring",
+ toboolean = "toboolean",
+ todimen = "todimen",
+ toglue = "todimen",
+}
+
+local f_if = formatters[ " if scankeyword('%s') then data['%s'] = scan%s()"]
+local f_elseif = formatters[" elseif scankeyword('%s') then data['%s'] = scan%s()"]
+local f_local = formatters["local scan%s = scanners.%s"]
+local f_scan = formatters["scan%s()"]
+local f_shortcut = formatters["local %s = scanners.converters.%s"]
+
+local f_if_c = formatters[ " if scankeyword('%s') then data['%s'] = %s(scan%s())"]
+local f_elseif_c = formatters[" elseif scankeyword('%s') then data['%s'] = %s(scan%s())"]
+local f_scan_c = formatters["%s(scan%s())"]
+
+local f_any = formatters[" else local key = scanword() if key then data[key] = scan%s() else break end end"]
+local f_any_c = formatters[" else local key = scanword() if key then data[key] = %s(scan%s()) else break end end"]
+local s_done = " else break end"
+
+local f_any_all = formatters[" local key = scanword() if key then data[key] = scan%s() else break end"]
+local f_any_all_c= formatters[" local key = scanword() if key then data[key] = %s(scan%s()) else break end"]
+
+local f_table = formatters["%\nt\nreturn function()\n local data = { }\n%s\n return %s\nend\n"]
+local f_sequence = formatters["%\nt\n%\nt\n%\nt\nreturn function()\n return %s\nend\n"]
+local f_simple = formatters["%\nt\nreturn function()\n return %s\nend\n"]
+local f_string = formatters["%q"]
+local f_action_f = formatters["action%s(%s)"]
+local f_action_s = formatters["local action%s = tokens._action[%s]"]
+local f_nested = formatters["local function scan%s()\n local data = { }\n%s\n return data\nend\n"]
+
+-- local f_check = formatters[ [[
+-- local wrapped = false
+-- while true do
+-- local c = scancode(open)
+-- if c == 123 then
+-- wrapped = true
+-- break
+-- elseif c ~= 32 then
+-- break
+-- end
+-- end
+-- while true do
+-- ]] .. "%\nt\n" .. [[
+-- %s
+-- end
+-- if wrapped then
+-- while true do
+-- local c = scancode(close)
+-- if c == 125 then
+-- break
+-- elseif c ~= 32 then
+-- break
+-- end
+-- end
+-- end
+-- ]] ]
+
+local f_check = formatters[ [[
+ local wrapped = scanopen()
+ while true do
+ ]] .. "%\nt\n" .. [[
+ %s
+ end
+ if wrapped then
+ scanclose()
+ end
+]] ]
+
+function tokens.compile(specification)
+ local f = { }
+ local n = 0
+ local c = { }
+ local t = specification.arguments or specification
+ local a = specification.actions or nil
+ if type(a) == "function" then
+ a = { a }
+ end
+ local code
+ local function compile(t,nested)
+ local done = s_done
+ local r = { }
+ local m = 0
+ for i=1,#t do
+ local ti = t[i]
+ if ti == "*" and i == 1 then
+ done = f_any_all("string")
+ else
+ local t1 = ti[1]
+ local t2 = ti[2] or "string"
+ if type(t2) == "table" then
+ n = n + 1
+ f[n] = compile(t2,n)
+ t2 = n
+ end
+ local t3 = ti[3]
+ if type(t3) == "function" then
+ -- todo: also create shortcut
+ elseif t3 then
+ c[t3] = f_shortcut(t3,t3)
+ if t1 == "*" then
+ if i == 1 then
+ done = f_any_all_c(t3,t2)
+ break
+ else
+ done = f_any_c(t3,t2)
+ end
+ else
+ m = m + 1
+ r[m] = (m > 1 and f_elseif_c or f_if_c)(t1,t1,t3,t2)
+ end
+ else
+ if t1 == "*" then
+ if i == 1 then
+ done = f_any_all(t2)
+ break
+ else
+ done = f_any(t2)
+ end
+ else
+ m = m + 1
+ r[m] = (m > 1 and f_elseif or f_if )(t1,t1,t2)
+ end
+ end
+ end
+ end
+ local c = f_check(r,done)
+ if nested then
+ return f_nested(nested,c)
+ else
+ return c
+ end
+ end
+ local tt = type(t)
+ if tt == "string" then
+ if a then
+ local s = lpegmatch(p_unquoted,t)
+ if s and t ~= s then
+ code = t
+ else
+ code = f_scan(t)
+ end
+ tokens._action = a
+ for i=1,#a do
+ code = f_action_f(i,code)
+ f[#f+1] = f_action_s(i,i)
+ end
+ code = f_simple(f,code)
+ else
+ return scanners[t]
+ end
+ elseif tt ~= "table" then
+ return
+ elseif #t == 1 then
+ local ti = t[1]
+ if type(ti) == "table" then
+ ti = compile(ti)
+ code = "data"
+ if a then
+ tokens._action = a
+ for i=1,#a do
+ code = f_action_f(i,code)
+ f[#f+1] = f_action_s(i,i)
+ end
+ end
+ code = f_table(f,ti,code)
+ elseif a then
+ code = f_scan(ti)
+ tokens._action = a
+ for i=1,#a do
+ code = f_action_f(i,code)
+ f[#f+1] = f_action_s(i,i)
+ end
+ code = f_simple(f,code)
+ else
+ return scanners[ti]
+ end
+ else
+ local r = { }
+ local p = { }
+ local m = 0
+ for i=1,#t do
+ local ti = t[i]
+ local tt = type(ti)
+ if tt == "table" then
+ if ti[1] == "_constant_" then
+ local v = ti[2]
+ if type(v) == "string" then
+ r[i] = f_string(v)
+ else
+ r[i] = tostring(v)
+ end
+ else
+ m = m + 1
+ p[m] = compile(ti,100+m)
+ r[i] = f_scan(100+m)
+ end
+ elseif tt == "number" then
+ r[i] = tostring(ti)
+ elseif tt == "boolean" then
+ r[i] = tostring(ti)
+ else
+ local s = lpegmatch(p_unquoted,ti)
+ if s and ti ~= s then
+ r[i] = ti -- a string, given as "'foo'" or '"foo"'
+ elseif scanners[ti] then
+ r[i] = f_scan(ti)
+ else
+ report_compile("unknown scanner %a",ti)
+ r[i] = ti
+ end
+ end
+ end
+ code = concat(r,",")
+ if a then
+ tokens._action = a
+ for i=1,#a do
+ code = f_action_f(i,code)
+ f[#f+1] = f_action_s(i,i)
+ end
+ end
+ code = f_sequence(c,f,p,code)
+ end
+ if not code then
+ return
+ end
+ if trace_compile then
+ report_compile("code: %s",code)
+ end
+ local compiled, message = loadstripped(code) -- don't shadow 'code' so a failing chunk can still be reported
+ if compiled then
+ compiled = compiled() -- sets action
+ else
+ report_compile("error in code: %s",code)
+ report_compile("error message: %s",message)
+ end
+ if a then
+ tokens._action = nil
+ end
+ if compiled then
+ return compiled
+ end
+end
+
+-- local fetch = tokens.compile {
+-- "string",
+-- "string",
+-- {
+-- { "data", "string" },
+-- { "tab", "string" },
+-- { "method", "string" },
+-- { "foo", {
+-- { "method", "integer" },
+-- { "compact", "number" },
+-- { "nature" },
+-- { "*" }, -- any key
+-- } },
+-- { "compact", "string", "tonumber" },
+-- { "nature", "boolean" },
+-- { "escape", "string" },
+-- { "escape" },
+-- }
+-- "boolean",
+-- }
+--
+-- os.exit()
+
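+-- A hedged usage sketch for the (commented) demo above; the variable names are just
+-- illustrations, not part of this module:
+--
+-- local first, second, options, flag = fetch()
+-- -- first, second : the two scanned strings
+-- -- options       : the keyword/value table scanned between (optional) braces
+-- -- flag          : the trailing boolean
+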
+function tokens.scantable(t,data)
+ if not data then
+ data = { }
+ end
+ local wrapped = scanopen()
+ while true do
+ local key = scanword()
+ if key then
+ local get = t[key]
+ if get then
+ data[key] = get()
+ else
+ -- catch all we can get
+ end
+ else
+ break
+ end
+ end
+ if wrapped then
+ scanclose()
+ end
+ return data
+end
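+
+-- A minimal usage sketch of the above; the scanner table is hypothetical and only
+-- meant to show the calling convention (keys are read as words, values with the
+-- given scanners):
+--
+-- local data = tokens.scantable {
+--     style   = scanstring,
+--     scale   = scannumber,
+--     reverse = scanconditional,
+-- }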
diff --git a/tex/context/base/toks-tra.lua b/tex/context/base/toks-tra.lua
new file mode 100644
index 000000000..bf2b91d38
--- /dev/null
+++ b/tex/context/base/toks-tra.lua
@@ -0,0 +1,298 @@
+if not modules then modules = { } end modules ['toks-tra'] = { -- was part of toks-ini
+ version = 1.001,
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local utfbyte, utfchar, utfvalues = utf.byte, utf.char, utf.values
+local format, gsub = string.format, string.gsub
+local tostring = tostring
+
+local tokens = tokens
+local newtoken = newtoken
+local tex = tex
+local context = context
+local commands = commands
+
+tokens.collectors = tokens.collectors or { }
+local collectors = tokens.collectors
+
+collectors.data = collectors.data or { }
+local collectordata = collectors.data
+
+collectors.registered = collectors.registered or { }
+local registered = collectors.registered
+
+local report = logs.reporter("tokens","collectors")
+
+if newtoken then
+
+ -- todo:
+ --
+ -- register : macros that will be expanded (only for demo-ing)
+ -- flush : print back to tex
+ -- test : fancy stuff
+
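+ -- A small sketch of the intended flow, using the functions defined below; the
+ -- "demo" tag and the \stopdemo terminator are just placeholders:
+ --
+ -- collectors.install("demo","stopdemo") -- grab tokens until \stopdemo shows up
+ -- collectors.show("demo") -- typeset an overview table of the collected tokens
+ -- collectors.flush("demo") -- print them back to tex (currently not supported)
+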
+ local get_next = newtoken.get_next
+ local create = newtoken.create
+
+ function collectors.install(tag,end_cs)
+ local data, d = { }, 0
+ collectordata[tag] = data
+ end_cs = gsub(end_cs,"^\\","")
+ while true do
+ local t = get_next()
+ if t.csname == end_cs then
+ context[end_cs]()
+ return
+ else
+ d = d + 1
+ data[d] = t
+ end
+ end
+ end
+
+ local simple = { letter = "letter", other_char = "other" }
+
+ function collectors.show(data)
+ -- We no longer have methods as we only used (in demos) method a
+ -- so there is no need to burden the core with this. We have a
+ -- different table anyway.
+ if type(data) == "string" then
+ data = collectordata[data]
+ end
+ if not data then
+ return
+ end
+ local ctx_NC = context.NC
+ local ctx_NR = context.NR
+ local ctx_bold = context.bold
+ local ctx_verbatim = context.verbatim
+ context.starttabulate { "|Tl|Tc|Tl|" }
+ ctx_NC() ctx_bold("cmd")
+ ctx_NC() ctx_bold("meaning")
+ ctx_NC() ctx_bold("properties")
+ ctx_NC() ctx_NR()
+ context.HL()
+ for i=1,#data do
+ local token = data[i]
+ local cmdname = token.cmdname
+ local simple = simple[cmdname]
+ ctx_NC()
+ ctx_verbatim(simple or cmdname)
+ ctx_NC()
+ ctx_verbatim(simple and utfchar(token.mode) or token.csname)
+ ctx_NC()
+ if token.active then context("active ") end
+ if token.expandable then context("expandable ") end
+ if token.protected then context("protected ") end
+ ctx_NC()
+ ctx_NR()
+ end
+ context.stoptabulate()
+ end
+
+ local function printlist(data)
+ if data and #data > 0 then
+ report("not supported (yet): printing back to tex")
+ end
+ end
+
+ tokens.printlist = printlist -- will change to another namespace
+
+ function collectors.flush(tag)
+ printlist(collectordata[tag])
+ end
+
+ function collectors.test(tag,handle)
+ report("not supported (yet): testing")
+ end
+
+ function collectors.register(name)
+ report("not supported (yet): registering")
+ end
+
+else
+
+ -- 1 = command, 2 = modifier (char), 3 = controlsequence id
+
+ local create = token.create
+ local csname_id = token.csname_id
+ local command_id = token.command_id
+ local command_name = token.command_name
+ local get_next = token.get_next
+ local expand = token.expand
+ local csname_name = token.csname_name
+
+ local function printlist(data)
+ if data and #data > 0 then
+ callbacks.push('token_filter', function ()
+ callbacks.pop('token_filter') -- tricky but the nil assignment helps
+ return data
+ end)
+ end
+ end
+
+ tokens.printlist = printlist -- will change to another namespace
+
+ function collectors.flush(tag)
+ printlist(collectordata[tag])
+ end
+
+ function collectors.register(name)
+ registered[csname_id(name)] = name
+ end
+
+ local call = command_id("call")
+ local letter = command_id("letter")
+ local other = command_id("other_char")
+
+ function collectors.install(tag,end_cs)
+ local data, d = { }, 0
+ collectordata[tag] = data
+ end_cs = gsub(end_cs,"^\\","")
+ local endcs = csname_id(end_cs)
+ while true do
+ local t = get_next()
+ local a, b = t[1], t[3]
+ if b == endcs then
+ context[end_cs]()
+ return
+ elseif a == call and registered[b] then
+ expand()
+ else
+ d = d + 1
+ data[d] = t
+ end
+ end
+ end
+
+ function collectors.show(data)
+ -- We no longer have methods as we only used (in demos) method a
+ -- so there is no need to burden the core with this.
+ if type(data) == "string" then
+ data = collectordata[data]
+ end
+ if not data then
+ return
+ end
+ local ctx_NC = context.NC
+ local ctx_NR = context.NR
+ local ctx_bold = context.bold
+ local ctx_verbatim = context.verbatim
+ context.starttabulate { "|T|Tr|cT|Tr|T|" }
+ ctx_NC() ctx_bold("cmd")
+ ctx_NC() ctx_bold("chr")
+ ctx_NC()
+ ctx_NC() ctx_bold("id")
+ ctx_NC() ctx_bold("name")
+ ctx_NC() ctx_NR()
+ context.HL()
+ for i=1,#data do
+ local token = data[i]
+ local cmd = token[1]
+ local chr = token[2]
+ local id = token[3]
+ local name = command_name(token)
+ ctx_NC()
+ ctx_verbatim(name)
+ ctx_NC()
+ if tonumber(chr) >= 0 then
+ ctx_verbatim(chr)
+ end
+ ctx_NC()
+ if cmd == letter or cmd == other then
+ ctx_verbatim(utfchar(chr))
+ end
+ ctx_NC()
+ if id > 0 then
+ ctx_verbatim(id)
+ end
+ ctx_NC()
+ if id > 0 then
+ ctx_verbatim(csname_name(token) or "")
+ end
+ ctx_NC() ctx_NR()
+ end
+ context.stoptabulate()
+ end
+
+ function collectors.test(tag,handle)
+ local t, w, tn, wn = { }, { }, 0, 0
+ handle = handle or collectors.defaultwords
+ local tagdata = collectordata[tag]
+ for k=1,#tagdata do
+ local v = tagdata[k]
+ if v[1] == letter then
+ wn = wn + 1
+ w[wn] = v[2]
+ else
+ if wn > 0 then
+ handle(t,w)
+ wn = 0
+ end
+ tn = tn + 1
+ t[tn] = v
+ end
+ end
+ if wn > 0 then
+ handle(t,w)
+ end
+ collectordata[tag] = t
+ end
+
+end
+
+-- Interfacing:
+
+commands.collecttokens = collectors.install
+commands.showtokens = collectors.show
+commands.flushtokens = collectors.flush
+commands.testtokens = collectors.test
+commands.registertoken = collectors.register
+
+-- Redundant:
+
+-- function collectors.test(tag)
+-- printlist(collectordata[tag])
+-- end
+
+-- For old times' sake:
+
+collectors.dowithwords = collectors.test
+
+-- This is only used in old articles ... will move to a module:
+
+local create = newtoken and newtoken.create or token.create
+
+tokens.vbox = create("vbox")
+tokens.hbox = create("hbox")
+tokens.vtop = create("vtop")
+tokens.bgroup = create(utfbyte("{"),1)
+tokens.egroup = create(utfbyte("}"),2)
+
+tokens.letter = function(chr) return create(utfbyte(chr),11) end
+tokens.other = function(chr) return create(utfbyte(chr),12) end
+
+tokens.letters = function(str)
+ local t, n = { }, 0
+ for chr in utfvalues(str) do
+ n = n + 1
+ t[n] = create(chr, 11)
+ end
+ return t
+end
+
+function collectors.defaultwords(t,str)
+ if t then
+ local n = #t
+ n = n + 1 ; t[n] = tokens.bgroup
+ n = n + 1 ; t[n] = create("red")
+ for i=1,#str do
+ n = n + 1 ; t[n] = tokens.other('*')
+ end
+ n = n + 1 ; t[n] = tokens.egroup
+ end
+end
diff --git a/tex/context/base/toks-tra.mkiv b/tex/context/base/toks-tra.mkiv
new file mode 100644
index 000000000..a3e27eaf8
--- /dev/null
+++ b/tex/context/base/toks-tra.mkiv
@@ -0,0 +1,31 @@
+%D \module
+%D [ file=toks-tra, % was toks-ini
+%D version=2007.03.03,
+%D title=\CONTEXT\ Token Support,
+%D subtitle=Initialization,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Token Support / Tracing}
+
+\registerctxluafile{toks-tra}{1.001}
+
+\unprotect
+
+%D Handy for manuals \unknown\ but not really used in practice, so it might
+%D become a runtime loaded module instead.
+
+\unexpanded\def\starttokens [#1]{\ctxcommand{collecttokens("#1","stoptokens")}}
+ \let\stoptokens \relax
+ \def\flushtokens [#1]{\ctxcommand{flushtokens("#1")}}
+ \def\showtokens [#1]{\ctxcommand{showtokens("#1")}}
+ \def\testtokens [#1]{\ctxcommand{testtokens("#1")}}
+ \def\registertoken #1{\ctxcommand{registertoken("#1")}}
+
+\protect \endinput
diff --git a/tex/context/base/trac-ctx.lua b/tex/context/base/trac-ctx.lua
index 706e7a244..493ce7936 100644
--- a/tex/context/base/trac-ctx.lua
+++ b/tex/context/base/trac-ctx.lua
@@ -6,15 +6,15 @@ if not modules then modules = { } end modules ['trac-ctx'] = {
license = "see context related readme files"
}
-local commands = commands
-local context = context
-local register = trackers.register
+local context = context
+local implement = interfaces.implement
+local register = trackers.register
local textrackers = tex.trackers or { }
local texdirectives = tex.directives or { }
-tex.trackers = textrackers
-tex.directives = texdirectives
+tex.trackers = textrackers
+tex.directives = texdirectives
storage.register("tex/trackers", textrackers, "tex.trackers")
storage.register("tex/directives",texdirectives,"tex.directives")
@@ -39,10 +39,32 @@ local function install(category,register,tag,enable,disable)
register(tag, function(v) doit(category,tag,v) end) -- todo: v,tag in caller
end
-function commands.initializetextrackers () initialize(textrackers ,trackers .register ) end
-function commands.initializetexdirectives() initialize(texdirectives,directives.register) end
+implement {
+ name = "initializetextrackers",
+ actions = function()
+ initialize(textrackers,trackers.register)
+ end
+}
--- commands.install(tag,enable,disable):
+implement {
+ name = "initializetexdirectives",
+ actions = function()
+ initialize(texdirectives,directives.register)
+ end
+}
-function commands.installtextracker (...) install(textrackers ,trackers .register,...) end
-function commands.installtexdirective(...) install(texdirectives,directives.register,...) end
+implement {
+ name = "installtextracker",
+ actions = function(tag,enable,disable)
+ install(textrackers,trackers.register,tag,enable,disable)
+ end,
+ arguments = { "string", "string", "string" }
+}
+
+implement {
+ name = "installtexdirective",
+ actions = function(tag,enable,disable)
+ install(texdirectives,directives.register,tag,enable,disable)
+ end,
+ arguments = { "string", "string", "string" }
+}
diff --git a/tex/context/base/trac-ctx.mkiv b/tex/context/base/trac-ctx.mkiv
index 3baddede2..4240281a6 100644
--- a/tex/context/base/trac-ctx.mkiv
+++ b/tex/context/base/trac-ctx.mkiv
@@ -1,4 +1,3 @@
-
%D \module
%D [ file=trac-ctx,
%D version=2012.07.13,
@@ -19,14 +18,14 @@
\unprotect
\unexpanded\def\installtextracker#1#2#3%
- {\ctxcommand{installtextracker("#1",\!!bs\detokenize{#2}\!!es,\!!bs\detokenize{#3}\!!es)}}
+ {\clf_installtextracker{#1}{\detokenize{#2}}{\detokenize{#3}}}
\unexpanded\def\installtexdirective#1#2#3%
- {\ctxcommand{installtexdirective("#1",\!!bs\detokenize{#2}\!!es,\!!bs\detokenize{#3}\!!es)}}
+ {\clf_installtexdirective{#1}{\detokenize{#2}}{\detokenize{#3}}}
\appendtoks
- \ctxcommand{initializetextrackers ()}%
- \ctxcommand{initializetexdirectives()}%
+ \clf_initializetextrackers
+ \clf_initializetexdirectives
\to \everyjob
\protect \endinput
diff --git a/tex/context/base/trac-deb.lua b/tex/context/base/trac-deb.lua
index 4cc48c4a5..d998295c1 100644
--- a/tex/context/base/trac-deb.lua
+++ b/tex/context/base/trac-deb.lua
@@ -9,27 +9,32 @@ if not modules then modules = { } end modules ['trac-deb'] = {
local lpeg, status = lpeg, status
local lpegmatch = lpeg.match
-local format, concat, match = string.format, table.concat, string.match
+local format, concat, match, find = string.format, table.concat, string.match, string.find
local tonumber, tostring = tonumber, tostring
-- maybe tracers -> tracers.tex (and tracers.lua for current debugger)
-local report_system = logs.reporter("system","tex")
+----- report_tex = logs.reporter("tex error")
+----- report_lua = logs.reporter("lua error")
+local report_nl = logs.newline
+local report_str = logs.writer
-tracers = tracers or { }
-local tracers = tracers
+tracers = tracers or { }
+local tracers = tracers
-tracers.lists = { }
-local lists = tracers.lists
+tracers.lists = { }
+local lists = tracers.lists
-tracers.strings = { }
-local strings = tracers.strings
+tracers.strings = { }
+local strings = tracers.strings
-local texgetdimen = tex.getdimen
-local texgettoks = tex.gettoks
-local texgetcount = tex.getcount
+local texgetdimen = tex.getdimen
+local texgettoks = tex.gettoks
+local texgetcount = tex.getcount
-strings.undefined = "undefined"
+local implement = interfaces.implement
+
+strings.undefined = "undefined"
lists.scratch = {
0, 2, 4, 6, 8
@@ -96,7 +101,19 @@ function tracers.knownlist(name)
return l and #l > 0
end
-function tracers.showlines(filename,linenumber,offset,errorstr)
+local savedluaerror = nil
+
+local function errorreporter(luaerror)
+ if luaerror then
+ logs.enable("lua error") --
+ return logs.reporter("lua error")
+ else
+ logs.enable("tex error")
+ return logs.reporter("tex error")
+ end
+end
+
+function tracers.showlines(filename,linenumber,offset,luaerrorline)
local data = io.loaddata(filename)
if not data or data == "" then
local hash = url.hashed(filename)
@@ -109,35 +126,21 @@ function tracers.showlines(filename,linenumber,offset,errorstr)
end
local lines = data and string.splitlines(data)
if lines and #lines > 0 then
- -- This does not work completely as we cannot access the last Lua error using
- -- table.print(status.list()). This is on the agenda. Eventually we will
- -- have a sequence of checks here (tex, lua, mp) at this end.
- --
- -- Actually, in 0.75+ the lua error message is even weirder as you can
- -- get:
- --
- -- LuaTeX error [string "\directlua "]:3: unexpected symbol near '1' ...
- --
- -- \endgroup \directlua {
- --
- -- So there is some work to be done in the LuaTeX engine.
- --
- local what, where = match(errorstr,[[LuaTeX error :(%d+)]])
- or match(errorstr,[[LuaTeX error %[string "\\(.-lua) "%]:(%d+)]]) -- buglet
- if where then
+ if luaerrorline and luaerrorline > 0 then
-- lua error: linenumber points to last line
local start = "\\startluacode"
local stop = "\\stopluacode"
- local where = tonumber(where)
- if lines[linenumber] == start then
- local n = linenumber
- for i=n,1,-1 do
- if lines[i] == start then
- local n = i + where
- if n <= linenumber then
- linenumber = n
- end
+ local n = linenumber
+ for i=n,1,-1 do
+ local line = lines[i]
+ if not line then
+ break
+ elseif find(line,start) then
+ n = i + luaerrorline - 1
+ if n <= linenumber then
+ linenumber = n
end
+ break
end
end
end
@@ -159,30 +162,84 @@ function tracers.showlines(filename,linenumber,offset,errorstr)
end
end
-function tracers.printerror(offset)
- local inputstack = resolvers.inputstack
- local filename = inputstack[#inputstack] or status.filename
- local linenumber = tonumber(status.linenumber) or 0
+-- this will work ok in luatex >= 0.79
+
+-- todo: last tex error has ! prepended
+-- todo: some nested errors have two line numbers
+-- todo: collect errorcontext in string (after code cleanup)
+-- todo: have a separate status.lualinenumber
+
+-- todo: \starttext bla \blank[foo] bla \stoptext
+
+local function processerror(offset)
+ local inputstack = resolvers.inputstack
+ local filename = inputstack[#inputstack] or status.filename
+ local linenumber = tonumber(status.linenumber) or 0
+ --
+ -- print("[[ last tex error: " .. tostring(status.lasterrorstring) .. " ]]")
+ -- print("[[ last lua error: " .. tostring(status.lastluaerrorstring) .. " ]]")
+ -- print("[[ start errorcontext ]]")
+ -- tex.show_context()
+ -- print("\n[[ stop errorcontext ]]")
+ --
+ local lasttexerror = status.lasterrorstring or "?"
+ local lastluaerror = status.lastluaerrorstring or lasttexerror
+ local luaerrorline = match(lastluaerror,[[lua%]?:.-(%d+)]]) or (lastluaerror and find(lastluaerror,"?:0:",1,true) and 0)
+ local report = errorreporter(luaerrorline)
+ tracers.printerror {
+ filename = filename,
+ linenumber = linenumber,
+ lasttexerror = lasttexerror,
+ lastluaerror = lastluaerror,
+ luaerrorline = luaerrorline,
+ offset = tonumber(offset) or 10,
+ }
+end
+
+-- so one can overload the printer if (really) needed
+
+function tracers.printerror(specification)
+ local filename = specification.filename
+ local linenumber = specification.linenumber
+ local lasttexerror = specification.lasttexerror
+ local lastluaerror = specification.lastluaerror
+ local luaerrorline = specification.luaerrorline
+ local offset = specification.offset
+ local report = errorreporter(luaerrorline)
if not filename then
- report_system("error not related to input file: %s ...",status.lasterrorstring)
+ report("error not related to input file: %s ...",lasttexerror)
elseif type(filename) == "number" then
- report_system("error on line %s of filehandle %s: %s ...",linenumber,filename,status.lasterrorstring)
+ report("error on line %s of filehandle %s: %s ...",linenumber,filename,lasttexerror)
else
- -- currently we still get the error message printed to the log/console so we
- -- add a bit of spacing around our variant
- texio.write_nl("\n")
- local errorstr = status.lasterrorstring or "?"
- -- inspect(status.list())
- report_system("error on line %s in file %s: %s ...\n",linenumber,filename,errorstr) -- lua error?
- texio.write_nl(tracers.showlines(filename,linenumber,offset,errorstr),"\n")
+ report_nl()
+ if luaerrorline then
+ report("error on line %s in file %s:\n\n%s",linenumber,filename,lastluaerror)
+ -- report("error on line %s in file %s:\n\n%s",linenumber,filename,lasttexerror)
+ else
+ report("error on line %s in file %s: %s",linenumber,filename,lasttexerror)
+ if tex.show_context then
+ report_nl()
+ tex.show_context()
+ end
+ end
+ report_nl()
+ report_str(tracers.showlines(filename,linenumber,offset,tonumber(luaerrorline)))
+ report_nl()
end
end
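+
+-- A hedged sketch of such an overload (the reporter name and extra line are made up;
+-- the specification fields are the ones filled in by processerror above):
+--
+-- local myreport = logs.reporter("my errors")
+-- local default_printerror = tracers.printerror
+--
+-- function tracers.printerror(specification)
+--     myreport("%s at line %s of %s",
+--         specification.lasttexerror,specification.linenumber,specification.filename)
+--     default_printerror(specification)
+-- end
+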
+local nop = function() end
+
directives.register("system.errorcontext", function(v)
+ local register = callback.register
if v then
- callback.register('show_error_hook', function() tracers.printerror(v) end)
+ register('show_error_message', nop)
+ register('show_error_hook', function() processerror(v) end)
+ register('show_lua_error_hook', nop)
else
- callback.register('show_error_hook', nil)
+ register('show_error_message', nil)
+ register('show_error_hook', nil)
+ register('show_lua_error_hook', nil)
end
end)
@@ -249,3 +306,20 @@ local function trace_calls(n)
end
directives.register("system.tracecalls", function(n) trace_calls(n) end) -- indirect is needed for nilling
+
+implement { name = "showtrackers", actions = trackers.show }
+implement { name = "enabletrackers", actions = trackers.enable, arguments = "string" }
+implement { name = "disabletrackers", actions = trackers.disable, arguments = "string" }
+implement { name = "resettrackers", actions = trackers.reset }
+
+implement { name = "showdirectives", actions = directives.show }
+implement { name = "enabledirectives", actions = directives.enable, arguments = "string" }
+implement { name = "disabledirectives", actions = directives.disable, arguments = "string" }
+
+implement { name = "showexperiments", actions = experiments.show }
+implement { name = "enableexperiments", actions = experiments.enable, arguments = "string" }
+implement { name = "disableexperiments", actions = experiments.disable, arguments = "string" }
+
+implement { name = "showdebuginfo", actions = lmx.showdebuginfo }
+implement { name = "overloaderror", actions = lmx.overloaderror }
+implement { name = "showlogcategories", actions = logs.show }
diff --git a/tex/context/base/trac-deb.mkiv b/tex/context/base/trac-deb.mkiv
index fe5dd02dc..1e61a3512 100644
--- a/tex/context/base/trac-deb.mkiv
+++ b/tex/context/base/trac-deb.mkiv
@@ -13,25 +13,31 @@
\writestatus{loading}{ConTeXt Tracing Macros / Debugger}
+\unprotect
+
%registerctxluafile{trac-lmx}{1.001}
\registerctxluafile{trac-deb}{1.001}
-\unexpanded\def\breakpoint{\showdebuginfo\wait}
+\unexpanded\def\breakpoint {\showdebuginfo\wait}
+
+\unexpanded\def\showtrackers {\clf_showtrackers}
+\unexpanded\def\enabletrackers [#1]{\clf_enabletrackers{#1}}
+\unexpanded\def\disabletrackers [#1]{\clf_disabletrackers{#1}}
+\unexpanded\def\resettrackers {\clf_resettrackers}
+
+\unexpanded\def\showdirectives {\clf_showdirectives}
+\unexpanded\def\enabledirectives [#1]{\clf_enabledirectives{#1}}
+\unexpanded\def\disabledirectives [#1]{\clf_disabledirectives{#1}}
-\unexpanded\def\showtrackers {\ctxlua{trackers.show()}}
-\unexpanded\def\enabletrackers [#1]{\ctxlua{trackers.enable("#1")}}
-\unexpanded\def\disabletrackers [#1]{\ctxlua{trackers.disable("#1")}}
-\unexpanded\def\resettrackers {\ctxlua{trackers.reset()}}
+\unexpanded\def\showexperiments {\clf_showexperiments}
+\unexpanded\def\enableexperiments [#1]{\clf_enableexperiments{#1}}
+\unexpanded\def\disableexperiments[#1]{\clf_disableexperiments{#1}}
-\unexpanded\def\showdirectives {\ctxlua{directives.show()}}
-\unexpanded\def\enabledirectives [#1]{\ctxlua{directives.enable("#1")}}
-\unexpanded\def\disabledirectives [#1]{\ctxlua{directives.disable("#1")}}
+\unexpanded\def\showdebuginfo {\clf_showdebuginfo}
+\unexpanded\def\overloaderror {\clf_overloaderror}
-\unexpanded\def\showexperiments {\ctxlua{experiments.show()}}
-\unexpanded\def\enableexperiments [#1]{\ctxlua{experiments.enable("#1")}}
-\unexpanded\def\disableexperiments[#1]{\ctxlua{experiments.disable("#1")}}
+\unexpanded\def\showlogcategories {\clf_showlogcategories}
-\unexpanded\def\showdebuginfo {\ctxlua{lmx.showdebuginfo()}}
-\unexpanded\def\overloaderror {\ctxlua{lmx.overloaderror()}} % \enabledirectives[system.showerror]
+% \enabledirectives[system.showerror]
-\unexpanded\def\showlogcategories {\ctxlua{logs.show()}}
+\protect \endinput
diff --git a/tex/context/base/trac-inf.lua b/tex/context/base/trac-inf.lua
index 802f2e667..5497e54eb 100644
--- a/tex/context/base/trac-inf.lua
+++ b/tex/context/base/trac-inf.lua
@@ -12,7 +12,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- and rawget.
local type, tonumber, select = type, tonumber, select
-local format, lower = string.format, string.lower
+local format, lower, find = string.format, string.lower, string.find
local concat = table.concat
local clock = os.gettimeofday or os.clock -- should go in environment
@@ -123,7 +123,8 @@ function statistics.show()
-- this code will move
local register = statistics.register
register("used platform", function()
- return format("%s, type: %s, binary subtree: %s",os.platform or "unknown",os.type or "unknown", environment.texos or "unknown")
+ return format("%s, type: %s, binary subtree: %s",
+ os.platform or "unknown",os.type or "unknown", environment.texos or "unknown")
end)
register("luatex banner", function()
return lower(status.banner)
@@ -136,16 +137,25 @@ function statistics.show()
return format("%s direct, %s indirect, %s total", total-indirect, indirect, total)
end)
if jit then
- local status = { jit.status() }
- if status[1] then
- register("luajit status", function()
- return concat(status," ",2)
- end)
+ local jitstatus = { jit.status() }
+ if jitstatus[1] then
+ register("luajit options", concat(jitstatus," ",2))
end
end
-- so far
-- collectgarbage("collect")
- register("current memory usage",statistics.memused)
+ register("lua properties",function()
+ local list = status.list()
+ local hashchar = tonumber(list.luatex_hashchars)
+ local mask = lua.mask or "ascii"
+ return format("engine: %s, used memory: %s, hash type: %s, hash chars: min(%s,40), symbol mask: %s (%s)",
+ jit and "luajit" or "lua",
+ statistics.memused(),
+ list.luatex_hashtype or "default",
+ hashchar and 2^hashchar or "unknown",
+ mask,
+ mask == "utf" and "τεχ" or "tex")
+ end)
register("runtime",statistics.runtime)
logs.newline() -- initial newline
for i=1,#statusinfo do
@@ -197,17 +207,3 @@ function statistics.tracefunction(base,tag,...)
statistics.register(formatters["%s.%s"](tag,name),function() return serialize(stat,"calls") end)
end
end
-
--- where, not really the best spot for this:
-
-commands = commands or { }
-
-function commands.resettimer(name)
- resettiming(name or "whatever")
- starttiming(name or "whatever")
-end
-
-function commands.elapsedtime(name)
- stoptiming(name or "whatever")
- context(elapsedtime(name or "whatever"))
-end
diff --git a/tex/context/base/trac-jus.lua b/tex/context/base/trac-jus.lua
index 38220a752..0c92848dc 100644
--- a/tex/context/base/trac-jus.lua
+++ b/tex/context/base/trac-jus.lua
@@ -14,14 +14,30 @@ typesetters.checkers = checkers
local a_alignstate = attributes.private("alignstate")
local a_justification = attributes.private("justification")
-local tracers = nodes.tracers
-local tracedrule = tracers.rule
-
-local new_rule = nodes.pool.rule
-local new_hlist = nodes.pool.hlist
-local new_glue = nodes.pool.glue
-local new_kern = nodes.pool.kern
-local get_list_dimensions = node.dimensions
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local setlist = nuts.setlist
+
+local traverse_id = nuts.traverse_id
+local get_list_dimensions = nuts.dimensions
+local linked_nodes = nuts.linked
+local copy_node = nuts.copy
+
+local tracedrule = nodes.tracers.pool.nuts.rule
+
+local nodepool = nuts.pool
+
+local new_rule = nodepool.rule
+local new_hlist = nodepool.hlist
+local new_glue = nodepool.glue
+local new_kern = nodepool.kern
+
local hlist_code = nodes.nodecodes.hlist
local texsetattribute = tex.setattribute
@@ -46,9 +62,10 @@ end
checkers.set = set
checkers.reset = reset
-function commands.showjustification(n)
- set(n)
-end
+interfaces.implement {
+ name = "showjustification",
+ actions = set
+}
trackers.register("visualizers.justification", function(v)
if v then
@@ -59,34 +76,35 @@ trackers.register("visualizers.justification", function(v)
end)
function checkers.handler(head)
- for current in node.traverse_id(hlist_code,head) do
- if current[a_justification] == 1 then
- current[a_justification] = 0
- local width = current.width
+ for current in traverse_id(hlist_code,tonut(head)) do
+ if getattr(current,a_justification) == 1 then
+ setattr(current,a_justification,0) -- kind of reset
+ local width = getfield(current,"width")
if width > 0 then
- local list = current.list
+ local list = getlist(current)
if list then
local naturalwidth, naturalheight, naturaldepth = get_list_dimensions(list)
local delta = naturalwidth - width
if naturalwidth == 0 or delta == 0 then
-- special box
elseif delta >= max_threshold then
- local rule = tracedrule(delta,naturalheight,naturaldepth,list.glue_set == 1 and "trace:dr"or "trace:db")
- current.list = list .. new_hlist(rule)
+ local rule = tracedrule(delta,naturalheight,naturaldepth,getfield(list,"glue_set") == 1 and "trace:dr" or "trace:db")
+ setfield(current,"list",linked_nodes(list,new_hlist(rule)))
elseif delta <= min_threshold then
- local alignstate = list[a_alignstate]
+ local alignstate = getattr(list,a_alignstate)
if alignstate == 1 then
local rule = tracedrule(-delta,naturalheight,naturaldepth,"trace:dc")
- current.list = new_hlist(rule) .. list
+ setfield(current,"list",linked_nodes(new_hlist(rule),list))
elseif alignstate == 2 then
- local rule = tracedrule(-delta/2,naturalheight,naturaldepth,"trace:dy")
- current.list = new_hlist(rule^1) .. list .. new_kern(delta/2) .. new_hlist(rule)
+ local lrule = tracedrule(-delta/2,naturalheight,naturaldepth,"trace:dy")
+ local rrule = copy_node(lrule)
+ setfield(current,"list",linked_nodes(new_hlist(lrule),list,new_kern(delta/2),new_hlist(rrule)))
elseif alignstate == 3 then
local rule = tracedrule(-delta,naturalheight,naturaldepth,"trace:dm")
- current.list = list .. new_kern(delta) .. new_hlist(rule)
+ setfield(current,"list",linked_nodes(list,new_kern(delta),new_hlist(rule)))
else
local rule = tracedrule(-delta,naturalheight,naturaldepth,"trace:dg")
- current.list = list .. new_kern(delta) .. new_hlist(rule)
+ setfield(current,"list",linked_nodes(list,new_kern(delta),new_hlist(rule)))
end
end
end
diff --git a/tex/context/base/trac-jus.mkiv b/tex/context/base/trac-jus.mkiv
index 7a5347da8..d0823d311 100644
--- a/tex/context/base/trac-jus.mkiv
+++ b/tex/context/base/trac-jus.mkiv
@@ -20,6 +20,6 @@
\definesystemattribute[justification] [public]
\unexpanded\def\showjustification
- {\ctxcommand{showjustification()}}
+ {\clf_showjustification} % currently no argument (default 1)
\protect \endinput
diff --git a/tex/context/base/trac-lmx.lua b/tex/context/base/trac-lmx.lua
index 41d930536..4f4ea62c4 100644
--- a/tex/context/base/trac-lmx.lua
+++ b/tex/context/base/trac-lmx.lua
@@ -530,7 +530,7 @@ do_nested_include = function(data) -- also used in include
return lpegmatch(pattern_1,data)
end
-function lmxnew(data,defaults,nocache,path) -- todo: use defaults in calling routines
+local function lmxnew(data,defaults,nocache,path) -- todo: use defaults in calling routines
data = data or ""
local known = cache[data]
if not known then
@@ -608,7 +608,7 @@ function lmx.convertfile(templatefile,variables,nocache)
return lmxresult(converter,variables)
end
-function lmxconvert(templatefile,resultfile,variables,nocache) -- or (templatefile,variables)
+local function lmxconvert(templatefile,resultfile,variables,nocache) -- or (templatefile,variables)
if trace_variables then -- will become templates
report_lmx("converting file %a",templatefile)
end
@@ -665,14 +665,13 @@ function lmx.color(r,g,b,a)
end
end
-
-- these can be overloaded
lmx.lmxfile = string.itself
lmx.htmfile = string.itself
lmx.popupfile = os.launch
-function lmxmake(name,variables)
+local function lmxmake(name,variables)
local lmxfile = lmx.lmxfile(name)
local htmfile = lmx.htmfile(name)
if lmxfile == htmfile then
@@ -682,7 +681,7 @@ function lmxmake(name,variables)
return htmfile
end
-lmxmake = lmx.make
+lmx.make = lmxmake
function lmx.show(name,variables)
local htmfile = lmxmake(name,variables)
diff --git a/tex/context/base/trac-log.lua b/tex/context/base/trac-log.lua
index 0d0b66260..ce620e6cf 100644
--- a/tex/context/base/trac-log.lua
+++ b/tex/context/base/trac-log.lua
@@ -6,76 +6,31 @@ if not modules then modules = { } end modules ['trac-log'] = {
license = "see context related readme files"
}
--- if tex and (tex.jobname or tex.formatname) then
---
--- -- quick hack, awaiting speedup in engine (8 -> 6.4 sec for --make with console2)
--- -- still needed for luajittex
---
--- local texio_write_nl = texio.write_nl
--- local texio_write = texio.write
--- local io_write = io.write
-
--- local write_nl = function(target,...)
--- if not io_write then
--- io_write = io.write
--- end
--- if target == "term and log" then
--- texio_write_nl("log",...)
--- texio_write_nl("term","")
--- io_write(...)
--- elseif target == "log" then
--- texio_write_nl("log",...)
--- elseif target == "term" then
--- texio_write_nl("term","")
--- io_write(...)
--- else
--- texio_write_nl("log",target,...)
--- texio_write_nl("term","")
--- io_write(target,...)
--- end
--- end
-
--- local write = function(target,...)
--- if not io_write then
--- io_write = io.write
--- end
--- if target == "term and log" then
--- texio_write("log",...)
--- io_write(...)
--- elseif target == "log" then
--- texio_write("log",...)
--- elseif target == "term" then
--- io_write(...)
--- else
--- texio_write("log",target,...)
--- io_write(target,...)
--- end
--- end
-
--- texio.write = write
--- texio.write_nl = write_nl
---
--- else
---
--- -- texlua or just lua
---
--- end
-
--- todo: less categories, more subcategories (e.g. nodes)
--- todo: split into basics and ctx specific
+-- In fact all writes could go through lua and we could write the console and
+-- terminal handler in lua then. Ok, maybe it's slower then, so a no-go.
+local next, type, select, print = next, type, select, print
local write_nl, write = texio and texio.write_nl or print, texio and texio.write or io.write
local format, gmatch, find = string.format, string.gmatch, string.find
local concat, insert, remove = table.concat, table.insert, table.remove
local topattern = string.topattern
-local next, type, select = next, type, select
local utfchar = utf.char
+local datetime = os.date
+local openfile = io.open
local setmetatableindex = table.setmetatableindex
local formatters = string.formatters
local texgetcount = tex and tex.getcount
+-- the logging variant is set here (and can be switched later via setformatters)
+
+local variant = "default"
+-- local variant = "ansi"
+
+-- todo: less categories, more subcategories (e.g. nodes)
+-- todo: split into basics and ctx specific
+
--[[ldx--
This is a prelude to a more extensive logging module. We no longer
provide based logging as parsing is relatively easy anyway.
@@ -109,12 +64,12 @@ wiki : http://contextgarden.net
-- [[local chruni = utilities.strings.chruni]]
-- )
-utilities.strings.formatters.add (
+formatters.add (
formatters, "unichr",
[["U+" .. format("%%05X",%s) .. " (" .. utfchar(%s) .. ")"]]
)
-utilities.strings.formatters.add (
+formatters.add (
formatters, "chruni",
[[utfchar(%s) .. " (U+" .. format("%%05X",%s) .. ")"]]
)
@@ -147,20 +102,127 @@ setmetatableindex(logs, function(t,k) t[k] = ignore ; return ignore end)
local report, subreport, status, settarget, setformats, settranslations
-local direct, subdirect, writer, pushtarget, poptarget, setlogfile, settimedlog, setprocessor, setformatters
+local direct, subdirect, writer, pushtarget, poptarget, setlogfile, settimedlog, setprocessor, setformatters, newline
+
+-- we use formatters but best check for % first because for simple messages we
+-- don't want this overhead (not that there are that many; we could have a
+-- special weak table)
if tex and (tex.jobname or tex.formatname) then
- -- local format = string.formatter
+ local function useluawrites()
+
+ -- quick hack, awaiting speedup in engine (8 -> 6.4 sec for --make with console2)
+ -- still needed for luajittex .. luatex should not have that ^^ mess
+
+ local texio_write_nl = texio.write_nl
+ local texio_write = texio.write
+ local io_write = io.write
+
+ write_nl = function(target,...)
+ if not io_write then
+ io_write = io.write
+ end
+ if target == "term and log" then
+ texio_write_nl("log",...)
+ texio_write_nl("term","")
+ io_write(...)
+ elseif target == "log" then
+ texio_write_nl("log",...)
+ elseif target == "term" then
+ texio_write_nl("term","")
+ io_write(...)
+ elseif target ~= "none" then
+ texio_write_nl("log",target,...)
+ texio_write_nl("term","")
+ io_write(target,...)
+ end
+ end
- local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
+ write = function(target,...)
+ if not io_write then
+ io_write = io.write
+ end
+ if target == "term and log" then
+ texio_write("log",...)
+ io_write(...)
+ elseif target == "log" then
+ texio_write("log",...)
+ elseif target == "term" then
+ io_write(...)
+ elseif target ~= "none" then
+ texio_write("log",target,...)
+ io_write(target,...)
+ end
+ end
- local target = "term and log"
+ texio.write = write
+ texio.write_nl = write_nl
- logs.flush = io.flush
+ useluawrites = ignore
- local formats = { } setmetatable(formats, valueiskey)
- local translations = { } setmetatable(translations,valueiskey)
+ end
+
+ -- local format = string.formatter
+
+ local whereto = "both"
+ local target = nil
+ local targets = nil
+
+ local formats = table.setmetatableindex("self")
+ local translations = table.setmetatableindex("self")
+
+ local report_yes, subreport_yes, direct_yes, subdirect_yes, status_yes
+ local report_nop, subreport_nop, direct_nop, subdirect_nop, status_nop
+
+ local variants = {
+ default = {
+ formats = {
+ report_yes = formatters["%-15s > %s\n"],
+ report_nop = formatters["%-15s >\n"],
+ direct_yes = formatters["%-15s > %s"],
+ direct_nop = formatters["%-15s >"],
+ subreport_yes = formatters["%-15s > %s > %s\n"],
+ subreport_nop = formatters["%-15s > %s >\n"],
+ subdirect_yes = formatters["%-15s > %s > %s"],
+ subdirect_nop = formatters["%-15s > %s >"],
+ status_yes = formatters["%-15s : %s\n"],
+ status_nop = formatters["%-15s :\n"],
+ },
+ targets = {
+ logfile = "log",
+ log = "log",
+ file = "log",
+ console = "term",
+ terminal = "term",
+ both = "term and log",
+ },
+ },
+ ansi = {
+ formats = {
+ report_yes = formatters["[0;33m%-15s [0;1m>[0m %s\n"],
+ report_nop = formatters["[0;33m%-15s [0;1m>[0m\n"],
+ direct_yes = formatters["[0;33m%-15s [0;1m>[0m %s"],
+ direct_nop = formatters["[0;33m%-15s [0;1m>[0m"],
+ subreport_yes = formatters["[0;33m%-15s [0;1m>[0;35m %s [0;1m>[0m %s\n"],
+ subreport_nop = formatters["[0;33m%-15s [0;1m>[0;35m %s [0;1m>[0m\n"],
+ subdirect_yes = formatters["[0;33m%-15s [0;1m>[0;35m %s [0;1m>[0m %s"],
+ subdirect_nop = formatters["[0;33m%-15s [0;1m>[0;35m %s [0;1m>[0m"],
+ status_yes = formatters["[0;33m%-15s [0;1m:[0m %s\n"],
+ status_nop = formatters["[0;33m%-15s [0;1m:[0m\n"],
+ },
+ targets = {
+ logfile = "none",
+ log = "none",
+ file = "none",
+ console = "term",
+ terminal = "term",
+ both = "term",
+ },
+ }
+ }
+
+ logs.flush = io.flush
writer = function(...)
write_nl(target,...)
@@ -170,13 +232,6 @@ if tex and (tex.jobname or tex.formatname) then
write_nl(target,"\n")
end
- local report_yes = formatters["%-15s > %s\n"]
- local report_nop = formatters["%-15s >\n"]
-
- -- we can use formatters but best check for % then because for simple messages
- -- we con't want this overhead for single messages (not that there are that
- -- many; we could have a special weak table)
-
report = function(a,b,c,...)
if c then
write_nl(target,report_yes(translations[a],formatters[formats[b]](c,...)))
@@ -189,9 +244,6 @@ if tex and (tex.jobname or tex.formatname) then
end
end
- local direct_yes = formatters["%-15s > %s"]
- local direct_nop = formatters["%-15s >"]
-
direct = function(a,b,c,...)
if c then
return direct_yes(translations[a],formatters[formats[b]](c,...))
@@ -204,9 +256,6 @@ if tex and (tex.jobname or tex.formatname) then
end
end
- local subreport_yes = formatters["%-15s > %s > %s\n"]
- local subreport_nop = formatters["%-15s > %s >\n"]
-
subreport = function(a,s,b,c,...)
if c then
write_nl(target,subreport_yes(translations[a],translations[s],formatters[formats[b]](c,...)))
@@ -219,9 +268,6 @@ if tex and (tex.jobname or tex.formatname) then
end
end
- local subdirect_yes = formatters["%-15s > %s > %s"]
- local subdirect_nop = formatters["%-15s > %s >"]
-
subdirect = function(a,s,b,c,...)
if c then
return subdirect_yes(translations[a],translations[s],formatters[formats[b]](c,...))
@@ -234,9 +280,6 @@ if tex and (tex.jobname or tex.formatname) then
end
end
- local status_yes = formatters["%-15s : %s\n"]
- local status_nop = formatters["%-15s :\n"]
-
status = function(a,b,c,...)
if c then
write_nl(target,status_yes(translations[a],formatters[formats[b]](c,...)))
@@ -249,17 +292,13 @@ if tex and (tex.jobname or tex.formatname) then
end
end
- local targets = {
- logfile = "log",
- log = "log",
- file = "log",
- console = "term",
- terminal = "term",
- both = "term and log",
- }
-
- settarget = function(whereto)
- target = targets[whereto or "both"] or targets.both
+ settarget = function(askedwhereto)
+ whereto = askedwhereto or whereto or "both"
+ target = targets[whereto]
+ if not target then
+ whereto = "both"
+ target = targets[whereto]
+ end
if target == "term" or target == "term and log" then
logs.flush = io.flush
else
@@ -295,24 +334,81 @@ if tex and (tex.jobname or tex.formatname) then
end
end
- setformatters = function(f)
- report_yes = f.report_yes or report_yes
- report_nop = f.report_nop or report_nop
- subreport_yes = f.subreport_yes or subreport_yes
- subreport_nop = f.subreport_nop or subreport_nop
- direct_yes = f.direct_yes or direct_yes
- direct_nop = f.direct_nop or direct_nop
- subdirect_yes = f.subdirect_yes or subdirect_yes
- subdirect_nop = f.subdirect_nop or subdirect_nop
- status_yes = f.status_yes or status_yes
- status_nop = f.status_nop or status_nop
+ setformatters = function(specification)
+ local t = nil
+ local f = nil
+ local d = variants.default
+ if not specification then
+ --
+ elseif type(specification) == "table" then
+ t = specification.targets
+ f = specification.formats or specification
+ else
+ local v = variants[specification]
+ if v then
+ t = v.targets
+ f = v.formats
+ variant = specification
+ end
+ end
+ targets = t or d.targets
+ target = targets[whereto] or target
+ if f then
+ d = d.formats
+ else
+ f = d.formats
+ d = f
+ end
+ setmetatableindex(f,d)
+ report_yes = f.report_yes
+ report_nop = f.report_nop
+ subreport_yes = f.subreport_yes
+ subreport_nop = f.subreport_nop
+ direct_yes = f.direct_yes
+ direct_nop = f.direct_nop
+ subdirect_yes = f.subdirect_yes
+ subdirect_nop = f.subdirect_nop
+ status_yes = f.status_yes
+ status_nop = f.status_nop
+ if variant == "ansi" then
+ useluawrites() -- because tex escapes ^^
+ end
+ settarget(whereto)
end
+ setformatters(variant)
+
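+ -- A hedged usage sketch: switching the decoration at runtime between the variants
+ -- defined above ("default" and "ansi"); the calls are illustrative:
+ --
+ -- setformatters("ansi") -- colored terminal output, log targets become "none"
+ -- setformatters("default") -- back to the plain "category > message" layout
+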
setlogfile = ignore
settimedlog = ignore
else
+ local report_yes, subreport_yes, status_yes
+ local report_nop, subreport_nop, status_nop
+
+ local variants = {
+ default = {
+ formats = {
+ report_yes = formatters["%-15s | %s"],
+ report_nop = formatters["%-15s |"],
+ subreport_yes = formatters["%-15s | %s | %s"],
+ subreport_nop = formatters["%-15s | %s |"],
+ status_yes = formatters["%-15s : %s\n"],
+ status_nop = formatters["%-15s :\n"],
+ },
+ },
+ ansi = {
+ formats = {
+ report_yes = formatters["[0;32m%-15s [0;1m|[0m %s"],
+ report_nop = formatters["[0;32m%-15s [0;1m|[0m"],
+ subreport_yes = formatters["[0;32m%-15s [0;1m|[0;31m %s [0;1m|[0m %s"],
+ subreport_nop = formatters["[0;32m%-15s [0;1m|[0;31m %s [0;1m|[0m"],
+ status_yes = formatters["[0;32m%-15s [0;1m:[0m %s\n"],
+ status_nop = formatters["[0;32m%-15s [0;1m:[0m\n"],
+ },
+ },
+ }
+
logs.flush = ignore
writer = function(s)
@@ -323,9 +419,6 @@ else
write_nl("\n")
end
- local report_yes = formatters["%-15s | %s"]
- local report_nop = formatters["%-15s |"]
-
report = function(a,b,c,...)
if c then
write_nl(report_yes(a,formatters[b](c,...)))
@@ -338,9 +431,6 @@ else
end
end
- local subreport_yes = formatters["%-15s | %s | %s"]
- local subreport_nop = formatters["%-15s | %s |"]
-
subreport = function(a,sub,b,c,...)
if c then
write_nl(subreport_yes(a,sub,formatters[b](c,...)))
@@ -353,9 +443,6 @@ else
end
end
- local status_yes = formatters["%-15s : %s\n"]
- local status_nop = formatters["%-15s :\n"]
-
status = function(a,b,c,...) -- not to be used in lua anyway
if c then
write_nl(status_yes(a,formatters[b](c,...)))
@@ -384,15 +471,36 @@ else
end
end
- setformatters = function(f)
- report_yes = f.report_yes or report_yes
- report_nop = f.report_nop or report_nop
- subreport_yes = f.subreport_yes or subreport_yes
- subreport_nop = f.subreport_nop or subreport_nop
- status_yes = f.status_yes or status_yes
- status_nop = f.status_nop or status_nop
+ setformatters = function(specification)
+ local f = nil
+ local d = variants.default
+ if specification then
+ if type(specification) == "table" then
+ f = specification.formats or specification
+ else
+ local v = variants[specification]
+ if v then
+ f = v.formats
+ end
+ end
+ end
+ if f then
+ d = d.formats
+ else
+ f = d.formats
+ d = f
+ end
+ setmetatableindex(f,d)
+ report_yes = f.report_yes
+ report_nop = f.report_nop
+ subreport_yes = f.subreport_yes
+ subreport_nop = f.subreport_nop
+ status_yes = f.status_yes
+ status_nop = f.status_nop
end
+ setformatters(variant)
+
setlogfile = function(name,keepopen)
if name and name ~= "" then
local localtime = os.localtime
@@ -535,9 +643,10 @@ local function setblocked(category,value)
v.state = value
end
else
- states = utilities.parsers.settings_to_hash(category)
+ states = utilities.parsers.settings_to_hash(category,type(states)=="table" and states or nil)
for c, _ in next, states do
- if data[c] then
+ local v = data[c]
+ if v then
v.state = value
else
c = topattern(c,true,true)
@@ -722,13 +831,13 @@ logs.simpleline = simple
-- obsolete
-function logs.setprogram () end -- obsolete
-function logs.extendbanner() end -- obsolete
-function logs.reportlines () end -- obsolete
-function logs.reportbanner() end -- obsolete
-function logs.reportline () end -- obsolete
-function logs.simplelines () end -- obsolete
-function logs.help () end -- obsolete
+logs.setprogram = ignore -- obsolete
+logs.extendbanner = ignore -- obsolete
+logs.reportlines = ignore -- obsolete
+logs.reportbanner = ignore -- obsolete
+logs.reportline = ignore -- obsolete
+logs.simplelines = ignore -- obsolete
+logs.help = ignore -- obsolete
-- applications
@@ -841,10 +950,12 @@ end
-- logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123")
-- end
-function logs.system(whereto,process,jobname,category,...)
- local message = formatters["%s %s => %s => %s => %s\r"](os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...))
+local f_syslog = formatters["%s %s => %s => %s => %s\r"]
+
+function logs.system(whereto,process,jobname,category,fmt,arg,...)
+ local message = f_syslog(datetime("%d/%m/%y %H:%M:%S"),process,jobname,category,arg == nil and fmt or format(fmt,arg,...))
for i=1,10 do
- local f = io.open(whereto,"a") -- we can consider keeping the file open
+ local f = openfile(whereto,"a") -- we can consider keeping the file open
if f then
f:write(message)
f:close()
diff --git a/tex/context/base/trac-par.lua b/tex/context/base/trac-par.lua
index 262a9cc33..aab57ce5c 100644
--- a/tex/context/base/trac-par.lua
+++ b/tex/context/base/trac-par.lua
@@ -1,8 +1,25 @@
--- for the moment here:
+if not modules then modules = { } end modules ['trac-par'] = {
+ version = 1.001,
+ comment = "companion to node-par.mkiv",
+ author = "Hans Hagen",
+ copyright = "ConTeXt Development Team",
+ license = "see context related readme files",
+ comment = "a translation of the built-in parbuilder, initial conversion by Taco Hoekwater",
+}
local utfchar = utf.char
local concat = table.concat
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local getid = nuts.getid
+local getnext = nuts.getnext
+local getlist = nuts.getlist
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+
local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
@@ -42,14 +59,14 @@ local function colorize(n)
-- tricky: the built-in method creates dummy fonts and the last line normally has the
-- original font and that one then has ex.auto set
while n do
- local id = n.id
+ local id = getid(n)
if id == glyph_code then
- local ne = n.expansion_factor
+ local ne = getfield(n,"expansion_factor")
if ne == 0 then
if length > 0 then flush() end
setnodecolor(n,"hz:zero")
else
- local f = n.font
+ local f = getfont(n)
if f ~= font then
if length > 0 then
flush()
@@ -79,8 +96,8 @@ local function colorize(n)
end
if trace_verbose then
length = length + 1
- list[length] = utfchar(n.char)
- width = width + n.width -- no kerning yet
+ list[length] = utfchar(getchar(n))
+ width = width + getfield(n,"width") -- no kerning yet
end
end
end
@@ -88,13 +105,13 @@ local function colorize(n)
if length > 0 then
flush()
end
- colorize(n.list,flush)
+ colorize(getlist(n),flush)
else -- nothing to show on kerns
if length > 0 then
flush()
end
end
- n = n.next
+ n = getnext(n)
end
if length > 0 then
flush()
@@ -104,14 +121,14 @@ end
builders.paragraphs.expansion = builders.paragraphs.expansion or { }
function builders.paragraphs.expansion.trace(head)
- colorize(head,true)
+ colorize(tonut(head),true)
return head
end
local tasks = nodes.tasks
-tasks.prependaction("shipouts","normalizers","builders.paragraphs.expansion.trace")
-tasks.disableaction("shipouts","builders.paragraphs.expansion.trace")
+-- tasks.prependaction("shipouts","normalizers","builders.paragraphs.expansion.trace")
+-- tasks.disableaction("shipouts","builders.paragraphs.expansion.trace")
local function set(v)
if v then
diff --git a/tex/context/base/trac-pro.lua b/tex/context/base/trac-pro.lua
index d6e0d0339..897b6a15c 100644
--- a/tex/context/base/trac-pro.lua
+++ b/tex/context/base/trac-pro.lua
@@ -26,7 +26,8 @@ local registered = { }
local function report_index(k,name)
if trace_namespaces then
- report_system("reference to %a in protected namespace %a: %s",k,name,debug.traceback())
+ report_system("reference to %a in protected namespace %a: %s",k,name)
+ debugger.showtraceback(report_system)
else
report_system("reference to %a in protected namespace %a",k,name)
end
@@ -34,7 +35,8 @@ end
local function report_newindex(k,name)
if trace_namespaces then
- report_system("assignment to %a in protected namespace %a: %s",k,name,debug.traceback())
+ report_system("assignment to %a in protected namespace %a: %s",k,name)
+ debugger.showtraceback(report_system)
else
report_system("assignment to %a in protected namespace %a",k,name)
end
diff --git a/tex/context/base/trac-set.lua b/tex/context/base/trac-set.lua
index 95fdc43b3..9e2bf8758 100644
--- a/tex/context/base/trac-set.lua
+++ b/tex/context/base/trac-set.lua
@@ -37,7 +37,7 @@ function setters.initialize(filename,name,values) -- filename only for diagnosti
local data = setter.data
if data then
for key, newvalue in next, values do
- local newvalue = is_boolean(newvalue,newvalue)
+ local newvalue = is_boolean(newvalue,newvalue,true) -- strict
local functions = data[key]
if functions then
local oldvalue = functions.value
@@ -97,7 +97,7 @@ local function set(t,what,newvalue)
elseif not value then
value = false -- catch nil
else
- value = is_boolean(value,value)
+ value = is_boolean(value,value,true) -- strict
end
w = topattern(w,true,true)
for name, functions in next, data do
@@ -258,6 +258,7 @@ function setters.new(name) -- we could use foo:bar syntax (but not used that oft
report = function(...) setters.report (setter,...) end,
enable = function(...) enable (setter,...) end,
disable = function(...) disable (setter,...) end,
+ reset = function(...) reset (setter,...) end, -- can be dangerous
register = function(...) register(setter,...) end,
list = function(...) list (setter,...) end,
show = function(...) show (setter,...) end,
diff --git a/tex/context/base/trac-tex.lua b/tex/context/base/trac-tex.lua
index 7e3406073..5fe4754cb 100644
--- a/tex/context/base/trac-tex.lua
+++ b/tex/context/base/trac-tex.lua
@@ -8,7 +8,7 @@ if not modules then modules = { } end modules ['trac-tex'] = {
-- moved from trac-deb.lua
-local format = string.format
+local next = next
local texhashtokens = tex.hashtokens
@@ -20,23 +20,70 @@ function trackers.savehash()
saved = texhashtokens()
end
-function trackers.dumphashtofile(filename,delta)
- local list, hash, command_name = { }, texhashtokens(), token.command_name
- for name, token in next, hash do
- if not delta or not saved[name] then
- -- token: cmd, chr, csid -- combination cmd,chr determines name
- local category = command_name(token)
- local dk = list[category]
- if not dk then
- -- a bit funny names but this sorts better (easier to study)
- dk = { names = { }, found = 0, code = token[1] }
- list[category] = dk
+if newtoken then
+
+ function trackers.dumphashtofile(filename,delta)
+ local list = { }
+ local hash = tex.hashtokens()
+ local create = newtoken.create
+ for name, token in next, hash do
+ if not delta or not saved[name] then
+ if token[2] ~= 0 then -- still old interface
+ local token = create(name)
+ -- inspect(token)
+ local category = token.cmdname
+ local dk = list[category]
+ if not dk then
+ dk = {
+ names = { },
+ found = 0,
+ -- code = token[1],
+ }
+ list[category] = dk
+ end
+ if token.protected then
+ if token.expandable then
+ dk.names[name] = "ep"
+ else
+ dk.names[name] = "-p"
+ end
+ else
+ if token.expandable then
+                            dk.names[name] = "e-"
+ else
+ dk.names[name] = "--"
+ end
+ end
+ dk.found = dk.found + 1
+ end
end
- dk.names[name] = { token[2], token[3] }
- dk.found = dk.found + 1
end
+ table.save(filename or tex.jobname .. "-hash.log",list)
end
- io.savedata(filename or tex.jobname .. "-hash.log",table.serialize(list,true))
+
+else
+
+ function trackers.dumphashtofile(filename,delta)
+ local list = { }
+ local hash = texhashtokens()
+ local getname = token.command_name
+ for name, token in next, hash do
+ if not delta or not saved[name] then
+ -- token: cmd, chr, csid -- combination cmd,chr determines name
+ local category = getname(token)
+ local dk = list[category]
+ if not dk then
+                -- the names are a bit funny but this sorts better (easier to study)
+ dk = { names = { }, found = 0, code = token[1] }
+ list[category] = dk
+ end
+ dk.names[name] = { token[2], token[3] }
+ dk.found = dk.found + 1
+ end
+ end
+ table.save(filename or tex.jobname .. "-hash.log",list)
+ end
+
end
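+
+-- A small usage sketch (using only the names defined above): dump just the control
+-- sequences that were added to the hash after a snapshot was taken:
+--
+--   trackers.savehash()
+--   -- ... some definitions ...
+--   trackers.dumphashtofile("delta-hash.log",true) -- delta: skip already saved names
+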
local delta = nil
diff --git a/tex/context/base/trac-tim.lua b/tex/context/base/trac-tim.lua
index 15ac9bf1b..b4744291c 100644
--- a/tex/context/base/trac-tim.lua
+++ b/tex/context/base/trac-tim.lua
@@ -88,7 +88,7 @@ local function convert(name)
delta = factor/delta
end
for k=1,#s do
- s[k] = format("(%s,%s)",k,(s[k]-b)*delta)
+ s[k] = format("(%.3f,%.3f)",k,(s[k]-b)*delta)
end
paths[tagname] = concat(s,"--")
end
diff --git a/tex/context/base/trac-vis.lua b/tex/context/base/trac-vis.lua
index dc8bcc5e7..a20e42d1a 100644
--- a/tex/context/base/trac-vis.lua
+++ b/tex/context/base/trac-vis.lua
@@ -32,8 +32,10 @@ local formatters = string.formatters
-- todo: global switch (so no attributes)
-- todo: maybe also xoffset, yoffset of glyph
-- todo: inline concat (more efficient)
+-- todo: tags can also be numbers (just add to hash)
local nodecodes = nodes.nodecodes
+local disc_code = nodecodes.disc
local kern_code = nodecodes.kern
local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
@@ -42,6 +44,7 @@ local glue_code = nodecodes.glue
local penalty_code = nodecodes.penalty
local whatsit_code = nodecodes.whatsit
local user_code = nodecodes.user
+local math_code = nodecodes.math
local gluespec_code = nodecodes.gluespec
local kerncodes = nodes.kerncodes
@@ -57,31 +60,54 @@ local leftskip_code = gluecodes.leftskip
local rightskip_code = gluecodes.rightskip
local whatsitcodes = nodes.whatsitcodes
-
-local hpack_nodes = node.hpack
-local vpack_nodes = node.vpack
-local fast_hpack_string = nodes.typesetters.fast_hpack
-local copy_node = node.copy
-local copy_list = node.copy_list
-local free_node = node.free
-local free_node_list = node.flush_list
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local fast_hpack = nodes.fasthpack
-local traverse_nodes = node.traverse
+local mathcodes = nodes.mathcodes
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local setfield = nuts.setfield
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+local getbox = nuts.getbox
+local getlist = nuts.getlist
+local getleader = nuts.getleader
+
+local hpack_nodes = nuts.hpack
+local vpack_nodes = nuts.vpack
+local copy_node = nuts.copy
+local copy_list = nuts.copy_list
+local free_node = nuts.free
+local free_node_list = nuts.flush_list
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local traverse_nodes = nuts.traverse
+local linked_nodes = nuts.linked
+
+local fast_hpack = nuts.fasthpack
+local fast_hpack_string = nuts.typesetters.fast_hpack
local texgetattribute = tex.getattribute
local texsetattribute = tex.setattribute
-local texgetbox = tex.getbox
+
local unsetvalue = attributes.unsetvalue
local current_font = font.current
-local exheights = fonts.hashes.exheights
-local emwidths = fonts.hashes.emwidths
+local fonthashes = fonts.hashes
+local chardata = fonthashes.characters
+local exheights = fonthashes.exheights
+local emwidths = fonthashes.emwidths
local pt_factor = number.dimenfactors.pt
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local new_rule = nodepool.rule
local new_kern = nodepool.kern
local new_glue = nodepool.glue
@@ -117,6 +143,7 @@ local trace_fontkern
local trace_strut
local trace_whatsit
local trace_user
+local trace_math
local report_visualize = logs.reporter("visualize")
@@ -136,21 +163,22 @@ local modes = {
simplevbox = 1024 + 2,
simplevtop = 1024 + 4,
user = 2048,
+ math = 4096,
}
local modes_makeup = { "hbox", "vbox", "kern", "glue", "penalty" }
local modes_boxes = { "hbox", "vbox" }
-local modes_all = { "hbox", "vbox", "kern", "glue", "penalty", "fontkern", "whatsit", "glyph", "user" }
+local modes_all = { "hbox", "vbox", "kern", "glue", "penalty", "fontkern", "whatsit", "glyph", "user", "math" }
local usedfont, exheight, emwidth
-local l_penalty, l_glue, l_kern, l_fontkern, l_hbox, l_vbox, l_vtop, l_strut, l_whatsit, l_glyph, l_user
+local l_penalty, l_glue, l_kern, l_fontkern, l_hbox, l_vbox, l_vtop, l_strut, l_whatsit, l_glyph, l_user, l_math
local enabled = false
local layers = { }
local preset_boxes = modes.hbox + modes.vbox
local preset_makeup = preset_boxes + modes.kern + modes.glue + modes.penalty
-local preset_all = preset_makeup + modes.fontkern + modes.whatsit + modes.glyph + modes.user
+local preset_all = preset_makeup + modes.fontkern + modes.whatsit + modes.glyph + modes.user + modes.math
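+
+-- Side note: the visual attribute is a bit set, so switching on for instance "glyph"
+-- (512) together with the new "math" (4096) results in the attribute value 4608,
+-- which visualize() decodes again with hasbit().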
function visualizers.setfont(id)
usedfont = id or current_font()
@@ -162,7 +190,7 @@ end
local function enable()
if not usedfont then
- -- we use a narrow monospaced font
+ -- we use a narrow monospaced font -- infofont ?
visualizers.setfont(fonts.definers.define { name = "lmmonoltcond10regular", size = tex.sp("4pt") })
end
for mode, value in next, modes do
@@ -187,6 +215,7 @@ local function enable()
l_whatsit = layers.whatsit
l_glyph = layers.glyph
l_user = layers.user
+ l_math = layers.math
nodes.tasks.enableaction("shipouts","nodes.visualizers.handler")
report_visualize("enabled")
enabled = true
@@ -251,13 +280,6 @@ function visualizers.setlayer(n)
texsetattribute(a_layer,layers[n] or unsetvalue)
end
-commands.setvisual = visualizers.setvisual
-commands.setlayer = visualizers.setlayer
-
-function commands.visual(n)
- context(setvisual(n))
-end
-
local function set(mode,v)
texsetattribute(a_visual,setvisual(mode,texgetattribute(a_visual),v))
end
@@ -266,10 +288,13 @@ for mode, value in next, modes do
trackers.register(formatters["visualizers.%s"](mode), function(v) set(mode,v) end)
end
-trackers.register("visualizers.reset", function(v) set("reset", v) end)
-trackers.register("visualizers.all", function(v) set("all", v) end)
-trackers.register("visualizers.makeup",function(v) set("makeup",v) end)
-trackers.register("visualizers.boxes", function(v) set("boxes", v) end)
+local fraction = 10
+
+trackers .register("visualizers.reset", function(v) set("reset", v) end)
+trackers .register("visualizers.all", function(v) set("all", v) end)
+trackers .register("visualizers.makeup", function(v) set("makeup",v) end)
+trackers .register("visualizers.boxes", function(v) set("boxes", v) end)
+directives.register("visualizers.fraction", function(v) fraction = tonumber(v) or fraction end)
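+
+-- For instance (a sketch): \enabledirectives[visualizers.fraction=20] makes the drawn
+-- rules half as wide, as the rule widths below are emwidth/fraction (and
+-- emwidth/(2*fraction) for ruled glyphs) with 10 as the default.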
local c_positive = "trace:b"
local c_negative = "trace:r"
@@ -279,7 +304,9 @@ local c_space = "trace:y"
local c_skip_a = "trace:c"
local c_skip_b = "trace:m"
local c_glyph = "trace:o"
+local c_ligature = "trace:s"
local c_white = "trace:w"
+local c_math = "trace:r"
local c_positive_d = "trace:db"
local c_negative_d = "trace:dr"
@@ -289,43 +316,48 @@ local c_space_d = "trace:dy"
local c_skip_a_d = "trace:dc"
local c_skip_b_d = "trace:dm"
local c_glyph_d = "trace:do"
+local c_ligature_d = "trace:ds"
local c_white_d = "trace:dw"
+local c_math_d = "trace:dr"
-local function sometext(str,layer,color,textcolor) -- we can just paste verbatim together .. no typesteting needed
+local function sometext(str,layer,color,textcolor,lap) -- we can just paste verbatim together .. no typesetting needed
local text = fast_hpack_string(str,usedfont)
- local size = text.width
+ local size = getfield(text,"width")
local rule = new_rule(size,2*exheight,exheight/2)
local kern = new_kern(-size)
if color then
setcolor(rule,color)
end
if textcolor then
- setlistcolor(text.list,textcolor)
+ setlistcolor(getlist(text),textcolor)
end
- local info = rule .. kern .. text
+ local info = linked_nodes(rule,kern,text)
setlisttransparency(info,c_zero)
info = fast_hpack(info)
+ local width = getfield(info,"width")
+ if lap then
+ info = fast_hpack(linked_nodes(new_kern(-width),info))
+ end
if layer then
- info[a_layer] = layer
+ setattr(info,a_layer,layer)
end
- local width = info.width
- info.width = 0
- info.height = 0
- info.depth = 0
+ setfield(info,"width",0)
+ setfield(info,"height",0)
+ setfield(info,"depth",0)
return info, width
end
local f_cache = { }
local function fontkern(head,current)
- local kern = current.kern
+ local kern = getfield(current,"kern")
local info = f_cache[kern]
if info then
-- print("hit fontkern")
else
local text = fast_hpack_string(formatters[" %0.3f"](kern*pt_factor),usedfont)
- local rule = new_rule(emwidth/10,6*exheight,2*exheight)
- local list = text.list
+ local rule = new_rule(emwidth/fraction,6*exheight,2*exheight)
+ local list = getlist(text)
if kern > 0 then
setlistcolor(list,c_positive_d)
elseif kern < 0 then
@@ -335,13 +367,12 @@ local function fontkern(head,current)
end
setlisttransparency(list,c_text_d)
settransparency(rule,c_text_d)
- text.shift = -5 * exheight
- info = rule .. text
- info = fast_hpack(info)
- info[a_layer] = l_fontkern
- info.width = 0
- info.height = 0
- info.depth = 0
+ setfield(text,"shift",-5 * exheight)
+ info = fast_hpack(linked_nodes(rule,text))
+ setattr(info,a_layer,l_fontkern)
+ setfield(info,"width",0)
+ setfield(info,"height",0)
+ setfield(info,"depth",0)
f_cache[kern] = info
end
head = insert_node_before(head,current,copy_list(info))
@@ -349,8 +380,7 @@ local function fontkern(head,current)
end
local w_cache = { }
-
-local tags = {
+local tags = {
open = "FIC",
write = "FIW",
close = "FIC",
@@ -382,7 +412,7 @@ local tags = {
}
local function whatsit(head,current)
- local what = current.subtype
+ local what = getsubtype(current)
local info = w_cache[what]
if info then
-- print("hit whatsit")
@@ -390,22 +420,45 @@ local function whatsit(head,current)
local tag = whatsitcodes[what]
-- maybe different text colors per tag
info = sometext(formatters["W:%s"](tag and tags[tag] or what),usedfont,nil,c_white)
- info[a_layer] = l_whatsit
+ setattr(info,a_layer,l_whatsit)
w_cache[what] = info
end
head, current = insert_node_after(head,current,copy_list(info))
return head, current
end
+local u_cache = { }
+
local function user(head,current)
- local what = current.subtype
- local info = w_cache[what]
+ local what = getsubtype(current)
+ local info = u_cache[what]
if info then
-- print("hit user")
else
info = sometext(formatters["U:%s"](what),usedfont)
- info[a_layer] = l_user
- w_cache[what] = info
+ setattr(info,a_layer,l_user)
+ u_cache[what] = info
+ end
+ head, current = insert_node_after(head,current,copy_list(info))
+ return head, current
+end
+
+local m_cache = { }
+local tags = {
+ beginmath = "B",
+ endmath = "E",
+}
+
+local function math(head,current)
+ local what = getsubtype(current)
+ local info = m_cache[what]
+ if info then
+ -- print("hit math")
+ else
+ local tag = mathcodes[what]
+ info = sometext(formatters["M:%s"](tag and tags[tag] or what),usedfont,nil,c_math_d)
+ setattr(info,a_layer,l_math)
+ m_cache[what] = info
end
head, current = insert_node_after(head,current,copy_list(info))
return head, current
@@ -414,15 +467,15 @@ end
local b_cache = { }
local function ruledbox(head,current,vertical,layer,what,simple,previous)
- local wd = current.width
+ local wd = getfield(current,"width")
if wd ~= 0 then
- local ht = current.height
- local dp = current.depth
- local next = current.next
- local prev = previous -- current.prev ... prev can be wrong in math mode
- current.next = nil
- current.prev = nil
- local linewidth = emwidth/10
+ local ht = getfield(current,"height")
+ local dp = getfield(current,"depth")
+ local next = getnext(current)
+ local prev = previous -- getprev(current) ... prev can be wrong in math mode < 0.78.3
+ setfield(current,"next",nil)
+ setfield(current,"prev",nil)
+ local linewidth = emwidth/fraction
local baseline, baseskip
if dp ~= 0 and ht ~= 0 then
if wd > 20*linewidth then
@@ -430,16 +483,16 @@ local function ruledbox(head,current,vertical,layer,what,simple,previous)
if not baseline then
-- due to an optimized leader color/transparency we need to set the glue node in order
-- to trigger this mechanism
- local leader = new_glue(2*linewidth) .. new_rule(6*linewidth,linewidth,0) .. new_glue(2*linewidth)
+ local leader = linked_nodes(new_glue(2*linewidth),new_rule(6*linewidth,linewidth,0),new_glue(2*linewidth))
-- setlisttransparency(leader,c_text)
leader = fast_hpack(leader)
-- setlisttransparency(leader,c_text)
baseline = new_glue(0)
- baseline.leader = leader
- baseline.subtype = cleaders_code
- local spec = baseline.spec
- spec.stretch = 65536
- spec.stretch_order = 2
+ setfield(baseline,"leader",leader)
+ setfield(baseline,"subtype",cleaders_code)
+ local spec = getfield(baseline,"spec")
+ setfield(spec,"stretch",65536)
+ setfield(spec,"stretch_order",2)
setlisttransparency(baseline,c_text)
b_cache.baseline = baseline
end
@@ -461,47 +514,49 @@ local function ruledbox(head,current,vertical,layer,what,simple,previous)
this = b_cache[what]
if not this then
local text = fast_hpack_string(what,usedfont)
- this = new_kern(-text.width) .. text
+ this = linked_nodes(new_kern(-getfield(text,"width")),text)
setlisttransparency(this,c_text)
this = fast_hpack(this)
- this.width = 0
- this.height = 0
- this.depth = 0
+ setfield(this,"width",0)
+ setfield(this,"height",0)
+ setfield(this,"depth",0)
b_cache[what] = this
end
end
        -- we need to trigger the right mode (else sometimes no whatsits)
- local info =
- (this and copy_list(this) or nil) ..
- new_rule(linewidth,ht,dp) ..
- new_rule(wd-2*linewidth,-dp+linewidth,dp) ..
- new_rule(linewidth,ht,dp) ..
- new_kern(-wd+linewidth) ..
+ local info = linked_nodes(
+ this and copy_list(this) or nil,
+ new_rule(linewidth,ht,dp),
+ new_rule(wd-2*linewidth,-dp+linewidth,dp),
+ new_rule(linewidth,ht,dp),
+ new_kern(-wd+linewidth),
new_rule(wd-2*linewidth,ht,-ht+linewidth)
+ )
if baseskip then
- info = info .. baseskip .. baseline
+ info = linked_nodes(info,baseskip,baseline)
end
setlisttransparency(info,c_text)
info = fast_hpack(info)
- info.width = 0
- info.height = 0
- info.depth = 0
- info[a_layer] = layer
- local info = current .. new_kern(-wd) .. info
+ setfield(info,"width",0)
+ setfield(info,"height",0)
+ setfield(info,"depth",0)
+ setattr(info,a_layer,layer)
+ local info = linked_nodes(current,new_kern(-wd),info)
info = fast_hpack(info,wd)
if vertical then
info = vpack_nodes(info)
end
if next then
- info.next = next
- next.prev = info
+ setfield(info,"next",next)
+ setfield(next,"prev",info)
end
if prev then
- if prev.id == gluespec_code then
- -- weird, how can this happen, an inline glue-spec
+ if getid(prev) == gluespec_code then
+ report_visualize("ignoring invalid prev")
+ -- weird, how can this happen, an inline glue-spec, probably math
else
- info.prev = prev
- prev.next = info
+ setfield(info,"prev",prev)
+ setfield(prev,"next",info)
end
end
if head == current then
@@ -515,46 +570,55 @@ local function ruledbox(head,current,vertical,layer,what,simple,previous)
end
local function ruledglyph(head,current,previous)
- local wd = current.width
+ local wd = getfield(current,"width")
+ -- local wd = chardata[getfield(current,"font")][getfield(current,"char")].width
if wd ~= 0 then
- local ht = current.height
- local dp = current.depth
- local next = current.next
+ local ht = getfield(current,"height")
+ local dp = getfield(current,"depth")
+ local next = getnext(current)
local prev = previous
- current.next = nil
- current.prev = nil
- local linewidth = emwidth/20
+ setfield(current,"next",nil)
+ setfield(current,"prev",nil)
+ local linewidth = emwidth/(2*fraction)
local baseline
- if dp ~= 0 and ht ~= 0 then
+ -- if dp ~= 0 and ht ~= 0 then
+ if (dp >= 0 and ht >= 0) or (dp <= 0 and ht <= 0) then
baseline = new_rule(wd-2*linewidth,linewidth,0)
end
local doublelinewidth = 2*linewidth
-- could be a pdf rule
- local info =
- new_rule(linewidth,ht,dp) ..
- new_rule(wd-doublelinewidth,-dp+linewidth,dp) ..
- new_rule(linewidth,ht,dp) ..
- new_kern(-wd+linewidth) ..
- new_rule(wd-doublelinewidth,ht,-ht+linewidth) ..
- new_kern(-wd+doublelinewidth) ..
+ local info = linked_nodes(
+ new_rule(linewidth,ht,dp),
+ new_rule(wd-doublelinewidth,-dp+linewidth,dp),
+ new_rule(linewidth,ht,dp),
+ new_kern(-wd+linewidth),
+ new_rule(wd-doublelinewidth,ht,-ht+linewidth),
+ new_kern(-wd+doublelinewidth),
baseline
+ )
+local char = chardata[getfield(current,"font")][getfield(current,"char")]
+if char and char.tounicode and #char.tounicode > 4 then -- hack test
+ setlistcolor(info,c_ligature)
+ setlisttransparency(info,c_ligature_d)
+else
setlistcolor(info,c_glyph)
setlisttransparency(info,c_glyph_d)
+end
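+-- (The test above is a heuristic: a tounicode of more than four hex digits normally
+-- maps back to more than one character, so the glyph is most likely a ligature and
+-- gets the ligature color.)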
info = fast_hpack(info)
- info.width = 0
- info.height = 0
- info.depth = 0
- info[a_layer] = l_glyph
- local info = current .. new_kern(-wd) .. info
+ setfield(info,"width",0)
+ setfield(info,"height",0)
+ setfield(info,"depth",0)
+ setattr(info,a_layer,l_glyph)
+ local info = linked_nodes(current,new_kern(-wd),info)
info = fast_hpack(info)
- info.width = wd
+ setfield(info,"width",wd)
if next then
- info.next = next
- next.prev = info
+ setfield(info,"next",next)
+ setfield(next,"prev",info)
end
if prev then
- info.prev = prev
- prev.next = info
+ setfield(info,"prev",prev)
+ setfield(prev,"next",info)
end
if head == current then
return info, info
@@ -599,9 +663,9 @@ local tags = {
-- we sometimes pass previous as we can have issues in math (not watertight for all)
local function ruledglue(head,current,vertical)
- local spec = current.spec
- local width = spec.width
- local subtype = current.subtype
+ local spec = getfield(current,"spec")
+ local width = getfield(spec,"width")
+ local subtype = getsubtype(current)
local amount = formatters["%s:%0.3f"](tags[subtype] or (vertical and "VS") or "HS",width*pt_factor)
local info = g_cache[amount]
if info then
@@ -629,13 +693,13 @@ local function ruledglue(head,current,vertical)
info = vpack_nodes(info)
end
head, current = insert_node_before(head,current,info)
- return head, current.next
+ return head, getnext(current)
end
local k_cache = { }
local function ruledkern(head,current,vertical)
- local kern = current.kern
+ local kern = getfield(current,"kern")
local info = k_cache[kern]
if info then
-- print("kern hit")
@@ -655,13 +719,13 @@ local function ruledkern(head,current,vertical)
info = vpack_nodes(info)
end
head, current = insert_node_before(head,current,info)
- return head, current.next
+ return head, getnext(current)
end
local p_cache = { }
local function ruledpenalty(head,current,vertical)
- local penalty = current.penalty
+ local penalty = getfield(current,"penalty")
local info = p_cache[penalty]
if info then
-- print("penalty hit")
@@ -681,10 +745,10 @@ local function ruledpenalty(head,current,vertical)
info = vpack_nodes(info)
end
head, current = insert_node_before(head,current,info)
- return head, current.next
+ return head, getnext(current)
end
-local function visualize(head,vertical)
+local function visualize(head,vertical,forced)
local trace_hbox = false
local trace_vbox = false
local trace_vtop = false
@@ -697,13 +761,14 @@ local function visualize(head,vertical)
local trace_glyph = false
local trace_simple = false
local trace_user = false
+ local trace_math = false
local current = head
local previous = nil
local attr = unsetvalue
local prev_trace_fontkern = nil
while current do
- local id = current.id
- local a = current[a_visual] or unsetvalue
+ local id = getid(current)
+ local a = forced or getattr(current,a_visual) or unsetvalue
if a ~= attr then
prev_trace_fontkern = trace_fontkern
if a == unsetvalue then
@@ -719,6 +784,7 @@ local function visualize(head,vertical)
trace_glyph = false
trace_simple = false
trace_user = false
+ trace_math = false
else -- dead slow:
trace_hbox = hasbit(a, 1)
trace_vbox = hasbit(a, 2)
@@ -732,46 +798,45 @@ local function visualize(head,vertical)
trace_glyph = hasbit(a, 512)
trace_simple = hasbit(a,1024)
trace_user = hasbit(a,2048)
+ trace_math = hasbit(a,4096)
end
attr = a
end
if trace_strut then
- current[a_layer] = l_strut
+ setattr(current,a_layer,l_strut)
elseif id == glyph_code then
if trace_glyph then
head, current = ruledglyph(head,current,previous)
end
elseif id == disc_code then
- if trace_glyph then
- local pre = current.pre
- if pre then
- current.pre = ruledglyph(pre,pre)
- end
- local post = current.post
- if post then
- current.post = ruledglyph(post,post)
- end
- local replace = current.replace
- if replace then
- current.replace = ruledglyph(replace,replace)
- end
+ local pre = getfield(current,"pre")
+ if pre then
+ setfield(current,"pre",visualize(pre,false,a))
+ end
+ local post = getfield(current,"post")
+ if post then
+ setfield(current,"post",visualize(post,false,a))
+ end
+ local replace = getfield(current,"replace")
+ if replace then
+ setfield(current,"replace",visualize(replace,false,a))
end
elseif id == kern_code then
- local subtype = current.subtype
+ local subtype = getsubtype(current)
-- tricky ... we don't copy the trace attribute in node-inj (yet)
- if subtype == font_kern_code or current[a_fontkern] then
+ if subtype == font_kern_code or getattr(current,a_fontkern) then
if trace_fontkern or prev_trace_fontkern then
head, current = fontkern(head,current)
end
- elseif subtype == user_kern_code then
+ else -- if subtype == user_kern_code then
if trace_kern then
head, current = ruledkern(head,current,vertical)
end
end
elseif id == glue_code then
- local content = current.leader
+ local content = getleader(current)
if content then
- current.leader = visualize(content,false)
+ setfield(current,"leader",visualize(content,false))
elseif trace_glue then
head, current = ruledglue(head,current,vertical)
end
@@ -779,22 +844,18 @@ local function visualize(head,vertical)
if trace_penalty then
head, current = ruledpenalty(head,current,vertical)
end
- elseif id == disc_code then
- current.pre = visualize(current.pre)
- current.post = visualize(current.post)
- current.replace = visualize(current.replace)
elseif id == hlist_code then
- local content = current.list
+ local content = getlist(current)
if content then
- current.list = visualize(content,false)
+ setfield(current,"list",visualize(content,false))
end
if trace_hbox then
head, current = ruledbox(head,current,false,l_hbox,"H__",trace_simple,previous)
end
elseif id == vlist_code then
- local content = current.list
+ local content = getlist(current)
if content then
- current.list = visualize(content,true)
+ setfield(current,"list",visualize(content,true))
end
if trace_vtop then
head, current = ruledbox(head,current,true,l_vtop,"_T_",trace_simple,previous)
@@ -806,12 +867,16 @@ local function visualize(head,vertical)
head, current = whatsit(head,current)
end
elseif id == user_code then
- if trace_whatsit then
+ if trace_user then
head, current = user(head,current)
end
+ elseif id == math_code then
+ if trace_math then
+ head, current = math(head,current)
+ end
end
previous = current
- current = current.next
+ current = getnext(current)
end
return head
end
@@ -837,28 +902,39 @@ local function cleanup()
nk, k_cache = freed(k_cache)
nw, w_cache = freed(w_cache)
nb, b_cache = freed(b_cache)
- -- report_visualize("cache: %s fontkerns, %s skips, %s penalties, %s kerns, %s whatsits, %s boxes",nf,ng,np,nk,nw,nb)
+ -- report_visualize("cache cleanup: %s fontkerns, %s skips, %s penalties, %s kerns, %s whatsits, %s boxes",nf,ng,np,nk,nw,nb)
end
-function visualizers.handler(head)
+local function handler(head)
if usedfont then
starttiming(visualizers)
-- local l = texgetattribute(a_layer)
-- local v = texgetattribute(a_visual)
-- texsetattribute(a_layer,unsetvalue)
-- texsetattribute(a_visual,unsetvalue)
- head = visualize(head)
+ head = visualize(tonut(head))
-- texsetattribute(a_layer,l)
-- texsetattribute(a_visual,v)
-- -- cleanup()
stoptiming(visualizers)
+ return tonode(head), true
+ else
+ return head, false
end
- return head, false
end
+visualizers.handler = handler
+
function visualizers.box(n)
- local box = texgetbox(n)
- box.list = visualizers.handler(box.list)
+ if usedfont then
+ starttiming(visualizers)
+ local box = getbox(n)
+ setfield(box,"list",visualize(getlist(box)))
+ stoptiming(visualizers)
+ return head, true
+ else
+ return head, false
+ end
end
local last = nil
@@ -872,9 +948,9 @@ local mark = {
local function markfonts(list)
for n in traverse_nodes(list) do
- local id = n.id
+ local id = getid(n)
if id == glyph_code then
- local font = n.font
+ local font = getfont(n)
local okay = used[font]
if not okay then
last = last + 1
@@ -883,23 +959,32 @@ local function markfonts(list)
end
setcolor(n,okay)
elseif id == hlist_code or id == vlist_code then
- markfonts(n.list)
+ markfonts(getlist(n))
end
end
end
function visualizers.markfonts(list)
last, used = 0, { }
- markfonts(type(n) == "number" and texgetbox(n).list or n)
+    markfonts(type(list) == "number" and getlist(getbox(list)) or list)
end
-function commands.markfonts(n)
- visualizers.markfonts(n)
-end
+luatex.registerstopactions(cleanup)
statistics.register("visualization time",function()
if enabled then
- cleanup() -- in case we don't don't do it each time
+     -- cleanup() -- in case we don't do it each time
return format("%s seconds",statistics.elapsedtime(visualizers))
end
end)
+
+-- interface
+
+local implement = interfaces.implement
+
+implement { name = "setvisual", arguments = "string", actions = visualizers.setvisual }
+implement { name = "getvisual", arguments = "string", actions = { setvisual, context } }
+implement { name = "setvisuallayer", arguments = "string", actions = visualizers.setlayer }
+implement { name = "markvisualfonts", arguments = "integer", actions = visualizers.markfonts }
+implement { name = "setvisualfont", arguments = "integer", actions = visualizers.setfont }
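+
+-- These implement calls end up as the \clf_setvisual, \clf_getvisual, \clf_setvisualfont
+-- and \clf_markvisualfonts calls used in trac-vis.mkiv below.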
+
diff --git a/tex/context/base/trac-vis.mkiv b/tex/context/base/trac-vis.mkiv
index 694d1b09d..6ee8a6b8d 100644
--- a/tex/context/base/trac-vis.mkiv
+++ b/tex/context/base/trac-vis.mkiv
@@ -54,10 +54,10 @@
\let\syst_visualizers_vbox\vbox
\let\syst_visualizers_vtop\vtop
-\unexpanded\def\ruledhbox{\syst_visualizers_hbox attr \visualattribute \ctxcommand{visual("simplehbox")} }
-\unexpanded\def\ruledvbox{\syst_visualizers_vbox attr \visualattribute \ctxcommand{visual("simplevbox")} }
-\unexpanded\def\ruledvtop{\syst_visualizers_vtop attr \visualattribute \ctxcommand{visual("simplevtop")} } % special case
-\unexpanded\def\ruledtopv{\syst_visualizers_vtop attr \visualattribute \ctxcommand{visual("vtop")} }
+\unexpanded\def\ruledhbox{\syst_visualizers_hbox attr \visualattribute \clf_getvisual{simplehbox} }
+\unexpanded\def\ruledvbox{\syst_visualizers_vbox attr \visualattribute \clf_getvisual{simplevbox} }
+\unexpanded\def\ruledvtop{\syst_visualizers_vtop attr \visualattribute \clf_getvisual{simplevtop} } % special case
+\unexpanded\def\ruledtopv{\syst_visualizers_vtop attr \visualattribute \clf_getvisual{vtop} }
\unexpanded\def\ruledmbox#1{\ruledhbox{\startimath#1\stopimath}}
@@ -83,26 +83,33 @@
\to \t_syst_visualizers_optimize
\unexpanded\def\showmakeup
- {\ctxcommand{setvisual("makeup")}%
- \let\normalvtop\ruledtopv
- \let\vtop \ruledtopv}
+ {\dosingleempty\syst_visualizers_makeup}
+
+\unexpanded\def\syst_visualizers_makeup[#1]%
+ {\iffirstargument
+ \clf_setvisual{#1}%
+ \else
+ \clf_setvisual{makeup}%
+ \let\normalvtop\ruledtopv
+ \let\vtop \ruledtopv
+ \fi}
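+
+% As a sketch of the new behaviour: \showmakeup[glue] only enables the glue visualizer,
+% while a bare \showmakeup still enables the whole makeup set and overloads \vtop as
+% before.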
\unexpanded\def\showallmakeup
- {\ctxcommand{setvisual("all")}%
+ {\clf_setvisual{all}%
\let\normalvtop\ruledtopv
\let\vtop \ruledtopv
\showstruts}
\unexpanded\def\showboxes
- {\ctxcommand{setvisual("boxes")}%
+ {\clf_setvisual{boxes}%
\let\normalvtop\ruledtopv
\let\vtop \ruledtopv}
\unexpanded\def\showglyphs
- {\ctxcommand{setvisual("glyph")}}
+ {\clf_setvisual{glyph}}
\unexpanded\def\showfontkerns
- {\ctxcommand{setvisual("fontkern")}}
+ {\clf_setvisual{fontkern}}
\unexpanded\def\setvisualizerfont
{\dosingleempty\syst_visualizers_setfont}
@@ -110,18 +117,21 @@
\def\syst_visualizers_setfont[#1]% somename at 4pt
{\begingroup
\doifelsenothing{#1}{\definedfont[Mono at 4pt]}{\definedfont[#1]}%
- \ctxlua{nodes.visualizers.setfont()}%
+ \clf_setvisualfont\fontid\font
\endgroup}
+\unexpanded\def\resetvisualizers
+ {\attribute\visualattribute\attributeunsetvalue}
+
%D New (these might also be a visualizers):
-\definecolor[f:r:t][a=1,t=.25,r=1]
-\definecolor[f:g:t][a=1,t=.25,g=1]
-\definecolor[f:b:t][a=1,t=.25,b=1]
-\definecolor[f:c:t][a=1,t=.25,c=1]
-\definecolor[f:m:t][a=1,t=.25,m=1]
-\definecolor[f:y:t][a=1,t=.25,y=1]
-\definecolor[f:k:t][a=1,t=.25,s=0]
+% \definecolor[f:r:t][a=1,t=.25,r=1]
+% \definecolor[f:g:t][a=1,t=.25,g=1]
+% \definecolor[f:b:t][a=1,t=.25,b=1]
+% \definecolor[f:c:t][a=1,t=.25,c=1]
+% \definecolor[f:m:t][a=1,t=.25,m=1]
+% \definecolor[f:y:t][a=1,t=.25,y=1]
+% \definecolor[f:k:t][a=1,t=.25,s=0]
% \def\node_backgrounds_boxes_add#1[#2]%
% {\node_backgrounds_boxes_initialize
@@ -141,14 +151,14 @@
%D Overload:
% \def\spac_struts_vide_hbox
-% {\hbox attr \visualattribute \ctxcommand{visual("strut")} }
+% {\hbox attr \visualattribute \clf_getvisual{strut} }
%
% \def\spac_struts_vide_hbox
-% {\xdef\spac_struts_vide_hbox{\hbox attr \visualattribute \ctxcommand{visual("strut")} }%
+% {\xdef\spac_struts_vide_hbox{\hbox attr \visualattribute \clf_getvisual{strut} }%
% \spac_struts_vide_hbox}
\unexpanded\def\spac_struts_vide_hbox
- {\hbox attr \visualattribute \ctxcommand{visual("strut")} }
+ {\hbox attr \visualattribute \clf_getvisual{strut} }
\appendtoks
\normalexpanded{\unexpanded\xdef\spac_struts_vide_hbox\expandafter{\spac_struts_vide_hbox}}%
@@ -160,7 +170,7 @@
{\dontleavehmode
\begingroup
\setbox\scratchbox\hbox{\getbuffer}%
- \ctxcommand{markfonts(\number\scratchbox)}%
+ \clf_markvisualfonts\scratchbox
\unhbox\scratchbox
\endgroup}
diff --git a/tex/context/base/type-imp-buy.mkiv b/tex/context/base/type-imp-buy.mkiv
index 9815cc44b..dbfffe57c 100644
--- a/tex/context/base/type-imp-buy.mkiv
+++ b/tex/context/base/type-imp-buy.mkiv
@@ -11,27 +11,125 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+% monotype sabon
+
\starttypescriptcollection[sabon]
-\starttypescript [serif] [sabon] [name]
-
- \definefontsynonym [Serif] [SabonMT]
- \definefontsynonym [SerifItalic] [SabonMT-Italic]
- \definefontsynonym [SerifSlanted] [SabonMT-Italic]
- \definefontsynonym [SerifBold] [SabonMT-SemiBold]
- \definefontsynonym [SerifBoldItalic] [SabonMT-SemiBoldItalic]
- \definefontsynonym [SerifBoldSlanted] [SabonMT-SemiBoldItalic]
- \definefontsynonym [SerifCaps] [SabonMT-RegularSC]
-
- \definefontsynonym[SabonMT] [sab_____]
- \definefontsynonym[SabonMT-Italic] [sabi____]
- \definefontsynonym[SabonMT-ItalicOsF] [saboi___]
- \definefontsynonym[SabonMT-SemiBoldOsF] [sabos___]
- \definefontsynonym[SabonMT-SemiBold] [sabs____]
- \definefontsynonym[SabonMT-RegularSC] [sabsc___]
- \definefontsynonym[SabonMT-SemiBoldItalic] [sabsi___]
- \definefontsynonym[SabonMT-SemiBoldItalicOsF][sasio___]
-\stoptypescript
+ \starttypescript [serif] [sabon] [name]
+ %
+ \definefontsynonym [Serif] [SabonMT]
+ \definefontsynonym [SerifItalic] [SabonMT-Italic]
+ \definefontsynonym [SerifSlanted] [SabonMT-Italic]
+ \definefontsynonym [SerifBold] [SabonMT-SemiBold]
+ \definefontsynonym [SerifBoldItalic] [SabonMT-SemiBoldItalic]
+ \definefontsynonym [SerifBoldSlanted][SabonMT-SemiBoldItalic]
+ \definefontsynonym [SerifCaps] [SabonMT-RegularSC]
+ %
+ \definefontsynonym[SabonMT] [sab_____]
+ \definefontsynonym[SabonMT-Italic] [sabi____]
+ \definefontsynonym[SabonMT-ItalicOsF] [saboi___]
+ \definefontsynonym[SabonMT-SemiBoldOsF] [sabos___]
+ \definefontsynonym[SabonMT-SemiBold] [sabs____]
+ \definefontsynonym[SabonMT-RegularSC] [sabsc___]
+ \definefontsynonym[SabonMT-SemiBoldItalic] [sabsi___]
+ \definefontsynonym[SabonMT-SemiBoldItalicOsF][sasio___]
+ %
+ \stoptypescript
\stoptypescriptcollection
+% itc stone
+
+\starttypescriptcollection[stone]
+
+ \starttypescript [sans] [stone] [name]
+ %
+ \definefontsynonym [Sans] [StoneSansITC-Medium]
+ \definefontsynonym [SansItalic] [StoneSansITC-MediumItalic]
+ \definefontsynonym [SansSlanted] [StoneSansITC-MediumItalic]
+ \definefontsynonym [SansBold] [StoneSansITC-Bold]
+ \definefontsynonym [SansBoldItalic] [StoneSansITC-BoldItalic]
+ \definefontsynonym [SansBoldSlanted][StoneSansITC-BoldItalic]
+ \definefontsynonym [SansCaps] [StoneSansSCITC-Medium]
+ %
+ \definefontsynonym[StoneSansITC-Bold] [stosnb]
+ \definefontsynonym[StoneSansITC-BoldItalic] [stosnbi]
+ \definefontsynonym[StoneSansITC-Medium] [stosnm]
+ \definefontsynonym[StoneSansITC-MediumItalic][stosnmi]
+ \definefontsynonym[StoneSansSemITC-Semi] [stosns]
+ \definefontsynonym[StoneSansSemITC-SemiIta] [stosnsi]
+ \definefontsynonym[StoneSansSCITC-Medium] [stosnscm]
+ \definefontsynonym[StoneSansSemSCITC-Semi] [stosnscs]
+ %
+ \stoptypescript
+
+ \starttypescript [serif] [stone] [name]
+ %
+ \definefontsynonym [Serif] [StoneSerifITC-Medium]
+ \definefontsynonym [SerifItalic] [StoneSerifITC-MediumItalic]
+ \definefontsynonym [SerifSlanted] [StoneSerifITC-MediumItalic]
+ \definefontsynonym [SerifBold] [StoneSerifITC-Bold]
+ \definefontsynonym [SerifBoldItalic] [StoneSerifITC-BoldItalic]
+ \definefontsynonym [SerifBoldSlanted][StoneSerifITC-BoldItalic]
+ \definefontsynonym [SerifCaps] [StoneSerifSCITC-Medium]
+ %
+ \definefontsynonym[StoneSerifITC-Bold] [stosfb]
+ \definefontsynonym[StoneSerifITC-BoldItalic] [stosfbi]
+ \definefontsynonym[StoneSerifITC-Medium] [stosfm]
+ \definefontsynonym[StoneSerifITC-MediumItalic][stosfmi]
+ \definefontsynonym[StoneSerifSemITC-Semi] [stosfs]
+ \definefontsynonym[StoneSerifSemITC-SemiIta] [stosfsi]
+ \definefontsynonym[StoneSerifSCITC-Medium] [stosfscm]
+ \definefontsynonym[StoneSerifSemSCITC-Semi] [stosfscs]
+ %
+ \stoptypescript
+
+ \starttypescript [sans] [stone-oldstyle] [name]
+ %
+ \definefontsynonym [Sans] [StoneSansOSITC-Medium]
+ \definefontsynonym [SansItalic] [StoneSansOSITC-MediumItalic]
+ \definefontsynonym [SansSlanted] [StoneSansOSITC-MediumItalic]
+ \definefontsynonym [SansBold] [StoneSansOSITC-Bold]
+ \definefontsynonym [SansBoldItalic] [StoneSansOSITC-BoldItalic]
+ \definefontsynonym [SansBoldSlanted][StoneSansOSITC-BoldItalic]
+ \definefontsynonym [SansCaps] [StoneSansSCITC-Medium]
+ %
+ \definefontsynonym[StoneSansOSITC-Bold] [stosnob]
+ \definefontsynonym[StoneSansOSITC-BoldItalic] [stosnobi]
+ \definefontsynonym[StoneSansOSITC-Medium] [stosnom]
+ \definefontsynonym[StoneSansOSITC-MediumItalic][stosnomi]
+ \definefontsynonym[StoneSansSemOSITC-Semi] [stosnos]
+ \definefontsynonym[StoneSansSemOSITC-SemiIta] [stosnosi]
+ %
+ \stoptypescript
+
+ \starttypescript [serif] [stone-oldstyle] [name]
+ %
+ \definefontsynonym [Serif] [StoneSerifOSITC-Medium]
+ \definefontsynonym [SerifItalic] [StoneSerifOSITC-MediumItalic]
+ \definefontsynonym [SerifSlanted] [StoneSerifOSITC-MediumItalic]
+ \definefontsynonym [SerifBold] [StoneSerifOSITC-Bold]
+ \definefontsynonym [SerifBoldItalic] [StoneSerifOSITC-BoldItalic]
+ \definefontsynonym [SerifBoldSlanted] [StoneSerifOSITC-BoldItalic]
+ \definefontsynonym [SerifCaps] [StoneSerifSCITC-Medium]
+ %
+ \definefontsynonym[StoneSerifOSITC-Bold] [stosfob]
+ \definefontsynonym[StoneSerifOSITC-BoldItalic] [stosfobi]
+ \definefontsynonym[StoneSerifOSITC-Medium] [stosfom]
+ \definefontsynonym[StoneSerifOSITC-MediumItalic][stosfomi]
+ \definefontsynonym[StoneSerifSemOSITC-Semi] [stosfos]
+ \definefontsynonym[StoneSerifSemOSITC-SemiIta] [stosfosi]
+ %
+ \stoptypescript
+
+\stoptypescriptcollection
+
+% linotype industria
+
+\starttypescriptcollection[industria]
+
+ \starttypescript [sans] [industria] [name]
+ \definefontsynonym[Industria-Solid][lt_50545]
+ \stoptypescript
+
+\stoptypescriptcollection
diff --git a/tex/context/base/type-imp-cambria.mkiv b/tex/context/base/type-imp-cambria.mkiv
index 91288b6d0..9bfa2ee5c 100644
--- a/tex/context/base/type-imp-cambria.mkiv
+++ b/tex/context/base/type-imp-cambria.mkiv
@@ -11,6 +11,10 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+%D We use Dejavu as it covers a wider range of monospaced glyphs.
+
+\loadtypescriptfile[dejavu]
+
\starttypescriptcollection[cambria]
% microsoft: cambria.ttc cambriab.ttf cambriai.ttf cambriaz.ttf
@@ -60,13 +64,13 @@
\starttypescript [cambria,cambria-m,cambria-a]
% any
\definetypeface [cambria] [\s!rm] [\s!serif] [\typescriptone] [\s!default]
- \definetypeface [cambria] [\s!tt] [\s!mono] [modern] [\s!default]
+ \definetypeface [cambria] [\s!tt] [\s!mono] [dejavu] [\s!default]
\definetypeface [cambria] [\s!mm] [\s!math] [\typescriptone] [\s!default]
\stoptypescript
\starttypescript [cambria-x,cambria-y]
% test x
\definetypeface [\typescriptone] [\s!rm] [\s!serif] [cambria] [\s!default]
- \definetypeface [\typescriptone] [\s!tt] [\s!mono] [modern] [\s!default]
+ \definetypeface [\typescriptone] [\s!tt] [\s!mono] [dejavu] [\s!default]
\definetypeface [\typescriptone] [\s!mm] [\s!math] [\typescriptone] [\s!default]
\stoptypescript
diff --git a/tex/context/base/type-imp-dejavu.mkiv b/tex/context/base/type-imp-dejavu.mkiv
index de1f7752c..41cf1f701 100644
--- a/tex/context/base/type-imp-dejavu.mkiv
+++ b/tex/context/base/type-imp-dejavu.mkiv
@@ -15,33 +15,6 @@
\starttypescriptcollection[dejavu]
- \starttypescript [\s!mono] [dejavu-condensed] [\s!name]
- \setups[\s!font:\s!fallback:\s!mono]
- % \definefontsynonym [\s!Mono] [\s!name:dejavusansmonocondensed] [\s!features=\s!none]
- % \definefontsynonym [\s!MonoBold] [\s!name:dejavusansmonoboldcondensed] [\s!features=\s!none]
- % \definefontsynonym [\s!MonoItalic] [\s!name:dejavusansmonoobliquecondensed] [\s!features=\s!none]
- % \definefontsynonym [\s!MonoBoldItalic] [\s!name:dejavusansmonoboldobliquecondensed] [\s!features=\s!none]
- \definefontsynonym [\s!Mono] [\s!name:dejavusansmono] [\s!features=dejavu-condensed-mono]
- \definefontsynonym [\s!MonoBold] [\s!name:dejavusansmonobold] [\s!features=dejavu-condensed-mono]
- \definefontsynonym [\s!MonoItalic] [\s!name:dejavusansmonooblique] [\s!features=dejavu-condensed-mono]
- \definefontsynonym [\s!MonoBoldItalic] [\s!name:dejavusansmonoboldoblique] [\s!features=dejavu-condensed-mono]
- \stoptypescript
-
- % \starttypescript [\s!mono] [dejavu-condensed] [\s!name]
- % \setups[\s!font:\s!fallback:\s!mono]
- % \definefontsynonym [\s!Mono] [\s!name:dejavusansmono] [\s!features=\s!none]
- % \definefontsynonym [\s!MonoBold] [\s!name:dejavusansmonobold] [\s!features=\s!none]
- % \definefontsynonym [\s!MonoItalic] [\s!name:dejavusansmonooblique] [\s!features=\s!none]
- % \definefontsynonym [\s!MonoBoldItalic] [\s!name:dejavusansmonoboldoblique] [\s!features=\s!none]
- % \stoptypescript
-
- \starttypescript[dejavu-condensed]
- \definetypeface [dejavu-condensed] [\s!rm] [\s!serif] [dejavu-condensed] [\s!default]
- \definetypeface [dejavu-condensed] [\s!ss] [\s!sans] [dejavu-condensed] [\s!default]
- \definetypeface [dejavu-condensed] [\s!tt] [\s!mono] [dejavu-condensed] [\s!default]
- \definetypeface [dejavu-condensed] [\s!mm] [\s!math] [xits] [\s!default] [\s!rscale=1.2]
- \stoptypescript
-
\starttypescript [\s!serif] [dejavu] [\s!name]
\setups[\s!font:\s!fallback:\s!serif]
\definefontsynonym [\s!Serif] [\s!name:dejavuserif] [\s!features=\s!default]
@@ -66,11 +39,17 @@
\definefontsynonym [\s!MonoBoldItalic] [\s!name:dejavusansmonoboldoblique] [\s!features=\s!none]
\stoptypescript
+ \starttypescript [\s!math][dejavu][\s!name]
+ %\loadfontgoodies[dejavu]
+ \definefontsynonym[\s!MathRoman][file:dejavu-math.otf][\s!features=\s!math\mathsizesuffix]
+ \stoptypescript
+
\starttypescript[dejavu]
\definetypeface [dejavu] [\s!rm] [\s!serif] [dejavu] [\s!default]
\definetypeface [dejavu] [\s!ss] [\s!sans] [dejavu] [\s!default]
\definetypeface [dejavu] [\s!tt] [\s!mono] [dejavu] [\s!default]
- \definetypeface [dejavu] [\s!mm] [\s!math] [xits] [\s!default] [\s!rscale=1.2]
+% \definetypeface [dejavu] [\s!mm] [\s!math] [xits] [\s!default] [\s!rscale=1.2]
+ \definetypeface [dejavu] [\s!mm] [\s!math] [dejavu] [\s!default]
\stoptypescript
\starttypescript[dejavubidi]
@@ -96,4 +75,32 @@
\definefontsynonym [\s!SansBoldItalic] [\s!name:dejavusanscondensedboldoblique] [\s!features=\s!default]
\stoptypescript
+ \starttypescript [\s!mono] [dejavu-condensed] [\s!name]
+ \setups[\s!font:\s!fallback:\s!mono]
+ % \definefontsynonym [\s!Mono] [\s!name:dejavusansmonocondensed] [\s!features=\s!none]
+ % \definefontsynonym [\s!MonoBold] [\s!name:dejavusansmonoboldcondensed] [\s!features=\s!none]
+ % \definefontsynonym [\s!MonoItalic] [\s!name:dejavusansmonoobliquecondensed] [\s!features=\s!none]
+ % \definefontsynonym [\s!MonoBoldItalic] [\s!name:dejavusansmonoboldobliquecondensed] [\s!features=\s!none]
+ \definefontsynonym [\s!Mono] [\s!name:dejavusansmono] [\s!features=dejavu-condensed-mono]
+ \definefontsynonym [\s!MonoBold] [\s!name:dejavusansmonobold] [\s!features=dejavu-condensed-mono]
+ \definefontsynonym [\s!MonoItalic] [\s!name:dejavusansmonooblique] [\s!features=dejavu-condensed-mono]
+ \definefontsynonym [\s!MonoBoldItalic] [\s!name:dejavusansmonoboldoblique] [\s!features=dejavu-condensed-mono]
+ \stoptypescript
+
+ % \starttypescript [\s!mono] [dejavu-condensed] [\s!name]
+ % \setups[\s!font:\s!fallback:\s!mono]
+ % \definefontsynonym [\s!Mono] [\s!name:dejavusansmono] [\s!features=\s!none]
+ % \definefontsynonym [\s!MonoBold] [\s!name:dejavusansmonobold] [\s!features=\s!none]
+ % \definefontsynonym [\s!MonoItalic] [\s!name:dejavusansmonooblique] [\s!features=\s!none]
+ % \definefontsynonym [\s!MonoBoldItalic] [\s!name:dejavusansmonoboldoblique] [\s!features=\s!none]
+ % \stoptypescript
+
+ \starttypescript[dejavu-condensed]
+ \definetypeface [dejavu-condensed] [\s!rm] [\s!serif] [dejavu-condensed] [\s!default]
+ \definetypeface [dejavu-condensed] [\s!ss] [\s!sans] [dejavu-condensed] [\s!default]
+ \definetypeface [dejavu-condensed] [\s!tt] [\s!mono] [dejavu-condensed] [\s!default]
+% \definetypeface [dejavu-condensed] [\s!mm] [\s!math] [xits] [\s!default] [\s!rscale=1.2]
+ \definetypeface [dejavu-condensed] [\s!mm] [\s!math] [dejavu] [\s!default]
+ \stoptypescript
+
\stoptypescriptcollection
diff --git a/tex/context/base/type-imp-ebgaramond.mkiv b/tex/context/base/type-imp-ebgaramond.mkiv
new file mode 100644
index 000000000..838654d49
--- /dev/null
+++ b/tex/context/base/type-imp-ebgaramond.mkiv
@@ -0,0 +1,45 @@
+%D \module
+%D [ file=type-imp-ebgaramond,
+%D version=2013.06.22,
+%D title=\CONTEXT\ Typescript Macros,
+%D subtitle=EB Garamond,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
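+% Typical usage (a sketch, analogous to the other typescript files):
+%
+% \setupbodyfont[ebgaramond,11pt]
+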
+\definefontfeature
+ [eb-garamond-normal]
+ [default]
+ [mode=node,ccmp=yes,calt=yes,
+ liga=yes,dlig=yes,hlig=yes,
+ kern=yes,mark=yes,mkmk=yes,
+ onum=yes,pnum=yes,salt=yes,
+ script=latn]
+
+\definefontfeature
+ [eb-garamond-smallcaps]
+ [eb-garamond-normal]
+ [smcp=yes,c2sc=yes]
+
+\starttypescriptcollection[ebgaramond]
+
+ \starttypescript [serif] [ebgaramond]
+ \loadfontgoodies[ebgaramond]
+ \setups[font:fallback:serif]
+ \definefontsynonym [Serif] [file:ebgaramond-regular] [features=eb-garamond-normal]
+ \definefontsynonym [SerifItalic] [file:ebgaramond-italic] [features=eb-garamond-normal]
+ \definefontsynonym [SerifBold] [file:ebgaramond-bold] [features=eb-garamond-normal]
+ \definefontsynonym [SerifCaps] [Serif] [features=eb-garamond-smallcaps]
+ \stoptypescript
+
+ \starttypescript[ebgaramond]
+ \definetypeface [ebgaramond] [rm] [serif] [ebgaramond] [default] [designsize=auto]
+ \definetypeface [ebgaramond] [tt] [mono] [dejavu] [default]
+ \definetypeface [ebgaramond] [mm] [math] [bonum] [default]
+ \stoptypescript
+
+\stoptypescriptcollection
diff --git a/tex/context/base/type-imp-ipaex.mkiv b/tex/context/base/type-imp-ipaex.mkiv
new file mode 100644
index 000000000..b11f96878
--- /dev/null
+++ b/tex/context/base/type-imp-ipaex.mkiv
@@ -0,0 +1,137 @@
+%D \module
+%D [ file=type-imp-ipaex,
+%D version=2015.05.01,
+%D title=\CONTEXT\ Typescript Macros,
+%D subtitle=IPAex,
+%D author=Yusuke KUROKI \& Hans HAGEN,
+%D date=\currentdate,
+%D copyright=Yusuke KUROKI \& Hans HAGEN]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% usage: \setupbodyfont[ipaex,10pt]
+
+\loadtypescriptfile[texgyre]
+\loadtypescriptfile[xits]
+
+\definefontfeature
+ [jp-default]
+ [script=default,lang=japanese]
+
+\definefontfeature
+ [jp-latin-default]
+ [jp-default]
+
+\definefontfeature
+ [jp-latin-slanted]
+ [jp-latin-default]
+ [slant=.2]
+
+\definefontfeature
+ [jp-latin-slanted-mono]
+ [slant=.2]
+
+\definefontfeature
+ [jp-latin-smallcaps]
+ [jp-latin-default]
+ [smcp=yes]
+
+\definefontfeature
+ [jp-latin-smallcaps-mono]
+  [smcp=yes]
+
+\definefontfallback[jp-serif] [texgyrepagella-regular*jp-latin-default] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-serifbold] [texgyrepagella-bold*jp-latin-default] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-serifitalic] [texgyrepagella-italic*jp-latin-default] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-serifbolditalic] [texgyrepagella-bolditalic*jp-latin-default] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-serifslanted] [texgyrepagella-regular*jp-latin-slanted] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-serifboldslanted][texgyrepagella-bold*jp-latin-slanted] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-serifcaps] [texgyrepagella-regular*jp-latin-smallcaps] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-sans] [texgyreheros-regular*jp-latin-default] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-sansbold] [texgyreheros-bold*jp-latin-default] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-sansitalic] [texgyreheros-italic*jp-latin-default] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-sansbolditalic] [texgyreheros-bolditalic*jp-latin-default] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-sansslanted] [texgyreheros-regular*jp-latin-slanted] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-sansboldslanted] [texgyreheros-bold*jp-latin-slanted] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-sanscaps] [texgyreheros-regular*jp-latin-smallcaps] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-mono] [texgyrecursor-regular] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-monobold] [texgyrecursor-bold] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-monoitalic] [texgyrecursor-italic] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-monobolditalic] [texgyrecursor-bolditalic] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-monoslanted] [texgyrecursor-regular*jp-latin-slanted-mono] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-monoboldslanted] [texgyrecursor-bold*jp-latin-slanted-mono] [0x0000-0x0400][force=yes,rscale=1.0406]
+\definefontfallback[jp-monocaps] [texgyrecursor-regular*jp-latin-smallcaps-mono][0x0000-0x0400][force=yes,rscale=1.0406]
+
+\starttypescriptcollection[ipaex]
+ \starttypescript [\s!serif] [ipaexm]
+ \definefontsynonym [ipaexm] [\s!file:ipaexm][\s!features=jp-default,\s!fallbacks=jp-serif]
+ \definefontsynonym [ipaexmbold] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-serifbold]
+ \definefontsynonym [ipaexmitalic] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-serifitalic]
+ \definefontsynonym [ipaexmbolditalic] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-serifbolditalic]
+ \definefontsynonym [ipaexmslanted] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-serifslanted]
+ \definefontsynonym [ipaexmboldslanted][\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-serifboldslanted]
+ \definefontsynonym [ipaexmcaps] [\s!file:ipaexm][\s!features=jp-default,\s!fallbacks=jp-serifcaps]
+ \stoptypescript
+
+ \starttypescript [\s!sans] [ipaexg]
+ \definefontsynonym[ipaexg] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-sans]
+ \definefontsynonym[ipaexgbold] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-sansbold]
+ \definefontsynonym[ipaexgitalic] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-sansitalic]
+ \definefontsynonym[ipaexgbolditalic] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-sansbolditalic]
+ \definefontsynonym[ipaexgslanted] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-sansslanted]
+ \definefontsynonym[ipaexgboldslanted] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-sansboldslanted]
+ \definefontsynonym[ipaexgcaps] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-sanscaps]
+ \stoptypescript
+
+ \starttypescript [\s!mono] [ipaexgmono]
+ \definefontsynonym[ipaexgmono] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-mono]
+ \definefontsynonym[ipaexgmonobold] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-monobold]
+ \definefontsynonym[ipaexgmonoitalic] [\s!file:ipaexg][\s!features=jp-slanted,\s!fallbacks=jp-monoitalic]
+ \definefontsynonym[ipaexgmonobolditalic] [\s!file:ipaexg][\s!features=jp-slanted,\s!fallbacks=jp-monobolditalic]
+ \definefontsynonym[ipaexgmonoslanted] [\s!file:ipaexg][\s!features=jp-slanted,\s!fallbacks=jp-monoslanted]
+ \definefontsynonym[ipaexgmonoboldslanted] [\s!file:ipaexg][\s!features=jp-slanted,\s!fallbacks=jp-monoboldslanted]
+ \definefontsynonym[ipaexgmonocaps] [\s!file:ipaexg][\s!features=jp-default,\s!fallbacks=jp-monocaps]
+ \stoptypescript
+
+ \starttypescript [\s!serif] [ipaexm] [\s!name]
+ \definefontsynonym[\s!Serif] [ipaexm]
+ \definefontsynonym[\s!SerifBold] [ipaexmbold]
+ \definefontsynonym[\s!SerifItalic] [ipaexmitalic]
+ \definefontsynonym[\s!SerifBoldItalic] [ipaexmbolditalic]
+ \definefontsynonym[\s!SerifSlanted] [ipaexmslanted]
+ \definefontsynonym[\s!SerifBoldSlanted][ipaexmboldslanted]
+ \definefontsynonym[\s!SerifCaps] [ipaexmcaps]
+ \stoptypescript
+
+ \starttypescript [\s!sans] [ipaexg] [\s!name]
+ \definefontsynonym[\s!Sans] [ipaexg]
+ \definefontsynonym[\s!SansBold] [ipaexgbold]
+ \definefontsynonym[\s!SansItalic] [ipaexgitalic]
+ \definefontsynonym[\s!SansBoldItalic] [ipaexgbolditalic]
+ \definefontsynonym[\s!SansSlanted] [ipaexgslanted]
+ \definefontsynonym[\s!SansBoldSlanted] [ipaexgboldslanted]
+ \definefontsynonym[\s!SansCaps] [ipaexgcaps]
+ \stoptypescript
+
+ \starttypescript [\s!mono] [ipaexgmono] [\s!name]
+ \definefontsynonym[\s!Mono] [ipaexgmono]
+ \definefontsynonym[\s!MonoBold] [ipaexgmonobold]
+ \definefontsynonym[\s!MonoItalic] [ipaexgmonoitalic]
+ \definefontsynonym[\s!MonoBoldItalic] [ipaexgmonobolditalic]
+ \definefontsynonym[\s!MonoSlanted] [ipaexgmonoslanted]
+ \definefontsynonym[\s!MonoBoldSlanted] [ipaexgmonoboldslanted]
+ \definefontsynonym[\s!MonoCaps] [ipaexgmonocaps]
+ \stoptypescript
+
+ % xits might get replaced with a pagella once we have it
+
+ \starttypescript[ipaex]
+ \definetypeface [ipaex] [\s!rm] [\s!serif] [ipaexm]
+ \definetypeface [ipaex] [\s!ss] [\s!sans] [ipaexg]
+ \definetypeface [ipaex] [\s!tt] [\s!mono] [ipaexgmono]
+ \definetypeface [ipaex] [\s!mm] [\s!math] [xits]
+ \stoptypescript
+
+\stoptypescriptcollection
diff --git a/tex/context/base/type-imp-latinmodern.mkiv b/tex/context/base/type-imp-latinmodern.mkiv
index afe2c6417..fe4b669bd 100644
--- a/tex/context/base/type-imp-latinmodern.mkiv
+++ b/tex/context/base/type-imp-latinmodern.mkiv
@@ -71,11 +71,14 @@
\starttypescript [\s!math] [modern,latin-modern-designsize,latin-modern] [\s!name]
\loadfontgoodies[lm]
- \loadfontgoodies[lm-math]
\definefontsynonym [\s!MathRoman] [LMMathRoman-Regular]
\definefontsynonym [\s!MathRomanBold] [LMMathRoman-Bold]
\stoptypescript
+ \starttypescript [\s!math] [latin-modern-designsize] [\s!name]
+ \loadfontgoodies[lm-math]
+ \stoptypescript
+
\starttypescript [\s!serif] [modern-variable,latin-modern-variable-designsize,latin-modern-variable] [\s!name]
\loadfontgoodies[lm]
\definefontsynonym [\s!Serif] [LMTypewriterVarWd-Regular] [\s!features=\s!default]
diff --git a/tex/context/base/type-imp-lato.mkiv b/tex/context/base/type-imp-lato.mkiv
new file mode 100644
index 000000000..8fb8647fc
--- /dev/null
+++ b/tex/context/base/type-imp-lato.mkiv
@@ -0,0 +1,56 @@
+%D \module
+%D [ file=type-imp-lato,
+%D version=2014.05.02,
+%D title=\CONTEXT\ Typescript Macros,
+%D subtitle=Lato fonts,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% hai : hair / lta : italic
+% lig : light / lta : italic
+% reg : regular / lta : italic
+% bol : bold / lta : italic
+% bla : black / lta : italic
+
+\loadtypescriptfile[dejavu]
+\loadtypescriptfile[xits]
+
+\starttypescriptcollection[lato]
+
+ \starttypescript [\s!sans] [lato] [\s!name]
+ \setups[\s!font:\s!fallback:\s!sans]
+ \definefontsynonym [\s!Sans] [\s!file:lato-reg] [\s!features=\s!default]
+ \definefontsynonym [\s!SansBold] [\s!file:lato-bol] [\s!features=\s!default]
+ \definefontsynonym [\s!SansItalic] [\s!file:lato-reglta] [\s!features=\s!default]
+ \definefontsynonym [\s!SansBoldItalic] [\s!file:lato-bollta] [\s!features=\s!default]
+ \stoptypescript
+
+ \starttypescript [\s!sans] [lato-light] [\s!name]
+ \setups[\s!font:\s!fallback:\s!sans]
+ \definefontsynonym [\s!Sans] [\s!file:lato-lig] [\s!features=\s!default]
+ \definefontsynonym [\s!SansBold] [\s!file:lato-reg] [\s!features=\s!default]
+ \definefontsynonym [\s!SansItalic] [\s!file:lato-liglta] [\s!features=\s!default]
+ \definefontsynonym [\s!SansBoldItalic] [\s!file:lato-reglta] [\s!features=\s!default]
+ \stoptypescript
+
+ \starttypescript [\s!sans] [lato-dark] [\s!name]
+ \setups[\s!font:\s!fallback:\s!sans]
+ \definefontsynonym [\s!Sans] [\s!file:lato-bol] [\s!features=\s!default]
+ \definefontsynonym [\s!SansBold] [\s!file:lato-bla] [\s!features=\s!default]
+ \definefontsynonym [\s!SansItalic] [\s!file:lato-bollta] [\s!features=\s!default]
+ \definefontsynonym [\s!SansBoldItalic] [\s!file:lato-blalta] [\s!features=\s!default]
+ \stoptypescript
+
+ \starttypescript[lato,lato-light,lato-dark]
+ \definetypeface [\typescriptone] [\s!ss] [\s!sans] [\typescriptone] [\s!default]
+ \definetypeface [\typescriptone] [\s!rm] [\s!serif] [dejavu] [\s!default]
+ \definetypeface [\typescriptone] [\s!tt] [\s!mono] [dejavu] [\s!default]
+ \definetypeface [\typescriptone] [\s!mm] [\s!math] [xits] [\s!default] [\s!rscale=1.2]
+ \stoptypescript
+
+\stoptypescriptcollection
diff --git a/tex/context/base/type-imp-texgyre.mkiv b/tex/context/base/type-imp-texgyre.mkiv
index 24185f41d..247b4ef21 100644
--- a/tex/context/base/type-imp-texgyre.mkiv
+++ b/tex/context/base/type-imp-texgyre.mkiv
@@ -153,7 +153,7 @@
\definetypeface [\typescriptone] [\s!rm] [\s!serif] [\typescriptone] [\s!default]
\definetypeface [\typescriptone] [\s!ss] [\s!sans] [helvetica] [\s!default] [\s!rscale=0.9]
\definetypeface [\typescriptone] [\s!tt] [\s!mono] [modern] [\s!default] [\s!rscale=1.05]
- \definetypeface [\typescriptone] [\s!mm] [\s!math] [times] [\s!default]
+ \definetypeface [\typescriptone] [\s!mm] [\s!math] [termes] [\s!default]
\quittypescriptscanning
\stoptypescript
@@ -161,7 +161,7 @@
\definetypeface [\typescriptone] [\s!rm] [\s!serif] [\typescriptone] [\s!default]
\definetypeface [\typescriptone] [\s!ss] [\s!sans] [modern] [\s!default] [\s!rscale=1.075]
\definetypeface [\typescriptone] [\s!tt] [\s!mono] [modern] [\s!default] [\s!rscale=1.075]
- \definetypeface [\typescriptone] [\s!mm] [\s!math] [palatino] [\s!default]
+ \definetypeface [\typescriptone] [\s!mm] [\s!math] [pagella] [\s!default]
\quittypescriptscanning
\stoptypescript
@@ -169,7 +169,7 @@
\definetypeface [\typescriptone] [\s!rm] [\s!serif] [\typescriptone] [\s!default]
\definetypeface [\typescriptone] [\s!ss] [\s!sans] [modern] [\s!default] [\s!rscale=1.1]
\definetypeface [\typescriptone] [\s!tt] [\s!mono] [modern] [\s!default] [\s!rscale=1.1]
- \definetypeface [\typescriptone] [\s!mm] [\s!math] [modern] [\s!default] [\s!rscale=1.1]
+ \definetypeface [\typescriptone] [\s!mm] [\s!math] [schola] [\s!default]
\quittypescriptscanning
\stoptypescript
@@ -277,3 +277,12 @@
\stoptypescript
\stoptypescriptcollection
+
+\starttypescriptcollection[texgyre-math-schola]
+
+ \starttypescript [\s!math][schoolbook,schola][\s!all]
+ \loadfontgoodies[texgyre]
+ \definefontsynonym[\s!MathRoman][file:texgyre-schola-math-regular.otf][\s!features=\s!math\mathsizesuffix]
+ \stoptypescript
+
+\stoptypescriptcollection
diff --git a/tex/context/base/type-ini.lua b/tex/context/base/type-ini.lua
index 9ee97acae..cd5b32d3e 100644
--- a/tex/context/base/type-ini.lua
+++ b/tex/context/base/type-ini.lua
@@ -6,15 +6,23 @@ if not modules then modules = { } end modules ['type-ini'] = {
license = "see context related readme files"
}
+local gsub = string.gsub
+local lpegmatch, P, Cs = lpeg.match, lpeg.P, lpeg.Cs
+
-- more code will move here
-local commands, context = commands, context
+local commands = commands
+local context = context
+local implement = interfaces.implement
-local gsub = string.gsub
+local uselibrary = resolvers.uselibrary
-local report_typescripts = logs.reporter("fonts","typescripts")
+local name_one = nil
+local name_two = nil
-local patterns = { "type-imp-%s.mkiv", "type-imp-%s.tex", "type-%s.mkiv", "type-%s.tex" } -- this will be imp only
+local p_strip = Cs((P("type-") * (P("imp-")^0))^0/"" * P(1)^0)
+
+local report = logs.reporter("fonts","typescripts")
local function action(name,foundname)
-- context.startreadingfile()
@@ -27,50 +35,83 @@ local function action(name,foundname)
context.loadfoundtypescriptfile(foundname)
end
-local name_one, name_two
+local patterns = {
+ "type-imp-%s.mkiv",
+ "type-imp-%s.tex"
+}
-local function failure_two(name)
- report_typescripts("unknown library %a or %a",name_one,name_two)
+local function failure(name)
+ if name == "loc" then
+ -- ignore
+ else
+ report("unknown library %a",name)
+ end
end
-local function failure_one(name)
- name_two = gsub(name,"%-.*$","")
- if name_two == name then
- report_typescripts("unknown library %a",name_one)
- else
- commands.uselibrary {
- name = name_two,
+implement {
+ name = "loadtypescriptfile",
+ arguments = "string",
+ actions = function(name) -- a more specific name
+ uselibrary {
+ name = lpegmatch(p_strip,name) or name,
patterns = patterns,
action = action,
- failure = failure_two,
+ failure = failure,
onlyonce = false, -- will become true
}
end
-end
-
-function commands.doprocesstypescriptfile(name)
- name_one = gsub(name,"^type%-","")
- commands.uselibrary {
- name = name_one,
- patterns = patterns,
- action = action,
- failure = failure_one,
- onlyonce = false, -- will become true
- }
-end
+}
-local patterns = { "type-imp-%s.mkiv", "type-imp-%s.tex" }
+local patterns = {
+ "type-imp-%s.mkiv",
+ "type-imp-%s.tex",
+ -- obsolete
+ "type-%s.mkiv",
+ "type-%s.tex"
+}
-local function failure(name)
- report_typescripts("unknown library %a",name)
-end
+-- local function failure_two(name)
+-- report("unknown library %a or %a",name_one,name_two)
+-- end
+--
+-- local function failure_one(name)
+-- name_two = gsub(name,"%-.*$","")
+-- if name == "loc" then
+-- -- ignore
+-- elseif name_two == name then
+-- report("unknown library %a",name_one)
+-- else
+-- commands.uselibrary {
+-- name = name_two,
+-- patterns = patterns,
+-- action = action,
+-- failure = failure_two,
+-- onlyonce = false, -- will become true
+-- }
+-- end
+-- end
+--
+-- function commands.doprocesstypescriptfile(name)
+-- name_one = lpegmatch(p_strip,name) or name
+-- uselibrary {
+-- name = name_one,
+-- patterns = patterns,
+-- action = action,
+-- failure = failure_one,
+-- onlyonce = false, -- will become true
+-- }
+-- end
-function commands.loadtypescriptfile(name) -- a more specific name
- commands.uselibrary {
- name = gsub(name,"^type%-",""),
- patterns = patterns,
- action = action,
- failure = failure,
- onlyonce = false, -- will become true
- }
-end
+implement {
+ name = "doprocesstypescriptfile",
+ arguments = "string",
+ actions = function(name)
+ uselibrary {
+ name = lpegmatch(p_strip,name) or name,
+ patterns = patterns,
+ action = action,
+ failure = failure,
+ onlyonce = false, -- will become true
+ }
+ end
+}
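
The p_strip pattern above replaces the old gsub based prefix handling: it strips a leading "type-" or "type-imp-" from the requested name before the file patterns are tried. Below is a standalone sketch of that behaviour; only the pattern itself is taken from the patch, the surrounding loop and the test names are illustrative and assume nothing more than plain Lua with the lpeg module installed.

    -- Standalone sketch of the prefix stripping done by p_strip above.
    local lpeg = require("lpeg") -- assumes the lpeg module is available
    local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match

    local p_strip = Cs((P("type-") * (P("imp-")^0))^0/"" * P(1)^0)

    for _, name in ipairs { "type-imp-texgyre", "type-texgyre", "texgyre" } do
        print(name, lpegmatch(p_strip,name) or name) -- all three resolve to "texgyre"
    end
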
diff --git a/tex/context/base/type-ini.mkvi b/tex/context/base/type-ini.mkvi
index a4d576d80..f56c5573d 100644
--- a/tex/context/base/type-ini.mkvi
+++ b/tex/context/base/type-ini.mkvi
@@ -172,7 +172,7 @@
\unexpanded\def\loadtypescriptfile[#1]%
{\pushmacro\typescriptstate
\let\typescriptstate\plustwo % assumes 2 at the outer level
- \ctxcommand{loadtypescriptfile("#1")}%
+ \clf_loadtypescriptfile{#1}%
\popmacro\typescriptstate}
\unexpanded\def\loadfoundtypescriptfile#1%
@@ -237,7 +237,7 @@
\expandafter\let\csname\??typescriptfiles\currenttypefile\endcsname\t_font_typescripts}
\def\font_typescript_process_typescript_file
- {\ctxcommand{doprocesstypescriptfile("\currenttypefile")}}
+ {\clf_doprocesstypescriptfile{\currenttypefile}}
\unexpanded\def\usetypescriptonce
{\dotripleempty\font_typescripts_use_once}
@@ -294,12 +294,12 @@
%{\appendtoks\starttypescript#definitions\stoptypescript\to\c_font_typescripts_document}
{\c_font_typescripts_document\expandafter{\the\c_font_typescripts_document\starttypescript#definitions\stoptypescript}}
-\def\font_typescripts_start_process % could be a faster \doifnextoptionalelse if needed
+\def\font_typescripts_start_process % could be a faster \doifelsenextoptionalcs if needed
{\let\typescriptone \m_font_typescripts_one
\let\typescripttwo \m_font_typescripts_two
\let\typescriptthree\m_font_typescripts_three
\let\m_font_typescripts_match\empty
- \doifnextoptionalelse\font_typescripts_start_process_one\font_typescripts_start_process_all}
+ \doifelsenextoptionalcs\font_typescripts_start_process_one\font_typescripts_start_process_all}
\def\font_typescripts_start_process_all % could be a \let
{\ifconditional\c_font_typescripts_first_pass
@@ -333,10 +333,10 @@
{\font_typescripts_check\m_font_typescripts_three\typescriptthree\font_typescripts_start_process_again_three}
\def\font_typescripts_start_process_again_one
- {\doifnextoptionalelse\font_typescripts_start_process_two\font_typescripts_start_process_yes}
+ {\doifelsenextoptionalcs\font_typescripts_start_process_two\font_typescripts_start_process_yes}
\def\font_typescripts_start_process_again_two
- {\doifnextoptionalelse\font_typescripts_start_process_three\font_typescripts_start_process_yes}
+ {\doifelsenextoptionalcs\font_typescripts_start_process_three\font_typescripts_start_process_yes}
\let\font_typescripts_start_process_again_three\font_typescripts_start_process_yes
@@ -359,7 +359,7 @@
\donetrue
\let#target\m_font_typescripts_check
\else
- \doifcommonelse\m_font_typescripts_check#asked\donetrue\donefalse
+ \doifelsecommon\m_font_typescripts_check#asked\donetrue\donefalse
\ifdone
\let#target\commalistelement
\fi
@@ -381,17 +381,17 @@
\unexpanded\def\loadmapline{\dodoubleempty\font_map_load_line}
\def\font_map_load_file[#filename]%
- {\ctxlua{fonts.mappings.loadfile("#filename")}}
+ {\clf_loadmapfile{#filename}}
\def\font_map_load_line[#kind][#data]%
- {\ctxlua{fonts.mappings.loadline("#kind","#data")}}
+ {\clf_loadmapline{#kind}{#data}}
\unexpanded\def\forgetmapfiles
- {\ctxlua{fonts.mappings.reset()}}
+ {\clf_resetmapfiles}
-\prependtoks
- \loadmapfile[mkiv-base.map]% can't we preload this one?
-\to \everystarttext
+% \prependtoks
+% \loadmapfile[mkiv-base.map]% can't we preload this one?
+% \to \everystarttext
%D A handy shortcut:
@@ -497,7 +497,7 @@
{\font_typefaces_define_indeed[#name][#style]}
\def\font_typefaces_define_d[#name][#specification][#dummya][#dummyb][#dummyc][#dummyd]% use definitions in lfg file
- {\ctxlua{fonts.definetypeface("#name",\!!bs#specification\!!es)}}
+ {\clf_definetypeface{#name}{#specification}}
\def\font_typefaces_define_indeed[#name][#style]% safeguard against redefinition
{\doifsomething{#name}
@@ -575,12 +575,6 @@
\fi \fi \fi
\ifmmode\mr\else\tf\fi} % needed ?
-% obsolete
-%
-% \unexpanded\def\usetypefile[#type]% recurses on path ! % no storage
-% {\edef\currenttypefile{#type}%
-% \ctxcommand{doprocesstypescriptfile("\currenttypefile")}}
-
%D For Taco:
%D
%D \starttyping
diff --git a/tex/context/base/type-run.mkiv b/tex/context/base/type-run.mkiv
index 4da633371..0455d8966 100644
--- a/tex/context/base/type-run.mkiv
+++ b/tex/context/base/type-run.mkiv
@@ -21,7 +21,7 @@
\def\dochecktypescript##1##2% script use
{\doifelsenothing{##1##2}
{\donetrue}
- {\doifcommonelse{##1}{##2}\donetrue\donefalse}}
+ {\doifelsecommon{##1}{##2}\donetrue\donefalse}}
\edef\typescriptone {\truetypescript{#1}}%
\edef\typescripttwo {\truetypescript{#2}}%
\edef\typescriptthree{\truetypescript{#3}}%
@@ -30,7 +30,7 @@
\obeylines % else we lose the first line due to lookahead
\dotripleempty\dostarttypescript}
\def\dostarttypescript[##1][##2][##3]%
- {\long\def\next####1\stoptypescript{\egroup}
+ {\def\next####1\stoptypescript{\egroup}
\dochecktypescript{##1}\typescriptone \ifdone
\dochecktypescript{##2}\typescripttwo \ifdone
\dochecktypescript{##3}\typescriptthree\ifdone
diff --git a/tex/context/base/typo-bld.lua b/tex/context/base/typo-bld.lua
index bc9f66ee4..4d0f28d9a 100644
--- a/tex/context/base/typo-bld.lua
+++ b/tex/context/base/typo-bld.lua
@@ -6,9 +6,12 @@ if not modules then modules = { } end modules ['typo-bld'] = { -- was node-par
license = "see context related readme files"
}
+-- no need for nuts in the one-line demo (that might move anyway)
+
local insert, remove = table.insert, table.remove
-local builders, nodes, node = builders, nodes, node
+builders = builders or { }
+local builders = builders
builders.paragraphs = builders.paragraphs or { }
local parbuilders = builders.paragraphs
@@ -33,11 +36,12 @@ local texsetattribute = tex.setattribute
local texnest = tex.nest
local texlists = tex.lists
+local nodes = nodes
local nodepool = nodes.pool
local new_baselineskip = nodepool.baselineskip
local new_lineskip = nodepool.lineskip
-local insert_node_before = node.insert_before
-local hpack_node = node.hpack
+local insert_node_before = nodes.insert_before
+local hpack_node = nodes.hpack
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
@@ -161,14 +165,8 @@ local function processor(head,followed_by_display)
end
end
-function constructors.enable()
- enabled = true
-end
-
-function constructors.disable()
- enabled = false
-end
-
+function constructors.enable () enabled = true end
+function constructors.disable() enabled = false end
callbacks.register('linebreak_filter', processor, "breaking paragraphs into lines")
@@ -176,15 +174,6 @@ statistics.register("linebreak processing time", function()
return statistics.elapsedseconds(parbuilders)
end)
--- interface
-
-commands.defineparbuilder = constructors.define
-commands.startparbuilder = constructors.start
-commands.stopparbuilder = constructors.stop
-commands.setparbuilder = constructors.set
-commands.enableparbuilder = constructors.enable
-commands.disableparbuilder = constructors.disable
-
-- todo: move from nodes.builders to builders
nodes.builders = nodes.builder or { }
@@ -226,7 +215,16 @@ local function report(groupcode,head)
report_page_builder(" list : %s",head and nodeidstostring(head) or "")
end
+-- use tex.[sg]etlist
+
function builders.buildpage_filter(groupcode)
+ -- -- this needs checking .. gets called too often
+ -- if group_code ~= "after_output" then
+ -- if trace_page_builder then
+ -- report(groupcode)
+ -- end
+ -- return nil, false
+ -- end
local head, done = texlists.contrib_head, false
if head then
starttiming(builders)
@@ -237,14 +235,16 @@ function builders.buildpage_filter(groupcode)
stoptiming(builders)
-- -- doesn't work here (not passed on?)
-- tex.pagegoal = tex.vsize - tex.dimen.d_page_floats_inserted_top - tex.dimen.d_page_floats_inserted_bottom
- texlists.contrib_head = head
- return done and head or true
+ texlists.contrib_head = head or nil -- needs checking
+-- tex.setlist("contrib_head",head,head and nodes.tail(head))
+ return done and head or true -- no return value needed
else
if trace_page_builder then
report(groupcode)
end
- return nil, false
+ return nil, false -- no return value needed
end
+
end
callbacks.register('vpack_filter', builders.vpack_filter, "vertical spacing etc")
@@ -253,3 +253,12 @@ callbacks.register('buildpage_filter', builders.buildpage_filter, "vertical spac
statistics.register("v-node processing time", function()
return statistics.elapsedseconds(builders)
end)
+
+local implement = interfaces.implement
+
+implement { name = "defineparbuilder", actions = constructors.define, arguments = "string" }
+implement { name = "setparbuilder", actions = constructors.set, arguments = "string" }
+implement { name = "startparbuilder", actions = constructors.start, arguments = "string" }
+implement { name = "stopparbuilder", actions = constructors.stop }
+implement { name = "enableparbuilder", actions = constructors.enable }
+implement { name = "disableparbuilder", actions = constructors.disable }
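
The commands.* assignments that used to back the \ctxcommand calls are now registered through interfaces.implement, which generates the \clf_... callers used at the TeX end. The snippet below is a minimal sketch of that registration pattern, not the real implementation: the stand-in for interfaces.implement exists only so the shape can be read (or run) outside ConTeXt, and the argument scanning is simplified.

    -- Sketch of the commands-to-implement migration used throughout this patch.
    interfaces = interfaces or { }
    interfaces.implement = interfaces.implement or function(specification)
        -- the real one builds a \clf_<name> caller that scans 'arguments'
        -- and then invokes 'actions'; here we only report the registration
        print("registered", specification.name, "arguments", tostring(specification.arguments))
    end

    local constructors = { }

    function constructors.define(name)
        print("defining parbuilder", name)
    end

    interfaces.implement {
        name      = "defineparbuilder",
        actions   = constructors.define,
        arguments = "string", -- at the TeX end: \clf_defineparbuilder{basic}
    }
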
diff --git a/tex/context/base/typo-bld.mkiv b/tex/context/base/typo-bld.mkiv
index 10502005b..69047c98b 100644
--- a/tex/context/base/typo-bld.mkiv
+++ b/tex/context/base/typo-bld.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\writestatus{loading}{ConTeXt Node Macros / Paragraph Building}
+\writestatus{loading}{ConTeXt Typesetting Macros / Paragraph Building}
%D This is very experimental, undocumented, subjected to changes, etc. just as
%D the underlying interfaces. But at least it's cleaned as part of the status-mkiv
@@ -38,18 +38,18 @@
\installcorenamespace {parbuilder}
\unexpanded\def\defineparbuilder[#1]%
- {\ctxcommand{defineparbuilder("#1")}}
+ {\clf_defineparbuilder{#1}}
\unexpanded\def\startparbuilder[#1]%
{\ifhmode\par\fi
- \ctxcommand{startparbuilder("#1")}}
+ \clf_startparbuilder{#1}}
\unexpanded\def\stopparbuilder
{\ifhmode\par\fi
- \ctxcommand{stopparbuilder()}}
+ \clf_stopparbuilder}
\unexpanded\def\setmainparbuilder[#1]%
- {\ctxcommand{setparbuilder("#1")}}
+ {\clf_setparbuilder{#1}}
% no high level interface, after all implementing a linebreaker is not something that
% the average user will do
@@ -58,7 +58,7 @@
\defineparbuilder[oneline] % just for testing
\defineparbuilder[basic] % just for testing
-\def\enableparbuilders {\ctxcommand{enableparbuilder()}} % hooks in otr so we need to pickup
-\def\disableparbuilders{\ctxcommand{disableparbuilder()}} % hooks in otr so we need to pickup
+\unexpanded\def\enableparbuilders {\clf_enableparbuilder } % hooks in otr so we need to pickup
+\unexpanded\def\disableparbuilders{\clf_disableparbuilder} % hooks in otr so we need to pickup
\protect \endinput
diff --git a/tex/context/base/typo-brk.lua b/tex/context/base/typo-brk.lua
index 3558efa8e..a9d775856 100644
--- a/tex/context/base/typo-brk.lua
+++ b/tex/context/base/typo-brk.lua
@@ -20,23 +20,41 @@ local report_breakpoints = logs.reporter("typesetting","breakpoints")
local nodes, node = nodes, node
local settings_to_array = utilities.parsers.settings_to_array
-local copy_node = node.copy
-local copy_nodelist = node.copy_list
-local free_node = node.free
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local remove_node = nodes.remove -- ! nodes
-local tonodes = nodes.tonodes
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+local getfont = nuts.getfont
+local getid = nuts.getid
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local copy_node = nuts.copy
+local copy_nodelist = nuts.copy_list
+local free_node = nuts.free
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local remove_node = nuts.remove
+
+local tonodes = nuts.tonodes
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local tasks = nodes.tasks
local v_reset = interfaces.variables.reset
+local implement = interfaces.implement
+
local new_penalty = nodepool.penalty
local new_glue = nodepool.glue
local new_disc = nodepool.disc
@@ -80,74 +98,86 @@ local function insert_break(head,start,before,after)
end
methods[1] = function(head,start)
- if start.prev and start.next then
+ if getprev(start) and getnext(start) then
insert_break(head,start,10000,0)
end
return head, start
end
methods[2] = function(head,start) -- ( => (-
- if start.prev and start.next then
+ if getprev(start) and getnext(start) then
local tmp
head, start, tmp = remove_node(head,start)
head, start = insert_node_before(head,start,new_disc())
- start.attr = copy_nodelist(tmp.attr) -- todo: critical only
- start.replace = tmp
- local tmp, hyphen = copy_node(tmp), copy_node(tmp)
- hyphen.char = languages.prehyphenchar(tmp.lang)
- tmp.next, hyphen.prev = hyphen, tmp
- start.post = tmp
+ -- setfield(start,"attr",copy_nodelist(getfield(tmp,"attr"))) -- just a copy will do
+ setfield(start,"attr",getfield(tmp,"attr"))
+ setfield(start,"replace",tmp)
+ local tmp = copy_node(tmp)
+ local hyphen = copy_node(tmp)
+ setfield(hyphen,"char",languages.prehyphenchar(getfield(tmp,"lang")))
+ setfield(tmp,"next",hyphen)
+ setfield(hyphen,"prev",tmp)
+ setfield(start,"post",tmp)
insert_break(head,start,10000,10000)
end
return head, start
end
methods[3] = function(head,start) -- ) => -)
- if start.prev and start.next then
+ if getprev(start) and getnext(start) then
local tmp
head, start, tmp = remove_node(head,start)
head, start = insert_node_before(head,start,new_disc())
- start.attr = copy_nodelist(tmp.attr) -- todo: critical only
- start.replace = tmp
- local tmp, hyphen = copy_node(tmp), copy_node(tmp)
- hyphen.char = languages.prehyphenchar(tmp.lang)
- tmp.prev, hyphen.next = hyphen, tmp
- start.pre = hyphen
+ -- setfield(start,"attr",copy_nodelist(getfield(tmp,"attr"))) -- just a copy will do
+ setfield(start,"attr",getfield(tmp,"attr"))
+ setfield(start,"replace",tmp)
+ local tmp = copy_node(tmp)
+ local hyphen = copy_node(tmp)
+ setfield(hyphen,"char",languages.prehyphenchar(getfield(tmp,"lang")))
+ setfield(tmp,"prev",hyphen)
+ setfield(hyphen,"next",tmp)
+ setfield(start,"pre",hyphen)
insert_break(head,start,10000,10000)
end
return head, start
end
methods[4] = function(head,start) -- - => - - -
- if start.prev and start.next then
+ if getprev(start) and getnext(start) then
local tmp
head, start, tmp = remove_node(head,start)
head, start = insert_node_before(head,start,new_disc())
- start.attr = copy_nodelist(tmp.attr) -- todo: critical only
- start.pre, start.post, start.replace = copy_node(tmp), copy_node(tmp), tmp
+ -- setfield(start,"attr",copy_nodelist(getfield(tmp,"attr"))) -- just a copy will do
+ setfield(start,"attr",getfield(tmp,"attr"))
+ setfield(start,"pre",copy_node(tmp))
+ setfield(start,"post",copy_node(tmp))
+ setfield(start,"replace",tmp)
insert_break(head,start,10000,10000)
end
return head, start
end
methods[5] = function(head,start,settings) -- x => p q r
- if start.prev and start.next then
+ if getprev(start) and getnext(start) then
local tmp
head, start, tmp = remove_node(head,start)
head, start = insert_node_before(head,start,new_disc())
- local attr = tmp.attr
- local font = tmp.font
- start.attr = copy_nodelist(attr) -- todo: critical only
- local left, right, middle = settings.left, settings.right, settings.middle
+ local attr = getfield(tmp,"attr")
+ local font = getfont(tmp)
+ local left = settings.left
+ local right = settings.right
+ local middle = settings.middle
if left then
- start.pre = tonodes(tostring(left),font,attr) -- was right
+ setfield(start,"pre",(tonodes(tostring(left),font,attr))) -- was right
end
if right then
- start.post = tonodes(tostring(right),font,attr) -- was left
+ setfield(start,"post",(tonodes(tostring(right),font,attr))) -- was left
end
if middle then
- start.replace = tonodes(tostring(middle),font,attr)
+ setfield(start,"replace",(tonodes(tostring(middle),font,attr)))
end
+ -- setfield(start,"attr",copy_nodelist(attr)) -- todo: critical only -- just a copy will do
+ setfield(start,"attr",attr) -- todo: critical only -- just a copy will do
free_node(tmp)
insert_break(head,start,10000,10000)
end
@@ -155,31 +185,32 @@ methods[5] = function(head,start,settings) -- x => p q r
end
function breakpoints.handler(head)
+ head = tonut(head)
local done, numbers = false, languages.numbers
local start, n = head, 0
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
- local attr = start[a_breakpoints]
+ local attr = getattr(start,a_breakpoints)
if attr and attr > 0 then
- start[a_breakpoints] = unsetvalue -- maybe test for subtype > 256 (faster)
+ setattr(start,a_breakpoints,unsetvalue) -- maybe test for subtype > 256 (faster)
-- look ahead and back n chars
local data = mapping[attr]
if data then
local map = data.characters
- local cmap = map[start.char]
+ local cmap = map[getchar(start)]
if cmap then
- local lang = start.lang
+ local lang = getfield(start,"lang")
-- we do a sanity check for language
local smap = lang and lang >= 0 and lang < 0x7FFF and (cmap[numbers[lang]] or cmap[""])
if smap then
if n >= smap.nleft then
local m = smap.nright
- local next = start.next
+ local next = getnext(start)
while next do -- gamble on same attribute (not that important actually)
- local id = next.id
+ local id = getid(next)
if id == glyph_code then -- gamble on same attribute (not that important actually)
- if map[next.char] then
+ if map[getchar(next)] then
break
elseif m == 1 then
local method = methods[smap.type]
@@ -190,10 +221,10 @@ function breakpoints.handler(head)
break
else
m = m - 1
- next = next.next
+ next = getnext(next)
end
- elseif id == kern_code and next.subtype == kerning_code then
- next = next.next
+ elseif id == kern_code and getsubtype(next) == kerning_code then
+ next = getnext(next)
-- ignore intercharacter kerning, will go away
else
-- we can do clever and set n and jump ahead but ... not now
@@ -214,14 +245,14 @@ function breakpoints.handler(head)
else
-- n = n + 1 -- if we want single char handling (|-|) then we will use grouping and then we need this
end
- elseif id == kern_code and start.subtype == kerning_code then
+ elseif id == kern_code and getsubtype(start) == kerning_code then
-- ignore intercharacter kerning, will go away
else
n = 0
end
- start = start.next
+ start = getnext(start)
end
- return head, done
+ return tonode(head), done
end
local enabled = false
@@ -290,6 +321,32 @@ end
-- interface
-commands.definebreakpoints = breakpoints.define
-commands.definebreakpoint = breakpoints.setreplacement
-commands.setbreakpoints = breakpoints.set
+implement {
+ name = "definebreakpoints",
+ actions = breakpoints.define,
+ arguments = "string"
+}
+
+implement {
+ name = "definebreakpoint",
+ actions = breakpoints.setreplacement,
+ arguments = {
+ "string",
+ "string",
+ "string",
+ {
+ { "type", "integer" },
+ { "nleft" },
+ { "nright" },
+ { "right" },
+ { "left" },
+ { "middle" },
+ }
+ }
+}
+
+implement {
+ name = "setbreakpoints",
+ actions = breakpoints.set,
+ arguments = "string"
+}
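
The handler now follows the usual nuts recipe: convert the incoming head once with tonut, walk the list with the get*/set* accessors, and hand a proper node back with tonode (or the untouched head) when done. A bare-bones sketch of that shape follows; it only makes sense inside LuaTeX/ConTeXt, and the glyph counting is just an example, not part of the patch.

    -- Minimal illustration of the tonut/accessor pattern used above.
    local nuts       = nodes.nuts
    local tonut      = nuts.tonut
    local getid      = nuts.getid
    local getnext    = nuts.getnext
    local glyph_code = nodes.nodecodes.glyph

    local function count_glyphs(head)   -- head: a regular (userdata) node list
        local current = tonut(head)     -- switch to the direct representation once
        local n = 0
        while current do
            if getid(current) == glyph_code then
                n = n + 1
            end
            current = getnext(current)
        end
        return head, n                  -- nothing was changed, so head is returned as-is
    end
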
diff --git a/tex/context/base/typo-brk.mkiv b/tex/context/base/typo-brk.mkiv
index af498bfec..3b463c06f 100644
--- a/tex/context/base/typo-brk.mkiv
+++ b/tex/context/base/typo-brk.mkiv
@@ -25,13 +25,13 @@
\definesystemattribute[breakpoint][public,global]
-\exhyphenchar\minusone % we use a different order then base tex, so we really need this
+% see below: \exhyphenchar \minusone % we use a different order than base tex, so we really need this
\unexpanded\def\definebreakpoints
{\dosingleargument\typo_breakpoints_define}
\def\typo_breakpoints_define[#1]%
- {\ctxcommand{definebreakpoints("#1")}}
+ {\clf_definebreakpoints{#1}}
\def\definebreakpoint
{\dotripleempty\typo_breakpoints_define_character}
@@ -39,18 +39,24 @@
\def\typo_breakpoints_define_character[#1][#2][#3]% name char settings
{\begingroup
\getdummyparameters[\c!type=1,\c!nleft=3,\c!nright=3,\s!language=,\c!left=,\c!right=,\c!middle=,#3]%
- \ctxcommand{definebreakpoint("#1", "#2", "\reallanguagetag{\directdummyparameter\s!language}", { % maybe deal with #3 at the lua end
- type = \directdummyparameter\c!type,
- nleft = "\directdummyparameter\c!nleft",
- nright = "\directdummyparameter\c!nright",
- right = "\directdummyparameter\c!right",
- left = "\directdummyparameter\c!left",
- middle = "\directdummyparameter\c!middle"
- } )}%
+ \clf_definebreakpoint
+ {#1}%
+ {#2}%
+ {\reallanguagetag{\directdummyparameter\s!language}}%
+ {% maybe deal with #3 at the lua end
+ type \directdummyparameter\c!type
+ nleft {\directdummyparameter\c!nleft}%
+ nright {\directdummyparameter\c!nright}%
+ right {\directdummyparameter\c!right}%
+ left {\directdummyparameter\c!left}%
+ middle {\directdummyparameter\c!middle}%
+ }%
+ \relax
\endgroup}
\unexpanded\def\setbreakpoints[#1]%
- {\ctxcommand{setbreakpoints("#1")}}
+ {\exhyphenchar\minusone % we use a different order than base tex, so we really need this
+ \clf_setbreakpoints{#1}}
\unexpanded\def\resetbreakpoints
{\attribute\breakpointattribute\attributeunsetvalue}
diff --git a/tex/context/base/typo-cap.lua b/tex/context/base/typo-cap.lua
index 0fc1a3093..80a74bac9 100644
--- a/tex/context/base/typo-cap.lua
+++ b/tex/context/base/typo-cap.lua
@@ -16,9 +16,26 @@ local report_casing = logs.reporter("typesetting","casing")
local nodes, node = nodes, node
-local copy_node = nodes.copy
-local end_of_math = nodes.end_of_math
-
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local copy_node = nuts.copy
+local end_of_math = nuts.end_of_math
+local traverse_nodes = nuts.traverse
+local traverse_id = nuts.traverse_id
+local insert_after = nuts.insert_after
local nodecodes = nodes.nodecodes
local skipcodes = nodes.skipcodes
@@ -58,6 +75,8 @@ local a_cases = attributes.private("case")
local extract = bit32.extract
local run = 0 -- a trick to make neighbouring ranges work
+local blocked = { }
+
local function set(tag,font)
if run == 2^6 then
run = 1
@@ -90,88 +109,40 @@ end
--
-- \WORD {far too \Word{many \WORD{more \word{pushed} in between} useless} words}
-local uccodes = characters.uccodes
-local lccodes = characters.lccodes
+local uccodes = characters.uccodes
+local lccodes = characters.lccodes
+local categories = characters.categories
-- true false true == mixed
-local function helper(start,attr,lastfont,n,codes,special,once,keepother)
- local char = start.char
+local function replacer(start,codes)
+ local char = getchar(start)
local dc = codes[char]
if dc then
- local fnt = start.font
- if keepother and dc == char then
- local lfa = lastfont[n]
- if lfa then
- start.font = lfa
- return start, true
- else
- return start, false
- end
- else
- if special then
- local lfa = lastfont[n]
- if lfa then
- local previd = start.prev.id
- if previd ~= glyph_code and previd ~= disc_code then
- fnt = lfa
- start.font = lfa
- end
+ local fnt = getfont(start)
+ local ifc = fontchar[fnt]
+ if type(dc) == "table" then
+ for i=1,#dc do
+ if not ifc[dc[i]] then
+ return start, false
end
end
- local ifc = fontchar[fnt]
- if type(dc) == "table" then
- local ok = true
- for i=1,#dc do
- -- could be cached in font
- if not ifc[dc[i]] then
- ok = false
- break
- end
- end
- if ok then
- -- todo: use generic injector
- local prev = start
- local original = start
- for i=1,#dc do
- local chr = dc[i]
- prev = start
- if i == 1 then
- start.char = chr
- else
- local g = copy_node(original)
- g.char = chr
- local next = start.next
- g.prev = start
- if next then
- g.next = next
- start.next = g
- next.prev = g
- end
- start = g
- end
- end
- if once then
- lastfont[n] = false
- end
- return prev, true
- end
- if once then
- lastfont[n] = false
+ for i=#dc,1,-1 do
+ local chr = dc[i]
+ if i == 1 then
+ setfield(start,"char",chr)
+ else
+ local g = copy_node(start)
+ setfield(g,"char",chr)
+ insert_after(start,start,g)
end
- return start, false
- elseif ifc[dc] then
- start.char = dc
- if once then
- lastfont[n] = false
- end
- return start, true
end
+ return start, true
+ elseif ifc[dc] then
+ setfield(start,"char",dc)
+ return start, true
end
end
- if once then
- lastfont[n] = false
- end
return start, false
end
@@ -192,148 +163,156 @@ end
cases.register = register
-local function WORD(start,attr,lastfont,n)
+local function WORD(start,attr,lastfont,n,count,where,first)
lastfont[n] = false
- return helper(start,attr,lastfont,n,uccodes)
+ return replacer(first or start,uccodes)
end
-local function word(start,attr,lastfont,n)
+local function word(start,attr,lastfont,n,count,where,first)
lastfont[n] = false
- return helper(start,attr,lastfont,n,lccodes)
-end
-
-local function blockrest(start)
- local n = start.next
- while n do
- local id = n.id
- if id == glyph_code or id == disc_node and n[a_cases] == attr then
- n[a_cases] = unsetvalue
- else
- -- break -- we can have nested mess
- end
- n = n.next
- end
+ return replacer(first or start,lccodes)
end
-local function Word(start,attr,lastfont,n) -- looks quite complex
- lastfont[n] = false
- local prev = start.prev
- if prev and prev.id == kern_code and prev.subtype == kerning_code then
- prev = prev.prev
- end
- if not prev then
- blockrest(start)
- return helper(start,attr,lastfont,n,uccodes)
+local function Words(start,attr,lastfont,n,count,where,first) -- looks quite complex
+ if where == "post" then
+ return
end
- local previd = prev.id
- if previd ~= glyph_code and previd ~= disc_code then
- -- only the first character is treated
- blockrest(start)
- -- we could return the last in the range and save some scanning
- -- but why bother
- return helper(start,attr,lastfont,n,uccodes)
+ if count == 1 and where ~= "post" then
+ replacer(first or start,uccodes)
+ return start, true, true
else
- return start, false
+ return start, false, true
end
end
-local function Words(start,attr,lastfont,n)
- lastfont[n] = false
- local prev = start.prev
- if prev and prev.id == kern_code and prev.subtype == kerning_code then
- prev = prev.prev
- end
- if not prev then
- return helper(start,attr,lastfont,n,uccodes)
+local function Word(start,attr,lastfont,n,count,where,first)
+ blocked[attr] = true
+ return Words(start,attr,lastfont,n,count,where,first)
+end
+
+local function camel(start,attr,lastfont,n,count,where,first)
+ local _, done_1 = word(start,attr,lastfont,n,count,where,first)
+ local _, done_2 = Words(start,attr,lastfont,n,count,where,first)
+ return start, done_1 or done_2, true
+end
+
+local function mixed(start,attr,lastfont,n,count,where,first)
+ if where == "post" then
+ return
end
- local previd = prev.id
- if previd ~= glyph_code and previd ~= disc_code then
- return helper(start,attr,lastfont,n,uccodes)
+ local used = first or start
+ local char = getchar(used)
+ local dc = uccodes[char]
+ if not dc then
+ return start, false, true
+ elseif dc == char then
+ local lfa = lastfont[n]
+ if lfa then
+ setfield(used,"font",lfa)
+ return start, true, true
+ else
+ return start, false, true
+ end
else
- return start, false
+ replacer(first or start,uccodes)
+ return start, true, true
end
end
-local function capital(start,attr,lastfont,n) -- 3
- return helper(start,attr,lastfont,n,uccodes,true,true)
-end
-
-local function Capital(start,attr,lastfont,n) -- 4
- return helper(start,attr,lastfont,n,uccodes,true,false)
+local function Capital(start,attr,lastfont,n,count,where,first,once) -- 3
+ local used = first or start
+ if count == 1 and where ~= "post" then
+ local lfa = lastfont[n]
+ if lfa then
+ local dc = uccodes[getchar(used)]
+ if dc then
+ setfield(used,"font",lfa)
+ end
+ end
+ end
+ local s, d, c = replacer(first or start,uccodes)
+ if once then
+ lastfont[n] = false -- here
+ end
+ return start, d, c
end
-local function mixed(start,attr,lastfont,n)
- return helper(start,attr,lastfont,n,uccodes,false,false,true)
+local function capital(start,attr,lastfont,n,count,where,first) -- 4
+ return Capital(start,attr,lastfont,n,count,where,first,true)
end
-local function none(start,attr,lastfont,n)
- return start, false
+local function none(start,attr,lastfont,n,count,where,first)
+ return start, false, true
end
-local function random(start,attr,lastfont,n)
+local function random(start,attr,lastfont,n,count,where,first)
+ local used = first or start
+ local char = getchar(used)
+ local font = getfont(used)
+ local tfm = fontchar[font]
lastfont[n] = false
- local ch = start.char
- local tfm = fontchar[start.font]
- if lccodes[ch] then
+ local kind = categories[char]
+ if kind == "lu" then
while true do
- local d = chardata[randomnumber(1,0xFFFF)]
- if d then
- local uc = uccodes[d]
- if uc and tfm[uc] then -- this also intercepts tables
- start.char = uc
- return start, true
- end
+ local n = randomnumber(0x41,0x5A)
+ if tfm[n] then -- this also intercepts tables
+ setfield(used,"char",n)
+ return start, true
end
end
- elseif uccodes[ch] then
+ elseif kind == "ll" then
while true do
- local d = chardata[randomnumber(1,0xFFFF)]
- if d then
- local lc = lccodes[d]
- if lc and tfm[lc] then -- this also intercepts tables
- start.char = lc
- return start, true
- end
+ local n = randomnumber(0x61,0x7A)
+ if tfm[n] then -- this also intercepts tables
+ setfield(used,"char",n)
+ return start, true
end
end
end
return start, false
end
-register(variables.WORD, WORD) -- 1
-register(variables.word, word) -- 2
-register(variables.Word, Word) -- 3
-register(variables.Words, Words) -- 4
-register(variables.capital, capital) -- 5
-register(variables.Capital, Capital) -- 6
-register(variables.none, none) -- 7 (dummy)
-register(variables.random, random) -- 8
-register(variables.mixed, mixed) -- 9
+register(variables.WORD, WORD) -- 1
+register(variables.word, word) -- 2
+register(variables.Word, Word) -- 3
+register(variables.Words, Words) -- 4
+register(variables.capital,capital) -- 5
+register(variables.Capital,Capital) -- 6
+register(variables.none, none) -- 7 (dummy)
+register(variables.random, random) -- 8
+register(variables.mixed, mixed) -- 9
+register(variables.camel, camel) -- 10
-register(variables.cap, variables.capital) -- clone
-register(variables.Cap, variables.Capital) -- clone
+register(variables.cap, variables.capital) -- clone
+register(variables.Cap, variables.Capital) -- clone
function cases.handler(head) -- not real fast but also not used on much data
local lastfont = { }
local lastattr = nil
local done = false
- local start = head
+ local start = tonut(head)
+ local count = 0
+ local previd = nil
+ local prev = nil
while start do -- while because start can jump ahead
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
- local attr = start[a_cases]
- if attr and attr > 0 then
+ local attr = getattr(start,a_cases)
+ if attr and attr > 0 and not blocked[attr] then
if attr ~= lastattr then
lastattr = attr
+ count = 1
+ else
+ count = count + 1
end
- start[a_cases] = unsetvalue
+ setattr(start,a_cases,unsetvalue)
local n, id, m = get(attr)
if lastfont[n] == nil then
lastfont[n] = id
end
local action = actions[n] -- map back to low number
if action then
- start, ok = action(start,attr,lastfont,n)
+ start, ok = action(start,attr,lastfont,n,count)
if ok then
done = true
end
@@ -345,42 +324,88 @@ function cases.handler(head) -- not real fast but also not used on much data
end
end
elseif id == disc_code then
- local attr = start[a_cases]
- if attr and attr > 0 then
+ local attr = getattr(start,a_cases)
+ if attr and attr > 0 and not blocked[attr] then
if attr ~= lastattr then
lastattr = attr
+ count = 0
end
- start[a_cases] = unsetvalue
+ setattr(start,a_cases,unsetvalue)
local n, id, m = get(attr)
if lastfont[n] == nil then
lastfont[n] = id
end
local action = actions[n] -- map back to low number
if action then
- local replace = start.replace
+ local replace = getfield(start,"replace")
if replace then
- action(replace,attr,lastfont,n)
+ local cnt = count
+ for g in traverse_id(glyph_code,replace) do
+ cnt = cnt + 1
+ -- setattr(g,a_cases,unsetvalue)
+ local _, _, quit = action(start,attr,lastfont,n,cnt,"replace",g)
+ if quit then break end
+ end
end
- local pre = start.pre
+ local pre = getfield(start,"pre")
if pre then
- action(pre,attr,lastfont,n)
+ local cnt = count
+ for g in traverse_id(glyph_code,pre) do
+ cnt = cnt + 1
+ -- setattr(g,a_cases,unsetvalue)
+ local _, _, quit = action(start,attr,lastfont,n,cnt,"pre",g)
+ if quit then break end
+ end
end
- local post = start.post
+ local post = getfield(start,"post")
if post then
- action(post,attr,lastfont,n)
+ local cnt = count
+ for g in traverse_id(glyph_code,post) do
+ cnt = cnt + 1
+ -- setattr(g,a_cases,unsetvalue)
+ local _, _, quit = action(start,attr,lastfont,n,cnt,"post",g)
+ if quit then break end
+ end
end
end
+ count = count + 1
end
elseif id == math_code then
start = end_of_math(start)
+ count = 0
+ elseif previd == kern_code and getsubtype(prev) == kerning_code then
+ -- still inside a word ... normally kerns are added later
+ else
+ count = 0
end
- if start then -- why test
- start = start.next
+ if start then
+ prev = start
+ previd = id
+ start = getnext(start)
end
end
return head, done
end
+-- function cases.handler(head) -- let's assume head doesn't change ... no reason
+-- local done = false
+-- local lastfont = { }
+-- for first, last, size, attr in nuts.words(tonut(head),a_cases) do
+-- local n, id, m = get(attr)
+-- if lastfont[n] == nil then
+-- lastfont[n] = id
+-- end
+-- local action = actions[n]
+-- if action then
+-- local _, ok = action(first,attr,lastfont,n)
+-- if ok then
+-- done = true
+-- end
+-- end
+-- end
+-- return head, done
+-- end
+
local enabled = false
function cases.set(n,id)
@@ -407,4 +432,8 @@ end
-- interface
-commands.setcharactercasing = cases.set
+interfaces.implement {
+ name = "setcharactercasing",
+ actions = cases.set,
+ arguments = { "string", "integer" }
+}
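
The table branch in replacer exists because characters.uccodes can map one character onto several, the classic case being the sharp s expanding to SS; in that case extra glyph nodes are copied in with insert_after. The pure Lua illustration below shows only the mapping logic, with a tiny stand-in for the real uccodes table; it assumes Lua 5.3 or a LuaTeX with the utf8 library for utf8.char.

    -- Stand-in data, not the real characters.uccodes.
    local uccodes = {
        [0x0061] = 0x0041,             -- a -> A
        [0x00DF] = { 0x0053, 0x0053 }, -- sharp s -> SS (one-to-many, hence the table branch)
    }

    local utf8char = utf8.char

    local function upgrade(codepoint)
        local dc = uccodes[codepoint]
        if not dc then
            return utf8char(codepoint)
        elseif type(dc) == "table" then
            local t = { }
            for i=1,#dc do             -- mirrors the copy_node/insert_after loop
                t[i] = utf8char(dc[i])
            end
            return table.concat(t)
        else
            return utf8char(dc)
        end
    end

    print(upgrade(0x0061), upgrade(0x00DF)) -- A   SS
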
diff --git a/tex/context/base/typo-cap.mkiv b/tex/context/base/typo-cap.mkiv
index c4458129f..2859ba104 100644
--- a/tex/context/base/typo-cap.mkiv
+++ b/tex/context/base/typo-cap.mkiv
@@ -43,6 +43,7 @@
\definecapitals[\v!WORD] % all lower
\definecapitals[\v!Word] % one upper + font
\definecapitals[\v!Words] % some upper
+\definecapitals[\v!camel] % lowers first
\definecapitals[\v!word][\c!style=] % nothing
%D \macros
@@ -76,7 +77,7 @@
% test \Word{test TEST \TeX} test
\unexpanded\def\setcharactercasing[#1]%
- {\ctxcommand{setcharactercasing("#1",\number\fontid\font)}}
+ {\clf_setcharactercasing{#1}\fontid\font}
% todo: names casings
@@ -84,6 +85,7 @@
\unexpanded\def\word {\groupedcommand{\setcharactercasing[\v!word ]}{}}
\unexpanded\def\Word {\groupedcommand{\setcharactercasing[\v!Word ]}{}}
\unexpanded\def\Words{\groupedcommand{\setcharactercasing[\v!Words]}{}}
+\unexpanded\def\camel{\groupedcommand{\setcharactercasing[\v!camel]}{}}
% This might become:
%
@@ -166,16 +168,13 @@
\def\typo_capitals_set_fake#1%
{\edef\currentcapitals{#1}%
- %\setcharactercasing[\currentcapitals]%
- \ctxcommand{setcharactercasing("\currentcapitals",\number\fontid\font)}%
- \signalcharacter % retain current style
+ \clf_setcharactercasing{\currentcapitals}\fontid\font
\usecapitalsstyleparameter\c!style}
\def\typo_capitals_set_real#1%
{\edef\currentcapitals{#1}%
\sc
- %\setcharactercasing[\currentcapitals]}
- \ctxcommand{setcharactercasing("\currentcapitals",\number\fontid\font)}}
+ \clf_setcharactercasing{\currentcapitals}\fontid\font}
\unexpanded\def\pseudosmallcapped{\groupedcommand{\typo_capitals_set_fake\v!WORD }\donothing} % all upper
\unexpanded\def\pseudoSmallcapped{\groupedcommand{\typo_capitals_set_fake\v!capital}\donothing} % one upper + font
diff --git a/tex/context/base/typo-chr.lua b/tex/context/base/typo-chr.lua
new file mode 100644
index 000000000..db8579c84
--- /dev/null
+++ b/tex/context/base/typo-chr.lua
@@ -0,0 +1,251 @@
+if not modules then modules = { } end modules ['typo-chr'] = {
+ version = 1.001,
+ comment = "companion to typo-bld.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- local nodecodes = nodes.nodecodes
+-- local whatsitcodes = nodes.whatsitcodes
+-- local glyph_code = nodecodes.glyph
+-- local whatsit_code = nodecodes.whatsit
+-- local user_code = whatsitcodes.userdefined
+--
+-- local stringusernode = nodes.pool.userstring
+--
+-- local nuts = nodes.nuts
+-- local pool = nuts.pool
+--
+-- local tonut = nuts.tonut
+-- local tonode = nuts.tonode
+-- local getid = nuts.getid
+-- local getprev = nuts.getprev
+-- local getsubtype = nuts.getsubtype
+-- local getchar = nuts.getchar
+-- local getfield = nuts.getfield
+--
+-- local remove_node = nuts.remove
+-- local traverse_by_id = nuts.traverse_id
+--
+-- local signal = pool.userids.signal
+--
+-- local is_punctuation = characters.is_punctuation
+--
+-- local actions = {
+-- removepunctuation = function(head,n)
+-- local prev = getprev(n)
+-- if prev then
+-- if getid(prev) == glyph_code then
+-- if is_punctuation[getchar(prev)] then
+-- head = remove_node(head,prev,true)
+-- end
+-- end
+-- end
+-- return head
+-- end
+-- }
+--
+-- -- we can also use properties .. todo (saves pass)
+--
+-- typesetters.signals = { }
+--
+-- function typesetters.signals.handler(head)
+-- local h = tonut(head)
+-- local done = false
+-- for n in traverse_by_id(whatsit_code,h) do
+-- if getsubtype(n) == user_code and getfield(n,"user_id") == signal and getfield(n,"type") == 115 then
+-- local action = actions[getfield(n,"value")]
+-- if action then
+-- h = action(h,n)
+-- end
+-- h = remove_node(h,n,true)
+-- done = true
+-- end
+-- end
+-- if done then
+-- return tonode(h), true
+-- else
+-- return head
+-- end
+-- end
+--
+-- local enabled = false
+--
+-- local function signal(what)
+-- if not enabled then
+-- nodes.tasks.prependaction("processors","normalizers", "typesetters.signals.handler")
+-- enabled = true
+-- end
+-- context(stringusernode(signal,what))
+-- end
+--
+-- interfaces.implement {
+-- name = "signal",
+-- actions = signal,
+-- arguments = "string",
+-- }
+
+local insert, remove = table.insert, table.remove
+
+local nodecodes = nodes.nodecodes
+local whatsitcodes = nodes.whatsitcodes
+local glyph_code = nodecodes.glyph
+local whatsit_code = nodecodes.whatsit
+local localpar_code = whatsitcodes.localpar
+
+local texnest = tex.nest
+local free_node = node.free
+local flush_list = node.flush_list
+
+local settexattribute = tex.setattribute
+local punctuation = characters.is_punctuation
+
+local variables = interfaces.variables
+local v_all = variables.all
+local v_reset = variables.reset
+
+local a_marked = attributes.numbers['marked']
+local lastmarked = 0
+local marked = {
+ [v_all] = 1,
+ [""] = 1,
+ [v_reset] = attributes.unsetvalue,
+}
+
+local stack = { }
+
+local function pickup()
+ local list = texnest[texnest.ptr]
+ if list then
+ local tail = list.tail
+ if tail and tail.id == glyph_code and punctuation[tail.char] then
+ local prev = tail.prev
+ list.tail = prev
+ if prev then
+ prev.next = nil
+ end
+ list.tail = prev
+ tail.prev = nil
+ return tail
+ end
+ end
+end
+
+local actions = {
+ remove = function(specification)
+ local n = pickup()
+ if n then
+ free_node(n)
+ end
+ end,
+ push = function(specification)
+ local n = pickup()
+ if n then
+ insert(stack,n or false)
+ end
+ end,
+ pop = function(specification)
+ local n = remove(stack)
+ if n then
+ context(n)
+ end
+ end,
+}
+
+local function pickuppunctuation(specification)
+ local action = actions[specification.action or "remove"]
+ if action then
+ action(specification)
+ end
+end
+
+-- I played with nested marked content but it makes no sense and gives
+-- complex code. Also, it's never needed so why bother.
+
+local function pickup(head,tail,str)
+ local attr = marked[str]
+ local last = tail
+ if last[a_marked] == attr then
+ local first = last
+ while true do
+ local prev = first.prev
+ if prev and prev[a_marked] == attr then
+ if prev.id == whatsit_code and prev.subtype == localpar_code then
+ break
+ else
+ first = prev
+ end
+ else
+ break
+ end
+ end
+ return first, last
+ end
+end
+
+local actions = {
+ remove = function(specification)
+ local list = texnest[texnest.ptr]
+ if list then
+ local head = list.head
+ local tail = list.tail
+ local first, last = pickup(head,tail,specification.mark)
+ if first then
+ if first == head then
+ list.head = nil
+ list.tail = nil
+ else
+ local prev = first.prev
+ list.tail = prev
+ prev.next = nil
+ end
+ flush_list(first)
+ end
+ end
+ end,
+}
+
+local function pickupmarkedcontent(specification)
+ local action = actions[specification.action or "remove"]
+ if action then
+ action(specification)
+ end
+end
+
+local function markcontent(str)
+ local currentmarked = marked[str or v_all]
+ if not currentmarked then
+ lastmarked = lastmarked + 1
+ currentmarked = lastmarked
+ marked[str] = currentmarked
+ end
+ settexattribute(a_marked,currentmarked)
+end
+
+interfaces.implement {
+ name = "pickuppunctuation",
+ actions = pickuppunctuation,
+ arguments = {
+ {
+ { "action" }
+ }
+ }
+}
+
+interfaces.implement {
+ name = "pickupmarkedcontent",
+ actions = pickupmarkedcontent,
+ arguments = {
+ {
+ { "action" },
+ { "mark" }
+ }
+ }
+}
+
+interfaces.implement {
+ name = "markcontent",
+ actions = markcontent,
+ arguments = "string",
+}
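
The pickup helpers above do plain doubly linked list surgery on the tail of the current nest entry. The following pure Lua mockup shows the punctuation case in isolation; the fake glyph records and the tiny is_punctuation table are illustrative stand-ins, not the real node interface.

    local is_punctuation = { [0x2C] = true, [0x2E] = true } -- comma and period only

    local function pickup(list)
        local tail = list.tail
        if tail and tail.id == "glyph" and is_punctuation[tail.char] then
            local prev = tail.prev
            list.tail = prev
            if prev then
                prev.next = nil
            end
            tail.prev = nil
            return tail
        end
    end

    -- "x," as a two item list
    local a = { id = "glyph", char = 0x78 }
    local b = { id = "glyph", char = 0x2C, prev = a }
    a.next = b
    local list = { head = a, tail = b }
    print(pickup(list) == b, list.tail == a) -- true   true
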
diff --git a/tex/context/base/typo-chr.mkiv b/tex/context/base/typo-chr.mkiv
new file mode 100644
index 000000000..dc0c68664
--- /dev/null
+++ b/tex/context/base/typo-chr.mkiv
@@ -0,0 +1,82 @@
+
+%D \module
+%D [ file=typo-chr,
+%D version=2015.01.01, % or about that time
+%D title=\CONTEXT\ Typesetting Macros,
+%D subtitle=Cleaning Up Mess,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Typesetting Macros / Characters}
+
+\unprotect
+
+%D This is a very experimental feature, mostly for Alan and me to play with in
+%D publication hell.
+%D
+%D \startbuffer
+%D before, after\par
+%D before,\removepunctuation after\par
+%D before\markcontent[gone]{\darkred gone}\removemarkedcontent[gone]after\par
+%D before\markcontent[kept]{\darkgreen kept}\removemarkedcontent[gone]after\par
+%D \markcontent[gone]{\darkred gone}\removemarkedcontent[gone]after\par
+%D \markcontent[kept]{\darkgreen kept}\removemarkedcontent[gone]after\par
+%D \stopbuffer
+%D
+%D \typebuffer \blank \getbuffer\blank
+%D
+%D This feature is paragraph based and is only to be used for small snippets of text,
+%D for instance when combining bit and pieces where keeping a state is complex compared
+%D to cleaning up unwanted stuff.
+
+\registerctxluafile{typo-chr}{1.001}
+
+\definesystemattribute[marked][public]
+
+\unexpanded\def\removepunctuation
+ {\clf_pickuppunctuation action{remove}\relax} % the first experiment
+
+\unexpanded\def\pushpunctuation
+ {\clf_pickuppunctuation action{push}\relax} % the first experiment
+
+\unexpanded\def\poppunctuation
+ {\clf_pickuppunctuation action{pop}\relax} % the first experiment
+
+\unexpanded\def\markcontent
+ {\dosingleempty\typo_marked_mark}
+
+\def\typo_marked_mark[#1]#2%
+ {\dontleavehmode
+ \bgroup
+ \clf_markcontent{#1}%
+ \bgroup
+ #2% double grouping makes aftergroups work ok
+ \egroup
+ \egroup}
+
+\unexpanded\def\startmarkedcontent
+ {\dontleavehmode
+ \bgroup
+ \dosingleempty\typo_marked_start}
+
+\def\typo_marked_start[#1]%
+ {\clf_markcontent{#1}%
+ % double grouping makes aftergroups work ok
+ \bgroup}
+
+\unexpanded\def\stopmarkedcontent
+ {\egroup
+ \egroup}
+
+\unexpanded\def\removemarkedcontent
+ {\dosingleempty\typo_marked_remove}
+
+\def\typo_marked_remove[#1]%
+ {\clf_pickupmarkedcontent action{remove}mark{#1}\relax}
+
+\protect \endinput
diff --git a/tex/context/base/typo-cln.lua b/tex/context/base/typo-cln.lua
index 2aa05b6d1..7228e02c5 100644
--- a/tex/context/base/typo-cln.lua
+++ b/tex/context/base/typo-cln.lua
@@ -28,7 +28,15 @@ local tasks = nodes.tasks
local texsetattribute = tex.setattribute
-local traverse_id = node.traverse_id
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local setfield = nuts.setfield
+local getchar = nuts.getchar
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local traverse_id = nuts.traverse_id
local unsetvalue = attributes.unsetvalue
@@ -48,18 +56,18 @@ local resetter = { -- this will become an entry in char-def
function cleaners.handler(head)
local inline, done = false, false
- for n in traverse_id(glyph_code,head) do
- local char = n.char
+ for n in traverse_id(glyph_code,tonut(head)) do
+ local char = getchar(n)
if resetter[char] then
inline = false
elseif not inline then
- local a = n[a_cleaner]
+ local a = getattr(n,a_cleaner)
if a == 1 then -- currently only one cleaner so no need to be fancy
local upper = uccodes[char]
if type(upper) == "table" then
-- some day, not much change that \SS ends up here
else
- n.char = upper
+ setfield(n,"char",upper)
done = true
if trace_autocase then
report_autocase("")
@@ -93,4 +101,8 @@ end
-- interface
-commands.setcharactercleaning = cleaners.set
+interfaces.implement {
+ name = "setcharactercleaning",
+ actions = cleaners.set,
+ arguments = "string"
+}
diff --git a/tex/context/base/typo-cln.mkiv b/tex/context/base/typo-cln.mkiv
index 5306f614f..193198dc5 100644
--- a/tex/context/base/typo-cln.mkiv
+++ b/tex/context/base/typo-cln.mkiv
@@ -24,7 +24,7 @@
%D 1: Autocap first character of a line
\unexpanded\def\setcharactercleaning[#1]% This name might change!
- {\ctxcommand{setcharactercleaning("#1")}} % also accepts "reset"
+ {\clf_setcharactercleaning{#1}} % also accepts "reset"
% \appendtoks
% \attribute\cleanerattribute\attributeunsetvalue
diff --git a/tex/context/base/typo-del.mkiv b/tex/context/base/typo-del.mkiv
index 603471f75..4c3c5ab3d 100644
--- a/tex/context/base/typo-del.mkiv
+++ b/tex/context/base/typo-del.mkiv
@@ -70,23 +70,23 @@
{\ifcase\boundarycharactermode
\or
%\nobreak
- \hskip\hspaceamount\currentlanguage{#2}%
- \languageparameter#1%
+ \hskip\hspaceamount\currentusedlanguage{#2}%
+ \usedlanguageparameter#1%
%\nobreak
- \hskip\hspaceamount\currentlanguage{#2}%
+ \hskip\hspaceamount\currentusedlanguage{#2}%
\or
- \languageparameter#1%
+ \usedlanguageparameter#1%
\fi
\boundarycharactermode\plusone}
\unexpanded\def\leftboundarycharacter#1#2%
{\ifcase\boundarycharactermode
\or
- \languageparameter#1%
+ \usedlanguageparameter#1%
\nobreak
- \hskip\hspaceamount\currentlanguage{#2}%
+ \hskip\hspaceamount\currentusedlanguage{#2}%
\or
- \languageparameter#1%
+ \usedlanguageparameter#1%
\fi
\boundarycharactermode\plusone}
@@ -94,10 +94,10 @@
{\ifcase\boundarycharactermode
\or
\prewordbreak %\nobreak
- \hskip\hspaceamount\currentlanguage{#2}%
- \languageparameter#1%
+ \hskip\hspaceamount\currentusedlanguage{#2}%
+ \usedlanguageparameter#1%
\or
- \languageparameter#1%
+ \usedlanguageparameter#1%
\fi
\boundarycharactermode\plusone}
@@ -142,13 +142,13 @@
{\beforesubsentence
\ifdim\lastkern=\d_typo_subsentence_signal
\unskip
- \kern\hspaceamount\currentlanguage{intersentence}%
+ \kern\hspaceamount\currentusedlanguage{intersentence}%
\fi
\global\advance\c_typo_subsentence_nesting\plusone
\ifnum\c_typo_subsentence_nesting=\plusone
\dontleavehmode % was \leaveoutervmode
\fi
- \dostarttagged\t!subsentence\empty
+ \dostarttagged\t!subsentence\empty % no chain
\symbol[\ifodd\c_typo_subsentence_nesting\c!leftsentence\else\c!leftsubsentence\fi]%
}% \ignorespaces}
@@ -166,7 +166,7 @@
\unexpanded\def\endofsubsentencespacing
{\ifdim\lastkern=\d_typo_subsentence_signal
\unskip
- \hskip\hspaceamount\currentlanguage{intersentence}%
+ \hskip\hspaceamount\currentusedlanguage{intersentence}%
% no good, actually language dependent:
% \ignorespaces
\else
@@ -193,8 +193,8 @@
%definehspace [quote] [\zeropoint]
%definehspace [speech] [\zeropoint]
-\definehspace [quote] [\hspaceamount\currentlanguage{quotation}]
-\definehspace [speech] [\hspaceamount\currentlanguage{quotation}]
+\definehspace [quote] [\hspaceamount\currentusedlanguage{quotation}]
+\definehspace [speech] [\hspaceamount\currentusedlanguage{quotation}]
\definesymbol
[\c!leftquotation]
@@ -204,6 +204,14 @@
[\c!rightquotation]
[\rightboundarycharacter\c!rightquotation{quotation}]
+\definesymbol
+ [\c!nextleftquotation]
+ [\rightboundarycharacter\c!leftquotation{quotation}]
+
+\definesymbol
+ [\c!nextrightquotation]
+ [\leftboundarycharacter\c!rightquotation{quotation}]
+
\definesymbol
[\c!leftquote]
[\leftboundarycharacter\c!leftquote{quote}]
@@ -248,6 +256,7 @@
\def\typo_delimited_push#1%
{\globalpushmacro\currentdelimitedtext
\def\currentdelimitedtext{#1}%
+ \setlanguageparameter\delimitedtextparameter
\let\currentparentdelimitedtext\currentdelimitedtext
\global\advance\c_typo_delimited_nesting\plusone
\edef\delimitedtextlevel{\number\c_typo_delimited_nesting}%
@@ -297,9 +306,9 @@
\unexpanded\def\startdelimitedtext[#1]%
{\begingroup
\typo_delimited_push{#1}%
- \dostarttagged\t!delimitedblock\currentdelimitedtext
- \edef\p_method{\delimitedtextparameter\c!method}%
- \ifx\p_method\s!font
+ \dostarttaggedchained\t!delimitedblock\currentdelimitedtext\??delimitedtext
+ \edef\p_delimited_method{\delimitedtextparameter\c!method}%
+ \ifx\p_delimited_method\s!font
\expandafter\typo_delimited_start_font
\else
\expandafter\typo_delimited_start_other
@@ -313,16 +322,16 @@
\ignorespaces}
\def\typo_delimited_start_other
- {\edef\p_repeat{\delimitedtextparameter\c!repeat}%
- \ifx\p_repeat\v!yes
+ {\edef\p_delimited_repeat{\delimitedtextparameter\c!repeat}%
+ \ifx\p_delimited_repeat\v!yes
\let\typo_delimited_repeat\typo_delimited_repeat_ideed
\else
\let\typo_delimited_repeat\relax
\fi
- \edef\p_location{\delimitedtextparameter\c!location}%
- \ifx\p_location\v!paragraph
+ \edef\p_delimited_location{\delimitedtextparameter\c!location}%
+ \ifx\p_delimited_location\v!paragraph
\singleexpandafter\typo_delimited_start_par
- \else\ifx\p_location\v!margin
+ \else\ifx\p_delimited_location\v!margin
\doubleexpandafter\typo_delimited_start_par
\else
\doubleexpandafter\typo_delimited_start_txt
@@ -338,8 +347,10 @@
\def\typo_delimited_start_par_indeed[#1]%
{\let\typo_delimited_stop\typo_delimited_stop_par
- \doifsomething{\delimitedtextparameter\c!spacebefore}
- {\blank[\delimitedtextparameter\c!spacebefore]}%
+ \edef\p_delimited_spacebefore{\delimitedtextparameter\c!spacebefore}%
+ \ifx\p_delimited_spacebefore\empty \else
+ \blank[\p_delimited_spacebefore]%
+ \fi
\delimitedtextparameter\c!before
\edef\m_typo_delimited_narrower{#1}%
\ifx\m_typo_delimited_narrower\empty
@@ -353,11 +364,21 @@
\fi
% so far
\pushmacro\checkindentation
- \doifsomething{\delimitedtextparameter\c!indenting} % WS
- {\setupindenting[\delimitedtextparameter\c!indenting]}%
+ \useindentingparameter\delimitedtextparameter
+ %
\begingroup
\usedelimitedtextstyleandcolor\c!style\c!color
+ %
+ \edef\p_delimited_left {\delimitedtextparameter{\c!left}}%
+ \edef\p_delimited_right {\delimitedtextparameter{\c!right}}%
+ \edef\p_delimited_nextleft {\delimitedtextparameter{\c!nextleft}}%
+ \edef\p_delimited_nextright{\delimitedtextparameter{\c!nextright}}%
+ %
\leftdelimitedtextmark
+ %
+ \setnextleftdelimitedtextmark
+ \setnextrightdelimitedtextmark
+ %
\ignorespaces}
\def\typo_delimited_stop_par
@@ -368,10 +389,12 @@
\popmacro\checkindentation
\typo_delimited_stop_par_indeed
\delimitedtextparameter\c!after
- \doifsomething{\delimitedtextparameter\c!spaceafter}
- {\blank[\delimitedtextparameter\c!spaceafter]}%
+ \edef\p_delimited_spaceafter{\delimitedtextparameter\c!spaceafter}%
+ \ifx\p_delimited_spaceafter\empty \else
+ \blank[\p_delimited_spaceafter]%
+ \fi
\useindentnextparameter\delimitedtextparameter
- \dorechecknextindentation}% AM: This was missing!
+ \aftergroup\dorechecknextindentation}% AM: This was missing!
\def\typo_delimited_start_txt
{\let\typo_delimited_stop\typo_delimited_stop_txt
@@ -394,18 +417,18 @@
\unexpanded\def\delimitedtext[#1]%
   {\dontleavehmode % following ones can be omitted
\typo_delimited_push{#1}%
- \edef\p_method{\delimitedtextparameter\c!method}%
- \ifx\p_method\s!font
+ \edef\p_delimited_method{\delimitedtextparameter\c!method}%
+ \ifx\p_delimited_method\s!font
\expandafter\typo_delimited_fontdriven
\else
\expandafter\typo_delimited_other
\fi}
\def\typo_delimited_other
- {\edef\p_location{\delimitedtextparameter\c!location}%
- \ifx\p_location\v!paragraph
+ {\edef\p_delimited_location{\delimitedtextparameter\c!location}%
+ \ifx\p_delimited_location\v!paragraph
\singleexpandafter\typo_delimited_par
- \else\ifx\p_location\v!margin
+ \else\ifx\p_delimited_location\v!margin
\doubleexpandafter\typo_delimited_par
\else
\doubleexpandafter\typo_delimited_txt
@@ -417,16 +440,89 @@
\unexpanded\def\stopdelimited {\stopdelimitedtext} % no let, dynamically assigned
\def\delimited {\delimitedtext}
+% todo: \dostarttagged\t!nothing\empty % for left/right boxes
+
+%D We have 4 different location and symbol handlers (two pairs):
+%D
+%D \starttyping
+%D \input tufte \startquotation \input tufte \stopquotation
+%D
+%D \setupdelimitedtext
+%D [quotation]
+%D [nextleft=right,
+%D nextright=left]
+%D
+%D \input tufte \startquotation \input tufte \stopquotation
+%D
+%D \setupdelimitedtext
+%D [quotation]
+%D [nextleft={\symbol[nextleftquotation]},
+%D nextright={\symbol[nextrightquotation]}]
+%D
+%D \input tufte \startquotation \input tufte \stopquotation
+%D \stoptyping
+
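+%D The \type {nextleft} and \type {nextright} symbols are injected with \type
+%D {\localleftbox} and \type {\localrightbox} (see below), so they end up at the
+%D left respectively right edge of the lines of the delimited paragraph.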
+\unexpanded\def\setnextleftdelimitedtextmark
+ {\ifx\p_delimited_nextleft\empty
+ % nothing
+ \else\ifx\p_delimited_nextleft\v!left
+ \typo_delimited_nextleft_symbol\p_delimited_left
+ \else\ifx\p_delimited_nextleft\v!right
+ \typo_delimited_nextleft_symbol\p_delimited_right
+ \else
+ \typo_delimited_nextleft_symbol\p_delimited_nextleft
+ \fi\fi\fi}
+
+\unexpanded\def\setnextrightdelimitedtextmark
+ {\ifx\p_delimited_nextright\empty
+ % nothing
+ \else\ifx\p_delimited_nextright\v!right
+ \typo_delimited_nextright_symbol\p_delimited_right
+ \else\ifx\p_delimited_nextright\v!left
+ \typo_delimited_nextright_symbol\p_delimited_left
+ \else
+ \typo_delimited_nextright_symbol\p_delimited_nextright
+ \fi\fi\fi}
+
\unexpanded\def\leftdelimitedtextmark
- {\doifsomething{\delimitedtextparameter\c!left}
- {\setbox\scratchbox\hbox{\delimitedtextparameter\c!left}%
- \dontleavehmode
- \doif{\delimitedtextparameter\c!location}\v!margin{\hskip-\wd\scratchbox}%
- \box\scratchbox}}
+ {\ifx\p_delimited_left\empty
+ % nothing
+ \else
+ \typo_delimited_left_symbol\p_delimited_left
+ \fi}
\unexpanded\def\rightdelimitedtextmark
- {\doifsomething{\delimitedtextparameter\c!right}
- {\hsmash{\delimitedtextparameter\c!right}}}
+ {\ifx\p_delimited_right\empty
+ % nothing
+ \else
+ \typo_delimited_right_symbol\p_delimited_right
+ \fi}
+
+\def\typo_delimited_left_symbol#1%
+ {\setbox\scratchbox\hbox{\usedelimitedtextstyleandcolor\c!symstyle\c!symcolor#1}%
+ \dontleavehmode
+ \edef\p_delimited_margin{\delimitedtextparameter\c!location}%
+ \ifx\p_delimited_margin\v!margin
+ \hskip-\wd\scratchbox
+ \fi
+ \box\scratchbox}
+
+\def\typo_delimited_right_symbol#1%
+ {\hsmash{\usedelimitedtextstyleandcolor\c!symstyle\c!symcolor#1}}
+
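+% In these next* variants left and right boundary handling is swapped, presumably
+% so that a symbol hanging in the local box gets the spacing of the opposite
+% boundary.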
+\def\typo_delimited_nextleft_symbol#1%
+ {\localleftbox\bgroup
+ \swapmacros\leftboundarycharacter\rightboundarycharacter
+ \boundarycharactermode\plusone
+ \typo_delimited_left_symbol#1%
+ \egroup}
+
+\def\typo_delimited_nextright_symbol#1%
+ {\localrightbox\bgroup
+ \swapmacros\leftboundarycharacter\rightboundarycharacter
+ \boundarycharactermode\plusone
+ \typo_delimited_right_symbol#1%
+ \egroup}
% \starttext
% \hyphenatedword{groepsvrijstellingsverordeningen}\par
@@ -443,11 +539,12 @@
\def\typo_delimited_handle_middle#1%
{\begingroup
+ \usedelimitedtextstyleandcolor\c!symstyle\c!symcolor
\setbox\scratchbox\hbox{\delimitedtextparameter#1}%
\ifdim\wd\scratchbox>\zeropoint
\ifdim\lastkern=\d_typo_delimited_signal
\unkern
- \hskip\hspaceamount\currentlanguage{interquotation}%
+ \hskip\hspaceamount\currentusedlanguage{interquotation}%
\fi
        \ifhmode % else funny pagebreaks
\penalty\plustenthousand
@@ -462,14 +559,15 @@
\def\typo_delimited_handle_left#1%
{\begingroup
+ \usedelimitedtextstyleandcolor\c!symstyle\c!symcolor
\setbox\scratchbox\hbox{\delimitedtextparameter#1}%
\ifdim\wd\scratchbox>\zeropoint
\ifdim\lastkern=\d_typo_delimited_signal
\unkern
- \hskip\hspaceamount\currentlanguage{interquotation}%
+ \hskip\hspaceamount\currentusedlanguage{interquotation}%
\else\ifdim\lastskip=\d_typo_delimited_signal
\unskip
- \hskip\hspaceamount\currentlanguage{interquotation}%
+ \hskip\hspaceamount\currentusedlanguage{interquotation}%
\fi\fi
\strut % new, needed below
        \ifhmode % else funny pagebreaks
@@ -485,16 +583,17 @@
\def\typo_delimited_handle_right#1%
{\begingroup
+ \usedelimitedtextstyleandcolor\c!symstyle\c!symcolor
\setbox\scratchbox\hbox{\delimitedtextparameter#1}%
\ifdim\wd\scratchbox>\zeropoint
\ifdim\lastkern=\d_typo_delimited_signal
\unkern
\penalty\plustenthousand
- \hskip\hspaceamount\currentlanguage{interquotation}%
+ \hskip\hspaceamount\currentusedlanguage{interquotation}%
\else\ifdim\lastskip=\d_typo_delimited_signal
\unskip
\penalty\plustenthousand
- \hskip\hspaceamount\currentlanguage{interquotation}%
+ \hskip\hspaceamount\currentusedlanguage{interquotation}%
\fi\fi
        \ifhmode % else funny pagebreaks
\penalty\plustenthousand
@@ -508,21 +607,57 @@
\unexpanded\def\typo_delimited_par
{\groupedcommand
- {\dostarttagged\t!delimited\currentdelimitedtext % block?
+ {\dostarttaggedchained\t!delimited\currentdelimitedtext\??delimitedtext % block?
\typo_delimited_handle_left\c!left}
{\typo_delimited_handle_right\c!right
\removelastskip
\dostoptagged
\typo_delimited_pop}}
+% \unexpanded\def\typo_delimited_txt
+% {\doifelse{\delimitedtextparameter\c!style}\v!normal
+% \typo_delimited_quoted
+% \typo_delimited_attributed}
+%
+% \def\typo_delimited_quoted_b
+% {\dostarttaggedchained\t!delimited\currentdelimitedtext\??delimitedtext
+% \typo_delimited_handle_left\c!left}
+%
+% \def\typo_delimited_quoted_e
+% {\typo_delimited_handle_right\c!right
+% \removelastskip
+% \dostoptagged
+% \typo_delimited_pop}
+%
+% \def\typo_delimited_attributed_b
+% {\dostarttaggedchained\t!delimited\currentdelimitedtext\??delimitedtext
+% \usedelimitedtextstyleandcolor\c!style\c!color}
+%
+% \def\typo_delimited_attributed_e
+% {\dostoptagged
+% \typo_delimited_pop}
+%
+% \def\typo_delimited_fontdriven_b
+% {\dostarttaggedchained\t!delimited\currentdelimitedtext\??delimitedtext
+% \languageparameter{\c!left\currentparentdelimitedtext}}% was: \currentdelimitedtext
+%
+% \def\typo_delimited_fontdriven_e
+% {\languageparameter{\c!right\currentparentdelimitedtext}% was: \currentdelimitedtext
+% \dostoptagged
+% \typo_delimited_pop}
+
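+% When neither a left nor a right delimiter is set we only apply style and color
+% (the attributed variant), otherwise we also typeset the delimiters (the quoted
+% variant).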
\unexpanded\def\typo_delimited_txt
- {\doifelse{\delimitedtextparameter\c!style}\v!normal
- \typo_delimited_quoted
- \typo_delimited_attributed}
+ {\edef\p_left_right{\delimitedtextparameter\c!left\delimitedtextparameter\c!right}%
+ \ifx\p_left_right\empty
+ \expandafter\typo_delimited_attributed
+ \else
+ \expandafter\typo_delimited_quoted
+ \fi}
\def\typo_delimited_quoted_b
- {\dostarttagged\t!delimited\currentdelimitedtext
- \typo_delimited_handle_left\c!left}
+ {\dostarttaggedchained\t!delimited\currentdelimitedtext\??delimitedtext
+ \typo_delimited_handle_left\c!left
+ \usedelimitedtextstyleandcolor\c!style\c!color}
\def\typo_delimited_quoted_e
{\typo_delimited_handle_right\c!right
@@ -531,7 +666,7 @@
\typo_delimited_pop}
\def\typo_delimited_attributed_b
- {\dostarttagged\t!delimited\currentdelimitedtext
+ {\dostarttaggedchained\t!delimited\currentdelimitedtext\??delimitedtext
\usedelimitedtextstyleandcolor\c!style\c!color}
\def\typo_delimited_attributed_e
@@ -539,11 +674,12 @@
\typo_delimited_pop}
\def\typo_delimited_fontdriven_b
- {\dostarttagged\t!delimited\currentdelimitedtext
- \languageparameter{\c!left\currentparentdelimitedtext}}% was: \currentdelimitedtext
+ {\dostarttaggedchained\t!delimited\currentdelimitedtext\??delimitedtext
+ \usedlanguageparameter{\c!left\currentparentdelimitedtext}% was: \currentdelimitedtext
+ \usedelimitedtextstyleandcolor\c!style\c!color}
\def\typo_delimited_fontdriven_e
- {\languageparameter{\c!right\currentparentdelimitedtext}% was: \currentdelimitedtext
+ {\usedlanguageparameter{\c!right\currentparentdelimitedtext}% was: \currentdelimitedtext
\dostoptagged
\typo_delimited_pop}
diff --git a/tex/context/base/typo-dha.lua b/tex/context/base/typo-dha.lua
index d5ad66e7e..904b774ec 100644
--- a/tex/context/base/typo-dha.lua
+++ b/tex/context/base/typo-dha.lua
@@ -49,13 +49,30 @@ local trace_directions = false trackers.register("typesetters.directions.defa
local report_directions = logs.reporter("typesetting","text directions")
-
-local insert_node_before = nodes.insert_before
-local insert_node_after = nodes.insert_after
-local remove_node = nodes.remove
-local end_of_math = nodes.end_of_math
-
-local nodepool = nodes.pool
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+local nutstring = nuts.tostring
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getlist = nuts.getlist
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getattr = nuts.getattr
+local getprop = nuts.getprop
+local setprop = nuts.setprop
+
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local remove_node = nuts.remove
+local end_of_math = nuts.end_of_math
+
+local nodepool = nuts.pool
local nodecodes = nodes.nodecodes
local whatcodes = nodes.whatcodes
@@ -108,7 +125,7 @@ end
local function process(start)
- local head = start
+ local head = tonut(start) -- we have a global head
local current = head
local inserted = nil
@@ -167,9 +184,9 @@ local function process(start)
finidir = finish
end
if embedded <= 0 then
- finish, autodir, done = "TRT", -1
+ finish, autodir = "TRT", -1
else
- finish, autodir, done = "TLT", 1
+ finish, autodir = "TLT", 1
end
done = true
if finidir == finish then
@@ -180,31 +197,31 @@ local function process(start)
end
local function nextisright(current)
- current = current.next
- local id = current.id
+ current = getnext(current)
+ local id = getid(current)
if id == glyph_code then
- local character = current.char
+ local character = getchar(current)
local direction = chardirections[character]
return direction == "r" or direction == "al" or direction == "an"
end
end
local function previsright(current)
- current = current.prev
- local id = current.id
+ current = getprev(current)
+ local id = getid(current)
if id == glyph_code then
- local char = current.char
+ local character = getchar(current)
local direction = chardirections[character]
return direction == "r" or direction == "al" or direction == "an"
end
end
while current do
- local id = current.id
+ local id = getid(current)
if id == math_code then
- current = end_of_math(current.next).next
+ current = getnext(end_of_math(getnext(current)))
else
- local attr = current[a_directions]
+ local attr = getattr(current,a_directions)
if attr and attr > 0 and attr ~= prevattr then
if not getglobal(a) then
lro, rlo = false, false
@@ -213,7 +230,7 @@ local function process(start)
end
if id == glyph_code then
if attr and attr > 0 then
- local character = current.char
+ local character = getchar(current)
local direction = chardirections[character]
local reversed = false
if rlo or override > 0 then
@@ -223,24 +240,24 @@ local function process(start)
end
elseif lro or override < 0 then
if direction == "r" or direction == "al" then
- current[a_state] = s_isol
+ setprop(current,a_state,s_isol)
direction = "l"
reversed = true
end
end
if direction == "on" then
local mirror = charmirrors[character]
- if mirror and fontchar[current.font][mirror] then
+ if mirror and fontchar[getfont(current)][mirror] then
local class = charclasses[character]
if class == "open" then
if nextisright(current) then
if autodir >= 0 then
force_auto_right_before(direction)
end
- current.char = mirror
+ setfield(current,"char",mirror)
done = true
elseif autodir < 0 then
- current.char = mirror
+ setfield(current,"char",mirror)
done = true
else
mirror = false
@@ -251,14 +268,14 @@ local function process(start)
local fencedir = fences[#fences]
fences[#fences] = nil
if fencedir < 0 then
- current.char = mirror
+ setfield(current,"char",mirror)
done = true
force_auto_right_before(direction)
else
mirror = false
end
elseif autodir < 0 then
- current.char = mirror
+ setfield(current,"char",mirror)
done = true
else
mirror = false
@@ -329,16 +346,16 @@ local function process(start)
top = top - 1
end
obsolete[#obsolete+1] = current
- else
+ elseif trace_directions then
setcolor(current)
end
else
-- we do nothing
end
elseif id == whatsit_code then
- local subtype = current.subtype
+ local subtype = getsubtype(current)
if subtype == localpar_code then
- local dir = current.dir
+ local dir = getfield(current,"dir")
if dir == 'TRT' then
autodir = -1
elseif dir == 'TLT' then
@@ -351,7 +368,7 @@ local function process(start)
if finish then
finish_auto_before()
end
- local dir = current.dir
+ local dir = getfield(current,"dir")
if dir == "+TRT" then
finish, autodir = "TRT", -1
elseif dir == "-TRT" then
@@ -370,7 +387,7 @@ local function process(start)
elseif finish then
finish_auto_before()
end
- local cn = current.next
+ local cn = getnext(current)
if cn then
-- we're okay
elseif finish then
@@ -390,7 +407,7 @@ local function process(start)
end
end
- return head, done
+ return tonode(head), done
end
diff --git a/tex/context/base/typo-dig.lua b/tex/context/base/typo-dig.lua
index ef05e62da..c753a0352 100644
--- a/tex/context/base/typo-dig.lua
+++ b/tex/context/base/typo-dig.lua
@@ -19,10 +19,24 @@ local report_digits = logs.reporter("typesetting","digits")
local nodes, node = nodes, node
-local hpack_node = node.hpack
-local traverse_id = node.traverse_id
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local hpack_node = nuts.hpack
+local traverse_id = nuts.traverse_id
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
@@ -30,7 +44,7 @@ local unsetvalue = attributes.unsetvalue
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local tasks = nodes.tasks
local new_glue = nodepool.glue
@@ -66,16 +80,20 @@ function nodes.aligned(head,start,stop,width,how)
if how == "flushleft" or how == "middle" then
head, stop = insert_node_after(head,stop,new_glue(0,65536,65536))
end
- local prv, nxt = start.prev, stop.next
- start.prev, stop.next = nil, nil
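+    -- detach the range start .. stop, pack it to the requested width, and splice
+    -- the resulting hlist back into the list in its place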
+ local prv = getprev(start)
+ local nxt = getnext(stop)
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
local packed = hpack_node(start,width,"exactly") -- no directional mess here, just lr
if prv then
- prv.next, packed.prev = packed, prv
+ setfield(prv,"next",packed)
+ setfield(packed,"prev",prv)
end
if nxt then
- nxt.prev, packed.next = packed, nxt
+ setfield(nxt,"prev",packed)
+ setfield(packed,"next",nxt)
end
- if packed.prev then
+ if getprev(packed) then
return head, packed
else
return packed, packed
@@ -83,16 +101,16 @@ function nodes.aligned(head,start,stop,width,how)
end
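+-- Action 1 equalizes digits: when the glyph's unicode has category "nd" and the
+-- font's digit width differs from the natural width, the digit is repacked
+-- (middle aligned) to that digit width so that columns of figures line up.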
actions[1] = function(head,start,attr)
- local font = start.font
- local char = start.char
- local unic = chardata[font][char].tounicode
- local what = unic and tonumber(unic,16) or char
- if charbase[what].category == "nd" then
- local oldwidth, newwidth = start.width, getdigitwidth(font)
+ local font = getfont(start)
+ local char = getchar(start)
+ local unic = chardata[font][char].unicode or char
+ if charbase[unic].category == "nd" then -- ignore unic tables
+ local oldwidth = getfield(start,"width")
+ local newwidth = getdigitwidth(font)
if newwidth ~= oldwidth then
if trace_digits then
report_digits("digit trigger %a, instance %a, char %C, unicode %U, delta %s",
- attr%100,div(attr,100),char,what,newwidth-oldwidth)
+ attr%100,div(attr,100),char,unic,newwidth-oldwidth)
end
head, start = nodes.aligned(head,start,start,newwidth,"middle")
return head, start, true
@@ -102,12 +120,13 @@ actions[1] = function(head,start,attr)
end
function digits.handler(head)
+ head = tonut(head)
local done, current, ok = false, head, false
while current do
- if current.id == glyph_code then
- local attr = current[a_digits]
+ if getid(current) == glyph_code then
+ local attr = getattr(current,a_digits)
if attr and attr > 0 then
- current[a_digits] = unsetvalue
+ setattr(current,a_digits,unsetvalue)
local action = actions[attr%100] -- map back to low number
if action then
head, current, ok = action(head,current,attr)
@@ -117,9 +136,11 @@ function digits.handler(head)
end
end
end
- current = current and current.next
+ if current then
+ current = getnext(current)
+ end
end
- return head, done
+ return tonode(head), done
end
local m, enabled = 0, false -- a trick to make neighbouring ranges work
@@ -152,4 +173,8 @@ end
-- interface
-commands.setdigitsmanipulation = digits.set
+interfaces.implement {
+ name = "setdigitsmanipulation",
+ actions = digits.set,
+ arguments = "string"
+}
diff --git a/tex/context/base/typo-dig.mkiv b/tex/context/base/typo-dig.mkiv
index 71425c594..aa610b3c9 100644
--- a/tex/context/base/typo-dig.mkiv
+++ b/tex/context/base/typo-dig.mkiv
@@ -39,7 +39,7 @@
%D \stoplines
\unexpanded\def\setdigitsmanipulation[#1]%
- {\ctxcommand{setdigitsmanipulation("#1")}}
+ {\clf_setdigitsmanipulation{#1}}
\unexpanded\def\resetdigitsmanipulation
{\attribute\digitsattribute\attributeunsetvalue}
diff --git a/tex/context/base/typo-dir.lua b/tex/context/base/typo-dir.lua
index a04028452..482b7114d 100644
--- a/tex/context/base/typo-dir.lua
+++ b/tex/context/base/typo-dir.lua
@@ -33,85 +33,43 @@ local formatters = string.formatters
local nodes, node = nodes, node
-local trace_textdirections = false trackers.register("typesetters.directions.text", function(v) trace_textdirections = v end)
-local trace_mathdirections = false trackers.register("typesetters.directions.math", function(v) trace_mathdirections = v end)
-local trace_directions = false trackers.register("typesetters.directions", function(v) trace_textdirections = v trace_mathdirections = v end)
+local trace_textdirections = false trackers.register("typesetters.directions.text", function(v) trace_textdirections = v end)
+local trace_mathdirections = false trackers.register("typesetters.directions.math", function(v) trace_mathdirections = v end)
+local trace_directions = false trackers.register("typesetters.directions", function(v) trace_textdirections = v trace_mathdirections = v end)
local report_textdirections = logs.reporter("typesetting","text directions")
local report_mathdirections = logs.reporter("typesetting","math directions")
+local hasbit = number.hasbit
+local texsetattribute = tex.setattribute
+local unsetvalue = attributes.unsetvalue
+local tasks = nodes.tasks
+local tracers = nodes.tracers
+local setcolor = tracers.colors.set
+local resetcolor = tracers.colors.reset
-local traverse_id = node.traverse_id
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local remove_node = nodes.remove
-local end_of_math = nodes.end_of_math
+local implement = interfaces.implement
-local texsetattribute = tex.setattribute
-local texsetcount = tex.setcount
-local unsetvalue = attributes.unsetvalue
+local directions = typesetters.directions or { }
+typesetters.directions = directions
-local hasbit = number.hasbit
+local a_directions = attributes.private('directions')
-local nodecodes = nodes.nodecodes
-local whatcodes = nodes.whatcodes
-local mathcodes = nodes.mathcodes
+local variables = interfaces.variables
+local v_global = variables["global"]
+local v_local = variables["local"]
+local v_on = variables.on
+local v_yes = variables.yes
-local tasks = nodes.tasks
-local tracers = nodes.tracers
-local setcolor = tracers.colors.set
-local resetcolor = tracers.colors.reset
+local m_enabled = 2^6 -- 64
+local m_global = 2^7
+local m_fences = 2^8
-local glyph_code = nodecodes.glyph
-local whatsit_code = nodecodes.whatsit
-local math_code = nodecodes.math
-local penalty_code = nodecodes.penalty
-local kern_code = nodecodes.kern
-local glue_code = nodecodes.glue
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-
-local localpar_code = whatcodes.localpar
-local dir_code = whatcodes.dir
-
-local nodepool = nodes.pool
-
-local new_textdir = nodepool.textdir
-
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-local fontchar = fonthashes.characters
-
-local chardirections = characters.directions
-local charmirrors = characters.mirrors
-local charclasses = characters.textclasses
-
-local directions = typesetters.directions or { }
-typesetters.directions = directions
-
-local a_state = attributes.private('state')
-local a_directions = attributes.private('directions')
-local a_mathbidi = attributes.private('mathbidi')
-
-local strip = false
-
-local s_isol = fonts.analyzers.states.isol
-
-local variables = interfaces.variables
-local v_global = variables["global"]
-local v_local = variables["local"]
-local v_on = variables.on
-local v_yes = variables.yes
-
-local m_enabled = 2^6 -- 64
-local m_global = 2^7
-local m_fences = 2^8
-
-local handlers = { }
-local methods = { }
-local lastmethod = 0
+local handlers = { }
+local methods = { }
+local lastmethod = 0
local function installhandler(name,handler)
local method = methods[name]
@@ -181,9 +139,17 @@ function directions.setcolor(current,direction,reversed,mirror)
end
end
-function commands.getbidimode(specification)
- context(tomode(specification)) -- hash at tex end
-end
+implement {
+ name = "getbidimode",
+ actions = { tomode, context },
+ arguments = {
+ {
+ { "scope" },
+ { "method" },
+ { "fences" },
+ }
+ }
+}
local enabled = false
@@ -234,4 +200,8 @@ function directions.set(n) -- todo: names and numbers
texsetattribute(a_directions,n)
end
-commands.setdirection = directions.set
+implement {
+ name = "setdirection",
+ arguments = "integer",
+ actions = directions.set
+}
diff --git a/tex/context/base/typo-dir.mkiv b/tex/context/base/typo-dir.mkiv
index 0362af56c..f9b4ecb97 100644
--- a/tex/context/base/typo-dir.mkiv
+++ b/tex/context/base/typo-dir.mkiv
@@ -13,6 +13,9 @@
\writestatus{loading}{ConTeXt Typesetting Macros / Directions}
+%D At some point we might default to method 'two' but first I need to make it more
+%D efficient (and provide some options). I also want to have some basic tracing.
+
\unprotect
\registerctxluafile{typo-dir}{1.001}
@@ -33,10 +36,14 @@
\edef\righttoleftmark{\normalUchar"200F} \let\rlm\righttoleftmark
\unexpanded\def\setdirection[#1]% todo: symbolic names
- {\ctxcommand{setdirection(\number#1)}}
+ {\clf_setdirection#1\relax}
+
+% \unexpanded\def\resetdirection
+% {\clf_setdirection\zerocount}
+%
+% is in fact:
\unexpanded\def\resetdirection
- %{\ctxcommand{setdirection(0)}}
{\attribute\directionsattribute\attributeunsetvalue}
 \newconstant\directionsbidimode % this one might become private
@@ -46,12 +53,14 @@
% \setupdirections[bidi=global,method=two]
% \setupdirections[bidi=global,method=two,fences=no]
+% maybe use chardefs
+
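+% The resulting mode is cached by stamp (the \glet below) so that repeated setups
+% with the same bidi, method and fences values stay cheap.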
\def\typo_dir_get_mode
- {\def\currentbidimode{\ctxcommand{getbidimode {
- scope = "\directionsparameter\c!bidi ",
- method = "\directionsparameter\c!method",
- fences = "\directionsparameter\c!fences",
- }}}%
+ {\def\currentbidimode{\clf_getbidimode
+ scope {\directionsparameter\c!bidi}%
+ method {\directionsparameter\c!method}%
+ fences {\directionsparameter\c!fences}%
+ }%
\expandafter\glet\csname\??directionsbidimode\currentbidistamp\endcsname\currentbidimode}
\appendtoks
diff --git a/tex/context/base/typo-drp.lua b/tex/context/base/typo-drp.lua
index 903140dae..4bbf0b8e9 100644
--- a/tex/context/base/typo-drp.lua
+++ b/tex/context/base/typo-drp.lua
@@ -11,9 +11,7 @@ if not modules then modules = { } end modules ['typo-drp'] = {
local tonumber, type, next = tonumber, type, next
local ceil = math.ceil
-
-local utfbyte = utf.byte
-local utfchar = utf.char
+local settings_to_hash = utilities.parsers.settings_to_hash
local trace_initials = false trackers.register("typesetters.initials", function(v) trace_initials = v end)
local report_initials = logs.reporter("nodes","initials")
@@ -24,19 +22,41 @@ typesetters.initials = initials or { }
local nodes = nodes
local tasks = nodes.tasks
-local hpack_nodes = nodes.hpack
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local hpack_nodes = nuts.hpack
+
local nodecodes = nodes.nodecodes
local whatsitcodes = nodes.whatsitcodes
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local new_kern = nodepool.kern
-local insert_before = nodes.insert_before
-local insert_after = nodes.insert_after
+local insert_before = nuts.insert_before
+local insert_after = nuts.insert_after
+local remove_node = nuts.remove
+local traverse_id = nuts.traverse_id
+local traverse = nuts.traverse
+local free_node = nuts.free
local variables = interfaces.variables
local v_default = variables.default
local v_margin = variables.margin
+local v_auto = variables.auto
+local v_first = variables.first
+local v_last = variables.last
local texget = tex.get
local texsetattribute = tex.setattribute
@@ -44,7 +64,8 @@ local unsetvalue = attributes.unsetvalue
local glyph_code = nodecodes.glyph
local hlist_code = nodecodes.hlist
-local kern_node = nodecodes.kern
+local glue_code = nodecodes.glue
+local kern_code = nodecodes.kern
local whatsit_code = nodecodes.whatsit
local localpar_code = whatsitcodes.localpar
@@ -56,6 +77,8 @@ local a_color = attributes.private('color')
local a_transparency = attributes.private('transparency')
local a_colorspace = attributes.private('colormodel')
+local category = characters.category
+
local settings = nil
function initials.set(specification)
@@ -68,7 +91,27 @@ function initials.set(specification)
texsetattribute(a_initial,1)
end
-commands.setinitial = initials.set
+interfaces.implement {
+ name = "setinitial",
+ actions = initials.set,
+ arguments = {
+ {
+ { "location" },
+ { "enabled", "boolean" },
+ { "method" },
+ { "distance" ,"dimen" },
+ { "hoffset" ,"dimen" },
+ { "voffset" ,"dimen" },
+ { "font", "integer" },
+ { "dynamic", "integer" },
+ { "ca", "integer" },
+ { "ma", "integer" },
+ { "ta", "integer" },
+ { "n", "integer" },
+ { "m", "integer" },
+ }
+ }
+}
-- dropped caps experiment (will be done properly when luatex
-- stores the state in the local par node) .. btw, search still
@@ -84,74 +127,297 @@ commands.setinitial = initials.set
 -- todo: prevent linebreak .. but normally an initial ends up at the top of
-- a page so this has a low priority
+-- actions[v_default] = function(head,setting)
+-- local done = false
+-- if getid(head) == whatsit_code and getsubtype(head) == localpar_code then
+-- -- begin of par
+-- local first = getnext(head)
+-- -- parbox .. needs to be set at 0
+-- if first and getid(first) == hlist_code then
+-- first = getnext(first)
+-- end
+-- -- we need to skip over kerns and glues (signals)
+-- while first and getid(first) ~= glyph_code do
+-- first = getnext(first)
+-- end
+-- if first and getid(first) == glyph_code then
+-- local char = getchar(first)
+-- local prev = getprev(first)
+-- local next = getnext(first)
+-- -- if getid(prev) == hlist_code then
+-- -- -- set the width to 0
+-- -- end
+-- if next and getid(next) == kern_code then
+-- setfield(next,"kern",0)
+-- end
+-- if setting.font then
+-- setfield(first,"font",setting.font)
+-- end
+-- if setting.dynamic > 0 then
+-- setattr(first,0,setting.dynamic)
+-- end
+-- -- can be a helper
+-- local ma = setting.ma or 0
+-- local ca = setting.ca
+-- local ta = setting.ta
+-- if ca and ca > 0 then
+-- setattr(first,a_colorspace,ma == 0 and 1 or ma)
+-- setattr(first,a_color,ca)
+-- end
+-- if ta and ta > 0 then
+-- setattr(first,a_transparency,ta)
+-- end
+-- --
+-- local width = getfield(first,"width")
+-- local height = getfield(first,"height")
+-- local depth = getfield(first,"depth")
+-- local distance = setting.distance or 0
+-- local voffset = setting.voffset or 0
+-- local hoffset = setting.hoffset or 0
+-- local parindent = tex.parindent
+-- local baseline = texget("baselineskip").width
+-- local lines = tonumber(setting.n) or 0
+-- --
+-- setfield(first,"xoffset",- width - hoffset - distance - parindent)
+-- setfield(first,"yoffset",- voffset) -- no longer - height here
+-- -- We pack so that successive handling cannot touch the dropped cap. Packaging
+-- -- in a hlist is also needed because we cannot locally adapt e.g. parindent (not
+-- -- yet stored in with localpar).
+-- setfield(first,"prev",nil)
+-- setfield(first,"next",nil)
+-- local h = hpack_nodes(first)
+-- setfield(h,"width",0)
+-- setfield(h,"height",0)
+-- setfield(h,"depth",0)
+-- setfield(prev,"next",h)
+-- setfield(next,"prev",h)
+-- setfield(h,"next",next)
+-- setfield(h,"prev",prev)
+-- first = h
+-- -- end of packaging
+-- if setting.location == v_margin then
+-- -- okay
+-- else
+-- if lines == 0 then -- safeguard, not too precise
+-- lines = ceil((height+voffset) / baseline)
+-- end
+-- -- We cannot set parshape yet ... when we can I'll add a slope
+-- -- option (positive and negative, in emwidth).
+-- local hangafter = - lines
+-- local hangindent = width + distance + parindent
+-- if trace_initials then
+-- report_initials("setting hangafter to %i and hangindent to %p",hangafter,hangindent)
+-- end
+-- tex.hangafter = hangafter
+-- tex.hangindent = hangindent
+-- if parindent ~= 0 then
+-- insert_after(first,first,new_kern(-parindent))
+-- end
+-- end
+-- done = true
+-- end
+-- end
+-- return head, done
+-- end
+
actions[v_default] = function(head,setting)
local done = false
- if head.id == whatsit_code and head.subtype == localpar_code then
+ if getid(head) == whatsit_code and getsubtype(head) == localpar_code then
-- begin of par
- local first = head.next
+ local first = getnext(head)
+ local indent = false
-- parbox .. needs to be set at 0
- if first and first.id == hlist_code then
- first = first.next
+ if first and getid(first) == hlist_code then
+ first = getnext(first)
+ indent = true
end
-- we need to skip over kerns and glues (signals)
- while first and first.id ~= glyph_code do
- first = first.next
+ while first and getid(first) ~= glyph_code do
+ first = getnext(first)
end
- if first and first.id == glyph_code then
- local char = first.char
- local prev = first.prev
- local next = first.next
- -- if prev.id == hlist_code then
- -- -- set the width to 0
- -- end
- if next and next.id == kern_node then
- next.kern = 0
- end
- if setting.font then
- first.font = setting.font
- end
- if setting.dynamic > 0 then
- first[0] = setting.dynamic
- end
- -- can be a helper
- local ma = setting.ma or 0
- local ca = setting.ca
- local ta = setting.ta
- if ca and ca > 0 then
- first[a_colorspace] = ma == 0 and 1 or ma
- first[a_color] = ca
- end
- if ta and ta > 0 then
- first[a_transparency] = ta
- end
- --
- local width = first.width
- local height = first.height
- local depth = first.depth
+ if first and getid(first) == glyph_code then
+ local ma = setting.ma or 0
+ local ca = setting.ca
+ local ta = setting.ta
+ local last = first
local distance = setting.distance or 0
local voffset = setting.voffset or 0
local hoffset = setting.hoffset or 0
local parindent = tex.parindent
local baseline = texget("baselineskip").width
local lines = tonumber(setting.n) or 0
+ local dynamic = setting.dynamic
+ local font = setting.font
+ local method = settings_to_hash(setting.method)
+ local length = tonumber(setting.m) or 1
--
- first.xoffset = - width - hoffset - distance - parindent
- first.yoffset = - voffset -- no longer - height here
+ -- 1 char | n chars | skip first quote | ignore punct | keep punct
+ --
+ if getattr(first,a_initial) then
+ for current in traverse(getnext(first)) do
+ if getattr(current,a_initial) then
+ last = current
+ else
+ break
+ end
+ end
+ elseif method[v_auto] then
+ local char = getchar(first)
+ local kind = category(char)
+ if kind == "po" or kind == "pi" then
+ if method[v_first] then
+ -- remove quote etc before initial
+ local next = getnext(first)
+ if not next then
+ -- don't start with a quote or so
+ return head, false
+ end
+ last = nil
+ for current in traverse_id(glyph_code,next) do
+ head, first = remove_node(head,first,true)
+ first = current
+ last = first
+ break
+ end
+ if not last then
+ -- no following glyph or so
+ return head, false
+ end
+ else
+ -- keep quote etc with initial
+ local next = getnext(first)
+ if not next then
+ -- don't start with a quote or so
+ return head, false
+ end
+ for current in traverse_id(glyph_code,next) do
+ last = current
+ break
+ end
+ if last == first then
+ return head, false
+ end
+ end
+ elseif kind == "pf" then
+ -- error: final quote
+ else
+ -- okay
+ end
+ -- maybe also: get all A. B. etc
+ local next = getnext(first)
+ if next then
+ for current in traverse_id(glyph_code,next) do
+ local char = getchar(current)
+ local kind = category(char)
+ if kind == "po" then
+ if method[v_last] then
+ -- remove period etc after initial
+ remove_node(head,current,true)
+ else
+ -- keep period etc with initial
+ last = current
+ end
+ end
+ break
+ end
+ end
+ else
+ for current in traverse_id(glyph_code,first) do
+ last = current
+ if length <= 1 then
+ break
+ else
+ length = length - 1
+ end
+ end
+ end
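+            -- at this point first .. last delimits the glyphs that make up the
+            -- initial; zero embedded kerns and apply font, dynamic feature and
+            -- color attributes to that range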
+ local current = first
+ while true do
+ local id = getid(current)
+ if id == kern_code then
+ setfield(current,"kern",0)
+ elseif id == glyph_code then
+ local next = getnext(current)
+ if font then
+ setfield(current,"font",font)
+ end
+ if dynamic > 0 then
+ setattr(current,0,dynamic)
+ end
+-- apply font
+
+-- local g = nodes.copy(tonode(current))
+-- g.subtype = 0
+-- nodes.handlers.characters(g)
+-- nodes.handlers.protectglyphs(g)
+-- setfield(current,"char",g.char)
+-- nodes.free(g)
+
+ -- can be a helper
+ if ca and ca > 0 then
+ setattr(current,a_colorspace,ma == 0 and 1 or ma)
+ setattr(current,a_color,ca)
+ end
+ if ta and ta > 0 then
+ setattr(current,a_transparency,ta)
+ end
+ --
+ end
+ if current == last then
+ break
+ else
+ current = getnext(current)
+ end
+ end
-- We pack so that successive handling cannot touch the dropped cap. Packaging
-- in a hlist is also needed because we cannot locally adapt e.g. parindent (not
-- yet stored in with localpar).
- first.prev = nil
- first.next = nil
- local h = hpack_nodes(first)
- h.width = 0
- h.height = 0
- h.depth = 0
- prev.next = h
- next.prev = h
- h.next = next
- h.prev = prev
-
- -- end of packaging
+ local prev = getprev(first)
+ local next = getnext(last)
+ --
+ setfield(first,"prev",nil)
+ setfield(last,"next",nil)
+ local dropper = hpack_nodes(first)
+ local width = getfield(dropper,"width")
+ local height = getfield(dropper,"height")
+ local depth = getfield(dropper,"depth")
+ setfield(dropper,"width",0)
+ setfield(dropper,"height",0)
+ setfield(dropper,"depth",0)
+ --
+ setfield(prev,"next",dropper)
+ if next then
+ setfield(next,"prev",dropper)
+ end
+ setfield(dropper,"next",next)
+ setfield(dropper,"prev",prev)
+ --
+ if next then
+ local current = next
+ while current do
+ local id = getid(current)
+ if id == glue_code or id == kern_code then
+ local next = getnext(current)
+ -- remove_node(current,current,true) -- created an invalid next link and dangling remains
+ remove_node(head,current,true)
+ current = next
+ else
+ break
+ end
+ end
+ end
+ --
+ local hoffset = width + hoffset + distance + (indent and parindent or 0)
+ for current in traverse_id(glyph_code,first) do
+ setfield(current,"xoffset",- hoffset )
+ setfield(current,"yoffset",- voffset) -- no longer - height here
+ if current == last then
+ break
+ end
+ end
+ --
+ first = dropper
+ --
if setting.location == v_margin then
-- okay
else
@@ -161,15 +427,15 @@ actions[v_default] = function(head,setting)
-- We cannot set parshape yet ... when we can I'll add a slope
-- option (positive and negative, in emwidth).
local hangafter = - lines
- local hangindent = width + distance + parindent
+ local hangindent = width + distance
if trace_initials then
report_initials("setting hangafter to %i and hangindent to %p",hangafter,hangindent)
end
tex.hangafter = hangafter
tex.hangindent = hangindent
- if parindent ~= 0 then
- insert_after(first,first,new_kern(-parindent))
- end
+ end
+ if indent then
+ insert_after(first,first,new_kern(-parindent))
end
done = true
end
@@ -178,16 +444,17 @@ actions[v_default] = function(head,setting)
end
function initials.handler(head)
+ head = tonut(head)
local start = head
local attr = nil
while start do
- attr = start[a_initial]
+ attr = getattr(start,a_initial)
if attr then
break
- elseif start.id == glyph then
+ elseif getid(start) == glyph then
break
else
- start = start.next
+ start = getnext(start)
end
end
if attr then
@@ -201,8 +468,8 @@ function initials.handler(head)
report_initials("processing initials, alternative %a",alternative)
end
local head, done = action(head,settings)
- return head, done
+ return tonode(head), done
end
end
- return head, false
+ return tonode(head), false
end
diff --git a/tex/context/base/typo-drp.mkiv b/tex/context/base/typo-drp.mkiv
index 78f6df0a2..2520c3bfd 100644
--- a/tex/context/base/typo-drp.mkiv
+++ b/tex/context/base/typo-drp.mkiv
@@ -57,6 +57,8 @@
\setupinitial
[\c!location=\v!text,
\c!n=3,
+ \c!m=1,
+ \c!method=\v!none,
% \s!font=Bold sa 4,
% \s!font=Bold ht \measure{initial:n},
\s!font=Bold cp \measure{initial:n},
@@ -67,24 +69,25 @@
\c!color=,
\c!before=\blank]
-\unexpanded\def\placeinitial
+\unexpanded\def\placeinitial % we cannot group so no settings
{\dosingleempty\typo_initials_place}
\def\typo_initials_place[#1]% old command
{\par
\namedinitialparameter{#1}\c!before
- \setinitial[#1]}
+ \setinitial[#1]\relax}
\unexpanded\def\setinitial
- {\dosingleempty\typo_initials_set}
+ {\dodoubleempty\typo_initials_set}
-\unexpanded\def\typo_initials_set[#1]%
- {\edef\typo_initial_handle{\typo_initial_handle_indeed{#1}}}
+\unexpanded\def\typo_initials_set[#1][#2]%
+ {\edef\typo_initial_handle{\typo_initial_handle_indeed{#1}{#2}}}
-\unexpanded\def\typo_initial_handle_indeed#1%
+\unexpanded\def\typo_initial_handle_indeed#1#2%
{\dontleavehmode
\begingroup
\edef\currentinitial{#1}%
+ \setupcurrentinitial[#2]%
\scratchcounter \initialparameter\c!n\relax
\scratchdistance\initialparameter\c!distance\relax
\scratchhoffset \initialparameter\c!hoffset \relax
@@ -95,24 +98,43 @@
{\definedfont[\initialparameter\s!font]}
{\useinitialstyleparameter\c!style}%
\useinitialcolorparameter\c!color
- \ctxcommand{setinitial{
- location = "\initialparameter\c!location",
- enabled = true,
- n = \number\scratchcounter,
- distance = \number\scratchdistance,
- hoffset = \number\scratchhoffset,
- voffset = \number\scratchvoffset,
- ma = \the\attribute\colormodelattribute ,
- ca = \the\attribute\colorattribute ,
- ta = \the\attribute\transparencyattribute,
- font = \fontid\font,
- dynamic = \number\attribute\zerocount, % it's a bit over the top to support this here
- }}%
+ \edef\p_text{\initialparameter\c!text}% optional
+ \clf_setinitial
+ location {\initialparameter\c!location}%
+ enabled true\space
+ n \scratchcounter
+ m \numexpr\initialparameter\c!m\relax
+ method {\initialparameter\c!method}%
+ distance \scratchdistance
+ hoffset \scratchhoffset
+ voffset \scratchvoffset
+ ma \attribute\colormodelattribute
+ ca \attribute\colorattribute
+ ta \attribute\transparencyattribute
+ font \fontid\font
+ dynamic \attribute\zerocount % it's a bit over the top to support this here
+ \relax
\stopluacode
\kern\zeropoint % we need a node
+ \p_text
\endgroup
\globallet\typo_initial_handle\relax}
\let\typo_initial_handle\relax
+% \setupbodyfont[dejavu,9pt]
+%
+% \startbuffer
+% \setinitial[two] D. E. Knuth \ignorespaces\input knuth \par
+% \setinitial[two] Knuth \ignorespaces\input knuth \par
+% \setinitial[two] \quotation{D. E. Knuth} \ignorespaces\input knuth \par
+% \setinitial[two] \quotation {Knuth} \ignorespaces\input knuth \par
+% \setinitial[two] [text={D.E. Knuth}] \ignorespaces\input knuth \par
+% \setinitial[two] [m=4] D. E. Knuth \ignorespaces\input knuth \par
+% \stopbuffer
+%
+% \type{m=2} \start \defineinitial[two][m=2,method=none] \getbuffer \page \stop
+% \type{m=1,method=auto} \start \defineinitial[two][m=1,method=auto] \getbuffer \page \stop
+% \type{m=1,method={auto,first,last}} \start \defineinitial[two][m=1,method={first,auto,last}] \getbuffer \page \stop
+
\protect \endinput
diff --git a/tex/context/base/typo-dua.lua b/tex/context/base/typo-dua.lua
index ec85a3d9f..1e9b325a6 100644
--- a/tex/context/base/typo-dua.lua
+++ b/tex/context/base/typo-dua.lua
@@ -66,11 +66,25 @@ local formatters = string.formatters
local directiondata = characters.directions
local mirrordata = characters.mirrors
-local remove_node = nodes.remove
-local insert_node_after = nodes.insert_after
-local insert_node_before = nodes.insert_before
-
-local nodepool = nodes.pool
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+local nutstring = nuts.tostring
+
+local getnext = nuts.getnext
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getlist = nuts.getlist
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+
+local remove_node = nuts.remove
+local copy_node = nuts.copy
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+
+local nodepool = nuts.pool
local new_textdir = nodepool.textdir
local nodecodes = nodes.nodecodes
@@ -93,7 +107,7 @@ local maximum_stack = 60 -- probably spec but not needed
local directions = typesetters.directions
local setcolor = directions.setcolor
-local a_directions = attributes.private('directions')
+----- a_directions = attributes.private('directions')
local remove_controls = true directives.register("typesetters.directions.one.removecontrols",function(v) remove_controls = v end)
@@ -189,17 +203,17 @@ local function build_list(head) -- todo: store node pointer ... saves loop
local size = 0
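+    -- the node list is flattened into a plain Lua table of { char, direction,
+    -- original, level } entries so that the bidi algorithm below can work on a
+    -- simple array instead of on nodes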
while current do
size = size + 1
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- local chr = current.char
+ local chr = getchar(current)
local dir = directiondata[chr]
list[size] = { char = chr, direction = dir, original = dir, level = 0 }
- current = current.next
+ current = getnext(current)
elseif id == glue_code then
list[size] = { char = 0x0020, direction = "ws", original = "ws", level = 0 }
- current = current.next
- elseif id == whatsit_code and current.subtype == dir_code then
- local dir = current.dir
+ current = getnext(current)
+ elseif id == whatsit_code and getsubtype(current) == dir_code then
+ local dir = getfield(current,"dir")
if dir == "+TLT" then
list[size] = { char = 0x202A, direction = "lre", original = "lre", level = 0 }
elseif dir == "+TRT" then
@@ -209,27 +223,27 @@ local function build_list(head) -- todo: store node pointer ... saves loop
else
list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, id = id } -- object replacement character
end
- current = current.next
+ current = getnext(current)
elseif id == math_code then
local skip = 0
- current = current.next
- while current.id ~= math_code do
+ current = getnext(current)
+ while getid(current) ~= math_code do
skip = skip + 1
- current = current.next
+ current = getnext(current)
end
- skip = skip + 1
- current = current.next
+ skip = skip + 1
+ current = getnext(current)
list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, skip = skip, id = id }
else
local skip = 0
local last = id
- current = current.next
+ current = getnext(current)
while n do
- local id = current.id
- if id ~= glyph_code and id ~= glue_code and not (id == whatsit_code and current.subtype == dir_code) then
+ local id = getid(current)
+ if id ~= glyph_code and id ~= glue_code and not (id == whatsit_code and getsubtype(current) == dir_code) then
skip = skip + 1
last = id
- current = current.next
+ current = getnext(current)
else
break
end
@@ -289,8 +303,8 @@ local function find_run_limit_b_s_ws_on(list,start,limit)
end
local function get_baselevel(head,list,size) -- todo: skip if first is object (or pass head and test for local_par)
- if head.id == whatsit_code and head.subtype == localpar_code then
- if head.dir == "TRT" then
+ if getid(head) == whatsit_code and getsubtype(head) == localpar_code then
+ if getfield(head,"dir") == "TRT" then
return 1, "TRT", true
else
return 0, "TLT", true
@@ -677,58 +691,66 @@ local function apply_to_list(list,size,head,pardir)
report_directions("fatal error, size mismatch")
break
end
- local id = current.id
+ local id = getid(current)
local entry = list[index]
local begindir = entry.begindir
local enddir = entry.enddir
if id == glyph_code then
local mirror = entry.mirror
if mirror then
- current.char = mirror
+ setfield(current,"char",mirror)
end
if trace_directions then
local direction = entry.direction
setcolor(current,direction,direction ~= entry.original,mirror)
end
elseif id == hlist_code or id == vlist_code then
- current.dir = pardir -- is this really needed?
+ setfield(current,"dir",pardir) -- is this really needed?
elseif id == glue_code then
- if enddir and current.subtype == parfillskip_code then
+ if enddir and getsubtype(current) == parfillskip_code then
-- insert the last enddir before \parfillskip glue
- head = insert_node_before(head,current,new_textdir(enddir))
+ local d = new_textdir(enddir)
+ -- setfield(d,"attr",getfield(current,"attr"))
+ head = insert_node_before(head,current,d)
enddir = false
done = true
end
elseif id == whatsit_code then
- if begindir and current.subtype == localpar_code then
+ if begindir and getsubtype(current) == localpar_code then
-- local_par should always be the 1st node
- head, current = insert_node_after(head,current,new_textdir(begindir))
+ local d = new_textdir(begindir)
+ -- setfield(d,"attr",getfield(current,"attr"))
+ head, current = insert_node_after(head,current,d)
begindir = nil
done = true
end
end
if begindir then
- head = insert_node_before(head,current,new_textdir(begindir))
+ local d = new_textdir(begindir)
+ -- setfield(d,"attr",getfield(current,"attr"))
+ head = insert_node_before(head,current,d)
done = true
end
local skip = entry.skip
if skip and skip > 0 then
for i=1,skip do
- current = current.next
+ current = getnext(current)
end
end
if enddir then
- head, current = insert_node_after(head,current,new_textdir(enddir))
+ local d = new_textdir(enddir)
+ -- setfield(d,"attr",getfield(current,"attr"))
+ head, current = insert_node_after(head,current,d)
done = true
end
if not entry.remove then
- current = current.next
+ current = getnext(current)
elseif remove_controls then
-- X9
head, current = remove_node(head,current,true)
done = true
else
- current = current.next
+ current = getnext(current)
end
index = index + 1
end
@@ -736,6 +758,7 @@ local function apply_to_list(list,size,head,pardir)
end
local function process(head)
+ head = tonut(head)
local list, size = build_list(head)
local baselevel, pardir, dirfound = get_baselevel(head,list,size) -- we always have an inline dir node in context
if not dirfound and trace_details then
@@ -752,7 +775,7 @@ local function process(head)
report_directions("result : %s",show_done(list,size))
end
head, done = apply_to_list(list,size,head,pardir)
- return head, done
+ return tonode(head), done
end
directions.installhandler(interfaces.variables.one,process)
diff --git a/tex/context/base/typo-dub.lua b/tex/context/base/typo-dub.lua
index 3ecfce364..a1c9de752 100644
--- a/tex/context/base/typo-dub.lua
+++ b/tex/context/base/typo-dub.lua
@@ -54,11 +54,26 @@ local directiondata = characters.directions
local mirrordata = characters.mirrors
local textclassdata = characters.textclasses
-local remove_node = nodes.remove
-local insert_node_after = nodes.insert_after
-local insert_node_before = nodes.insert_before
-
-local nodepool = nodes.pool
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+local nutstring = nuts.tostring
+
+local getnext = nuts.getnext
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+
+local remove_node = nuts.remove
+local copy_node = nuts.copy
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+
+local nodepool = nuts.pool
local new_textdir = nodepool.textdir
local nodecodes = nodes.nodecodes
@@ -83,11 +98,11 @@ local getfences = directions.getfences
local a_directions = attributes.private('directions')
local a_textbidi = attributes.private('textbidi')
-local a_state = attributes.private('state')
+----- a_state = attributes.private('state')
-local s_isol = fonts.analyzers.states.isol
+----- s_isol = fonts.analyzers.states.isol
--- current[a_state] = s_isol -- maybe better have a special bidi attr value -> override (9) -> todo
+----- current[a_state] = s_isol -- maybe better have a special bidi attr value -> override (9) -> todo
local remove_controls = true directives.register("typesetters.directions.removecontrols",function(v) remove_controls = v end)
----- analyze_fences = true directives.register("typesetters.directions.analyzefences", function(v) analyze_fences = v end)
@@ -131,7 +146,7 @@ local report_directions = logs.reporter("typesetting","directions two")
--
-- l : left to right
-- r : right to left
--- al : right to legt arabic (esp punctuation issues)
+-- al : right to left arabic (esp punctuation issues)
-- explicit: (new)
--
@@ -242,17 +257,17 @@ local function build_list(head) -- todo: store node pointer ... saves loop
local size = 0
while current do
size = size + 1
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- local chr = current.char
+ local chr = getchar(current)
local dir = directiondata[chr]
list[size] = { char = chr, direction = dir, original = dir, level = 0 }
- current = current.next
+ current = getnext(current)
elseif id == glue_code then
list[size] = { char = 0x0020, direction = "ws", original = "ws", level = 0 }
- current = current.next
- elseif id == whatsit_code and current.subtype == dir_code then
- local dir = current.dir
+ current = getnext(current)
+ elseif id == whatsit_code and getsubtype(current) == dir_code then
+ local dir = getfield(current,"dir")
if dir == "+TLT" then
list[size] = { char = 0x202A, direction = "lre", original = "lre", level = 0 }
elseif dir == "+TRT" then
@@ -262,27 +277,27 @@ local function build_list(head) -- todo: store node pointer ... saves loop
else
list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, id = id } -- object replacement character
end
- current = current.next
+ current = getnext(current)
elseif id == math_code then
local skip = 0
- current = current.next
- while current.id ~= math_code do
+ current = getnext(current)
+ while getid(current) ~= math_code do
skip = skip + 1
- current = current.next
+ current = getnext(current)
end
skip = skip + 1
- current = current.next
+ current = getnext(current)
list[size] = { char = 0xFFFC, direction = "on", original = "on", level = 0, skip = skip, id = id }
else
local skip = 0
local last = id
- current = current.next
+ current = getnext(current)
while n do
- local id = current.id
- if id ~= glyph_code and id ~= glue_code and not (id == whatsit_code and current.subtype == dir_code) then
+ local id = getid(current)
+ if id ~= glyph_code and id ~= glue_code and not (id == whatsit_code and getsubtype(current) == dir_code) then
skip = skip + 1
last = id
- current = current.next
+ current = getnext(current)
else
break
end
@@ -311,7 +326,7 @@ end
-- ש ( ל ( א ) כ ) 2-8,4-6
-- ש ( ל [ א ] כ ) 2-8,4-6
-function resolve_fences(list,size,start,limit)
+local function resolve_fences(list,size,start,limit)
     -- N0: funny effects, not always better, so it's an option
local stack = { }
local top = 0
@@ -365,8 +380,8 @@ end
-- the action
local function get_baselevel(head,list,size) -- todo: skip if first is object (or pass head and test for local_par)
- if head.id == whatsit_code and head.subtype == localpar_code then
- if head.dir == "TRT" then
+ if getid(head) == whatsit_code and getsubtype(head) == localpar_code then
+ if getfield(head,"dir") == "TRT" then
return 1, "TRT", true
else
return 0, "TLT", true
@@ -785,58 +800,66 @@ local function apply_to_list(list,size,head,pardir)
report_directions("fatal error, size mismatch")
break
end
- local id = current.id
+ local id = getid(current)
local entry = list[index]
local begindir = entry.begindir
local enddir = entry.enddir
if id == glyph_code then
local mirror = entry.mirror
if mirror then
- current.char = mirror
+ setfield(current,"char",mirror)
end
if trace_directions then
local direction = entry.direction
setcolor(current,direction,direction ~= entry.original,mirror)
end
elseif id == hlist_code or id == vlist_code then
- current.dir = pardir -- is this really needed?
+ setfield(current,"dir",pardir) -- is this really needed?
elseif id == glue_code then
- if enddir and current.subtype == parfillskip_code then
+ if enddir and getsubtype(current) == parfillskip_code then
-- insert the last enddir before \parfillskip glue
- head = insert_node_before(head,current,new_textdir(enddir))
+ local d = new_textdir(enddir)
+ -- setfield(d,"attr",getfield(current,"attr"))
+ head = insert_node_before(head,current,d)
enddir = false
done = true
end
elseif id == whatsit_code then
- if begindir and current.subtype == localpar_code then
+ if begindir and getsubtype(current) == localpar_code then
-- local_par should always be the 1st node
- head, current = insert_node_after(head,current,new_textdir(begindir))
+ local d = new_textdir(begindir)
+ -- setfield(d,"attr",getfield(current,"attr"))
+ head, current = insert_node_after(head,current,d)
begindir = nil
done = true
end
end
if begindir then
- head = insert_node_before(head,current,new_textdir(begindir))
+ local d = new_textdir(begindir)
+ -- setfield(d,"attr",getfield(current,"attr"))
+ head = insert_node_before(head,current,d)
done = true
end
local skip = entry.skip
if skip and skip > 0 then
for i=1,skip do
- current = current.next
+ current = getnext(current)
end
end
if enddir then
- head, current = insert_node_after(head,current,new_textdir(enddir))
+ local d = new_textdir(enddir)
+ -- setfield(d,"attr",getfield(current,"attr"))
+ head, current = insert_node_after(head,current,d)
done = true
end
if not entry.remove then
- current = current.next
+ current = getnext(current)
elseif remove_controls then
-- X9
head, current = remove_node(head,current,true)
done = true
else
- current = current.next
+ current = getnext(current)
end
index = index + 1
end
@@ -844,8 +867,9 @@ local function apply_to_list(list,size,head,pardir)
end
local function process(head)
+ head = tonut(head)
-- for the moment a whole paragraph property
- local attr = head[a_directions]
+ local attr = getattr(head,a_directions)
local analyze_fences = getfences(attr)
--
local list, size = build_list(head)
@@ -864,7 +888,7 @@ local function process(head)
report_directions("result : %s",show_done(list,size))
end
head, done = apply_to_list(list,size,head,pardir)
- return head, done
+ return tonode(head), done
end
directions.installhandler(interfaces.variables.two,process)
diff --git a/tex/context/base/typo-fln.lua b/tex/context/base/typo-fln.lua
index 4c97af450..2076a7464 100644
--- a/tex/context/base/typo-fln.lua
+++ b/tex/context/base/typo-fln.lua
@@ -23,25 +23,40 @@ local firstlines = typesetters.firstlines
local nodes = nodes
local tasks = nodes.tasks
-local getbox = nodes.getbox
+local context = context
+local implement = interfaces.implement
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getid = nuts.getid
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getlist = nuts.getlist
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getbox = nuts.getbox
+
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local disc_code = nodecodes.disc
local kern_code = nodecodes.kern
-local traverse_id = nodes.traverse_id
-local free_node_list = nodes.flush_list
-local free_node = nodes.flush_node
-local copy_node_list = nodes.copy_list
-local insert_node_after = nodes.insert_after
-local insert_node_before = nodes.insert_before
-local hpack_node_list = nodes.hpack
-local remove_node = nodes.remove
+local traverse_id = nuts.traverse_id
+local free_node_list = nuts.flush_list
+local free_node = nuts.flush_node
+local copy_node_list = nuts.copy_list
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+local hpack_node_list = nuts.hpack
+local remove_node = nuts.remove
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local newpenalty = nodepool.penalty
local newkern = nodepool.kern
-local tracerrule = nodes.tracers.pool.nodes.rule
+local tracerrule = nodes.tracers.pool.nuts.rule
local actions = { }
firstlines.actions = actions
@@ -73,7 +88,21 @@ function firstlines.set(specification)
texsetattribute(a_firstline,1)
end
-commands.setfirstline = firstlines.set
+implement {
+ name = "setfirstline",
+ actions = firstlines.set,
+ arguments = {
+ {
+ { "alternative" },
+ { "font", "integer" },
+ { "dynamic", "integer" },
+ { "ma", "integer" },
+ { "ca", "integer" },
+ { "ta", "integer" },
+ { "n", "integer" },
+ }
+ }
+}
actions[v_line] = function(head,setting)
-- local attribute = fonts.specifiers.contextnumber(setting.feature) -- was experimental
@@ -92,9 +121,9 @@ actions[v_line] = function(head,setting)
local linebreaks = { }
for g in traverse_id(glyph_code,temp) do
if dynamic > 0 then
- g[0] = dynamic
+ setattr(g,0,dynamic)
end
- g.font = font
+ setfield(g,"font",font)
end
local start = temp
local list = temp
@@ -108,7 +137,7 @@ actions[v_line] = function(head,setting)
hsize = hsize - hangindent
end
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
n = n + 1
elseif id == disc_code then
@@ -117,7 +146,7 @@ actions[v_line] = function(head,setting)
-- this could be an option
elseif n > 0 then
local pack = hpack_node_list(copy_node_list(list,start))
- if pack.width > hsize then
+ if getfield(pack,"width") > hsize then
free_node_list(pack)
list = prev
break
@@ -128,7 +157,7 @@ actions[v_line] = function(head,setting)
nofchars = n
end
end
- start = start.next
+ start = getnext(start)
end
if not linebreaks[i] then
linebreaks[i] = n
@@ -139,18 +168,18 @@ actions[v_line] = function(head,setting)
for i=1,noflines do
local linebreak = linebreaks[i]
while start and n < nofchars do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then -- or id == disc_code then
if dynamic > 0 then
- start[0] = dynamic
+ setattr(start,0,dynamic)
end
- start.font = font
+ setfield(start,"font",font)
if ca and ca > 0 then
- start[a_colorspace] = ma == 0 and 1 or ma
- start[a_color] = ca
+ setattr(start,a_colorspace,ma == 0 and 1 or ma)
+ setattr(start,a_color,ca)
end
if ta and ta > 0 then
- start[a_transparency] = ta
+ setattr(start,a_transparency,ta)
end
n = n + 1
end
@@ -163,7 +192,7 @@ actions[v_line] = function(head,setting)
head, start = insert_node_after(head,start,newpenalty(-10000)) -- break
break
end
- start = start.next
+ start = getnext(start)
end
end
free_node_list(temp)
@@ -182,7 +211,7 @@ actions[v_word] = function(head,setting)
local ca = setting.ca
local ta = setting.ta
while start do
- local id = start.id
+ local id = getid(start)
-- todo: delete disc nodes
if id == glyph_code then
if not ok then
@@ -190,16 +219,16 @@ actions[v_word] = function(head,setting)
ok = true
end
if ca and ca > 0 then
- start[a_colorspace] = ma == 0 and 1 or ma
- start[a_color] = ca
+ setattr(start,a_colorspace,ma == 0 and 1 or ma)
+ setattr(start,a_color,ca)
end
if ta and ta > 0 then
- start[a_transparency] = ta
+ setattr(start,a_transparency,ta)
end
if dynamic > 0 then
- start[0] = dynamic
+ setattr(start,0,dynamic)
end
- start.font = font
+ setfield(start,"font",font)
elseif id == disc_code then
-- continue
elseif id == kern_code then -- todo: fontkern
@@ -210,7 +239,7 @@ actions[v_word] = function(head,setting)
break
end
end
- start = start.next
+ start = getnext(start)
end
return head, true
end
@@ -218,16 +247,17 @@ end
actions[v_default] = actions[v_line]
function firstlines.handler(head)
+ head = tonut(head)
local start = head
local attr = nil
while start do
- attr = start[a_firstline]
+ attr = getattr(start,a_firstline)
if attr then
break
- elseif start.id == glyph then
+ elseif getid(start) == glyph_code then
break
else
- start = start.next
+ start = getnext(start)
end
end
if attr then
@@ -240,17 +270,18 @@ function firstlines.handler(head)
if trace_firstlines then
report_firstlines("processing firstlines, alternative %a",alternative)
end
- return action(head,settings)
+ local head, done = action(head,settings)
+ return tonode(head), done
end
end
- return head, false
+ return tonode(head), false
end
-- goodie
-function commands.applytofirstcharacter(box,what)
+local function applytofirstcharacter(box,what)
local tbox = getbox(box) -- assumes hlist
- local list = tbox.list
+ local list = getlist(tbox)
local done = nil
for n in traverse_id(glyph_code,list) do
list = remove_node(list,n)
@@ -258,10 +289,10 @@ function commands.applytofirstcharacter(box,what)
break
end
if done then
- tbox.list = list
+ setfield(tbox,"list",list)
local kind = type(what)
if kind == "string" then
- context[what](done)
+ context[what](tonode(done))
elseif kind == "function" then
what(done)
else
@@ -269,3 +300,9 @@ function commands.applytofirstcharacter(box,what)
end
end
end
+
+implement {
+ name = "applytofirstcharacter",
+ actions = applytofirstcharacter,
+ arguments = { "integer", "string" }
+}
diff --git a/tex/context/base/typo-fln.mkiv b/tex/context/base/typo-fln.mkiv
index d8651b459..38a53bfa9 100644
--- a/tex/context/base/typo-fln.mkiv
+++ b/tex/context/base/typo-fln.mkiv
@@ -79,15 +79,15 @@
\begingroup
\edef\currentfirstline{#1}%
\usefirstlinestyleandcolor\c!style\c!color
- \ctxlua{commands.setfirstline {
- alternative = "\firstlineparameter\c!alternative",
- ma = \the\attribute\colormodelattribute,
- ca = \the\attribute\colorattribute,
- ta = \the\attribute\transparencyattribute,
- n = \number\firstlineparameter\c!n,
- font = \fontid\font,
- dynamic = \number\attribute\zerocount,
- }}%
+ \clf_setfirstline
+ alternative {\firstlineparameter\c!alternative}%
+ ma \attribute\colormodelattribute
+ ca \attribute\colorattribute
+ ta \attribute\transparencyattribute
+ n \numexpr\firstlineparameter\c!n\relax
+ font \fontid\font
+ dynamic \attribute\zerocount
+ \relax
\kern\zeropoint % we need a node
% \hskip\zeropoint\s!plus\emwidth\relax % can be an option
\endgroup
@@ -104,7 +104,7 @@
\unexpanded\def\applytofirstcharacter#1%
{\begingroup
\dowithnextbox
- {\ctxcommand{applytofirstcharacter(\number\nextbox,"\strippedcsname#1")}%
+ {\clf_applytofirstcharacter\nextbox{\strippedcsname#1}%
\unhbox\nextbox
\endgroup}%
\hbox}
diff --git a/tex/context/base/typo-inj.lua b/tex/context/base/typo-inj.lua
new file mode 100644
index 000000000..b5d9e1c51
--- /dev/null
+++ b/tex/context/base/typo-inj.lua
@@ -0,0 +1,94 @@
+if not modules then modules = { } end modules ['typo-inj'] = { -- was node-par
+ version = 1.001,
+ comment = "companion to typo-inj.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local tonumber = tonumber
+
+local context = context
+local implement = interfaces.implement
+
+local injectors = { }
+typesetters.injectors = injectors
+local list = { }
+injectors.list = list
+local showall = false
+
+local settings_to_array = utilities.parsers.settings_to_array
+
+local ctx_domarkinjector = context.domarkinjector
+local ctx_doactivateinjector = context.doactivateinjector
+
+table.setmetatableindex(list,function(t,k)
+ local v = {
+ counter = 0,
+ actions = { },
+ show = false,
+ active = false,
+ }
+ t[k] = v
+ return v
+end)
+
+function injectors.reset(name)
+ list[name] = nil
+end
+
+function injectors.set(name,numbers,command)
+ local injector = list[name]
+ local actions = injector.actions
+ local places = settings_to_array(numbers)
+ for i=1,#places do
+ actions[tonumber(places[i])] = command
+ end
+ if not injector.active then
+ ctx_doactivateinjector(name)
+ injector.active = true
+ end
+end
+
+function injectors.show(name)
+ if not name or name == "" then
+ showall = true
+ else
+        local t = settings_to_array(name)
+        for i=1,#t do
+            list[t[i]].show = true
+        end
+ end
+end
+
+function injectors.mark(name,show)
+ local injector = list[name]
+ local n = injector.counter + 1
+ injector.counter = n
+ if showall or injector.show then
+ ctx_domarkinjector(injector.actions[n] and 1 or 0,n)
+ end
+end
+
+function injectors.check(name,n) -- we could also accept n = number : +/- 2
+ local injector = list[name]
+ if n == false then
+ n = injector.counter
+ elseif n == nil then
+ n = injector.counter + 1 -- next (upcoming)
+ else
+ n = tonumber(n) or 0
+ end
+ local action = injector.actions[n]
+ if action then
+ context(action)
+ end
+end
+
+implement { name = "resetinjector", actions = injectors.reset, arguments = "string" }
+implement { name = "showinjector", actions = injectors.show, arguments = "string" }
+implement { name = "setinjector", actions = injectors.set, arguments = { "string", "string", "string" } }
+implement { name = "markinjector", actions = injectors.mark, arguments = "string" }
+implement { name = "checkinjector", actions = injectors.check, arguments = "string" }
+implement { name = "checkpreviousinjector", actions = injectors.check, arguments = { "string", true } }
+implement { name = "checknextinjector", actions = injectors.check }
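+
+-- An illustrative sketch, not part of the module: the injector name "list" and
+-- the slot number below are hypothetical, they only show how counter, actions
+-- and check interact.
+--
+-- injectors.set("list","2","\\column") -- store an action for the second mark
+-- injectors.mark("list")               -- counter becomes 1, nothing stored there
+-- injectors.mark("list")               -- counter becomes 2
+-- injectors.check("list",false)        -- current slot (2): injects \column
+-- injectors.check("list")              -- upcoming slot (3): no action, no output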
diff --git a/tex/context/base/typo-inj.mkiv b/tex/context/base/typo-inj.mkiv
new file mode 100644
index 000000000..46cd9fe45
--- /dev/null
+++ b/tex/context/base/typo-inj.mkiv
@@ -0,0 +1,77 @@
+%D \module
+%D [ file=typo-inj,
+%D version=2014.10.13,
+%D title=\CONTEXT\ Typesetting Macros,
+%D subtitle=Triggering Actions,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Typesetting Macros / Triggering Actions}
+
+%D This is a sort of escape from too automatic typesetting of lists. I have
+%D been thinking of a more generic injector, for instance based on tags (as we
+%D already index each element), but this is sort of fuzzy because the number of a
+%D tag is not always incremented before we check for it. Also, registers and lists
+%D are among the few candidates that cannot be controlled directly by putting
+%D something in the input. So, for the moment I stick to this mechanism, but
+%D future versions of \CONTEXT\ might do it differently. Compatibility is not much
+%D of an issue here as this mechanism is only to be used in final production runs.
+
+\unprotect
+
+\registerctxluafile{typo-inj}{1.001}
+
+% todo: no need in trialmode
+
+%D \showinjector
+%D
+%D \setinjector[register][3][\column]
+%D \setinjector[list] [2][{\blank[3*big]}]
+%D
+%D \starttext
+%D \placelist[section][criterium=text]
+%D \blank[3*big]
+%D \placeregister[index][criterium=text]
+%D \page
+%D \startsection[title=Alpha] first \index{first} \stopsection
+%D \startsection[title=Beta] second \index{second} \stopsection
+%D \startsection[title=Gamma] third \index{third} \stopsection
+%D \startsection[title=Delta] fourth \index{fourth} \stopsection
+%D \stoptext
+
+\unexpanded\def\resetinjector [#1]{\clf_resetinjector{#1}}
+\unexpanded\def\markinjector [#1]{\dontleavehmode\clf_markinjector{#1}}
+\unexpanded\def\checkinjector [#1]{\clf_checkinjector{#1}}
+\unexpanded\def\checknextinjector {\clf_checknextinjector}
+\unexpanded\def\checkpreviousinjector {\clf_checkpreviousinjector}
+\unexpanded\def\dosetinjector [#1][#2][#3]{\clf_setinjector{#1}{#2}{#3}}
+\unexpanded\def\doshowinjector [#1]{\clf_showinjector{#1}}
+
+\unexpanded\def\setinjector {\dotripleargument\dosetinjector}
+\unexpanded\def\showinjector{\dosingleempty\doshowinjector}
+
+\unexpanded\def\domarkinjector#1#2% called at the lua end
+ {\dontleavehmode\llap{\infofont\ifcase#1\else\red\fi<#2>\quad}}
+
+% low level definers .. we could have \injectors_mark and \injectors_check and then
+% use \v!list instead of \s!list
+
+\unexpanded\def\doinstallinjector#1%
+ {\letvalue{typo_injectors_mark_#1}\donothing
+ \letvalue{typo_injectors_check_#1}\donothing}
+
+\unexpanded\def\doactivateinjector#1% used at lua end
+ {\setuxvalue{typo_injectors_mark_#1}{\dontleavehmode\noexpand\clf_markinjector{#1}}%
+ \setuxvalue{typo_injectors_check_#1}{\noexpand\clf_checkinjector{#1}}}
+
+\unexpanded\def\dotestinjector#1% only for testing outside unprotect
+ {\csname typo_injectors_check_#1\endcsname
+ \csname typo_injectors_mark_#1\endcsname}
+
+\protect \endinput
+
diff --git a/tex/context/base/typo-itc.lua b/tex/context/base/typo-itc.lua
index 452b623c8..d3a31fad1 100644
--- a/tex/context/base/typo-itc.lua
+++ b/tex/context/base/typo-itc.lua
@@ -9,9 +9,10 @@ if not modules then modules = { } end modules ['typo-itc'] = {
local utfchar = utf.char
local trace_italics = false trackers.register("typesetters.italics", function(v) trace_italics = v end)
-
local report_italics = logs.reporter("nodes","italics")
+local threshold = 0.5 trackers.register("typesetters.threshold", function(v) threshold = v == true and 0.5 or tonumber(v) end)
+
typesetters.italics = typesetters.italics or { }
local italics = typesetters.italics
@@ -24,21 +25,41 @@ local math_code = nodecodes.math
local tasks = nodes.tasks
-local insert_node_after = node.insert_after
-local delete_node = nodes.delete
-local end_of_math = node.end_of_math
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local getprev = nuts.getprev
+local getnext = nuts.getnext
+local getid = nuts.getid
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local setfield = nuts.setfield
+
+local insert_node_after = nuts.insert_after
+local delete_node = nuts.delete
+local end_of_math = nuts.end_of_math
+local find_tail = nuts.tail
local texgetattribute = tex.getattribute
local texsetattribute = tex.setattribute
local a_italics = attributes.private("italics")
local unsetvalue = attributes.unsetvalue
-local new_correction_kern = nodes.pool.fontkern
-local new_correction_glue = nodes.pool.glue
+local new_correction_kern = nodepool.fontkern
+local new_correction_glue = nodepool.glue
local fonthashes = fonts.hashes
local fontdata = fonthashes.identifiers
local italicsdata = fonthashes.italics
+local exheights = fonthashes.exheights
+
+local implement = interfaces.implement
local forcedvariant = false
@@ -82,105 +103,369 @@ end
-- todo: clear attribute
+local function okay(data,current,font,prevchar,previtalic,char,what)
+ if not data then
+ if trace_italics then
+ report_italics("ignoring %p between %s italic %C and italic %C",previtalic,what,prevchar,char)
+ end
+ return false
+ end
+ if threshold then
+ local ht = getfield(current,"height")
+ local ex = exheights[font]
+ local th = threshold * ex
+ if ht <= th then
+ if trace_italics then
+                report_italics("ignoring correction between %s italic %C and regular %C, height %p less than threshold %p",what,prevchar,char,ht,th)
+ end
+ return false
+ end
+ end
+ if trace_italics then
+ report_italics("inserting %p between %s italic %C and regular %C",previtalic,what,prevchar,char)
+ end
+ return true
+end
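+
+-- A worked example of the threshold test above (numbers assumed rather than
+-- measured): with an ex height of 4.3pt and the default threshold of 0.5, any
+-- glyph lower than about 2.15pt (a period, a comma) gets no italic correction,
+-- which is normally what one wants after an italic f.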
+
+-- maybe: with_attributes(current,n) :
+--
+-- local function correction_kern(kern,n)
+-- return with_attributes(new_correction_kern(kern),n)
+-- end
+
+local function correction_kern(kern,n)
+ local k = new_correction_kern(kern)
+ if n then
+ local a = getfield(n,"attr")
+ if a then -- maybe not
+ setfield(k,"attr",a) -- can be a marked content (border case)
+ end
+ end
+ return k
+end
+
+local function correction_glue(glue,n)
+ local g = new_correction_glue(glue)
+ if n then
+ local a = getfield(n,"attr")
+ if a then -- maybe not
+ setfield(g,"attr",a) -- can be a marked content (border case)
+ end
+ end
+ return g
+end
+
function italics.handler(head)
- local done = false
- local italic = 0
- local lastfont = nil
- local lastattr = nil
- local previous = nil
- local prevchar = nil
- local current = head
- local inserted = nil
+
+ local prev = nil
+ local prevchar = nil
+ local prevhead = tonut(head)
+ local previtalic = 0
+ local previnserted = nil
+
+ local replace = nil
+ local replacechar = nil
+ local replacehead = nil
+ local replaceitalic = 0
+ local replaceinserted = nil
+
+ local post = nil
+ local postchar = nil
+ local posthead = nil
+ local postitalic = 0
+ local postinserted = nil
+
+ local current = prevhead
+ local done = false
+ local lastfont = nil
+ local lastattr = nil
+
while current do
- local id = current.id
+ local id = getid(current)
if id == glyph_code then
- local font = current.font
- local char = current.char
+ local font = getfont(current)
+ local char = getchar(current)
local data = italicsdata[font]
if font ~= lastfont then
- if italic ~= 0 then
- if data then
- if trace_italics then
- report_italics("ignoring %p between italic %C and italic %C",italic,prevchar,char)
- end
- else
- if trace_italics then
- report_italics("inserting %p between italic %C and regular %C",italic,prevchar,char)
- end
- insert_node_after(head,previous,new_correction_kern(italic))
+ if previtalic ~= 0 then
+ if okay(data,current,font,prevchar,previtalic,char,"glyph") then
+ insert_node_after(prevhead,prev,correction_kern(previtalic,current))
done = true
end
- elseif inserted and data then
+ elseif previnserted and data then
if trace_italics then
- report_italics("deleting last correction before %C",char)
+                        report_italics("deleting last correction before %s %C","glyph",char)
end
- delete_node(head,inserted)
+ delete_node(prevhead,previnserted)
else
- -- nothing
+ --
+ if replaceitalic ~= 0 then
+ if okay(data,replace,font,replacechar,replaceitalic,char,"replace") then
+ insert_node_after(replacehead,replace,correction_kern(replaceitalic,current))
+ done = true
+ end
+ replaceitalic = 0
+ elseif replaceinserted and data then
+ if trace_italics then
+ report_italics("deleting last correction before %s %C","replace",char)
+ end
+ delete_node(replacehead,replaceinserted)
+ end
+ --
+ if postitalic ~= 0 then
+ if okay(data,post,font,postchar,postitalic,char,"post") then
+ insert_node_after(posthead,post,correction_kern(postitalic,current))
+ done = true
+ end
+ postitalic = 0
+ elseif postinserted and data then
+ if trace_italics then
+ report_italics("deleting last correction before %s %C","post",char)
+ end
+ delete_node(posthead,postinserted)
+ end
end
+ --
lastfont = font
end
if data then
- local attr = forcedvariant or current[a_italics]
+ local attr = forcedvariant or getattr(current,a_italics)
if attr and attr > 0 then
local cd = data[char]
if not cd then
-- this really can happen
- italic = 0
+ previtalic = 0
else
- italic = cd.italic or cd.italic_correction
- if not italic then
- italic = setitalicinfont(font,char) -- calculated once
- -- italic = 0
+ previtalic = cd.italic or cd.italic_correction
+ if not previtalic then
+ previtalic = setitalicinfont(font,char) -- calculated once
+ -- previtalic = 0
end
- if italic ~= 0 then
+ if previtalic ~= 0 then
lastfont = font
lastattr = attr
- previous = current
+ prev = current
+ -- prevhead = head
prevchar = char
end
end
else
- italic = 0
+ previtalic = 0
end
else
- italic = 0
+ previtalic = 0
end
- inserted = nil
+ previnserted = nil
+ replaceinserted = nil
+ postinserted = nil
elseif id == disc_code then
- -- skip
- elseif id == kern_code then
- inserted = nil
- italic = 0
+ previnserted = nil
+ previtalic = 0
+ replaceinserted = nil
+ replaceitalic = 0
+ postinserted = nil
+ postitalic = 0
+ replace = getfield(current,"replace")
+ if replace then
+ local current = find_tail(replace)
+ if getid(current) ~= glyph_code then
+ current = getprev(current)
+ end
+ if current and getid(current) == glyph_code then
+ local font = getfont(current)
+ local char = getchar(current)
+ local data = italicsdata[font]
+ if data then
+ local attr = forcedvariant or getattr(current,a_italics)
+ if attr and attr > 0 then
+ local cd = data[char]
+ if not cd then
+ -- this really can happen
+ replaceitalic = 0
+ else
+ replaceitalic = cd.italic or cd.italic_correction
+ if not replaceitalic then
+ replaceitalic = setitalicinfont(font,char) -- calculated once
+ -- replaceitalic = 0
+ end
+ if replaceitalic ~= 0 then
+ lastfont = font
+ lastattr = attr
+ replacechar = char
+ replacehead = replace
+ replace = current
+ end
+ end
+-- else
+-- replaceitalic = 0
+ end
+-- else
+-- replaceitalic = 0
+ end
+-- else
+-- replaceitalic = 0
+ end
+-- replaceinserted = nil
+-- else
+-- replaceitalic = 0
+-- replaceinserted = nil
+ end
+ post = getfield(current,"post")
+ if post then
+ local current = find_tail(post)
+ if getid(current) ~= glyph_code then
+ current = getprev(current)
+ end
+ if current and getid(current) == glyph_code then
+ local font = getfont(current)
+ local char = getchar(current)
+ local data = italicsdata[font]
+ if data then
+ local attr = forcedvariant or getattr(current,a_italics)
+ if attr and attr > 0 then
+ local cd = data[char]
+ if not cd then
+ -- this really can happen
+-- postitalic = 0
+ else
+ postitalic = cd.italic or cd.italic_correction
+ if not postitalic then
+ postitalic = setitalicinfont(font,char) -- calculated once
+ -- postitalic = 0
+ end
+ if postitalic ~= 0 then
+ lastfont = font
+ lastattr = attr
+ postchar = char
+ posthead = post
+ post = current
+ end
+ end
+-- else
+-- postitalic = 0
+ end
+-- else
+-- postitalic = 0
+ end
+-- else
+-- postitalic = 0
+ end
+-- postinserted = nil
+-- else
+-- postitalic = 0
+-- postinserted = nil
+ end
+ elseif id == kern_code then -- how about fontkern ?
+ previnserted = nil
+ previtalic = 0
+ replaceinserted = nil
+ replaceitalic = 0
+ postinserted = nil
+ postitalic = 0
elseif id == glue_code then
- if italic ~= 0 then
+ if previtalic ~= 0 then
if trace_italics then
- report_italics("inserting %p between italic %C and glue",italic,prevchar)
+ report_italics("inserting %p between %s italic %C and glue",previtalic,"glyph",prevchar)
+ end
+ previnserted = correction_glue(previtalic,current) -- maybe just add ? else problem with penalties
+ previtalic = 0
+ done = true
+ insert_node_after(prevhead,prev,previnserted)
+ else
+ if replaceitalic ~= 0 then
+ if trace_italics then
+ report_italics("inserting %p between %s italic %C and glue",replaceitalic,"replace",replacechar)
+ end
+ replaceinserted = correction_kern(replaceitalic,current) -- needs to be a kern
+ replaceitalic = 0
+ done = true
+ insert_node_after(replacehead,replace,replaceinserted)
+ end
+ if postitalic ~= 0 then
+ if trace_italics then
+ report_italics("inserting %p between %s italic %C and glue",postitalic,"post",postchar)
+ end
+ postinserted = correction_kern(postitalic,current) -- needs to be a kern
+ postitalic = 0
+ done = true
+ insert_node_after(posthead,post,postinserted)
end
- inserted = new_correction_glue(italic) -- maybe just add ? else problem with penalties
- insert_node_after(head,previous,inserted)
- italic = 0
- done = true
end
elseif id == math_code then
current = end_of_math(current)
- elseif italic ~= 0 then
- if trace_italics then
- report_italics("inserting %p between italic %C and whatever",italic,prevchar)
+ previnserted = nil
+ previtalic = 0
+ replaceinserted = nil
+ replaceitalic = 0
+ postinserted = nil
+ postitalic = 0
+ else
+ if previtalic ~= 0 then
+ if trace_italics then
+ report_italics("inserting %p between %s italic %C and whatever",previtalic,"glyph",prevchar)
+ end
+ insert_node_after(prevhead,prev,correction_kern(previtalic,current))
+ previnserted = nil
+ previtalic = 0
+ replaceinserted = nil
+ replaceitalic = 0
+ postinserted = nil
+ postitalic = 0
+ done = true
+ else
+ if replaceitalic ~= 0 then
+ if trace_italics then
+                        report_italics("inserting %p between %s italic %C and whatever",replaceitalic,"replace",replacechar)
+ end
+ insert_node_after(replacehead,replace,correction_kern(replaceitalic,current))
+ previnserted = nil
+ previtalic = 0
+ replaceinserted = nil
+ replaceitalic = 0
+ postinserted = nil
+ postitalic = 0
+ done = true
+ end
+ if postitalic ~= 0 then
+ if trace_italics then
+ report_italics("inserting %p between %s italic %C and whatever",postitalic,"post",postchar)
+ end
+ insert_node_after(posthead,post,correction_kern(postitalic,current))
+ previnserted = nil
+ previtalic = 0
+ replaceinserted = nil
+ replaceitalic = 0
+ postinserted = nil
+ postitalic = 0
+ done = true
+ end
end
- inserted = nil
- insert_node_after(head,previous,new_correction_kern(italic))
- italic = 0
- done = true
end
- current = current.next
+ current = getnext(current)
end
- if italic ~= 0 and lastattr > 1 then -- more control is needed here
- if trace_italics then
- report_italics("inserting %p between italic %C and end of list",italic,prevchar)
+ if lastattr and lastattr > 1 then -- more control is needed here
+ if previtalic ~= 0 then
+ if trace_italics then
+ report_italics("inserting %p between %s italic %C and end of list",previtalic,"glyph",prevchar)
+ end
+ insert_node_after(prevhead,prev,correction_kern(previtalic,current))
+ done = true
+ else
+ if replaceitalic ~= 0 then
+ if trace_italics then
+ report_italics("inserting %p between %s italic %C and end of list",replaceitalic,"replace",replacechar)
+ end
+ insert_node_after(replacehead,replace,correction_kern(replaceitalic,current))
+ done = true
+ end
+ if postitalic ~= 0 then
+ if trace_italics then
+ report_italics("inserting %p between %s italic %C and end of list",postitalic,"post",postchar)
+ end
+ insert_node_after(posthead,post,correction_kern(postitalic,current))
+ done = true
+ end
end
- insert_node_after(head,previous,new_correction_kern(italic))
- done = true
end
return head, done
end
@@ -210,10 +495,21 @@ function italics.reset()
texsetattribute(a_italics,unsetvalue)
end
+implement {
+ name = "setitaliccorrection",
+ actions = italics.set,
+ arguments = "string"
+}
+
+implement {
+ name = "resetitaliccorrection",
+ actions = italics.reset,
+}
+
local variables = interfaces.variables
local settings_to_hash = utilities.parsers.settings_to_hash
-function commands.setupitaliccorrection(option) -- no grouping !
+local function setupitaliccorrection(option) -- no grouping !
if enable then
enable()
end
@@ -224,6 +520,7 @@ function commands.setupitaliccorrection(option) -- no grouping !
elseif options[variables.always] then
variant = 2
end
+ -- maybe also keywords for threshold
if options[variables.global] then
forcedvariant = variant
texsetattribute(a_italics,unsetvalue)
@@ -236,16 +533,28 @@ function commands.setupitaliccorrection(option) -- no grouping !
end
end
+implement {
+ name = "setupitaliccorrection",
+ actions = setupitaliccorrection,
+ arguments = "string"
+}
+
-- for manuals:
local stack = { }
-function commands.pushitaliccorrection()
- table.insert(stack,{forcedvariant, texgetattribute(a_italics) })
-end
+implement {
+ name = "pushitaliccorrection",
+ actions = function()
+ table.insert(stack,{forcedvariant, texgetattribute(a_italics) })
+ end
+}
-function commands.popitaliccorrection()
- local top = table.remove(stack)
- forcedvariant = top[1]
- texsetattribute(a_italics,top[2])
-end
+implement {
+ name = "popitaliccorrection",
+ actions = function()
+ local top = table.remove(stack)
+ forcedvariant = top[1]
+ texsetattribute(a_italics,top[2])
+ end
+}
diff --git a/tex/context/base/typo-itc.mkvi b/tex/context/base/typo-itc.mkvi
index 4a3bba518..c0aa8e2f6 100644
--- a/tex/context/base/typo-itc.mkvi
+++ b/tex/context/base/typo-itc.mkvi
@@ -42,10 +42,10 @@
% 2 = end of word and end of a list
\unexpanded\def\setitaliccorrection[#code]% rather low level (might go away)
- {\ctxlua{typesetters.italics.set(\number#code)}}
+ {\clf_setitaliccorrection#code\relax}
\unexpanded\def\resetitaliccorrection% rather low level (might go away)
- {\ctxlua{typesetters.italics.reset()}}
+ {\clf_resetitaliccorrection}
% global : no attributes, just always (faster and less memory)
% text : only text
@@ -53,7 +53,7 @@
% none : -
\unexpanded\def\setupitaliccorrection[#settings]%
- {\ctxcommand{setupitaliccorrection("#settings")}}
+ {\clf_setupitaliccorrection{#settings}}
\appendtoks
\attribute\italicsattribute\attributeunsetvalue
diff --git a/tex/context/base/typo-itm.mkiv b/tex/context/base/typo-itm.mkiv
index eb47e4076..0bb8170c7 100644
--- a/tex/context/base/typo-itm.mkiv
+++ b/tex/context/base/typo-itm.mkiv
@@ -109,9 +109,9 @@
\let\p_typo_items_symbol\empty
\let\m_typo_items_symbol\firstofoneargument
\else
- \doifconversiondefinedelse\p_typo_items_symbol
+ \doifelseconversiondefined\p_typo_items_symbol
{\def\m_typo_items_symbol{\convertnumber\p_typo_items_symbol}}
- {\doifsymboldefinedelse\p_typo_items_symbol
+ {\doifelsesymboldefined\p_typo_items_symbol
{\def\m_typo_items_symbol{\symbol[\p_typo_items_symbol]\gobbleoneargument}}
{\let\m_typo_items_symbol\firstofoneargument}}%
\fi\fi
diff --git a/tex/context/base/typo-krn.lua b/tex/context/base/typo-krn.lua
index 56f58bb73..46a977cfd 100644
--- a/tex/context/base/typo-krn.lua
+++ b/tex/context/base/typo-krn.lua
@@ -11,23 +11,38 @@ if not modules then modules = { } end modules ['typo-krn'] = {
local next, type, tonumber = next, type, tonumber
local utfchar = utf.char
-local nodes, node, fonts = nodes, node, fonts
+local nodes = nodes
+local fonts = fonts
-local find_node_tail = node.tail or node.slide
-local free_node = node.free
-local free_nodelist = node.flush_list
-local copy_node = node.copy
-local copy_nodelist = node.copy_list
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local end_of_math = node.end_of_math
+local tasks = nodes.tasks
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+-- check what is used
+
+local find_node_tail = nuts.tail
+local free_node = nuts.free
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local end_of_math = nuts.end_of_math
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
-local nodepool = nodes.pool
-local tasks = nodes.tasks
-
local new_gluespec = nodepool.gluespec
local new_kern = nodepool.kern
local new_glue = nodepool.glue
@@ -36,6 +51,7 @@ local nodecodes = nodes.nodecodes
local kerncodes = nodes.kerncodes
local skipcodes = nodes.skipcodes
local disccodes = nodes.disccodes
+local listcodes = nodes.listcodes
local glyph_code = nodecodes.glyph
local kern_code = nodecodes.kern
@@ -45,7 +61,12 @@ local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local math_code = nodecodes.math
+local box_list_code = listcodes.box
+local user_list_code = listcodes.unknown
+
local discretionary_code = disccodes.discretionary
+local automatic_code = disccodes.automatic
+
local kerning_code = kerncodes.kerning
local userkern_code = kerncodes.userkern
local userskip_code = skipcodes.userskip
@@ -107,10 +128,10 @@ kerns.keeptogether = false -- just for fun (todo: control setting with key/value
-- blue : keep by goodie
function kerns.keepligature(n) -- might become default
- local f = n.font
- local a = n[0] or 0
+ local f = getfont(n)
+ local a = getattr(n,0) or 0
if trace_ligatures then
- local c = n.char
+ local c = getchar(n)
local d = fontdescriptions[f][c].name
if a > 0 and contextsetups[a].keepligatures == v_auto then
report("font %!font:name!, glyph %a, slot %X -> ligature %s, by %s feature %a",f,d,c,"kept","dynamic","keepligatures")
@@ -169,9 +190,9 @@ end
local function kern_injector(fillup,kern)
if fillup then
local g = new_glue(kern)
- local s = g.spec
- s.stretch = kern
- s.stretch_order = 1
+ local s = getfield(g,"spec")
+ setfield(s,"stretch",kern)
+ setfield(s,"stretch_order",1)
return g
else
return new_kern(kern)
@@ -181,212 +202,398 @@ end
local function spec_injector(fillup,width,stretch,shrink)
if fillup then
local s = new_gluespec(width,2*stretch,2*shrink)
- s.stretch_order = 1
+ setfield(s,"stretch_order",1)
return s
else
return new_gluespec(width,stretch,shrink)
end
end
--- needs checking ... base mode / node mode -- also use insert_before/after etc
+-- a simple list injector, no components and such .. just disable ligatures in
+-- kern mode .. maybe not even hyphenate ... anyway, the next one is for simple
+-- sublists .. beware: we can have char -1
+
+local function inject_begin(boundary,prev,keeptogether,krn,ok) -- prev is a glyph
+ local id = getid(boundary)
+ if id == kern_code then
+ if getsubtype(boundary) == kerning_code or getattr(boundary,a_fontkern) then
+ local inject = true
+ if keeptogether then
+ local next = getnext(boundary)
+ if not next or (getid(next) == glyph_code and keeptogether(prev,next)) then
+ inject = false
+ end
+ end
+ if inject then
+ -- not yet ok, as injected kerns can be overlays (from node-inj.lua)
+ setfield(boundary,"subtype",userkern_code)
+ setfield(boundary,"kern",getfield(boundary,"kern") + quaddata[getfont(prev)]*krn)
+ return boundary, true
+ end
+ end
+ elseif id == glyph_code then
+ if keeptogether and keeptogether(boundary,prev) then
+ -- keep 'm
+ else
+ local charone = getchar(prev)
+ if charone > 0 then
+ local font = getfont(boundary)
+ local chartwo = getchar(boundary)
+ local kerns = chardata[font][charone].kerns
+ local kern = new_kern((kerns and kerns[chartwo] or 0) + quaddata[font]*krn)
+ setfield(boundary,"prev",kern)
+ setfield(kern,"next",boundary)
+ return kern, true
+ end
+ end
+ end
+ return boundary, ok
+end
+
+local function inject_end(boundary,next,keeptogether,krn,ok)
+ local tail = find_node_tail(boundary)
+ local id = getid(tail)
+ if id == kern_code then
+ if getsubtype(tail) == kerning_code or getattr(tail,a_fontkern) then
+ local inject = true
+ if keeptogether then
+ local prev = getprev(tail)
+                if getid(prev) == glyph_code and keeptogether(prev,next) then
+ inject = false
+ end
+ end
+ if inject then
+ -- not yet ok, as injected kerns can be overlays (from node-inj.lua)
+ setfield(tail,"subtype",userkern_code)
+ setfield(tail,"kern",getfield(tail,"kern") + quaddata[getfont(next)]*krn)
+ return boundary, true
+ end
+ end
+ elseif id == glyph_code then
+        if keeptogether and keeptogether(tail,next) then
+ -- keep 'm
+ else
+ local charone = getchar(tail)
+ if charone > 0 then
+ local font = getfont(tail)
+ local chartwo = getchar(next)
+ local kerns = chardata[font][charone].kerns
+ local kern = (kerns and kerns[chartwo] or 0) + quaddata[font]*krn
+ insert_node_after(boundary,tail,new_kern(kern))
+ return boundary, true
+ end
+ end
+ end
+ return boundary, ok
+end
-local function do_process(head,force) -- todo: glue so that we can fully stretch
- local start, done, lastfont = head, false, nil
+local function process_list(head,keeptogether,krn,font,okay)
+ local start = head
+ local prev = nil
+ local pid = nil
+ local kern = 0
+ local mark = font and markdata[font]
+ while start do
+ local id = getid(start)
+ if id == glyph_code then
+ if not font then
+ font = getfont(start)
+ mark = markdata[font]
+ kern = quaddata[font]*krn
+ end
+ if prev then
+ local char = getchar(start)
+ if mark[char] then
+ -- skip
+ elseif pid == kern_code then
+ if getsubtype(prev) == kerning_code or getattr(prev,a_fontkern) then
+ local inject = true
+ if keeptogether then
+ local prevprev = getprev(prev)
+ if getid(prevprev) == glyph_code and keeptogether(prevprev,start) then
+ inject = false
+ end
+ end
+ if inject then
+ -- not yet ok, as injected kerns can be overlays (from node-inj.lua)
+ setfield(prev,"subtype",userkern_code)
+ setfield(prev,"kern",getfield(prev,"kern") + kern)
+ okay = true
+ end
+ end
+ elseif pid == glyph_code then
+ if keeptogether and keeptogether(prev,start) then
+ -- keep 'm
+ else
+ local prevchar = getchar(prev)
+ local kerns = chardata[font][prevchar].kerns
+ -- if kerns then
+ -- print("it happens indeed, basemode kerns not yet injected")
+ -- end
+ insert_node_before(head,start,new_kern((kerns and kerns[char] or 0) + kern))
+ okay = true
+ end
+ end
+ end
+ end
+ if start then
+ prev = start
+ pid = id
+ start = getnext(start)
+ end
+ end
+ return head, okay, prev
+end
+
+local function closest_bound(b,get)
+ b = get(b)
+ if b and getid(b) ~= glue_code then
+ while b do
+ if not getattr(b,a_kerns) then
+ break
+ elseif getid(b) == glyph_code then
+ return b, getfont(b)
+ else
+ b = get(b)
+ end
+ end
+ end
+end
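+
+-- closest_bound is only used for the boxed (h|v)list case further on: it steps
+-- away from the box in the given direction, keeps going as long as nodes still
+-- carry the kerning attribute, and returns the first glyph it meets plus its
+-- font, so that a quad based kern can be injected before or after the box; an
+-- adjacent glue makes it give up right away.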
+
+function kerns.handler(head)
+ local head = tonut(head)
+ local start = head
+ local done = false
+ local lastfont = nil
local keepligature = kerns.keepligature
local keeptogether = kerns.keeptogether
- local fillup = false
+ local fillup = false
+ local bound = false
+ local prev = nil
+ local previd = nil
+ local prevchar = nil
+ local prevfont = nil
+ local prevmark = nil
while start do
- -- faster to test for attr first
- local attr = force or start[a_kerns]
+        -- fontkerns don't get the attribute but they always sit between glyphs, so
+        -- they are always a valid bound .. disc nodes also sometimes don't get them
+ local id = getid(start)
+ local attr = getattr(start,a_kerns)
if attr and attr > 0 then
- start[a_kerns] = unsetvalue
+ setattr(start,a_kerns,0) -- unsetvalue)
local krn = mapping[attr]
if krn == v_max then
- krn = .25
+ krn = .25
fillup = true
else
fillup = false
end
- if krn and krn ~= 0 then
- local id = start.id
- if id == glyph_code then
- lastfont = start.font
- local c = start.components
- if not c then
- -- fine
- elseif keepligature and keepligature(start) then
- -- keep 'm
- else
- c = do_process(c,attr)
+ if not krn or krn == 0 then
+ bound = false
+ elseif id == glyph_code then -- we could use the subtype ligature
+ local c = getfield(start,"components")
+ if not c then
+ -- fine
+ elseif keepligature and keepligature(start) then
+ -- keep 'm
+ c = nil
+ else
+ while c do
local s = start
- local p, n = s.prev, s.next
- local tail = find_node_tail(c)
+ local t = find_node_tail(c)
+ local p = getprev(s)
+ local n = getnext(s)
if p then
- p.next = c
- c.prev = p
+ setfield(p,"next",c)
+ setfield(c,"prev",p)
else
head = c
end
if n then
- n.prev = tail
+ setfield(n,"prev",t)
+ setfield(t,"next",n)
end
- tail.next = n
start = c
- s.components = nil
- -- we now leak nodes !
- -- free_node(s)
- done = true
+ setfield(s,"components",nil)
+ free_node(s)
+ c = getfield(start,"components")
end
- local prev = start.prev
- if not prev then
- -- skip
- elseif markdata[lastfont][start.char] then
- -- skip
- else
- local pid = prev.id
- if not pid then
- -- nothing
- elseif pid == kern_code then
- if prev.subtype == kerning_code or prev[a_fontkern] then
- if keeptogether and prev.prev.id == glyph_code and keeptogether(prev.prev,start) then -- we could also pass start
- -- keep 'm
- else
- -- not yet ok, as injected kerns can be overlays (from node-inj.lua)
- prev.subtype = userkern_code
- prev.kern = prev.kern + quaddata[lastfont]*krn -- here
- done = true
- end
- end
- elseif pid == glyph_code then
- if prev.font == lastfont then
- local prevchar, lastchar = prev.char, start.char
- if keeptogether and keeptogether(prev,start) then
- -- keep 'm
- else
- local kerns = chardata[lastfont][prevchar].kerns
- local kern = kerns and kerns[lastchar] or 0
- krn = kern + quaddata[lastfont]*krn -- here
- insert_node_before(head,start,kern_injector(fillup,krn))
- done = true
- end
- else
- krn = quaddata[lastfont]*krn -- here
- insert_node_before(head,start,kern_injector(fillup,krn))
- done = true
- end
- elseif pid == disc_code then
- -- a bit too complicated, we can best not copy and just calculate
- -- but we could have multiple glyphs involved so ...
- local disc = prev -- disc
- local prv, nxt = disc.prev, disc.next
- if disc.subtype == discretionary_code then
- -- maybe we should forget about this variant as there is no glue
- -- possible
- local pre, post, replace = disc.pre, disc.post, disc.replace
- if pre and prv then -- must pair with start.prev
- -- this one happens in most cases
- local before = copy_node(prv)
- pre.prev = before
- before.next = pre
- before.prev = nil
- pre = do_process(before,attr)
- pre = pre.next
- pre.prev = nil
- disc.pre = pre
- free_node(before)
- end
- if post and nxt then -- must pair with start
- local after = copy_node(nxt)
- local tail = find_node_tail(post)
- tail.next = after
- after.prev = tail
- after.next = nil
- post = do_process(post,attr)
- tail.next = nil
- disc.post = post
- free_node(after)
- end
- if replace and prv and nxt then -- must pair with start and start.prev
- local before = copy_node(prv)
- local after = copy_node(nxt)
- local tail = find_node_tail(replace)
- replace.prev = before
- before.next = replace
- before.prev = nil
- tail.next = after
- after.prev = tail
- after.next = nil
- replace = do_process(before,attr)
- replace = replace.next
- replace.prev = nil
- after.prev.next = nil
- disc.replace = replace
- free_node(after)
- free_node(before)
- elseif prv and prv.id == glyph_code and prv.font == lastfont then
- local prevchar, lastchar = prv.char, start.char
- local kerns = chardata[lastfont][prevchar].kerns
- local kern = kerns and kerns[lastchar] or 0
- krn = kern + quaddata[lastfont]*krn -- here
- disc.replace = kern_injector(false,krn) -- only kerns permitted, no glue
- else
- krn = quaddata[lastfont]*krn -- here
- disc.replace = kern_injector(false,krn) -- only kerns permitted, no glue
- end
- else
- -- this one happens in most cases: automatic (-), explicit (\-), regular (patterns)
- if prv and prv.id == glyph_code and prv.font == lastfont then
- local prevchar, lastchar = prv.char, start.char
- local kerns = chardata[lastfont][prevchar].kerns
- local kern = kerns and kerns[lastchar] or 0
- krn = kern + quaddata[lastfont]*krn -- here
- else
- krn = quaddata[lastfont]*krn -- here
- end
- insert_node_before(head,start,kern_injector(fillup,krn))
+ end
+ local char = getchar(start)
+ local font = getfont(start)
+ local mark = markdata[font]
+ if not bound then
+ -- yet
+ elseif mark[char] then
+ -- skip
+ elseif previd == kern_code then
+ if getsubtype(prev) == kerning_code or getattr(prev,a_fontkern) then
+ local inject = true
+ if keeptogether then
+ if previd == glyph_code and keeptogether(prev,start) then
+ inject = false
end
end
+ if inject then
+ -- not yet ok, as injected kerns can be overlays (from node-inj.lua)
+ setfield(prev,"subtype",userkern_code)
+ setfield(prev,"kern",getfield(prev,"kern") + quaddata[font]*krn)
+ done = true
+ end
end
- elseif id == glue_code then
- local subtype = start.subtype
- if subtype == userskip_code or subtype == xspaceskip_code or subtype == spaceskip_code then
- local s = start.spec
- local w = s.width
- if w > 0 then
- local width, stretch, shrink = w+gluefactor*w*krn, s.stretch, s.shrink
- start.spec = spec_injector(fillup,width,stretch*width/w,shrink*width/w)
+ elseif previd == glyph_code then
+ if prevfont == font then
+ if keeptogether and keeptogether(prev,start) then
+ -- keep 'm
+ else
+ local kerns = chardata[font][prevchar].kerns
+ local kern = (kerns and kerns[char] or 0) + quaddata[font]*krn
+ insert_node_before(head,start,kern_injector(fillup,kern))
done = true
end
+ else
+ insert_node_before(head,start,kern_injector(fillup,quaddata[font]*krn))
+ done = true
+ end
+ end
+ prev = start
+ prevchar = char
+ prevfont = font
+ prevmark = mark
+ previd = id
+ bound = true
+ elseif id == disc_code then
+ local prev, next, pglyph, nglyph -- delayed till needed
+ local subtype = getsubtype(start)
+ if subtype == automatic_code then
+ -- this is kind of special, as we have already injected the
+ -- previous kern
+ local prev = getprev(start)
+ local pglyph = prev and getid(prev) == glyph_code
+ languages.expand(start,pglyph and prev)
+ -- we can have a different start now
+ elseif subtype ~= discretionary_code then
+ prev = getprev(start)
+ pglyph = prev and getid(prev) == glyph_code
+ languages.expand(start,pglyph and prev)
+ end
+ local pre = getfield(start,"pre")
+ local post = getfield(start,"post")
+ local replace = getfield(start,"replace")
+            -- we really need to reassign the fields as luatex keeps track of
+            -- the tail in a temp preceding head .. kind of messy so we might
+            -- want to come up with a better solution some day, like real
+            -- pretail etc fields in a disc node
+ --
+ -- maybe i'll merge the now split functions
+ if pre then
+ local okay = false
+ if not prev then
+ prev = prev or getprev(start)
+ pglyph = prev and getid(prev) == glyph_code
+ end
+ if pglyph then
+ pre, okay = inject_begin(pre,prev,keeptogether,krn,okay)
+ end
+ pre, okay = process_list(pre,keeptogether,krn,false,okay)
+ if okay then
+ setfield(start,"pre",pre)
+ done = true
+ end
+ end
+ if post then
+ local okay = false
+ if not next then
+ next = getnext(start)
+ nglyph = next and getid(next) == glyph_code
+ end
+ if nglyph then
+ post, okay = inject_end(post,next,keeptogether,krn,okay)
end
- elseif id == kern_code then
- -- if start.subtype == kerning_code then -- handle with glyphs
- -- local sk = start.kern
- -- if sk > 0 then
- -- start.kern = sk*krn
- -- done = true
- -- end
- -- end
- elseif lastfont and (id == hlist_code or id == vlist_code) then -- todo: lookahead
- local p = start.prev
- if p and p.id ~= glue_code then
- insert_node_before(head,start,kern_injector(fillup,quaddata[lastfont]*krn))
+ post, okay = process_list(post,keeptogether,krn,false,okay)
+ if okay then
+ setfield(start,"post",post)
done = true
end
- local n = start.next
- if n and n.id ~= glue_code then
- insert_node_after(head,start,kern_injector(fillup,quaddata[lastfont]*krn))
+ end
+ if replace then
+ local okay = false
+ if not prev then
+ prev = prev or getprev(start)
+ pglyph = prev and getid(prev) == glyph_code
+ end
+ if pglyph then
+ replace, okay = inject_begin(replace,prev,keeptogether,krn,okay)
+ end
+ if not next then
+ next = getnext(start)
+ nglyph = next and getid(next) == glyph_code
+ end
+ if nglyph then
+ replace, okay = inject_end(replace,next,keeptogether,krn,okay)
+ end
+ replace, okay = process_list(replace,keeptogether,krn,false,okay)
+ if okay then
+ setfield(start,"replace",replace)
done = true
end
- elseif id == math_code then
- start = end_of_math(start)
+ elseif prevfont then
+ setfield(start,"replace",new_kern(quaddata[prevfont]*krn))
+ done = true
end
+ bound = false
+ elseif id == kern_code then
+ bound = getsubtype(start) == kerning_code or getattr(start,a_fontkern)
+ prev = start
+ previd = id
+ elseif id == glue_code then
+ local subtype = getsubtype(start)
+ if subtype == userskip_code or subtype == xspaceskip_code or subtype == spaceskip_code then
+ local s = getfield(start,"spec")
+ local w = getfield(s,"width")
+ if w > 0 then
+ local width = w+gluefactor*w*krn
+ local stretch = getfield(s,"stretch")
+ local shrink = getfield(s,"shrink")
+ setfield(start,"spec",spec_injector(fillup,width,stretch*width/w,shrink*width/w))
+ done = true
+ end
+ end
+ bound = false
+ elseif id == hlist_code or id == vlist_code then
+ local subtype = getsubtype(start)
+ if subtype == user_list_code or subtype == box_list_code then
+ -- special case
+ local b, f = closest_bound(start,getprev)
+ if b then
+ insert_node_before(head,start,kern_injector(fillup,quaddata[f]*krn))
+ done = true
+ end
+ local b, f = closest_bound(start,getnext)
+ if b then
+ insert_node_after(head,start,kern_injector(fillup,quaddata[f]*krn))
+ done = true
+ end
+ end
+ bound = false
+ elseif id == math_code then
+ start = end_of_math(start)
+ bound = false
end
- end
- if start then
- start = start.next
+ if start then
+ start = getnext(start)
+ end
+ elseif id == kern_code then
+ bound = getsubtype(start) == kerning_code or getattr(start,a_fontkern)
+ prev = start
+ previd = id
+ start = getnext(start)
+ else
+ bound = false
+ start = getnext(start)
end
end
- return head, done
+ return tonode(head), done
end
local enabled = false
@@ -413,10 +620,11 @@ function kerns.set(factor)
return factor
end
-function kerns.handler(head)
- return do_process(head) -- no direct map, because else fourth argument is tail == true
-end
-
-- interface
-commands.setcharacterkerning = kerns.set
+interfaces.implement {
+ name = "setcharacterkerning",
+ actions = kerns.set,
+ arguments = "string"
+}
+
diff --git a/tex/context/base/typo-krn.mkiv b/tex/context/base/typo-krn.mkiv
index 3522c02fc..6d6126542 100644
--- a/tex/context/base/typo-krn.mkiv
+++ b/tex/context/base/typo-krn.mkiv
@@ -35,13 +35,10 @@
\typo_kerning_set
\fi}
-% \def\typo_kerning_set
-% {\ctxcommand{setcharacterkerning(\characterkerningparameter\c!factor)}}
-
\def\typo_kerning_set
{\usecharacterkerningstyleandcolor\c!style\c!color % goodie, maybe also strut
\useaddfontfeatureparameter\characterkerningparameter
- \ctxcommand{setcharacterkerning("\characterkerningparameter\c!factor")}}
+ \clf_setcharacterkerning{\characterkerningparameter\c!factor}}
\unexpanded\def\resetcharacterkerning % fast one
{\attribute\kernattribute\attributeunsetvalue}
@@ -70,7 +67,7 @@
% \definecharacterkerning [\v!letterspacing ] [\v!kerncharacters] [\c!features=letterspacing]
%
% \unexpanded\def\kerncharacters
-% {\doifnextoptionalelse\typo_kerning_apply_yes\typo_kerning_apply_nop}
+% {\doifnextoptionalcselse\typo_kerning_apply_yes\typo_kerning_apply_nop}
%
% \def\typo_kerning_apply_yes[#1]%
% {\groupedcommand{\typo_kerning_apply_yes_indeed{#1}}\donothing}
@@ -89,9 +86,9 @@
\appendtoks
\setuevalue{\currentcharacterkerning}%
- {\doifnextoptionalelse
+ {\doifelsenextoptional
{\typo_kerning_apply_yes{\currentcharacterkerning}}%
- {\typo_kerning_apply_nop{\currentcharacterkerning}}}
+ {\typo_kerning_apply_nop{\currentcharacterkerning}}}%
\to \everydefinecharacterkerning
\unexpanded\def\typo_kerning_apply_yes#1[#2]%
diff --git a/tex/context/base/typo-lan.lua b/tex/context/base/typo-lan.lua
index 50927f744..c42d2119e 100644
--- a/tex/context/base/typo-lan.lua
+++ b/tex/context/base/typo-lan.lua
@@ -67,6 +67,8 @@ function frequencies.averagecharwidth(language,font)
return frequencycache[language or "en"][font or currentfont()]
end
-function commands.averagecharwidth(language,font)
- context(frequencycache[language or "en"][font or currentfont()])
-end
+interfaces.implement {
+ name = "averagecharwidth",
+ actions = { frequencies.averagecharwidth, context },
+ arguments = "string"
+}
diff --git a/tex/context/base/typo-lan.mkiv b/tex/context/base/typo-lan.mkiv
index bb4ed2042..8b633957b 100644
--- a/tex/context/base/typo-lan.mkiv
+++ b/tex/context/base/typo-lan.mkiv
@@ -30,7 +30,7 @@
\def\charwidthlanguage{\currentmainlanguage}
-\def\averagecharwidth{\dimexpr\ctxcommand{averagecharwidth("\charwidthlanguage")}\scaledpoint\relax}
+\def\averagecharwidth{\dimexpr\clf_averagecharwidth{\charwidthlanguage}\scaledpoint\relax}
\protect
diff --git a/tex/context/base/typo-lig.mkiv b/tex/context/base/typo-lig.mkiv
new file mode 100644
index 000000000..6171441d6
--- /dev/null
+++ b/tex/context/base/typo-lig.mkiv
@@ -0,0 +1,31 @@
+%D \module
+%D [ file=typo-lig,
+%D version=2014.12.01,
+%D title=\CONTEXT\ Typesetting Macros,
+%D subtitle=Ligatures,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Typesetting Macros / Ligatures}
+
+%D This macro is mostly used for testing an attribute that blocks ligatures
+%D in the nodemode handler. It is not really used in practice and is mostly
+%D there as a plug into the generic font handler.
+
+\unprotect
+
+\definesystemattribute[noligature][public]
+
+\unexpanded\def\noligature#1%
+ {\dontleavehmode
+ \begingroup
+ \attribute\noligatureattribute\plusone
+ #1%
+ \endgroup}
+
+\protect \endinput
diff --git a/tex/context/base/typo-man.lua b/tex/context/base/typo-man.lua
new file mode 100644
index 000000000..6c6d7926f
--- /dev/null
+++ b/tex/context/base/typo-man.lua
@@ -0,0 +1,113 @@
+if not modules then modules = { } end modules ['typo-man'] = {
+ version = 1.001,
+ comment = "companion to typo-prc.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if not characters then
+ -- for testing stand-alone
+ require("char-def")
+ require("char-ini")
+end
+
+local lpegmatch = lpeg.match
+local P, R, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
+local global = global or _G
+
+local methods = {
+ uppercase = characters.upper,
+ lowercase = characters.lower,
+ Word = converters.Word,
+ Words = converters.Words,
+}
+
+local function nothing(s) return s end -- we already have that one somewhere
+
+-- table.setmetatableindex(methods,function(t,k)
+-- t[k] = nothing
+-- return nothing
+-- end)
+
+local splitter = lpeg.tsplitat(".")
+
+table.setmetatableindex(methods,function(t,k)
+ local s = lpegmatch(splitter,k)
+ local v = global
+ for i=1,#s do
+ v = v[s[i]]
+ if not v then
+ break
+ end
+ end
+ if not v or v == global then
+ v = nothing
+ end
+ t[k] = v
+ return v
+end)
+
+local whitespace = lpeg.patterns.whitespace^0
+local separator = whitespace * P("->") * whitespace
+local pair = C((1-separator)^1) * separator * C(P(1)^0)
+local list = Ct((C((1-separator)^1) * separator)^1) * C(P(1)^0)
+
+local pattern = Carg(1) * pair / function(methods,operation,str)
+ return methods[operation](str) or str
+end
+
+local function apply(str,m)
+ return lpegmatch(pattern,str,1,m or methods) or str
+end
+
+local function splitspecification(field,m)
+ local m, f = lpegmatch(list,field,1,m or methods)
+ if m then
+ return m, f or field
+ else
+ return nil, field
+ end
+end
+
+local function applyspecification(actions,str)
+ if actions then
+ for i=1,#actions do
+ local action = methods[actions[i]]
+ if action then
+ str = action(str) or str
+ end
+ end
+ end
+ return str
+end
+
+if not typesetters then typesetters = { } end
+
+typesetters.manipulators = {
+ methods = methods,
+ apply = apply,
+ patterns = {
+ pair = pair,
+ list = list,
+ },
+ splitspecification = splitspecification,
+ applyspecification = applyspecification,
+}
+
+local pattern = Cs((1 - P(1) * P(-1))^0 * (P(".")/"" + P(1)))
+
+methods.stripperiod = function(str) return lpegmatch(pattern,str) end
+
+-- print(apply("hans"))
+-- print(apply("uppercase->hans"))
+-- print(apply("string.reverse -> hans"))
+-- print(apply("uppercase->hans",{ uppercase = string.reverse } ))
+
+-- print(applyspecification(splitspecification("hans")))
+-- print(applyspecification(splitspecification("lowercase->uppercase->hans")))
+-- print(applyspecification(splitspecification("uppercase->stripperiod->hans.")))
+
+function commands.manipulated(str)
+ context(apply(str))
+end
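
The manipulator syntax handled above is method->data, optionally chained, where
each method name is resolved via the methods table (with a metatable fallback to
global functions such as string.reverse). A standalone sketch of the same idea
in plain Lua, using string matching instead of the lpeg splitter and only a few
hard-coded methods (illustrative names only):

local methods = {
    uppercase   = string.upper,
    lowercase   = string.lower,
    stripperiod = function(s) return (s:gsub("%.$","")) end,
}

local function apply(specification)
    -- split "a->b->data" into the list of method names and the trailing data
    local actions, data = { }, specification
    while true do
        local before, after = data:match("^%s*(.-)%s*%->%s*(.*)$")
        if before then
            actions[#actions+1] = before
            data = after
        else
            break
        end
    end
    -- apply the methods left to right
    for i=1,#actions do
        local action = methods[actions[i]]
        if action then
            data = action(data) or data
        end
    end
    return data
end

print(apply("hans"))                          -- hans
print(apply("uppercase->hans"))               -- HANS
print(apply("uppercase->stripperiod->hans.")) -- HANS
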
diff --git a/tex/context/base/typo-mar.lua b/tex/context/base/typo-mar.lua
index 85d5c85a8..fed9e0745 100644
--- a/tex/context/base/typo-mar.lua
+++ b/tex/context/base/typo-mar.lua
@@ -76,6 +76,8 @@ if not modules then modules = { } end modules ['typo-mar'] = {
local format, validstring = string.format, string.valid
local insert, remove = table.insert, table.remove
local setmetatable, next = setmetatable, next
+local formatters = string.formatters
+local toboolean = toboolean
local attributes, nodes, node, variables = attributes, nodes, node, variables
@@ -114,14 +116,32 @@ local v_continue = variables.continue
local v_first = variables.first
local v_text = variables.text
local v_column = variables.column
-
-local copy_node_list = node.copy_list
-local slide_nodes = node.slide
-local hpack_nodes = node.hpack -- nodes.fasthpack not really faster here
-local traverse_id = node.traverse_id
-local free_node_list = node.flush_list
-local insert_node_after = node.insert_after
-local insert_node_before = node.insert_before
+local v_line = variables.line
+
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local copy_node_list = nuts.copy_list
+local hpack_nodes = nuts.hpack -- nodes.fasthpack not really faster here
+local traverse_id = nuts.traverse_id
+local free_node_list = nuts.flush_list
+local insert_node_after = nuts.insert_after
+local insert_node_before = nuts.insert_before
+local linked_nodes = nuts.linked
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getsubtype = nuts.getsubtype
+local getbox = nuts.getbox
+local getlist = nuts.getlist
local nodecodes = nodes.nodecodes
local listcodes = nodes.listcodes
@@ -144,33 +164,31 @@ local userdefined_code = whatsitcodes.userdefined
local dir_code = whatsitcodes.dir
local localpar_code = whatsitcodes.localpar
-local nodepool = nodes.pool
+local nodepool = nuts.pool
local new_kern = nodepool.kern
-local new_glue = nodepool.glue
-local new_penalty = nodepool.penalty
-local new_stretch = nodepool.stretch
local new_usernumber = nodepool.usernumber
local new_latelua = nodepool.latelua
+local lateluafunction = nodepool.lateluafunction
+
local texgetcount = tex.getcount
local texgetdimen = tex.getdimen
-local texgetbox = tex.getbox
local texget = tex.get
-local points = number.points
-
local isleftpage = layouts.status.isleftpage
-local registertogether = builders.paragraphs.registertogether
-
-local jobpositions = job.positions
-local getposition = jobpositions.position
+local registertogether = builders.paragraphs.registertogether -- tonode
local a_margindata = attributes.private("margindata")
local inline_mark = nodepool.userids["margins.inline"]
-local margins = { }
+local jobpositions = job.positions
+local getposition = jobpositions.get
+local setposition = jobpositions.set
+local getreserved = jobpositions.getreserved
+
+local margins = { }
typesetters.margins = margins
local locations = { v_left, v_right, v_inner, v_outer } -- order might change
@@ -233,7 +251,7 @@ local function showstore(store,banner,location)
if next(store) then
for i, si in table.sortedpairs(store) do
local si =store[i]
- report_margindata("%s: stored in %a at %s: %a => %s",banner,location,i,validstring(si.name,"no name"),nodes.toutf(si.box.list))
+ report_margindata("%s: stored in %a at %s: %a => %s",banner,location,i,validstring(si.name,"no name"),nodes.toutf(getlist(si.box)))
end
else
report_margindata("%s: nothing stored in location %a",banner,location)
@@ -242,7 +260,7 @@ end
function margins.save(t)
setmetatable(t,defaults)
- local content = texgetbox(t.number)
+ local content = getbox(t.number)
local location = t.location
local category = t.category
local inline = t.inline
@@ -310,11 +328,11 @@ function margins.save(t)
-- nice is to make a special status table mechanism
local leftmargindistance = texgetdimen("naturalleftmargindistance")
local rightmargindistance = texgetdimen("naturalrightmargindistance")
- local strutbox = texgetbox("strutbox")
- t.strutdepth = strutbox.depth
- t.strutheight = strutbox.height
- t.leftskip = texget("leftskip").width -- we're not in forgetall
- t.rightskip = texget("rightskip").width -- we're not in forgetall
+ local strutbox = getbox("strutbox")
+ t.strutdepth = getfield(strutbox,"depth")
+ t.strutheight = getfield(strutbox,"height")
+ t.leftskip = getfield(texget("leftskip"),"width") -- we're not in forgetall
+ t.rightskip = getfield(texget("rightskip"),"width") -- we're not in forgetall
t.leftmargindistance = leftmargindistance -- todo:layoutstatus table
t.rightmargindistance = rightmargindistance
t.leftedgedistance = texgetdimen("naturalleftedgedistance")
@@ -327,7 +345,7 @@ function margins.save(t)
--
-- t.realpageno = texgetcount("realpageno")
if inline then
- context(new_usernumber(inline_mark,nofsaved))
+ context(tonode(new_usernumber(inline_mark,nofsaved))) -- or use a normal node
store[nofsaved] = t -- no insert
nofinlined = nofinlined + 1
else
@@ -352,6 +370,18 @@ end
local status, nofstatus = { }, 0
+local f_anchor = formatters["_plib_.set('md:h',%i,{x=true,c=true})"]
+
+local function setanchor(h_anchor)
+ return new_latelua(f_anchor(h_anchor))
+end
+
+-- local t_anchor = { x = true, c = true }
+--
+-- local function setanchor(h_anchor)
+-- return lateluafunction(function() setposition("md:h",h_anchor,t_anchor) end)
+-- end
+
local function realign(current,candidate)
local location = candidate.location
local margin = candidate.margin
@@ -404,7 +434,7 @@ local function realign(current,candidate)
-- we assume that list is a hbox, otherwise we had to take the whole current
-- in order to get it right
- current.width = 0
+ setfield(current,"width",0)
local anchornode, move_x
-- this mess is needed for alignments (combinations) so we use that
@@ -418,12 +448,12 @@ local function realign(current,candidate)
anchor = v_text
end
if inline or anchor ~= v_text or candidate.psubtype == alignment_code then
- -- the alignment_code check catches margintexts ste before a tabulate
+ -- the alignment_code check catches margintexts before a tabulate
h_anchors = h_anchors + 1
- anchornode = new_latelua(format("_plib_.set('md:h',%i,{x=true,c=true})",h_anchors))
- local blob = jobpositions.get('md:h', h_anchors)
+ anchornode = setanchor(h_anchors)
+ local blob = getposition('md:h',h_anchors)
if blob then
- local reference = jobpositions.getreserved(anchor,blob.c)
+ local reference = getreserved(anchor,blob.c)
if reference then
if location == v_left then
move_x = (reference.x or 0) - (blob.x or 0)
@@ -446,9 +476,9 @@ local function realign(current,candidate)
report_margindata("realigned %a, location %a, margin %a",candidate.n,location,margin)
end
end
-
- current.list = hpack_nodes(anchornode .. new_kern(-delta) .. current.list .. new_kern(delta))
- current.width = 0
+ local list = hpack_nodes(linked_nodes(anchornode,new_kern(-delta),getlist(current),new_kern(delta)))
+ setfield(current,"list",list)
+ setfield(current,"width",0)
end
local function realigned(current,a)
@@ -478,24 +508,36 @@ end
-- resetstacked()
-function margins.ha(tag) -- maybe l/r keys ipv left/right keys
+local function ha(tag) -- maybe l/r keys instead of left/right keys
local p = cache[tag]
p.p = true
p.y = true
- jobpositions.set('md:v',tag,p)
+ setposition('md:v',tag,p)
cache[tag] = nil
end
-local function markovershoot(current)
+margins.ha = ha
+
+local f_anchor = formatters["typesetters.margins.ha(%s)"]
+local function setanchor(v_anchor)
+ return new_latelua(f_anchor(v_anchor))
+end
+
+-- local function setanchor(v_anchor) -- freezes the global here
+-- return lateluafunction(function() ha(v_anchor) end)
+-- end
+
+local function markovershoot(current) -- todo: only when offset > line
v_anchors = v_anchors + 1
cache[v_anchors] = stacked
- local anchor = new_latelua(format("typesetters.margins.ha(%s)",v_anchors)) -- todo: alleen als offset > line
- current.list = hpack_nodes(anchor .. current.list)
+ local anchor = setanchor(v_anchors)
+ local list = hpack_nodes(linked_nodes(anchor,getlist(current)))
+ setfield(current,"list",list)
end
local function getovershoot(location)
- local p = jobpositions.get("md:v",v_anchors)
- local c = jobpositions.get("md:v",v_anchors+1)
+ local p = getposition("md:v",v_anchors)
+ local c = getposition("md:v",v_anchors+1)
if p and c and p.p and p.p == c.p then
local distance = p.y - c.y
local offset = p[location] or 0
@@ -512,10 +554,13 @@ end
local function inject(parent,head,candidate)
local box = candidate.box
- local width = box.width
- local height = box.height
- local depth = box.depth
- local shift = box.shift
+ if not box then
+ return head, nil, false -- we can have empty texts
+ end
+ local width = getfield(box,"width")
+ local height = getfield(box,"height")
+ local depth = getfield(box,"depth")
+ local shift = getfield(box,"shift")
local stack = candidate.stack
local location = candidate.location
local method = candidate.method
@@ -524,13 +569,18 @@ local function inject(parent,head,candidate)
local baseline = candidate.baseline
local strutheight = candidate.strutheight
local strutdepth = candidate.strutdepth
- local psubtype = parent.subtype
+ local psubtype = getsubtype(parent)
local offset = stacked[location]
local firstonstack = offset == false or offset == nil
nofstatus = nofstatus + 1
nofdelayed = nofdelayed + 1
status[nofstatus] = candidate
-- yet untested
+ baseline = tonumber(baseline)
+ if not baseline then
+ baseline = toboolean(baseline)
+ end
+ --
if baseline == true then
baseline = false
-- hbox vtop
@@ -546,7 +596,7 @@ local function inject(parent,head,candidate)
end
end
candidate.width = width
- candidate.hsize = parent.width -- we can also pass textwidth
+ candidate.hsize = getfield(parent,"width") -- we can also pass textwidth
candidate.psubtype = psubtype
if trace_margindata then
report_margindata("processing, index %s, height %p, depth %p, parent %s",candidate.n,height,depth,listcodes[psubtype])
@@ -557,10 +607,10 @@ local function inject(parent,head,candidate)
-- offset = offset + height
end
if stack == v_yes then
- offset = offset + candidate.dy
+ offset = offset + candidate.dy -- always
shift = shift + offset
elseif stack == v_continue then
- offset = offset + candidate.dy
+ offset = offset + candidate.dy -- always
if firstonstack then
offset = offset + getovershoot(location)
end
@@ -573,13 +623,23 @@ local function inject(parent,head,candidate)
-- experimental.
-- -- --
if method == v_top then
- local delta = height - parent.height
+ local delta = height - getfield(parent,"height")
if trace_margindata then
report_margindata("top aligned by %p",delta)
end
- if delta < candidate.threshold then
+ if delta < candidate.threshold then -- often we need a negative threshold here
shift = shift + voffset + delta
end
+ elseif method == v_line then
+ if getfield(parent,"depth") == 0 then
+ local delta = height - getfield(parent,"height")
+ if trace_margindata then
+ report_margindata("top aligned by %p (no depth)",delta)
+ end
+ if delta < candidate.threshold then -- often we need a negative threshold here
+ shift = shift + voffset + delta
+ end
+ end
elseif method == v_first then
if baseline then
shift = shift + voffset + height - baseline -- option
@@ -616,22 +676,23 @@ local function inject(parent,head,candidate)
shift = shift + delta
offset = offset + delta
end
- box.shift = shift
- box.width = 0
+ setfield(box,"shift",shift)
+ setfield(box,"width",0)
if not head then
head = box
- elseif head.id == whatsit_code and head.subtype == localpar_code then
+ elseif getid(head) == whatsit_code and getsubtype(head) == localpar_code then
-- experimental
- if head.dir == "TRT" then
- box.list = hpack_nodes(new_kern(candidate.hsize) .. box.list .. new_kern(-candidate.hsize))
+ if getfield(head,"dir") == "TRT" then
+ local list = hpack_nodes(linked_nodes(new_kern(candidate.hsize),getlist(box),new_kern(-candidate.hsize)))
+ setfield(box,"list",list)
end
insert_node_after(head,head,box)
else
- head.prev = box
- box.next = head
+ setfield(head,"prev",box)
+ setfield(box,"next",head)
head = box
end
- box[a_margindata] = nofstatus
+ setattr(box,a_margindata,nofstatus)
if trace_margindata then
report_margindata("injected, location %a, shift %p",location,shift)
end
@@ -656,12 +717,12 @@ local function flushinline(parent,head)
local current = head
local done = false
local continue = false
- local room, don, con
+ local room, don, con, list
while current and nofinlined > 0 do
- local id = current.id
+ local id = getid(current)
if id == whatsit_code then
- if current.subtype == userdefined_code and current.user_id == inline_mark then
- local n = current.value
+ if getsubtype(current) == userdefined_code and getfield(current,"user_id") == inline_mark then
+ local n = getfield(current,"value")
local candidate = inlinestore[n]
if candidate then -- no vpack, as we want to realign
inlinestore[n] = nil
@@ -674,11 +735,12 @@ local function flushinline(parent,head)
end
elseif id == hlist_code or id == vlist_code then
-- optional (but sometimes needed)
- current.list, don, con = flushinline(current,current.list)
+ list, don, con = flushinline(current,getlist(current))
+ setfield(current,"list",list)
continue = continue or con
done = done or don
end
- current = current.next
+ current = getnext(current)
end
return head, done, continue
end
@@ -686,7 +748,7 @@ end
local a_linenumber = attributes.private('linenumber')
local function flushed(scope,parent) -- current is hlist
- local head = parent.list
+ local head = getlist(parent)
local done = false
local continue = false
local room, con, don
@@ -695,33 +757,40 @@ local function flushed(scope,parent) -- current is hlist
for l=1,#locations do
local location = locations[l]
local store = displaystore[category][location][scope]
- while true do
- local candidate = remove(store,1) -- brr, local stores are sparse
- if candidate then -- no vpack, as we want to realign
- head, room, con = inject(parent,head,candidate)
- done = true
- continue = continue or con
- nofstored = nofstored - 1
- registertogether(parent,room)
- else
- break
+ if store then
+ while true do
+ local candidate = remove(store,1) -- brr, local stores are sparse
+ if candidate then -- no vpack, as we want to realign
+ head, room, con = inject(parent,head,candidate)
+ done = true
+ continue = continue or con
+ nofstored = nofstored - 1
+ if room then
+ registertogether(tonode(parent),room) -- !! tonode
+ end
+ else
+ break
+ end
end
+ else
+ -- report_margindata("fatal error: invalid category %a",category or "?")
end
end
end
if nofinlined > 0 then
if done then
- parent.list = head
+ setfield(parent,"list",head)
end
head, don, con = flushinline(parent,head)
continue = continue or con
done = done or don
end
if done then
- local a = head[a_linenumber] -- hack .. we need a more decent critical attribute inheritance mechanism
- parent.list = hpack_nodes(head,parent.width,"exactly")
+ local a = getattr(head,a_linenumber) -- hack .. we need a more decent critical attribute inheritance mechanism
+ local l = hpack_nodes(head,getfield(parent,"width"),"exactly")
+ setfield(parent,"list",l)
if a then
- parent.list[a_linenumber] = a
+ setattr(l,a_linenumber,a)
end
-- resetstacked()
end
@@ -736,14 +805,15 @@ local function handler(scope,head,group)
if trace_margindata then
report_margindata("flushing stage one, stored %s, scope %s, delayed %s, group %a",nofstored,scope,nofdelayed,group)
end
+ head = tonut(head)
local current = head
local done = false
while current do
- local id = current.id
- if (id == vlist_code or id == hlist_code) and not current[a_margindata] then
+ local id = getid(current)
+ if (id == vlist_code or id == hlist_code) and not getattr(current,a_margindata) then
local don, continue = flushed(scope,current)
if don then
- current[a_margindata] = 0 -- signal to prevent duplicate processing
+ setattr(current,a_margindata,0) -- signal to prevent duplicate processing
if continue then
markovershoot(current)
end
@@ -753,12 +823,12 @@ local function handler(scope,head,group)
done = true
end
end
- current = current.next
+ current = getnext(current)
end
-- if done then
resetstacked() -- why doesn't done work ok here?
-- end
- return head, done
+ return tonode(head), done
else
return head, false
end
@@ -789,15 +859,15 @@ function margins.globalhandler(head,group) -- check group
end
return head, false
elseif group == "hmode_par" then
- return handler("global",head,group)
+ return handler(v_global,head,group)
elseif group == "vmode_par" then -- experiment (for alignments)
- return handler("global",head,group)
+ return handler(v_global,head,group)
-- this needs checking as we then get quite some one liners to process and
-- we cannot look ahead then:
elseif group == "box" then -- experiment (for alignments)
- return handler("global",head,group)
+ return handler(v_global,head,group)
elseif group == "alignment" then -- experiment (for alignments)
- return handler("global",head,group)
+ return handler(v_global,head,group)
else
if trace_margingroup then
report_margindata("ignored 2, group %a, stored %s, inhibit %a",group,nofstored,inhibit)
@@ -811,11 +881,11 @@ local function finalhandler(head)
local current = head
local done = false
while current do
- local id = current.id
+ local id = getid(current)
if id == hlist_code then
- local a = current[a_margindata]
+ local a = getattr(current,a_margindata)
if not a or a == 0 then
- finalhandler(current.list)
+ finalhandler(getlist(current))
elseif realigned(current,a) then
done = true
if nofdelayed == 0 then
@@ -823,9 +893,9 @@ local function finalhandler(head)
end
end
elseif id == vlist_code then
- finalhandler(current.list)
+ finalhandler(getlist(current))
end
- current = current.next
+ current = getnext(current)
end
return head, done
else
@@ -838,7 +908,10 @@ function margins.finalhandler(head)
-- if trace_margindata then
-- report_margindata("flushing stage two, instore: %s, delayed: %s",nofstored,nofdelayed)
-- end
- return finalhandler(head)
+ head = tonut(head)
+ local head, done = finalhandler(head)
+ head = tonode(head)
+ return head, done
else
return head, false
end
@@ -877,3 +950,33 @@ statistics.register("margin data", function()
return nil
end
end)
+
+interfaces.implement {
+ name = "savemargindata",
+ actions = margins.save,
+ arguments = {
+ {
+ { "location" },
+ { "method" },
+ { "category" },
+ { "name" },
+ { "scope" },
+ { "number", "integer" },
+ { "margin" },
+ { "distance", "dimen" },
+ { "hoffset", "dimen" },
+ { "voffset", "dimen" },
+ { "dy", "dimen" },
+ { "bottomspace", "dimen" },
+ { "baseline"}, -- dimen or string or
+ { "threshold", "dimen" },
+ { "inline", "boolean" },
+ { "anchor" },
+ -- { "leftskip", "dimen" },
+ -- { "rightskip", "dimen" },
+ { "align" },
+ { "line", "integer" },
+ { "stack" },
+ }
+ }
+}
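
The anchors used above are injected as late Lua calls whose argument string is
built once with a cached formatter; the commented lateluafunction alternative is
the closure variant (noted above as freezing a global). A plain Lua
approximation of what the formatter produces (the real code caches
formatters["typesetters.margins.ha(%s)"] and passes the result to new_latelua):

local function f_anchor(n)
    -- stand-in for the cached ConTeXt formatter
    return string.format("typesetters.margins.ha(%s)", n)
end

print(f_anchor(12))  -- typesetters.margins.ha(12)
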
diff --git a/tex/context/base/typo-mar.mkiv b/tex/context/base/typo-mar.mkiv
index 595cf3756..d5869b459 100644
--- a/tex/context/base/typo-mar.mkiv
+++ b/tex/context/base/typo-mar.mkiv
@@ -14,6 +14,8 @@
%C details.
% todo: tags
+% todo: force inline with option (saves pos)
+% todo: margintitle (also fewer positions then)
\writestatus{loading}{ConTeXt Typesetting Macros / Margindata}
@@ -113,7 +115,7 @@
\c!color=, % maybe \maintextcolor
% \c!name=,
% \c!category=,
- \c!threshold=.25ex,
+ \c!threshold=.25\exheight,
\c!margin=\v!normal,
\c!scope=\v!global,
\c!width=,
@@ -163,7 +165,7 @@
\unexpanded\def\typo_margins_data_synchronize
{\doforcedtrackpagestate\s!margintext\nofmargintexts % includes increment
\docheckpagestate\s!margintext\nofmargintexts
- %\doifrightpagestateelse\s!margintext\nofmargintexts\relax\relax
+ %\doifelserightpagestate\s!margintext\nofmargintexts\relax\relax
\realpageno\realpagestateno
\swapmargins}
@@ -199,13 +201,13 @@
\strc_references_set_page_only_destination_box_attribute\currentmarginreference\currentmarginreference
\fi
\edef\currentmargindatastrut{\margindataparameter\c!strut}%
- \dostarttagged\t!margintext\currentmargindata
+ \dostarttaggedchained\t!margintext\currentmargindata\??margindata
\ifcsname\currentmarginframedhash\s!parent\endcsname
\setbox\nextbox\hbox \currentmarginreference \bgroup
\the\everymargindatacontent
\usemargindatastyleandcolor\c!style\c!color
\setupcurrentmarginframed[\c!location=\v!normal,#textparameters]%
-\typo_margins_data_synchronize
+ \typo_margins_data_synchronize
\inheritedmarginframedframed\bgroup
\ifx\currentmargindatastrut\empty \else
\synchronizestrut\currentmargindatastrut
@@ -221,7 +223,7 @@
\edef\currentmargindatawidth{\margindataparameter\c!width}%
\ifx\currentmargindatawidth\empty
\setbox\nextbox\hbox \currentmarginreference \bgroup
-\typo_margins_data_synchronize
+ \typo_margins_data_synchronize
\the\everymargindatacontent
\usemargindatastyleandcolor\c!style\c!color
\ifx\currentmargindatastrut\empty \else
@@ -235,7 +237,7 @@
\let\currentmarginfirstheight\empty
\else
\setbox\nextbox\hbox \currentmarginreference \bgroup
-\typo_margins_data_synchronize
+ \typo_margins_data_synchronize
\dosetraggedcommand{\margindataparameter\c!align}%
\vtop \bgroup
\the\everymargindatacontent
@@ -258,49 +260,49 @@
\fi
\ifdone
\anch_positions_initialize % we use positions at the lua end
- \ctxlua{typesetters.margins.save{
- location = "\margindataparameter\c!location",
- method = "\margindataparameter\c!method",
- category = "\margindataparameter\c!category",
- name = "\margindataparameter\c!name",
- margin = "\margindataparameter\c!margin", % local normal margin edge
- distance = \number\dimexpr\margindataparameter\c!distance,
- hoffset = \number\dimexpr\margindataparameter\c!hoffset,
- voffset = \number\dimexpr\margindataparameter\c!voffset,
- dy = \number\dimexpr\margindataparameter\c!dy,
- bottomspace = \number\dimexpr\margindataparameter\c!bottomspace,
+ \clf_savemargindata
+ location {\margindataparameter\c!location}%
+ method {\margindataparameter\c!method}%
+ category {\margindataparameter\c!category}%
+ name {\margindataparameter\c!name}%
+ scope {\margindataparameter\c!scope}%
+ number \nextbox
+ margin {\margindataparameter\c!margin}% local normal margin edge
+ distance \dimexpr\margindataparameter\c!distance\relax
+ hoffset \dimexpr\margindataparameter\c!hoffset\relax
+ voffset \dimexpr\margindataparameter\c!voffset\relax
+ dy \dimexpr\margindataparameter\c!dy\relax
+ bottomspace \dimexpr\margindataparameter\c!bottomspace\relax
\ifx\currentmarginfirstheight\empty \else
- baseline = \currentmarginfirstheight,
+ baseline {\currentmarginfirstheight}%
\fi
- threshold = \number\dimexpr\margindataparameter\c!threshold, % overlap related, will change
+ threshold \dimexpr\margindataparameter\c!threshold\relax % overlap related, will change
\ifhmode
- inline = true,
+ inline true %
\fi
- anchor = "\margindataparameter\c!anchor",
+ anchor {\margindataparameter\c!anchor}%
%
% we're not in forgetall
%
% \ifzeropt\leftskip \else
- % leftskip = \number\leftskip,
+ % leftskip \dimexpr\leftskip\relax
% \fi
% \ifzeropt\leftskip \else
- % rightskip = \number\rightskip,
+ % rightskip \dimexpr\rightskip\relax
% \fi
- scope = "\margindataparameter\c!scope",
- align = "\margindataparameter\c!align",
- line = \number\margindataparameter\c!line,
- stack = "\margindataparameter\c!stack",
- number = \number\nextbox,
- }}%
+ align {\margindataparameter\c!align}%
+ line \numexpr\margindataparameter\c!line\relax
+ stack {\margindataparameter\c!stack}%
+ \relax
\else
- \ctxlua{typesetters.margins.save{
- location = "\margindataparameter\c!location",
- method = "\margindataparameter\c!method",
- category = "\margindataparameter\c!category",
- name = "\margindataparameter\c!name",
- scope = "\margindataparameter\c!scope",
- number = \number\nextbox,
- }}%
+ \clf_savemargindata
+ location {\margindataparameter\c!location}%
+ method {\margindataparameter\c!method}%
+ category {\margindataparameter\c!category}%
+ name {\margindataparameter\c!name}%
+ scope {\margindataparameter\c!scope}%
+ number \nextbox
+ \relax
\fi
\endgroup}
diff --git a/tex/context/base/typo-pag.lua b/tex/context/base/typo-pag.lua
index 0dd75ddf9..53f79fcfc 100644
--- a/tex/context/base/typo-pag.lua
+++ b/tex/context/base/typo-pag.lua
@@ -6,6 +6,14 @@ if not modules then modules = { } end modules ['typo-pag'] = {
license = "see context related readme files"
}
+
+builders = builders or { }
+local builders = builders
+
+builders.paragraphs = builders.paragraphs or { }
+local parbuilders = builders.paragraphs
+
+local nodes = nodes
local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
@@ -14,13 +22,23 @@ local glue_code = nodecodes.glue
local kern_code = nodecodes.kern
local penalty_code = nodecodes.penalty
-local insert_node_after = node.insert_after
-local new_penalty = nodes.pool.penalty
-
local unsetvalue = attributes.unsetvalue
-
local a_keeptogether = attributes.private("keeptogether")
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local insert_node_after = nuts.insert_after
+local new_penalty = nuts.pool.penalty
+
local trace_keeptogether = false
local report_keeptogether = logs.reporter("parbuilders","keeptogether")
@@ -33,11 +51,14 @@ trackers.register("parbuilders.keeptogether", function(v) trace_keeptogether =
-- todo: also support lines = 3 etc (e.g. dropped caps) but how to set that
-- when no hlists are there ? ... maybe the local_par
-function builders.paragraphs.registertogether(line,specification) -- might change
+function parbuilders.registertogether(line,specification) -- might change
+ if not specification then
+ return
+ end
if not enabled then
nodes.tasks.enableaction("finalizers","builders.paragraphs.keeptogether")
end
- local a = line[a_keeptogether]
+ local a = getattr(line,a_keeptogether)
local c = a and cache[a]
if c then
local height = specification.height
@@ -64,23 +85,21 @@ function builders.paragraphs.registertogether(line,specification) -- might chang
if not specification.slack then
specification.slack = 0
end
- line[a_keeptogether] = last
+ setattr(line,a_keeptogether,last)
end
if trace_keeptogether then
local a = a or last
local c = cache[a]
- if trace_keeptogether then
- local noflines = specification.lineheight
- local height = c.height
- local depth = c.depth
- local slack = c.slack
- if not noflines or noflines == 0 then
- noflines = "unknown"
- else
- noflines = math.round((height + depth - slack) / noflines)
- end
- report_keeptogether("registered, index %s, height %p, depth %p, slack %p, noflines %a",a,height,depth,slack,noflines)
+ local noflines = specification.lineheight
+ local height = c.height
+ local depth = c.depth
+ local slack = c.slack
+ if not noflines or noflines == 0 then
+ noflines = "unknown"
+ else
+ noflines = math.round((height + depth - slack) / noflines)
end
+ report_keeptogether("registered, index %s, height %p, depth %p, slack %p, noflines %a",a,height,depth,slack,noflines)
end
end
@@ -88,24 +107,24 @@ local function keeptogether(start,a)
if start then
local specification = cache[a]
if a then
- local current = start.next
+ local current = getnext(start)
local previous = start
- local total = previous.depth
+ local total = getfield(previous,"depth")
local slack = specification.slack
local threshold = specification.depth - slack
if trace_keeptogether then
report_keeptogether("%s, index %s, total %p, threshold %p, slack %p","list",a,total,threshold,slack)
end
while current do
- local id = current.id
+ local id = getid(current)
if id == vlist_code or id == hlist_code then
- total = total + current.height + current.depth
+ total = total + getfield(current,"height") + getfield(current,"depth")
if trace_keeptogether then
report_keeptogether("%s, index %s, total %p, threshold %p","list",a,total,threshold)
end
if total <= threshold then
- if previous.id == penalty_code then
- previous.penalty = 10000
+ if getid(previous) == penalty_code then
+ setfield(previous,"penalty",10000)
else
insert_node_after(head,previous,new_penalty(10000))
end
@@ -114,13 +133,13 @@ local function keeptogether(start,a)
end
elseif id == glue_code then
-- hm, breakpoint, maybe turn this into kern
- total = total + current.spec.width
+ total = total + getfield(getfield(current,"spec"),"width")
if trace_keeptogether then
report_keeptogether("%s, index %s, total %p, threshold %p","glue",a,total,threshold)
end
if total <= threshold then
- if previous.id == penalty_code then
- previous.penalty = 10000
+ if getid(previous) == penalty_code then
+ setfield(previous,"penalty",10000)
else
insert_node_after(head,previous,new_penalty(10000))
end
@@ -128,13 +147,13 @@ local function keeptogether(start,a)
break
end
elseif id == kern_code then
- total = total + current.kern
+ total = total + getfield(current,"kern")
if trace_keeptogether then
report_keeptogether("%s, index %s, total %s, threshold %s","kern",a,total,threshold)
end
if total <= threshold then
- if previous.id == penalty_code then
- previous.penalty = 10000
+ if getid(previous) == penalty_code then
+ setfield(previous,"penalty",10000)
else
insert_node_after(head,previous,new_penalty(10000))
end
@@ -143,16 +162,16 @@ local function keeptogether(start,a)
end
elseif id == penalty_code then
if total <= threshold then
- if previous.id == penalty_code then
- previous.penalty = 10000
+ if getid(previous) == penalty_code then
+ setfield(previous,"penalty",10000)
end
- current.penalty = 10000
+ setfield(current,"penalty",10000)
else
break
end
end
previous = current
- current = current.next
+ current = getnext(current)
end
end
end
@@ -160,20 +179,20 @@ end
-- also look at first non glue/kern node e.g for a dropped caps
-function builders.paragraphs.keeptogether(head)
+function parbuilders.keeptogether(head)
local done = false
- local current = head
+ local current = tonut(head)
while current do
- if current.id == hlist_code then
- local a = current[a_keeptogether]
+ if getid(current) == hlist_code then
+ local a = getattr(current,a_keeptogether)
if a and a > 0 then
keeptogether(current,a)
- current[a_keeptogether] = unsetvalue
+ setattr(current,a_keeptogether,unsetvalue)
cache[a] = nil
done = true
end
end
- current = current.next
+ current = getnext(current)
end
return head, done
end
diff --git a/tex/context/base/typo-par.mkiv b/tex/context/base/typo-par.mkiv
new file mode 100644
index 000000000..8572f31b8
--- /dev/null
+++ b/tex/context/base/typo-par.mkiv
@@ -0,0 +1,29 @@
+%D \module
+%D [ file=typo-par,
+%D version=2008.09.30,
+%D title=\CONTEXT\ Typesetting Macros,
+%D subtitle=Paragraph Building,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Node Macros / Paragraph Building}
+
+%D This is very experimental, undocumented and subject to change, just like the
+%D underlying interfaces. But at least it has been cleaned up as part of the
+%D status-mkiv cleanup.
+
+% \startparbuilder[basic]
+% \input tufte \par
+% \stopparbuilder
+
+\unprotect
+
+\registerctxluafile{node-ltp}{1.001}
+\registerctxluafile{trac-par}{1.001}
+
+\protect \endinput
diff --git a/tex/context/base/typo-prc.lua b/tex/context/base/typo-prc.lua
index a6c27ede6..cde66df00 100644
--- a/tex/context/base/typo-prc.lua
+++ b/tex/context/base/typo-prc.lua
@@ -6,21 +6,25 @@ if not modules then modules = { } end modules ['typo-prc'] = {
license = "see context related readme files"
}
--- moved from strc-ini.lua
-
-local context, commands = context, commands
-local formatters = string.formatters
local lpegmatch, patterns, P, C, Cs = lpeg.match, lpeg.patterns, lpeg.P, lpeg.C, lpeg.Cs
-- processors: syntax: processor->data ... not ok yet
-typesetters.processors = typesetters.processors or { }
-local processors = typesetters.processors
+local context = context
+local implement = interfaces.implement
+
+local formatters = string.formatters
+
+typesetters.processors = typesetters.processors or { }
+local processors = typesetters.processors
local trace_processors = false
local report_processors = logs.reporter("processors")
local registered = { }
+local ctx_applyprocessor = context.applyprocessor
+local ctx_firstofoneargument = context.firstofoneargument
+
trackers.register("typesetters.processors", function(v) trace_processors = v end)
function processors.register(p)
@@ -55,7 +59,7 @@ function processors.apply(p,s)
if trace_processors then
report_processors("applying %s processor %a, argument: %s","known",p,s)
end
- context.applyprocessor(p,s)
+ ctx_applyprocessor(p,s)
elseif s then
if trace_processors then
report_processors("applying %s processor %a, argument: %s","unknown",p,s)
@@ -78,21 +82,21 @@ function processors.startapply(p,s)
if trace_processors then
report_processors("start applying %s processor %a","known",p)
end
- context.applyprocessor(p)
+ ctx_applyprocessor(p)
context("{")
return s
elseif p then
if trace_processors then
report_processors("start applying %s processor %a","unknown",p)
end
- context.firstofoneargument()
+ ctx_firstofoneargument()
context("{")
return s
else
if trace_processors then
report_processors("start applying %s processor","ignored")
end
- context.firstofoneargument()
+ ctx_firstofoneargument()
context("{")
return str
end
@@ -121,5 +125,5 @@ end
-- interface
-commands.registerstructureprocessor = processors.register
-commands.resetstructureprocessor = processors.reset
+implement { name = "registerstructureprocessor", actions = processors.register, arguments = "string" }
+implement { name = "resetstructureprocessor", actions = processors.reset, arguments = "string" }
diff --git a/tex/context/base/typo-prc.mkvi b/tex/context/base/typo-prc.mkvi
index de221f241..49a165696 100644
--- a/tex/context/base/typo-prc.mkvi
+++ b/tex/context/base/typo-prc.mkvi
@@ -13,7 +13,11 @@
\writestatus{loading}{ConTeXt Typesetting Macros / Processors}
+%D For the moment manipulators are loaded here too, as they're in the same
+%D category as processors. This might change. (They are used in publications.)
+
\registerctxluafile{typo-prc}{1.001}
+\registerctxluafile{typo-man}{1.001}
\unprotect
@@ -51,7 +55,7 @@
\appendtoks
\letvalue{\??processorcheck\currentprocessor}\relax
- \ctxcommand{registerstructureprocessor("\currentprocessor")}% global, but it permits using processor that are yet undefined
+ \clf_registerstructureprocessor{\currentprocessor}% global, but it permits using processor that are yet undefined
\to \everydefineprocessor
%D The following command can be used by users but normally it will be
@@ -68,9 +72,12 @@
\fi}
\def\typo_processor_apply
- {\doifelse{\processorparameter\c!state}\v!stop
- \firstofoneargument
- \typo_processor_apply_indeed}
+ {\edef\p_state{\processorparameter\c!state}%
+ \ifx\p_state\v!stop
+ \expandafter\firstofoneargument
+ \else
+ \expandafter\typo_processor_apply_indeed
+ \fi}
\def\typo_processor_apply_indeed#content%
{\begingroup
diff --git a/tex/context/base/typo-rep.lua b/tex/context/base/typo-rep.lua
index 01868f490..d95eff68e 100644
--- a/tex/context/base/typo-rep.lua
+++ b/tex/context/base/typo-rep.lua
@@ -10,31 +10,43 @@ if not modules then modules = { } end modules ['typo-rep'] = {
-- endure it by listening to a couple cd's by The Scene and The Lau
-- on the squeezebox on my desk.
+local next, type, tonumber = next, type, tonumber
+
local trace_stripping = false trackers.register("nodes.stripping", function(v) trace_stripping = v end)
trackers.register("fonts.stripping", function(v) trace_stripping = v end)
local report_stripping = logs.reporter("fonts","stripping")
-local nodes, node = nodes, node
+local nodes = nodes
+local tasks = nodes.tasks
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getchar = nuts.getchar
+local getid = nuts.getid
-local delete_node = nodes.delete
-local replace_node = nodes.replace
-local copy_node = node.copy
+local getattr = nuts.getattr
+
+local delete_node = nuts.delete
+local replace_node = nuts.replace
+local copy_node = nuts.copy
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
local chardata = characters.data
local collected = false
-local a_stripping = attributes.private("stripping")
local fontdata = fonts.hashes.identifiers
-local tasks = nodes.tasks
+local a_stripping = attributes.private("stripping")
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
local v_reset = interfaces.variables.reset
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-
-- todo: other namespace -> typesetters
nodes.stripping = nodes.stripping or { } local stripping = nodes.stripping
@@ -59,13 +71,13 @@ local function process(what,head,current,char)
head, current = delete_node(head,current)
elseif type(what) == "function" then
head, current = what(head,current)
- current = current.next
+ current = getnext(current)
if trace_stripping then
report_stripping("processing %C in text",char)
end
elseif what then -- assume node
head, current = replace_node(head,current,copy_node(what))
- current = current.next
+ current = getnext(current)
if trace_stripping then
report_stripping("replacing %C in text",char)
end
@@ -74,28 +86,29 @@ local function process(what,head,current,char)
end
function nodes.handlers.stripping(head)
+ head = tonut(head)
local current, done = head, false
while current do
- if current.id == glyph_code then
+ if getid(current) == glyph_code then
-- it's more efficient to keep track of what needs to be kept
- local todo = current[a_stripping]
+ local todo = getattr(current,a_stripping)
if todo == 1 then
- local char = current.char
+ local char = getchar(current)
local what = glyphs[char]
if what then
head, current = process(what,head,current,char)
done = true
else -- handling of spacing etc has to be done elsewhere
- current = current.next
+ current = getnext(current)
end
else
- current = current.next
+ current = getnext(current)
end
else
- current = current.next
+ current = getnext(current)
end
end
- return head, done
+ return tonode(head), done
end
local enabled = false
@@ -125,4 +138,8 @@ tasks.disableaction("processors","nodes.handlers.stripping")
-- interface
-commands.setcharacterstripping = stripping.set
+interfaces.implement {
+ name = "setcharacterstripping",
+ actions = stripping.set,
+ arguments = "string"
+}
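
The stripping handler above follows the same nuts migration scheme used
throughout this patch: convert the incoming head with tonut, walk the list with
accessor functions instead of field access, and convert back with tonode on
return. A schematic skeleton of that shape (not a drop-in handler; the attribute
name is hypothetical):

local nuts       = nodes.nuts
local tonut      = nuts.tonut
local tonode     = nuts.tonode
local getid      = nuts.getid
local getnext    = nuts.getnext
local getattr    = nuts.getattr

local nodecodes  = nodes.nodecodes
local glyph_code = nodecodes.glyph

local a_demo     = attributes.private("demo") -- hypothetical attribute

local function handler(head)
    head = tonut(head)                -- enter the direct node model
    local current = head
    local done    = false
    while current do
        if getid(current) == glyph_code and getattr(current,a_demo) then
            -- act on the glyph here (replace, delete, recolor, ...)
            done = true
        end
        current = getnext(current)    -- instead of current.next
    end
    return tonode(head), done         -- hand user nodes back to the callback
end
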
diff --git a/tex/context/base/typo-rep.mkiv b/tex/context/base/typo-rep.mkiv
index 46b439491..c1146997e 100644
--- a/tex/context/base/typo-rep.mkiv
+++ b/tex/context/base/typo-rep.mkiv
@@ -38,7 +38,7 @@
\definesystemattribute[stripping][public]
\unexpanded\def\setcharacterstripping[#1]%
- {\ctxcommand{setcharacterstripping("#1")}}
+ {\clf_setcharacterstripping{#1}}
\unexpanded\def\resetcharacterstripping
{\attribute\strippingattribute\attributeunsetvalue}
diff --git a/tex/context/base/typo-spa.lua b/tex/context/base/typo-spa.lua
index c3f50fe98..519ba3f34 100644
--- a/tex/context/base/typo-spa.lua
+++ b/tex/context/base/typo-spa.lua
@@ -15,10 +15,7 @@ local report_spacing = logs.reporter("typesetting","spacing")
local nodes, fonts, node = nodes, fonts, node
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local remove_node = nodes.remove
-local end_of_math = node.end_of_math
+local tasks = nodes.tasks
local fonthashes = fonts.hashes
local fontdata = fonthashes.identifiers
@@ -29,6 +26,27 @@ local unsetvalue = attributes.unsetvalue
local v_reset = interfaces.variables.reset
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getchar = nuts.getchar
+local getid = nuts.getid
+local getfont = nuts.getfont
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local remove_node = nuts.remove
+local end_of_math = nuts.end_of_math
+
+local nodepool = nuts.pool
+local new_penalty = nodepool.penalty
+local new_glue = nodepool.glue
+
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local math_code = nodecodes.math
@@ -36,12 +54,6 @@ local math_code = nodecodes.math
local somespace = nodes.somespace
local somepenalty = nodes.somepenalty
-local nodepool = nodes.pool
-local tasks = nodes.tasks
-
-local new_penalty = nodepool.penalty
-local new_glue = nodepool.glue
-
typesetters = typesetters or { }
local typesetters = typesetters
@@ -52,7 +64,6 @@ spacings.mapping = spacings.mapping or { }
spacings.numbers = spacings.numbers or { }
local a_spacings = attributes.private("spacing")
-spacings.attribute = a_spacings
storage.register("typesetters/spacings/mapping", spacings.mapping, "typesetters.spacings.mapping")
@@ -67,29 +78,30 @@ end
-- todo cache lastattr
function spacings.handler(head)
+ head = tonut(head)
local done = false
local start = head
-- head is always begin of par (whatsit), so we have at least two prev nodes
-- penalty followed by glue
while start do
- local id = start.id
+ local id = getid(start)
if id == glyph_code then
- local attr = start[a_spacings]
+ local attr = getattr(start,a_spacings)
if attr and attr > 0 then
local data = mapping[attr]
if data then
- local char = start.char
+ local char = getchar(start)
local map = data.characters[char]
- start[a_spacings] = unsetvalue -- needed?
+ setattr(start,a_spacings,unsetvalue) -- needed?
if map then
local left = map.left
local right = map.right
local alternative = map.alternative
- local quad = quaddata[start.font]
- local prev = start.prev
+ local quad = quaddata[getfont(start)]
+ local prev = getprev(start)
if left and left ~= 0 and prev then
local ok = false
- local prevprev = prev.prev
+ local prevprev = getprev(prev)
if alternative == 1 then
local somespace = somespace(prev,true)
if somespace then
@@ -120,10 +132,10 @@ function spacings.handler(head)
done = true
end
end
- local next = start.next
+ local next = getnext(start)
if right and right ~= 0 and next then
local ok = false
- local nextnext = next.next
+ local nextnext = getnext(next)
if alternative == 1 then
local somepenalty = somepenalty(next,10000)
if somepenalty then
@@ -164,10 +176,10 @@ function spacings.handler(head)
start = end_of_math(start) -- weird, can return nil .. no math end?
end
if start then
- start = start.next
+ start = getnext(start)
end
end
- return head, done
+ return tonode(head), done
end
local enabled = false
@@ -218,6 +230,30 @@ end
-- interface
-commands.definecharacterspacing = spacings.define
-commands.setupcharacterspacing = spacings.setup
-commands.setcharacterspacing = spacings.set
+local implement = interfaces.implement
+
+implement {
+ name = "definecharacterspacing",
+ actions = spacings.define,
+ arguments = "string"
+}
+
+implement {
+ name = "setupcharacterspacing",
+ actions = spacings.setup,
+ arguments = {
+ "string",
+ "integer",
+ {
+ { "left", "number" },
+ { "right", "number" },
+ { "alternative", "integer" },
+ }
+ }
+}
+
+implement {
+ name = "setcharacterspacing",
+ actions = spacings.set,
+ arguments = "string"
+}
diff --git a/tex/context/base/typo-spa.mkiv b/tex/context/base/typo-spa.mkiv
index d783353d6..2e3e71bf3 100644
--- a/tex/context/base/typo-spa.mkiv
+++ b/tex/context/base/typo-spa.mkiv
@@ -35,7 +35,7 @@
\c!alternative=0]
\unexpanded\def\definecharacterspacing[#1]%
- {\ctxcommand{definecharacterspacing("#1")}}
+ {\clf_definecharacterspacing{#1}}
\unexpanded\def\setupcharacterspacing
{\dotripleargument\typo_characterspacing_setup}
@@ -43,15 +43,19 @@
\def\typo_characterspacing_setup[#1][#2][#3]% todo: #2 list
{\begingroup
\setupcurrent_p_characterspacing[#3]%
- \ctxcommand{setupcharacterspacing("#1",\number#2, { % todo: just pass #3 to the lua end
- left = \direct_p_characterspacingparameter\c!left,
- right = \direct_p_characterspacingparameter\c!right,
- alternative = \direct_p_characterspacingparameter\c!alternative
- })}%
+ \clf_setupcharacterspacing
+ {#1}%
+ \numexpr#2\relax
+ {%
+ left \direct_p_characterspacingparameter\c!left\space
+ right \direct_p_characterspacingparameter\c!right\space
+ alternative \direct_p_characterspacingparameter\c!alternative
+ }%
+ \relax
\endgroup}
\unexpanded\def\setcharacterspacing[#1]% we can store the attribute if we want speed
- {\ctxcommand{setcharacterspacing("#1")}}
+ {\clf_setcharacterspacing{#1}}
\unexpanded\def\resetcharacterspacing % fast one
{\attribute\spacingattribute\attributeunsetvalue}
diff --git a/tex/context/base/typo-sus.lua b/tex/context/base/typo-sus.lua
new file mode 100644
index 000000000..0fe8e143a
--- /dev/null
+++ b/tex/context/base/typo-sus.lua
@@ -0,0 +1,311 @@
+if not modules then modules = { } end modules ['typo-sus'] = {
+ version = 1.001,
+ comment = "companion to typo-sus.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local punctuation = {
+ po = true,
+}
+
+local openquote = {
+ ps = true,
+ pi = true,
+}
+
+local closequote = {
+ pe = true,
+ pf = true,
+}
+
+local weird = {
+ lm = true,
+ no = true,
+}
+
+local categories = characters.categories
+
+local nodecodes = nodes.nodecodes
+
+local glyph_code = nodecodes.glyph
+local kern_code = nodecodes.kern
+local penalty_code = nodecodes.penalty
+local glue_code = nodecodes.glue
+local math_code = nodecodes.math
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+
+local nuts = nodes.nuts
+local tonut = nodes.tonut
+local tonode = nodes.tonode
+
+local getid = nuts.getid
+local getchar = nuts.getchar
+local getprev = nuts.getprev
+local getnext = nuts.getnext
+local getfield = nuts.getfield
+local getattr = nuts.getattr
+local getfont = nuts.getfont
+local getlist = nuts.getlist
+
+local setfield = nuts.setfield
+local setattr = nuts.setattr
+
+local setcolor = nodes.tracers.colors.set
+local insert_before = nuts.insert_before
+local insert_after = nuts.insert_after
+local end_of_math = nuts.end_of_math
+
+local nodepool = nuts.pool
+
+local new_rule = nodepool.rule
+local new_kern = nodepool.kern
+local new_hlist = nodepool.hlist
+----- new_penalty = nodepool.penalty
+
+local a_characters = attributes.private("characters")
+local a_suspecting = attributes.private('suspecting')
+local a_suspect = attributes.private('suspect')
+local texsetattribute = tex.setattribute
+local enabled = false
+
+local threshold = 65536 / 4
+
+local function special(n)
+ if n then
+ local id = getid(n)
+ if id == kern_code then
+ local kern = getfield(n,"kern")
+ return kern < threshold
+ elseif id == penalty_code then
+ return true
+ elseif id == glue_code then
+ local width = getfield(getfield(n,"spec"),"width")
+ return width < threshold
+ elseif id == hlist_code then
+ local width = getfield(n,"width")
+ return width < threshold
+ end
+ else
+ return false
+ end
+end
+
+local function goback(current)
+ local prev = getprev(current)
+ while prev and special(prev) do
+ prev = getprev(prev)
+ end
+ if prev then
+ return prev, getid(prev)
+ end
+end
+
+local function goforward(current)
+ local next = getnext(current)
+ while next and special(next) do
+ next = getnext(next)
+ end
+ if next then
+ return next, getid(next)
+ end
+end
+
+local function mark(head,current,id,color)
+ if id == glue_code then
+ -- the glue can have stretch and/or shrink so the rule can overlap with the
+ -- following glyph .. no big deal as that one then sits on top of the rule
+ local width = getfield(getfield(current,"spec"),"width")
+ local rule = new_rule(width)
+ local kern = new_kern(-width)
+ head = insert_before(head,current,rule)
+ head = insert_before(head,current,kern)
+ setcolor(rule,color)
+ -- elseif id == kern_code then
+ -- local width = getfield(current,"kern")
+ -- local rule = new_rule(width)
+ -- local kern = new_kern(-width)
+ -- head = insert_before(head,current,rule)
+ -- head = insert_before(head,current,kern)
+ -- setcolor(rule,color)
+ else
+ local width = getfield(current,"width")
+ local extra = fonts.hashes.xheights[getfont(current)] / 2
+ local rule = new_rule(width,getfield(current,"height")+extra,getfield(current,"depth")+extra)
+ local hlist = new_hlist(rule)
+ head = insert_before(head,current,hlist)
+ setcolor(rule,color)
+ setcolor(current,"white")
+ end
+ return head, current
+end
+
+-- we can cache the font and skip ahead to the next node but it doesn't save
+-- enough time and it makes the code look bad too ... after all, we seldom use
+-- this
+
+local colors = {
+ "darkred",
+ "darkgreen",
+ "darkblue",
+ "darkcyan",
+ "darkmagenta",
+ "darkyellow",
+ "darkgray",
+ "orange",
+}
+
+local found = 0
+
+function typesetters.marksuspects(head)
+ local head = tonut(head)
+ local current = head
+ local lastdone = nil
+ while current do
+ if getattr(current,a_suspecting) then
+ local id = getid(current)
+ if id == glyph_code then
+ local char = getchar(current)
+ local code = categories[char]
+ local done = false
+ if punctuation[code] then
+ local prev, pid = goback(current)
+ if prev and pid == glue_code then
+ done = 3 -- darkblue
+ elseif prev and pid == math_code then
+ done = 3 -- darkblue
+ else
+ local next, nid = goforward(current)
+ if next and nid ~= glue_code then
+ done = 3 -- darkblue
+ end
+ end
+ elseif openquote[code] then
+ local next, nid = goforward(current)
+ if next and nid == glue_code then
+ done = 1 -- darkred
+ end
+ elseif closequote[code] then
+ local prev, pid = goback(current)
+ if prev and pid == glue_code then
+ done = 1 -- darkred
+ end
+ elseif weird[code] then
+ done = 2 -- darkgreen
+ else
+ local prev, pid = goback(current)
+ if prev then
+ if pid == math_code then
+ done = 7-- darkgray
+ elseif pid == glyph_code and getfont(current) ~= getfont(prev) then
+ if lastdone ~= prev then
+ done = 2 -- darkgreen
+ end
+ end
+ end
+ if not done then
+ local next, nid = goforward(current)
+ if next then
+ if nid == math_code then
+ done = 7 -- darkgray
+ elseif nid == glyph_code and getfont(current) ~= getfont(next) then
+ if lastdone ~= prev then
+ done = 2 -- darkgreen
+ end
+ end
+ end
+ end
+ end
+ if done then
+ setattr(current,a_suspect,done)
+ lastdone = current
+ found = found + 1
+ end
+ current = getnext(current)
+ elseif id == math_code then
+ current = getnext(end_of_math(current))
+ elseif id == glue_code then
+ local a = getattr(current,a_characters)
+ if a then
+ local prev = getprev(current)
+ local prid = prev and getid(prev)
+ local done = false
+ if prid == penalty_code and getfield(prev,"penalty") == 10000 then
+ done = 8 -- orange
+ else
+ done = 5 -- darkmagenta
+ end
+ if done then
+ setattr(current,a_suspect,done)
+ -- lastdone = current
+ found = found + 1
+ end
+ end
+ current = getnext(current)
+ else
+ current = getnext(current)
+ end
+ else
+ current = getnext(current)
+ end
+ end
+ return tonode(head), found > 0
+end
+
+local function showsuspects(head)
+ local current = head
+ while current do
+ local id = getid(current)
+ if id == glyph_code then
+ local a = getattr(current,a_suspect)
+ if a then
+ head, current = mark(head,current,id,colors[a])
+ end
+ elseif id == glue_code then
+ local a = getattr(current,a_suspect)
+ if a then
+ head, current = mark(head,current,id,colors[a])
+ end
+ elseif id == math_code then
+ current = end_of_math(current)
+ elseif id == hlist_code or id == vlist_code then
+ local list = getlist(current)
+ if list then
+ local l = showsuspects(list)
+ if l ~= list then
+ setfield(current,"list",l)
+ end
+ end
+ end
+ current = getnext(current)
+ end
+ return head
+end
+
+function typesetters.showsuspects(head)
+ if found > 0 then
+ return tonode(showsuspects(tonut(head))), true
+ else
+ return head, false
+ end
+end
+
+nodes.tasks.appendaction ("processors","after", "typesetters.marksuspects")
+nodes.tasks.prependaction("shipouts", "normalizers","typesetters.showsuspects")
+
+nodes.tasks.disableaction("processors","typesetters.marksuspects")
+nodes.tasks.disableaction("shipouts", "typesetters.showsuspects")
+
+-- or maybe a directive
+
+trackers.register("typesetters.suspects",function(v)
+ texsetattribute(a_suspecting,v and 1 or unsetvalue)
+ if v and not enabled then
+ nodes.tasks.enableaction("processors","typesetters.marksuspects")
+ nodes.tasks.enableaction("shipouts", "typesetters.showsuspects")
+ enabled = true
+ end
+end)
+
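
The suspect detection above keys on Unicode general categories: po for
punctuation, ps/pi and pe/pf for opening and closing quotes, lm/no for the weird
class. A standalone sketch of that lookup with a tiny hard-coded table instead
of characters.data (sample codepoints only):

local categories = {
    [0x002E] = "po", -- FULL STOP
    [0x201C] = "pi", -- LEFT DOUBLE QUOTATION MARK
    [0x201D] = "pf", -- RIGHT DOUBLE QUOTATION MARK
    [0x00B2] = "no", -- SUPERSCRIPT TWO
}

local punctuation = { po = true }
local openquote   = { ps = true, pi = true }
local closequote  = { pe = true, pf = true }
local weird       = { lm = true, no = true }

local function classify(char)
    local code = categories[char]
    if     punctuation[code] then return "punctuation"
    elseif openquote[code]   then return "open quote"
    elseif closequote[code]  then return "close quote"
    elseif weird[code]       then return "weird"
    else                          return "other"
    end
end

print(classify(0x002E)) -- punctuation
print(classify(0x201C)) -- open quote
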
diff --git a/tex/context/base/typo-sus.mkiv b/tex/context/base/typo-sus.mkiv
new file mode 100644
index 000000000..fe44e6327
--- /dev/null
+++ b/tex/context/base/typo-sus.mkiv
@@ -0,0 +1,51 @@
+%D \module
+%D [ file=typo-sus,
+%D version=2014.11.06,
+%D title=\CONTEXT\ Typesetting Macros,
+%D subtitle=Checking Suspects,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Typesetting Macros / Checking Suspects}
+
+%D This is a rather special module, mostly needed by ourselves for
+%D projects where copy||editing is not that precise.
+
+\registerctxluafile{typo-sus}{1.001}
+
+\unexpanded\def\showsuspects{\enabletrackers[typesetters.suspects]}
+
+%D The suspicious spacing will be colored in the text. There can be false
+%D positives, but this feature is mostly used when proofreading. So, we
+%D don't worry too much about interference (and efficiency).
+%D
+%D \unexpanded\def\showsample#1%
+%D {\NC \type{#1}%
+%D    \NC \enabletrackers[typesetters.suspects]#1\disabletrackers[typesetters.suspects]%
+%D \NC \NR}
+%D
+%D \starttabulate[|||][before=,after=]
+%D \showsample{foo$x$}
+%D \showsample{$x$bar}
+%D \showsample{foo$x$bar}
+%D \showsample{$f+o+o$:}
+%D \showsample{;$f+o+o$}
+%D \showsample{; bar}
+%D \showsample{foo:bar}
+%D \showsample{\quote{ foo }}
+%D \showsample{\quote{bar }}
+%D \showsample{\quote{ bar}}
+%D \showsample{(foo )}
+%D \showsample{\{foo \}}
+%D \showsample{foo{\bf gnu}bar}
+%D \showsample{foo$x^2$bar}
+%D \showsample{foo\nobreakspace bar}
+%D \stoptabulate
+
+\endinput
+
diff --git a/tex/context/base/typo-tal.lua b/tex/context/base/typo-tal.lua
index 63a66d037..5663c3bd9 100644
--- a/tex/context/base/typo-tal.lua
+++ b/tex/context/base/typo-tal.lua
@@ -6,33 +6,60 @@ if not modules then modules = { } end modules ['typo-tal'] = {
license = "see context related readme files"
}
--- I'll make it a bit more efficient and provide named instances too.
+-- I'll make it a bit more efficient and provide named instances too, which is
+-- needed for nested tables.
+--
+-- Currently we have two methods: text and number with some downward compatible
+-- defaulting.
+
+-- We can speed up by saving the current fontcharacters[font] + lastfont.
local next, type = next, type
local div = math.div
local utfbyte = utf.byte
+local splitmethod = utilities.parsers.splitmethod
+
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local glue_code = nodecodes.glue
local fontcharacters = fonts.hashes.characters
-local unicodes = fonts.hashes.unicodes
+----- unicodes = fonts.hashes.unicodes
local categories = characters.categories -- nd
-local insert_node_before = nodes.insert_before
-local insert_node_after = nodes.insert_after
-local traverse_list_by_id = nodes.traverse_id
-local dimensions_of_list = nodes.dimensions
-local first_glyph = nodes.first_glyph
+local variables = interfaces.variables
+local v_text = variables.text
+local v_number = variables.number
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getfont = nuts.getfont
+local getchar = nuts.getchar
+local getfield = nuts.getfield
+local setfield = nuts.setfield
-local nodepool = nodes.pool
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local traverse_list_by_id = nuts.traverse_id
+local dimensions_of_list = nuts.dimensions
+local first_glyph = nuts.first_glyph
+
+local nodepool = nuts.pool
local new_kern = nodepool.kern
local new_gluespec = nodepool.gluespec
local tracers = nodes.tracers
local setcolor = tracers.colors.set
-local tracedrule = tracers.pool.nodes.rule
+local tracedrule = tracers.pool.nuts.rule
local characteralign = { }
typesetters.characteralign = characteralign
@@ -47,6 +74,8 @@ local enabled = false
local datasets = false
+local implement = interfaces.implement
+
local comma = 0x002C
local period = 0x002E
local punctuationspace = 0x2008
@@ -65,14 +94,75 @@ local validsigns = {
[0x2213] = 0x2213, -- minusplus
}
+-- If needed we can have more modes which then also means a faster simple handler
+-- for non numbers.
+
+local function setcharacteralign(column,separator)
+ if not enabled then
+ nodes.tasks.enableaction("processors","typesetters.characteralign.handler")
+ enabled = true
+ end
+ if not datasets then
+ datasets = { }
+ end
+ local dataset = datasets[column] -- we can use a metatable
+ if not dataset then
+ local method, token
+ if separator then
+ method, token = splitmethod(separator)
+ if method and token then
+ separator = utfbyte(token) or comma
+ else
+ separator = utfbyte(separator) or comma
+ method = validseparators[separator] and v_number or v_text
+ end
+ else
+ separator = comma
+ method = v_number
+ end
+ dataset = {
+ separator = separator,
+ list = { },
+ maxafter = 0,
+ maxbefore = 0,
+ collected = false,
+ method = method,
+ separators = validseparators,
+ signs = validsigns,
+ }
+ datasets[column] = dataset
+ used = true
+ end
+ return dataset
+end
+
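+-- A small illustration (not part of the interface itself) of how a separator
+-- specification ends up in a dataset:
+--
+--   setcharacteralign(1,",")         -- separator 0x2C, method "number" (a valid separator)
+--   setcharacteralign(1,"-")         -- separator 0x2D, method "text"   (not a valid separator)
+--   setcharacteralign(1,"number->-") -- separator 0x2D, method "number" (explicitly forced)
+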
+local function resetcharacteralign()
+ datasets = false
+end
+
+characteralign.setcharacteralign = setcharacteralign
+characteralign.resetcharacteralign = resetcharacteralign
+
+implement {
+ name = "setcharacteralign",
+ actions = setcharacteralign,
+ arguments = { "integer", "string" }
+}
+
+implement {
+ name = "resetcharacteralign",
+ actions = resetcharacteralign
+}
+
local function traced_kern(w)
return tracedrule(w,nil,nil,"darkgray")
end
-function characteralign.handler(head,where)
+function characteralign.handler(originalhead,where)
if not datasets then
- return head, false
+ return originalhead, false
end
+ local head = tonut(originalhead)
-- local first = first_glyph(head) -- we could do that once
local first
for n in traverse_list_by_id(glyph_code,head) do
@@ -80,14 +170,14 @@ function characteralign.handler(head,where)
break
end
if not first then
- return head, false
+ return originalhead, false
end
- local a = first[a_characteralign]
+ local a = getattr(first,a_characteralign)
if not a or a == 0 then
- return head, false
+ return originalhead, false
end
- local column = div(a,100)
- local row = a % 100
+ local column = div(a,0xFFFF)
+ local row = a % 0xFFFF
local dataset = datasets and datasets[column] or setcharacteralign(column)
local separator = dataset.separator
local list = dataset.list
@@ -98,82 +188,128 @@ function characteralign.handler(head,where)
local c = nil
local current = first
local sign = nil
+ --
+ local validseparators = dataset.separators
+ local validsigns = dataset.signs
+ local method = dataset.method
-- we can think of constraints
- while current do
- local id = current.id
- if id == glyph_code then
- local char = current.char
- local font = current.font
- local unicode = unicodes[font][char]
- if not unicode then
- -- no unicode so forget about it
- elseif unicode == separator then
- c = current
- if trace_split then
- setcolor(current,"darkred")
- end
- dataset.hasseparator = true
- elseif categories[unicode] == "nd" or validseparators[unicode] then
- if c then
- if not a_start then
- a_start = current
- end
- a_stop = current
+ if method == v_number then
+ while current do
+ local id = getid(current)
+ if id == glyph_code then
+ local char = getchar(current)
+ local font = getfont(current)
+ -- local unicode = unicodes[font][char]
+ local unicode = fontcharacters[font][char].unicode or char -- ignore tables
+ if not unicode then
+ -- no unicode so forget about it
+ elseif unicode == separator then
+ c = current
if trace_split then
- setcolor(current,validseparators[unicode] and "darkcyan" or "darkblue")
+ setcolor(current,"darkred")
end
- else
- if not b_start then
- if sign then
- b_start = sign
- local new = validsigns[sign.char]
- if char == new or not fontcharacters[sign.font][new] then
- if trace_split then
- setcolor(sign,"darkyellow")
+ dataset.hasseparator = true
+ elseif categories[unicode] == "nd" or validseparators[unicode] then
+ if c then
+ if not a_start then
+ a_start = current
+ end
+ a_stop = current
+ if trace_split then
+ setcolor(current,validseparators[unicode] and "darkcyan" or "darkblue")
+ end
+ else
+ if not b_start then
+ if sign then
+ b_start = sign
+ local new = validsigns[getchar(sign)]
+ if char == new or not fontcharacters[getfont(sign)][new] then
+ if trace_split then
+ setcolor(sign,"darkyellow")
+ end
+ else
+ setfield(sign,"char",new)
+ if trace_split then
+ setcolor(sign,"darkmagenta")
+ end
end
+ sign = nil
+ b_stop = current
else
- sign.char = new
- if trace_split then
- setcolor(sign,"darkmagenta")
- end
+ b_start = current
+ b_stop = current
end
- sign = nil
- b_stop = current
else
- b_start = current
b_stop = current
- if trace_split then
- setcolor(current,validseparators[unicode] and "darkcyan" or "darkblue")
- end
end
- else
- b_stop = current
- if trace_split then
+ if trace_split and current ~= sign then
setcolor(current,validseparators[unicode] and "darkcyan" or "darkblue")
end
end
+ elseif not b_start then
+ sign = validsigns[unicode] and current
+ -- if trace_split then
+ -- setcolor(current,"darkgreen")
+ -- end
+ end
+ elseif (b_start or a_start) and id == glue_code then
+ -- maybe only in number mode
+ -- somewhat inefficient
+ local next = getnext(current)
+ local prev = getprev(current)
+ if next and prev and getid(next) == glyph_code and getid(prev) == glyph_code then -- too much checking
+ local width = fontcharacters[getfont(b_start)][separator or period].width
+ -- local spec = getfield(current,"spec")
+ -- free_spec(spec)
+ setfield(current,"spec",new_gluespec(width))
+ setattr(current,a_character,punctuationspace)
+ if a_start then
+ a_stop = current
+ elseif b_start then
+ b_stop = current
+ end
end
- elseif not b_start then
- sign = validsigns[unicode] and current
end
- elseif (b_start or a_start) and id == glue_code then
- -- somewhat inefficient
- local next = current.next
- local prev = current.prev
- if next and prev and next.id == glyph_code and prev.id == glyph_code then -- too much checking
- local width = fontcharacters[b_start.font][separator or period].width
- -- local spec = current.spec
- -- nodes.free(spec) -- hm, we leak but not that many specs
- current.spec = new_gluespec(width)
- current[a_character] = punctuationspace
- if a_start then
- a_stop = current
- elseif b_start then
- b_stop = current
+ current = getnext(current)
+ end
+ else
+ while current do
+ local id = getid(current)
+ if id == glyph_code then
+ local char = getchar(current)
+ local font = getfont(current)
+ -- local unicode = unicodes[font][char]
+ local unicode = fontcharacters[font][char].unicode or char -- ignore tables
+ if not unicode then
+ -- no unicode so forget about it
+ elseif unicode == separator then
+ c = current
+ if trace_split then
+ setcolor(current,"darkred")
+ end
+ dataset.hasseparator = true
+ else
+ if c then
+ if not a_start then
+ a_start = current
+ end
+ a_stop = current
+ if trace_split then
+ setcolor(current,"darkgreen")
+ end
+ else
+ if not b_start then
+ b_start = current
+ end
+ b_stop = current
+ if trace_split then
+ setcolor(current,"darkblue")
+ end
+ end
end
end
+ current = getnext(current)
end
- current = current.next
end
local entry = list[row]
if entry then
@@ -199,7 +335,7 @@ function characteralign.handler(head,where)
local maxafter = dataset.maxafter
local before = entry.before or 0
local after = entry.after or 0
- local new_kern = trace_split and traced_kern or new_kern
+ local new_kern = trace_split and traced_kern or new_kern
if b_start then
if before < maxbefore then
head = insert_node_before(head,b_start,new_kern(maxbefore-before))
@@ -207,7 +343,7 @@ function characteralign.handler(head,where)
if not c then
-- print("[before]")
if dataset.hasseparator then
- local width = fontcharacters[b_stop.font][separator].width
+ local width = fontcharacters[getfont(b_stop)][separator].width
insert_node_after(head,b_stop,new_kern(maxafter+width))
end
elseif a_start then
@@ -229,7 +365,7 @@ function characteralign.handler(head,where)
end
else
-- print("[after]")
- local width = fontcharacters[b_stop.font][separator].width
+ local width = fontcharacters[getfont(b_stop)][separator].width
head = insert_node_before(head,a_start,new_kern(maxbefore+width))
end
if after < maxafter then
@@ -246,44 +382,10 @@ function characteralign.handler(head,where)
end
else
entry = {
- before = b_start and dimensions_of_list(b_start,b_stop.next) or 0,
- after = a_start and dimensions_of_list(a_start,a_stop.next) or 0,
+ before = b_start and dimensions_of_list(b_start,getnext(b_stop)) or 0,
+ after = a_start and dimensions_of_list(a_start,getnext(a_stop)) or 0,
}
list[row] = entry
end
- return head, true
+ return tonode(head), true
end
-
-function setcharacteralign(column,separator)
- if not enabled then
- nodes.tasks.enableaction("processors","typesetters.characteralign.handler")
- enabled = true
- end
- if not datasets then
- datasets = { }
- end
- local dataset = datasets[column] -- we can use a metatable
- if not dataset then
- dataset = {
- separator = separator and utfbyte(separator) or comma,
- list = { },
- maxafter = 0,
- maxbefore = 0,
- collected = false,
- }
- datasets[column] = dataset
- used = true
- end
- return dataset
-end
-
-local function resetcharacteralign()
- datasets = false
-end
-
-characteralign.setcharacteralign = setcharacteralign
-characteralign.resetcharacteralign = resetcharacteralign
-
-commands.setcharacteralign = setcharacteralign
-commands.resetcharacteralign = resetcharacteralign
-
diff --git a/tex/context/base/typo-tal.mkiv b/tex/context/base/typo-tal.mkiv
index 7de10a6ec..570f1a1f5 100644
--- a/tex/context/base/typo-tal.mkiv
+++ b/tex/context/base/typo-tal.mkiv
@@ -40,25 +40,26 @@
%D
%D \typebuffer \blank \getbuffer \blank
-%D \startbuffer
-%D \bTABLE
-%D \bTR \bTD[aligncharacter=yes] € 1,1 \eTD \eTR
-%D \bTR \bTD[aligncharacter=yes] € 11,11 \eTD \eTR
-%D \bTR \bTD[aligncharacter=yes] € 12\punctuationspace111,11 \eTD \eTR
-%D \bTR \bTD[aligncharacter=yes] € 12 111,11 \eTD \eTR
-%D \bTR \bTD[aligncharacter=yes] € 1.234.451,22222 \eTD \eTR
-%D \bTR \bTD[aligncharacter=yes] € 234.451,2 \eTD \eTR
-%D \bTR \bTD[aligncharacter=yes] € 234.451 \eTD \eTR
-%D \bTR \bTD[aligncharacter=yes] € 451 \eTD \eTR
-%D \bTR \bTD \bf some text \eTD \eTR
-%D \eTABLE
-%D \stopbuffer
-%D
-%D \typebuffer \blank \getbuffer \blank
-
-\unexpanded\def\signalcharacteralign#1#2{\attribute\characteralignattribute=\numexpr#1*\plushundred+#2\relax}
-\unexpanded\def\setcharacteralign #1#2{\ctxcommand{setcharacteralign(\number#1,"#2")}}
-\unexpanded\def\resetcharacteralign {\ctxcommand{resetcharacteralign()}}
+% D \startbuffer
+% D \bTABLE
+% D \bTR \bTD[aligncharacter=yes] € 1,1 \eTD \eTR
+% D \bTR \bTD[aligncharacter=yes] € 11,11 \eTD \eTR
+% D \bTR \bTD[aligncharacter=yes] € 12\punctuationspace111,11 \eTD \eTR
+% D \bTR \bTD[aligncharacter=yes] € 12 111,11 \eTD \eTR
+% D \bTR \bTD[aligncharacter=yes] € 1.234.451,22222 \eTD \eTR
+% D \bTR \bTD[aligncharacter=yes] € 234.451,2 \eTD \eTR
+% D \bTR \bTD[aligncharacter=yes] € 234.451 \eTD \eTR
+% D \bTR \bTD[aligncharacter=yes] € 451 \eTD \eTR
+% D \bTR \bTD \bf some text \eTD \eTR
+% D \eTABLE
+% D \stopbuffer
+% D
+% D \typebuffer \blank \getbuffer \blank
+
+\unexpanded\def\signalcharacteralign#1#2{\attribute\characteralignattribute\numexpr#1*\maxcardminusone+#2\relax} % 0xFFFF
+\unexpanded\def\setcharacteralign #1#2{\clf_setcharacteralign#1{#2}}
+\unexpanded\def\resetcharacteralign {\clf_resetcharacteralign}
+\unexpanded\def\nocharacteralign {\attribute\characteralignattribute\attributeunsetvalue}
%D Mostly downward compatible:
%D
@@ -73,6 +74,15 @@
%D
%D \typebuffer \blank \getbuffer \blank
+%D We have (currently) two modes: \type {text} and \type {number}. The handler tries
+%D to determine the mode automatically. When using periods and commas as separators
+%D the \type {number} mode is chosen. If you use for instance a \type {-} as
+%D separator, \type {text} is chosen, but you can enforce \type {number} with \type
+%D {number->-} (as with other mechanisms, the arrow indicates a method to apply).
+%D
+%D One can use \type {\nocharacteralign} to disable this mechanism, for instance in
+%D a table cell.
+
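+%D A minimal usage sketch (the column numbers and separators are only meant as an
+%D illustration):
+%D
+%D \starttyping
+%D \setcharacteralign{1}{,}         % column 1: comma, number mode chosen automatically
+%D \setcharacteralign{2}{number->-} % column 2: hyphen as separator, number mode forced
+%D ...
+%D \resetcharacteralign
+%D \stoptyping
+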
\def\alignmentcharacter{,}
\unexpanded\def\typo_charalign_pass_one
@@ -86,19 +96,26 @@
\def\typo_charalign_pass
{\hbox\bgroup\signalcharacteralign\plusone\scratchcounter\let\next}
-\unexpanded\def\startcharacteralign#1\stopcharacteralign
+\unexpanded\def\startcharacteralign
+ {\dosingleempty\typo_charalign_start}
+
+\def\typo_charalign_start[#1]#2\stopcharacteralign
{\bgroup
+ \edef\m_temp{#1}%
+ \ifx\m_temp\empty \else
+ \let\alignmentcharacter\m_temp
+ \fi
\setcharacteralign\plusone\alignmentcharacter
\begingroup
\scratchcounter\zerocount
\let\checkcharacteralign\typo_charalign_pass_one
\settrialtypesetting
- #1\relax
+ #2\relax
\endgroup
\begingroup
\scratchcounter\zerocount
\let\checkcharacteralign\typo_charalign_pass_two
- #1\relax
+ #2\relax
\endgroup
\resetcharacteralign
\egroup}
diff --git a/tex/context/base/typo-txt.mkvi b/tex/context/base/typo-txt.mkvi
index 57f4e5f42..7562fe70c 100644
--- a/tex/context/base/typo-txt.mkvi
+++ b/tex/context/base/typo-txt.mkvi
@@ -17,7 +17,7 @@
\unprotect
-\registerctxluafile{typo-txt}{1.001}
+% registerctxluafile{typo-txt}{1.001}
%D \macros
%D {normalizefontheight,normalizefontwidth,normalizedfontsize}
@@ -194,4 +194,51 @@
%D \HL
%D \stoptabulate
+%D This is used in the beginners' manual. One needs to set the font size to an
+%D acceptable value for this to work.
+
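+%D A possible usage sketch (the dimensions are just an illustration):
+%D
+%D \starttyping
+%D \startnicelyfilledbox[width=.5\textwidth,height=3cm,offset=1ex]
+%D     some text that gets stretched out nicely over the given area
+%D \stopnicelyfilledbox
+%D \stoptyping
+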
+\unexpanded\def\startnicelyfilledbox
+ {\vbox\bgroup
+ \forgetall
+ \dosingleempty\dostartnicelyfilledbox}
+
+\def\dostartnicelyfilledbox[#1]%
+ {\letdummyparameter\c!width \hsize
+ \letdummyparameter\c!height\vsize
+   \letdummyparameter\c!offset\exheight % we obey the outer exheight
+   \letdummyparameter\c!strut \v!yes    % we obey the inner strut!
+ \getdummyparameters[#1]%
+ \scratchoffset\dummyparameter\c!offset\relax
+ \setbox\scratchbox\vbox to \dummyparameter\c!height \bgroup
+ \hsize\dummyparameter\c!width\relax
+ \emergencystretch10\scratchoffset
+ \parfillskip\zeropoint
+ \baselineskip\zeropoint plus \onepoint minus \onepoint
+ \beginofshapebox
+ \leftskip \scratchoffset
+ \rightskip\scratchoffset}
+
+\unexpanded\def\stopnicelyfilledbox
+ {\doifelse{\dummyparameter\c!strut}\v!yes
+ {\xdef\doflushnicelyfilledbox
+ {\ht\shapebox\the\strutht
+ \dp\shapebox\the\strutdp
+ \box\shapebox}}%
+ {\gdef\doflushnicelyfilledbox
+ {\box\shapebox}}%
+ \endofshapebox
+ \doreshapebox
+ {\doflushnicelyfilledbox}
+ {\penalty\shapepenalty}
+ {\kern\shapekern}
+ {\vfil}%
+ \kern\scratchoffset
+ \vfilneg
+ \flushshapebox
+ \vfilneg
+ \kern\scratchoffset
+ \egroup
+ \box\scratchbox
+ \egroup}
+
\protect \endinput
diff --git a/tex/context/base/typo-wrp.lua b/tex/context/base/typo-wrp.lua
new file mode 100644
index 000000000..07639392f
--- /dev/null
+++ b/tex/context/base/typo-wrp.lua
@@ -0,0 +1,76 @@
+if not modules then modules = { } end modules ['typo-wrp'] = {
+ version = 1.001,
+ comment = "companion to typo-wrp.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- begin/end par wrapping stuff ... more to come
+
+local nodecodes = nodes.nodecodes
+
+local glue_code = nodecodes.glue
+local penalty_code = nodecodes.penalty
+local parfill_skip_code = nodes.gluecodes.parfillskip
+local user_penalty_code = nodes.penaltycodes.userpenalty
+
+local nuts = nodes.nuts
+local tonut = nodes.tonut
+local tonode = nodes.tonode
+
+local findtail = nuts.tail
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getfield = nuts.getfield
+local remove = nuts.remove
+
+local wrappers = { }
+typesetters.wrappers = wrappers
+
+local trace_wrappers = trackers.register("typesetters.wrappers",function(v) trace_wrappers = v end)
+
+local report = logs.reporter("paragraphs","wrappers")
+
+-- we really need to pass tail too ... but then we need to check all the plugins
+-- bah ... slowdown
+
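+-- What we look for is the combination that a \crlf leaves behind when it ends up
+-- directly before the end of a paragraph:
+--
+--   ... [penalty -10000 (the forced break)] [penalty 10000] [\parfillskip glue]
+--
+-- Such a break only produces an empty last line, so we remove it.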
+local function remove_dangling_crlf(head,tail)
+ if tail and getid(tail) == glue_code and getsubtype(tail) == parfill_skip_code then
+ tail = getprev(tail)
+ if tail and getid(tail) == penalty_code and getsubtype(tail) == user_penalty_code and getfield(tail,"penalty") == 10000 then
+ tail = getprev(tail)
+ if tail and getid(tail) == penalty_code and getsubtype(tail) == user_penalty_code and getfield(tail,"penalty") == -10000 then
+ if tail == head then
+ -- can't happen
+ else
+ if trace_wrappers then
+ report("removing a probably unwanted end-of-par break in line %s (guess)",tex.inputlineno)
+ end
+ remove(head,tail,true)
+ return head, tail, true
+ end
+ end
+ end
+ end
+ return head, tail, false
+end
+
+function wrappers.handler(head)
+ local head = tonut(head)
+ if head then
+ local tail = findtail(head)
+ local done = false
+ head, tail, done = remove_dangling_crlf(head,tail) -- will be action chain
+ end
+    return tonode(head), true
+end
+
+interfaces.implement {
+ name = "enablecrlf",
+ onlyonce = true,
+ actions = function()
+ nodes.tasks.enableaction("processors","typesetters.wrappers.handler")
+ end
+}
diff --git a/tex/context/base/typo-wrp.mkiv b/tex/context/base/typo-wrp.mkiv
new file mode 100644
index 000000000..0538a9662
--- /dev/null
+++ b/tex/context/base/typo-wrp.mkiv
@@ -0,0 +1,65 @@
+%D \module
+%D [ file=typo-wrp,
+%D version=2014.11.09,
+%D title=\CONTEXT\ Typesetting Macros,
+%D subtitle=Wrappers,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Typesetting Macros / Wrapping}
+
+\unprotect
+
+\registerctxluafile{typo-wrp}{1.001}
+
+%D This definition has moved from page-lin.tex to spac-hor.tex (due to
+%D visualization added in august 2003) and now to here (november 2014)
+%D due to catching border cases in dirty and messy \XML\ sources.
+
+% \unexpanded\def\crlf
+% {\ifhmode
+% \unskip
+% \prewordbreak\crlfplaceholder
+% \ifcase\raggedstatus\hfil\or\or\or\hfil\fi
+% \break
+% \else
+% \crlfplaceholder
+% \endgraf
+% \fi}
+
+\unexpanded\def\crlf
+ {\ifhmode
+ \expandafter\spac_crlf
+ \fi}
+
+\unexpanded\def\spac_crlf
+ {\clf_enablecrlf % once
+ \unskip
+ \prewordbreak % here or in \spac_crlf_placeholder
+ \spac_crlf_placeholder
+ \ifcase\raggedstatus\hfil\or\or\or\hfil\fi
+ \break
+ \ignorespaces}
+
+\unexpanded\def\spac_crlf_placeholder
+ {\strut}
+
+\unexpanded\def\spac_crlf_placeholder_show
+ {\hbox to \zeropoint{\strut{\infofont\kern.25\emwidth}\lohi{\infofont CR}{\infofont LF}\hss}}
+
+\unexpanded\def\settestcrlf
+ {\let\spac_crlf_placeholder\spac_crlf_placeholder_show}
+
+\unexpanded\def\crlfplaceholder % for old times sake
+ {\spac_crlf_placeholder}
+
+\appendtoks
+ \let\spac_crlf_placeholder\empty
+\to \everysetnostrut
+
+\protect \endinput
diff --git a/tex/context/base/unic-ini.lua b/tex/context/base/unic-ini.lua
index cca1f0617..132c92efa 100644
--- a/tex/context/base/unic-ini.lua
+++ b/tex/context/base/unic-ini.lua
@@ -11,9 +11,13 @@ local utfchar = utf.char
-- Beware, initializing unicodechar happens at first usage and takes
-- 0.05 -- 0.1 second (lots of function calls).
-function commands.unicodechar(asked)
- local n = characters.unicodechar(asked)
- if n then
- context(utfchar(n))
+interfaces.implement {
+ name = "unicodechar",
+ arguments = "string",
+ actions = function(asked)
+ local n = characters.unicodechar(asked)
+ if n then
+ context(utfchar(n))
+ end
end
-end
+}
diff --git a/tex/context/base/unic-ini.mkiv b/tex/context/base/unic-ini.mkiv
index ece0da283..13ad4bdb9 100644
--- a/tex/context/base/unic-ini.mkiv
+++ b/tex/context/base/unic-ini.mkiv
@@ -26,8 +26,7 @@
%D
%D \typebuffer \getbuffer
-%def\unicodechar#1{\char\numexpr#1\relax} % no lookahead
-\def\unicodechar#1{\ctxcommand{unicodechar("#1")}}
+\def\unicodechar#1{\clf_unicodechar{#1}}
\unexpanded\def\unknownchar
{\dontleavehmode\hbox{\vrule\s!width.5\emwidth\s!height\exheight\s!depth\zeropoint}}
diff --git a/tex/context/base/util-deb.lua b/tex/context/base/util-deb.lua
index 785373f86..ee732b3b5 100644
--- a/tex/context/base/util-deb.lua
+++ b/tex/context/base/util-deb.lua
@@ -92,37 +92,41 @@ end
function debugger.disable()
debug.sethook()
---~ counters[debug.getinfo(2,"f").func] = nil
+ -- counters[debug.getinfo(2,"f").func] = nil
end
---~ debugger.enable()
-
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
-
---~ debugger.disable()
-
---~ print("")
---~ debugger.showstats()
---~ print("")
---~ debugger.showstats(print,3)
-
+-- debugger.enable()
+--
+-- print(math.sin(1*.5))
+-- print(math.sin(1*.5))
+-- print(math.sin(1*.5))
+-- print(math.sin(1*.5))
+-- print(math.sin(1*.5))
+--
+-- debugger.disable()
+--
+-- print("")
+-- debugger.showstats()
+-- print("")
+-- debugger.showstats(print,3)
+--
-- from the lua book:
-function traceback()
- local level = 1
+local function showtraceback(rep) -- from lua site / adapted
+ local level = 2 -- we don't want this function to be reported
+ local reporter = rep or report
while true do
- local info = debug.getinfo(level, "Sl")
+ local info = getinfo(level, "Sl")
if not info then
break
elseif info.what == "C" then
- print(format("%3i : C function",level))
+ reporter("%2i : %s",level-1,"C function")
else
- print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ reporter("%2i : %s : %s",level-1,info.short_src,info.currentline)
end
level = level + 1
end
end
+
+debugger.showtraceback = showtraceback
+-- debug.showtraceback = showtraceback
diff --git a/tex/context/base/util-dim.lua b/tex/context/base/util-dim.lua
index 69061495f..2bdb870e7 100644
--- a/tex/context/base/util-dim.lua
+++ b/tex/context/base/util-dim.lua
@@ -24,13 +24,15 @@ local formatters = string.formatters
local texget = tex and tex.get or function() return 65536*10*100 end
+local p_stripzeros = lpeg.patterns.stripzeros
+
--this might become another namespace
number = number or { }
local number = number
-number.tonumberf = function(n) return match(format("%.20f",n),"(.-0?)0*$") end -- one zero too much but alas
-number.tonumberg = function(n) return format("%.20g",n) end
+number.tonumberf = function(n) return lpegmatch(p_stripzeros,format("%.20f",n)) end
+number.tonumberg = function(n) return format("%.20g",n) end
local dimenfactors = allocate {
["pt"] = 1/65536,
@@ -46,66 +48,65 @@ local dimenfactors = allocate {
["nc"] = ( 5080/65043)/65536
}
---~ print(table.serialize(dimenfactors))
---~
---~ %.99g:
---~
---~ t={
---~ ["bp"]=1.5201782378580324e-005,
---~ ["cc"]=1.1883696112892098e-006,
---~ ["cm"]=5.3628510057769479e-007,
---~ ["dd"]=1.4260435335470516e-005,
---~ ["em"]=0.000152587890625,
---~ ["ex"]=6.103515625e-005,
---~ ["in"]=2.1113586636917117e-007,
---~ ["mm"]=5.3628510057769473e-008,
---~ ["nc"]=1.1917446679504327e-006,
---~ ["nd"]=1.4300936015405194e-005,
---~ ["pc"]=1.2715657552083333e-006,
---~ ["pt"]=1.52587890625e-005,
---~ ["sp"]=1,
---~ }
---~
---~ patched %s and tonumber
---~
---~ t={
---~ ["bp"]=0.00001520178238,
---~ ["cc"]=0.00000118836961,
---~ ["cm"]=0.0000005362851,
---~ ["dd"]=0.00001426043534,
---~ ["em"]=0.00015258789063,
---~ ["ex"]=0.00006103515625,
---~ ["in"]=0.00000021113587,
---~ ["mm"]=0.00000005362851,
---~ ["nc"]=0.00000119174467,
---~ ["nd"]=0.00001430093602,
---~ ["pc"]=0.00000127156576,
---~ ["pt"]=0.00001525878906,
---~ ["sp"]=1,
---~ }
+-- print(table.serialize(dimenfactors))
+--
+-- %.99g:
+--
+-- t={
+-- ["bp"]=1.5201782378580324e-005,
+-- ["cc"]=1.1883696112892098e-006,
+-- ["cm"]=5.3628510057769479e-007,
+-- ["dd"]=1.4260435335470516e-005,
+-- ["em"]=0.000152587890625,
+-- ["ex"]=6.103515625e-005,
+-- ["in"]=2.1113586636917117e-007,
+-- ["mm"]=5.3628510057769473e-008,
+-- ["nc"]=1.1917446679504327e-006,
+-- ["nd"]=1.4300936015405194e-005,
+-- ["pc"]=1.2715657552083333e-006,
+-- ["pt"]=1.52587890625e-005,
+-- ["sp"]=1,
+-- }
+--
+-- patched %s and tonumber
+--
+-- t={
+-- ["bp"]=0.00001520178238,
+-- ["cc"]=0.00000118836961,
+-- ["cm"]=0.0000005362851,
+-- ["dd"]=0.00001426043534,
+-- ["em"]=0.00015258789063,
+-- ["ex"]=0.00006103515625,
+-- ["in"]=0.00000021113587,
+-- ["mm"]=0.00000005362851,
+-- ["nc"]=0.00000119174467,
+-- ["nd"]=0.00001430093602,
+-- ["pc"]=0.00000127156576,
+-- ["pt"]=0.00001525878906,
+-- ["sp"]=1,
+-- }
--[[ldx--
A conversion function that takes a number, unit (string) and optional
format (string) is implemented using this table.
--ldx]]--
+local f_none = formatters["%s%s"]
+local f_true = formatters["%0.5F%s"]
-local function numbertodimen(n,unit,fmt)
+local function numbertodimen(n,unit,fmt) -- will be redefined later !
if type(n) == 'string' then
return n
else
unit = unit or 'pt'
+ n = n * dimenfactors[unit]
if not fmt then
- fmt = "%s%s"
+            return f_none(n,unit)
elseif fmt == true then
- fmt = "%0.5f%s"
+            return f_true(n,unit)
+ else
+ return formatters[fmt](n,unit)
end
- return format(fmt,n*dimenfactors[unit],unit)
- -- if fmt then
- -- return format(fmt,n*dimenfactors[unit],unit)
- -- else
- -- return match(format("%.20f",n*dimenfactors[unit]),"(.-0?)0*$") .. unit
- -- end
end
end
diff --git a/tex/context/base/util-env.lua b/tex/context/base/util-env.lua
index 0a708ea43..b72226900 100644
--- a/tex/context/base/util-env.lua
+++ b/tex/context/base/util-env.lua
@@ -9,11 +9,11 @@ if not modules then modules = { } end modules ['util-env'] = {
local allocate, mark = utilities.storage.allocate, utilities.storage.mark
local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
-local unquoted, quoted = string.unquoted, string.quoted
+local unquoted, quoted, optionalquoted = string.unquoted, string.quoted, string.optionalquoted
local concat, insert, remove = table.concat, table.insert, table.remove
-environment = environment or { }
-local environment = environment
+environment = environment or { }
+local environment = environment
-- precautions
@@ -182,26 +182,14 @@ function environment.splitarguments(separator) -- rather special, cut-off before
end
function environment.reconstructcommandline(arg,noquote)
+ local resolveprefix = resolvers.resolve -- something rather special
arg = arg or environment.originalarguments
if noquote and #arg == 1 then
- -- we could just do: return unquoted(resolvers.resolve(arg[i]))
- local a = arg[1]
- a = resolvers.resolve(a)
- a = unquoted(a)
- return a
+ return unquoted(resolveprefix and resolveprefix(arg[1]) or arg[1])
elseif #arg > 0 then
local result = { }
for i=1,#arg do
- -- we could just do: result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i])))
- local a = arg[i]
- a = resolvers.resolve(a)
- a = unquoted(a)
- a = gsub(a,'"','\\"') -- tricky
- if find(a," ") then
- result[#result+1] = quoted(a)
- else
- result[#result+1] = a
- end
+            result[i] = optionalquoted(resolveprefix and resolveprefix(arg[i]) or arg[i])
end
return concat(result," ")
else
@@ -238,26 +226,10 @@ end
-- print(environment.relativepath("//x")) -- //x
-- print(environment.relativepath()) -- e:/tmp
--- -- to be tested:
---
--- function environment.reconstructcommandline(arg,noquote)
--- arg = arg or environment.originalarguments
--- if noquote and #arg == 1 then
--- return unquoted(resolvers.resolve(arg[1]))
--- elseif #arg > 0 then
--- local result = { }
--- for i=1,#arg do
--- result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i]))) -- always quote
--- end
--- return concat(result," ")
--- else
--- return ""
--- end
--- end
-
if arg then
-- new, reconstruct quoted snippets (maybe better just remove the " then and add them later)
+
local newarg, instring = { }, false
for index=1,#arg do
diff --git a/tex/context/base/util-prs.lua b/tex/context/base/util-prs.lua
index e5b35a727..a3c1c6f8f 100644
--- a/tex/context/base/util-prs.lua
+++ b/tex/context/base/util-prs.lua
@@ -21,6 +21,8 @@ parsers.patterns = patterns
local setmetatableindex = table.setmetatableindex
local sortedhash = table.sortedhash
+local sortedkeys = table.sortedkeys
+local tohash = table.tohash
-- we share some patterns
@@ -94,9 +96,7 @@ patterns.settings_to_hash_b = pattern_b_s
patterns.settings_to_hash_c = pattern_c_s
function parsers.make_settings_to_hash_pattern(set,how)
- if type(str) == "table" then
- return set
- elseif how == "strict" then
+ if how == "strict" then
return (pattern_c/set)^1
elseif how == "tolerant" then
return (pattern_b/set)^1
@@ -106,7 +106,9 @@ function parsers.make_settings_to_hash_pattern(set,how)
end
function parsers.settings_to_hash(str,existing)
- if type(str) == "table" then
+ if not str or str == "" then
+ return { }
+ elseif type(str) == "table" then
if existing then
for k, v in next, str do
existing[k] = v
@@ -115,17 +117,17 @@ function parsers.settings_to_hash(str,existing)
else
return str
end
- elseif str and str ~= "" then
+ else
hash = existing or { }
lpegmatch(pattern_a_s,str)
return hash
- else
- return { }
end
end
function parsers.settings_to_hash_tolerant(str,existing)
- if type(str) == "table" then
+ if not str or str == "" then
+ return { }
+ elseif type(str) == "table" then
if existing then
for k, v in next, str do
existing[k] = v
@@ -134,17 +136,17 @@ function parsers.settings_to_hash_tolerant(str,existing)
else
return str
end
- elseif str and str ~= "" then
+ else
hash = existing or { }
lpegmatch(pattern_b_s,str)
return hash
- else
- return { }
end
end
function parsers.settings_to_hash_strict(str,existing)
- if type(str) == "table" then
+ if not str or str == "" then
+ return nil
+ elseif type(str) == "table" then
if existing then
for k, v in next, str do
existing[k] = v
@@ -157,8 +159,6 @@ function parsers.settings_to_hash_strict(str,existing)
hash = existing or { }
lpegmatch(pattern_c_s,str)
return next(hash) and hash
- else
- return nil
end
end
@@ -167,24 +167,24 @@ local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace)
+ C((nestedbraces + (1-comma))^0)
local pattern = spaces * Ct(value*(separator*value)^0)
--- "aap, {noot}, mies" : outer {} removes, leading spaces ignored
+-- "aap, {noot}, mies" : outer {} removed, leading spaces ignored
patterns.settings_to_array = pattern
-- we could use a weak table as cache
function parsers.settings_to_array(str,strict)
- if type(str) == "table" then
- return str
- elseif not str or str == "" then
+ if not str or str == "" then
return { }
+ elseif type(str) == "table" then
+ return str
elseif strict then
- if find(str,"{") then
+ if find(str,"{",1,true) then
return lpegmatch(pattern,str)
else
return { str }
end
- elseif find(str,",") then
+ elseif find(str,",",1,true) then
return lpegmatch(pattern,str)
else
return { str }
@@ -195,12 +195,40 @@ end
--
-- "{123} , 456 " -> "123" "456"
-local separator = space^0 * comma * space^0
-local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace)
- + C((nestedbraces + (1-(space^0*(comma+P(-1)))))^0)
-local withvalue = Carg(1) * value / function(f,s) return f(s) end
-local pattern_a = spaces * Ct(value*(separator*value)^0)
-local pattern_b = spaces * withvalue * (separator*withvalue)^0
+-- local separator = space^0 * comma * space^0
+-- local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace)
+-- + C((nestedbraces + (1-(space^0*(comma+P(-1)))))^0)
+-- local withvalue = Carg(1) * value / function(f,s) return f(s) end
+-- local pattern_a = spaces * Ct(value*(separator*value)^0)
+-- local pattern_b = spaces * withvalue * (separator*withvalue)^0
+
+local cache_a = { }
+local cache_b = { }
+
+function parsers.groupedsplitat(symbol,withaction)
+ if not symbol then
+ symbol = ","
+ end
+ local pattern = (withaction and cache_b or cache_a)[symbol]
+ if not pattern then
+ local symbols = S(symbol)
+ local separator = space^0 * symbols * space^0
+ local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace)
+ + C((nestedbraces + (1-(space^0*(symbols+P(-1)))))^0)
+ if withaction then
+ local withvalue = Carg(1) * value / function(f,s) return f(s) end
+ pattern = spaces * withvalue * (separator*withvalue)^0
+ cache_b[symbol] = pattern
+ else
+ pattern = spaces * Ct(value*(separator*value)^0)
+ cache_a[symbol] = pattern
+ end
+ end
+ return pattern
+end
+
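+-- For instance:
+--
+--   lpegmatch(parsers.groupedsplitat(";"),"{123} ; 456") -- { "123", "456" }
+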
+local pattern_a = parsers.groupedsplitat(",",false)
+local pattern_b = parsers.groupedsplitat(",",true)
function parsers.stripped_settings_to_array(str)
if not str or str == "" then
@@ -221,8 +249,6 @@ end
-- parsers.process_stripped_settings("{123} , 456 ",function(s) print("["..s.."]") end)
-- parsers.process_stripped_settings("123 , 456 ",function(s) print("["..s.."]") end)
---
-
local function set(t,v)
t[#t+1] = v
end
@@ -236,8 +262,8 @@ end
function parsers.hash_to_string(h,separator,yes,no,strict,omit)
if h then
- local t, tn, s = { }, 0, table.sortedkeys(h)
- omit = omit and table.tohash(omit)
+ local t, tn, s = { }, 0, sortedkeys(h)
+ omit = omit and tohash(omit)
for i=1,#s do
local key = s[i]
if not omit or not omit[key] then
@@ -275,15 +301,25 @@ function parsers.array_to_string(a,separator)
end
end
-function parsers.settings_to_set(str,t) -- tohash? -- todo: lpeg -- duplicate anyway
- t = t or { }
--- for s in gmatch(str,"%s*([^, ]+)") do -- space added
- for s in gmatch(str,"[^, ]+") do -- space added
- t[s] = true
- end
- return t
+-- function parsers.settings_to_set(str,t) -- tohash? -- todo: lpeg -- duplicate anyway
+-- if str then
+-- t = t or { }
+-- for s in gmatch(str,"[^, ]+") do -- space added
+-- t[s] = true
+-- end
+-- return t
+-- else
+-- return { }
+-- end
+-- end
+
+local pattern = Cf(Ct("") * Cg(C((1-S(", "))^1) * S(", ")^0 * Cc(true))^1,rawset)
+
+function utilities.parsers.settings_to_set(str,t)
+ return str and lpegmatch(pattern,str) or { }
end
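+
+-- For instance:
+--
+-- parsers.settings_to_set("foo, bar,baz") -- { foo = true, bar = true, baz = true }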
+
function parsers.simple_hash_to_string(h, separator)
local t, tn = { }, 0
for k, v in sortedhash(h) do
@@ -297,7 +333,7 @@ end
-- for mtx-context etc: aaaa bbbb cccc=dddd eeee=ffff
-local str = C((1-whitespace-equal)^1)
+local str = Cs(lpegpatterns.unquoted) + C((1-whitespace-equal)^1)
local setting = Cf( Carg(1) * (whitespace^0 * Cg(str * whitespace^0 * (equal * whitespace^0 * str + Cc(""))))^1,rawset)
local splitter = setting^1
@@ -305,6 +341,12 @@ function utilities.parsers.options_to_hash(str,target)
return str and lpegmatch(splitter,str,1,target or { }) or { }
end
+local splitter = lpeg.tsplitat(" ")
+
+function utilities.parsers.options_to_array(str)
+ return str and lpegmatch(splitter,str) or { }
+end
+
-- for chem (currently one level)
local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace)
@@ -436,7 +478,7 @@ local defaultspecification = { separator = ",", quote = '"' }
-- database module
function parsers.csvsplitter(specification)
- specification = specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ specification = specification and setmetatableindex(specification,defaultspecification) or defaultspecification
local separator = specification.separator
local quotechar = specification.quote
local separator = S(separator ~= "" and separator or ",")
@@ -475,7 +517,7 @@ end
-- local list, names = mycsvsplitter(crap) inspect(list) inspect(names)
function parsers.rfc4180splitter(specification)
- specification = specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ specification = specification and setmetatableindex(specification,defaultspecification) or defaultspecification
local separator = specification.separator --> rfc: COMMA
local quotechar = P(specification.quote) --> DQUOTE
local dquotechar = quotechar * quotechar --> 2DQUOTE
@@ -488,7 +530,7 @@ function parsers.rfc4180splitter(specification)
local field = escaped + non_escaped + Cc("")
local record = Ct(field * (separator * field)^1)
local headerline = record * Cp()
- local wholeblob = Ct((newline^-1 * record)^0)
+ local wholeblob = Ct((newline^(specification.strict and -1 or 1) * record)^0)
return function(data,getheader)
if getheader then
local header, position = lpegmatch(headerline,data)
@@ -542,8 +584,8 @@ end
--
-local pattern_math = Cs((P("%")/"\\percent " + P("^") * Cc("{") * lpegpatterns.integer * Cc("}") + P(1))^0)
-local pattern_text = Cs((P("%")/"\\percent " + (P("^")/"\\high") * Cc("{") * lpegpatterns.integer * Cc("}") + P(1))^0)
+local pattern_math = Cs((P("%")/"\\percent " + P("^") * Cc("{") * lpegpatterns.integer * Cc("}") + anything)^0)
+local pattern_text = Cs((P("%")/"\\percent " + (P("^")/"\\high") * Cc("{") * lpegpatterns.integer * Cc("}") + anything)^0)
patterns.unittotex = pattern
@@ -551,7 +593,7 @@ function parsers.unittotex(str,textmode)
return lpegmatch(textmode and pattern_text or pattern_math,str)
end
-local pattern = Cs((P("^") / "" * lpegpatterns.integer * Cc("") + P(1))^0)
+local pattern = Cs((P("^") / "" * lpegpatterns.integer * Cc("") + anything)^0)
function parsers.unittoxml(str)
return lpegmatch(pattern,str)
@@ -560,10 +602,10 @@ end
-- print(utilities.parsers.unittotex("10^-32 %"),utilities.parsers.unittoxml("10^32 %"))
local cache = { }
-local spaces = lpeg.patterns.space^0
+local spaces = lpegpatterns.space^0
local dummy = function() end
-table.setmetatableindex(cache,function(t,k)
+setmetatableindex(cache,function(t,k)
local separator = P(k)
local value = (1-separator)^0
local pattern = spaces * C(value) * separator^0 * Cp()
@@ -648,3 +690,27 @@ function utilities.parsers.runtime(time)
local seconds = mod(time,60)
return days, hours, minutes, seconds
end
+
+--
+
+local spacing = whitespace^0
+local apply = P("->")
+local method = C((1-apply)^1)
+local token = lbrace * C((1-rbrace)^1) * rbrace + C(anything^1)
+
+local pattern = spacing * (method * spacing * apply + Carg(1)) * spacing * token
+
+function utilities.parsers.splitmethod(str,default)
+ if str then
+ return lpegmatch(pattern,str,1,default or false)
+ else
+ return default or false, ""
+ end
+end
+
+-- print(utilities.parsers.splitmethod(" foo -> {bar} "))
+-- print(utilities.parsers.splitmethod("foo->{bar}"))
+-- print(utilities.parsers.splitmethod("foo->bar"))
+-- print(utilities.parsers.splitmethod("foo"))
+-- print(utilities.parsers.splitmethod("{foo}"))
+-- print(utilities.parsers.splitmethod())
diff --git a/tex/context/base/util-sbx.lua b/tex/context/base/util-sbx.lua
new file mode 100644
index 000000000..260e8b3b5
--- /dev/null
+++ b/tex/context/base/util-sbx.lua
@@ -0,0 +1,415 @@
+if not modules then modules = { } end modules ['util-sbx'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Note: we use expandname and collapsepath and these use chdir
+-- which is overloaded so we need to use originals there. Just
+-- something to keep in mind.
+
+if not sandbox then require("l-sandbox") end -- for testing
+
+local next, type = next, type
+
+local replace = utilities.templates.replace
+local collapsepath = file.collapsepath
+local expandname = dir.expandname
+local sortedhash = table.sortedhash
+local lpegmatch = lpeg.match
+local platform = os.type
+local P, S, C = lpeg.P, lpeg.S, lpeg.C
+local gsub = string.gsub
+local lower = string.lower
+local unquoted = string.unquoted
+local optionalquoted = string.optionalquoted
+
+local sandbox = sandbox
+local validroots = { }
+local validrunners = { }
+local validbinaries = { }
+local validators = { }
+local p_validroot = nil
+local finalized = nil
+local norunners = false
+local trace = false
+local p_split = lpeg.tsplitat(" ") -- more spaces?
+
+local report = logs.reporter("sandbox")
+
+trackers.register("sandbox",function(v) trace = v end) -- often too late anyway
+
+sandbox.setreporter(report)
+
+sandbox.finalizer(function()
+ finalized = true
+end)
+
+local function registerroot(root,what) -- what == read|write
+ if finalized then
+ report("roots are already finalized")
+ else
+ root = collapsepath(expandname(root))
+ if platform == "windows" then
+ root = lower(root) -- we assume ascii names
+ end
+ -- true: read & write | false: read
+ validroots[root] = what == "write" or false
+ end
+end
+
+sandbox.finalizer(function() -- initializers can set the path
+ if p_validroot then
+ report("roots are already initialized")
+ else
+ sandbox.registerroot(".","write") -- always ok
+ -- also register texmf as read
+ for name in sortedhash(validroots) do
+ if p_validroot then
+ p_validroot = P(name) + p_validroot
+ else
+ p_validroot = P(name)
+ end
+ end
+ p_validroot = p_validroot / validroots
+ end
+end)
+
+local function registerrunner(specification)
+ if finalized then
+ report("runners are already finalized")
+ else
+ local name = specification.name
+ if not name then
+ report("no runner name specified")
+ return
+ end
+ local program = specification.program
+ if type(program) == "string" then
+ -- common for all platforms
+ elseif type(program) == "table" then
+ program = program[platform]
+ end
+ if type(program) ~= "string" or program == "" then
+ report("invalid runner %a specified for platform %a",name,platform)
+ return
+ end
+ specification.program = program
+ validrunners[name] = specification
+ end
+end
+
+local function registerbinary(name)
+ if finalized then
+ report("binaries are already finalized")
+ elseif type(name) == "string" and name ~= "" then
+ validbinaries[name] = true
+ end
+end
+
+-- begin of validators
+
+local p_write = S("wa") p_write = (1 - p_write)^0 * p_write
+local p_path = S("\\/~$%:") p_path = (1 - p_path )^0 * p_path -- be easy on other arguments
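+
+-- The p_write pattern succeeds when a mode string asks for write access (a "w" or
+-- "a" somewhere in it), p_path when a name looks like a path at all; names that
+-- don't look like paths are passed through untouched by the validators below.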
+
+local function normalized(name) -- only used in executers
+ if platform == "windows" then
+ name = gsub(name,"/","\\")
+ end
+ return name
+end
+
+function sandbox.possiblepath(name)
+ return lpegmatch(p_path,name) and true or false
+end
+
+local filenamelogger = false
+
+function sandbox.setfilenamelogger(l)
+ filenamelogger = type(l) == "function" and l or false
+end
+
+local function validfilename(name,what)
+ if p_validroot and type(name) == "string" and lpegmatch(p_path,name) then
+ local asked = collapsepath(expandname(name))
+ if platform == "windows" then
+ asked = lower(asked) -- we assume ascii names
+ end
+ local okay = lpegmatch(p_validroot,asked)
+ if okay == true then
+ -- read and write access
+ if filenamelogger then
+ filenamelogger(name,"w",asked,true)
+ end
+ return name
+ elseif okay == false then
+ -- read only access
+ if not what then
+ -- no further argument to io.open so a readonly case
+ if filenamelogger then
+ filenamelogger(name,"r",asked,true)
+ end
+ return name
+ elseif lpegmatch(p_write,what) then
+ if filenamelogger then
+ filenamelogger(name,"w",asked,false)
+ end
+ return -- we want write access
+ else
+ if filenamelogger then
+ filenamelogger(name,"r",asked,true)
+ end
+ return name
+ end
+ else
+ if filenamelogger then
+ filenamelogger(name,"*",name,false)
+ end
+ end
+ else
+ return name
+ end
+end
+
+local function readable(name)
+ if platform == "windows" then
+ name = lower(name) -- we assume ascii names
+ end
+ local valid = validfilename(name,"r")
+ if valid then
+ return normalized(valid)
+ end
+end
+
+local function writeable(name)
+ if platform == "windows" then
+ name = lower(name) -- we assume ascii names
+ end
+ local valid = validfilename(name,"w")
+ if valid then
+ return normalized(valid)
+ end
+end
+
+validators.readable = readable
+validators.writeable = writeable
+validators.filename = readable
+
+table.setmetatableindex(validators,function(t,k)
+ if k then
+ t[k] = readable
+ end
+ return readable
+end)
+
+function validators.string(s)
+ return s -- can be used to prevent filename checking
+end
+
+-- end of validators
+
+sandbox.registerroot = registerroot
+sandbox.registerrunner = registerrunner
+sandbox.registerbinary = registerbinary
+sandbox.validfilename = validfilename
+
+local function filehandlerone(action,one,...)
+ local checkedone = validfilename(one)
+ if checkedone then
+ return action(one,...)
+ else
+-- report("file %a is unreachable",one)
+ end
+end
+
+local function filehandlertwo(action,one,two,...)
+ local checkedone = validfilename(one)
+ if checkedone then
+ local checkedtwo = validfilename(two)
+ if checkedtwo then
+ return action(one,two,...)
+ else
+-- report("file %a is unreachable",two)
+ end
+ else
+-- report("file %a is unreachable",one)
+ end
+end
+
+local function iohandler(action,one,...)
+ if type(one) == "string" then
+ local checkedone = validfilename(one)
+ if checkedone then
+ return action(one,...)
+ end
+ elseif one then
+ return action(one,...)
+ else
+ return action()
+ end
+end
+
+-- runners can be strings or tables
+--
+-- os.execute : string
+-- os.exec : table with program in [0|1]
+-- os.spawn : table with program in [0|1]
+--
+-- our execute: registered program with specification
+
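+-- A sketch of how a runner could be registered and used (the name, the checker and
+-- the %...% template syntax are illustrative assumptions, not part of this file):
+--
+--   sandbox.registerrunner {
+--       name     = "lister",
+--       program  = "ls %path%",
+--       checkers = { path = "readable" },
+--   }
+--
+--   sandbox.run("lister", { path = "." })
+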
+local function runhandler(action,name,specification)
+ local kind = type(name)
+ if kind ~= "string" then
+ return
+ end
+ if norunners then
+ report("no runners permitted, ignoring command: %s",name)
+ return
+ end
+ local spec = validrunners[name]
+ if not spec then
+ report("unknown runner: %s",name)
+ return
+ end
+ -- specs are already checked
+ local program = spec.program
+ local variables = { }
+ local checkers = spec.checkers or { }
+ if specification then
+ -- we only handle runners that are defined before the sandbox is
+ -- closed so in principle we cannot have user runs with no files
+ -- while for context runners we assume a robust specification
+ for k, v in next, specification do
+ local checker = validators[checkers[k]]
+ local value = checker(unquoted(v)) -- todo: write checkers
+ if value then
+ variables[k] = optionalquoted(value)
+ else
+ report("suspicious argument found, run blocked: %s",v)
+ return
+ end
+ end
+ end
+ local command = replace(program,variables)
+ if trace then
+ report("executing runner: %s",command)
+ end
+ return action(command)
+end
+
+-- only registered (from list) -- no checking on writable so let's assume harmless
+-- runs
+
+local function binaryhandler(action,name)
+ local kind = type(name)
+ local list = name
+ if kind == "string" then
+ list = lpegmatch(p_split,name)
+ end
+    local program = list[0] or list[1]
+ if type(program) ~= "string" or program == "" then
+ return --silently ignore
+ end
+ if norunners then
+ report("no binaries permitted, ignoring command: %s",program)
+ return
+ end
+ if not validbinaries[program] then
+ report("binary is not permitted: %s",program)
+ return
+ end
+ for i=0,#list do
+ local n = list[i]
+ if n then
+ local v = readable(unquoted(n))
+ if v then
+ list[i] = optionalquoted(v)
+ else
+ report("suspicious argument found, run blocked: %s",n)
+ return
+ end
+ end
+ end
+ return action(name)
+end
+
+sandbox.filehandlerone = filehandlerone
+sandbox.filehandlertwo = filehandlertwo
+sandbox.iohandler = iohandler
+sandbox.runhandler = runhandler
+sandbox.binaryhandler = binaryhandler
+
+function sandbox.disablerunners()
+ norunners = true
+end
+
+local execute = sandbox.original(os.execute)
+
+function sandbox.run(name,specification)
+ return runhandler(execute,name,specification)
+end
+
+-------------------
+
+local overload = sandbox.overload
+local register = sandbox.register
+
+ overload(loadfile, filehandlerone,"loadfile") -- todo
+
+if io then
+ overload(io.open, filehandlerone,"io.open")
+ overload(io.popen, filehandlerone,"io.popen")
+ overload(io.input, iohandler, "io.input")
+ overload(io.output, iohandler, "io.output")
+ overload(io.lines, filehandlerone,"io.lines")
+end
+
+if os then
+ overload(os.execute, binaryhandler, "os.execute")
+ overload(os.spawn, binaryhandler, "os.spawn")
+ overload(os.exec, binaryhandler, "os.exec")
+ overload(os.rename, filehandlertwo,"os.rename")
+ overload(os.remove, filehandlerone,"os.remove")
+end
+
+if lfs then
+ overload(lfs.chdir, filehandlerone,"lfs.chdir")
+ overload(lfs.mkdir, filehandlerone,"lfs.mkdir")
+ overload(lfs.rmdir, filehandlerone,"lfs.rmdir")
+ overload(lfs.isfile, filehandlerone,"lfs.isfile")
+ overload(lfs.isdir, filehandlerone,"lfs.isdir")
+ overload(lfs.attributes, filehandlerone,"lfs.attributes")
+ overload(lfs.dir, filehandlerone,"lfs.dir")
+ overload(lfs.lock_dir, filehandlerone,"lfs.lock_dir")
+ overload(lfs.touch, filehandlerone,"lfs.touch")
+ overload(lfs.link, filehandlertwo,"lfs.link")
+ overload(lfs.setmode, filehandlerone,"lfs.setmode")
+ overload(lfs.readlink, filehandlerone,"lfs.readlink")
+ overload(lfs.shortname, filehandlerone,"lfs.shortname")
+ overload(lfs.symlinkattributes,filehandlerone,"lfs.symlinkattributes")
+end
+
+-- these are used later on
+
+if zip then
+ zip.open = register(zip.open, filehandlerone,"zip.open")
+end
+
+if fontloader then
+ fontloader.open = register(fontloader.open,filehandlerone,"fontloader.open")
+ fontloader.info = register(fontloader.info,filehandlerone,"fontloader.info")
+end
+
+if epdf then
+ epdf.open = register(epdf.open, filehandlerone,"epdf.open")
+end
+
+-- not used in a normal mkiv run : os.spawn = os.execute
+-- not used in a normal mkiv run : os.exec = os.exec
+
+-- print(io.open("test.log"))
+-- sandbox.enable()
+-- print(io.open("test.log"))
+-- print(io.open("t:/test.log"))
diff --git a/tex/context/base/util-sci.lua b/tex/context/base/util-sci.lua
new file mode 100644
index 000000000..c3e24cd9d
--- /dev/null
+++ b/tex/context/base/util-sci.lua
@@ -0,0 +1,280 @@
+local gsub, sub, find = string.gsub, string.sub, string.find
+local concat = table.concat
+local formatters = string.formatters
+local lpegmatch = lpeg.match
+local setmetatableindex = table.setmetatableindex
+
+local scite = scite or { }
+utilities.scite = scite
+
+local report = logs.reporter("scite")
+
+local lexerroot = file.dirname(resolvers.find_file("scite-context-lexer.lua"))
+
+local knownlexers = {
+ tex = "tex", mkiv = "tex", mkvi = "tex", mkxi = "tex", mkix = "tex", mkii = "tex", cld = "tex",
+ lua = "lua", lfg = "lua", lus = "lua",
+ w = "web", ww = "web",
+ c = "cpp", h = "cpp", cpp = "cpp", hpp = "cpp", cxx = "cpp", hxx = "cpp",
+ xml = "xml", lmx = "xml", ctx = "xml", xsl = "xml", xsd = "xml", rlx = "xml", css = "xml", dtd = "xml",
+ bib = "bibtex",
+ rme = "txt",
+ -- todo: pat/hyp ori
+}
+
+lexer = nil -- main lexer, global (for the moment needed for themes)
+
+local function loadscitelexer()
+ if not lexer then
+ dir.push(lexerroot)
+ lexer = dofile("scite-context-lexer.lua")
+ dofile("themes/scite-context-theme.lua")
+ dir.pop()
+ end
+ return lexer
+end
+
+local loadedlexers = setmetatableindex(function(t,k)
+ local l = knownlexers[k] or k
+ dir.push(lexerroot)
+ loadscitelexer()
+ local v = lexer.load(formatters["scite-context-lexer-%s"](l))
+ dir.pop()
+ t[l] = v
+ t[k] = v
+ return v
+end)
+
+scite.loadedlexers = loadedlexers
+scite.knownlexers = knownlexers
+scite.loadscitelexer = loadscitelexer
+
+local f_fore_bold = formatters['.%s { display: inline ; font-weight: bold ; color: #%s%s%s ; }']
+local f_fore_none = formatters['.%s { display: inline ; font-weight: normal ; color: #%s%s%s ; }']
+local f_none_bold = formatters['.%s { display: inline ; font-weight: bold ; }']
+local f_none_none = formatters['.%s { display: inline ; font-weight: normal ; }']
+local f_div_class  = formatters['<div class="%s">%s</div>']
+local f_linenumber = formatters['<div class="linenumber">%s</div>\n']
+local f_div_number = formatters['.linenumber { display: inline-block ; font-weight: normal ; width: %sem ; margin-right: 2em ; padding-right: .25em ; text-align: right ; background-color: #C7C7C7 ; }']
+
+local replacer_regular = lpeg.replacer {
+    ["<"] = "&lt;",
+    [">"] = "&gt;",
+    ["&"] = "&amp;",
+}
+
+local linenumber = 0
+local linenumbers = { }
+
+local replacer_numbered = lpeg.replacer {
+    ["<"] = "&lt;",
+    [">"] = "&gt;",
+    ["&"] = "&amp;",
+ [lpeg.patterns.newline] = function()
+ linenumber = linenumber + 1
+ linenumbers[linenumber] = f_linenumber(linenumber)
+ return "\n"
+ end,
+}
+
+local css = nil
+
+local function exportcsslexing()
+ if not css then
+ loadscitelexer()
+ local function black(f)
+ return (f[1] == f[2]) and (f[2] == f[3]) and (f[3] == '00')
+ end
+ local result, r = { }, 0
+ for k, v in table.sortedhash(lexer.context.styles) do
+ local bold = v.bold
+ local fore = v.fore
+ r = r + 1
+ if fore and not black(fore) then
+ if bold then
+ result[r] = f_fore_bold(k,fore[1],fore[2],fore[3])
+ else
+ result[r] = f_fore_none(k,fore[1],fore[2],fore[3])
+ end
+ else
+ if bold then
+ result[r] = f_none_bold(k)
+ else
+ result[r] = f_none_none(k)
+ end
+ end
+ end
+ css = concat(result,"\n")
+ end
+ return css
+end
+
+local function exportwhites()
+ return setmetatableindex(function(t,k)
+ local v = find(k,"white") and true or false
+ t[k] = v
+ return v
+ end)
+end
+
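+-- The lexer returns a flat list { style_1, position_1, style_2, position_2, ... }
+-- where each position marks the start of the next run. We slice the text along
+-- those positions, escape it for html, and wrap every non-white run in a div that
+-- carries the style name as its class.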
+local function exportstyled(lexer,text,numbered)
+ local result = lexer.lex(lexer,text,0)
+ local start = 1
+ local whites = exportwhites()
+ local buffer = { }
+ local b = 0
+ linenumber = 0
+ linenumbers = { }
+ local replacer = numbered and replacer_numbered or replacer_regular
+ local n = #result
+ for i=1,n,2 do
+ local ii = i + 1
+ local style = result[i]
+ local position = result[ii]
+ local txt = sub(text,start,position-1)
+ txt = lpegmatch(replacer,txt)
+ b = b + 1
+ if whites[style] then
+ buffer[b] = txt
+ else
+ buffer[b] = f_div_class(style,txt)
+ end
+ start = position
+ end
+ buffer = concat(buffer)
+ return buffer, concat(linenumbers)
+end
+
+local function exportcsslinenumber()
+ return f_div_number(#tostring(linenumber)/2+1)
+end
+
+local htmlfile = utilities.templates.replacer([[
+<?xml version="1.0"?>
+<html>
+<head>
+<title>%title%</title>
+<meta http-equiv="content-type" content="text/html; charset=UTF-8"/>
+<style type="text/css">
+%lexingstyles%
+%numberstyles%
+</style>
+</head>
+<body>
+<table><tr>
+<td><pre>%linenumbers%</pre></td>
+<td><pre>%lexedcontent%</pre></td>
+</tr></table>
+</body>
+</html>
+]])
+
+function scite.tohtml(data,lexname,numbered,title)
+ local source, lines = exportstyled(loadedlexers[lexname],data or "",numbered)
+ return htmlfile {
+ lexedcontent = source, -- before numberstyles
+ lexingstyles = exportcsslexing(),
+ numberstyles = exportcsslinenumber(),
+ title = title or "context source file",
+ linenumbers = lines,
+ }
+end
+
+local function maketargetname(name)
+ if name then
+ return file.removesuffix(name) .. "-" .. file.suffix(name) .. ".html"
+ else
+ return "util-sci.html"
+ end
+end
+
+function scite.filetohtml(filename,lexname,targetname,numbered,title)
+ io.savedata(targetname or "util-sci.html",scite.tohtml(io.loaddata(filename),lexname or file.suffix(filename),numbered,title or filename))
+end
+
+function scite.css()
+ return exportcsslexing() .. "\n" .. exportcsslinenumber()
+end
+
+function scite.html(data,lexname,numbered)
+ return exportstyled(loadedlexers[lexname],data or "",numbered)
+end
+
+local f_tree_entry = formatters['<a href="%s">%s</a><br/>']
+
+local htmlfile = utilities.templates.replacer([[
+<?xml version="1.0"?>
+<html>
+<head>
+<title>%title%</title>
+<meta http-equiv="content-type" content="text/html; charset=UTF-8"/>
+<style type="text/css">
+%styles%
+</style>
+</head>
+<body>
+<pre>
+%dirlist%
+</pre>
+</body>
+</html>
+]])
+
+function scite.converttree(sourceroot,targetroot,numbered)
+ if lfs.isdir(sourceroot) then
+ statistics.starttiming()
+ local skipped = { }
+ local noffiles = 0
+ dir.makedirs(targetroot)
+ local function scan(sourceroot,targetroot,subpath)
+ local tree = { }
+ for name in lfs.dir(sourceroot) do
+ if name ~= "." and name ~= ".." then
+ local sourcename = file.join(sourceroot,name)
+ local targetname = file.join(targetroot,name)
+ local mode = lfs.attributes(sourcename,'mode')
+ local path = subpath and file.join(subpath,name) or name
+ if mode == 'file' then
+ local filetype = file.suffix(sourcename)
+ local basename = file.basename(name)
+ local targetname = maketargetname(targetname)
+ local fullname = file.join(path,name)
+ if knownlexers[filetype] then
+ report("converting file %a to %a",sourcename,targetname)
+ scite.filetohtml(sourcename,nil,targetname,numbered,fullname)
+ noffiles = noffiles + 1
+ tree[#tree+1] = f_tree_entry(file.basename(targetname),basename)
+ else
+ skipped[filetype] = true
+ report("no lexer for %a",sourcename)
+ end
+ else
+ dir.makedirs(targetname)
+ scan(sourcename,targetname,path)
+ tree[#tree+1] = f_tree_entry(file.join(name,"files.html"),name)
+ end
+ end
+ end
+ report("saving tree in %a",targetroot)
+ local htmldata = htmlfile {
+ dirlist = concat(tree,"\n"),
+ styles = "",
+ title = path or "context dir listing",
+ }
+ io.savedata(file.join(targetroot,"files.html"),htmldata)
+ end
+ scan(sourceroot,targetroot)
+ if next(skipped) then
+ report("skipped filetypes: %a",table.concat(table.sortedkeys(skipped)," "))
+ end
+ statistics.stoptiming()
+ report("conversion time for %s files: %s",noffiles,statistics.elapsedtime())
+ end
+end
+
+-- scite.filetohtml("strc-sec.mkiv",nil,"e:/tmp/util-sci.html",true)
+-- scite.filetohtml("syst-aux.mkiv",nil,"e:/tmp/util-sci.html",true)
+
+-- scite.converttree("t:/texmf/tex/context","e:/tmp/html/context",true)
+
+return scite
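
A minimal usage sketch of the exporter above. The file names and paths are only placeholders taken from the commented examples, and it assumes the module has been loaded in the usual ConTeXt Lua environment (the file ends with "return scite", so a dofile/require style load yields the same table).

-- convert one file; the lexer is guessed from the suffix via knownlexers,
-- the fourth argument enables line numbers:
scite.filetohtml("strc-sec.mkiv", nil, "strc-sec-mkiv.html", true)

-- convert a whole tree; files without a known lexer are reported and skipped:
scite.converttree("t:/texmf/tex/context", "e:/tmp/html/context", true)

-- the generated css can also be fetched separately:
local css = scite.css()
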
diff --git a/tex/context/base/util-seq.lua b/tex/context/base/util-seq.lua
index 35e693285..08fc4e95c 100644
--- a/tex/context/base/util-seq.lua
+++ b/tex/context/base/util-seq.lua
@@ -17,13 +17,15 @@ use locals to refer to them when compiling the chain.
-- todo: protect groups (as in tasks)
-local format, gsub, concat, gmatch = string.format, string.gsub, table.concat, string.gmatch
+local gsub, concat, gmatch = string.gsub, table.concat, string.gmatch
local type, load = type, load
utilities = utilities or { }
local tables = utilities.tables
local allocate = utilities.storage.allocate
+local formatters = string.formatters
+
local sequencers = { }
utilities.sequencers = sequencers
@@ -31,6 +33,7 @@ local functions = allocate()
sequencers.functions = functions
local removevalue = tables.removevalue
+local replacevalue = tables.replacevalue
local insertaftervalue = tables.insertaftervalue
local insertbeforevalue = tables.insertbeforevalue
@@ -189,6 +192,18 @@ function sequencers.removeaction(t,group,action,force)
end
end
+function sequencers.replaceaction(t,group,oldaction,newaction,force)
+ t = known[t]
+ if t then
+ local g = t.list[group]
+ if g and (force or validaction(oldaction)) then
+ replacevalue(g,oldaction,newaction)
+ t.dirty = true
+ t.runner = nil
+ end
+ end
+end
+
local function localize(str)
return (gsub(str,"[%.: ]+","_"))
end
@@ -204,20 +219,23 @@ local function construct(t)
for i=1,#actions do
local action = actions[i]
if not askip[action] then
+ local localized
if type(action) == "function" then
local name = localize(tostring(action))
functions[name] = action
- action = format("utilities.sequencers.functions.%s",name)
+ action = formatters["utilities.sequencers.functions.%s"](name)
+ localized = localize(name) -- shorter than action
+ else
+ localized = localize(action)
end
- local localized = localize(action)
n = n + 1
- variables[n] = format("local %s = %s",localized,action)
+ variables[n] = formatters["local %s = %s"](localized,action)
if not returnvalues then
- calls[n] = format("%s(%s)",localized,arguments)
+ calls[n] = formatters["%s(%s)"](localized,arguments)
elseif n == 1 then
- calls[n] = format("local %s = %s(%s)",returnvalues,localized,arguments)
+ calls[n] = formatters["local %s = %s(%s)"](returnvalues,localized,arguments)
else
- calls[n] = format("%s = %s(%s)",returnvalues,localized,arguments)
+ calls[n] = formatters["%s = %s(%s)"](returnvalues,localized,arguments)
end
end
end
@@ -230,9 +248,9 @@ local function construct(t)
variables = concat(variables,"\n")
calls = concat(calls,"\n")
if results then
- t.compiled = format("%s\nreturn function(%s)\n%s\nreturn %s\nend",variables,arguments,calls,results)
+ t.compiled = formatters["%s\nreturn function(%s)\n%s\nreturn %s\nend"](variables,arguments,calls,results)
else
- t.compiled = format("%s\nreturn function(%s)\n%s\nend",variables,arguments,calls)
+ t.compiled = formatters["%s\nreturn function(%s)\n%s\nend"](variables,arguments,calls)
end
end
-- print(t.compiled)
@@ -258,6 +276,7 @@ compile = function(t,compiler,n) -- already referred to in sequencers.new
if compiled == "" then
runner = false
else
+-- inspect(compiled)
runner = compiled and load(compiled)() -- we can use loadstripped here
end
t.runner = runner
@@ -314,12 +333,12 @@ function sequencers.nodeprocessor(t,nofarguments) -- todo: handle 'kind' in plug
if not askip[action] then
local localized = localize(action)
n = n + 1
- vars[n] = format("local %s = %s",localized,action)
+ vars[n] = formatters["local %s = %s"](localized,action)
-- only difference with tostring is kind and rets (why no return)
if kind[action] == "nohead" then
- calls[n] = format(" ok = %s(head%s) done = done or ok",localized,args)
+ calls[n] = formatters[" ok = %s(head%s) done = done or ok"](localized,args)
else
- calls[n] = format(" head, ok = %s(head%s) done = done or ok",localized,args)
+ calls[n] = formatters[" head, ok = %s(head%s) done = done or ok"](localized,args)
end
-- local s = " print('" .. tostring(group) .. " " .. tostring(action) .. " : ' .. tostring(head)) "
-- calls[n] = s .. calls[n] .. s
@@ -327,6 +346,6 @@ function sequencers.nodeprocessor(t,nofarguments) -- todo: handle 'kind' in plug
end
end
end
- local processor = #calls > 0 and format(template_yes,concat(vars,"\n"),args,concat(calls,"\n")) or template_nop
+ local processor = #calls > 0 and formatters[template_yes](concat(vars,"\n"),args,concat(calls,"\n")) or template_nop
return processor
end
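
The new replaceaction entry follows the same calling convention as removeaction above: given a sequence and a group it swaps one registered action for another (delegating to tables.replacevalue) and invalidates the compiled runner so it is rebuilt on the next call. A small sketch, assuming the module's usual new/appendgroup/appendaction helpers; the sequence and action names are hypothetical:

local sequencers = utilities.sequencers

-- set up a sequence with one group and one action
sequencers.new          { name = "mycleaner", arguments = "str" }
sequencers.appendgroup  ("mycleaner", "cleanup")
sequencers.appendaction ("mycleaner", "cleanup", "mynamespace.oldcleanup")

-- later: swap the action in place; the runner gets recompiled lazily
sequencers.replaceaction("mycleaner", "cleanup", "mynamespace.oldcleanup",
                                                 "mynamespace.newcleanup")
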
diff --git a/tex/context/base/util-sql-loggers.lua b/tex/context/base/util-sql-loggers.lua
index 7fceb8032..ceb1ff75c 100644
--- a/tex/context/base/util-sql-loggers.lua
+++ b/tex/context/base/util-sql-loggers.lua
@@ -52,7 +52,7 @@ table.setmetatableindex(fromtype,function() return "info" end)
loggers.totype = totype
loggers.fromtype = fromtype
-local template =[[
+local template = [[
CREATE TABLE IF NOT EXISTS %basename% (
`id` int(11) NOT NULL AUTO_INCREMENT,
`time` int(11) NOT NULL,
diff --git a/tex/context/base/util-sta.lua b/tex/context/base/util-sta.lua
index 1a61ec4e6..27ab5a624 100644
--- a/tex/context/base/util-sta.lua
+++ b/tex/context/base/util-sta.lua
@@ -81,6 +81,8 @@ end
function stacker.new(name)
+ local report = logs.reporter("stacker",name or nil)
+
local s
local stack = { }
@@ -126,8 +128,18 @@ function stacker.new(name)
end
end
- local tops = { }
- local top, switch
+ local tops = { }
+ local top = nil
+ local switch = nil
+
+ local function resolve_reset(mode)
+ if #tops > 0 then
+ report("resetting %s left-over states of %a",#tops,name)
+ end
+ tops = { }
+ top = nil
+ switch = nil
+ end
local function resolve_begin(mode)
if mode then
@@ -206,8 +218,7 @@ function stacker.new(name)
local function resolve_end()
-- resolve_step(s.unset)
- local noftop = #top
- if noftop > 0 then
+ if #tops > 0 then -- was #top brrr
local result = s.stop(s,top,1,#top)
remove(tops)
top = tops[#tops]
@@ -224,8 +235,6 @@ function stacker.new(name)
resolve_end()
end
- local report = logs.reporter("stacker",name or nil)
-
s = {
name = name or "unknown",
unset = -1,
@@ -240,6 +249,7 @@ function stacker.new(name)
resolve_begin = resolve_begin,
resolve_step = resolve_step,
resolve_end = resolve_end,
+ resolve_reset = resolve_reset,
}
return s -- we can overload functions
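
The reshuffling above mainly adds resolve_reset, which drops whatever states are still on the stack (reporting how many were left over) so that the next resolve pass starts clean. A rough sketch, assuming the module's utilities.stacker namespace and that the start/stop handlers are overloaded elsewhere as usual:

local s = utilities.stacker.new("demo") -- the name shows up in the reports

-- ... resolve_begin / resolve_step / resolve_end calls happen while flushing ...

s.resolve_reset() -- forget any left-over states before the next flush
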
diff --git a/tex/context/base/util-str.lua b/tex/context/base/util-str.lua
index af8b1651e..de4a87e9f 100644
--- a/tex/context/base/util-str.lua
+++ b/tex/context/base/util-str.lua
@@ -20,21 +20,44 @@ local utfchar, utfbyte = utf.char, utf.byte
----- loadstripped = utilities.lua.loadstripped
----- setmetatableindex = table.setmetatableindex
-local loadstripped = _LUAVERSION < 5.2 and load or function(str)
- return load(dump(load(str),true)) -- it only makes sense in luajit and luatex where we have a stipped load
+local loadstripped = nil
+
+if _LUAVERSION < 5.2 then
+
+ loadstripped = function(str,shortcuts)
+ return load(str)
+ end
+
+else
+
+ loadstripped = function(str,shortcuts)
+ if shortcuts then
+ return load(dump(load(str),true),nil,nil,shortcuts)
+ else
+ return load(dump(load(str),true))
+ end
+ end
+
end
-- todo: make a special namespace for the formatter
if not number then number = { } end -- temp hack for luatex-fonts
-local stripper = patterns.stripzeros
+local stripper = patterns.stripzeros
+local newline = patterns.newline
+local endofstring = patterns.endofstring
+local whitespace = patterns.whitespace
+local spacer = patterns.spacer
+local spaceortab = patterns.spaceortab
local function points(n)
+ n = tonumber(n)
return (not n or n == 0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
end
local function basepoints(n)
+ n = tonumber(n)
return (not n or n == 0) and "0bp" or lpegmatch(stripper,format("%.5fbp", n*(7200/7227)/65536))
end
@@ -44,12 +67,12 @@ number.basepoints = basepoints
-- str = " \n \ntest \n test\ntest "
-- print("["..string.gsub(string.collapsecrlf(str),"\n","+").."]")
-local rubish = patterns.spaceortab^0 * patterns.newline
-local anyrubish = patterns.spaceortab + patterns.newline
+local rubish = spaceortab^0 * newline
+local anyrubish = spaceortab + newline
local anything = patterns.anything
-local stripped = (patterns.spaceortab^1 / "") * patterns.newline
+local stripped = (spaceortab^1 / "") * newline
local leading = rubish^0 / ""
-local trailing = (anyrubish^1 * patterns.endofstring) / ""
+local trailing = (anyrubish^1 * endofstring) / ""
local redundant = rubish^3 / "\n"
local pattern = Cs(leading * (trailing + redundant + stripped + anything)^0)
@@ -111,7 +134,7 @@ local pattern =
return ""
end
end
- + patterns.newline * Cp() / function(position)
+ + newline * Cp() / function(position)
extra, start = 0, position
end
+ patterns.anything
@@ -136,17 +159,105 @@ end
-- print(strings.tabtospace(t[k]))
-- end
-function strings.striplong(str) -- strips all leading spaces
- str = gsub(str,"^%s*","")
- str = gsub(str,"[\n\r]+ *","\n")
- return str
+-- todo: lpeg
+
+-- function strings.striplong(str) -- strips all leading spaces
+-- str = gsub(str,"^%s*","")
+-- str = gsub(str,"[\n\r]+ *","\n")
+-- return str
+-- end
+
+local space = spacer^0
+local nospace = space/""
+local endofline = nospace * newline
+
+local stripend = (whitespace^1 * endofstring)/""
+
+local normalline = (nospace * ((1-space*(newline+endofstring))^1) * nospace)
+
+local stripempty = endofline^1/""
+local normalempty = endofline^1
+local singleempty = endofline * (endofline^0/"")
+local doubleempty = endofline * endofline^-1 * (endofline^0/"")
+
+local stripstart = stripempty^0
+
+local p_prune_normal = Cs ( stripstart * ( stripend + normalline + normalempty )^0 )
+local p_prune_collapse = Cs ( stripstart * ( stripend + normalline + doubleempty )^0 )
+local p_prune_noempty = Cs ( stripstart * ( stripend + normalline + singleempty )^0 )
+local p_retain_normal = Cs ( ( normalline + normalempty )^0 )
+local p_retain_collapse = Cs ( ( normalline + doubleempty )^0 )
+local p_retain_noempty = Cs ( ( normalline + singleempty )^0 )
+
+-- function striplines(str,prune,collapse,noempty)
+-- if prune then
+-- if noempty then
+-- return lpegmatch(p_prune_noempty,str) or str
+-- elseif collapse then
+-- return lpegmatch(p_prune_collapse,str) or str
+-- else
+-- return lpegmatch(p_prune_normal,str) or str
+-- end
+-- else
+-- if noempty then
+-- return lpegmatch(p_retain_noempty,str) or str
+-- elseif collapse then
+-- return lpegmatch(p_retain_collapse,str) or str
+-- else
+-- return lpegmatch(p_retain_normal,str) or str
+-- end
+-- end
+-- end
+
+local striplinepatterns = {
+ ["prune"] = p_prune_normal,
+ ["prune and collapse"] = p_prune_collapse, -- default
+ ["prune and no empty"] = p_prune_noempty,
+ ["retain"] = p_retain_normal,
+ ["retain and collapse"] = p_retain_collapse,
+ ["retain and no empty"] = p_retain_noempty,
+ ["collapse"] = patterns.collapser, -- how about: stripper fullstripper
+}
+
+setmetatable(striplinepatterns,{ __index = function(t,k) return p_prune_collapse end })
+
+strings.striplinepatterns = striplinepatterns
+
+function strings.striplines(str,how)
+ return str and lpegmatch(striplinepatterns[how],str) or str
end
--- local template = string.striplong([[
+-- also see: string.collapsespaces
+
+strings.striplong = strings.striplines -- for old times sake
+
+-- local str = table.concat( {
+-- " ",
+-- " aap",
+-- " noot mies",
+-- " ",
+-- " ",
+-- " zus wim jet",
+-- "zus wim jet",
+-- " zus wim jet",
+-- " ",
+-- }, "\n")
+
+-- local str = table.concat( {
+-- " aaaa",
+-- " bb",
+-- " cccccc",
+-- }, "\n")
+
+-- for k, v in table.sortedhash(utilities.strings.striplinepatterns) do
+-- logs.report("stripper","method: %s, result: [[%s]]",k,utilities.strings.striplines(str,k))
+-- end
+
+-- inspect(strings.striplong([[
-- aaaa
-- bb
-- cccccc
--- ]])
+-- ]]))
function strings.nice(str)
str = gsub(str,"[:%-+_]+"," ") -- maybe more
@@ -178,6 +289,7 @@ end
-- octal %...o number
-- string %...s string number
-- float %...f number
+-- checked float %...F number
-- exponential %...e number
-- exponential %...E number
-- autofloat %...g number
@@ -249,10 +361,10 @@ strings.tracers = tracedchars
function string.tracedchar(b)
-- todo: table
if type(b) == "number" then
- return tracedchars[b] or (utfchar(b) .. " (U+" .. format('%05X',b) .. ")")
+ return tracedchars[b] or (utfchar(b) .. " (U+" .. format("%05X",b) .. ")")
else
local c = utfbyte(b)
- return tracedchars[c] or (b .. " (U+" .. format('%05X',c) .. ")")
+ return tracedchars[c] or (b .. " (U+" .. (c and format("%05X",c) or "?????") .. ")")
end
end
@@ -291,33 +403,67 @@ function number.sparseexponent(f,n)
return tostring(n)
end
-local preamble = [[
-local type = type
-local tostring = tostring
-local tonumber = tonumber
-local format = string.format
-local concat = table.concat
-local signed = number.signed
-local points = number.points
-local basepoints = number.basepoints
-local utfchar = utf.char
-local utfbyte = utf.byte
-local lpegmatch = lpeg.match
-local nspaces = string.nspaces
-local tracedchar = string.tracedchar
-local autosingle = string.autosingle
-local autodouble = string.autodouble
-local sequenced = table.sequenced
-local formattednumber = number.formatted
-local sparseexponent = number.sparseexponent
-]]
-
local template = [[
%s
%s
return function(%s) return %s end
]]
+local preamble, environment = "", { }
+
+if _LUAVERSION < 5.2 then
+
+ preamble = [[
+local lpeg=lpeg
+local type=type
+local tostring=tostring
+local tonumber=tonumber
+local format=string.format
+local concat=table.concat
+local signed=number.signed
+local points=number.points
+local basepoints= number.basepoints
+local utfchar=utf.char
+local utfbyte=utf.byte
+local lpegmatch=lpeg.match
+local nspaces=string.nspaces
+local tracedchar=string.tracedchar
+local autosingle=string.autosingle
+local autodouble=string.autodouble
+local sequenced=table.sequenced
+local formattednumber=number.formatted
+local sparseexponent=number.sparseexponent
+ ]]
+
+else
+
+ environment = {
+ global = global or _G,
+ lpeg = lpeg,
+ type = type,
+ tostring = tostring,
+ tonumber = tonumber,
+ format = string.format,
+ concat = table.concat,
+ signed = number.signed,
+ points = number.points,
+ basepoints = number.basepoints,
+ utfchar = utf.char,
+ utfbyte = utf.byte,
+ lpegmatch = lpeg.match,
+ nspaces = string.nspaces,
+ tracedchar = string.tracedchar,
+ autosingle = string.autosingle,
+ autodouble = string.autodouble,
+ sequenced = table.sequenced,
+ formattednumber = number.formatted,
+ sparseexponent = number.sparseexponent,
+ }
+
+end
+
+-- -- --
+
local arguments = { "a1" } -- faster than previously used (select(n,...))
setmetatable(arguments, { __index =
@@ -368,7 +514,7 @@ local format_i = function(f)
if f and f ~= "" then
return format("format('%%%si',a%s)",f,n)
else
- return format("format('%%i',a%s)",n)
+ return format("format('%%i',a%s)",n) -- why not just tostring()
end
end
@@ -384,6 +530,24 @@ local format_f = function(f)
return format("format('%%%sf',a%s)",f,n)
end
+-- The next one formats an integer as integer and very small values as zero. This is needed
+-- for pdf backend code.
+--
+-- 1.23 % 1 : 0.23
+-- - 1.23 % 1 : 0.77
+--
+-- We could probably use just %s with integers but who knows what Lua 5.3 will do? So let's
+-- for the moment use %i.
+
+local format_F = function(f) -- beware, no cast to number
+ n = n + 1
+ if not f or f == "" then
+ return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or format((a%s %% 1 == 0) and '%%i' or '%%.9f',a%s))",n,n,n,n)
+ else
+ return format("format((a%s %% 1 == 0) and '%%i' or '%%%sf',a%s)",n,f,n)
+ end
+end
+
local format_g = function(f)
n = n + 1
return format("format('%%%sg',a%s)",f,n)
@@ -657,7 +821,7 @@ local builder = Cs { "start",
V("!") -- new
+ V("s") + V("q")
+ V("i") + V("d")
- + V("f") + V("g") + V("G") + V("e") + V("E")
+ + V("f") + V("F") + V("g") + V("G") + V("e") + V("E")
+ V("x") + V("X") + V("o")
--
+ V("c")
@@ -680,7 +844,7 @@ local builder = Cs { "start",
+ V("m") + V("M") -- new
+ V("z") -- new
--
- + V("*") -- ignores probably messed up %
+ -- + V("?") -- ignores probably messed up %
)
+ V("*")
)
@@ -692,6 +856,7 @@ local builder = Cs { "start",
["i"] = (prefix_any * P("i")) / format_i, -- %i => regular %i (integer)
["d"] = (prefix_any * P("d")) / format_d, -- %d => regular %d (integer)
["f"] = (prefix_any * P("f")) / format_f, -- %f => regular %f (float)
+ ["F"] = (prefix_any * P("F")) / format_F, -- %F => regular %f (float) but 0/1 check
["g"] = (prefix_any * P("g")) / format_g, -- %g => regular %g (float)
["G"] = (prefix_any * P("G")) / format_G, -- %G => regular %G (float)
["e"] = (prefix_any * P("e")) / format_e, -- %e => regular %e (float)
@@ -734,34 +899,45 @@ local builder = Cs { "start",
["A"] = (prefix_any * P("A")) / format_A, -- %A => "..." (forces tostring)
--
["*"] = Cs(((1-P("%"))^1 + P("%%")/"%%")^1) / format_rest, -- rest (including %%)
+ ["?"] = Cs(((1-P("%"))^1 )^1) / format_rest, -- rest (including %%)
--
["!"] = Carg(2) * prefix_any * P("!") * C((1-P("!"))^1) * P("!") / format_extension,
}
-- we can be clever and only alias what is needed
+-- local direct = Cs (
+-- P("%")/""
+-- * Cc([[local format = string.format return function(str) return format("%]])
+-- * (S("+- .") + R("09"))^0
+-- * S("sqidfgGeExXo")
+-- * Cc([[",str) end]])
+-- * P(-1)
+-- )
+
local direct = Cs (
- P("%")/""
- * Cc([[local format = string.format return function(str) return format("%]])
- * (S("+- .") + R("09"))^0
- * S("sqidfgGeExXo")
- * Cc([[",str) end]])
- * P(-1)
- )
+ P("%")
+ * (S("+- .") + R("09"))^0
+ * S("sqidfgGeExXo")
+ * P(-1) / [[local format = string.format return function(str) return format("%0",str) end]]
+)
local function make(t,str)
local f
local p
local p = lpegmatch(direct,str)
if p then
+ -- f = loadstripped(p)()
+ -- print("builder 1 >",p)
f = loadstripped(p)()
else
n = 0
- p = lpegmatch(builder,str,1,"..",t._extensions_) -- after this we know n
+ -- p = lpegmatch(builder,str,1,"..",t._extensions_) -- after this we know n
+ p = lpegmatch(builder,str,1,t._connector_,t._extensions_) -- after this we know n
if n > 0 then
p = format(template,preamble,t._preamble_,arguments[n],p)
--- print("builder>",p)
- f = loadstripped(p)()
+ -- print("builder 2 >",p)
+ f = loadstripped(p,t._environment_)() -- t._environment is not populated (was experiment)
else
f = function() return str end
end
@@ -816,10 +992,28 @@ strings.formatters = { }
-- table (metatable) in which case we could better keep a count and
-- clear that table when a threshold is reached
-function strings.formatters.new()
- local t = { _extensions_ = { }, _preamble_ = "", _type_ = "formatter" }
- setmetatable(t, { __index = make, __call = use })
- return t
+-- _connector_ is an experiment
+
+if _LUAVERSION < 5.2 then
+
+ function strings.formatters.new(noconcat)
+ local t = { _type_ = "formatter", _connector_ = noconcat and "," or "..", _extensions_ = { }, _preamble_ = preamble, _environment_ = { } }
+ setmetatable(t, { __index = make, __call = use })
+ return t
+ end
+
+else
+
+ function strings.formatters.new(noconcat)
+ local e = { } -- better make a copy as we can overload
+ for k, v in next, environment do
+ e[k] = v
+ end
+ local t = { _type_ = "formatter", _connector_ = noconcat and "," or "..", _extensions_ = { }, _preamble_ = "", _environment_ = e }
+ setmetatable(t, { __index = make, __call = use })
+ return t
+ end
+
end
-- function strings.formatters.new()
@@ -838,8 +1032,12 @@ string.formatter = function(str,...) return formatters[str](...) end -- someti
local function add(t,name,template,preamble)
if type(t) == "table" and t._type_ == "formatter" then
t._extensions_[name] = template or "%s"
- if preamble then
+ if type(preamble) == "string" then
t._preamble_ = preamble .. "\n" .. t._preamble_ -- so no overload !
+ elseif type(preamble) == "table" then
+ for k, v in next, preamble do
+ t._environment_[k] = v
+ end
end
end
end
@@ -856,9 +1054,23 @@ patterns.luaquoted = Cs(Cc('"') * ((1-S('"\n'))^1 + P('"')/'\\"' + P('\n')/'\\n"
-- escaping by lpeg is faster for strings without quotes, slower on a string with quotes, but
-- faster again when other q-escapables are found (the ones we don't need to escape)
-add(formatters,"xml", [[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
-add(formatters,"tex", [[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
-add(formatters,"lua", [[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
+-- add(formatters,"xml", [[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+-- add(formatters,"tex", [[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
+-- add(formatters,"lua", [[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
+
+if _LUAVERSION < 5.2 then
+
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
+
+else
+
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape = lpeg.patterns.xmlescape })
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape = lpeg.patterns.texescape })
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape = lpeg.patterns.luaescape })
+
+end
-- -- yes or no:
--
@@ -885,3 +1097,29 @@ add(formatters,"lua", [[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patte
-- string.formatteds = formatteds
--
-- setmetatable(formatteds, { __index = make, __call = use })
+
+-- This is a somewhat silly one, used in command line reconstruction, but the older
+-- method, using a combination of find, gsub, quoted and unquoted, was not that
+-- reliable.
+--
+-- '"foo"bar \"and " whatever"' => "foo\"bar \"and \" whatever"
+-- 'foo"bar \"and " whatever' => "foo\"bar \"and \" whatever"
+
+local dquote = patterns.dquote -- P('"')
+local equote = patterns.escaped + dquote / '\\"' + 1
+local space = patterns.space
+local cquote = Cc('"')
+
+local pattern =
+ Cs(dquote * (equote - P(-2))^0 * dquote) -- we keep the outer but escape unescaped ones
+ + Cs(cquote * (equote - space)^0 * space * equote^0 * cquote) -- we escape unescaped ones
+
+function string.optionalquoted(str)
+ return lpegmatch(pattern,str) or str
+end
+
+local pattern = Cs((newline / os.newline + 1)^0)
+
+function string.replacenewlines(str)
+ return lpegmatch(pattern,str)
+end
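
The new %F handler can be exercised through the default formatters table; the values in the comments are what the definition above should produce (integral values go through %i, near-zero values collapse to 0, anything else gets nine decimals unless a specifier is given):

local f = string.formatters["%F"]

print(f(2))       -- 2             (integral, so formatted with %i)
print(f(1.5))     -- 1.500000000   (falls through to %.9f)
print(f(1e-10))   -- 0             (inside the near-zero window)

-- with an explicit specifier that format is used for non-integral values:
print(string.formatters["%0.3F"](1.5)) -- 1.500
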
diff --git a/tex/context/base/util-tab.lua b/tex/context/base/util-tab.lua
index ae44269bb..618f34cee 100644
--- a/tex/context/base/util-tab.lua
+++ b/tex/context/base/util-tab.lua
@@ -11,7 +11,7 @@ utilities.tables = utilities.tables or { }
local tables = utilities.tables
local format, gmatch, gsub, sub = string.format, string.gmatch, string.gsub, string.sub
-local concat, insert, remove = table.concat, table.insert, table.remove
+local concat, insert, remove, sort = table.concat, table.insert, table.remove, table.sort
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
local type, next, rawset, tonumber, tostring, load, select = type, next, rawset, tonumber, tostring, load, select
local lpegmatch, P, Cs, Cc = lpeg.match, lpeg.P, lpeg.Cs, lpeg.Cc
@@ -21,27 +21,29 @@ local utftoeight = utf.toeight
local splitter = lpeg.tsplitat(".")
-function tables.definetable(target,nofirst,nolast) -- defines undefined tables
- local composed, shortcut, t = nil, nil, { }
+function utilities.tables.definetable(target,nofirst,nolast) -- defines undefined tables
+ local composed, t = nil, { }
local snippets = lpegmatch(splitter,target)
for i=1,#snippets - (nolast and 1 or 0) do
local name = snippets[i]
if composed then
- composed = shortcut .. "." .. name
- shortcut = shortcut .. "_" .. name
- t[#t+1] = formatters["local %s = %s if not %s then %s = { } %s = %s end"](shortcut,composed,shortcut,shortcut,composed,shortcut)
+ composed = composed .. "." .. name
+ t[#t+1] = formatters["if not %s then %s = { } end"](composed,composed)
else
composed = name
- shortcut = name
if not nofirst then
t[#t+1] = formatters["%s = %s or { }"](composed,composed)
end
end
end
- if nolast then
- composed = shortcut .. "." .. snippets[#snippets]
+ if composed then
+ if nolast then
+ composed = composed .. "." .. snippets[#snippets]
+ end
+ return concat(t,"\n"), composed -- could be shortcut
+ else
+ return "", target
end
- return concat(t,"\n"), composed
end
-- local t = tables.definedtable("a","b","c","d")
@@ -73,7 +75,7 @@ end
function tables.migratetable(target,v,root)
local t = root or _G
- local names = string.split(target,".")
+ local names = lpegmatch(splitter,target)
for i=1,#names-1 do
local name = names[i]
t[name] = t[name] or { }
@@ -96,6 +98,17 @@ function tables.removevalue(t,value) -- todo: n
end
end
+function tables.replacevalue(t,oldvalue,newvalue)
+ if oldvalue and newvalue then
+ for i=1,#t do
+ if t[i] == oldvalue then
+ t[i] = newvalue
+ -- replace all, so no: return
+ end
+ end
+ end
+end
+
function tables.insertbeforevalue(t,value,extra)
for i=1,#t do
if t[i] == extra then
@@ -316,7 +329,7 @@ function table.fastserialize(t,prefix)
-- not sorted
-- only number and string indices (currently)
- local r = { prefix or "return" }
+ local r = { type(prefix) == "string" and prefix or "return" }
local m = 1
local function fastserialize(t,outer) -- no mixes
@@ -376,7 +389,6 @@ function table.fastserialize(t,prefix)
end
return r
end
-
return concat(fastserialize(t,true))
end
@@ -494,7 +506,8 @@ end
-- The next version is somewhat faster, although in practice one will seldom
-- serialize a lot using this one. Often the above variants are more efficient.
--- If we would really need this a lot, we could hash q keys.
+-- If we would really need this a lot, we could hash q keys, or just not use
+-- indented code.
-- char-def.lua : 0.53 -> 0.38
-- husayni.tma : 0.28 -> 0.19
@@ -558,8 +571,42 @@ function table.serialize(root,name,specification)
local t -- = { }
local n = 1
+-- local function simple_table(t)
+-- local ts = #t
+-- if ts > 0 then
+-- local n = 0
+-- for _, v in next, t do
+-- n = n + 1
+-- if type(v) == "table" then
+-- return nil
+-- end
+-- end
+-- if n == ts then
+-- local tt = { }
+-- local nt = 0
+-- for i=1,ts do
+-- local v = t[i]
+-- local tv = type(v)
+-- nt = nt + 1
+-- if tv == "number" then
+-- tt[nt] = v
+-- elseif tv == "string" then
+-- tt[nt] = format("%q",v) -- f_string(v)
+-- elseif tv == "boolean" then
+-- tt[nt] = v and "true" or "false"
+-- else
+-- return nil
+-- end
+-- end
+-- return tt
+-- end
+-- end
+-- return nil
+-- end
+
local function simple_table(t)
- if #t > 0 then
+ local nt = #t
+ if nt > 0 then
local n = 0
for _, v in next, t do
n = n + 1
@@ -567,19 +614,17 @@ function table.serialize(root,name,specification)
return nil
end
end
- if n == #t then
+ if n == nt then
local tt = { }
- local nt = 0
- for i=1,#t do
+ for i=1,nt do
local v = t[i]
local tv = type(v)
- nt = nt + 1
if tv == "number" then
- tt[nt] = v
+ tt[i] = v -- not needed tostring(v)
elseif tv == "string" then
- tt[nt] = format("%q",v) -- f_string(v)
+ tt[i] = format("%q",v) -- f_string(v)
elseif tv == "boolean" then
- tt[nt] = v and "true" or "false"
+ tt[i] = v and "true" or "false"
else
return nil
end
@@ -610,7 +655,7 @@ function table.serialize(root,name,specification)
depth = depth + 1
end
-- we could check for k (index) being number (cardinal)
- if root and next(root) then
+ if root and next(root) ~= nil then
local first = nil
local last = 0
last = #root
@@ -623,19 +668,19 @@ function table.serialize(root,name,specification)
if last > 0 then
first = 1
end
- local sk = sortedkeys(root) -- inline fast version?
+ local sk = sortedkeys(root) -- inline fast version?\
for i=1,#sk do
local k = sk[i]
local v = root[k]
local tv = type(v)
local tk = type(k)
- if first and tk == "number" and k >= first and k <= last then
+ if first and tk == "number" and k <= last and k >= first then
if tv == "number" then
n = n + 1 t[n] = f_val_num(depth,v)
elseif tv == "string" then
n = n + 1 t[n] = f_val_str(depth,v)
elseif tv == "table" then
- if not next(v) then
+ if next(v) == nil then
n = n + 1 t[n] = f_val_not(depth)
else
local st = simple_table(v)
@@ -665,13 +710,13 @@ function table.serialize(root,name,specification)
n = n + 1 t[n] = f_key_boo_value_str(depth,k,v)
end
elseif tv == "table" then
- if not next(v) then
+ if next(v) == nil then
if tk == "number" then
- n = n + 1 t[n] = f_key_num_value_not(depth,k,v)
+ n = n + 1 t[n] = f_key_num_value_not(depth,k)
elseif tk == "string" then
- n = n + 1 t[n] = f_key_str_value_not(depth,k,v)
+ n = n + 1 t[n] = f_key_str_value_not(depth,k)
elseif tk == "boolean" then
- n = n + 1 t[n] = f_key_boo_value_not(depth,k,v)
+ n = n + 1 t[n] = f_key_boo_value_not(depth,k)
end
else
local st = simple_table(v)
@@ -729,7 +774,7 @@ function table.serialize(root,name,specification)
root._w_h_a_t_e_v_e_r_ = nil
end
-- Let's forget about empty tables.
- if next(root) then
+ if next(root) ~= nil then
do_serialize(root,name,1,0)
end
end
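
The rewritten definetable now generates plain dotted assignments instead of local shortcut chains, and the new replacevalue replaces every matching entry in a list. A small sketch; the generated code in the comments is spelled out by hand from the definition above:

local code, composed = utilities.tables.definetable("a.b.c")
-- code:
--   a = a or { }
--   if not a.b then a.b = { } end
--   if not a.b.c then a.b.c = { } end
-- composed: "a.b.c"

local t = { "x", "y", "x" }
utilities.tables.replacevalue(t, "x", "z")
-- t is now { "z", "y", "z" } (all occurrences are replaced)
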
diff --git a/tex/context/base/util-tpl.lua b/tex/context/base/util-tpl.lua
index 67d058221..468dd429c 100644
--- a/tex/context/base/util-tpl.lua
+++ b/tex/context/base/util-tpl.lua
@@ -52,7 +52,7 @@ local sqlescape = lpeg.replacer {
-- { "\t", "\\t" },
}
-local sqlquoted = lpeg.Cs(lpeg.Cc("'") * sqlescape * lpeg.Cc("'"))
+local sqlquoted = Cs(Cc("'") * sqlescape * Cc("'"))
lpegpatterns.sqlescape = sqlescape
lpegpatterns.sqlquoted = sqlquoted
@@ -111,13 +111,26 @@ local luaescaper = escapers.lua
local quotedluaescaper = quotedescapers.lua
local function replacekeyunquoted(s,t,how,recurse) -- ".. \" "
- local escaper = how and escapers[how] or luaescaper
- return escaper(replacekey(s,t,how,recurse))
+ if how == false then
+ return replacekey(s,t,how,recurse)
+ else
+ local escaper = how and escapers[how] or luaescaper
+ return escaper(replacekey(s,t,how,recurse))
+ end
end
local function replacekeyquoted(s,t,how,recurse) -- ".. \" "
- local escaper = how and quotedescapers[how] or quotedluaescaper
- return escaper(replacekey(s,t,how,recurse))
+ if how == false then
+ return replacekey(s,t,how,recurse)
+ else
+ local escaper = how and quotedescapers[how] or quotedluaescaper
+ return escaper(replacekey(s,t,how,recurse))
+ end
+end
+
+local function replaceoptional(l,m,r,t,how,recurse)
+ local v = t[l]
+ return v and v ~= "" and lpegmatch(replacer,r,1,t,how or "lua",recurse or false) or ""
end
local single = P("%") -- test %test% test : resolves test
@@ -135,12 +148,19 @@ local norquoted = rquoted / ''
local nolquotedq = lquotedq / ''
local norquotedq = rquotedq / ''
-local key = nosingle * ((C((1-nosingle )^1) * Carg(1) * Carg(2) * Carg(3)) / replacekey ) * nosingle
-local quoted = nolquotedq * ((C((1-norquotedq)^1) * Carg(1) * Carg(2) * Carg(3)) / replacekeyquoted ) * norquotedq
-local unquoted = nolquoted * ((C((1-norquoted )^1) * Carg(1) * Carg(2) * Carg(3)) / replacekeyunquoted) * norquoted
+local noloptional = P("%?") / ''
+local noroptional = P("?%") / ''
+local nomoptional = P(":") / ''
+
+
+local args = Carg(1) * Carg(2) * Carg(3)
+local key = nosingle * ((C((1-nosingle )^1) * args) / replacekey ) * nosingle
+local quoted = nolquotedq * ((C((1-norquotedq )^1) * args) / replacekeyquoted ) * norquotedq
+local unquoted = nolquoted * ((C((1-norquoted )^1) * args) / replacekeyunquoted) * norquoted
+local optional = noloptional * ((C((1-nomoptional)^1) * nomoptional * C((1-noroptional)^1) * args) / replaceoptional) * noroptional
local any = P(1)
- replacer = Cs((unquoted + quoted + escape + key + any)^0)
+ replacer = Cs((unquoted + quoted + escape + optional + key + any)^0)
local function replace(str,mapping,how,recurse)
if mapping and str then
@@ -156,6 +176,7 @@ end
-- print(replace("test '%[x]%' test",{ x = [[a '%y%' a]], y = "oeps" },'sql',true))
-- print(replace([[test %[x]% test]],{ x = [[a "x" a]]}))
-- print(replace([[test %(x)% test]],{ x = [[a "x" a]]}))
+-- print(replace([[convert %?x: -x "%x%" ?% %?y: -y "%y%" ?%]],{ x = "yes" }))
templates.replace = replace
@@ -188,3 +209,5 @@ end
-- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" }))
-- inspect(utilities.templates.resolve({ one = "%two%", two = "two", three = "%three%" }))
+-- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" },false,true))
+-- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" },false))
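
The new %?key: ... ?% segments are only expanded when the key is present and non-empty, which is handy for optional command line arguments. A hedged sketch based on the commented examples above (the exact amount of surrounding whitespace may differ):

local replace = utilities.templates.replace

-- the x segment is kept and its %x% resolved, the y segment disappears:
print(replace([[convert %?x: -x "%x%" ?% %?y: -y "%y%" ?%]], { x = "yes" }))
-- roughly: convert  -x "yes"

-- how=false and recurse=true, mirroring the commented test above, should give:
print(replace("test %one% test", { one = "%two%", two = "two" }, false, true))
-- test two test
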
diff --git a/tex/context/base/x-asciimath.lua b/tex/context/base/x-asciimath.lua
index 992c37eae..51f401e66 100644
--- a/tex/context/base/x-asciimath.lua
+++ b/tex/context/base/x-asciimath.lua
@@ -7,266 +7,2095 @@ if not modules then modules = { } end modules ['x-asciimath'] = {
}
--[[ldx--
-Some backgrounds are discussed in x-asciimath.mkiv.
+Some backgrounds are discussed in x-asciimath.mkiv. This is a third version. I first
+tried to make a proper expression parser but it's not that easy. First we have to avoid left
+recursion, which is not that trivial (maybe a future version of lpeg will provide that), and
+second there is not really a syntax but a mix of expressions and sequences with some fuzzy logic
+applied. Most problematic are fractions and we also need to handle incomplete expressions. So,
+instead we (sort of) tokenize the string and then do some passes over the result. Yes, it's a
+really ugly and unsatisfying code mess down here. Don't take this as an example.
--ldx]]--
-local trace_mapping = false if trackers then trackers.register("modules.asciimath.mapping", function(v) trace_mapping = v end) end
+-- todo: spaces around all elements in cleanup?
+-- todo: filter from files listed in tuc file
-local asciimath = { }
-local moduledata = moduledata or { }
-moduledata.asciimath = asciimath
+local trace_mapping = false if trackers then trackers.register("modules.asciimath.mapping", function(v) trace_mapping = v end) end
+local trace_detail = false if trackers then trackers.register("modules.asciimath.detail", function(v) trace_detail = v end) end
+local trace_digits = false if trackers then trackers.register("modules.asciimath.digits", function(v) trace_digits = v end) end
local report_asciimath = logs.reporter("mathematics","asciimath")
-local format = string.format
-local lpegmatch = lpeg.match
-local S, P, R, C, V, Cc, Ct, Cs = lpeg.S, lpeg.P, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Ct, lpeg.Cs
-
-local letter = lpeg.patterns.utf8
-local space = S(" \n\r\t")
-local spaces = space^0/""
-local integer = P("-")^-1 * R("09")^1
-local realpart = P("-")^-1 * R("09")^1 * S(".")^1 * R("09")^1
-local number = integer -- so we can support nice formatting if needed
-local real = realpart -- so we can support nice formatting if needed
-local float = realpart * P("E") * integer -- so we can support nice formatting if needed
-local texnic = P("\\") * (R("az","AZ")^1)
-
-local premapper = Cs ( (
-
- P("@") / "\\degrees " +
- P("O/") / "\\varnothing " +
- P("o+") / "\\oplus " +
- P("o.") / "\\ocirc " +
- P("!in") / "\\not\\in " +
- P("!=") / "\\neq " +
- P("**") / "\\star " +
- P("*") / "\\cdot " +
- P("//") / "\\slash " +
- P("/_") / "\\angle " +
- P("\\\\") / "\\backslash " +
- P("^^^") / "\\wedge " +
- P("^^") / "\\wedge " +
- P("<<") / "\\left\\langle " +
- P(">>") / "\\right\\rangle " +
- P("<=") / "\\leq " +
- P(">=") / "\\geq " +
- P("-<") / "\\precc " +
- P(">-") / "\\succ " +
- P("~=") / "\\cong " +
- P("~~") / "\\approx " +
- P("=>") / "\\Rightarrow " +
- P("(:") / "\\left\\langle " +
- P(":)") / "\\right\\rangle " +
- P(":.") / "\\therefore " +
- P("~|") / "\\right\\rceil " +
- P("_|_") / "\\bot " +
- P("_|") / "\\right\\rfloor " +
- P("+-") / "\\pm " +
- P("|--") / "\\vdash " +
- P("|==") / "\\models " +
- P("|_") / "\\left\\lfloor " +
- P("|~") / "\\left\\lceil " +
- P("-:") / "\\div " +
- P("_=") / "\\equiv " +
-
- P("|") / "\\middle\\| " +
-
- P("dx") / "(dx)" +
- P("dy") / "(dy)" +
- P("dz") / "(dz)" +
-
- letter + P(1)
+local asciimath = { }
+local moduledata = moduledata or { }
+moduledata.asciimath = asciimath
-)^0 )
+if not characters then
+ require("char-def")
+ require("char-ini")
+ require("char-ent")
+end
+
+local type, rawget = type, rawget
+local concat, insert, remove = table.concat, table.insert, table.remove
+local rep, gmatch, gsub, find = string.rep, string.gmatch, string.gsub, string.find
+local utfchar, utfbyte = utf.char, utf.byte
+
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+local S, P, R, C, V, Cc, Ct, Cs, Carg = lpeg.S, lpeg.P, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Ct, lpeg.Cs, lpeg.Carg
+
+local sortedhash = table.sortedhash
+local sortedkeys = table.sortedkeys
+local formatters = string.formatters
+
+local entities = characters.entities or { }
+
+local xmltext = xml.text
+local xmlinclusion = xml.inclusion
+local xmlcollected = xml.collected
+
+-- todo: use private unicodes as temporary slots ... easier to compare
+
+local s_lparent = "\\left\\lparent"
+local s_lbrace = "\\left\\lbrace"
+local s_lbracket = "\\left\\lbracket"
+local s_langle = "\\left\\langle"
+local s_lfloor = "\\left\\lfloor"
+local s_lceil = "\\left\\lceil"
+local s_left = "\\left."
+
+local s_rparent = "\\right\\rparent"
+local s_rbrace = "\\right\\rbrace"
+local s_rbracket = "\\right\\rbracket"
+local s_rangle = "\\right\\rangle"
+local s_rfloor = "\\right\\rfloor"
+local s_rceil = "\\right\\rceil"
+local s_right = "\\right."
+
+local s_mslash = "\\middle/"
+
+local s_lbar = "\\left\\|"
+local s_mbar = "\\middle\\|"
+local s_rbar = "\\right\\|"
+
+local s_lnothing = "\\left ." -- space fools checker
+local s_rnothing = "\\right ." -- space fools checker
local reserved = {
- ["aleph"] = "\\aleph ",
- ["vdots"] = "\\vdots ",
- ["ddots"] = "\\ddots ",
- ["oint"] = "\\oint ",
- ["grad"] = "\\nabla ",
- ["prod"] = "\\prod ",
- ["prop"] = "\\propto ",
- ["sube"] = "\\subseteq ",
- ["supe"] = "\\supseteq ",
- ["sinh"] = "\\sinh ",
- ["cosh"] = "\\cosh ",
- ["tanh"] = "\\tanh ",
- ["sum"] = "\\sum ",
- ["vvv"] = "\\vee ",
- ["nnn"] = "\\cap ",
- ["uuu"] = "\\cup ",
- ["sub"] = "\\subset ",
- ["sup"] = "\\supset ",
- ["not"] = "\\lnot ",
- ["iff"] = "\\Leftrightarrow ",
- ["int"] = "\\int ",
- ["del"] = "\\partial ",
- ["and"] = "\\and ",
- ["not"] = "\\not ",
- ["sin"] = "\\sin ",
- ["cos"] = "\\cos ",
- ["tan"] = "\\tan ",
- ["csc"] = "\\csc ",
- ["sec"] = "\\sec ",
- ["cot"] = "\\cot ",
- ["log"] = "\\log ",
- ["det"] = "\\det ",
- ["lim"] = "\\lim ",
- ["mod"] = "\\mod ",
- ["gcd"] = "\\gcd ",
- ["lcm"] = "\\lcm ",
- ["min"] = "\\min ",
- ["max"] = "\\max ",
- ["xx"] = "\\times ",
- ["in"] = "\\in ",
- ["ox"] = "\\otimes ",
- ["vv"] = "\\vee ",
- ["nn"] = "\\cap ",
- ["uu"] = "\\cup ",
- ["oo"] = "\\infty ",
- ["ln"] = "\\ln ",
- ["or"] = "\\or ",
-
- ["AA"] = "\\forall ",
- ["EE"] = "\\exists ",
- ["TT"] = "\\top ",
- ["CC"] = "\\Bbb{C}",
- ["NN"] = "\\Bbb{N}",
- ["QQ"] = "\\Bbb{Q}",
- ["RR"] = "\\Bbb{R}",
- ["ZZ"] = "\\Bbb{Z}",
+ ["prod"] = { false, "\\prod" },
+ ["sinh"] = { false, "\\sinh" },
+ ["cosh"] = { false, "\\cosh" },
+ ["tanh"] = { false, "\\tanh" },
+ ["sum"] = { false, "\\sum" },
+ ["int"] = { false, "\\int" },
+ ["sin"] = { false, "\\sin" },
+ ["cos"] = { false, "\\cos" },
+ ["tan"] = { false, "\\tan" },
+ ["csc"] = { false, "\\csc" },
+ ["sec"] = { false, "\\sec" },
+ ["cot"] = { false, "\\cot" },
+ ["log"] = { false, "\\log" },
+ ["det"] = { false, "\\det" },
+ ["lim"] = { false, "\\lim" },
+ ["mod"] = { false, "\\mod" },
+ ["gcd"] = { false, "\\gcd" },
+ ["min"] = { false, "\\min" },
+ ["max"] = { false, "\\max" },
+ ["ln"] = { false, "\\ln" },
+
+ ["atan"] = { false, "\\atan" }, -- extra
+ ["acos"] = { false, "\\acos" }, -- extra
+ ["asin"] = { false, "\\asin" }, -- extra
+ -- extra
+ ["arctan"] = { false, "\\arctan" }, -- extra
+ ["arccos"] = { false, "\\arccos" }, -- extra
+ ["arcsin"] = { false, "\\arcsin" }, -- extra
+
+ ["and"] = { false, "\\text{and}" },
+ ["or"] = { false, "\\text{or}" },
+ ["if"] = { false, "\\text{if}" },
+
+ ["sqrt"] = { false, "\\asciimathsqrt", "unary" },
+ ["root"] = { false, "\\asciimathroot", "binary" },
+ ["frac"] = { false, "\\frac", "binary" },
+ ["stackrel"] = { false, "\\asciimathstackrel", "binary" },
+ ["hat"] = { false, "\\widehat", "unary" },
+ ["bar"] = { false, "\\overbar", "unary" },
+ ["overbar"] = { false, "\\overbar", "unary" },
+ ["underline"] = { false, "\\underline", "unary" },
+ ["ul"] = { false, "\\underline", "unary" },
+ ["vec"] = { false, "\\overrightarrow", "unary" },
+ ["dot"] = { false, "\\dot", "unary" }, -- 0x2D9
+ ["ddot"] = { false, "\\ddot", "unary" }, -- 0xA8
+
+ -- binary operators
+
+ ["+"] = { true, "+" },
+ ["-"] = { true, "-" },
+ ["*"] = { true, "⋅" },
+ ["**"] = { true, "⋆" },
+ ["//"] = { true, "⁄" }, -- \slash
+ ["\\"] = { true, "\\" },
+ ["xx"] = { true, "×" },
+ ["times"] = { true, "×" },
+ ["-:"] = { true, "÷" },
+ ["@"] = { true, "∘" },
+ ["circ"] = { true, "∘" },
+ ["o+"] = { true, "⊕" },
+ ["ox"] = { true, "⊗" },
+ ["o."] = { true, "⊙" },
+ ["^^"] = { true, "∧" },
+ ["vv"] = { true, "∨" },
+ ["nn"] = { true, "∩" },
+ ["uu"] = { true, "∪" },
+
+ -- big operators
+
+ ["^^^"] = { true, "⋀" },
+ ["vvv"] = { true, "⋁" },
+ ["nnn"] = { true, "⋂" },
+ ["uuu"] = { true, "⋃" },
+ ["int"] = { true, "∫" },
+ ["oint"] = { true, "∮" },
+
+ -- brackets
+
+ ["("] = { true, "(" },
+ [")"] = { true, ")" },
+ ["["] = { true, "[" },
+ ["]"] = { true, "]" },
+ ["{"] = { true, "{" },
+ ["}"] = { true, "}" },
+
+ -- binary relations
+
+ ["="] = { true, "=" },
+ ["eq"] = { true, "=" },
+ ["!="] = { true, "≠" },
+ ["ne"] = { true, "≠" },
+ ["neq"] = { true, "≠" },
+ ["<"] = { true, "<" },
+ ["lt"] = { true, "<" },
+ [">"] = { true, ">" },
+ ["gt"] = { true, ">" },
+ ["<="] = { true, "≤" },
+ ["le"] = { true, "≤" },
+ ["leq"] = { true, "≤" },
+ [">="] = { true, "≥" },
+ ["ge"] = { true, "≥" },
+ ["geq"] = { true, "≥" },
+ ["-<"] = { true, "≺" },
+ [">-"] = { true, "≻" },
+ ["in"] = { true, "∈" },
+ ["!in"] = { true, "∉" },
+ ["sub"] = { true, "⊂" },
+ ["sup"] = { true, "⊃" },
+ ["sube"] = { true, "⊆" },
+ ["supe"] = { true, "⊇" },
+ ["-="] = { true, "≡" },
+ ["~="] = { true, "≅" },
+ ["~~"] = { true, "≈" },
+ ["prop"] = { true, "∝" },
+
+ -- arrows
+
+ ["rarr"] = { true, "→" },
+ ["->"] = { true, "→" },
+ ["larr"] = { true, "←" },
+ ["harr"] = { true, "↔" },
+ ["uarr"] = { true, "↑" },
+ ["darr"] = { true, "↓" },
+ ["rArr"] = { true, "⇒" },
+ ["lArr"] = { true, "⇐" },
+ ["hArr"] = { true, "⇔" },
+ ["|->"] = { true, "↦" },
+
+ -- logical
+
+ ["not"] = { true, "¬" },
+ ["=>"] = { true, "⇒" },
+ ["iff"] = { true, "⇔" },
+ ["AA"] = { true, "∀" },
+ ["EE"] = { true, "∃" },
+ ["_|_"] = { true, "⊥" },
+ ["TT"] = { true, "⊤" },
+ ["|--"] = { true, "⊢" },
+ ["|=="] = { true, "⊨" },
+
+ -- miscellaneous
+
+ ["del"] = { true, "∂" },
+ ["grad"] = { true, "∇" },
+ ["+-"] = { true, "±" },
+ ["O/"] = { true, "∅" },
+ ["oo"] = { true, "∞" },
+ ["aleph"] = { true, "ℵ" },
+ ["angle"] = { true, "∠" },
+ ["/_"] = { true, "∠" },
+ [":."] = { true, "∴" },
+ ["..."] = { true, "..." }, -- ldots
+ ["ldots"] = { true, "..." }, -- ldots
+ ["cdots"] = { true, "⋯" },
+ ["vdots"] = { true, "⋮" },
+ ["ddots"] = { true, "⋱" },
+ ["diamond"] = { true, "⋄" },
+ ["square"] = { true, "□" },
+ ["|__"] = { true, "⌊" },
+ ["__|"] = { true, "⌋" },
+ ["|~"] = { true, "⌈" },
+ ["~|"] = { true, "⌉" },
+
+ -- more
+
+ ["_="] = { true, "≡" },
+
+ -- bonus
+
+ ["prime"] = { true, "′" }, -- bonus
+ ["'"] = { true, "′" }, -- bonus
+ ["''"] = { true, "″" }, -- bonus
+ ["'''"] = { true, "‴" }, -- bonus
+
+ -- special
+
+ ["%"] = { false, "\\mathpercent" },
+ ["&"] = { false, "\\mathampersand" },
+ ["#"] = { false, "\\mathhash" },
+ ["$"] = { false, "\\mathdollar" },
+
+ -- blackboard
+
+ ["CC"] = { true, "ℂ" },
+ ["NN"] = { true, "ℕ" },
+ ["QQ"] = { true, "ℚ" },
+ ["RR"] = { true, "ℝ" },
+ ["ZZ"] = { true, "ℤ" },
+
+ -- greek lowercase
+
+ ["alpha"] = { true, "α" },
+ ["beta"] = { true, "β" },
+ ["gamma"] = { true, "γ" },
+ ["delta"] = { true, "δ" },
+ ["epsilon"] = { true, "ε" },
+ ["varepsilon"] = { true, "ɛ" },
+ ["zeta"] = { true, "ζ" },
+ ["eta"] = { true, "η" },
+ ["theta"] = { true, "θ" },
+ ["vartheta"] = { true, "ϑ" },
+ ["iota"] = { true, "ι" },
+ ["kappa"] = { true, "κ" },
+ ["lambda"] = { true, "λ" },
+ ["mu"] = { true, "μ" },
+ ["nu"] = { true, "ν" },
+ ["xi"] = { true, "ξ" },
+ ["pi"] = { true, "π" },
+ ["rho"] = { true, "ρ" },
+ ["sigma"] = { true, "σ" },
+ ["tau"] = { true, "τ" },
+ ["upsilon"] = { true, "υ" },
+ ["phi"] = { true, "φ" },
+ ["varphi"] = { true, "ϕ" },
+ ["chi"] = { true, "χ" },
+ ["psi"] = { true, "ψ" },
+ ["omega"] = { true, "ω" },
+
+ -- greek uppercase
+
+ ["Gamma"] = { true, "Γ" },
+ ["Delta"] = { true, "Δ" },
+ ["Theta"] = { true, "Θ" },
+ ["Lambda"] = { true, "Λ" },
+ ["Xi"] = { true, "Ξ" },
+ ["Pi"] = { true, "Π" },
+ ["Sigma"] = { true, "Σ" },
+ ["Phi"] = { true, "Φ" },
+ ["Psi"] = { true, "Ψ" },
+ ["Omega"] = { true, "Ω" },
+
+ -- blackboard
+
+ ["bbb a"] = { true, "𝕒" },
+ ["bbb b"] = { true, "𝕓" },
+ ["bbb c"] = { true, "𝕔" },
+ ["bbb d"] = { true, "𝕕" },
+ ["bbb e"] = { true, "𝕖" },
+ ["bbb f"] = { true, "𝕗" },
+ ["bbb g"] = { true, "𝕘" },
+ ["bbb h"] = { true, "𝕙" },
+ ["bbb i"] = { true, "𝕚" },
+ ["bbb j"] = { true, "𝕛" },
+ ["bbb k"] = { true, "𝕜" },
+ ["bbb l"] = { true, "𝕝" },
+ ["bbb m"] = { true, "𝕞" },
+ ["bbb n"] = { true, "𝕟" },
+ ["bbb o"] = { true, "𝕠" },
+ ["bbb p"] = { true, "𝕡" },
+ ["bbb q"] = { true, "𝕢" },
+ ["bbb r"] = { true, "𝕣" },
+ ["bbb s"] = { true, "𝕤" },
+ ["bbb t"] = { true, "𝕥" },
+ ["bbb u"] = { true, "𝕦" },
+ ["bbb v"] = { true, "𝕧" },
+ ["bbb w"] = { true, "𝕨" },
+ ["bbb x"] = { true, "𝕩" },
+ ["bbb y"] = { true, "𝕪" },
+ ["bbb z"] = { true, "𝕫" },
+
+ ["bbb A"] = { true, "𝔸" },
+ ["bbb B"] = { true, "𝔹" },
+ ["bbb C"] = { true, "ℂ" },
+ ["bbb D"] = { true, "𝔻" },
+ ["bbb E"] = { true, "𝔼" },
+ ["bbb F"] = { true, "𝔽" },
+ ["bbb G"] = { true, "𝔾" },
+ ["bbb H"] = { true, "ℍ" },
+ ["bbb I"] = { true, "𝕀" },
+ ["bbb J"] = { true, "𝕁" },
+ ["bbb K"] = { true, "𝕂" },
+ ["bbb L"] = { true, "𝕃" },
+ ["bbb M"] = { true, "𝕄" },
+ ["bbb N"] = { true, "ℕ" },
+ ["bbb O"] = { true, "𝕆" },
+ ["bbb P"] = { true, "ℙ" },
+ ["bbb Q"] = { true, "ℚ" },
+ ["bbb R"] = { true, "ℝ" },
+ ["bbb S"] = { true, "𝕊" },
+ ["bbb T"] = { true, "𝕋" },
+ ["bbb U"] = { true, "𝕌" },
+ ["bbb V"] = { true, "𝕍" },
+ ["bbb W"] = { true, "𝕎" },
+ ["bbb X"] = { true, "𝕏" },
+ ["bbb Y"] = { true, "𝕐" },
+ ["bbb Z"] = { true, "ℤ" },
+
+ -- fraktur
+
+ ["fr a"] = { true, "𝔞" },
+ ["fr b"] = { true, "𝔟" },
+ ["fr c"] = { true, "𝔠" },
+ ["fr d"] = { true, "𝔡" },
+ ["fr e"] = { true, "𝔢" },
+ ["fr f"] = { true, "𝔣" },
+ ["fr g"] = { true, "𝔤" },
+ ["fr h"] = { true, "𝔥" },
+ ["fr i"] = { true, "𝔦" },
+ ["fr j"] = { true, "𝔧" },
+ ["fr k"] = { true, "𝔨" },
+ ["fr l"] = { true, "𝔩" },
+ ["fr m"] = { true, "𝔪" },
+ ["fr n"] = { true, "𝔫" },
+ ["fr o"] = { true, "𝔬" },
+ ["fr p"] = { true, "𝔭" },
+ ["fr q"] = { true, "𝔮" },
+ ["fr r"] = { true, "𝔯" },
+ ["fr s"] = { true, "𝔰" },
+ ["fr t"] = { true, "𝔱" },
+ ["fr u"] = { true, "𝔲" },
+ ["fr v"] = { true, "𝔳" },
+ ["fr w"] = { true, "𝔴" },
+ ["fr x"] = { true, "𝔵" },
+ ["fr y"] = { true, "𝔶" },
+ ["fr z"] = { true, "𝔷" },
+
+ ["fr A"] = { true, "𝔄" },
+ ["fr B"] = { true, "𝔅" },
+ ["fr C"] = { true, "ℭ" },
+ ["fr D"] = { true, "𝔇" },
+ ["fr E"] = { true, "𝔈" },
+ ["fr F"] = { true, "𝔉" },
+ ["fr G"] = { true, "𝔊" },
+ ["fr H"] = { true, "ℌ" },
+ ["fr I"] = { true, "ℑ" },
+ ["fr J"] = { true, "𝔍" },
+ ["fr K"] = { true, "𝔎" },
+ ["fr L"] = { true, "𝔏" },
+ ["fr M"] = { true, "𝔐" },
+ ["fr N"] = { true, "𝔑" },
+ ["fr O"] = { true, "𝔒" },
+ ["fr P"] = { true, "𝔓" },
+ ["fr Q"] = { true, "𝔔" },
+ ["fr R"] = { true, "ℜ" },
+ ["fr S"] = { true, "𝔖" },
+ ["fr T"] = { true, "𝔗" },
+ ["fr U"] = { true, "𝔘" },
+ ["fr V"] = { true, "𝔙" },
+ ["fr W"] = { true, "𝔚" },
+ ["fr X"] = { true, "𝔛" },
+ ["fr Y"] = { true, "𝔜" },
+ ["fr Z"] = { true, "ℨ" },
+
+ -- script
+
+ ["cc a"] = { true, "𝒶" },
+ ["cc b"] = { true, "𝒷" },
+ ["cc c"] = { true, "𝒸" },
+ ["cc d"] = { true, "𝒹" },
+ ["cc e"] = { true, "ℯ" },
+ ["cc f"] = { true, "𝒻" },
+ ["cc g"] = { true, "ℊ" },
+ ["cc h"] = { true, "𝒽" },
+ ["cc i"] = { true, "𝒾" },
+ ["cc j"] = { true, "𝒿" },
+ ["cc k"] = { true, "𝓀" },
+ ["cc l"] = { true, "𝓁" },
+ ["cc m"] = { true, "𝓂" },
+ ["cc n"] = { true, "𝓃" },
+ ["cc o"] = { true, "ℴ" },
+ ["cc p"] = { true, "𝓅" },
+ ["cc q"] = { true, "𝓆" },
+ ["cc r"] = { true, "𝓇" },
+ ["cc s"] = { true, "𝓈" },
+ ["cc t"] = { true, "𝓉" },
+ ["cc u"] = { true, "𝓊" },
+ ["cc v"] = { true, "𝓋" },
+ ["cc w"] = { true, "𝓌" },
+ ["cc x"] = { true, "𝓍" },
+ ["cc y"] = { true, "𝓎" },
+ ["cc z"] = { true, "𝓏" },
+
+ ["cc A"] = { true, "𝒜" },
+ ["cc B"] = { true, "ℬ" },
+ ["cc C"] = { true, "𝒞" },
+ ["cc D"] = { true, "𝒟" },
+ ["cc E"] = { true, "ℰ" },
+ ["cc F"] = { true, "ℱ" },
+ ["cc G"] = { true, "𝒢" },
+ ["cc H"] = { true, "ℋ" },
+ ["cc I"] = { true, "ℐ" },
+ ["cc J"] = { true, "𝒥" },
+ ["cc K"] = { true, "𝒦" },
+ ["cc L"] = { true, "ℒ" },
+ ["cc M"] = { true, "ℳ" },
+ ["cc N"] = { true, "𝒩" },
+ ["cc O"] = { true, "𝒪" },
+ ["cc P"] = { true, "𝒫" },
+ ["cc Q"] = { true, "𝒬" },
+ ["cc R"] = { true, "ℛ" },
+ ["cc S"] = { true, "𝒮" },
+ ["cc T"] = { true, "𝒯" },
+ ["cc U"] = { true, "𝒰" },
+ ["cc V"] = { true, "𝒱" },
+ ["cc W"] = { true, "𝒲" },
+ ["cc X"] = { true, "𝒳" },
+ ["cc Y"] = { true, "𝒴" },
+ ["cc Z"] = { true, "𝒵" },
+
+ -- bold
+
+ ["bb a"] = { true, "𝒂" },
+ ["bb b"] = { true, "𝒃" },
+ ["bb c"] = { true, "𝒄" },
+ ["bb d"] = { true, "𝒅" },
+ ["bb e"] = { true, "𝒆" },
+ ["bb f"] = { true, "𝒇" },
+ ["bb g"] = { true, "𝒈" },
+ ["bb h"] = { true, "𝒉" },
+ ["bb i"] = { true, "𝒊" },
+ ["bb j"] = { true, "𝒋" },
+ ["bb k"] = { true, "𝒌" },
+ ["bb l"] = { true, "𝒍" },
+ ["bb m"] = { true, "𝒎" },
+ ["bb n"] = { true, "𝒏" },
+ ["bb o"] = { true, "𝒐" },
+ ["bb p"] = { true, "𝒑" },
+ ["bb q"] = { true, "𝒒" },
+ ["bb r"] = { true, "𝒓" },
+ ["bb s"] = { true, "𝒔" },
+ ["bb t"] = { true, "𝒕" },
+ ["bb u"] = { true, "𝒖" },
+ ["bb v"] = { true, "𝒗" },
+ ["bb w"] = { true, "𝒘" },
+ ["bb x"] = { true, "𝒙" },
+ ["bb y"] = { true, "𝒚" },
+ ["bb z"] = { true, "𝒛" },
+
+ ["bb A"] = { true, "𝑨" },
+ ["bb B"] = { true, "𝑩" },
+ ["bb C"] = { true, "𝑪" },
+ ["bb D"] = { true, "𝑫" },
+ ["bb E"] = { true, "𝑬" },
+ ["bb F"] = { true, "𝑭" },
+ ["bb G"] = { true, "𝑮" },
+ ["bb H"] = { true, "𝑯" },
+ ["bb I"] = { true, "𝑰" },
+ ["bb J"] = { true, "𝑱" },
+ ["bb K"] = { true, "𝑲" },
+ ["bb L"] = { true, "𝑳" },
+ ["bb M"] = { true, "𝑴" },
+ ["bb N"] = { true, "𝑵" },
+ ["bb O"] = { true, "𝑶" },
+ ["bb P"] = { true, "𝑷" },
+ ["bb Q"] = { true, "𝑸" },
+ ["bb R"] = { true, "𝑹" },
+ ["bb S"] = { true, "𝑺" },
+ ["bb T"] = { true, "𝑻" },
+ ["bb U"] = { true, "𝑼" },
+ ["bb V"] = { true, "𝑽" },
+ ["bb W"] = { true, "𝑾" },
+ ["bb X"] = { true, "𝑿" },
+ ["bb Y"] = { true, "𝒀" },
+ ["bb Z"] = { true, "𝒁" },
+
+ -- sans
+
+ ["sf a"] = { true, "𝖺" },
+ ["sf b"] = { true, "𝖻" },
+ ["sf c"] = { true, "𝖼" },
+ ["sf d"] = { true, "𝖽" },
+ ["sf e"] = { true, "𝖾" },
+ ["sf f"] = { true, "𝖿" },
+ ["sf g"] = { true, "𝗀" },
+ ["sf h"] = { true, "𝗁" },
+ ["sf i"] = { true, "𝗂" },
+ ["sf j"] = { true, "𝗃" },
+ ["sf k"] = { true, "𝗄" },
+ ["sf l"] = { true, "𝗅" },
+ ["sf m"] = { true, "𝗆" },
+ ["sf n"] = { true, "𝗇" },
+ ["sf o"] = { true, "𝗈" },
+ ["sf p"] = { true, "𝗉" },
+ ["sf q"] = { true, "𝗊" },
+ ["sf r"] = { true, "𝗋" },
+ ["sf s"] = { true, "𝗌" },
+ ["sf t"] = { true, "𝗍" },
+ ["sf u"] = { true, "𝗎" },
+ ["sf v"] = { true, "𝗏" },
+ ["sf w"] = { true, "𝗐" },
+ ["sf x"] = { true, "𝗑" },
+ ["sf y"] = { true, "𝗒" },
+ ["sf z"] = { true, "𝗓" },
+
+ ["sf A"] = { true, "𝖠" },
+ ["sf B"] = { true, "𝖡" },
+ ["sf C"] = { true, "𝖢" },
+ ["sf D"] = { true, "𝖣" },
+ ["sf E"] = { true, "𝖤" },
+ ["sf F"] = { true, "𝖥" },
+ ["sf G"] = { true, "𝖦" },
+ ["sf H"] = { true, "𝖧" },
+ ["sf I"] = { true, "𝖨" },
+ ["sf J"] = { true, "𝖩" },
+ ["sf K"] = { true, "𝖪" },
+ ["sf L"] = { true, "𝖫" },
+ ["sf M"] = { true, "𝖬" },
+ ["sf N"] = { true, "𝖭" },
+ ["sf O"] = { true, "𝖮" },
+ ["sf P"] = { true, "𝖯" },
+ ["sf Q"] = { true, "𝖰" },
+ ["sf R"] = { true, "𝖱" },
+ ["sf S"] = { true, "𝖲" },
+ ["sf T"] = { true, "𝖳" },
+ ["sf U"] = { true, "𝖴" },
+ ["sf V"] = { true, "𝖵" },
+ ["sf W"] = { true, "𝖶" },
+ ["sf X"] = { true, "𝖷" },
+ ["sf Y"] = { true, "𝖸" },
+ ["sf Z"] = { true, "𝖹" },
+
+ -- monospace
+
+ ["tt a"] = { true, "𝚊" },
+ ["tt b"] = { true, "𝚋" },
+ ["tt c"] = { true, "𝚌" },
+ ["tt d"] = { true, "𝚍" },
+ ["tt e"] = { true, "𝚎" },
+ ["tt f"] = { true, "𝚏" },
+ ["tt g"] = { true, "𝚐" },
+ ["tt h"] = { true, "𝚑" },
+ ["tt i"] = { true, "𝚒" },
+ ["tt j"] = { true, "𝚓" },
+ ["tt k"] = { true, "𝚔" },
+ ["tt l"] = { true, "𝚕" },
+ ["tt m"] = { true, "𝚖" },
+ ["tt n"] = { true, "𝚗" },
+ ["tt o"] = { true, "𝚘" },
+ ["tt p"] = { true, "𝚙" },
+ ["tt q"] = { true, "𝚚" },
+ ["tt r"] = { true, "𝚛" },
+ ["tt s"] = { true, "𝚜" },
+ ["tt t"] = { true, "𝚝" },
+ ["tt u"] = { true, "𝚞" },
+ ["tt v"] = { true, "𝚟" },
+ ["tt w"] = { true, "𝚠" },
+ ["tt x"] = { true, "𝚡" },
+ ["tt y"] = { true, "𝚢" },
+ ["tt z"] = { true, "𝚣" },
+
+ ["tt A"] = { true, "𝙰" },
+ ["tt B"] = { true, "𝙱" },
+ ["tt C"] = { true, "𝙲" },
+ ["tt D"] = { true, "𝙳" },
+ ["tt E"] = { true, "𝙴" },
+ ["tt F"] = { true, "𝙵" },
+ ["tt G"] = { true, "𝙶" },
+ ["tt H"] = { true, "𝙷" },
+ ["tt I"] = { true, "𝙸" },
+ ["tt J"] = { true, "𝙹" },
+ ["tt K"] = { true, "𝙺" },
+ ["tt L"] = { true, "𝙻" },
+ ["tt M"] = { true, "𝙼" },
+ ["tt N"] = { true, "𝙽" },
+ ["tt O"] = { true, "𝙾" },
+ ["tt P"] = { true, "𝙿" },
+ ["tt Q"] = { true, "𝚀" },
+ ["tt R"] = { true, "𝚁" },
+ ["tt S"] = { true, "𝚂" },
+ ["tt T"] = { true, "𝚃" },
+ ["tt U"] = { true, "𝚄" },
+ ["tt V"] = { true, "𝚅" },
+ ["tt W"] = { true, "𝚆" },
+ ["tt X"] = { true, "𝚇" },
+ ["tt Y"] = { true, "𝚈" },
+ ["tt Z"] = { true, "𝚉" },
+
+ -- some more undocumented
+
+ ["dx"] = { false, { "d", "x" } }, -- "{dx}" "\\left(dx\\right)"
+ ["dy"] = { false, { "d", "y" } }, -- "{dy}" "\\left(dy\\right)"
+ ["dz"] = { false, { "d", "z" } }, -- "{dz}" "\\left(dz\\right)"
+
+ -- fences
+
+ ["(:"] = { true, "(:" },
+ ["{:"] = { true, "{:" },
+ ["[:"] = { true, "[:" },
+ ["("] = { true, "(" },
+ ["["] = { true, "[" },
+ ["{"] = { true, "{" },
+ ["<<"] = { true, "⟨" }, -- why not <:
+ ["|_"] = { true, "⌊" },
+ ["|~"] = { true, "⌈" },
+ ["⟨"] = { true, "⟨" },
+ ["〈"] = { true, "⟨" },
+ ["〈"] = { true, "⟨" },
+
+ [":)"] = { true, ":)" },
+ [":}"] = { true, ":}" },
+ [":]"] = { true, ":]" },
+ [")"] = { true, ")" },
+ ["]"] = { true, "]" },
+ ["}"] = { true, "}" },
+ [">>"] = { true, "⟩" }, -- why not :>
+ ["~|"] = { true, "⌉" },
+ ["_|"] = { true, "⌋" },
+ ["⟩"] = { true, "⟩" },
+ ["〉"] = { true, "⟩" },
+ ["〉"] = { true, "⟩" },
+
+ ["lparent"] = { true, "(" },
+ ["lbracket"] = { true, "[" },
+ ["lbrace"] = { true, "{" },
+ ["langle"] = { true, "⟨" },
+ ["lfloor"] = { true, "⌊" },
+ ["lceil"] = { true, "⌈" },
+
+ ["rparent"] = { true, ")" },
+ ["rbracket"] = { true, "]" },
+ ["rbrace"] = { true, "}" },
+ ["rangle"] = { true, "⟩" },
+ ["rfloor"] = { true, "⌋" },
+ ["rceil"] = { true, "⌉" },
+
+ -- a bit special:
+
+ ["\\frac"] = { true, "frac" },
+
+ -- now it gets real crazy, only these two:
+
+ [">"] = { true, ">" },
+ ["<"] = { true, "<" },
+
+}
+
+for k, v in next, characters.data do
+ local name = v.mathname
+ if name and not reserved[name] then
+ reserved[name] = { true, utfchar(k) }
+ end
+ local spec = v.mathspec
+ -- if spec then
+ -- for i=1,#spec do
+ -- local name = spec[i].name
+ -- if name and not reserved[name] then
+ -- reserved[name] = { true, utfchar(k) }
+ -- end
+ -- end
+ -- end
+end
+
+reserved.P = nil
+reserved.S = nil
+
+local isbinary = {
+ ["\\frac"] = true,
+ ["\\root"] = true,
+ ["\\asciimathroot"] = true,
+ ["\\asciimathstackrel"] = true,
}
-table.setmetatableindex(reserved,characters.entities)
+local isunary = {
+ ["\\sqrt"] = true,
+ ["\\asciimathsqrt"] = true,
+ ["\\text"] = true, -- mathoptext
+ ["\\mathoptext"] = true, -- mathoptext
+ ["\\asciimathoptext"] = true, -- mathoptext
+ ["\\hat"] = true, -- widehat
+ ["\\widehat"] = true, -- widehat
+ ["\\bar"] = true, --
+ ["\\overbar"] = true, --
+ ["\\underline"] = true, --
+ ["\\vec"] = true, -- overrightarrow
+ ["\\overrightarrow"] = true, -- overrightarrow
+ ["\\dot"] = true, --
+ ["\\ddot"] = true, --
-local postmapper = Cs ( (
+}
+
+local isfunny = {
+ ["\\sin"] = true,
+}
+
+local isinfix = {
+ ["^"] = true,
+ ["_"] = true,
+}
+
+local isleft = {
+ [s_lparent] = true,
+ [s_lbrace] = true,
+ [s_lbracket] = true,
+ [s_langle] = true,
+ [s_lfloor] = true,
+ [s_lceil] = true,
+ [s_left] = true,
+}
+
+local isright = {
+ [s_rparent] = true,
+ [s_rbrace] = true,
+ [s_rbracket] = true,
+ [s_rangle] = true,
+ [s_rfloor] = true,
+ [s_rceil] = true,
+ [s_right] = true,
+}
+
+local issimplified = {
+}
+
+--
+
+-- special mess
- P("\\mathoptext ") * spaces * (P("\\bgroup ")/"{") * (1-P("\\egroup "))^1 * (P("\\egroup ")/"}") +
+local d_one = R("09")
+local d_two = d_one * d_one
+local d_three = d_two * d_one
+local d_four = d_three * d_one
+local d_split = P(-1) + P(",")
- (P("\\bgroup ")) / "{" +
- (P("\\egroup ")) / "}" +
+local d_spaced = (Carg(1) * d_three)^1
- P("\\") * (R("az","AZ")^2) +
+local digitized_1 = Cs ( (
+ d_three * d_spaced * d_split +
+ d_two * d_spaced * d_split +
+ d_one * d_spaced * d_split +
+ P(1)
+ )^1 )
- (R("AZ","az")^2) / reserved +
+local p_fourbefore = d_four * d_split
+local p_fourafter = d_four * P(-1)
- P("{:") / "\\left." +
- P(":}") / "\\right." +
- P("(") / "\\left(" +
- P(")") / "\\right)" +
- P("[") / "\\left[" +
- P("]") / "\\right]" +
- P("{") / "\\left\\{" +
- P("}") / "\\right\\}" +
+local p_beforecomma = d_three * d_spaced * d_split
+ + d_two * d_spaced * d_split
+ + d_one * d_spaced * d_split
+ + d_one * d_split
+
+local p_aftercomma = p_fourafter
+ + d_three * d_spaced
+ + d_two * d_spaced
+ + d_one * d_spaced
+
+local digitized_2 = Cs (
+ p_fourbefore * (p_aftercomma^0) +
+ p_beforecomma * ((p_aftercomma + d_one^1)^0)
+ )
+
+local p_fourbefore = d_four * d_split
+local p_fourafter = d_four
+local d_spaced = (Carg(1) * (d_three + d_two + d_one))^1
+local p_aftercomma = p_fourafter * P(-1)
+ + d_three * d_spaced * P(1)^0
+ + d_one^1
+
+-- local digitized_3 = Cs (
+-- p_fourbefore * p_aftercomma^0 +
+-- p_beforecomma * p_aftercomma^0
+-- )
+
+local digitized_3 = Cs((p_fourbefore + p_beforecomma) * p_aftercomma^0)
+
+local splitmethods = {
+ digitized_1,
+ digitized_2,
+ digitized_3,
+}
+
+local splitmethod = nil
+
+function asciimath.setup(settings)
+ splitmethod = splitmethods[tonumber(settings.splitmethod) or 0]
+ if splitmethod then
+ local separator = settings.separator
+        if separator == true or not interfaces or separator == interfaces.variables.yes then
+ digitseparator = utfchar(0x2008)
+ elseif type(separator) == "string" and separator ~= "" then
+ digitseparator = separator
+ else
+ splitmethod = nil
+ end
+ end
+end
- letter + P(1)
+local collected_digits = { }
+local collected_filename = "asciimath-digits.lua"
+
+local function numbermess(s)
+ if splitmethod then
+ local d = lpegmatch(splitmethod,s,1,digitseparator)
+ if d then
+ if trace_digits and s ~= d then
+ collected_digits[s] = d
+ end
+ return d
+ end
+ end
+ return s
+end
+
+-- asciimath.setup { splitmethod = 3 }
+-- local t = {
+-- "1", "12", "123", "1234", "12345", "123456", "1234567", "12345678", "123456789",
+-- "1,1",
+-- "12,12",
+-- "123,123",
+-- "1234,123",
+-- "1234,1234",
+-- "12345,1234",
+-- "1234,12345",
+-- "12345,12345",
+-- "123456,123456",
+-- "1234567,1234567",
+-- "12345678,12345678",
+-- "123456789,123456789",
+-- "0,1234",
+-- "1234,0",
+-- "1234,00",
+-- "0,123456789",
+-- }
+-- for i=1,#t do print(formatters["%-20s : [%s]"](t[i],numbermess(t[i]))) end
+
+statistics.register("asciimath",function()
+ if trace_digits then
+ local n = table.count(collected_digits)
+ if n > 0 then
+ table.save(collected_filename,collected_digits)
+ return string.format("%s digit conversions saved in %s",n,collected_filename)
+ else
+ os.remove(collected_filename)
+ end
+ end
+end)
+
+local p_number_base = patterns.cpnumber or patterns.cnumber or patterns.number
+local p_number = C(p_number_base)
+----- p_number = p_number_base
+local p_spaces = patterns.whitespace
+
+local p_utf_base = patterns.utf8character
+local p_utf = C(p_utf_base)
+-- local p_entity = (P("&") * C((1-P(";"))^2) * P(";"))/ entities
+
+-- entities["gt"] = ">"
+-- entities["lt"] = "<"
+-- entities["amp"] = "&"
+-- entities["dquot"] = '"'
+-- entities["quot"] = "'"
+
+local p_onechar = p_utf_base * P(-1)
+
+----- p_number = Cs((patterns.cpnumber or patterns.cnumber or patterns.number)/function(s) return (gsub(s,",","{,}")) end)
+
+local sign = P("-")^-1
+local digits = R("09")^1
+local integer = sign * digits
+local real = digits * (S(".") * digits)^-1
+local float = real * (P("E") * integer)^-1
+
+-- local number = C(float + integer)
+-- local p_number = C(float)
+local p_number = float / numbermess
+
+local k_reserved = sortedkeys(reserved)
+local k_commands = { }
+local k_unicode = { }
+
+asciimath.keys = {
+ reserved = k_reserved
+}
+
+local k_reserved_different = { }
+local k_reserved_words = { }
+
+for k, v in sortedhash(reserved) do
+ local replacement = v[2]
+ if v[1] then
+ k_unicode[k] = replacement
+ else
+ k_unicode[k] = k -- keep them ... later we remap these
+ if k ~= replacement then
+ k_reserved_different[#k_reserved_different+1] = k
+ end
+ end
+ if not find(k,"[^[a-zA-Z]+$]") then
+ k_unicode["\\"..k] = k -- dirty trick, no real unicode
+ end
+ if not find(k,"[^a-zA-Z]") then
+ k_reserved_words[#k_reserved_words+1] = k
+ end
+ k_commands[k] = replacement
+end
+
+local p_reserved =
+ lpeg.utfchartabletopattern(k_reserved_different) / k_commands
+
+local p_unicode =
+ lpeg.utfchartabletopattern(table.keys(k_unicode)) / k_unicode
+
+-- inspect(k_reserved_different)
+
+local p_texescape = patterns.texescape
+
+local function texescaped(s)
+ return lpegmatch(p_texescape,s)
+end
+
+local p_text =
+ P("text")
+ * p_spaces^0
+ * Cc("\\asciimathoptext")
+ * ( -- maybe balanced
+ Cs( P("{") * ((1-P("}"))^0/texescaped) * P("}") )
+ + Cs((P("(")/"{") * ((1-P(")"))^0/texescaped) * (P(")")/"}"))
+ )
+ + Cc("\\asciimathoptext") * Cs(Cc("{") * (patterns.undouble/texescaped) * Cc("}"))
+
+local m_left = {
+ ["(:"] = s_langle,
+ ["{:"] = s_left,
+ ["[:"] = s_left,
+ ["("] = s_lparent,
+ ["["] = s_lbracket,
+ ["{"] = s_lbrace,
+ ["⟨"] = s_langle,
+ ["⌈"] = s_lceil,
+ ["⌊"] = s_lfloor,
+
+ -- ["<<"] = s_langle, -- why not <:
+ -- ["|_"] = s_lfloor,
+ -- ["|~"] = s_lceil,
+ -- ["〈"] = s_langle,
+ -- ["〈"] = s_langle,
+
+ -- ["lparent"] = s_lparent,
+ -- ["lbracket"] = s_lbracket,
+ -- ["lbrace"] = s_lbrace,
+ -- ["langle"] = s_langle,
+ -- ["lfloor"] = s_lfloor,
+ -- ["lceil"] = s_lceil,
+}
+
+local m_right = {
+ [":)"] = s_rangle,
+ [":}"] = s_right,
+ [":]"] = s_right,
+ [")"] = s_rparent,
+ ["]"] = s_rbracket,
+ ["}"] = s_rbrace,
+ ["⟩"] = s_rangle,
+ ["⌉"] = s_rceil,
+ ["⌋"] = s_rfloor,
+
+ -- [">>"] = s_rangle, -- why not :>
+ -- ["~|"] = s_rceil,
+ -- ["_|"] = s_rfloor,
+ -- ["〉"] = s_rangle,
+ -- ["〉"] = s_rangle,
+
+ -- ["rparent"] = s_rparent,
+ -- ["rbracket"] = s_rbracket,
+ -- ["rbrace"] = s_rbrace,
+ -- ["rangle"] = s_rangle,
+ -- ["rfloor"] = s_rfloor,
+ -- ["rceil"] = s_rceil,
+}
+
+local islimits = {
+ ["\\sum"] = true,
+ -- ["∑"] = true,
+ ["\\prod"] = true,
+ -- ["∏"] = true,
+ ["\\lim"] = true,
+}
+
+local p_left =
+ lpeg.utfchartabletopattern(m_left) / m_left
+local p_right =
+ lpeg.utfchartabletopattern(m_right) / m_right
+
+-- special cases
+
+-- local p_special =
+-- C("/")
+-- + P("\\ ") * Cc("{}") * p_spaces^0 * C(S("^_"))
+-- + P("\\ ") * Cc("\\space")
+-- + P("\\\\") * Cc("\\backslash")
+-- + P("\\") * (R("az","AZ")^1/entities)
+-- + P("|") * Cc("\\|")
+--
+-- faster but also uglier:
+
+local p_special =
+ P("|") * Cc("\\|") -- s_mbar -- maybe always add left / right as in mml ?
+ + P("\\") * (
+ (
+ P(" ") * (
+ Cc("{}") * p_spaces^0 * C(S("^_"))
+ + Cc("\\space")
+ )
+ )
+ + P("\\") * Cc("\\backslash")
+ -- + (R("az","AZ")^1/entities)
+ + C(R("az","AZ")^1)
+ )
+
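+-- So, roughly: "|" yields "\|", "\ ^" (and "\ _") yields "{}" plus the script character,
+-- a plain "\ " yields "\space", "\\" yields "\backslash", and "\word" just yields "word".
+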
+-- open | close :: {: | :}
+
+local u_parser = Cs ( (
+ patterns.doublequoted +
+ P("text") * p_spaces^0 * P("(") * (1-P(")"))^0 * P(")") + -- -- todo: balanced
+ p_unicode +
+ p_utf_base
)^0 )
-local parser
+local a_parser = Ct { "tokenizer",
+ tokenizer = (
+ p_spaces
+ + p_number
+ + p_text
+ -- + Ct(p_open * V("tokenizer") * p_close) -- {: (a+b,=,1),(a+b,=,7) :}
+ -- + Ct(p_open * V("tokenizer") * p_close_right) -- { (a+b,=,1),(a+b,=,7) :}
+ -- + Ct(p_open_left * V("tokenizer") * p_right) -- {: (a+b,=,1),(a+b,=,7) }
+ + Ct(p_left * V("tokenizer") * p_right) -- { (a+b,=,1),(a+b,=,7) }
+ + p_special
+ + p_reserved
+ -- + p_utf - p_close - p_right
+ + (p_utf - p_right)
+ )^1,
+}
-local function converted(original,totex)
- local ok, result
- if trace_mapping then
- report_asciimath("original : %s",original)
+local collapse = nil
+local serialize = table.serialize
+local f_state = formatters["level %s : %s : intermediate"]
+
+local function show_state(t,level,state)
+ report_asciimath(serialize(t,f_state(level,state)))
+end
+
+local function show_result(original,unicoded,texcoded)
+ report_asciimath("original > %s",original)
+ report_asciimath("unicoded > %s",unicoded)
+ report_asciimath("texcoded > %s",texcoded)
+end
+
+local function collapse_matrices(t)
+ local n = #t
+ if n > 4 and t[3] == "," then
+ local l1 = t[1]
+ local r1 = t[n]
+ if isleft[l1] and isright[r1] then
+ local l2 = t[2]
+ local r2 = t[n-1]
+ if type(l2) == "table" and type(r2) == "table" then
+ -- we have a matrix
+ local valid = true
+ for i=3,n-2,2 do
+ if t[i] ~= "," then
+ valid = false
+ break
+ end
+ end
+ if valid then
+ for i=2,n-1,2 do
+ local ti = t[i]
+ local tl = ti[1]
+ local tr = ti[#ti]
+ if isleft[tl] and isright[tr] then
+ -- ok
+ else
+ valid = false
+ break
+ end
+ end
+ if valid then
+ local omit = l1 == s_left and r1 == s_right
+ if omit then
+ t[1] = "\\startmatrix"
+ else
+ t[1] = l1 .. "\\startmatrix"
+ end
+ for i=2,n-1 do
+ if t[i] == "," then
+ t[i] = "\\NR"
+ else
+ local ti = t[i]
+ ti[1] = "\\NC"
+ for i=2,#ti-1 do
+ if ti[i] == "," then
+ ti[i] = "\\NC"
+ end
+ end
+ ti[#ti] = nil
+ end
+ end
+ if omit then
+ t[n] = "\\NR\\stopmatrix"
+ else
+ t[n] = "\\NR\\stopmatrix" .. r1
+ end
+ end
+ end
+ end
+ end
+ end
+ return t
+end
+
+local function collapse_bars(t)
+ local n, i, l, m = #t, 1, false, 0
+ while i <= n do
+ local current = t[i]
+ if current == "\\|" then
+ if l then
+ m = m + 1
+ t[l] = s_lbar
+ t[i] = s_rbar
+ t[m] = { unpack(t,l,i) }
+ l = false
+ else
+ l = i
+ end
+ elseif not l then
+ m = m + 1
+ t[m] = current
+ end
+ i = i + 1
end
- local premapped = lpegmatch(premapper,original)
- if premapped then
- if trace_mapping then
- report_asciimath("prepared : %s",premapped)
+ if l then
+ local tt = { s_lnothing } -- space fools final checker
+ local tm = 1
+ for i=1,m do
+ tm = tm + 1
+ tt[tm] = t[i]
end
- local parsed = lpegmatch(parser,premapped)
- if parsed then
- if trace_mapping then
- report_asciimath("parsed : %s",parsed)
+ tm = tm + 1
+ tt[tm] = s_mbar
+ for i=l+1,n do
+ tm = tm + 1
+ tt[tm] = t[i]
+ end
+ tm = tm + 1
+ tt[tm] = s_rnothing -- space fools final checker
+ m = tm
+ t = tt
+ elseif m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ return t
+end
+
+local function collapse_pairs(t)
+ local n, i = #t, 1
+ while i < n do
+ local current = t[i]
+ if current == "/" and i > 1 then
+ local tl = t[i-1]
+ local tr = t[i+1]
+ local tn = t[i+2]
+ if type(tl) == "table" then
+ if isleft[tl[1]] and isright[tl[#tl]] then
+ tl[1] = "" -- todo: remove
+ tl[#tl] = nil
+ end
+ end
+ if type(tr) == "table" then
+ if tn == "^" then
+ -- brr 1/(1+x)^2
+ elseif isleft[tr[1]] and isright[tr[#tr]] then
+ tr[1] = "" -- todo: remove
+ tr[#tr] = nil
+ end
+ end
+ i = i + 2
+ elseif current == "," or current == ";" then
+            -- t[i] = current .. "\\thinspace" -- looks bad in (a,b)
+ i = i + 1
+ else
+ i = i + 1
+ end
+ end
+ return t
+end
+
+local function collapse_parentheses(t)
+ local n, i = #t, 1
+ if n > 2 then
+ while i < n do
+ local current = t[i]
+ if type(current) == "table" and isleft[t[i-1]] and isright[t[i+1]] then
+ local c = #current
+ if c > 2 and isleft[current[1]] and isright[current[c]] then
+ remove(current,c)
+ remove(current,1)
+ end
+ i = i + 3
+ else
+ i = i + 1
+ end
+ end
+ end
+ return t
+end
+
+local function collapse_signs(t)
+ local n, m, i = #t, 0, 1
+ while i <= n do
+ m = m + 1
+ local current = t[i]
+ if isunary[current] then
+ local one = t[i+1]
+ if not one then
+                -- m = m + 1
+                t[m] = current .. "{}" -- error
+                return t
+                -- break
+ end
+ if type(one) == "table" then
+ if isleft[one[1]] and isright[one[#one]] then
+ remove(one,#one)
+ remove(one,1)
+ end
+ one = collapse(one,level)
+ elseif one == "-" and i + 2 <= n then -- or another sign ? or unary ?
+ local t2 = t[i+2]
+ if type(t2) == "string" then
+ one = one .. t2
+ i = i + 1
+ end
+ end
+ t[m] = current .. "{" .. one .. "}"
+ i = i + 2
+ elseif i + 2 <= n and isfunny[current] then
+ local one = t[i+1]
+ if isinfix[one] then
+ local two = t[i+2]
+ if two == "-" then -- or another sign ? or unary ?
+ local three = t[i+3]
+ if three then
+ if type(three) == "table" then
+ three = collapse(three,level)
+ end
+ t[m] = current .. one .. "{" .. two .. three .. "}"
+ i = i + 4
+ else
+ t[m] = current
+ i = i + 1
+ end
+ else
+ t[m] = current
+ i = i + 1
+ end
+ else
+ t[m] = current
+ i = i + 1
+ end
+ else
+ t[m] = current
+ i = i + 1
+ end
+ end
+ if i == n then -- yes?
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ return t
+end
+
+local function collapse_binaries(t)
+ local n, m, i = #t, 0, 1
+ while i <= n do
+ m = m + 1
+ local current = t[i]
+ if isbinary[current] then
+ local one = t[i+1]
+ local two = t[i+2]
+ if not one then
+ t[m] = current .. "{}{}" -- error
+                return t
+                -- break
+ end
+ if type(one) == "table" then
+ if isleft[one[1]] and isright[one[#one]] then
+ remove(one,#one)
+ remove(one,1)
+ end
+ one = collapse(one,level)
+ end
+ if not two then
+ t[m] = current .. "{" .. one .. "}{}"
+                return t
+                -- break
+ end
+ if type(two) == "table" then
+ if isleft[two[1]] and isright[two[#two]] then
+ remove(two,#two)
+ remove(two,1)
+ end
+ two = collapse(two,level)
+ end
+ t[m] = current .. "{" .. one .. "}{" .. two .. "}"
+ i = i + 3
+ else
+ t[m] = current
+ i = i + 1
+ end
+ end
+ if i == n then -- yes?
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ return t
+end
+
+local function collapse_infixes_1(t)
+ local n, i = #t, 1
+ while i <= n do
+ local current = t[i]
+ if isinfix[current] then
+ local what = t[i+1]
+ if what then
+ if type(what) == "table" then
+ local f, l = what[1], what[#what]
+ if isleft[f] and isright[l] then
+ remove(what,#what)
+ remove(what,1)
+ end
+ t[i+1] = collapse(what,level) -- collapse ?
+ end
+ i = i + 2
+ else
+ break
end
- local postmapped = lpegmatch(postmapper,parsed)
- if postmapped then
- if trace_mapping then
- report_asciimath("finalized: %s",postmapped)
+ else
+ i = i + 1
+ end
+ end
+ return t
+end
+
+local function collapse_limits(t)
+ local n, m, i = #t, 0, 1
+ while i <= n do
+ m = m + 1
+ local current = t[i]
+ if islimits[current] then
+ local one, two, first, second = nil, nil, t[i+1], t[i+3]
+ if first and isinfix[first] then
+ one = t[i+2]
+ if one then
+ -- if type(one) == "table" then
+ -- if isleft[one[1]] and isright[one[#one]] then
+ -- remove(one,#one)
+ -- remove(one,1)
+ -- end
+ -- one = collapse(one,level)
+ -- end
+ if second and isinfix[second] then
+ two = t[i+4]
+ -- if type(two) == "table" then
+ -- if isleft[two[1]] and isright[two[#two]] then
+ -- remove(two,#two)
+ -- remove(two,1)
+ -- end
+ -- two = collapse(two,level)
+ -- end
+ end
+ if two then
+ t[m] = current .. "\\limits" .. first .. "{" .. one .. "}" .. second .. "{" .. two .. "}"
+ i = i + 5
+ else
+ t[m] = current .. "\\limits" .. first .. "{" .. one .. "}"
+ i = i + 3
+ end
+ else
+ t[m] = current
+ i = i + 1
end
- result, ok = postmapped, true
else
- result = "error in postmapping"
+ t[m] = current
+ i = i + 1
+ end
+ else
+ t[m] = current
+ i = i + 1
+ end
+ end
+ if i == n then -- yes?
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ return t
+end
+
+local function collapse_tables(t)
+ local n, m, i = #t, 0, 1
+ while i <= n do
+ m = m + 1
+ local current = t[i]
+ if type(current) == "table" then
+ if current[1] == "\\NC" then
+ t[m] = collapse(current,level)
+ else
+ t[m] = "{" .. collapse(current,level) .. "}"
+ end
+ i = i + 1
+ else
+ t[m] = current
+ i = i + 1
+ end
+ end
+ if i == n then -- yes?
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ return t
+end
+
+local function collapse_infixes_2(t)
+ local n, m, i = #t, 0, 1
+ while i < n do
+ local current = t[i]
+ if isinfix[current] and i > 1 then
+ local tl = t[i-1]
+ local tr = t[i+1]
+ local ti = t[i+2]
+ local tn = t[i+3]
+ if ti and tn and isinfix[ti] then
+ t[m] = tl .. current .. "{" .. tr .. "}" .. ti .. "{" .. tn .. "}"
+ i = i + 4
+ else
+ t[m] = tl .. current .. "{" .. tr .. "}"
+ i = i + 2
+ end
+ else
+ m = m + 1
+ t[m] = current
+ i = i + 1
+ end
+ end
+ if i == n then
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ return t
+end
+
+local function collapse_fractions_1(t)
+ local n, m, i = #t, 0, 1
+ while i < n do
+ local current = t[i]
+ if current == "/" and i > 1 then
+ local tl = t[i-1]
+ local tr = t[i+1]
+ t[m] = "\\frac{" .. tl .. "}{" .. tr .. "}"
+ i = i + 2
+ if i < n then
+ m = m + 1
+ t[m] = t[i]
+ i = i + 1
end
else
- result = "error in mapping"
+ m = m + 1
+ t[m] = current
+ i = i + 1
end
+ end
+ if i == n then
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ return t
+end
+
+local function collapse_fractions_2(t)
+ local n, m, i = #t, 0, 1
+ while i < n do
+ local current = t[i]
+ if current == "⁄" and i > 1 then -- \slash
+ t[m] = "{" .. s_left .. t[i-1] .. s_mslash .. t[i+1] .. s_right .. "}"
+ i = i + 2
+ if i < n then
+ m = m + 1
+ t[m] = t[i]
+ i = i + 1
+ end
+ else
+ m = m + 1
+ t[m] = current
+ i = i + 1
+ end
+ end
+ if i == n then
+ m = m + 1
+ t[m] = t[n]
+ end
+ if m < n then
+ for i=n,m+1,-1 do
+ t[i] = nil
+ end
+ end
+ return t
+end
+
+local function collapse_result(t)
+ local n = #t
+ if t[1] == s_left and t[n] == s_right then -- see bar .. space needed there
+ return concat(t," ",2,n-1)
else
- result = "error in premapping"
+ return concat(t," ")
end
- if totex then
- if ok then
- context.mathematics(result)
+end
+
+collapse = function(t,level)
+ -- check
+ if not t then
+ return ""
+ end
+ -- tracing
+ if trace_detail then
+ if level then
+ level = level + 1
else
- context.type(result) -- some day monospaced
+ level = 1
+ end
+ show_state(t,level,"parsed")
+ end
+ -- steps
+ t = collapse_matrices (t) if trace_detail then show_state(t,level,"matrices") end
+ t = collapse_bars (t) if trace_detail then show_state(t,level,"bars") end
+ t = collapse_pairs (t) if trace_detail then show_state(t,level,"pairs") end
+ t = collapse_parentheses(t) if trace_detail then show_state(t,level,"parentheses") end
+ t = collapse_signs (t) if trace_detail then show_state(t,level,"signs") end
+ t = collapse_binaries (t) if trace_detail then show_state(t,level,"binaries") end
+ t = collapse_infixes_1 (t) if trace_detail then show_state(t,level,"infixes (1)") end
+ t = collapse_limits (t) if trace_detail then show_state(t,level,"limits") end
+ t = collapse_tables (t) if trace_detail then show_state(t,level,"tables") end
+ t = collapse_infixes_2 (t) if trace_detail then show_state(t,level,"infixes (2)") end
+ t = collapse_fractions_1(t) if trace_detail then show_state(t,level,"fractions (1)") end
+ t = collapse_fractions_2(t) if trace_detail then show_state(t,level,"fractions (2)") end
+ -- done
+ return collapse_result(t)
+end
+
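+-- The trace_detail tracker (at the tex end: \enabletrackers[modules.asciimath.detail])
+-- reports the intermediate state of the token list after each of these steps.
+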
+-- todo: cache simple ones, say #str < 10, maybe weak
+
+local context = context
+local ctx_mathematics = context and context.mathematics or report_asciimath
+local ctx_type = context and context.type or function() end
+local ctx_inleft = context and context.inleft or function() end
+
+local function convert(str,totex)
+ local unicoded = lpegmatch(u_parser,str) or str
+ local texcoded = collapse(lpegmatch(a_parser,unicoded))
+ if trace_mapping then
+ show_result(str,unicoded,texcoded)
+ end
+ if totex then
+ ctx_mathematics(texcoded)
+ else
+ return texcoded
+ end
+end
+
+local n = 0
+local p = (
+ (S("{[(") + P("\\left" )) / function() n = n + 1 end
+ + (S("}])") + P("\\right")) / function() n = n - 1 end
+ + p_utf_base
+)^0
+
+local function invalidtex(str)
+ n = 0
+ lpegmatch(p,str)
+ if n == 0 then
+ return false
+ elseif n < 0 then
+ return formatters["too many left fences: %s"](-n)
+ elseif n > 0 then
+ return formatters["not enough right fences: %s"](n)
+ end
+end
+
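+-- A quick sanity check (hypothetical input):
+--
+-- print(invalidtex("\\left( x \\right)"))  -- false : balanced
+-- print(invalidtex("\\frac{1}{2} + (x"))   -- not enough right fences: 1
+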
+local collected = { }
+local indexed = { }
+
+-- bonus
+
+local p_reserved_spaced =
+ C(lpeg.utfchartabletopattern(k_reserved_words)) / " %1 "
+
+local p_text =
+ C(P("text")) / " %1 "
+ * p_spaces^0
+ * ( -- maybe balanced
+ (P("{") * (1-P("}"))^0 * P("}"))
+ + (P("(") * (1-P(")"))^0 * P(")"))
+ )
+ + patterns.doublequoted
+
+local p_expand = Cs((p_text + p_reserved_spaced + p_utf_base)^0)
+local p_compress = patterns.collapser
+
+local function cleanedup(str)
+ return lpegmatch(p_compress,lpegmatch(p_expand,str)) or str
+end
+
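+-- The cleanup is only used for collecting statistics: reserved words and text snippets
+-- get spaced out and runs of whitespace collapse, so (presumably) these give the same key:
+--
+-- print(cleanedup("sinsqrtx") == cleanedup("sin sqrt x"))
+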
+-- so far
+
+local function register(s,cleanedup,collected,shortname)
+ local c = cleanedup(s)
+ local f = collected[c]
+ if f then
+ f.count = f.count + 1
+ f.files[shortname] = (f.files[shortname] or 0) + 1
+ if s ~= c then
+ f.cleanedup = f.cleanedup + 1
end
+ f.dirty[s] = (f.dirty[s] or 0) + 1
else
- return result
- end
-end
-
-local function onlyconverted(str)
- local parsed = lpegmatch(parser,str)
- return parsed or str
-end
-
-local sqrt = P("sqrt") / "\\rootradical \\bgroup \\egroup "
-local root = P("root") / "\\rootradical "
-local frac = P("frac") / "\\frac "
-local stackrel = P("stackrel") / "\\stackrel "
-local text = P("text") / "\\mathoptext "
-local hat = P("hat") / "\\widehat "
-local overbar = P("bar") / "\\overbar "
-local underline = P("ul") / "\\underline "
-local vec = P("vec") / "\\overrightarrow "
-local dot = P("dot") / "\\dot "
-local ddot = P("ddot") / "\\ddot "
-
-local left = P("(:") + P("{:") + P("(") + P("[") + P("{")
-local right = P(":)") + P(":}") + P(")") + P("]") + P("}")
-local leftnorright = 1 - left - right
-local singles = sqrt + text + hat + underline + overbar + vec + ddot + dot
-local doubles = root + frac + stackrel
-local ignoreleft = (left/"") * spaces * spaces
-local ignoreright = spaces * (right/"") * spaces
-local ignoreslash = spaces * (P("/")/"") * spaces
-local comma = P(",")
-local nocomma = 1-comma
-local anychar = P(1)
-local openmatrix = left * spaces * Cc("\\matrix\\bgroup ")
-local closematrix = Cc("\\egroup ") * spaces * right
-local nextcolumn = spaces * (comma/"&") * spaces
-local nextrow = spaces * (comma/"\\cr ") * spaces
-local finishrow = Cc("\\cr ")
-local opengroup = left/"\\bgroup "
-local closegroup = right/"\\egroup "
-local somescript = S("^_") * spaces
-local beginargument = Cc("\\bgroup ")
-local endargument = Cc("\\egroup ")
-
-parser = Cs { "main",
-
- scripts = somescript * V("argument"),
- division = Cc("\\frac") * V("argument") * spaces * ignoreslash * spaces * V("argument"),
- double = doubles * spaces * V("argument") * spaces * V("argument"),
- single = singles * spaces * V("argument"),
-
- balanced = opengroup * (C((leftnorright + V("balanced"))^0)/onlyconverted) * closegroup,
- argument = V("balanced") + V("token"),
-
- element = (V("step") + (V("argument") + V("step")) - ignoreright - nextcolumn - comma)^1,
- commalist = ignoreleft * V("element") * (nextcolumn * spaces * V("element"))^0 * ignoreright,
- matrix = openmatrix * spaces * (V("commalist") * (nextrow * V("commalist"))^0) * finishrow * closematrix,
-
- token = beginargument * (texnic + float + real + number + letter) * endargument,
-
- step = V("scripts") + V("division") + V("single") + V("double"),
- main = (V("matrix") + V("step") + anychar)^0,
+ local texcoded = convert(s)
+ local message = invalidtex(texcoded)
+ if message then
+ report_asciimath("%s: %s : %s",message,s,texcoded)
+ end
+ collected[c] = {
+ count = 1,
+ files = { [shortname] = 1 },
+ texcoded = texcoded,
+ message = message,
+ cleanedup = s ~= c and 1 or 0,
+ dirty = { [s] = 1 }
+ }
+ end
+end
-}
+local function wrapup(collected,indexed)
+ local n = 0
+ for k, v in sortedhash(collected) do
+ n = n + 1
+        v.n = n
+ indexed[n] = k
+ end
+end
+local function collect(fpattern,element,collected,indexed)
+ local element = element or "am"
+    local mpattern = formatters["<%s>(.-)</%s>"](element,element)
+ local filenames = resolvers.findtexfile(fpattern)
+ if filenames and filenames ~= "" then
+ filenames = { filenames }
+ else
+ filenames = dir.glob(fpattern)
+ end
+ local cfpattern = gsub(fpattern,"^%./",lfs.currentdir())
+ local cfpattern = gsub(cfpattern,"\\","/")
+ local wildcard = string.split(cfpattern,"*")[1]
+ if not collected then
+ collected = { }
+ indexed = { }
+ end
+ for i=1,#filenames do
+        local filename = gsub(filenames[i],"\\","/")
+ local splitname = (wildcard and wildcard ~= "" and string.split(filename,wildcard)[2]) or filename
+ local shortname = gsub(splitname or file.basename(filename),"^%./","")
+ if shortname == "" then
+ shortname = filename
+ end
+ local fullname = resolvers.findtexfile(filename) or filename
+ if fullname ~= "" then
+ for s in gmatch(io.loaddata(fullname),mpattern) do
+ register(s,cleanedup,collected,shortname)
+ end
+ end
+ end
+ wrapup(collected,indexed)
+ return collected, indexed
+end
+
+local function filter(root,pattern,collected,indexed)
+ if not pattern or pattern == "" then
+ pattern = "am"
+ end
+ if not collected then
+ collected = { }
+ indexed = { }
+ end
+ for c in xmlcollected(root,pattern) do
+ register(xmltext(c),cleanedup,collected,xmlinclusion(c) or "" )
+ end
+ wrapup(collected,indexed)
+ return collected, indexed
+end
+
+asciimath.convert = convert
asciimath.reserved = reserved
-asciimath.convert = converted
+asciimath.collect = collect
+asciimath.filter = filter
+asciimath.invalidtex = invalidtex
+asciimath.cleanedup = cleanedup
+
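+-- A minimal usage sketch (the file pattern and element name are just examples):
+--
+-- local collected, indexed = asciimath.collect("./chapters/*.xml","am")
+-- for i=1,#indexed do
+--     print(indexed[i],collected[indexed[i]].texcoded)
+-- end
+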
+-- sin(x) = 1 : 3.3 uncached 1.2 cached , so no real gain (better optimize the converter then)
+
+local uncrapped = {
+ ["%"] = "\\mathpercent",
+ ["&"] = "\\mathampersand",
+ ["#"] = "\\mathhash",
+ ["$"] = "\\mathdollar",
+ ["^"] = "\\Hat{\\enspace}", -- terrible hack ... tex really does it sbest to turn any ^ into a superscript
+ ["_"] = "\\underline{\\enspace}",
+}
+
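+-- This second convert (the one hooked into the interface below) additionally catches
+-- single characters that would otherwise confuse the math machinery (via the table
+-- above) and reports empty or invalid results instead of silently passing them on.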
+local function convert(str,nowrap)
+ if #str > 0 then
+ local unicoded = lpegmatch(u_parser,str) or str
+ if lpegmatch(p_onechar,unicoded) then
+ ctx_mathematics(uncrapped[unicoded] or unicoded)
+ else
+ local texcoded = collapse(lpegmatch(a_parser,unicoded))
+ if trace_mapping then
+ show_result(str,unicoded,texcoded)
+ end
+ if #texcoded == 0 then
+ report_asciimath("error in asciimath: %s",str)
+ else
+ local message = invalidtex(texcoded)
+ if message then
+ report_asciimath("%s: %s : %s",message,str,texcoded)
+ ctx_type(formatters["<%s>"](message))
+ elseif nowrap then
+ context(texcoded)
+ else
+ ctx_mathematics(texcoded)
+ end
+ end
+ end
+ end
+end
+
+
+local context = context
+
+if not context then
+
+-- trace_mapping = true
+-- trace_detail = true
+
+-- report_asciimath(cleanedup([[ac+sinx+xsqrtx+sinsqrtx+sinsqrt(x)]]))
+-- report_asciimath(cleanedup([[a "αsinsqrtx" b]]))
+-- report_asciimath(cleanedup([[a "α" b]]))
+-- report_asciimath(cleanedup([[//4]]))
+
+-- convert([[\^{1/5}log]])
+-- convert("sqrt")
+-- convert("^")
+
+-- convert("\\frac{a}{b}")
+-- convert("frac{a}{b}")
+-- convert("\\sin{a}{b}")
+-- convert("sin{a}{b}")
+-- convert("1: rightarrow")
+-- convert("2: \\rightarrow")
+
+-- convert("((1,2,3),(4,5,6),(7,8,9))")
+
+-- convert("1/(t+x)^2")
+
+-- convert("AA a > 0 ^^ b > 0 | {:log_g:} a + {:log_g:} b")
+-- convert("AA a > 0 ^^ b > 0 | {:log_g:} a + {:log_g:} b")
+
+-- convert("10000,00001")
+-- convert("4/18*100text(%)~~22,2")
+-- convert("4/18*100text(%)≈22,2")
+-- convert("62541/(197,6)≈316,05")
+
+-- convert([[sum x]])
+-- convert([[sum^(1)_(2) x]])
+-- convert([[lim_(1)^(2) x]])
+-- convert([[lim_(1) x]])
+-- convert([[lim^(2) x]])
+
+-- convert([[{: rangle]])
+-- convert([[\langle\larr]])
+-- convert([[langlelarr]])
+-- convert([[D_f=[0 ,→〉]])
+-- convert([[ac+sinx+xsqrtx]])
+-- convert([[ac+\alpha x+xsqrtx-cc b*pi**psi-3alephx / bb X]])
+-- convert([[ac+\ ^ x+xsqrtx]])
+-- convert([[d/dx(x^2+1)]])
+-- convert([[a "αsinsqrtx" b]])
+-- convert([[a "α" b]])
+-- convert([[//4]])
+-- convert([[ {(a+b,=,1),(a+b,=,7)) ]])
+
+-- convert([[ 2/a // 5/b = (2 b) / ( a b) // ( 5 a ) / ( a b ) = (2 b ) / ( 5 a ) ]])
+-- convert([[ (2+x)/a // 5/b ]])
+
+-- convert([[ ( 2/a ) // ( 5/b ) = ( (2 b) / ( a b) ) // ( ( 5 a ) / ( a b ) ) = (2 b ) / ( 5 a ) ]])
+
+-- convert([[ (x/y)^3 = x^3/y^3 ]])
+
+-- convert([[ {: (1,2) :} ]])
+-- convert([[ {: (a+b,=,1),(a+b,=,7) :} ]])
+-- convert([[ { (a+b,=,1),(a+b,=,7) :} ]])
+-- convert([[ {: (a+b,=,1),(a+b,=,7) } ]])
+-- convert([[ { (a+b,=,1),(a+b,=,7) } ]])
+
+-- convert([[(1,5 ±sqrt(1,25 ),0 )]])
+-- convert([[1//2]])
+-- convert([[(p)/sqrt(p)]])
+-- convert([[u_tot]])
+-- convert([[u_tot=4,4 L+0,054 T]])
+
+-- convert([[ [←;0,2] ]])
+-- convert([[ [←;0,2⟩ ]])
+-- convert([[ ⟨←;0,2 ) ]])
+-- convert([[ ⟨←;0,2 ] ]])
+-- convert([[ ⟨←;0,2⟩ ]])
+
+-- convert([[ x^2(x-1/16)=0 ]])
+-- convert([[ y = ax + 3 - 3a ]])
+-- convert([[ y= ((1/4)) ^x ]])
+-- convert([[ x=\ ^ (1/4) log(0 ,002 )= log(0,002) / (log(1/4) ]])
+-- convert([[ x=\ ^glog(y) ]])
+-- convert([[ x^ (-1 1/2) =1/x^ (1 1/2)=1/ (x^1*x^ (1/2)) =1/ (xsqrt(x)) ]])
+-- convert([[ x^2(10 -x)>2 x^2 ]])
+-- convert([[ x^4>x ]])
+
+ return
+
+end
+
+interfaces.implement {
+ name = "asciimath",
+ actions = convert,
+ arguments = "string"
+}
+
+interfaces.implement {
+ name = "justasciimath",
+ actions = convert,
+ arguments = { "string", true },
+}
+
+local ctx_typebuffer = context.typebuffer
+local ctx_mathematics = context.mathematics
+local ctx_color = context.color
+
+local sequenced = table.sequenced
+local assign_buffer = buffers.assign
+
+local show = { }
+asciimath.show = show
+
+local collected, indexed, ignored = { }, { }, { }
+
+local color = { "darkred" }
+
+function show.ignore(n)
+ if type(n) == "string" then
+ local c = collected[n]
+ n = c and c.n
+ end
+ if n then
+ ignored[n] = true
+ end
+end
+
+function show.count(n,showcleanedup)
+ local v = collected[indexed[n]]
+ local count = v.count
+ local cleanedup = v.cleanedup
+ if not showcleanedup or cleanedup == 0 then
+ context(count)
+ elseif count == cleanedup then
+ ctx_color(color,count)
+ else
+ context("%s+",count-cleanedup)
+ ctx_color(color,cleanedup)
+ end
+end
+
+local h = { }
+local am = { "am" }
+
+function show.nofdirty(n)
+ local k = indexed[n]
+ local v = collected[k]
+ local n = v.cleanedup
+ h = { }
+ if n > 0 then
+ for d, n in sortedhash(v.dirty) do
+ if d ~= k then
+ h[#h+1] = { d, n }
+ end
+ end
+ end
+ context(#h)
+end
+
+function show.dirty(m,wrapped)
+ local d = h[m]
+ if d then
+ ctx_inleft(d[2])
+ if wrapped then
+ assign_buffer("am",'"' .. d[1] .. '"')
+ else
+ assign_buffer("am",d[1])
+ end
+ ctx_typebuffer(am)
+ end
+end
+
+function show.files(n)
+ context(sequenced(collected[indexed[n]].files," "))
+end
+
+function show.input(n,wrapped)
+ if wrapped then
+ assign_buffer("am",'"' .. indexed[n] .. '"')
+ else
+ assign_buffer("am",indexed[n])
+ end
+ ctx_typebuffer(am)
+end
+
+function show.result(n)
+ local v = collected[indexed[n]]
+ if ignored[n] then
+ context("ignored")
+ elseif v.message then
+ ctx_color(color, v.message)
+ else
+ ctx_mathematics(v.texcoded)
+ end
+end
+
+function show.load(str,element)
+ collected, indexed, ignored = { }, { }, { }
+ local t = utilities.parsers.settings_to_array(str)
+ for i=1,#t do
+ asciimath.collect(t[i],element or "am",collected,indexed)
+ end
+end
+
+function show.filter(id,element)
+ collected, indexed, ignored = { }, { }, { }
+ asciimath.filter(lxml.getid(id),element or "am",collected,indexed)
+end
+
+function show.max()
+ context(#indexed)
+end
+
+function show.statistics()
+ local usedfiles = { }
+ local noffiles = 0
+ local nofokay = 0
+ local nofbad = 0
+ local nofcleanedup = 0
+ for k, v in next, collected do
+ if ignored[v.n] then
+ nofbad = nofbad + v.count
+ elseif v.message then
+ nofbad = nofbad + v.count
+ else
+ nofokay = nofokay + v.count
+ end
+ nofcleanedup = nofcleanedup + v.cleanedup
+ for k, v in next, v.files do
+ local u = usedfiles[k]
+ if u then
+ usedfiles[k] = u + 1
+ else
+ noffiles = noffiles + 1
+ usedfiles[k] = 1
+ end
+ end
+ end
+ local NC = context.NC
+ local NR = context.NR
+ local EQ = context.EQ
+ context.starttabulate { "|B||" }
+ NC() context("files") EQ() context(noffiles) NC() NR()
+ NC() context("formulas") EQ() context(nofokay+nofbad) NC() NR()
+ NC() context("uniques") EQ() context(#indexed) NC() NR()
+ NC() context("cleanedup") EQ() context(nofcleanedup) NC() NR()
+ NC() context("errors") EQ() context(nofbad) NC() NR()
+ context.stoptabulate()
+end
+
+function show.save(name)
+ table.save(name ~= "" and name or "dummy.lua",collected)
+end
diff --git a/tex/context/base/x-asciimath.mkiv b/tex/context/base/x-asciimath.mkiv
index b555115ff..1d62fb93d 100644
--- a/tex/context/base/x-asciimath.mkiv
+++ b/tex/context/base/x-asciimath.mkiv
@@ -1,6 +1,6 @@
%D \module
-%D [ file=m-asciimath,
-%D version=2006.04.24, % 1999.11.06,
+%D [ file=x-asciimath,
+%D version=2014.06.01, % 2006.04.24, % 1999.11.06,
%D title=\CONTEXT\ Modules,
%D subtitle=AsciiMath,
%D author=Hans Hagen,
@@ -11,88 +11,383 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D Lua code.
-
\registerctxluafile{x-asciimath}{}
-\def\ctxmoduleasciimath#1{\ctxlua{moduledata.asciimath.#1}}
-
-%D The following code is not officially supported and is only meant
-%D for the Math4All project.
+%D When the Math4All project started, we immediately started using content \MATHML.
+%D Because in school math there is often a reference to calculator input, we also
+%D provided what we called \quote {calcmath}: a predictable, expression-based way of
+%D entering math. At some point \OPENMATH\ was also used but that was later
+%D abandoned because editing is more cumbersome.
%D
-%D The following code kind of maps ascii math
-%D http://www1.chapman.edu/~jipsen/mathml/asciimath.html onto \TEX. The
-%D code was written for the math4all project but in retrospect we
-%D could have used just tex code as the web version can handle that
-%D as well. Anyhow, as we use \MATHML\ as basis it makes sense to add
-%D this to the repertoire as annotation variant, so now we have
-%D content \MATHML\ (prefered), presentation \MATHML\ (often messy),
-%D \OPENMATH\ (what was which we started with in this project)
-%D calcmath (handy for students who are accustomed to calculators),
-%D asciimath (to make Frits's live easier) and of course \TEX. Of
-%D course all are used mixed.
+%D Due to limitations in the web variant (which is independent of rendering for
+%D paper but often determines the coding of documents, not seldom for the worse) the
+%D switch was made to presentational \MATHML. But even that proved to be too complex
+%D for rendering on the web, so it got converted to so called \ASCIIMATH\ which
+%D can be rendered using some \JAVASCRIPT\ magic. However, all the formulas (and
+%D we're talking of tens of thousands of them) were very precisely coded by the main
+%D author. Because in intermediate stages of the editing (by additional authors) a
+%D mixture of \MATHML\ and \ASCIIMATH\ was used, we wrote the first version of this
+%D module. As reference we took \url
+%D {http://www1.chapman.edu/~jipsen/mathml/asciimath.html} and. The idea was to
+%D stick to \MATHML\ as reference and if needed use \ASCIIMATH\ as annotation.
%D
-%D We don't support all quirks of asciimath as I am not in the mood to
-%D write a complex parser while a bit of sane coding can work as well.
+%D Eventually we ended up with supporting several math encodings in \CONTEXT\ that
+%D could be used mixed: content \MATHML\ (preferred), presentation \MATHML\ (often
+%D messy), \OPENMATH\ (somewhat minimalistic), calcmath (handy for students who are
+%D accustomed to calculators), \ASCIIMATH\ (to make web support easier) and of
+%D course \TEX.
%D
+%D The first version had some limitations as we didn't want to support all quirks of
+%D \ASCIIMATH\ and also because I was not really in the mood to write a complex parser
+%D when a bit of sane coding can work equally well. Some comments from that version:
+%D
+%D \startnarrower
%D \startitemize
-%D \item We support only the syntactically clear variants and as long
-%D as lpeg does not support left recursion this is as far as we
-%D want to go.
-%D \item The parser is rather insensitive for spaces but yet the advice is
-%D to avoid weird coding like \type {d/dxf(x)} but use \type {d/dx
-%D f(x)} instead. After all we're not in a compact coding cq.\
-%D parser challenge.
-%D \item We also don't support the somewhat confusing \type {sqrt sqrt 2}
-%D nor \type {root3x} (although the second one kind of works). A bit
-%D of defensive coding does not hurt.
-%D \item We can process \type {a/b/c/d} but it's not compatible with the
-%D default behaviour of asciimath. Use grouping instead. Yes, we do
-%D support the somewhat nonstandard grouping token mix.
-%D \item You should use explicit \type {text(..)} directives as one can
-%D never be sure what is a reserved word and not.
+%D \item We support only the syntactically clear variants and as long as lpeg does
+%D not support left recursion this is as far as we want to go.
+%D \item The parser is rather insensitive for spaces but yet the advice is to avoid
+%D weird coding like \type {d/dxf(x)} but use \type {d/dx f(x)} instead. After
+%D all we're not in a compact coding cq.\ parser challenge.
+%D \item We also don't support the somewhat confusing \type {sqrt sqrt 2} nor \type
+%D {root3x} (although the second one kind of works). A bit of defensive coding
+%D does not hurt.
+%D \item We can process \type {a/b/c/d} but it's not compatible with the default
+%D behaviour of \ASCIIMATH. Use grouping instead. Yes, we do support the somewhat
+%D nonstandard grouping token mix.
+%D \item You should use explicit \type {text(..)} directives as one can never be sure
+%D what is a reserved word and not.
%D \stopitemize
%D
-%D Actually, as the only parsing sensitive elements of \TEX\ are
-%D fractions (\type {\over} and friends, a restricted use of \TEX\
-%D coding is probably as comprehensive and parseble.
-%D
-%D The webpage with examples served as starting point so anything beyond
+%D Actually, as the only parsing sensitive elements of \TEX\ are fractions (\type {\over}
+%D and friends), a restricted use of \TEX\ coding is probably as comprehensive and
+%D parsable. The webpage with examples served as starting point so anything beyond
%D what can be found there isn't supported.
+%D \stopnarrower
+%D
+%D Then in 2014 something bad happened. Following the fashion of minimal encoding
+%D (which of course means messy encoding of complex cases and which can make authors
+%D sloppy too) the web based support workflow of the mentioned project ran into some
+%D limitations and magically one day all carefully coded \MATHML\ was converted into
+%D \ASCIIMATH. As there was no way to recover the original thousands of files and
+%D tens of thousands of formulas we were suddenly stuck with \ASCIIMATH. Because the
+%D conversion had be done automagically, we also saw numerous errors and were forced
+%D to come up with some methods to check formulas. Because \MATHML\ poses some
+%D restrictions it has predictable rendering; \ASCIIMATH\ on the other hand enforces
+%D no structure. Also, because \MATHML\ has to be valid \XML, it can always be processed.
+%D Of course, during the decade that the project had run we also had to build in
+%D some catches for abuse but at least we had a relatively stable and configurable
+%D subsystem. So, in order to deal with less predictable cases as well as extensive
+%D checking, a new \ASCIIMATH\ parser was written, one that could also be used to
+%D trace bad coding.
+%D
+%D Because the formal description is incomplete, and because some links to resources
+%D are broken, and because some testing on the web showed that sequences of characters
+%D are interpreted that were not mentioned anywhere (visible), and because we noticed
+%D that the parser was dangerously tolerant, the new code is quite different from the
+%D old code.
+%D
+%D One needs to keep in mind that because spaces are optional, the only robust way to
+%D edit \ASCIIMATH\ is to use a \WYSIWYG\ editor and hope that the parser doesn't
+%D change ever. Keys are picked up from spaceless sequences and when not recognized
+%D a (sequence of) characters is considered to be variables. So, \type {xsqrtx} is
+%D valid and renders as \type {$x\sqrt{x}$}, \type {xx} becomes \type {×} (times)
+%D but \type {ac} becomes \type {$a c$} (a times c). We're lucky that \type {AC} is
+%D not turned into Alternating Current, but who knows what happens a few years from
+%D now. So, we do support this spaceless mess, but users are warned: best use a
+%D spacy sequence. The extra amount of spaces (at one byte each) an author has to
+%D include in his|/|her active writing time probably stays below the size of one
+%D holiday picture. Another complication is that numbers (in Dutch) use commas instead
+%D of periods, but vectors use commas as well. We also have some different names for
+%D functions which then can conflict with the expectations about collapsed variables.
+%D
+%D It must be noted that simplified encodings (that seem to be the fashion today)
+%D can demand from applications to apply fuzzy logic to make something work out
+%D well. Because we have sequential data that gets rendered, sometimes wrong input
+%D gets obscured simply by the rendering: like the commas in numbers as well as
+%D for separators (depending on space usage), or plain wrong symbols that somehow
+%D get a representation anyway. This in itself is more a side effect of trying to
+%D use the simplified encoding without applying rules (in the input) or to use it
+%D beyond its intended usage, which then of course can lead to adapted parsers and
+%D catches that themselves trigger further abuse. Imagine that instead of developing
+%D new cars, planes, space ships, mobile phones, computers we would have adapted
+%D horse carts, kites, fireworks, old fashioned phones and mechanical calculators in a
+%D similar way: patch upon patch of traditional means for sure would not have
+%D worked. So, when you use \ASCIIMATH\ best check immediately how it gets rendered
+%D in the browser as well as on paper. And be prepared to check the more complex
+%D code in the future again. We don't offer any guarantees but of course will try to
+%D keep up.
+%D
+%D In retrospect I sometimes wonder if the energy put into constantly adapting to
+%D the fashion of the day pays off. Probably not. It definitely doesn't pay off.
+%D
+%D More complex crap:
+%D
+%D 1: $x + \stackrel{comment}{\stackrel{\utfchar{"23DE}}{yyyyyyyy}} = y$ \blank
+%D 2: \asciimath{x + stackrel{\utfchar{"23DE}}{yyyyyyyy} = y} \blank
+%D 3: \asciimath{x + stackrel{yyyyyyyy}{\utfchar{"23DE}} = y} \blank
+%D 4: \asciimath{x + stackrel{"comment"}{stackrel{\utfchar{"23DE}}{yyyyyyyy}} = y} \blank
+
+\usemodule[mathml-basics]
+
+\startmodule[asciimath]
\unprotect
\writestatus{asciimath}{beware, this is an experimental (m4all only) module}
-\unexpanded\def\asciimath#1{\ctxmoduleasciimath{convert(\!!bs\detokenize{#1}\!!es,true)}}
+%D Hacks:
+
+\unexpanded\def\asciimathoptext #1{\ifmmode\mathoptext{#1}\else#1\fi}
+\unexpanded\def\asciimathoptexttraced #1{\ifmmode\mathoptext{\color[darkgreen]{#1}}\else\color[darkgreen]{#1}\fi}
+\unexpanded\def\asciimathstackrel #1#2{\mathematics{\mathop{\let\limits\relax\mover{#2}{#1}}}}
+\unexpanded\def\asciimathroot #1#2{\sqrt[#1]{#2}}
+\unexpanded\def\asciimathsqrt #1{\sqrt{#1}}
+
+%D The core commands:
+
+% if we need to set
+
+\installsetuponlycommandhandler {asciimath} {asciimath}
+
+\appendtoks
+ \ctxlua{moduledata.asciimath.setup {
+ splitmethod = "\asciimathparameter\c!splitmethod",
+ separator = "\asciimathparameter\c!separator",
+ }}%
+\to \everysetupasciimath
+
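+%D A setup presumably looks as follows (the values are only an example):
+%D
+%D \starttyping
+%D \setupasciimath[splitmethod=3,separator=yes]
+%D \stoptyping
+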
+\newtoks\everyasciimath
+
+% \appendtoks
+% \ignorediscretionaries
+% \to \everyasciimath
+
+\unexpanded\def\asciimath
+ {\doifnextoptionalelse\asciimath_yes\asciimath_nop}
+
+\def\asciimath_yes[#1]#2%
+ {\mathematics[#1]{\clf_justasciimath{\detokenize\expandafter{\normalexpanded{#2}}}}}
+
+\def\asciimath_nop#1%
+ {\mathematics{\clf_justasciimath{\detokenize\expandafter{\normalexpanded{#1}}}}}
+
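+%D A typical call (the formula is only an example; the optional argument is passed on
+%D to \type {\mathematics}):
+%D
+%D \starttyping
+%D \asciimath{d/dx f(x)=lim_(h->0) (f(x+h)-f(x))/h}
+%D \stoptyping
+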
+\unexpanded\def\ctxmoduleasciimath#1%
+ {\ctxlua{moduledata.asciimath.#1}}
+
+%D Some tracing commands. Using tex commands is 10\% slower than directly piping
+%D from \LUA, but this is non|-|critical code.
+
+\unexpanded\def\ShowAsciiMathLoad [#1]{\ctxlua{moduledata.asciimath.show.load("#1")}}
+\unexpanded\def\ShowAsciiMathIgnore[#1]{\ctxlua{moduledata.asciimath.show.ignore("#1")}}
+\unexpanded\def\ShowAsciiMathXML #1#2{\ctxlua{moduledata.asciimath.show.filter("#1","#2")}}
+\unexpanded\def\ShowAsciiMathStats {\ctxlua{moduledata.asciimath.show.statistics()}}
+\unexpanded\def\ShowAsciiMathMax {\ctxlua{moduledata.asciimath.show.max()}}
+
+\unexpanded\def\ShowAsciiMathResult#1%
+ {\begingroup
+ \blank
+ % if we are in vmode, we don't get positions i.e. a smaller tuc file
+ \inleft{\ttbf#1\hfill\ctxlua{moduledata.asciimath.show.count(#1,true)}}%
+ \dontleavehmode
+ \begingroup
+ \ttbf
+ \ctxlua{moduledata.asciimath.show.files(#1)}
+ \endgroup
+ \blank[medium,samepage]
+ \startcolor[darkblue]
+ \ctxlua{moduledata.asciimath.show.input(#1,true)}
+ \stopcolor
+ \blank[medium,samepage]
+ \doifmode{asciimath:show:dirty} {
+ \dorecurse{\ctxlua{moduledata.asciimath.show.nofdirty(#1)}} {
+ \ctxlua{moduledata.asciimath.show.dirty(\recurselevel,true)}
+ \blank[medium,samepage]
+ }
+ }
+ \ctxlua{moduledata.asciimath.show.result(#1)}
+ \blank
+ \endgroup}
+
+\unexpanded\def\ShowAsciiMathStart
+ {\begingroup
+ \let\asciimathoptext\asciimathoptexttraced
+ \setuptyping[\v!buffer][\c!before=,\c!after=]
+ \setupmargindata[\v!left][\c!style=]}
+
+\unexpanded\def\ShowAsciiMathStop
+ {\endgroup}
+
+\unexpanded\def\ShowAsciiMath
+ {\dodoubleempty\doShowAsciiMath}
+
+\unexpanded\def\doShowAsciiMath[#1][#2]%
+ {\iffirstargument
+ \ShowAsciiMathStart
+ \ShowAsciiMathLoad[#1]
+ \ifsecondargument
+ \ShowAsciiMathIgnore[#2]
+ \fi
+ \dorecurse{\ShowAsciiMathMax}{\ShowAsciiMathResult\recurselevel}
+ \page
+ \ShowAsciiMathStats
+ \ShowAsciiMathStop
+ \fi}
+
+\unexpanded\def\xmlShowAsciiMath#1#2%
+ {\iffirstargument
+ \ShowAsciiMathStart
+ \ShowAsciiMathXML{#1}{#2}%
+ \dorecurse{\ShowAsciiMathMax}{\ShowAsciiMathResult\recurselevel}
+ \page
+ \ShowAsciiMathStats
+ \ShowAsciiMathStop
+ \fi}
+
+\unexpanded\def\ShowAsciiMathSave
+ {\dosingleempty\doShowAsciiMathSave}
+
+\unexpanded\def\doShowAsciiMathSave[#1]%
+ {\ctxlua{moduledata.asciimath.show.save("#1")}}
\protect
+\startsetups asciimath:layout
+
+ \setupbodyfont
+ % [pagella,10pt]
+ [dejavu,10pt]
+
+ \setuplayout
+ [backspace=35mm,
+ leftmargin=20mm,
+ rightmargindistance=0pt,
+ leftmargindistance=5mm,
+ cutspace=1cm,
+ topspace=1cm,
+ bottomspace=1cm,
+ width=middle,
+ height=middle,
+ header=0cm,
+ footer=1cm]
+
+ \setupheadertexts
+ []
+
+ \setupfootertexts
+ [\currentdate][\pagenumber]
+
+ \setupalign
+ [flushleft,verytolerant,stretch]
+
+ \dontcomplain
+
+\stopsetups
+
+\stopmodule
+
\continueifinputfile{x-asciimath.mkiv}
-\enabletrackers[modules.asciimath.mapping]
+%D This will become an extra.
-\starttext
+\showframe
+
+\setups[asciimath:layout]
+
+% \enabletrackers[modules.asciimath.mapping]
+% \enabletrackers[modules.asciimath.detail]
+% \starttext
+% \enablemode[asciimath:show:dirty]
+% \ShowAsciiMath[e:/temporary/asciimath/*.xml]
+% % \ShowAsciiMathSave[e:/temporary/asciimath/asciimath.lua]
+% \stoptext
+
+\starttext
+\unexpanded\def\MyAsciiMath#1{\startformula\asciimath{#1}\stopformula}
\startlines
-\asciimath{x^2+y_1+z_12^34}
-\asciimath{sin^-1(x)}
-\asciimath{d/dx f(x)=lim_(h->0) (f(x+h)-f(x))/h}
-\asciimath{f(x)=sum_(n=0)^oo(f^((n))(a))/(n!)(x-a)^n}
-\asciimath{int_0^1 f(x)dx}
-\asciimath{int^1_0 f(x)dx}
-\asciimath{a//b}
-\asciimath{(a/b)/(d/c)}
-\asciimath{((a*b))/(d/c)}
-\asciimath{[[a,b],[c,d]]((n),(k))}
-\asciimath{1/x={(1,text{if } x!=0),(text{undefined},if x=0):}}
-\asciimath{{ (1,2), (x,(x + text(x))) }}
-\asciimath{{(1,2),(x,(x+text(x))),(x,text(x))}}
-\asciimath{{(1,2),(x,(x+text(x))),(x,x text(x))}}
-\asciimath{{(1,2/2),(x,(x+x^22+sqrt(xx))),(x,x text(xyz))}}
-\asciimath{{(1,2/2),(x,(x+x^22+sqrt(xx))),(x,text(xyz)+1+text(hans))}}
-\asciimath{<> text{and} {:(x,y),(u,v):}}
-\asciimath{(a,b] = {x text(in) RR | a < x <= b}}
+\MyAsciiMath{x^2 / 10 // z_12^34 / 20}
+% \MyAsciiMath{{:{:x^2:} / 10:} // {:{:z_12^34 :} / 20:}}
+% \MyAsciiMath{x^2+y_1+z_12^34}
+% \MyAsciiMath{sin^-1(x)}
+% \MyAsciiMath{d/dx f(x)=lim_(h->0) (f(x+h)-f(x))/h}
+% \MyAsciiMath{f(x)=sum_(n=0)^oo(f^((n))(a))/(n!)(x-a)^n}
+% \MyAsciiMath{int_0^1 f(x)dx}
+% \MyAsciiMath{int^1_0 f(x)dx}
+% \MyAsciiMath{a//b}
+% \MyAsciiMath{a//\alpha}
+% \MyAsciiMath{(a/b)/(d/c)}
+% \MyAsciiMath{((a*b))/(d/c)}
+% \MyAsciiMath{[[a,b],[c,d]]((n),(k))}
+% \MyAsciiMath{1/x={(1,text{if } x!=0),(text{undefined},if x=0):}}
+% \MyAsciiMath{{ (1,2), (x,(x + text(x))) }}
+% \MyAsciiMath{{(1,2),(x,(x+text(x))),(x,text(x))}}
+% \MyAsciiMath{{(1,2),(x,(x+text(x))),(x,x text(x))}}
+% \MyAsciiMath{{(1,2/2),(x,(x+x^22+sqrt(xx))),(x,x text(xyz))}}
+% \MyAsciiMath{{(1,2/2),(x,(x+x^22+sqrt(xx))),(x,text(xyz)+1+text(hans))}}
+% \MyAsciiMath{<> text{and} {:(x,y),(u,v):}}
+% \MyAsciiMath{(a,b] = {x text(in) RR | a < x <= b}}
+% \MyAsciiMath{a/b / c/d = (a * d) / (b * d) / (b * c) / (b * d) = (a * d) / (b * c)}
+% \MyAsciiMath{ (a/b) // (c/d) = ( (a * d) / (b * d) ) // ( (b * c) / (b * d) ) = (a * d) / (b * c)}
+% \MyAsciiMath{sin(x+1)_3^2/b / c/d}
+% \MyAsciiMath{{:{:sin(x+1)_3^2:}/b:} / {:c/d:}}
+% \MyAsciiMath{cos(a) + sin(x+1)_3^2/b / c/d = (a * d) / (b * d) / (b * c) / (b * d) = (a * d) / (b * c)}
+% \MyAsciiMath{S_(11)}
+% \MyAsciiMath{f(x)}
+% \MyAsciiMath{sin(x)}
+% \MyAsciiMath{sin(x+1)}
+% \MyAsciiMath{sin^-1(x)}
+% \MyAsciiMath{sin(2x)}
+% \MyAsciiMath{a_2^2}
+% \MyAsciiMath{( (S_(11),S_(12),S_(1n)),(vdots,ddots,vdots),(S_(m1),S_(m2),S_(mn)) ]}
+% \MyAsciiMath{frac a b}
+% \MyAsciiMath{sin(x)/2 // cos(x)/pi}
+% \MyAsciiMath{a/13 // c/d}
+% \MyAsciiMath{a/b // c/d}
+% \MyAsciiMath{x}
+% \MyAsciiMath{x^2}
+% \MyAsciiMath{sqrt x}
+% \MyAsciiMath{sqrt (x)}
+% \MyAsciiMath{root 2 x}
+% \MyAsciiMath{x+x}
+% \MyAsciiMath{x/3}
+% \MyAsciiMath{x^2 / 10}
+% \MyAsciiMath{x^2 / 10 // z_12^34 / 20}
+% \MyAsciiMath{a^23}
+% \MyAsciiMath{a^{:b^23:}+3x}
+% \MyAsciiMath{a/b / c/d}
+% \MyAsciiMath{sin(x)/b / c/d}
+% \MyAsciiMath{sin(x)/b // c/d}
+% \MyAsciiMath{a/b / c/d = (a * d) / (b * d) / (b * c) / (b * d) = (a * d) / (b * c) }
+% \MyAsciiMath{{:{:x^2:} / 10:} // {:{:z_12^34 :} / 20:}}
+% \MyAsciiMath{x^2+y_1+z_12^34}
+% \MyAsciiMath{sin^-1(x)}
+% \MyAsciiMath{d/dx f(x)=lim_(h->0) (f(x+h)-f(x))/h}
+% \MyAsciiMath{f(x)=sum_(n=0)^oo(f^((n))(a))/(n!)(x-a)^n}
+% \MyAsciiMath{int_0^1 f(x)dx}
+% \MyAsciiMath{int^1_0 f(x)dx}
+% \MyAsciiMath{2x}
+% \MyAsciiMath{a//b}
+% \MyAsciiMath{a//\alpha}
+% \MyAsciiMath{(a/b)/(d/c)}
+% \MyAsciiMath{((a*b))/(d/c)}
+% \MyAsciiMath{[[a,b],[c,d]]((n),(k))}
+% \MyAsciiMath{1/x={(1,text{if } x!=0),(text{undefined},if x=0):}}
+% \MyAsciiMath{{ (1,2), (x,(x + text(x))) }}
+% \MyAsciiMath{{(1,2),(x,(x+text(x))),(x,text(x))}}
+% \MyAsciiMath{{(1,2),(x,(x+text(x))),(x,x text(x))}}
+% \MyAsciiMath{{(1,2/2),(x,(x+x^22+sqrt(xx))),(x,x text(xyz))}}
+% \MyAsciiMath{{(1,2/2),(x,(x+x^22+sqrt(xx))),(x,text(xyz)+1+text(hans))}}
+% \MyAsciiMath{<> text{and} {:(x,y),(u,v):}}
+% \MyAsciiMath{(a,b] = {x text(in) RR | a < x <= b}}
+% \MyAsciiMath{x^-2}
+% \MyAsciiMath{x^2(x-1/16)=0}
+% \MyAsciiMath{y= ((1/4)) ^x}
+% \MyAsciiMath{log (0,002) / (log(1/4))}
+% \MyAsciiMath{x=ax+b \ oeps}
+% \MyAsciiMath{x=\ ^ (1/4) log(x)}
+% \MyAsciiMath{x=\ ^ (1/4) log(0 ,002 )= log(0,002) / (log(1/4))}
+% \MyAsciiMath{x^ (-1 1/2) =1/x^ (1 1/2)=1/ (x^1*x^ (1/2)) =1/ (xsqrt(x))}
+% \MyAsciiMath{x^2(10 -x)>2 x^2}
+% \MyAsciiMath{x^4>x}
\stoplines
-
\stoptext
diff --git a/tex/context/base/x-calcmath.lua b/tex/context/base/x-calcmath.lua
index 1394f3450..c96d8d0ac 100644
--- a/tex/context/base/x-calcmath.lua
+++ b/tex/context/base/x-calcmath.lua
@@ -16,6 +16,8 @@ local calcmath = { }
local moduledata = moduledata or { }
moduledata.calcmath = calcmath
+local context = context
+
local list_1 = {
"median", "min", "max", "round", "ln", "log",
"sin", "cos", "tan", "sinh", "cosh", "tanh"
@@ -46,8 +48,8 @@ local function freeze()
for k=1,#list_2 do
local v = list_2[k]
list_2_1[v .. "%((.-),(.-),(.-)%)"] = "\\" .. upper(v) .. "^{%1}_{%2}{%3}"
- list_2_2[v .. "%((.-),(.-)%)"] = "\\" .. upper(v) .. "^{%1}{%2}"
- list_2_3[v .. "%((.-)%)"] = "\\" .. upper(v) .. "{%1}"
+ list_2_2[v .. "%((.-),(.-)%)"] = "\\" .. upper(v) .. "^{%1}{%2}"
+ list_2_3[v .. "%((.-)%)"] = "\\" .. upper(v) .. "{%1}"
end
for k=1,#list_4 do
local v = list_4[k]
@@ -192,7 +194,6 @@ if false then
-- Df Dg {\rm f}^{\prime}
-- f() g() {\rm f}()
-
-- valid utf8
local S, P, R, C, V, Cc, Ct = lpeg.S, lpeg.P, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Ct
diff --git a/tex/context/base/x-cals.lua b/tex/context/base/x-cals.lua
index 36bc1aaba..3af6106d8 100644
--- a/tex/context/base/x-cals.lua
+++ b/tex/context/base/x-cals.lua
@@ -6,6 +6,7 @@ if not modules then modules = { } end modules ['x-cals'] = {
license = "see context related readme files"
}
+local next = next
local format, lower = string.format, string.lower
local xmlsprint, xmlcprint, xmlcollected, xmlelements = xml.sprint, xml.cprint, xml.collected, xml.elements
local n_todimen, s_todimen = number.todimen, string.todimen
@@ -65,8 +66,10 @@ end
local function getspecs(root, pattern, names, widths)
-- here, but actually we need this in core-ntb.tex
-- but ideally we need an mkiv enhanced core-ntb.tex
- local ignore_widths = cals.ignore_widths
- local shrink_widths = cals.shrink_widths
+ local ignore_widths = cals.ignore_widths
+-- local shrink_widths = at.option == "shrink" or cals.shrink_widths
+-- local stretch_widths = at.option == "stretch" or cals.stretch_widths
+ local shrink_widths = cals.shrink_widths
local stretch_widths = cals.stretch_widths
for e in xmlcollected(root,pattern) do
local at = e.at
diff --git a/tex/context/base/x-ct.lua b/tex/context/base/x-ct.lua
index 2dee985c3..9c647e8e7 100644
--- a/tex/context/base/x-ct.lua
+++ b/tex/context/base/x-ct.lua
@@ -122,6 +122,8 @@ function moduledata.ct.tabulate(root,namespace)
end
+-- todo: use content and caption
+
function moduledata.ct.combination(root,namespace)
if not root then
diff --git a/tex/context/base/x-html.mkiv b/tex/context/base/x-html.mkiv
new file mode 100644
index 000000000..e1806eb9e
--- /dev/null
+++ b/tex/context/base/x-html.mkiv
@@ -0,0 +1,379 @@
+%D \module
+%D [ file=x-html,
+%D version=2011.02.03, % adapted 2014.11.08
+%D title=\CONTEXT\ Modules,
+%D subtitle=HTML,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\startmodule[html]
+
+%D Usage:
+%D
+%D \starttyping
+%D \xmlregistersetup{xml:html:basics}
+%D \xmlregistersetup{xml:html:tables}
+%D \stoptyping
+
+% \xmlsetsetup{#1}{(p|span)[@lang]}{xml:html:lang}
+%
+% \startxmlsetups xml:html:lang
+% \begingroup
+% \language[\xmlatt{#1}{lang}]
+% \xmlsetup{#1}{xml:html:\xmltag{#1}}
+% \endgroup
+% \stopxmlsetups
+
+\unprotect
+
+\definehighlight[b] [\c!command=\v!no,\c!style=\v!bold]
+\definehighlight[i] [\c!command=\v!no,\c!style=\v!italic]
+\definehighlight[bi] [\c!command=\v!no,\c!style=\v!bolditalic]
+\definehighlight[em] [\c!command=\v!no,\c!style=\em]
+\definehighlight[tt] [\c!command=\v!no,\c!style=\v!mono]
+\definehighlight[strong][\c!command=\v!no,\c!style=\v!bold]
+\definehighlight[u] [\c!command=\v!no,\c!style=\directsetbar{\v!underbar}]
+\definehighlight[code] [\c!command=\v!no,\c!style=\v!mono]
+\definehighlight[pre] [\c!command=\v!no]
+
+\protect
+
+% todo: pre
+
+\startxmlsetups xml:html:basics
+ \xmlsetsetup{#1}{p|br|b|i|u|em|tt|strong|ul|ol|li|table|thead|tbody|tfoot|tr|td|th|span|img}{xml:html:*}
+ \xmlsetsetup{#1}{b/i}{xml:html:bi}
+ \xmlsetsetup{#1}{i/b}{xml:html:bi}
+ \xmlstripanywhere{#1}{!pre}
+\stopxmlsetups
+
+\startxmlsetups xml:html:tables
+ \xmlsetsetup{#1}{table|thead|tbody|tfoot|tr|td|th}{xml:html:*}
+\stopxmlsetups
+
+\startxmlsetups xml:html:p
+ \xmldoifnotselfempty {#1} {
+ \dontleavehmode
+ \ignorespaces
+ \xmlflush{#1}
+ \removeunwantedspaces
+ }
+ \par
+\stopxmlsetups
+
+\startxmlsetups xml:html:br
+ \crlf
+\stopxmlsetups
+
+\startxmlsetups xml:html:b
+ \directhighlight{b}{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:html:i
+ \directhighlight{i}{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:html:bi
+ \directhighlight{bi}{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:html:em
+ \directhighlight{em}{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:html:tt
+ \directhighlight{tt}{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:html:strong
+ \directhighlight{strong}{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:html:u
+ \directhighlight{u}{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:html:ul
+ \startitemize[packed]
+ \xmlflush{#1}
+ \stopitemize
+\stopxmlsetups
+
+\startxmlsetups xml:html:ol
+ \startitemize[packed,n]
+ \xmlflush{#1}
+ \stopitemize
+\stopxmlsetups
+
+\startxmlsetups xml:html:li
+ \startitem
+ \xmlflush{#1}
+ \stopitem
+\stopxmlsetups
+
+\startxmlsetups xml:html:code
+ \directhighlight{code}{\xmlflushspacewise{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:html:pre
+ \directhighlight{pre}{\xmlflushspacewise{#1}}
+\stopxmlsetups
+
+\startxmlsetups xml:html:span
+ \xmlflush{#1}
+\stopxmlsetups
+
+\startxmlsetups xml:html:img
+ \ifhmode
+ \dontleavehmode
+ \externalfigure[\xmlatt{#1}{src}]
+ \else
+ \startlinecorrection
+ \externalfigure[\xmlatt{#1}{src}]
+ \stoplinecorrection
+ \fi
+\stopxmlsetups
+
+% tables, maybe we need a generic html table module
+%
+% todo: align
+
+% beware, the padding code is somewhat experimental, eventually the
+% table will be done in cld code
+%
+% we can also use \xmlmap for border etc
+
+\starttexdefinition cssgetsinglepadding #1
+ \ctxlua {
+ context((moduledata.css.padding(
+ "#1",
+ \number\dimexpr0.1ex,
+ \number\dimexpr0.01\hsize,
+ \number\dimexpr1ex,
+ \number\dimexpr1em
+ ))) % returns 4 values therefore ()
+ }sp
+\stoptexdefinition
+
+\startxmlsetups xml:html:table
+ \edef\CellPadding{\xmlatt{#1}{cellpadding}}
+ \ifx\CellPadding\empty
+ \edef\CellPadding{.25ex}
+ \else
+ \edef\CellPadding{\cssgetsinglepadding\CellPadding}
+ \fi
+ \startlinecorrection[blank]
+ \doifelse {\xmlatt{#1}{border}} {0} {
+ \bTABLE[frame=off,offset=\CellPadding]
+ \xmlflush{#1}
+ \eTABLE
+ } {
+ \bTABLE[offset=\CellPadding]
+ \xmlflush{#1}
+ \eTABLE
+ }
+ \stoplinecorrection
+\stopxmlsetups
+
+\startxmlsetups xml:html:thead
+ \bTABLEhead
+ \xmlflush{#1}
+ \eTABLEhead
+\stopxmlsetups
+
+\startxmlsetups xml:html:tbody
+ \bTABLEbody
+ \xmlflush{#1}
+ \eTABLEbody
+\stopxmlsetups
+
+\startxmlsetups xml:html:tfoot
+ \bTABLEfoot
+ \xmlflush{#1}
+ \eTABLEfoot
+\stopxmlsetups
+
+\startxmlsetups xml:html:tr
+ \bTR[ny=\xmlattdef{#1}{rowspan}{1}]
+ \xmlflush{#1}
+ \eTR
+\stopxmlsetups
+
+\startxmlsetups xml:html:td
+ \bTD[nx=\xmlattdef{#1}{colspan}{1}]
+ \xmlflush{#1}
+ \eTD
+\stopxmlsetups
+
+\startxmlsetups xml:html:th
+ \bTH[nx=\xmlattdef{#1}{colspan}{1}]
+ \xmlflush{#1}
+ \eTH
+\stopxmlsetups
+
+% \xmlregistersetup{xml:html:basics}
+
+%D For old times sake:
+
+\startxmlsetups xml:setups:common
+ \xmlsetup{#1}{xml:html:basics}
+ \xmlsetup{#1}{xml:html:tables}
+% \ifconditional\qmlcleanuptwo
+% \xmlsetsetup{#1}{html/br[index() == 1]}{xml:noppes:1}
+% \xmlsetsetup{#1}{html/p[index() == lastindex()]/br[index() == lastindex()]}{xml:noppes:2}
+% \xmlsetsetup{#1}{html/br[index() == lastindex()]}{xml:noppes:3}
+% \xmlsetsetup{#1}{br[name(1) == 'img']}{xml:noppes}
+% \xmlsetsetup{#1}{br[name(1) == 'br' and name(2) == 'img']}{xml:noppes}
+% % \xmlsetsetup{#1}{br/following-sibling::img[position()==1]}{xml:noppes}
+% \fi
+\stopxmlsetups
+
+\stopmodule
+
+\continueifinputfile{x-html.mkiv}
+
+\xmlregistersetup{xml:html:basics}
+\xmlregistersetup{xml:html:tables}
+
+\startxmlsetups xml:whatever
+ \xmlsetsetup {#1} {
+ html|body
+ } {xml:html:*}
+\stopxmlsetups
+
+\xmlregisterdocumentsetup{main}{xml:whatever}
+
+\startxmlsetups xml:html:html
+ \xmlflush{#1}
+\stopxmlsetups
+
+\startxmlsetups xml:html:body
+ \xmlflush{#1}
+\stopxmlsetups
+
+\setuphead[subject][page=yes,style=\bfa]
+
+\starttexdefinition ShowExample#1
+ \startsubject[title=#1]
+ \typebuffer[#1]
+ \starttextrule{result}
+ \xmlprocessbuffer{main}{#1}{}
+ \stoptextrule
+ \stopsubject
+\stoptexdefinition
+
+\starttext
+
+\startbuffer[test 1]
+
+test
+
+test
+
+\stopbuffer
+
+\startbuffer[test 2]
+
+test (followed by a lf)
+test
+
+\stopbuffer
+
+\startbuffer[test 3]
+
+test (followed by a lf and an empty line)
+
+test
+
+\stopbuffer
+
+\startbuffer[test 4]
+
+test (followed by a lf and two empty lines)
+
+
+test
+
+\stopbuffer
+
+\startbuffer[test 5]
+
+test (followed by a br, no lf)
test
+
+\stopbuffer
+
+\startbuffer[test 6]
+
+test (followed by a br with lf)
+test
+
+\stopbuffer
+
+\startbuffer[test 7]
+
+test (followed by a br with lf and an empty line)
+
+test
+
+\stopbuffer
+
+\startbuffer[test 8]
+
+test (followed by a br with lf and two empty lines)
+
+
+test
+
+\stopbuffer
+
+\startbuffer[test 9]
+
+test (followed by bold) bold test
+
+\stopbuffer
+
+\startbuffer[test 10]
+
+test (followed by a lf with bold)
+bold underlined test
+
+\stopbuffer
+
+\startbuffer[test 11]
+
+test (followed by a lf with an empty line and bold)
+
+bold test
+
+\stopbuffer
+
+\startbuffer[test 12]
+
+test (followed by a lf with an empty line and a lf in bold)
+
+
+bold
+ test
+
+\stopbuffer
+
+\startbuffer[test 13]
+
+test (followed by a lf with an empty line and a lf and an empty line in bold)
+
+
+
+bold
+
+ test
+
+\stopbuffer
+
+\dorecurse{13}{\ShowExample{test #1}}
+
+\stoptext
diff --git a/tex/context/base/x-math-svg.lua b/tex/context/base/x-math-svg.lua
new file mode 100644
index 000000000..8a6288167
--- /dev/null
+++ b/tex/context/base/x-math-svg.lua
@@ -0,0 +1,176 @@
+if not modules then modules = { } end modules ['x-math-svg'] = {
+ version = 1.001,
+ comment = "companion to x-math-svg.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local tostring, type, next = tostring, type, next
+local lpegmatch, P, Cs = lpeg.match, lpeg.P, lpeg.Cs
+
+local xmlfirst = xml.first
+local xmlconvert = xml.convert
+local xmlload = xml.load
+local xmlsave = xml.save
+local xmlcollected = xml.collected
+local xmldelete = xml.delete
+
+local loadtable = table.load
+local savetable = table.save
+
+local replacesuffix = file.replacesuffix
+local addsuffix = file.addsuffix
+local removefile = os.remove
+local isfile = lfs.isfile
+
+local formatters = string.formatters
+
+moduledata = moduledata or table.setmetatableindex("table")
+local svgmath = moduledata.svgmath -- autodefined
+
+local namedata = { }
+local pagedata = { }
+
+local statusname = "x-math-svg-status.lua"
+local pdfname = "x-math-svg.pdf"
+
+local pdftosvg = os.which("mudraw")
+
+local f_make_tex = formatters[ [[context --global kpse:x-math-svg.mkvi --inputfile="%s" --svgstyle="%s" --batch --noconsole --once --purgeall]] ]
+local f_make_svg = formatters[ [[mudraw -o "math-%%d.svg" "%s" 1-9999]] ]
+
+----- f_inline = formatters[ [[]] ]
+local f_inline = formatters[ [[]] ]
+local f_display = formatters[ [[]] ]
+local f_style = formatters[ [[vertical-align:%p]] ]
+
+local f_math_tmp = formatters[ [[math-%i]] ]
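-- A quick sketch of what these templates expand to (the file and style names are made
-- up, the substitution is plain string formatting):
--
--   f_make_tex("foo.xhtml","mystyle")
--     -- context --global kpse:x-math-svg.mkvi --inputfile="foo.xhtml" --svgstyle="mystyle" --batch --noconsole --once --purgeall
--   f_make_svg(pdfname)
--     -- mudraw -o "math-%d.svg" "x-math-svg.pdf" 1-9999
--   f_math_tmp(3)
--     -- math-3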
+
+function svgmath.process(filename)
+ if not filename then
+ -- no filename given
+ return
+ elseif not isfile(filename) then
+ -- invalid filename
+ return
+ end
+ local index = 0
+ local page = 0
+ local blobs = { }
+ local root = xmlload(filename)
+ for mth in xmlcollected(root,"math") do
+ index = index + 1
+ local blob = tostring(mth)
+ if blobs[blob] then
+ context.ReuseSVGMath(index,blobs[blob])
+ else
+ page = page + 1
+ buffers.assign(f_math_tmp(page),blob)
+ context.MakeSVGMath(index,page,mth.at.display)
+ blobs[blob] = page
+ end
+ end
+ context(function()
+ -- for tracing purposes:
+ for mathdata, pagenumber in next, blobs do
+ local p = pagedata[pagenumber]
+ p.mathml = mathdata
+ p.number = pagenumber
+ end
+ --
+ savetable(statusname, {
+ pagedata = pagedata,
+ namedata = namedata,
+ })
+ end)
+end
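-- A standalone sketch of the dedup idea used above: identical mathml blobs are typeset
-- (and later converted) only once, later occurrences simply reuse the page number. The
-- helper name is hypothetical, it just mirrors the branch in the loop:

local blobs_sketch, page_sketch = { }, 0

local function pageof(blob)
    local p = blobs_sketch[blob]
    if not p then
        page_sketch  = page_sketch + 1
        p            = page_sketch
        blobs_sketch[blob] = p
    end
    return p
end

print(pageof("<math>x</math>"), pageof("<math>y</math>"), pageof("<math>x</math>")) -- 1 2 1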
+
+function svgmath.register(index,page,specification)
+ if specification then
+ pagedata[page] = specification
+ end
+ namedata[index] = page
+end
+
+function svgmath.convert(filename,svgstyle)
+ if not filename then
+ -- no filename given
+ return false, "no filename"
+ elseif not isfile(filename) then
+ -- invalid filename
+ return false, "invalid filename"
+ elseif not pdftosvg then
+ return false, "mudraw is not installed"
+ end
+
+ os.execute(f_make_tex(filename,svgstyle))
+
+ local data = loadtable(statusname)
+ if not data then
+ -- invalid tex run
+ return false, "invalid tex run"
+ elseif not next(data) then
+ return false, "no converson needed"
+ end
+
+ local pagedata = data.pagedata
+ local namedata = data.namedata
+
+ os.execute(f_make_svg(pdfname))
+
+ local root = xmlload(filename)
+ local index = 0
+ local done = { }
+ local unique = 0
+
+ local between = (1-P("<"))^1/""
+ local strip = Cs((
+ (P(""))^1) * P("")) * between^0 / "" +
+ P(">") * between +
+ P(1)
+ )^1)
+
+ for mth in xmlcollected(root,"m:math") do
+ index = index + 1
+ local page = namedata[index]
+ if done[page] then
+ mth.__p__.dt[mth.ni] = done[page]
+ else
+ local info = pagedata[page]
+ local depth = info.depth
+ local mode = info.mode
+ local svgname = addsuffix(f_math_tmp(page),"svg")
+ local action = mode == "inline" and f_inline or f_display
+ -- local x_div = xmlfirst(xmlconvert(action(-depth)),"/div")
+ local x_div = xmlfirst(xmlconvert(action()),"/div")
+ local svgdata = io.loaddata(svgname)
+ if not svgdata or svgdata == "" then
+ print("error in:",svgname,tostring(mth))
+ else
+ -- svgdata = string.gsub(svgdata,">%s<","")
+ svgdata = lpegmatch(strip,svgdata)
+ local x_svg = xmlfirst(xmlconvert(svgdata),"/svg")
+ -- xmldelete(x_svg,"text")
+if mode == "inline" then
+ x_svg.at.style = f_style(-depth)
+end
+
+ x_div.dt = { x_svg }
+ mth.__p__.dt[mth.ni] = x_div -- use helper
+ end
+ done[page] = x_div
+ unique = unique + 1
+ end
+ end
+
+-- for k, v in next, data do
+-- removefile(addsuffix(k,"svg"))
+-- end
+-- removefile(statusname)
+-- removefile(pdfname)
+
+ xmlsave(root,filename)
+
+ return true, index, unique
+end
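-- A minimal usage sketch; the file name and svg style are made up and the style string
-- is just handed on to the x-math-svg.mkvi run. On success convert() returns true plus
-- the number of formulas seen and the number of unique renderings, on failure false plus
-- a message:
--
--   local done, n, unique = moduledata.svgmath.convert("document.xhtml","mystyle")
--   if done then
--       print(n .. " formulas, " .. unique .. " unique renderings")
--   else
--       print("conversion failed: " .. n) -- here the second value is the message
--   end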
diff --git a/tex/context/base/x-mathml-basics.mkiv b/tex/context/base/x-mathml-basics.mkiv
new file mode 100644
index 000000000..e166995b0
--- /dev/null
+++ b/tex/context/base/x-mathml-basics.mkiv
@@ -0,0 +1,276 @@
+% macros=mkvi
+
+% makes sense (but rel vs op ...):
+
+% \unexpanded\def\stackrel#1#2{\mathematics{\mathop{\let\limits\relax\mover{#2}{#1}}}}
+
+% this can become a core helper
+
+% we could do all of them in lua
+
+\startluacode
+local find = string.find
+local lpegmatch = lpeg.match
+
+local splitter = lpeg.Ct(lpeg.C(lpeg.patterns.nestedbraces + lpeg.patterns.utf8character)^1)
+
+function commands.xmfenced(left,middle,right,content)
+ local l = left ~= "" and left or "("
+ local r = right ~= "" and right or ")"
+ local m = middle ~= "" and middle and lpegmatch(splitter,middle) or { "," }
+ local c = find(content,"{") and lpegmatch(splitter,content) or { content }
+ local n = #c
+ if n > 1 then
+ context("\\left%s",l)
+ for i=1,n do
+ if i > 1 then
+ context("%s %s",m[i] or m[#m],c[i])
+ else
+ context(c[i])
+ end
+ end
+ context("\\right%s",r)
+ else
+ context("\\left%s %s \\right%s",l,content,r)
+ end
+end
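-- For the record, what the splitter above delivers (nestedbraces matches a balanced
-- {...} group including the braces), e.g. for the test cases at the end of this file:
--
--   lpegmatch(splitter,"{1+a}{1+b}") -- { "{1+a}", "{1+b}" }
--   lpegmatch(splitter,"1+a")        -- { "1", "+", "a" }
--
-- which is why xmfenced only applies it when the content actually contains a brace.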
+
+\stopluacode
+
+\unprotect
+
+\unexpanded\def\mexecuteifdefined#1%
+ {\ifx#1\empty
+ \expandafter\secondoftwoarguments
+ \else\ifcsname#1\endcsname
+ \doubleexpandafter\firstoftwoarguments
+ \else
+ \doubleexpandafter\secondoftwoarguments
+ \fi\fi
+ {\csname#1\endcsname}}
+
+% mrow
+
+\let\mrow\mathematics
+
+% msub msup msubsup
+
+\starttexdefinition msub #1#2
+ \mathematics {
+ #1_{#2}
+ }
+\stoptexdefinition
+
+\starttexdefinition msup #1#2
+ \mathematics {
+ #1^{#2}
+ }
+\stoptexdefinition
+
+\starttexdefinition msubsup #1#2#3
+ \mathematics {
+ #1_{#2}^{#3}
+ }
+\stoptexdefinition
+
+% mn mo mi
+
+\let\mn\mathematics
+\let\mo\mathematics
+\let\mi\mathematics
+
+% ms mtext
+
+\starttexdefinition ms #1
+ \text {
+ "#1"
+ }
+\stoptexdefinition
+
+\starttexdefinition mtext #1
+ \text {
+ #1
+ }
+\stoptexdefinition
+
+% mover
+
+\starttexdefinition unexpanded moverabove #1
+ \edef\movercommand{\utfmathfiller\movertoken}
+ \mexecuteifdefined\movercommand {#1} \relax
+\stoptexdefinition
+\starttexdefinition unexpanded moverbase #1
+ \edef\mbasecommand{\utfmathfiller\mbasetoken}
+ \mexecuteifdefined\mbasecommand {#1}
+ \relax
+\stoptexdefinition
+\starttexdefinition unexpanded moverbasefiller #1#2
+ \edef\mbasecommand{e\utfmathcommandfiller\mbasetoken}
+ \mexecuteifdefined\mbasecommand \relax {#2} {}
+\stoptexdefinition
+\starttexdefinition unexpanded moveraccent #1#2
+ \edef\movercommand{\utfmathcommandabove\movertoken}
+ \mexecuteifdefined\movercommand \relax {#1}
+\stoptexdefinition
+\starttexdefinition unexpanded movertext #1#2
+ % \mathtriplet {\mathstylehbox{#1}} {#2} {}
+ \mathtriplet {\mathematics{#1}} {#2} {}
+\stoptexdefinition
+\starttexdefinition unexpanded moveraccentchecker #1#2
+ \edef\movertoken{\tochar{#2}}
+ \doifelseutfmathabove\movertoken \moveraccent \movertext {#1}{#2}
+\stoptexdefinition
+
+\starttexdefinition unexpanded mover #1#2
+ \mathematics {
+ \edef\mbasetoken{\tochar{#1}}
+ \doifelseutfmathfiller\mbasetoken \moverbasefiller \moveraccentchecker {#1}{#2}
+ }
+\stoptexdefinition
+
+% munder
+
+\starttexdefinition unexpanded munderbelow #1
+ \edef\mundercommand{\utfmathfiller\mundertoken}
+ \mexecuteifdefined\mundercommand {#1} \relax
+\stoptexdefinition
+\starttexdefinition unexpanded munderbase #1
+ \edef\mbasecommand{\utfmathfiller\mbasetoken}
+ \mexecuteifdefined\mbasecommand {#1}
+ \relax
+\stoptexdefinition
+\starttexdefinition unexpanded munderbasefiller #1#2
+ \edef\mbasecommand{e\utfmathcommandfiller\mbasetoken}
+ \mexecuteifdefined\mbasecommand \relax {#2} {}
+\stoptexdefinition
+\starttexdefinition unexpanded munderaccent #1#2
+ \edef\mundercommand{\utfmathcommandbelow\mundertoken}
+ \mexecuteifdefined\mundercommand \relax {#1}
+\stoptexdefinition
+\starttexdefinition unexpanded mundertext #1#2
+ % \mathtriplet {\mathstylehbox{#1}} {} {#2}
+ \mathtriplet {\mathematics{#1}} {} {#2}
+\stoptexdefinition
+\starttexdefinition unexpanded munderaccentchecker #1#2
+ \edef\mundertoken{\tochar{#2}}
+ \doifelseutfmathbelow\mundertoken \munderaccent \mundertext {#1}{#2}
+\stoptexdefinition
+
+\starttexdefinition unexpanded munder #1#2
+ \mathematics {
+ \edef\mbasetoken{\tochar{#1}}
+ \doifelseutfmathfiller\mbasetoken \munderbasefiller \munderaccentchecker {#1}{#2}
+ }
+\stoptexdefinition
+
+% munderover
+
+% mfenced
+
+% \mfenced{x,y}
+% \mfenced{{x}{y}}
+% \mfenced[separators]{{x}{y}}
+% \mfenced[left][right]{{x}{y}}
+% \mfenced[left][separators][right]{{x}{y}}
+
+\starttexdefinition unexpanded mfenced
+ \dotripleempty\do_mfenced
+\stoptexdefinition
+
+\starttexdefinition unexpanded do_mfenced [#1][#2][#3]#4
+ \mathematics {
+ \ctxcommand{xmfenced(
+ \ifthirdargument "#1","#2","#3"\else
+ \ifsecondargument "#1",",","#2"\else
+ \iffirstargument "(","#1",")"\else
+ "(",",",")"\fi\fi\fi
+ ,"#4")}
+ }
+\stoptexdefinition
+
+% mfrac
+
+\starttexdefinition unexpanded mfrac #1#2
+ \mathematics {
+ \frac{#1}{#2}
+ }
+\stoptexdefinition
+
+% mroot msqrt
+
+\starttexdefinition unexpanded mroot #1#2
+ \mathematics {
+ \sqrt[#1]{#2}
+ }
+\stoptexdefinition
+
+\starttexdefinition unexpanded msqrt #1
+ \mathematics {
+ \sqrt{#1}
+ }
+\stoptexdefinition
+
+% menclose
+
+% merror
+
+% mglyph
+
+% mmultiscripts
+
+% mpadded
+
+% mphantom
+
+% mspace
+
+% mstyle
+
+% mtable mtr mlabeledtr mtd
+
+% maction
+
+% semantics
+
+\protect
+
+\continueifinputfile{x-mathml-basics.mkiv}
+
+\starttext
+
+$\mfenced{1+a}$\par
+$\mfenced[,]{1+a}$\par
+$\mfenced[,]{{1+a}{1+b}}$\par
+
+% $\mover{←}{test}$\par
+% $\mover{\utfchar{"2190}}{test}$\par
+% $\mover{e:leftarrow}{test}$\par
+% $\mover{x:2190}{test}$\par
+
+% $\mover{test}{⏞}$\par
+% $\mover{test}{\utfchar{"23DE}}$\par
+% $\mover{test}{e:overbrace}$\par
+% $\mover{test}{x:23DE}$\par
+% $\mover{test}{over}$\par
+
+% \mover{test}{⏞}\par
+% \mover{test}{\utfchar{"23DE}}\par
+% \mover{test}{e:overbrace}\par
+% \mover{test}{x:23DE}\par
+
+% $\munder{←}{test}$\par
+% $\munder{\utfchar{"2190}}{test}$\par
+% $\munder{e:leftarrow}{test}$\par
+% $\munder{x:2190}{test}$\par
+
+% $\munder{test}{⏟}$\par
+% $\munder{test}{\utfchar{"23DF}}$\par
+% $\munder{test}{e:underbrace}$\par
+% $\munder{test}{x:23DF}$\par
+% $\munder{test}{under}$\par
+
+% \math{{\msup{x}{2}\mo{+}\mn{2}\mi{x}\mo{+}\mi{b}}}
+
+% \mrow{\msup{x}{2}\mo{+}\mn{2}\mi{x}\mo{+}\mi{b}}
+
+\stoptext
diff --git a/tex/context/base/x-mathml-html.mkiv b/tex/context/base/x-mathml-html.mkiv
new file mode 100644
index 000000000..2ac7d3cba
--- /dev/null
+++ b/tex/context/base/x-mathml-html.mkiv
@@ -0,0 +1,40 @@
+%D \module
+%D [ file=x-mathml,
+%D version=2014.05.18,
+%D title=\CONTEXT\ XML Modules,
+%D subtitle=\MATHML\ embedded HTML,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% maybe some more
+
+\startmodule [mathml-html]
+
+\startxmlsetups mml:html:b
+ \bold{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups mml:html:i
+ \italic{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups mml:html:tt
+ \mono{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups mml:html:em
+ \emphasized{\xmlflush{#1}}
+\stopxmlsetups
+
+\startxmlsetups mml:html
+ \xmlsetsetup{#1}{mml:b|mml:i|mml:tt|mml:em}{mml:html:*}
+\stopxmlsetups
+
+\xmlregistersetup{mml:html}
+
+\stopmodule
diff --git a/tex/context/base/x-mathml.lua b/tex/context/base/x-mathml.lua
index cd60e756d..a0db339bc 100644
--- a/tex/context/base/x-mathml.lua
+++ b/tex/context/base/x-mathml.lua
@@ -6,20 +6,56 @@ if not modules then modules = { } end modules ['x-mathml'] = {
license = "see context related readme files"
}
--- This needs an upgrade to the latest greatest mechanisms.
+-- This needs an upgrade to the latest greatest mechanisms. But ... it
+-- probably doesn't pay back as no mathml support ever did.
local type, next = type, next
-local format, lower, find, gsub = string.format, string.lower, string.find, string.gsub
+local formatters, lower, find, gsub, match = string.formatters, string.lower, string.find, string.gsub, string.match
local strip = string.strip
-local xmlsprint, xmlcprint, xmltext, xmlcontent = xml.sprint, xml.cprint, xml.text, xml.content
+local xmlsprint, xmlcprint, xmltext, xmlcontent, xmlempty = xml.sprint, xml.cprint, xml.text, xml.content, xml.empty
+local lxmlcollected, lxmlfilter = lxml.collected, lxml.filter
local getid = lxml.getid
-local utfchar, utfcharacters, utfvalues = utf.char, utf.characters, utf.values
-local lpegmatch = lpeg.match
+local utfchar, utfcharacters, utfvalues, utfsplit, utflen = utf.char, utf.characters, utf.values, utf.split, utf.len
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local P, Cs = lpeg.P, lpeg.Cs
local mathml = { }
moduledata.mathml = mathml
lxml.mathml = mathml -- for the moment
+local context = context
+
+local ctx_enabledelimiter = context.enabledelimiter
+local ctx_disabledelimiter = context.disabledelimiter
+local ctx_xmlflush = context.xmlflush -- better xmlsprint
+
+local ctx_halign = context.halign
+local ctx_noalign = context.noalign
+local ctx_bgroup = context.bgroup
+local ctx_egroup = context.egroup
+local ctx_crcr = context.crcr
+
+local ctx_bTABLE = context.bTABLE
+local ctx_eTABLE = context.eTABLE
+local ctx_bTR = context.bTR
+local ctx_eTR = context.eTR
+local ctx_bTD = context.bTD
+local ctx_eTD = context.eTD
+
+local ctx_mn = context.mn
+local ctx_mi = context.mi
+local ctx_mo = context.mo
+local ctx_startimath = context.startimath
+local ctx_ignorespaces = context.ignorespaces
+local ctx_removeunwantedspaces = context.removeunwantedspaces
+local ctx_stopimath = context.stopimath
+
+local ctx_mmlapplycsymbol = context.mmlapplycsymbol
+
+local ctx_mathopnolimits = context.mathopnolimits
+local ctx_left = context.left
+local ctx_right = context.right
+
-- an alternative is to remap to private codes, where we can have
-- different properties .. to be done; this will move and become
-- generic; we can then make the private ones active in math mode
@@ -62,6 +98,7 @@ local o_replacements = { -- in main table
["{"] = "\\mmlleftdelimiter \\lbrace",
["}"] = "\\mmlrightdelimiter\\rbrace",
["|"] = "\\mmlleftorrightdelimiter\\vert",
+ -- ["."] = "\\mmlleftorrightdelimiter.",
["/"] = "\\mmlleftorrightdelimiter\\solidus",
[doublebar] = "\\mmlleftorrightdelimiter\\Vert",
["("] = "\\mmlleftdelimiter(",
@@ -82,8 +119,9 @@ local o_replacements = { -- in main table
-- [utfchar(0xF103C)] = "\\mmlleftdelimiter<",
[utfchar(0xF1026)] = "\\mmlchar{38}",
+ [utfchar(0x02061)] = "", -- function applicator sometimes shows up in font
-- [utfchar(0xF103E)] = "\\mmlleftdelimiter>",
-
+ -- [utfchar(0x000AF)] = '\\mmlchar{"203E}', -- 0x203E
}
local simpleoperatorremapper = utf.remapper(o_replacements)
@@ -466,20 +504,39 @@ function mathml.stripped(str)
context(strip(str))
end
+local p_entity = (P("&") * ((1-P(";"))^0) * P(";"))
+local p_utfchar = lpegpatterns.utf8character
+local p_spacing = lpegpatterns.whitespace^1
+
+local p_mn = Cs((p_entity/"" + p_spacing/utfchar(0x205F) + p_utfchar/n_replacements)^0)
+local p_strip = Cs((p_entity/"" + p_utfchar )^0)
+local p_mi = Cs((p_entity/"" + p_utfchar/i_replacements)^0)
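-- Stated differently (a sketch of the intended behaviour; entities are simply dropped
-- and the characters are filtered through the replacement tables defined earlier in
-- this file):
--
--   lpegmatch(p_strip,"x&ApplyFunction;(y)") -- x(y)
--   lpegmatch(p_mn,"12 345")                 -- "12" .. utfchar(0x205F) .. "345", digits remapped via n_replacements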
+
+-- function mathml.mn(id,pattern)
+-- -- maybe at some point we need to interpret the number, but
+-- -- currently we assume an upright font
+-- local str = xmlcontent(getid(id)) or ""
+-- local rep = gsub(str,"&.-;","")
+-- local rep = gsub(rep,"(%s+)",utfchar(0x205F)) -- medspace e.g.: twenty one (nbsp is not seen)
+-- local rep = gsub(rep,".",n_replacements)
+-- ctx_mn(rep)
+-- end
+
function mathml.mn(id,pattern)
-- maybe at some point we need to interpret the number, but
-- currently we assume an upright font
- local str = xmlcontent(getid(id)) or ""
- local rep = gsub(str,"&.-;","")
- local rep = gsub(rep,"(%s+)",utfchar(0x205F)) -- medspace e.g.: twenty one (nbsp is not seen)
- local rep = gsub(rep,".",n_replacements)
- context.mn(rep)
+ ctx_mn(lpegmatch(p_mn,xmlcontent(getid(id)) or ""))
end
+-- function mathml.mo(id)
+-- local str = xmlcontent(getid(id)) or ""
+-- local rep = gsub(str,"&.-;","") -- todo
+-- context(simpleoperatorremapper(rep) or rep)
+-- end
+
function mathml.mo(id)
- local str = xmlcontent(getid(id)) or ""
- local rep = gsub(str,"&.-;","") -- todo
- context(simpleoperatorremapper(rep))
+ local str = lpegmatch(p_strip,xmlcontent(getid(id)) or "")
+ context(simpleoperatorremapper(str) or str)
end
function mathml.mi(id)
@@ -491,34 +548,45 @@ function mathml.mi(id)
if n == 0 then
-- nothing to do
elseif n == 1 then
- local str = gsub(str[1],"&.-;","") -- bah
- local rep = i_replacements[str]
- if not rep then
- rep = gsub(str,".",i_replacements)
+ local first = str[1]
+ if type(first) == "string" then
+ -- local str = gsub(first,"&.-;","") -- bah
+ -- local rep = i_replacements[str]
+ -- if not rep then
+ -- rep = gsub(str,".",i_replacements)
+ -- end
+ local str = lpegmatch(p_strip,first)
+ local rep = i_replacements[str] or lpegmatch(p_mi,str)
+ context(rep)
+ -- ctx_mi(rep)
+ else
+ ctx_xmlflush(id) -- xmlsprint or so
end
- context(rep)
- -- context.mi(rep)
else
- context.xmlflush(id) -- xmlsprint or so
+ ctx_xmlflush(id) -- xmlsprint or so
end
else
- context.xmlflush(id) -- xmlsprint or so
+ ctx_xmlflush(id) -- xmlsprint or so
end
end
function mathml.mfenced(id) -- multiple separators
id = getid(id)
- local left, right, separators = id.at.open or "(", id.at.close or ")", id.at.separators or ","
- local l, r = l_replacements[left], r_replacements[right]
- context.enabledelimiter()
+ local at = id.at
+ local left = at.open or "("
+ local right = at.close or ")"
+ local separators = at.separators or ","
+ local l = l_replacements[left]
+ local r = r_replacements[right]
+ ctx_enabledelimiter()
if l then
context(l_replacements[left] or o_replacements[left] or "")
else
context(o_replacements["@l"])
context(left)
end
- context.disabledelimiter()
- local collected = lxml.filter(id,"/*") -- check the *
+ ctx_disabledelimiter()
+ local collected = lxmlfilter(id,"/*") -- check the *
if collected then
local n = #collected
if n == 0 then
@@ -526,7 +594,7 @@ function mathml.mfenced(id) -- multiple separators
elseif n == 1 then
xmlsprint(collected[1]) -- to be checked
else
- local t = utf.split(separators,true)
+ local t = utfsplit(separators,true)
for i=1,n do
xmlsprint(collected[i]) -- to be checked
if i < n then
@@ -545,35 +613,16 @@ function mathml.mfenced(id) -- multiple separators
end
end
end
- context.enabledelimiter()
+ ctx_enabledelimiter()
if r then
context(r_replacements[right] or o_replacements[right] or "")
else
context(right)
context(o_replacements["@r"])
end
- context.disabledelimiter()
+ ctx_disabledelimiter()
end
---~ local function flush(e,tag,toggle)
---~ if toggle then
---~ context("^{")
---~ else
---~ context("_{")
---~ end
---~ if tag == "none" then
---~ context("{}")
---~ else
---~ xmlsprint(e.dt)
---~ end
---~ if not toggle then
---~ context("}")
---~ else
---~ context("}{}")
---~ end
---~ return not toggle
---~ end
-
local function flush(e,tag,toggle)
if tag == "none" then
-- if not toggle then
@@ -593,7 +642,7 @@ end
function mathml.mmultiscripts(id)
local done, toggle = false, false
- for e in lxml.collected(id,"/*") do
+ for e in lxmlcollected(id,"/*") do
local tag = e.tg
if tag == "mprescripts" then
context("{}")
@@ -603,14 +652,14 @@ function mathml.mmultiscripts(id)
end
end
local done, toggle = false, false
- for e in lxml.collected(id,"/*") do
+ for e in lxmlcollected(id,"/*") do
local tag = e.tg
if tag == "mprescripts" then
break
elseif done then
toggle = flush(e,tag,toggle)
else
- xmlsprint(e.dt)
+ xmlsprint(e)
done = true
end
end
@@ -645,12 +694,12 @@ function mathml.mcolumn(root)
local tag = e.tg
if tag == "mi" or tag == "mn" or tag == "mo" or tag == "mtext" then
local str = xmltext(e)
- str = gsub(str,"&.-;","")
+ str = lpegmatch(p_strip,str)
for s in utfcharacters(str) do
m[#m+1] = { tag, s }
end
if tag == "mn" then
- local n = utf.len(str)
+ local n = utflen(str)
if n > numbers then
numbers = n
end
@@ -664,20 +713,20 @@ function mathml.mcolumn(root)
-- m[#m+1] = { tag, e }
end
end
- for e in lxml.collected(root,"/*") do
+ for e in lxmlcollected(root,"/*") do
local m = { }
matrix[#matrix+1] = m
if e.tg == "mrow" then
-- only one level
- for e in lxml.collected(e,"/*") do
+ for e in lxmlcollected(e,"/*") do
collect(m,e)
end
else
collect(m,e)
end
end
- context.halign()
- context.bgroup()
+ ctx_halign()
+ ctx_bgroup()
context([[\hss\startimath\alignmark\stopimath\aligntab\startimath\alignmark\stopimath\cr]])
for i=1,#matrix do
local m = matrix[i]
@@ -689,7 +738,7 @@ function mathml.mcolumn(root)
end
end
if mline then
- context.noalign([[\obeydepth\nointerlineskip]])
+ ctx_noalign([[\obeydepth\nointerlineskip]])
end
for j=1,#m do
local mm = m[j]
@@ -732,9 +781,9 @@ function mathml.mcolumn(root)
local nchr = n_replacements[chr]
context(nchr or chr)
end
- context.crcr()
+ ctx_crcr()
end
- context.egroup()
+ ctx_egroup()
end
local spacesplitter = lpeg.tsplitat(" ")
@@ -752,42 +801,47 @@ function mathml.mtable(root)
local framespacing = at.framespacing or "0pt"
local framespacing = at.framespacing or "-\\ruledlinewidth" -- make this an option
- context.bTABLE { frame = frametypes[frame or "none"] or "off", offset = framespacing }
- for e in lxml.collected(root,"/(mml:mtr|mml:mlabeledtr)") do
- context.bTR()
+ ctx_bTABLE { frame = frametypes[frame or "none"] or "off", offset = framespacing, background = "" } -- todo: use xtables and definextable
+ for e in lxmlcollected(root,"/(mml:mtr|mml:mlabeledtr)") do
+ ctx_bTR()
local at = e.at
local col = 0
local rfr = at.frame or (frames and frames [#frames])
local rra = at.rowalign or (rowaligns and rowaligns [#rowaligns])
local rca = at.columnalign or (columnaligns and columnaligns[#columnaligns])
local ignorelabel = e.tg == "mlabeledtr"
- for e in lxml.collected(e,"/mml:mtd") do -- nested we can use xml.collected
+ for e in lxmlcollected(e,"/mml:mtd") do -- nested we can use xml.collected
col = col + 1
if ignorelabel and col == 1 then
-- get rid of label, should happen at the document level
else
local at = e.at
- local rowspan, columnspan = at.rowspan or 1, at.columnspan or 1
+ local rowspan = at.rowspan or 1
+ local columnspan = at.columnspan or 1
local cra = rowalignments [at.rowalign or (rowaligns and rowaligns [col]) or rra or "center"] or "lohi"
local cca = columnalignments[at.columnalign or (columnaligns and columnaligns[col]) or rca or "center"] or "middle"
local cfr = frametypes [at.frame or (frames and frames [col]) or rfr or "none" ] or "off"
- context.bTD { align = format("{%s,%s}",cra,cca), frame = cfr, nx = columnspan, ny = rowspan }
- context.startimath()
- context.ignorespaces()
- xmlcprint(e)
- context.stopimath()
- context.removeunwantedspaces()
- context.eTD()
+ ctx_bTD { align = formatters["{%s,%s}"](cra,cca), frame = cfr, nx = columnspan, ny = rowspan }
+ if xmlempty(e,".") then
+ -- nothing, else hsize max
+ else
+ ctx_startimath()
+ -- ctx_ignorespaces()
+ xmlcprint(e)
+ -- ctx_removeunwantedspaces()
+ ctx_stopimath()
+ end
+ ctx_eTD()
end
end
-- if e.tg == "mlabeledtr" then
- -- context.bTD()
+ -- ctx_bTD()
-- xmlcprint(xml.first(e,"/!mml:mtd"))
- -- context.eTD()
+ -- ctx_eTD()
-- end
- context.eTR()
+ ctx_eTR()
end
- context.eTABLE()
+ ctx_eTABLE()
end
function mathml.csymbol(root)
@@ -798,14 +852,16 @@ function mathml.csymbol(root)
local full = hash.original or ""
local base = hash.path or ""
local text = strip(xmltext(root) or "")
- context.mmlapplycsymbol(full,base,encoding,text)
+ ctx_mmlapplycsymbol(full,base,encoding,text)
end
+local p = lpeg.Cs(((1-lpegpatterns.whitespace)^1 / "mml:enclose:%0" + (lpegpatterns.whitespace^1)/",")^1)
+
function mathml.menclosepattern(root)
root = getid(root)
local a = root.at.notation
if a and a ~= "" then
- context("mml:enclose:",(gsub(a," +",",mml:enclose:")))
+ context(lpegmatch(p,a))
end
end
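-- The lpeg above reproduces the old gsub: every space separated notation word gets the
-- mml:enclose: prefix and the words become a comma separated list. A standalone check
-- (plain lua plus lpeg, with whitespace defined locally instead of via lpeg.patterns):

local S, Cs, lpegmatch = lpeg.S, lpeg.Cs, lpeg.match
local whitespace = S(" \t\n\r")
local check      = Cs(((1-whitespace)^1/"mml:enclose:%0" + whitespace^1/",")^1)
print(lpegmatch(check,"box left")) -- mml:enclose:box,mml:enclose:left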
@@ -816,8 +872,8 @@ end
function mathml.cpolar_a(root)
root = getid(root)
local dt = root.dt
- context.mathopnolimits("Polar")
- context.left(false,"(")
+ ctx_mathopnolimits("Polar")
+ ctx_left(false,"(")
for k=1,#dt do
local dk = dt[k]
if xml.is_element(dk,"sep") then
@@ -826,5 +882,15 @@ function mathml.cpolar_a(root)
xmlsprint(dk)
end
end
- context.right(false,")")
+ ctx_right(false,")")
+end
+
+-- crap .. maybe in char-def a mathml overload
+
+local mathmleq = {
+ [utfchar(0x00AF)] = utfchar(0x203E),
+}
+
+function mathml.extensible(chr)
+ context(mathmleq[chr] or chr)
end
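-- So a macron coming in from the source is handed back to TeX as a proper overline
-- character while anything else passes through untouched (a sketch):
--
--   mathml.extensible(utfchar(0x00AF)) -- pipes utfchar(0x203E) to the TeX end
--   mathml.extensible("x")             -- just pipes "x" through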
diff --git a/tex/context/base/x-mathml.mkiv b/tex/context/base/x-mathml.mkiv
index ec8fd74e4..65a7223ce 100644
--- a/tex/context/base/x-mathml.mkiv
+++ b/tex/context/base/x-mathml.mkiv
@@ -1,4 +1,4 @@
-%D \modul
+%D \module
%D [ file=x-mathml,
%D version=2008.05.29,
%D title=\CONTEXT\ XML Modules,
@@ -16,12 +16,17 @@
% This module is under construction and will be cleaned up. We use a funny mix of
% xml, tex and lua. I could rewrite the lot but it also shows how context evolves.
%
-% I might en dup with a lua-only implementation some day.
+% I might end up with a lua-only implementation some day. I must find a good reason
+% to spend time on it. In fact, it might even be more messy.
%
% no m:text strip (needs checking, maybe nbsp is mandated)
%
% todo: more will be moved to lua (less hassle)
% todo: move left/right to the lua end
+%
+% this implementation looks like a hack ... this is because we deal with all the weird cases we
+% ran into, including abuse that was supposed to render ok (even if it didn't in other
+% renderers) .. it was simply expected to work that way.
\writestatus{loading}{ConTeXt XML Macros / MathML Renderer}
@@ -51,10 +56,10 @@
\xmlregistersetup{xml:mml:define}
-\unexpanded\def\MMLhack
- {\let\MMLpar\par
- \let\par\relax
- \everyvbox{\let\par\MMLpar}}
+% \unexpanded\def\MMLhack % no longer needed
+% {\let\MMLpar\par
+% \let\par\relax
+% \everyvbox{\let\par\MMLpar}}
\xmlmapvalue {mml:math:mode} {display} {\displaymathematics} % we had this already
\xmlmapvalue {mml:math:mode} {inline} {\inlinemathematics }
@@ -75,7 +80,7 @@
}
{
\math_fences_checked_start
- \MMLhack
+ %\MMLhack
\xmlflush{#1}
\math_fences_checked_stop
}
@@ -85,7 +90,7 @@
\startxmlsetups mml:imath
\inlinemathematics {
\math_fences_checked_start
- \MMLhack
+ %\MMLhack
\xmlflush{#1}
\math_fences_checked_stop
}
@@ -94,7 +99,7 @@
\startxmlsetups mml:dmath
\displaymathematics {
\math_fences_checked_start
- \MMLhack
+ %\MMLhack
\xmlflush{#1}
\math_fences_checked_stop
}
@@ -106,7 +111,10 @@
\edef\mmlformulalabel {\xmlatt{#1}{label}\xmlatt{#1}{id}}
\edef\mmlformulasublabel{\xmlatt{#1}{sublabel}\xmlatt{#1}{id}}
\doifsomething\mmlformulalabel{\placeformula[\mmlformulalabel]{\mmlformulasublabel}}
- \startformula\MMLhack\xmlfirst{#1}{/mml:math}\stopformula
+ \startformula
+ %\MMLhack
+ \xmlfirst{#1}{/mml:math}
+ \stopformula
\stopxmlsetups
% old delimiter hacks
@@ -164,11 +172,6 @@
%D filter. There is an intermediate cleaner module but it has
%D some namespace limitations. Here we do it the \MKIV\ way.
-\def\widevec#1%
- {\vbox{\mathsurround\zeropoint\ialign{##\crcr
- \rightarrowfill\crcr\noalign{\nointerlineskip}%
- \startimath\hfil\displaystyle{#1}\hfil\stopimath\crcr}}}
-
%D The rendering macros:
\def\MMLrm{\mr}
@@ -209,7 +212,9 @@
%D We start with the parent elements and the option handler.
-\def\xmlmathmldirective#1{\dosetvalue{MML#1}}
+\unexpanded\def\xmlmathmldirective#1{\dosetvalue{MML#1}}
+
+\xmlinstalldirective{mathml}{xmlmathmldirective}
%def\xmlmathmldirective#1#2#3{[#1][#2][#3]\dosetvalue{MML#1}{#2}{#3}}
@@ -378,7 +383,7 @@
% \MMLdoR
% } {
\edef\mmlapplyaction{\xmlfilter{#1}{/*/name()}}
- \doifsetupselse {mml:apply:mml:\mmlapplyaction} {
+ \doifelsesetups {mml:apply:mml:\mmlapplyaction} {
\xmlsetup{#1}{mml:apply:mml:\mmlapplyaction}
} {
% \MMLdoL
@@ -399,7 +404,7 @@
\startxmlsetups mml:apply:mml:fn
\xmldoifelse {#1} {/mml:fn/mml:ci} {
\edef\mmlfnci{\xmlstripped{#1}{/mml:fn/mml:ci}}% was xmlcontent
- \doifsetupselse{mmc:fn:\mmlfnci} { % was mmc:fn:...
+ \doifelsesetups{mmc:fn:\mmlfnci} { % was mmc:fn:...
\xmlsetup{#1}{mmc:fn:\mmlfnci} % \MMLdoL/MMLdoR to be handled in plugin
} {
\MMLcreset
@@ -466,7 +471,7 @@
\fi
\xmldoifelse {#1} {/mml:ci} { % first
\edef\mmlfnci{\xmltext{#1}{/mml:ci}}% was xmlcontent
- \doifsetupselse{mmc:fn:\mmlfnci} { % was mmc:fn:...
+ \doifelsesetups{mmc:fn:\mmlfnci} { % was mmc:fn:...
\xmlsetup{#1}{mmc:fn:\mmlfnci} % \MMLdoL/MMLdoR to be handled in plugin
} {
\MMLcreset
@@ -497,7 +502,7 @@
\startxmlsetups mmc:fn:apply % where used?
\xmldoifelse {#1} {/mml:ci} { % first
\edef\mmlfnci{\xmltext{#1}{/mml:ci}}% was xmlcontent
- \doifsetupselse{mmc:fn:\mmlfnci} { % was mmc:fn:...
+ \doifelsesetups{mmc:fn:\mmlfnci} { % was mmc:fn:...
\xmlsetup{#1}{mmc:fn:\mmlfnci} % \MMLdoL/MMLdoR to be handled in plugin
} {
\MMLcreset
@@ -524,16 +529,15 @@
\starttexdefinition mmlapplycsymbol #1#2#3#4
% #1=full url, #2=name, #3=encoding, #4=text
\doifelse {#3} {text} {
-% {\mr #4}
\text{#4}
} {
- \doifsetupselse {mml:csymbol:#1} {
+ \doifelsesetups {mml:csymbol:#1} {
% full url
- \directsetup{mml:csymbol:#1}
+ \fastsetup{mml:csymbol:#1}
} {
% somename (fallback)
- \doifsetupselse {mml:csymbol:#2} {
- \directsetup{mml:csymbol:#2}
+ \doifelsesetups {mml:csymbol:#2} {
+ \fastsetup{mml:csymbol:#2}
} {
\xmlval{mmc:cs}{#3}{}% todo
}
@@ -580,7 +584,7 @@
\stopxmlsetups
\startxmlsetups mml:ci:vector
- \widevec{\xmlflush{#1}}
+ \overrightarrow{\xmlflush{#1}}
\stopxmlsetups
\startxmlsetups mml:ci:matrix
@@ -720,7 +724,7 @@
\startxmlsetups mml:cn:rational
\xmldoifelse {#1} {/mml:sep} {
- \frac
+ \mmlfrac
{\xmlsnippet{#1}{1}}
{\xmlsnippet{#1}{3}}
} {
@@ -905,7 +909,7 @@
\mmlsecond{#1}/\mmlthird{#1}
\else
\MMLcreset
- \frac{\MMLcreset\mmlsecond{#1}}{\MMLcreset\mmlthird{#1}}
+ \mmlfrac{\MMLcreset\mmlsecond{#1}}{\MMLcreset\mmlthird{#1}}
\fi
}
\advance\mmldividelevel\minusone
@@ -1051,7 +1055,7 @@
\let\MMLtimes@@symbol\MMLtimessymbol
} {
\xmldoifelse {#1} {/mml:cn[name(1) == 'mml:cn']} {% name(1) is next one
- \doifinsetelse\MMLtimessymbol{\v!yes,\v!no} {
+ \doifelseinset\MMLtimessymbol{\v!yes,\v!no} {
\let\MMLtimes@@symbol\v!yes
} {
\let\MMLtimes@@symbol\MMLtimessymbol
@@ -1303,7 +1307,7 @@
\doifelse \MMLdiffalternative \v!a {
\xmldoifelse {#1} {/mml:lambda} {
% a special case (mathadore/openmath)
- \frac {
+ \mmlfrac {
d
\normalsuperscript
{\xmlfirst{#1}{/mml:bvar}\xmlfirst{#1}{/mml:cn}}
@@ -1316,7 +1320,7 @@
}
} {
\xmldoifelse {#1} {/mml:bvar} {
- \frac {
+ \mmlfrac {
{\mr d}{
\xmldoifelse {#1} {/mml:degree} {
\normalsuperscript{\xmlconcat{#1}{/mml:degree}\empty}
@@ -1331,13 +1335,11 @@
\xmlfirst{#1}{/mml:ci}
} {
\MMLcreset
-\ifnum\xmlcount{#1}{/mml:apply/*}>\plustwo % hack
- \left(
- \xmlfirst{#1}{/mml:apply}
- \right)
-\else
- \xmlfirst{#1}{/mml:apply}
-\fi
+ \ifnum\xmlcount{#1}{/mml:apply/*}>\plustwo % hack
+ \left(\xmlfirst{#1}{/mml:apply}\right)
+ \else
+ \xmlfirst{#1}{/mml:apply}
+ \fi
}
}
} {
@@ -1387,7 +1389,7 @@
\xmlfirst{#1}{/(mml:apply\string|mml:reln\string|mml:ci\string|mml:cn)}
} {
\xmldoifelse {#1} {/mml:bvar} {
- \frac {
+ \mmlfrac {
{\mr d}\normalsuperscript{
\xmldoifelse {#1} {/mml:degree} {
\xmlconcat{#1}{/mml:degree}\empty
@@ -1709,7 +1711,7 @@
\stopxmlsetups
\startxmlsetups mml:annotation
- \xmldoifelse {#1} {.[oneof(@encoding,'TeX','tex','TEX','ConTeXt','context','CONTEXT','ctx')]} {
+ \xmldoifelse {#1} {.[oneof(@encoding,'TeX','tex','application/x-tex','TEX','ConTeXt','context','CONTEXT','ctx')]} {
\xmlflushcontext{#1}
} {
\xmldoifelse {#1} {.[oneof(@encoding,'calcmath','cm')]} {
@@ -1745,7 +1747,7 @@
\startxmlsetups mml:notanumber \mathopnolimits{NaN} \stopxmlsetups
\startxmlsetups mml:true \mathopnolimits{true} \stopxmlsetups
\startxmlsetups mml:false \mathopnolimits{false} \stopxmlsetups
-\startxmlsetups mml:emptyset \mathopnolimits{\O} \stopxmlsetups
+\startxmlsetups mml:emptyset \mathopnolimits{Ø} \stopxmlsetups
\startxmlsetups mml:pi \pi \stopxmlsetups
\startxmlsetups mml:eulergamma \gamma \stopxmlsetups
\startxmlsetups mml:infinity \infty \stopxmlsetups
@@ -1824,25 +1826,33 @@
% helpers: maybe we can need a setting for the uprights
-\xmlmapvalue {mml} {normal} {\mathupright} % {\mathtf}
-\xmlmapvalue {mml} {double-struck} {\mathblackboard}
-\xmlmapvalue {mml} {italic} {\mathit}
-\xmlmapvalue {mml} {fraktur} {\mathfraktur}
-\xmlmapvalue {mml} {script} {\mathscript}
-\xmlmapvalue {mml} {bold} {\mb} % {\mathbf}
-\xmlmapvalue {mml} {bold-italic} {\mathbi}
-\xmlmapvalue {mml} {bold-fraktur} {\mathfraktur\mathbf}
-\xmlmapvalue {mml} {bold-script} {\mathscript\mathbf}
-\xmlmapvalue {mml} {sans-serif} {\mathss}
-\xmlmapvalue {mml} {bold-sans-serif} {\mathss\mathbf}
-\xmlmapvalue {mml} {sans-serif-italic} {\mathss\mathit}
-\xmlmapvalue {mml} {sans-serif-bold-italic} {\mathss\mathbi}
-\xmlmapvalue {mml} {monospace} {\mathtt}
+\xmlmapvalue {mml:s} {normal} {\mathupright} % {\mathtf}
+\xmlmapvalue {mml:s} {double-struck} {\mathblackboard}
+\xmlmapvalue {mml:s} {italic} {\mathit}
+\xmlmapvalue {mml:s} {fraktur} {\mathfraktur}
+\xmlmapvalue {mml:s} {script} {\mathscript}
+\xmlmapvalue {mml:s} {bold} {\mb} % {\mathbf}
+\xmlmapvalue {mml:s} {bold-italic} {\mathbi}
+\xmlmapvalue {mml:s} {bold-fraktur} {\mathfraktur\mathbf}
+\xmlmapvalue {mml:s} {bold-script} {\mathscript\mathbf}
+\xmlmapvalue {mml:s} {sans-serif} {\mathss}
+\xmlmapvalue {mml:s} {bold-sans-serif} {\mathss\mathbf}
+\xmlmapvalue {mml:s} {sans-serif-italic} {\mathss\mathit}
+\xmlmapvalue {mml:s} {sans-serif-bold-italic} {\mathss\mathbi}
+\xmlmapvalue {mml:s} {monospace} {\mathtt}
+
+\xmlmapvalue {mml:l} {-} {\let\mmlfrac\tfrac}
+ \let\mmlfrac\frac
+\xmlmapvalue {mml:l} {+} {\let\mmlfrac\sfrac}
% todo: displaystyle=true/false (or whatever else shows up)
\starttexdefinition setmmlmathstyle #1
- \xmlval {mml} {\xmlatt{#1}{mathvariant}} \empty % was: \mmmr
+ \xmlval{mml:s}{\xmlatt{#1}{mathvariant}}\empty % was: \mmmr
+\stoptexdefinition
+
+\starttexdefinition setmmlscriptlevel #1
+ \xmlval{mml:l}{\xmlatt{#1}{scriptlevel}}{\let\mmlfrac\frac}
\stoptexdefinition
\starttexdefinition applymmlmathcolor #1#2
@@ -1899,8 +1909,11 @@
\startxmlsetups mml:mi % todo: mathsize (unlikely) mathcolor (easy) mathbackground (easy)
\begingroup
+ \pushmathstyle
\setmmlmathstyle{#1}
+ \setmmlscriptlevel{#1}
\ctxmodulemathml{mi("#1")}
+ \popmathstyle
\endgroup
\stopxmlsetups
@@ -2000,7 +2013,7 @@
\overline{\left)\strut\xmlflush{#1}\right.}
} {
\doifelse \mmlmenclosenotation {mml:enclose:actuarial} {
- \overline{\left.\strut\xmlflush{#1}\right|}
+ \overline{\left.\strut\xmlflush{#1}\right\vert}
} {
\doifelse \mmlmenclosenotation {mml:enclose:radical} {
\sqrt{\xmlflush{#1}}
@@ -2010,13 +2023,13 @@
\framed
[frame=off,strut=no,background={\mmlmenclosenotation}] % offset is kind of undefined
{\startimath
- \expanded{\doifinsetelse {mml:enclose:longdiv} {\mmlmenclosenotation}} {
+ \expanded{\doifelseinset {mml:enclose:longdiv} {\mmlmenclosenotation}} {
\overline{\left)\strut\xmlflush{#1}\right.}
} {
- \expanded{\doifinsetelse {mml:enclose:actuarial} {\mmlmenclosenotation}} {
- \overline{\left.\strut\xmlflush{#1}\right|}
+ \expanded{\doifelseinset {mml:enclose:actuarial} {\mmlmenclosenotation}} {
+ \overline{\left.\strut\xmlflush{#1}\right\vert}
} {
- \expanded{\doifinsetelse {mml:enclose:radical} {\mmlmenclosenotation}} {
+ \expanded{\doifelseinset {mml:enclose:radical} {\mmlmenclosenotation}} {
\sqrt{\xmlflush{#1}}
} {
\xmlflush{#1}
@@ -2043,7 +2056,7 @@
\doifelse{\xmlatt{#1}{bevelled}}{true} {
\left.\mmlfirst{#1}\middle/\mmlsecond{#1}\right.% \thinspace\middle/\thinspace
} {
- \frac{\mmlfirst{#1}}{\mmlsecond{#1}}
+ \mmlfrac{\mmlfirst{#1}}{\mmlsecond{#1}}
}
\else
\doifelse {\xmlval{mml:mfrac:linethickness}{\mmlfraclinethickness}{}} {} {
@@ -2074,8 +2087,11 @@
\startxmlsetups mml:mstyle
\begingroup
+ \pushmathstyle
\setmmlmathstyle{#1}
+ \setmmlscriptlevel{#1}
\xmlflush{#1}
+ \popmathstyle
\endgroup
\stopxmlsetups
@@ -2230,6 +2246,8 @@
}
\stopxmlsetups
+% helpers
+
\unexpanded\def\mmlexecuteifdefined#1%
{\ifx#1\empty
\expandafter\secondoftwoarguments
@@ -2240,135 +2258,166 @@
\fi\fi
{\csname#1\endcsname}}
-% todo: combine topaccent/over/bottomaccent/under check
-
-\definemathextensible [\v!mathematics] [mml:overleftarrow] ["2190] % ["27F5]
-\definemathextensible [\v!mathematics] [mml:overrightarrow] ["2192] % ["27F6]
-\definemathextensible [\v!mathematics] [mml:overleftrightarrow] ["27F7]
-\definemathextensible [\v!mathematics] [mml:overtwoheadrightarrow] ["27F9]
-\definemathextensible [\v!mathematics] [mml:overleftharpoondown] ["21BD]
-\definemathextensible [\v!mathematics] [mml:overleftharpoonup] ["21BC]
-\definemathextensible [\v!mathematics] [mml:overrightharpoondown] ["21C1]
-\definemathextensible [\v!mathematics] [mml:overrightharpoonup] ["21C0]
-
-\definemathextensible [\v!mathematics] [mml:underleftarrow] ["2190] % ["27F5]
-\definemathextensible [\v!mathematics] [mml:underrightarrow] ["2192] % ["27F6]
-\definemathextensible [\v!mathematics] [mml:underleftrightarrow] ["27F7]
-\definemathextensible [\v!mathematics] [mml:undertwoheadrightarrow] ["27F9]
-\definemathextensible [\v!mathematics] [mml:underleftharpoondown] ["21BD]
-\definemathextensible [\v!mathematics] [mml:underleftharpoonup] ["21BC]
-\definemathextensible [\v!mathematics] [mml:underrightharpoondown] ["21C1]
-\definemathextensible [\v!mathematics] [mml:underrightharpoonup] ["21C0]
-
-\definemathtriplet [\v!mathematics] [mmlovertriplet]
-\definemathtriplet [\v!mathematics] [mmlundertriplet]
-\definemathtriplet [\v!mathematics] [mmldoubletriplet]
-
-% alternative:
-%
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x2190}] ["2190] % ["27F5]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x2192}] ["2192] % ["27F6]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F5}] ["2190] % ["27F5]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F6}] ["2192] % ["27F6]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F7}] ["27F7]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F9}] ["27F9]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21BD}] ["21BD]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21BC}] ["21BC]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21C1}] ["21C1]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21C0}] ["21C0]
-
-\unexpanded\def\mmloverof#1{\mmlexecuteifdefined\mmlovercommand\relax{\mmlunexpandedfirst {#1}}\relax}
-\unexpanded\def\mmloveros#1{\mmlexecuteifdefined\mmlovercommand {\mmlunexpandedsecond{#1}}\relax}
-\unexpanded\def\mmloverbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax}
-\unexpanded\def\mmloverbs#1{\mmlexecuteifdefined\mmlbasecommand\relax{\mmlunexpandedsecond{#1}}\relax}
+\def\mmlextensible#1{\ctxmodulemathml{extensible(\!!bs#1\!!es)}}
+
+\definemathtriplet [\v!mathematics] [mmlovertriplet] % or will we use a special instance
+\definemathtriplet [\v!mathematics] [mmlundertriplet] % or will we use a special instance
+\definemathtriplet [\v!mathematics] [mmldoubletriplet] % or will we use a special instance
+
+% common to munder/mover/munderover
+
+\starttexdefinition unexpanded mmlfencedfirst #1
+ \math_fences_checked_start
+ \mmlunexpandedfirst{#1}
+ \math_fences_checked_stop
+\stoptexdefinition
+\starttexdefinition unexpanded mmlfencedsecond #1
+ \math_fences_checked_start
+ \mmlunexpandedsecond{#1}
+ \math_fences_checked_stop
+\stoptexdefinition
+\starttexdefinition unexpanded mmlfencedthird #1
+ \math_fences_checked_start
+ \mmlunexpandedthird{#1}
+ \math_fences_checked_stop
+\stoptexdefinition
+
+% mover
+
+\starttexdefinition unexpanded mmloverabove #1
+ \edef\mmlovercommand{\utfmathfiller\mmlovertoken}
+ \mmlexecuteifdefined\mmlovercommand {\mmlfencedsecond{#1}} \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmloverbase #1
+ \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
+ \mmlexecuteifdefined\mmlbasecommand {\mmlfencedfirst{#1}}
+ \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmloverbasefiller #1
+ \edef\mmlbasecommand{e\utfmathcommandfiller\mmlbasetoken}
+ \mmlexecuteifdefined\mmlbasecommand \relax {\mmlfencedsecond{#1}} {}
+\stoptexdefinition
+\starttexdefinition unexpanded mmloveraccent #1
+ \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
+ \mmlexecuteifdefined\mmlovercommand \relax {\mmlfencedfirst{#1}}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlovertext #1
+ \mmlovertriplet {\mmloverbase{#1}} {\mmloverabove{#1}} {}
+\stoptexdefinition
+\starttexdefinition unexpanded mmloveraccentchecker #1
+ \edef\mmlovertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
+ \doifelseutfmathabove\mmlovertoken \mmloveraccent \mmlovertext {#1}
+\stoptexdefinition
\startxmlsetups mml:mover
- \edef\mmlovertoken{\xmlraw{#1}{/mml:*[2]}}% /text()
- \doifelseutfmathabove\mmlovertoken {
- \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
- \mmloverof{#1}
- } {
- \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}% /text()
- \doifelseutfmathabove\mmlbasetoken {
- \edef\mmlbasecommand{mml:\utfmathcommandabove\mmlbasetoken}
- \mmloverbs{#1}
- } {
- \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
- \edef\mmlovercommand{\utfmathfiller\mmlovertoken}
- \mmlovertriplet{\mmloveros{#1}}{\mmloverbf{#1}}\relax
- }
- }
- % \limits % spoils spacing
+ \edef\mmlbasetoken{\mmlextensible{\xmlraw{#1}{/mml:*[1]}}}% /text()
+ \doifelseutfmathfiller\mmlbasetoken \mmloverbasefiller \mmloveraccentchecker {#1}
\stopxmlsetups
-% alternative:
-%
-% \startxmlsetups mml:mover
-% \edef\mmlovertoken{\xmlraw{#1}{/mml:*[2]}}% /text()
-% \doifelseutfmathabove\mmlovertoken {
-% \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
-% \mmloverof{#1}
-% } {
-% \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]/text()}}
-% \ifcsname mml:\mmlbasetoken\endcsname
-% \csname mml:\mmlbasetoken\endcsname{\mmlunexpandedsecond{#1}}\relax
-% \else
-% \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
-% \edef\mmlovercommand{\utfmathfiller\mmlovertoken}
-% \mmlovertriplet{\mmloveros{#1}}{\mmloverbf{#1}}\relax
-% \fi
-% }
-% % \limits % spoils spacing
-% \stopxmlsetups
+% munder
-\unexpanded\def\mmlunderuf#1{\mmlexecuteifdefined\mmlundercommand\relax {\mmlunexpandedfirst {#1}}\relax}
-\unexpanded\def\mmlunderus#1{\mmlexecuteifdefined\mmlundercommand {\mmlunexpandedsecond{#1}}\relax}
-\unexpanded\def\mmlunderbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax}
-\unexpanded\def\mmlunderbs#1{\mmlexecuteifdefined\mmlbasecommand \relax{}{\mmlunexpandedsecond{#1}}\relax}
+\starttexdefinition unexpanded mmlunderbelow #1
+ \edef\mmlundercommand{\utfmathfiller\mmlundertoken}
+ \mmlexecuteifdefined\mmlundercommand {\mmlfencedsecond{#1}} \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderbase #1
+ \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
+ \mmlexecuteifdefined\mmlbasecommand {\mmlfencedfirst{#1}}
+ \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderbasefiller #1
+ \edef\mmlbasecommand{e\utfmathcommandfiller\mmlbasetoken}%
+ \mmlexecuteifdefined\mmlbasecommand \relax {} {\mmlfencedsecond{#1}}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderaccent #1
+ \edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken}
+ \mmlexecuteifdefined\mmlundercommand \relax {\mmlfencedfirst{#1}}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlundertext #1
+ \mmlundertriplet {\mmlunderbase{#1}} {} {\mmlunderbelow{#1}}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderaccentchecker #1
+ \edef\mmlundertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
+ \doifelseutfmathbelow\mmlundertoken \mmlunderaccent \mmlundertext {#1}
+\stoptexdefinition
\startxmlsetups mml:munder
- \edef\mmlundertoken{\xmlraw{#1}{/mml:*[2]}}% /text()
- \doifelseutfmathbelow\mmlundertoken {%
- \edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken}
- \mmlunderuf{#1}
- } {
- \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}% /text()
- \doifelseutfmathbelow\mmlbasetoken {
- \edef\mmlbasecommand{mml:\utfmathcommandbelow\mmlbasetoken}
- \mmlunderbs{#1}
- } {
- \edef\mmlbasecommand {\utfmathfiller\mmlbasetoken}
- \edef\mmlundercommand{\utfmathfiller\mmlundertoken}
- \mmlundertriplet{\mmlunderus{#1}}{\mmlunderbf{#1}}\relax
- }
- }
- % \limits % spoils spacing
+ \edef\mmlbasetoken{\mmlextensible{\xmlraw{#1}{/mml:*[1]}}}% /text()
+ \doifelseutfmathfiller\mmlbasetoken \mmlunderbasefiller \mmlunderaccentchecker {#1}
\stopxmlsetups
-\unexpanded\def\mmlunderoverst#1{\mmlexecuteifdefined\mmlbasecommand \relax{\mmlunexpandedsecond{#1}}{\mmlunexpandedthird{#1}}\relax}
-\unexpanded\def\mmlunderoverbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax}
-\unexpanded\def\mmlunderoverus#1{\mmlexecuteifdefined\mmlundercommand {\mmlunexpandedsecond{#1}}\relax}
-\unexpanded\def\mmlunderoverot#1{\mmlexecuteifdefined\mmlovercommand {\mmlunexpandedthird {#1}}\relax}
+% munderover
-\startxmlsetups mml:munderover
- \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}% /text()
- \doifelseutfmathbelow\mmlbasetoken {
- \edef\mmlbasecommand{mml:\utfmathcommandbelow\mmlbasetoken}
- \mmlunderoverst{#1}
+\starttexdefinition unexpanded mmlunderoveraccentcheckerUO #1
+ \edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken}
+ \edef\mmlovercommand {\utfmathcommandabove\mmlovertoken}
+ \edef\mmlbasecommand {\mmlovercommand\mmlundercommand}
+ \ifcsname\mmlbasecommand\endcsname
+ \csname\mmlbasecommand\endcsname {\mmlfencedfirst{#1}}
+ \else\ifcsname\mmlundercommand\endcsname
+ \ifcsname\mmlovercommand\endcsname
+ \csname\mmlovercommand\endcsname {\csname\mmlundercommand\endcsname{\mmlfencedfirst{#1}}}
+ \else
+ \mmldoubletriplet {\csname\mmlundercommand\endcsname{\mmlfencedfirst{#1}}} {\mmlfencedthird{#1}\mmlfencedthird{#1}} {}
+ \fi
+ \else\ifcsname\mmlovercommand\endcsname
+ \mmldoubletriplet {\csname\mmlovercommand\endcsname{\mmlfencedfirst{#1}}} {} {\mmlfencedsecond{#1}}
+ \else
+ \mmlunderoveraccentcheckerTT {#1}
+ \fi\fi\fi
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoveraccentcheckerUT #1
+ \edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken}
+ \edef\mmlbasecommand {\mmlundercommand text}
+ \ifcsname\mmlbasecommand\endcsname
+ \csname\mmlbasecommand\endcsname {\mmlfencedfirst{#1}} {\mmlfencedthird{#1}}
+ \else\ifcsname\mmlundercommand\endcsname
+ \mmldoubletriplet {\csname\mmlundercommand\endcsname{\mmlfencedfirst{#1}}} {\mmlfencedthird{#1}} {}
+ \else
+ \mmlunderoveraccentcheckerTT {#1}
+ \fi\fi
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoveraccentcheckerOT #1
+ \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
+ \edef\mmlbasecommand{\mmlovercommand text}
+ \ifcsname\mmlbasecommand\endcsname
+ \csname\mmlbasecommand\endcsname {\mmlfencedfirst{#1}} {\mmlfencedsecond{#1}}
+ \else\ifcsname\mmlovercommand\endcsname
+ \mmldoubletriplet {\csname\mmlovercommand\endcsname{\mmlfencedfirst{#1}}} {} {\mmlfencedsecond{#1}}
+ \else
+ \mmlunderoveraccentcheckerTT {#1}
+ \fi\fi
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoveraccentcheckerTT #1
+ \mmldoubletriplet {\mmlfencedfirst{#1}} {\mmlfencedthird{#1}} {\mmlfencedsecond{#1}} \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoveraccentchecker #1
+ \edef\mmlundertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
+ \edef\mmlovertoken {\mmlextensible{\xmlraw{#1}{/mml:*[3]}}}% /text()
+ \doifelseutfmathbelow\mmlundertoken {
+ \doifelseutfmathabove\mmlovertoken \mmlunderoveraccentcheckerUO \mmlunderoveraccentcheckerUT {#1}
} {
- \edef\mmlundertoken {\xmlraw{#1}{/mml:*[2]}}% /text()
- \edef\mmlovertoken {\xmlraw{#1}{/mml:*[3]}}% /text()
- \edef\mmlbasecommand {\utfmathfiller\mmlbasetoken}
- \edef\mmlundercommand{\utfmathfiller\mmlundertoken}
- \edef\mmlovercommand {\utfmathfiller\mmlovertoken}
- \mmldoubletriplet{\mmlunderoverbf{#1}}{\mmlunderoverot{#1}}{\mmlunderoverus{#1}}\relax
+ \doifelseutfmathabove\mmlovertoken \mmlunderoveraccentcheckerOT \mmlunderoveraccentcheckerTT {#1}
}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoverbasefiller #1
+ \edef\mmlbasecommand{e\utfmathcommandfiller\mmlbasetoken}%
+ \mmlexecuteifdefined\mmlbasecommand \relax {\mmlfencedthird{#1}} {\mmlfencedsecond{#1}}
+\stoptexdefinition
+
+\startxmlsetups mml:munderover
+ \edef\mmlbasetoken{\mmlextensible{\xmlraw{#1}{/mml:*[1]}}}% /text()
+ \doifelseutfmathfiller\mmlbasetoken \mmlunderoverbasefiller \mmlunderoveraccentchecker {#1}
\stopxmlsetups
% tables (mml:mtable, mml:mtr, mml:mlabledtr, mml:mtd)
\startxmlsetups mml:mtable % some more attributes need to be supported
- \vcenter{\ctxmodulemathml{mtable("#1")}}
+ \vcenter {
+ \hbox {% needed because otherwise positions make the vcenter wide
+ \ctxmodulemathml{mtable("#1")}
+ }
+ }
\stopxmlsetups
\startxmlsetups mml:mcolumn
@@ -2378,29 +2427,39 @@
\def\mmlsetfakewidth#1{\setbox\scratchbox\hbox{#1}\scratchdimen\wd\scratchbox}
\def\mmlmcolumndigitspace {\mmlsetfakewidth {0}\kern\scratchdimen}
-\def\mmlmcolumndigitrule {\mmlsetfakewidth {0}\vrule width \scratchdimen height .2pt depth .2pt\relax}
-\def\mmlmcolumnsymbolrule {\mmlsetfakewidth{\times}\vrule width \scratchdimen height .2pt depth .2pt\relax}
-\def\mmlmcolumnpunctuationrule{\mmlsetfakewidth {.}\vrule width \scratchdimen height .2pt depth .2pt\relax}
+\def\mmlmcolumndigitrule {\mmlsetfakewidth {0}\vrule \s!width \scratchdimen \s!height .2\points \s!depth .2\points\relax}
+\def\mmlmcolumnsymbolrule {\mmlsetfakewidth{\times}\vrule \s!width \scratchdimen \s!height .2\points \s!depth .2\points\relax}
+\def\mmlmcolumnpunctuationrule{\mmlsetfakewidth {.}\vrule \s!width \scratchdimen \s!height .2\points \s!depth .2\points\relax}
+
+\setupMMLappearance[mspace][\c!option=] % \v!test
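+% Setting the option to test, e.g. \setupMMLappearance[mspace][option=test], makes
+% the setup below visualize the requested space: struts are shown and the width
+% attribute is typeset in the info font instead of just skipping the width.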
\startxmlsetups mml:mspace
\begingroup
\edef\mmlspacetext{\xmlatt{#1}{spacing}}
\ifx\mmlspacetext\empty
- \!!widtha \xmlattdef{#1}{width} \!!zeropoint % must be string
- \!!heighta\xmlattdef{#1}{height}\!!zeropoint
- \!!deptha \xmlattdef{#1}{depth} \!!zeropoint
- \ifdim\!!heighta=\zeropoint
- \ifdim\!!deptha=\zeropoint\else
- \hbox{\vrule\s!depth\!!deptha\s!height\zeropoint\s!width\zeropoint}%
+ \scratchwidth \xmlattdef{#1}{width} \!!zeropoint % must be string
+ \scratchheight\xmlattdef{#1}{height}\!!zeropoint
+ \scratchdepth \xmlattdef{#1}{depth} \!!zeropoint
+ \ifdim\scratchheight=\zeropoint
+ \ifdim\scratchdepth=\zeropoint\else
+ \hbox{\vrule\s!depth\scratchdepth\s!height\zeropoint\s!width\zeropoint}%
\fi
\else
- \hbox{\vrule\s!depth\zeropoint\s!height\!!heighta\s!width\zeropoint}%
+ \hbox{\vrule\s!depth\zeropoint\s!height\scratchheight\s!width\zeropoint}%
\fi
- \ifdim\!!widtha=\zeropoint\else
- \hskip\!!widtha
+ \ifdim\scratchwidth=\zeropoint\else
+ \ifx\MMLmspaceoption\v!test
+ \hbox to \scratchwidth{\showstruts\strut\hss\lower2\exheight\hbox{\infofont\xmlattdef{#1}{width}}\hss\strut}
+ \else
+ \hskip\scratchwidth
+ \fi
\fi
\else
- \phantom{\triggermathstyle\normalmathstyle\mmlspacetext}
+ \ifx\MMLmspaceoption\v!test
+ \hbox{\showstruts\strut\phantom{\triggermathstyle\normalmathstyle\mmlspacetext}\strut}
+ \else
+ \phantom{\triggermathstyle\normalmathstyle\mmlspacetext}
+ \fi
\fi
\endgroup
\stopxmlsetups
diff --git a/tex/context/base/x-mathml.xsd b/tex/context/base/x-mathml.xsd
index 17f0bea2a..1c29452b0 100644
--- a/tex/context/base/x-mathml.xsd
+++ b/tex/context/base/x-mathml.xsd
@@ -3,9 +3,9 @@
-
+
-
-
+
+
diff --git a/tex/context/base/x-res-01.mkiv b/tex/context/base/x-res-01.mkiv
index e234e9867..36070c615 100644
--- a/tex/context/base/x-res-01.mkiv
+++ b/tex/context/base/x-res-01.mkiv
@@ -256,7 +256,7 @@
\vfill
}
\advance\hsize by -30pt
- \doifmodeelse {clipgrid-distance,clipgrid-steps} {
+ \doifelsemode {clipgrid-distance,clipgrid-steps} {
\xmlsetup{#1}{xml:resource:a}
} {
\xmlsetup{#1}{xml:resource:b}
diff --git a/tex/context/base/x-set-11.mkiv b/tex/context/base/x-set-11.mkiv
index d4b43a9ee..73e68e073 100644
--- a/tex/context/base/x-set-11.mkiv
+++ b/tex/context/base/x-set-11.mkiv
@@ -57,7 +57,7 @@
2: -- wordt verwerkt
3: -- is niet gedefinieerd
4: -- wordt nogmaals verwerkt
- optional: optioneel
+ optional: opt
displaymath: formule
index: ingang
math: formule
@@ -95,7 +95,7 @@
2: -- is processed
3: -- is undefined
4: -- is processed again
- optional: optional
+ optional: opt
displaymath: formula
index: entry
math: formula
@@ -133,7 +133,7 @@
2: -- wird verarbeitet
3: -- ist undefiniert
4: -- ist mehrmals verarbeitet
- optional: optioneel
+ optional: opt
displaymath: formula
index: entry
math: formula
@@ -171,7 +171,7 @@
2: -- je zpracovano
3: -- je nedefinovano
4: -- je zpracovano znovu
- optional: optioneel
+ optional: opt
displaymath: formula
index: entry
math: formula
@@ -209,7 +209,7 @@
2: -- is processed
3: -- is undefined
4: -- is processed again
- optional: optioneel
+ optional: opt
displaymath: formula
index: entry
math: formula
@@ -247,7 +247,7 @@
2: este procesat --
3: -- este nedefinit
4: -- este procesat din nou
- optional: optioneel
+ optional: opt
displaymath: formula
index: entry
math: formula
@@ -285,7 +285,7 @@
2: -- est traité
3: -- n'est pas défini
4: -- est traité de nouveau
- optional: optionel
+ optional: opt
displaymath: formule
index: entrée
math: formule
@@ -369,7 +369,8 @@
} {
\let\currentSETUPprefix\empty
}
- \edef\currentSETUPname{\xmlatt{#1}{name}}
+ % \edef\currentSETUPname{\xmlatt{#1}{name}}
+ \edef\currentSETUPname{\xmlattribute{#1}{/sequence/string[1]}{value}}%
\doifelse {\xmlatt{#1}{generated}} {yes} {
\def\currentSETUPgenerated{*}
} {
@@ -448,8 +449,18 @@
% \def\showsetupindeed#1%
% {\xmlfilterlist{\loadedsetups}{interface/command[@name='#1']/command(xml:setups:typeset)}}
+% \def\showsetupindeed#1%
+% {\xmlfilterlist{\loadedsetups}{/interface/command['#1' == (@type=='environment' and 'start' or '') .. @name]/command(xml:setups:typeset)}}
+
+% \setelementnature[setup][display]
+% \setelementnature[setup][mixed]
+
\def\showsetupindeed#1%
- {\xmlfilterlist{\loadedsetups}{/interface/command['#1' == (@type=='environment' and 'start' or '') .. @name]/command(xml:setups:typeset)}}
+ {\startelement[setup][name=#1]%
+ \startelement[noexport][comment={setup definition #1}]
+ \xmlfilterlist{\loadedsetups}{/interface/command['#1' == (@type=='environment' and '\e!start' or '') .. @name]/command(xml:setups:typeset)}%
+ \stopelement
+ \stopelement}
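+
+% The redefinition above wraps the typeset setup description in a tagged 'setup'
+% element plus a 'noexport' comment element, presumably so that the export (tagging)
+% machinery can recognize setup definitions and keep the generated content out of
+% the export proper.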
\unexpanded\def\placesetup {\placelistofsorts[texcommand][\c!criterium=\v!used]}
\unexpanded\def\placeallsetups{\placelistofsorts[texcommand][\c!criterium=\v!all ]}
@@ -459,7 +470,7 @@
%D Typesetting:
\setupxml
- [\c!method=mkiv, % mixed mode
+ [%\c!method=mkiv, % mixed mode
\c!default=\v!hidden, % ignore elements that are not defined
\c!compress=\v!yes, % strip comment
\c!entities=\v!yes] % replace entities
@@ -489,37 +500,41 @@
\xmlfilter{#1}{/sequence/first()}
\ignorespaces
\egroup
- \xmldoif{#1}{/arguments} {
- \bgroup
- \enablemode[setups-pass-one]
- \doglobal\newcounter\currentSETUPargument
- \ignorespaces
- \xmlfilter{#1}{/arguments/text()}
- \egroup
- }
- \doif {\xmlatt{#1}{type}} {environment} {
- \bgroup
- \enablemode[setups-pass-one]%
- \hskip.5em\unknown\hskip.5em
- \doif {\xmlatt{#1}{generated}} {yes} {
- \ttsl
- }
- \tex{\e!stop}
- \xmlfilter{#1}{/sequence/first()}
- \ignorespaces
- \egroup
- }
- \endgraf
- \xmldoif{#1}{/arguments} {
- \bgroup
- \enablemode[setups-pass-two]
- \doglobal\newcounter\currentSETUPargument
- %\blank[\v!line] % packed mode (we could do \startunpacked ...)
- \godown[.75\lineheight]
- \switchtobodyfont[\v!small]
- \ignorespaces\xmlfilter{#1}{/arguments/text()}\endgraf
- \egroup
- }
+ \ifshortsetup
+ % nothing
+ \else
+ \xmldoif{#1}{/arguments} {
+ \bgroup
+ \enablemode[setups-pass-one]
+ \doglobal\newcounter\currentSETUPargument
+ \ignorespaces
+ \xmlfilter{#1}{/arguments/text()}
+ \egroup
+ }
+ \doif {\xmlatt{#1}{type}} {environment} {
+ \bgroup
+ \enablemode[setups-pass-one]%
+ \hskip.5em\unknown\hskip.5em
+ \doif {\xmlatt{#1}{generated}} {yes} {
+ \ttsl
+ }
+ \tex{\e!stop}
+ \xmlfilter{#1}{/sequence/first()}
+ \ignorespaces
+ \egroup
+ }
+ \endgraf
+ \xmldoif{#1}{/arguments} {
+ \bgroup
+ \enablemode[setups-pass-two]
+ \doglobal\newcounter\currentSETUPargument
+ %\blank[\v!line] % packed mode (we could do \startunpacked ...)
+ \godown[.75\lineheight]
+ \switchtobodyfont[\v!small]
+ \ignorespaces\xmlfilter{#1}{/arguments/text()}\endgraf
+ \egroup
+ }
+ \fi
\getvalue{\e!stop setuptext}
\stopxmlsetups
@@ -562,7 +577,7 @@
\startxmlsetups xml:setups:word \showSETUPcomponent{#1}{word} {word} \stopxmlsetups
\def\showSETUPcomponent#1#2#3%
- {\doifmodeelse{setups-pass-one}
+ {\doifelsemode{setups-pass-one}
{\getvalue{showSETUP#2}{#1}}
{\simpleSETUPargument{#3}}}
@@ -597,7 +612,7 @@
\stopxmlsetups
\startxmlsetups xml:setups:assignments
- \doifmodeelse{setups-pass-one} {
+ \doifelsemode{setups-pass-one} {
\showSETUPassignment{#1}
} {
\xdef\currentSETUPwidth{0pt}%
@@ -619,7 +634,7 @@
\stopxmlsetups
\startxmlsetups xml:setups:keywords
- \doifmodeelse{setups-pass-one} {
+ \doifelsemode{setups-pass-one} {
\showSETUPkeyword{#1}
} {
\startfirstSETUPcolumn{\showSETUPnumber}%
@@ -638,11 +653,16 @@
\xmlflush{#1}
\doifmode{interface:setup:defaults} {
\ifx\currentSETUPhash\empty \else
- \edef\currentSETUPvalue{\csname named\currentSETUPhash parameter\endcsname\empty{\xmlatt{#1}{name}}}
- \ifx\currentSETUPvalue\empty
- \space=\space
- \detokenize\expandafter{\currentSETUPvalue}
- \fi
+ \begingroup
+ % todo, make a one level expansion of parameter
+ \let\emwidth \relax
+ \let\exheight\relax
+ \edef\currentSETUPvalue{\csname named\currentSETUPhash parameter\endcsname\empty{\xmlatt{#1}{name}}}
+ \ifx\currentSETUPvalue\empty \else
+ =\space
+ \detokenize\expandafter{\currentSETUPvalue}
+ \fi
+ \endgroup
\fi
}
\stopsecondSETUPcolumn
@@ -650,7 +670,7 @@
\stopxmlsetups
\startxmlsetups xml:setups:constant
- \doifmodeelse {setups-pass-one} {
+ \doifelsemode {setups-pass-one} {
} {
\doif {\xmlatt{#1}{default}} {yes} {
\underbar % next needs to be {braced}
@@ -662,7 +682,7 @@
\stopxmlsetups
\startxmlsetups xml:setups:variable
- \doifmodeelse {setups-pass-one} {
+ \doifelsemode {setups-pass-one} {
\expanded{\setupintfont{\xmlatt{#1}{value}}}\ignorespaces
} {
\c!setup!reserved!{\xmlatt{#1}{value}}
@@ -818,7 +838,6 @@
\stoptabulate
\stopxmlsetups
-
\starttexdefinition showrootvalues [#1]
\edef\currentsetupparametercategory{#1}
\edef\currentsetupparametercommand{setup#1}
diff --git a/tex/context/base/x-set-12.mkiv b/tex/context/base/x-set-12.mkiv
index 6590bfe9e..c60445313 100644
--- a/tex/context/base/x-set-12.mkiv
+++ b/tex/context/base/x-set-12.mkiv
@@ -146,8 +146,7 @@
[\c!alternative=\v!doublesided]
\setupsetup
- [\c!criterium=\v!all,
- \c!reference=0]
+ [\c!criterium=\v!all]
\setupframedtexts
[setuptext]
diff --git a/tex/context/base/x-xtag.mkiv b/tex/context/base/x-xtag.mkiv
index 09490cc8f..ab95c567e 100644
--- a/tex/context/base/x-xtag.mkiv
+++ b/tex/context/base/x-xtag.mkiv
@@ -14,7 +14,7 @@
%D Here we load the \MKII\ (mostly) streaming \XML\ parser. We
%D define a couple of catcode regimes first.
-\ifdefined\XMLbanner \endinput \fi
+\endinput
\writestatus{xtag}{this module is obsolete, use the mkiv-xml features or use mkii instead}
diff --git a/tex/context/bib/bibl-apa-it.tex b/tex/context/bib/bibl-apa-it.tex
new file mode 100644
index 000000000..ebf03313a
--- /dev/null
+++ b/tex/context/bib/bibl-apa-it.tex
@@ -0,0 +1,385 @@
+%D \module
+%D [ file=bibl-apa-it,
+%D version=2014.10.10,
+%D title=APA bibliography style,
+%D subtitle=Publications,
+%D author={Andrea Valle \& Alan Braslau},
+%D date=\currentdate,
+%D copyright={Public Domain}]
+%C
+%C Donated to the public domain. Use at your own risk.
+
+\unprotect
+
+\setupcite
+ [author,year]
+ [\c!andtext={ e },
+ \c!otherstext={ et al.},
+ \c!pubsep={, },
+ \c!lastpubsep={ e },
+ \c!compress=\v!no,
+ \c!inbetween={ },
+ \c!left={(},
+ \c!right={)}]
+
+\setupcite
+ [authoryear]
+ [\c!andtext={ e },
+ \c!otherstext={ et al.},
+ \c!pubsep={, },
+ \c!lastpubsep={ e },
+ \c!compress=\v!yes,
+ \c!inbetween={ },
+ \c!left={(},
+ \c!right={)}]
+
+\setupcite
+ [authoryears]
+ [\c!andtext={ e },
+ \c!otherstext={ et al.},
+ \c!pubsep={, },
+ \c!lastpubsep={ e },
+ \c!compress=\v!yes,
+ \c!inbetween={, },
+ \c!left={(},
+ \c!right={)}]
+
+\setupcite
+ [key,serial,authornum,page,short,type,doi,url]
+ [\c!andtext={ e },
+ \c!otherstext={ et al.},
+ \c!pubsep={, },
+ \c!lastpubsep={ e },
+ \c!compress=\v!no,
+ \c!inbetween={ },
+ \c!left={[},
+ \c!right={]}]
+
+\setupcite
+ [num]
+ [\c!andtext={ e },
+ \c!otherstext={ et al.},
+ \c!pubsep={, },
+ \c!lastpubsep={ e },
+ \c!compress=\v!yes,
+ \c!inbetween={--},
+ \c!left={[},
+ \c!right={]}]
+
+\setuppublications
+ [\c!sorttype=,
+ \c!criterium=,
+ \c!refcommand=authoryears,
+ \c!numbering=\v!no,
+ \c!autohang=\v!no]
+
+\setuppublicationlist
+ [\c!width=24pt,
+ \c!artauthor=\invertedshortauthor,
+ \c!editor=\invertedshortauthor,
+ \c!author=\invertedshortauthor,
+ \c!namesep={, },
+ \c!lastnamesep={ e },
+ \c!finalnamesep={ e },
+ \c!firstnamesep={, },
+ \c!juniorsep={ },
+ \c!vonsep={ },
+ \c!surnamesep={, },
+ \c!authoretallimit=5,
+ \c!editoretallimit=5,
+ \c!artauthoretallimit=5,
+ \c!authoretaldisplay=5,
+ \c!editoretaldisplay=5,
+ \c!artauthoretaldisplay=5,
+ \c!authoretaltext={ et al.},
+ \c!editoretaltext={ et al.},
+ \c!artauthoretaltext={ et al.}]
+
+\def\maybeyear#1{#1}
+\def\etalchar #1{#1}
+
+%D \macros{insertchapter,insertpublisher}
+
+%D Some shortcuts.
+
+% ((#1(type\ |)chapter#2)|#3)
+
+\def\insertchap#1#2#3%
+ {\insertchapter
+ {#1\insertbibtype{}{\ }{capitolo\ }}{#2}%
+ {#3}}
+
+% #1city, country: pubname#2
+% #1country: pubname#2
+% #1pubname#2
+% #1city, country#2
+% #3
+
+\def\insertpublisher#1#2#3%
+ {\insertpubname
+ {\insertcity
+ {#1}
+ {\insertcountry{, }{}{}: }%
+ {#1\insertcountry{}{: }{}}}%
+ {#2}%
+ {\insertcity
+ {#1}
+ {\insertcountry{, }{}{#2}}%
+ {\insertcountry{#1}{#2}{#3}}}%
+ }
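+
+% As an illustration, with made-up data the scheme sketched above renders as
+% "Torino, Italia: Einaudi" when city, country and publisher are all given, as
+% "Italia: Einaudi" without a city, and as just the publisher or the fallback text
+% when less is known.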
+
+\def\insertorg#1#2#3%
+ {\insertorganization
+ {\insertcity
+ {#1}
+ {\insertcountry{, }{}{#2}: }%
+ {\insertcountry{}{: }{#2}}}%
+ {}%
+ {\insertcity
+ {#1}
+ {\insertcountry{, }{}{#2}}%
+ {\insertcountry{}{#2}{#3}}}%
+ }
+
+
+
+\setuppublicationlayout[article]{%
+ \insertartauthors{}{ }{\insertthekey{}{ }{}}%
+ \insertpubyear{(}{). }{\unskip.}%
+ \insertarttitle{\bgroup }{\egroup. }{}%
+ \insertjournal{\bgroup \it}{\egroup}
+ {\insertcrossref{In }{}{}}%
+ \insertvolume
+ {\bgroup \it, }
+ {\egroup\insertissue{\/(}{)}{}\insertpages{, }{.}{.}}
+ {\insertpages{, pp. }{.}{.}}%
+ \insertnote{ }{.}{}%
+ \insertcomment{}{.}{}%
+}
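+
+% A purely hypothetical article entry would thus come out roughly as:
+%
+%   Rossi, M. (2010). Un titolo. La Rivista, 12(3), 45-67.
+%
+% with the journal name and volume number in italics.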
+
+\newif\ifeditedbook
+
+\setuppublicationlayout[book]{%
+ \insertauthors{}{ }{\inserteditors{}{, a c. di%
+ \ \global\editedbooktrue
+ }{\insertthekey{}{. }{}}}%
+ \insertpubyear{(}{). }{\unskip.}%
+ \inserttitle
+ {\bgroup\it }%
+ {\/\egroup
+ \ifeditedbook
+ \global\editedbookfalse
+ \insertvolume
+ { N.~}%
+ {\insertseries
+ { in~\bgroup}%
+ {\egroup. }%
+ {\insertcrossref{ in~}{}{. }}}%
+ {\insertseries{ }{.}{.}}%
+ \else
+ \insertcrossref
+ {\insertchap{, }{}{}%
+ \insertpages{\unskip, pp. }{. }{. }%
+ \insertvolume{Vol.~}{ di~}{}%
+ }%
+ {}%
+ {\insertvolume
+ {, vol.~}%
+ {\insertseries
+ { di~\bgroup\it}%
+ {\egroup}
+ {}}
+ {}%
+ \insertchap{, }{}{}%
+ \insertpages{\unskip, pp. }{.}{.}%
+ }%
+ \fi}%
+ {}%
+ \insertedition{ }{ ed.}{}%
+ \insertpublisher{ }{.}{.}%
+ \insertpages{ }{p. }{ }%
+ \insertnote{}{.}{}%
+}
+
+\setuppublicationlayout[inbook]{%
+ \insertauthors{}{ }{\inserteditors{}{, a c. di%
+ \ \global\editedbooktrue
+ }{\insertthekey{}{. }{}}}%
+ \insertpubyear{(}{). }{\unskip.}%
+ \inserttitle
+ {\bgroup\it }%
+ {\/\egroup
+ \ifeditedbook
+ \global\editedbookfalse
+ \insertvolume
+ { number~}%
+ {\insertseries
+ { in~\bgroup}%
+ {\egroup. }%
+ {\insertcrossref{ in~}{}{.}}}%
+ {\insertseries{ }{.}{}}%
+ \else
+ \insertcrossref
+ {\insertchap{, }{}{}%
+ \insertpages{\unskip, pp. }{. }{. }%
+ \insertvolume{Volume~}{ di~}{}%
+ }%
+ {}%
+ {\insertvolume
+ {, volume~}%
+ {\insertseries
+ { di~\bgroup\it}%
+ {\egroup}
+ {}}
+ {}%
+ \insertchap{, }{}{}%
+ \insertpages{\unskip, pp. }{.}{}%
+ }%
+ \fi}%
+ { }%
+ \insertedition{ }{ ed.}{}%
+ \insertpublisher{ }{.}{.}%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[booklet]{%
+ \insertauthors{}{ }{\insertthekey{}{. }{}}%
+ \insertpubyear{(}{). }{}%
+ \inserttitle{\bgroup }{\egroup \insertseries{ (}{)}{}. }{}%
+ \insertedition{ }{ ed.}{}%
+ \insertpublisher{ }{.}{.}%
+ \insertpages{}{p. }{}%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[manual]{%
+ \insertauthors{}{ }{\insertthekey{}{. }{}}%
+ \insertpubyear{(}{). }{}%
+ \inserttitle{\bgroup \it }{\/\egroup \insertseries{ (}{)}{}. }{}%
+ \insertedition{ }{ ed.}{}%
+ \insertorg{ }{.}{.}%
+ \insertpages{}{p. }{}%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[incollection]{%
+ \insertartauthors{}{ }{\insertthekey{}{. }{}}%
+ \insertpubyear{(}{). }{}%
+ \insertarttitle{\bgroup }{\egroup. }{}%
+ \inserttitle
+ {In \inserteditors{}%
+ {, a c. di, }%
+ {}%
+ \bgroup\it}%
+ {\egroup
+ \insertseries
+ {\insertvolume{, number }{~in }{ }}%
+ {}%
+ {}%
+ \insertchap{\unskip, }{ }{ }%
+ \insertpages{\unskip, pp.~}{. }{\unskip. }%
+ \insertedition{ }{ edition}{}%
+ \insertpublisher{ }{.}{.}%
+ }%
+ {In \insertcrossref{}{}{}%
+ \insertchap{\unskip, }{ }{ }%
+ \insertpages{\unskip, pp.~}{. }{\unskip. }%
+ }%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[inproceedings]{%
+ \insertauthors{}{ }{}%
+ \insertpubyear{(}{). }{}%
+ \insertarttitle{\bgroup }{\egroup. }{}%
+ \inserttitle
+ {In \inserteditors{}%
+ {, a c. di, }%
+ {}%
+ \bgroup\it}%
+ {\egroup
+ \insertseries
+ {\insertvolume{, number }{~in }{ }}%
+ {}%
+ {}%
+ \insertchap{\unskip, }{ }{ }%
+ \insertpages{\unskip, pp.~}{}{}%
+ \insertorg{. }{.}{.}%
+ }%
+ {In \insertcrossref{}{}{}%
+ \insertchap{\unskip, }{ }{ }%
+ \insertpages{\unskip, pp.~}{. }{\unskip. }%
+ }%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[proceedings]{%
+ \inserteditors{}{, a c. di%
+ \ \global\editedbooktrue
+ }{\insertthekey{}{ }{}}%
+ \insertpubyear{(}{). }{}%
+ \inserttitle
+ {\bgroup\it}%
+ {\egroup
+ \insertseries
+ {\insertvolume{, number }{~in }{ }}%
+ {}%
+ {}%
+ \insertchap{\unskip, }{ }{ }%
+ \insertpages{\unskip, pp.~}{}{}%
+ \insertorg{. }{.}{.}%
+ }%
+ {}%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[mastersthesis]{%
+ \insertauthors{}{ }{}%
+ \insertpubyear{(}{). }{}%
+ \inserttitle{\bgroup }{\egroup \insertseries{ (}{)}{}. }{}%
+ \insertbibtype{}{, }{Tesi di laurea, }%
+ \insertpublisher{ }{.}{.}%
+ \insertpages{ }{p. }{}%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[phdthesis]{%
+ \insertauthors{}{ }{}%
+ \insertpubyear{(}{). }{}%
+ \inserttitle{\bgroup\it }{\egroup \insertseries{ (}{)}{}. }{}%
+ \insertbibtype{}{, }{Tesi di dottorato, }%
+ \insertpublisher{ }{.}{.}%
+ \insertpages{ }{ p. }{}%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[misc]{%
+ \insertauthors{}{ }{\insertthekey{}{. }{}}%
+ \insertpubyear{(}{). }{}%
+ \inserttitle{\bgroup }{\egroup \insertseries{ (}{)}. }{}%
+ \insertpublisher{ }{.}{.}%
+ \insertpages{ }{p. }{}%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[techreport]{%
+ \insertauthors{}{ }{}%
+ \insertpubyear{(}{). }{}%
+ \inserttitle{\bgroup }{\egroup \insertseries{ (}{)}{}. }{}%
+ \insertbibtype{}{\insertvolume{ }{, }{, }}{Relazione tecnica}%
+ \insertpublisher{ }{.}{.}%
+ \insertpages{ }{p. }{}%
+ \insertnote{ }{.}{}%
+}
+
+\setuppublicationlayout[unpublished]{%
+ \insertauthors{}{ }{}%
+ \insertpubyear{(}{). }{}%
+ \inserttitle{\bgroup }{\egroup \insertseries{ (}{)}{}. }{}%
+ % \insertpublisher{ }{.}{.}%
+ \insertpages{ }{p. }{}%
+ \insertbibtype{(}{)}{}%
+ \insertnote{ }{.}{}%
+}
+
+\protect
diff --git a/tex/context/bib/sample.bib b/tex/context/bib/sample.bib
index 3f8df623e..4bb71d3a7 100644
--- a/tex/context/bib/sample.bib
+++ b/tex/context/bib/sample.bib
@@ -35,4 +35,3 @@
address = {London},
keywords = {general},
}
-
diff --git a/tex/context/extra/showunic.tex b/tex/context/extra/showunic.tex
deleted file mode 100644
index efdbf4d3a..000000000
--- a/tex/context/extra/showunic.tex
+++ /dev/null
@@ -1,130 +0,0 @@
-% author : Hans Hagen / PRAGMA-ADE
-% version : 2005-06-22
-
-% todo: take antiqua (has everything)
-
-% \tracetypescriptstrue
-
-\usetypescriptfile[type-cbg]
-
-% \preloadtypescriptstrue (default at pragma)
-
-% this font does not work ... why
-%
-% \starttypescript [serif] [hebrew] [default]
-% \definefontsynonym [Serif] [hclassic]
-% \loadmapline[=][hclassic < hclassic.pfb]
-% \stoptypescript
-
-\setuppapersize
- [S6][S6]
-
-\setupbodyfont
- [10pt]
-
-\setuplayout
- [backspace=12pt,
- topspace=12pt,
- width=middle,
- height=middle,
- header=0pt,
- footer=0pt]
-
-\setupcolors
- [state=start]
-
-\setupbackgrounds
- [page]
- [background=color,
- backgroundcolor=darkgray]
-
-\definetypeface [main-latin] [rm] [serif] [latin-modern] [default][encoding=texnansi]
-\definetypeface [main-latin] [tt] [mono] [latin-modern] [default][encoding=texnansi]
-
-\definetypeface [main-math] [rm] [serif] [latin-modern] [default][encoding=texnansi]
-\definetypeface [main-math] [tt] [mono] [latin-modern] [default][encoding=texnansi]
-\definetypeface [main-math] [mm] [math] [latin-modern] [default][encoding=default]
-
-\definetypeface [main-eastern] [rm] [serif] [latin-modern] [default][encoding=qx]
-\definetypeface [main-eastern] [tt] [mono] [latin-modern] [default][encoding=texnansi]
-
-\definetypeface [main-greek] [rm] [serif] [cbgreek] [default][encoding=default]
-\definetypeface [main-greek] [tt] [mono] [latin-modern] [default][encoding=texnansi]
-
-\definetypeface [main-cyrillic] [rm] [serif] [computer-modern] [default][encoding=t2a]
-\definetypeface [main-cyrillic] [tt] [mono] [latin-modern] [default][encoding=texnansi]
-
-% \definetypeface [main-hebrew] [rm] [serif] [hebrew] [default][encoding=default]
-% \definetypeface [main-hebrew] [tt] [mono] [latin-modern] [default][encoding=texnansi]
-
-% The \showunicodetable macro is defined in unic-run.tex.
-
-\starttext
-
-% latin: western / eastern
-
-\startstandardmakeup
- \setupbodyfont[main-latin]
- \centerbox{\scale[factor=max]{\showunicodetable{000}}}
-\stopstandardmakeup
-\startstandardmakeup
- \setupbodyfont[main-eastern]
- \centerbox{\scale[factor=max]{\showunicodetable{001}}}
-\stopstandardmakeup
-\startstandardmakeup
- \setupbodyfont[main-latin]
- \centerbox{\scale[factor=max]{\showunicodetable{002}}}
-\stopstandardmakeup
-
-% greek
-
-\startstandardmakeup
- \setupbodyfont[main-greek]
- \centerbox{\scale[factor=max]{\showunicodetable{003}}}
-\stopstandardmakeup
-
-% cyrillic
-
-\startstandardmakeup
- \setupbodyfont[main-cyrillic]
- \centerbox{\scale[factor=max]{\showunicodetable{004}}}
-\stopstandardmakeup
-
-% hebrew
-
-% \startstandardmakeup
-% \setupbodyfont[mainhebrew]
-% \centerbox{\scale[factor=max]{\showunicodetable{005}}}
-% \stopstandardmakeup
-
-% misc
-
-\startstandardmakeup
- \setupbodyfont[main-latin]
- \centerbox{\scale[factor=max]{\showunicodetable{030}}}
-\stopstandardmakeup
-\startstandardmakeup
- \setupbodyfont[main-latin]
- \centerbox{\scale[factor=max]{\showunicodetable{031}}}
-\stopstandardmakeup
-\startstandardmakeup
- \setupbodyfont[main-latin]
- \centerbox{\scale[factor=max]{\showunicodetable{032}}}
-\stopstandardmakeup
-
-% math
-
-\startstandardmakeup
- \setupbodyfont[main-math]
- \centerbox{\scale[factor=max]{\showunicodetable{033}}}
-\stopstandardmakeup
-\startstandardmakeup
- \setupbodyfont[main-math]
- \centerbox{\scale[factor=max]{\showunicodetable{034}}}
-\stopstandardmakeup
-\startstandardmakeup
- \setupbodyfont[main-math]
- \centerbox{\scale[factor=max]{\showunicodetable{039}}}
-\stopstandardmakeup
-
-\stoptext
diff --git a/tex/context/fonts/lm-math.lfg b/tex/context/fonts/lm-math.lfg
index 87c37cd78..b8c996979 100644
--- a/tex/context/fonts/lm-math.lfg
+++ b/tex/context/fonts/lm-math.lfg
@@ -231,7 +231,7 @@ local seventeen = {
return {
name = "lm-math",
version = "1.00",
- comment = "Goodies that complement latin modern math.",
+ comment = "Goodies that complement latin modern math (virtual).",
author = "Hans Hagen",
copyright = "ConTeXt development team",
mathematics = {
diff --git a/tex/context/fonts/lm.lfg b/tex/context/fonts/lm.lfg
index 8d7614718..546d18def 100644
--- a/tex/context/fonts/lm.lfg
+++ b/tex/context/fonts/lm.lfg
@@ -34,8 +34,19 @@ return {
[0x2213] = { -- ∓
yoffset = -100,
},
- }
- }
+ },
+ },
+-- parameters = {
+-- FractionNumeratorDisplayStyleShiftUp = function(value,target,original)
+-- local o = original.mathparameters.FractionNumeratorDisplayStyleShiftUp
+-- if o > 675 then
+-- o = 600
+-- else
+-- -- probably tuned
+-- end
+-- return o * target.parameters.factor
+-- end,
+-- }
},
filenames = {
["latinmodern-math-regular.otf"] = {
diff --git a/tex/context/fonts/texgyre.lfg b/tex/context/fonts/texgyre.lfg
index 7782aa509..785982037 100644
--- a/tex/context/fonts/texgyre.lfg
+++ b/tex/context/fonts/texgyre.lfg
@@ -26,5 +26,11 @@ return {
"tgbonummath-regular.otf",
"tgbonum-math.otf",
},
+ ["texgyre-schola-math-regular.otf"] = {
+ "texgyreschola-math.otf", -- beta
+ "texgyrescholamath-regular.otf",
+ "tgscholamath-regular.otf",
+ "tgschola-math.otf",
+ },
},
}
diff --git a/tex/context/fonts/treatments.lfg b/tex/context/fonts/treatments.lfg
index 44d24da22..40bac427c 100644
--- a/tex/context/fonts/treatments.lfg
+++ b/tex/context/fonts/treatments.lfg
@@ -25,24 +25,50 @@ local fix_unifraktur = {
end,
}
-local fix_lmmonoregular = {
- comment = "wrong widths of some glyphs",
- fixes = function(data)
- report("fixing some wrong widths")
- local unicodes = data.resources.unicodes
- local descriptions = data.descriptions
- local defaultwidth = descriptions[unicodes["zero"]].width
- descriptions[unicodes["six"] ].width = defaultwidth
- descriptions[unicodes["nine"] ].width = defaultwidth
- descriptions[unicodes["caron"] ].width = defaultwidth
- descriptions[unicodes["perthousand"] ].width = defaultwidth
- descriptions[unicodes["numero"] ].width = defaultwidth
- descriptions[unicodes["caron.cap"] ].width = defaultwidth
- descriptions[unicodes["six.taboldstyle"] ].width = defaultwidth
- descriptions[unicodes["nine.taboldstyle"]].width = defaultwidth
- descriptions[unicodes["dollar.oldstyle" ]].width = defaultwidth
- end
-}
+-- local fix_lmmonoregular = {
+-- --
+-- -- there are now some extra safeguards for idris
+-- --
+-- comment = "wrong widths of some glyphs",
+-- fixes = function(data)
+-- report("fixing some wrong widths")
+-- local unicodes = data.resources.unicodes
+-- local descriptions = data.descriptions
+-- local function getdescription(name)
+-- local unicode = unicodes[name]
+-- if not unicode then
+-- report("no valid unicode for %a",name)
+-- return
+-- end
+-- local description = descriptions[unicode]
+-- if not description then
+-- report("no glyph names %a in font",name)
+-- return
+-- end
+-- return description
+-- end
+-- local zero = getdescription("zero")
+-- if not zero then
+-- return
+-- end
+-- local defaultwidth = zero.width
+-- local function setwidth(name)
+-- local data = getdescription(name)
+-- if data then
+-- data.width = defaultwidth
+-- end
+-- end
+-- setwidth("six")
+-- setwidth("nine")
+-- setwidth("caron")
+-- setwidth("perthousand")
+-- setwidth("numero")
+-- setwidth("caron.cap")
+-- setwidth("six.taboldstyle")
+-- setwidth("nine.taboldstyle")
+-- setwidth("dollar.oldstyle")
+-- end
+-- }
return {
name = "treatments",
diff --git a/tex/context/foxet/fe-bryson.xml b/tex/context/foxet/fe-bryson.xml
deleted file mode 100644
index 28646b65f..000000000
--- a/tex/context/foxet/fe-bryson.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-Imagine trying to live in a world dominated by dihydrogen oxide, a
-compound that has no taste or smell and is so viable in its properties
-that it is generally benign but at other times swiftly lethal.
-Depending on its state, it can scald you or freeze you. In the
-presence of certain organic molecules it can form carbonic acids so
-nasty that they can strip the leaves from trees and eat the faces off
-statuary. In bulk, when agitated, it can strike with a fury that no
-human edifice could withstand. Even for those who have learned to live
-with it, it is often murderous substance. We call it water.
-
diff --git a/tex/context/foxet/fe-ward.xml b/tex/context/foxet/fe-ward.xml
deleted file mode 100644
index 05f774265..000000000
--- a/tex/context/foxet/fe-ward.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-
-
-The Earth, as a habitat for animal life, is in old age and
-has a fatal illness. Several, in fact. It would be happening
-whether humans had ever evolved or not. But our presence is
-like the effect of an old-age patient who smokes many packs
-of cigarettes per day - and we humans are the cigarettes.
-
diff --git a/tex/context/foxet/fe-zapf.xml b/tex/context/foxet/fe-zapf.xml
deleted file mode 100644
index faf9a0831..000000000
--- a/tex/context/foxet/fe-zapf.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-
-
-Coming back to the use of typefaces in electronic
-publishing: many of the new typographers receive their
-knowledge and information about the rules of typography
-from books, from computer magazines or the instruction
-manuals which they get with the purchase of a PC or
-software. There is not so much basic instruction, as of
-now, as there was in the old days, showing the differences
-between good and bad typographic design. Many people are
-just fascinated by their PC's tricks, and think that a
-widelypraised program, called up on the
-screen, will make everything automatic from now on.
-
diff --git a/tex/context/foxet/fo-0101.fo b/tex/context/foxet/fo-0101.fo
deleted file mode 100644
index 197c5834c..000000000
--- a/tex/context/foxet/fo-0101.fo
+++ /dev/null
@@ -1,17 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0102.fo b/tex/context/foxet/fo-0102.fo
deleted file mode 100644
index 9adcf917e..000000000
--- a/tex/context/foxet/fo-0102.fo
+++ /dev/null
@@ -1,25 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0103.fo b/tex/context/foxet/fo-0103.fo
deleted file mode 100644
index 95d0d4769..000000000
--- a/tex/context/foxet/fo-0103.fo
+++ /dev/null
@@ -1,21 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0201.fo b/tex/context/foxet/fo-0201.fo
deleted file mode 100644
index f524cfd09..000000000
--- a/tex/context/foxet/fo-0201.fo
+++ /dev/null
@@ -1,22 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0301.fo b/tex/context/foxet/fo-0301.fo
deleted file mode 100644
index e0b8a6fc4..000000000
--- a/tex/context/foxet/fo-0301.fo
+++ /dev/null
@@ -1,56 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- beforeafter
-
-
- beforeafter
- beforeafter
- beforeafter
- beforeafter
-
-
- beforeafter
- beforeafter
- beforeafter
- beforeafter
-
-
- beforeafter
- beforeafter
- beforeafter
- beforeafter
-
-
- beforeafter
- beforeafter
- beforeafter
- beforeafter
-
-
- beforeafter
- beforeafter
- beforeafter
- beforeafter
- beforeafter
- beforeafter
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0601.fo b/tex/context/foxet/fo-0601.fo
deleted file mode 100644
index 1e291f278..000000000
--- a/tex/context/foxet/fo-0601.fo
+++ /dev/null
@@ -1,29 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0602.fo b/tex/context/foxet/fo-0602.fo
deleted file mode 100644
index 36e864767..000000000
--- a/tex/context/foxet/fo-0602.fo
+++ /dev/null
@@ -1,27 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0603.fo b/tex/context/foxet/fo-0603.fo
deleted file mode 100644
index 268249d3e..000000000
--- a/tex/context/foxet/fo-0603.fo
+++ /dev/null
@@ -1,26 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0604.fo b/tex/context/foxet/fo-0604.fo
deleted file mode 100644
index 891198ee0..000000000
--- a/tex/context/foxet/fo-0604.fo
+++ /dev/null
@@ -1,26 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0611.fo b/tex/context/foxet/fo-0611.fo
deleted file mode 100644
index 70f495fb6..000000000
--- a/tex/context/foxet/fo-0611.fo
+++ /dev/null
@@ -1,21 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0612.fo b/tex/context/foxet/fo-0612.fo
deleted file mode 100644
index 4b3de6940..000000000
--- a/tex/context/foxet/fo-0612.fo
+++ /dev/null
@@ -1,21 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0613.fo b/tex/context/foxet/fo-0613.fo
deleted file mode 100644
index f478a571d..000000000
--- a/tex/context/foxet/fo-0613.fo
+++ /dev/null
@@ -1,21 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0621.fo b/tex/context/foxet/fo-0621.fo
deleted file mode 100644
index 554fcc4a0..000000000
--- a/tex/context/foxet/fo-0621.fo
+++ /dev/null
@@ -1,106 +0,0 @@
-
-
-
-
-
-
-
-
- setting up simple page master 'any'
-
-
-
-
-
- setting up simple page master 'first-page'
-
-
-
-
-
- setting up simple page master 'left-page'
-
-
-
-
-
- setting up simple page master 'right-page'
-
-
-
-
-
- setting up simple page master 'blank-page'
-
-
-
-
-
- setting up simple page master 'odd'
-
-
-
-
-
- setting up simple page master 'even'
-
-
-
-
-
- setting up simple page master 'rest'
-
-
-
-
-
-
- setting up page sequence master 'demo'
-
-
-
-
-
-
-
-
- setting up page sequence master 'omed'
-
-
-
-
-
-
-
-
-
-
- starting page sequence 'any'
-
-
-
-
-
-
-
-
- starting page sequence 'demo'
-
-
-
-
-
-
-
-
- starting page sequence 'omed'
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0641.fo b/tex/context/foxet/fo-0641.fo
deleted file mode 100644
index 9dbb90870..000000000
--- a/tex/context/foxet/fo-0641.fo
+++ /dev/null
@@ -1,25 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0642.fo b/tex/context/foxet/fo-0642.fo
deleted file mode 100644
index 8f646509c..000000000
--- a/tex/context/foxet/fo-0642.fo
+++ /dev/null
@@ -1,27 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0643.fo b/tex/context/foxet/fo-0643.fo
deleted file mode 100644
index 9595d4438..000000000
--- a/tex/context/foxet/fo-0643.fo
+++ /dev/null
@@ -1,27 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0644.fo b/tex/context/foxet/fo-0644.fo
deleted file mode 100644
index a408661e0..000000000
--- a/tex/context/foxet/fo-0644.fo
+++ /dev/null
@@ -1,27 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0650.fo b/tex/context/foxet/fo-0650.fo
deleted file mode 100644
index 54fd7c128..000000000
--- a/tex/context/foxet/fo-0650.fo
+++ /dev/null
@@ -1,26 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0651.fo b/tex/context/foxet/fo-0651.fo
deleted file mode 100644
index 319592ba8..000000000
--- a/tex/context/foxet/fo-0651.fo
+++ /dev/null
@@ -1,26 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0701.fo b/tex/context/foxet/fo-0701.fo
deleted file mode 100644
index fabbe0722..000000000
--- a/tex/context/foxet/fo-0701.fo
+++ /dev/null
@@ -1,39 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Test 1
-
-
- Test 2
-
-
- Test 3
-
-
- Test 4
-
-
- Test 5
-
-
- Test 6
-
-
- Test 7
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0801.fo b/tex/context/foxet/fo-0801.fo
deleted file mode 100644
index b18c4282f..000000000
--- a/tex/context/foxet/fo-0801.fo
+++ /dev/null
@@ -1,55 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- digits:
-
-
-
-
-
- characters:
-
-
-
-
-
- romannumerals:
-
-
-
-
-
- complex format:
-
-
-
-
-
- no format:
-
-
-
-
-
-
- digits:
- characters:
- romannumerals:
- complex format:
- no format:
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0901.fo b/tex/context/foxet/fo-0901.fo
deleted file mode 100644
index 05011269b..000000000
--- a/tex/context/foxet/fo-0901.fo
+++ /dev/null
@@ -1,58 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- times
- helvetica
- courier
-
-
- times
- helvetica
- courier
-
-
- times
- helvetica
- courier
-
-
- times
- helvetica
- courier
-
-
- large
- large
- large
-
-
- x-large
- x-large
- x-large
-
-
- xx-large
- xx-large
- xx-large
-
-
- times
- helvetica
- courier
-
-
-
-
-
diff --git a/tex/context/foxet/fo-0902.fo b/tex/context/foxet/fo-0902.fo
deleted file mode 100644
index ebaa06651..000000000
--- a/tex/context/foxet/fo-0902.fo
+++ /dev/null
@@ -1,33 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-1001.fo b/tex/context/foxet/fo-1001.fo
deleted file mode 100644
index 3733265fd..000000000
--- a/tex/context/foxet/fo-1001.fo
+++ /dev/null
@@ -1,63 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- a test line d 10pt
-
- a test line d 5pt
-
- a test line d 4pt
-
- a test line d d d
-
-
-
- a test line r d d
-
-
-
- a test line d r d
-
-
-
- a test line r r d
-
-
-
- a test line d d r
-
-
-
- a test line r d r
-
-
-
- a test line d r r
-
-
-
- a test line r r r
-
-
-
- a test line d r r force
-
-
-
- a test line
-
-
-
-
-
diff --git a/tex/context/foxet/fo-1002.fo b/tex/context/foxet/fo-1002.fo
deleted file mode 100644
index cc0ea8100..000000000
--- a/tex/context/foxet/fo-1002.fo
+++ /dev/null
@@ -1,31 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-1003.fo b/tex/context/foxet/fo-1003.fo
deleted file mode 100644
index 4c416e02d..000000000
--- a/tex/context/foxet/fo-1003.fo
+++ /dev/null
@@ -1,31 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-1004.fo b/tex/context/foxet/fo-1004.fo
deleted file mode 100644
index 1c057edd7..000000000
--- a/tex/context/foxet/fo-1004.fo
+++ /dev/null
@@ -1,35 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- beforeinbetweenafter
-
-
- beforeinbetweenafter
-
-
- beforeinbetweenafter
-
-
- beforeinbetweenafter
-
-
- beforeinbetweenafter
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-1101.fo b/tex/context/foxet/fo-1101.fo
deleted file mode 100644
index f6953d522..000000000
--- a/tex/context/foxet/fo-1101.fo
+++ /dev/null
@@ -1,63 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- [a]
-
-
-
-
-
-
-
- [b]
-
-
-
-
-
- [bb]
-
-
-
-
-
-
-
-
-
-
-
-
- [a]
-
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-1102.fo b/tex/context/foxet/fo-1102.fo
deleted file mode 100644
index f8d5a9df3..000000000
--- a/tex/context/foxet/fo-1102.fo
+++ /dev/null
@@ -1,128 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- [a]
-
-
-
-
-
-
-
- [a]
-
-
-
-
-
-
-
- [a]
-
-
-
-
-
-
-
- [a]
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-1103.fo b/tex/context/foxet/fo-1103.fo
deleted file mode 100644
index 245f3ff0e..000000000
--- a/tex/context/foxet/fo-1103.fo
+++ /dev/null
@@ -1,85 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- [a]
-
-
-
-
-
-
-
- [a]
-
-
-
-
-
-
-
- [a]
-
-
-
-
-
-
-
- [a]
-
-
-
-
-
-
-
- [a]
-
-
-
-
-
-
-
- [a]
-
-
-
-
-
-
-
- [a]
-
-
-
-
-
-
-
- [a]
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-1104.fo b/tex/context/foxet/fo-1104.fo
deleted file mode 100644
index 6867e772f..000000000
--- a/tex/context/foxet/fo-1104.fo
+++ /dev/null
@@ -1,28 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- X
-
-
-
-
-
-
-
-
-
-
diff --git a/tex/context/foxet/fo-1201.fo b/tex/context/foxet/fo-1201.fo
deleted file mode 100644
index 364dcb214..000000000
--- a/tex/context/foxet/fo-1201.fo
+++ /dev/null
@@ -1,40 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- test
- S
- S
- S
- F
- F
- F
- A
- A
- A
- R
- R
- R
- test
- Q
- Q
- Q
- Q
-
-
-
-
-
-
diff --git a/tex/context/interface/cont-cs.xml b/tex/context/interface/cont-cs.xml
index afaacb709..c798ea4a9 100644
--- a/tex/context/interface/cont-cs.xml
+++ b/tex/context/interface/cont-cs.xml
@@ -7188,6 +7188,7 @@
+
diff --git a/tex/context/interface/cont-de.xml b/tex/context/interface/cont-de.xml
index e9771d07a..f40927b45 100644
--- a/tex/context/interface/cont-de.xml
+++ b/tex/context/interface/cont-de.xml
@@ -7188,6 +7188,7 @@
+
diff --git a/tex/context/interface/cont-en.xml b/tex/context/interface/cont-en.xml
index f00215596..651292e7f 100644
--- a/tex/context/interface/cont-en.xml
+++ b/tex/context/interface/cont-en.xml
@@ -7188,6 +7188,7 @@
+
diff --git a/tex/context/interface/cont-fr.xml b/tex/context/interface/cont-fr.xml
index 1cd7fa33a..30a89c059 100644
--- a/tex/context/interface/cont-fr.xml
+++ b/tex/context/interface/cont-fr.xml
@@ -7188,6 +7188,7 @@
+
diff --git a/tex/context/interface/cont-it.xml b/tex/context/interface/cont-it.xml
index d9fe0ac97..32d19e87f 100644
--- a/tex/context/interface/cont-it.xml
+++ b/tex/context/interface/cont-it.xml
@@ -7188,6 +7188,7 @@
+
diff --git a/tex/context/interface/cont-nl.xml b/tex/context/interface/cont-nl.xml
index 685033f81..6c49baccd 100644
--- a/tex/context/interface/cont-nl.xml
+++ b/tex/context/interface/cont-nl.xml
@@ -6525,7 +6525,7 @@
-
+
@@ -7188,6 +7188,7 @@
+
@@ -8996,7 +8997,7 @@
-
+
diff --git a/tex/context/interface/cont-pe.xml b/tex/context/interface/cont-pe.xml
index 434a328e8..bf7d65fd0 100644
--- a/tex/context/interface/cont-pe.xml
+++ b/tex/context/interface/cont-pe.xml
@@ -7188,6 +7188,7 @@
+
diff --git a/tex/context/interface/cont-ro.xml b/tex/context/interface/cont-ro.xml
index 31ef5d920..b81c3bc7e 100644
--- a/tex/context/interface/cont-ro.xml
+++ b/tex/context/interface/cont-ro.xml
@@ -7188,6 +7188,7 @@
+
diff --git a/tex/context/interface/keys-cs.xml b/tex/context/interface/keys-cs.xml
index d36f969f3..c8e7e7517 100644
--- a/tex/context/interface/keys-cs.xml
+++ b/tex/context/interface/keys-cs.xml
@@ -183,6 +183,7 @@
+
@@ -304,6 +305,7 @@
+
@@ -363,6 +365,7 @@
+
@@ -422,7 +425,7 @@
-
+
@@ -657,7 +660,7 @@
-
+
@@ -692,7 +695,7 @@
-
+
@@ -707,6 +710,7 @@
+
@@ -758,6 +762,7 @@
+
@@ -863,6 +868,7 @@
+
@@ -1051,6 +1057,8 @@
+
+
@@ -1290,6 +1298,7 @@
+
@@ -1450,7 +1459,6 @@
-
@@ -1460,11 +1468,13 @@
+
+
@@ -1611,7 +1621,6 @@
-
@@ -1629,6 +1638,7 @@
+
@@ -1675,6 +1685,7 @@
+
@@ -1701,6 +1712,7 @@
+
@@ -1715,6 +1727,7 @@
+
@@ -1740,6 +1753,7 @@
+
diff --git a/tex/context/interface/keys-de.xml b/tex/context/interface/keys-de.xml
index c5ba364e3..a100a938f 100644
--- a/tex/context/interface/keys-de.xml
+++ b/tex/context/interface/keys-de.xml
@@ -183,6 +183,7 @@
+
@@ -304,6 +305,7 @@
+
@@ -363,6 +365,7 @@
+
@@ -422,7 +425,7 @@
-
+
@@ -657,7 +660,7 @@
-
+
@@ -692,7 +695,7 @@
-
+
@@ -707,6 +710,7 @@
+
@@ -758,6 +762,7 @@
+
@@ -863,6 +868,7 @@
+
@@ -1051,6 +1057,8 @@
+
+
@@ -1290,6 +1298,7 @@
+
@@ -1450,7 +1459,6 @@
-
@@ -1460,11 +1468,13 @@
+
+
@@ -1611,7 +1621,6 @@
-
@@ -1629,6 +1638,7 @@
+
@@ -1675,6 +1685,7 @@
+
@@ -1701,6 +1712,7 @@
+
@@ -1715,6 +1727,7 @@
+
@@ -1740,6 +1753,7 @@
+
diff --git a/tex/context/interface/keys-en.xml b/tex/context/interface/keys-en.xml
index be59542e7..0e51dfc7c 100644
--- a/tex/context/interface/keys-en.xml
+++ b/tex/context/interface/keys-en.xml
@@ -183,6 +183,7 @@
+
@@ -304,6 +305,7 @@
+
@@ -363,6 +365,7 @@
+
@@ -657,7 +660,7 @@
-
+
@@ -692,7 +695,7 @@
-
+
@@ -707,6 +710,7 @@
+
@@ -758,6 +762,7 @@
+
@@ -863,6 +868,7 @@
+
@@ -1051,6 +1057,8 @@
+
+
@@ -1290,6 +1298,7 @@
+
@@ -1450,7 +1459,6 @@
-
@@ -1460,11 +1468,13 @@
+
+
@@ -1611,7 +1621,6 @@
-
@@ -1629,6 +1638,7 @@
+
@@ -1675,6 +1685,7 @@
+
@@ -1701,6 +1712,7 @@
+
@@ -1715,6 +1727,7 @@
+
@@ -1740,6 +1753,7 @@
+
diff --git a/tex/context/interface/keys-fr.xml b/tex/context/interface/keys-fr.xml
index 43c47d578..cd35ad7e1 100644
--- a/tex/context/interface/keys-fr.xml
+++ b/tex/context/interface/keys-fr.xml
@@ -183,6 +183,7 @@
+
@@ -304,6 +305,7 @@
+
@@ -363,6 +365,7 @@
+
@@ -422,7 +425,7 @@
-
+
@@ -657,7 +660,7 @@
-
+
@@ -692,7 +695,7 @@
-
+
@@ -707,6 +710,7 @@
+
@@ -758,6 +762,7 @@
+
@@ -863,6 +868,7 @@
+
@@ -1007,7 +1013,7 @@
-
+
@@ -1051,6 +1057,8 @@
+
+
@@ -1290,6 +1298,7 @@
+
@@ -1450,7 +1459,6 @@
-
@@ -1460,11 +1468,13 @@
+
+
@@ -1611,7 +1621,6 @@
-
@@ -1629,6 +1638,7 @@
+
@@ -1675,6 +1685,7 @@
+
@@ -1701,6 +1712,7 @@
+
@@ -1715,6 +1727,7 @@
+
@@ -1740,6 +1753,7 @@
+
diff --git a/tex/context/interface/keys-it.xml b/tex/context/interface/keys-it.xml
index 95c2d8aa5..f07dbb5e6 100644
--- a/tex/context/interface/keys-it.xml
+++ b/tex/context/interface/keys-it.xml
@@ -183,6 +183,7 @@
+
@@ -304,6 +305,7 @@
+
@@ -363,6 +365,7 @@
+
@@ -422,7 +425,7 @@
-
+
@@ -657,7 +660,7 @@
-
+
@@ -692,7 +695,7 @@
-
+
@@ -707,6 +710,7 @@
+
@@ -758,6 +762,7 @@
+
@@ -839,7 +844,7 @@
-
+
@@ -863,6 +868,7 @@
+
@@ -1051,6 +1057,8 @@
+
+
@@ -1290,6 +1298,7 @@
+
@@ -1450,7 +1459,6 @@
-
@@ -1460,11 +1468,13 @@
+
+
@@ -1611,7 +1621,6 @@
-
@@ -1629,6 +1638,7 @@
+
@@ -1675,6 +1685,7 @@
+
@@ -1701,6 +1712,7 @@
+
@@ -1715,6 +1727,7 @@
+
@@ -1740,6 +1753,7 @@
+
diff --git a/tex/context/interface/keys-nl.xml b/tex/context/interface/keys-nl.xml
index bc940ebc4..f32d79275 100644
--- a/tex/context/interface/keys-nl.xml
+++ b/tex/context/interface/keys-nl.xml
@@ -183,6 +183,7 @@
+
@@ -290,20 +291,21 @@
-
-
-
+
+
+
-
+
+
@@ -363,6 +365,7 @@
+
@@ -422,7 +425,7 @@
-
+
@@ -657,7 +660,7 @@
-
+
@@ -692,7 +695,7 @@
-
+
@@ -707,6 +710,7 @@
+
@@ -758,6 +762,7 @@
+
@@ -863,6 +868,7 @@
+
@@ -950,7 +956,7 @@
-
+
@@ -1051,6 +1057,8 @@
+
+
@@ -1098,7 +1106,7 @@
-
+
@@ -1290,6 +1298,7 @@
+
@@ -1450,7 +1459,6 @@
-
@@ -1460,11 +1468,13 @@
+
+
@@ -1611,7 +1621,6 @@
-
@@ -1629,6 +1638,7 @@
+
@@ -1675,6 +1685,7 @@
+
@@ -1701,6 +1712,7 @@
+
@@ -1715,6 +1727,7 @@
+
@@ -1740,6 +1753,7 @@
+
diff --git a/tex/context/interface/keys-pe.xml b/tex/context/interface/keys-pe.xml
index 75e3a17c2..a55ad78ce 100644
--- a/tex/context/interface/keys-pe.xml
+++ b/tex/context/interface/keys-pe.xml
@@ -183,6 +183,7 @@
+
@@ -304,6 +305,7 @@
+
@@ -363,6 +365,7 @@
+
@@ -422,7 +425,7 @@
-
+
@@ -657,7 +660,7 @@
-
+
@@ -692,7 +695,7 @@
-
+
@@ -707,6 +710,7 @@
+
@@ -758,6 +762,7 @@
+
@@ -863,6 +868,7 @@
+
@@ -1007,7 +1013,7 @@
-
+
@@ -1051,6 +1057,8 @@
+
+
@@ -1290,6 +1298,7 @@
+
@@ -1450,7 +1459,6 @@
-
@@ -1460,11 +1468,13 @@
+
+
@@ -1611,7 +1621,6 @@
-
@@ -1629,6 +1638,7 @@
+
@@ -1675,6 +1685,7 @@
+
@@ -1701,6 +1712,7 @@
+
@@ -1715,6 +1727,7 @@
+
@@ -1740,6 +1753,7 @@
+
diff --git a/tex/context/interface/keys-ro.xml b/tex/context/interface/keys-ro.xml
index e83d145d0..951a5e8c9 100644
--- a/tex/context/interface/keys-ro.xml
+++ b/tex/context/interface/keys-ro.xml
@@ -183,6 +183,7 @@
+
@@ -304,6 +305,7 @@
+
@@ -363,6 +365,7 @@
+
@@ -422,7 +425,7 @@
-
+
@@ -657,7 +660,7 @@
-
+
@@ -692,7 +695,7 @@
-
+
@@ -707,6 +710,7 @@
+
@@ -758,6 +762,7 @@
+
@@ -863,6 +868,7 @@
+
@@ -1051,6 +1057,8 @@
+
+
@@ -1290,6 +1298,7 @@
+
@@ -1450,7 +1459,6 @@
-
@@ -1460,11 +1468,13 @@
+
+
@@ -1611,7 +1621,6 @@
-
@@ -1629,6 +1638,7 @@
+
@@ -1675,6 +1685,7 @@
+
@@ -1701,6 +1712,7 @@
+
@@ -1715,6 +1727,7 @@
+
@@ -1740,6 +1753,7 @@
+
diff --git a/tex/context/patterns/lang-it.lua b/tex/context/patterns/lang-it.lua
index 20ab48fbf..fb6a9d893 100644
--- a/tex/context/patterns/lang-it.lua
+++ b/tex/context/patterns/lang-it.lua
@@ -38,7 +38,7 @@ return {
%\
% This work consists of the single file hyph-it.tex.\
%\
-% \\versionnumber{4.8i} \\versiondate{2011/08/16}\
+% \\versionnumber{4.9} \\versiondate{2014/04/22}\
%\
% These hyphenation patterns for the Italian language are supposed to comply\
% with the Recommendation UNI 6461 on hyphenation issued by the Italian\
@@ -47,6 +47,7 @@ return {
% liability is disclaimed.\
%\
% ChangeLog:\
+% - 2014-04-22 - Add a few patterns involving `h'\
% - 2011-08-16 - Change the licence from GNU LGPL into LPPL v1.3.\
% - 2010-05-24 - Fix for Italian patterns for proper hyphenation of -ich and Ljubljana.\
% - 2008-06-09 - Import of original ithyph.tex into hyph-utf8 package.\
@@ -56,11 +57,11 @@ return {
},
["patterns"]={
["characters"]="'abcdefghijklmnopqrstuvwxyz’",
- ["data"]=".a3p2n .anti1 .anti3m2n .bio1 .ca4p3s .circu2m1 .contro1 .di2s3cine .e2x1eu .fran2k3 .free3 .li3p2sa .narco1 .opto1 .orto3p2 .para1 .poli3p2 .pre1 .p2s .re1i2scr .sha2re3 .tran2s3c .tran2s3d .tran2s3l .tran2s3n .tran2s3p .tran2s3r .tran2s3t .su2b3lu .su2b3r .wa2g3n .wel2t1 2'2 2’2 a1ia a1ie a1io a1iu a1uo a1ya 2at. e1iu e2w o1ia o1ie o1io o1iu 1b 2bb 2bc 2bd 2bf 2bm 2bn 2bp 2bs 2bt 2bv b2l b2r 2b. 2b' 2b’ 1c 2cb 2cc 2cd 2cf 2ck 2cm 2cn 2cq 2cs 2ct 2cz 2chh c2h 2ch. 2ch'. 2ch’. 2ch''. 2ch’’. 2chb ch2r 2chn c2l c2r 2c. 2c' 2c’ .c2 1d 2db 2dd 2dg 2dl 2dm 2dn 2dp d2r 2ds 2dt 2dv 2dw 2d. 2d' 2d’ .d2 1f 2fb 2fg 2ff 2fn f2l f2r 2fs 2ft 2f. 2f' 2f’ 1g 2gb 2gd 2gf 2gg g2h g2l 2gm g2n 2gp g2r 2gs 2gt 2gv 2gw 2gz 2gh2t 2g. 2g' 2g’ 1h 2hb 2hd 2hh hi3p2n h2l 2hm 2hn 2hr 2hv 2h. 2h' 2h’ 1j 2j. 2j' 2j’ 1k 2kg 2kf k2h 2kk k2l 2km k2r 2ks 2kt 2k. 2k' 2k’ 1l 2lb 2lc 2ld 2l3f2 2lg l2h l2j 2lk 2ll 2lm 2ln 2lp 2lq 2lr 2ls 2lt 2lv 2lw 2lz 2l. 2l'. 2l’. 2l'' 2l’’ 1m 2mb 2mc 2mf 2ml 2mm 2mn 2mp 2mq 2mr 2ms 2mt 2mv 2mw 2m. 2m' 2m’ 1n 2nb 2nc 2nd 2nf 2ng 2nk 2nl 2nm 2nn 2np 2nq 2nr 2ns n2s3fer 2nt 2nv 2nz n2g3n 2nheit 2n. 2n' 2n’ 1p 2pd p2h p2l 2pn 3p2ne 2pp p2r 2ps 3p2sic 2pt 2pz 2p. 2p' 2p’ 1q 2qq 2q. 2q' 2q’ 1r 2rb 2rc 2rd 2rf r2h 2rg 2rk 2rl 2rm 2rn 2rp 2rq 2rr 2rs 2rt r2t2s3 2rv 2rx 2rw 2rz 2r. 2r' 2r’ 1s2 2shm 2sh. 2sh' 2sh’ 2s3s s4s3m 2s3p2n 2stb 2stc 2std 2stf 2stg 2stm 2stn 2stp 2sts 2stt 2stv 2sz 4s. 4s'. 4s’. 4s'' 4s’’ 1t 2tb 2tc 2td 2tf 2tg t2h t2l 2tm 2tn 2tp t2r t2s 3t2sch 2tt t2t3s 2tv 2tw t2z 2tzk tz2s 2t. 2t'. 2t’. 2t'' 2t’’ 1v 2vc v2l v2r 2vv 2v. 2v'. 2v’. 2v'' 2v’’ 1w w2h wa2r 2w1y 2w. 2w' 2w’ 1x 2xb 2xc 2xf 2xh 2xm 2xp 2xt 2xw 2x. 2x' 2x’ y1ou y1i 1z 2zb 2zd 2zl 2zn 2zp 2zt 2zs 2zv 2zz 2z. 2z'. 2z’. 2z'' 2z’’ .z2",
- ["length"]=1806,
+ ["data"]=".a3p2n .anti1 .anti3m2n .bio1 .ca4p3s .circu2m1 .contro1 .di2s3cine .e2x1eu .fran2k3 .free3 .li3p2sa .narco1 .opto1 .orto3p2 .para1 .ph2l .ph2r .poli3p2 .pre1 .p2s .re1i2scr .sha2re3 .tran2s3c .tran2s3d .tran2s3l .tran2s3n .tran2s3p .tran2s3r .tran2s3t .su2b3lu .su2b3r .wa2g3n .wel2t1 2'2 2’2 a1ia a1ie a1io a1iu a1uo a1ya 2at. e1iu e2w o1ia o1ie o1io o1iu 1b 2bb 2bc 2bd 2bf 2bm 2bn 2bp 2bs 2bt 2bv b2l b2r 2b. 2b' 2b’ 1c 2cb 2cc 2cd 2cf 2ck 2cm 2cn 2cq 2cs 2ct 2cz 2chh c2h 2ch. 2ch'. 2ch’. 2ch''. 2ch’’. 2chb ch2r 2chn c2l c2r 2c. 2c' 2c’ .c2 1d 2db 2dd 2dg 2dl 2dm 2dn 2dp d2r 2ds 2dt 2dv 2dw 2d. 2d' 2d’ .d2 1f 2fb 2fg 2ff 2fn f2l f2r 2fs 2ft 2f. 2f' 2f’ 1g 2gb 2gd 2gf 2gg g2h g2l 2gm g2n 2gp g2r 2gs 2gt 2gv 2gw 2gz 2gh2t 2g. 2g' 2g’ .h2 1h 2hb 2hd 2hh hi3p2n h2l 2hm 2hn 2hr 2hv 2h. 2h' 2h’ .j2 1j 2j. 2j' 2j’ .k2 1k 2kg 2kf k2h 2kk k2l 2km k2r 2ks 2kt 2k. 2k' 2k’ 1l 2lb 2lc 2ld 2l3f2 2lg l2h l2j 2lk 2ll 2lm 2ln 2lp 2lq 2lr 2ls 2lt 2lv 2lw 2lz 2l. 2l'. 2l’. 2l'' 2l’’ 1m 2mb 2mc 2mf 2ml 2mm 2mn 2mp 2mq 2mr 2ms 2mt 2mv 2mw 2m. 2m' 2m’ 1n 2nb 2nc 2nd 2nf 2ng 2nk 2nl 2nm 2nn 2np 2nq 2nr 2ns n2s3fer 2nt 2nv 2nz n2g3n 2nheit 2n. 2n' 2n’ 1p 2pd p2h p2l 2pn 3p2ne 2pp p2r 2ps 3p2sic 2pt 2pz 2p. 2p' 2p’ 1q 2qq 2q. 2q' 2q’ 1r 2rb 2rc 2rd 2rf r2h 2rg 2rk 2rl 2rm 2rn 2rp 2rq 2rr 2rs 2rt r2t2s3 2rv 2rx 2rw 2rz 2r. 2r' 2r’ 1s2 2shm 2sh. 2sh' 2sh’ 2s3s s4s3m 2s3p2n 2stb 2stc 2std 2stf 2stg 2stm 2stn 2stp 2sts 2stt 2stv 2sz 4s. 4s'. 4s’. 4s'' 4s’’ .t2 1t 2tb 2tc 2td 2tf 2tg t2h 2th. t2l 2tm 2tn 2tp t2r t2s 3t2sch 2tt t2t3s 2tv 2tw t2z 2tzk tz2s 2t. 2t'. 2t’. 2t'' 2t’’ 1v 2vc v2l v2r 2vv 2v. 2v'. 2v’. 2v'' 2v’’ 1w w2h wa2r 2w1y 2w. 2w' 2w’ 1x 2xb 2xc 2xf 2xh 2xm 2xp 2xt 2xw 2x. 2x' 2x’ y1ou y1i 1z 2zb 2zd 2zl 2zn 2zp 2zt 2zs 2zv 2zz 2z. 2z'. 2z’. 2z'' 2z’’ .z2",
+ ["length"]=1839,
["minhyphenmax"]=1,
["minhyphenmin"]=1,
- ["n"]=377,
+ ["n"]=384,
},
["version"]="1.001",
}
\ No newline at end of file
diff --git a/tex/context/patterns/lang-it.pat b/tex/context/patterns/lang-it.pat
index 78a127aa7..12a9edf33 100644
--- a/tex/context/patterns/lang-it.pat
+++ b/tex/context/patterns/lang-it.pat
@@ -21,6 +21,8 @@
.opto1
.orto3p2
.para1
+.ph2l
+.ph2r
.poli3p2
.pre1
.p2s
@@ -137,6 +139,7 @@ g2r
2gh2t
2g.
2g'
+.h2
1h
2hb
2hd
@@ -149,9 +152,11 @@ h2l
2hv
2h.
2h'
+.j2
1j
2j.
2j'
+.k2
1k
2kg
2kf
@@ -288,6 +293,7 @@ s4s3m
4s.
4s'.
4s''
+.t2
1t
2tb
2tc
@@ -295,6 +301,7 @@ s4s3m
2tf
2tg
t2h
+2th.
t2l
2tm
2tn
diff --git a/tex/context/patterns/lang-it.rme b/tex/context/patterns/lang-it.rme
index 6cfe6896a..2a2fb60d5 100644
--- a/tex/context/patterns/lang-it.rme
+++ b/tex/context/patterns/lang-it.rme
@@ -32,7 +32,7 @@ Italian hyphenation patterns
%
% This work consists of the single file hyph-it.tex.
%
-% \versionnumber{4.8i} \versiondate{2011/08/16}
+% \versionnumber{4.9} \versiondate{2014/04/22}
%
% These hyphenation patterns for the Italian language are supposed to comply
% with the Recommendation UNI 6461 on hyphenation issued by the Italian
@@ -41,6 +41,7 @@ Italian hyphenation patterns
% liability is disclaimed.
%
% ChangeLog:
+% - 2014-04-22 - Add a few patterns involving `h'
% - 2011-08-16 - Change the licence from GNU LGPL into LPPL v1.3.
% - 2010-05-24 - Fix for Italian patterns for proper hyphenation of -ich and Ljubljana.
% - 2008-06-09 - Import of original ithyph.tex into hyph-utf8 package.
diff --git a/tex/context/sample/cervantes-es.tex b/tex/context/sample/cervantes-es.tex
new file mode 100644
index 000000000..153797023
--- /dev/null
+++ b/tex/context/sample/cervantes-es.tex
@@ -0,0 +1,6 @@
+En un lugar de la Mancha, de cuyo nombre no quiero acordarme, no ha
+mucho tiempo que vivía un hidalgo de los de lanza en astillero, adarga
+antigua, rocín flaco y galgo corredor. Una olla de algo más vaca que
+carnero, salpicón las más noches, duelos y quebrantos los sábados,
+lantejas los viernes, algún palomino de añadidura los domingos,
+consumían las tres partes de su hacienda.
diff --git a/tex/context/sample/darwin.tex b/tex/context/sample/darwin.tex
new file mode 100644
index 000000000..6425bf156
--- /dev/null
+++ b/tex/context/sample/darwin.tex
@@ -0,0 +1,19 @@
+It is interesting to contemplate an entangled bank, clothed with many
+plants of many kinds, with birds singing on the bushes, with various
+insects flitting about, and with worms crawling through the damp earth,
+and to reflect that these elaborately constructed forms, so different
+from each other, and dependent on each other in so complex a manner,
+have all been produced by laws acting around us. These laws, taken in
+the largest sense, being Growth with Reproduction; Inheritance which is
+almost implied by reproduction; Variability from the indirect and
+direct action of the external conditions of life, and from use and
+disuse; a Ratio of Increase so high as to lead to a Struggle for Life,
+and as a consequence to Natural Selection, entailing Divergence of
+Character and the Extinction of less-improved forms. Thus, from the war
+of nature, from famine and death, the most exalted object which we are
+capable of conceiving, namely, the production of the higher animals,
+directly follows. There is grandeur in this view of life, with its
+several powers, having been originally breathed into a few forms or
+into one; and that, whilst this planet has gone cycling on according to
+the fixed law of gravity, from so simple a beginning endless forms most
+beautiful and most wonderful have been, and are being, evolved.
diff --git a/tex/context/sample/dawkins.tex b/tex/context/sample/dawkins.tex
index 3490b79b0..c0acd3157 100644
--- a/tex/context/sample/dawkins.tex
+++ b/tex/context/sample/dawkins.tex
@@ -8,13 +8,13 @@ individual out. \quotation {Let's first establish,} I said,
right half of the lecture hall.} I invited everybody to
stand up while my assistant tossed a coin. Everybody on the
left of the hall was asked to \quote {will} the coin to
-come down head. Everybody on the right had to will it to be
+come down heads. Everybody on the right had to will it to be
tails. Obviously one side had to lose, and they were asked
-to sit down. Then those who remained were divided into two,
+to sit down. Then those that remained were divided into two,
with half \quote {willing} heads and the other half tails.
Again the losers sat down. And so on by successive halvings
until, inevitably, after seven or eight tosses, one
individual was left standing. \quotation {A big round of
applause for our psychic.} He must be psychic, mustn't he,
because he successfully influenced the coin eight times in
-a row?
\ No newline at end of file
+a row?
diff --git a/tex/context/sample/douglas.tex b/tex/context/sample/douglas.tex
index 7d986d484..838c6d24d 100644
--- a/tex/context/sample/douglas.tex
+++ b/tex/context/sample/douglas.tex
@@ -1,18 +1,18 @@
Donald Knuth has spent the past several years working on a
system allowing him to control many aspects of the design
-of his forthcoming books, from the typesetting and layout
+of his forthcoming books|=|from the typesetting and layout
down to the very shapes of the letters! Seldom has an
author had anything remotely like this power to control the
final appearance of his or her work. Knuth's \TEX\
-typesetting system has become well|-|known and available in
+typesetting system has become well|-|known and as available in
many countries around the world. By contrast, his
\METAFONT\ system for designing families of typefaces has
-not become as well known or available.
+not become as well known or as available.
In his article \quotation {The Concept of a Meta|-|Font},
Knuth sets forth for the first time the underlying
philosophy of \METAFONT, as well as some of its products.
-Not only is the concept exiting and clearly well executed,
+Not only is the concept exciting and clearly well executed,
but in my opinion the article is charmingly written as well.
However, despite my overall enthusiasm for Knuth's idea and
article, there are some points in it that I feel might be
diff --git a/tex/context/sample/quevedo-es.tex b/tex/context/sample/quevedo-es.tex
new file mode 100644
index 000000000..166b0328f
--- /dev/null
+++ b/tex/context/sample/quevedo-es.tex
@@ -0,0 +1,19 @@
+\startlines
+Un soneto me manda hacer Violante
+que en mi vida me he visto en tanto aprieto;
+catorce versos dicen que es soneto;
+burla burlando van los tres delante.
+
+Yo pensé que no hallara consonante,
+y estoy a la mitad de otro cuarteto;
+mas si me veo en el primer terceto,
+no hay cosa en los cuartetos que me espante.
+
+Por el primer terceto voy entrando,
+y parece que entré con pie derecho,
+pues fin con este verso le voy dando.
+
+Ya estoy en el segundo, y aun sospecho
+que voy los trece versos acabando;
+contad si son catorce, y está hecho.
+\stoplines
diff --git a/tex/context/sample/sample.tex b/tex/context/sample/sample.tex
index 5f97ea009..ef31153a1 100644
--- a/tex/context/sample/sample.tex
+++ b/tex/context/sample/sample.tex
@@ -1,75 +1,21 @@
-\starttext
-
-The sample directory contains a few files with quotes that can be used
-while testing styles.
-
-I'll complete this file when I've reorganized my books and audio cd's.
-
-If someone makes a nice bibtex file of these, the quotes can also be
-used in testing bibliographic references and citations.
-
-\starttabulate[|l|l|p|]
-\NC \bf file \NC \bf author \NC \bf source \NC \NR
-\HL
-%NC stork.tex \NC David F. Stork \NC \NC \NR
-\NC knuth.tex \NC Donald E. Knuth \NC \NC \NR
-\NC tufte.tex \NC Edward R. Tufte \NC \NC \NR
-\NC reich.tex \NC Steve Reich \NC \NC \NR
-\NC materie.tex \NC Louis Andriessen \NC De Materie \NC \NR
-\NC douglas.tex \NC Douglas R. Hofstadter \NC \NC \NR
-\NC dawkins.tex \NC Dawkins \NC \NC \NR
-\NC ward.tex \NC Peter D. Ward \NC The Life and Death of Planet Earth \NC \NR
-\NC zapf.tex \NC Hermann Zapf \NC About micro-typography and the hz-program, \endgraf
- Electronic Publishing, vol. 6(3), \endgraf
- 283-288 (September 1993) \NC \NR
-\NC bryson.tex \NC Bill Bryson \NC A Short History of Nearly Everything, \endgraf
- Random House, 2003 \NC \NR
-\NC davis.tex \NC Kenneth C. Davis \NC Don't Know Much About History, \endgraf
- Everything You Need to Know About American
- History but Never Learned, \endgraf
- HarperCollins, 2003 \NC \NR
-\NC thuan.tex \NC Trinh Xuan Thuan \NC Chaos and Harmony, Perspectives on Scientific
- Revolutions of the Twentieth Century, \endgraf
- Oxford University Press, 2001 \NC \NR
-\NC hawking.tex \NC Steve W. Hawking \NC The Universe in a Nutshell, Bantam Books
- Random House, 2001 \NC \NR
-\NC linden.tex \NC Eugene Linden \NC The Winds of Change, Climate, Weather, and the
- Destruction of Civilizations, \endgraf
- Simon \& Schuster, 2006, p.106 \NC \NR
-\NC weisman.tex \NC Alan Weisman \NC The World Without Us, \endgraf
- Thomas Dunne Books, 2007, p.160 \NC \NR
-\NC montgomery.tex \NC David R Montgomery \NC Dirt, The Erosion of Civilizations, \endgraf
- University of California Press, 2007, p.199 \NC \NR
-\NC carrol.tex \NC Sean B. Carrol \NC The Making of the Fittest, \endgraf
- Quercus, London, 2006 \NC \NR
-%NC schwarzenegger.tex \NC Arnold Schwarzenegger \NC Several place on the World Wide Web. \NC \NR
-\stoptabulate
+% this original file has been renamed to samples.tex and describes the
+% short quotes that can be used in test files
-% Tufte: This quote will always produce hyphenated text, apart from the content,
-% it's a pretty good test case for protruding.
-
-% Ward: I should find a quote in the extremely well written Rare Earth as well. All Wards
-% books excell.
-
-% A Short History of Nearly Everything: I wish that I had the memory to remember this book
-% verbatim.
-
-% Chaos and Harmony: very nice and well written book, but the typography is rather bad:
-% quite visible inter-character spacing in a text that can be typeset quite well by \TeX.
+\starttext
-% The Universe in a Nutshell: a beautiful designed book, (companion of A Short History
-% of Time).
+see \type {samples.tex}
-% The World Without Us: A properly typeset, very readable book. Read it and you'll look at
-% the world around you differently (and a bit more freightened).
+\blank
-% Dirt, The Erosion of Civilizations: one of those books that you buy immediately after
-% reading a few sentences. Also one of those books that every politician should read.
+\startluacode
+ context("see \\type {samples.tex}")
+\stopluacode
-% The Making of the Fittest: nice sample for color ans subsentence testing. A very
-% readable book but unfortunately it has inter-character spacing.
+\blank
-% The Schwarzenegger letter was originally typeset at a width equivalent to 16.1cm in
-% a default ConTeXt setup.
+\startMPcode
+ draw textext("see \type {samples.tex}") ;
+ draw boundingbox currentpicture enlarged 2pt ;
+\stopMPcode
\stoptext
diff --git a/tex/context/sample/samples.bib b/tex/context/sample/samples.bib
new file mode 100644
index 000000000..e54092aae
--- /dev/null
+++ b/tex/context/sample/samples.bib
@@ -0,0 +1,29 @@
+% The following entries were provided by Rik Kabel:
+
+@book{dawkins2000unweaving,
+ title = {Unweaving the Rainbow: Science, Delusion and the Appetite for Wonder},
+ author = {Dawkins, R.},
+ isbn = {9780547347356},
+ url = {http://books.google.com/books?id=ZudTchiioUoC},
+ year = {2000},
+ publisher = {Houghton Mifflin Harcourt},
+ pages = {145--146},
+}
+
+@book{Hofstadter:1985:MTQ:537101,
+ author = {Hofstadter, Douglas R.},
+ title = {Metamagical Themas: Questing for the Essence of Mind and Pattern},
+ year = {1985},
+ isbn = {0465045405},
+ publisher = {Basic Books, Inc.},
+ address = {New York, NY, USA},
+}
+
+@book{Tufte:1990:EI:78223,
+ author = {Tufte, Edward},
+ title = {Envisioning Information},
+ year = {1990},
+ isbn = {0-9613921-1-8},
+ publisher = {Graphics Press},
+ address = {Cheshire, CT, USA},
+}
diff --git a/tex/context/sample/samples.tex b/tex/context/sample/samples.tex
new file mode 100644
index 000000000..6e217a592
--- /dev/null
+++ b/tex/context/sample/samples.tex
@@ -0,0 +1,78 @@
+\starttext
+
+The sample directory contains a few files with quotes that can be used
+while testing styles.
+
+I'll complete this file when I've reorganized my books and audio CDs.
+
+If someone makes a nice bibtex file of these, the quotes can also be
+used in testing bibliographic references and citations.
+
+\starttabulate[|l|l|p|]
+\NC \bf file \NC \bf author \NC \bf source \NC \NR
+\HL
+%NC stork.tex \NC David F. Stork \NC \NC \NR
+\NC knuth.tex \NC Donald E. Knuth \NC \NC \NR
+\NC tufte.tex \NC Edward R. Tufte \NC \NC \NR
+\NC reich.tex \NC Steve Reich \NC City Life (1995) \NC \NR
+\NC materie.tex \NC Louis Andriessen \NC De Materie \NC \NR
+\NC douglas.tex \NC Douglas R. Hofstadter \NC \NC \NR
+\NC dawkins.tex \NC Dawkins \NC \NC \NR
+\NC ward.tex \NC Peter D. Ward \NC The Life and Death of Planet Earth \NC \NR
+\NC zapf.tex \NC Hermann Zapf \NC About micro-typography and the hz-program, \endgraf
+ Electronic Publishing, vol. 6(3), \endgraf
+ 283-288 (September 1993) \NC \NR
+\NC bryson.tex \NC Bill Bryson \NC A Short History of Nearly Everything, \endgraf
+ Random House, 2003 \NC \NR
+\NC davis.tex \NC Kenneth C. Davis \NC Don't Know Much About History, \endgraf
+ Everything You Need to Know About American
+ History but Never Learned, \endgraf
+ HarperCollins, 2003 \NC \NR
+\NC thuan.tex \NC Trinh Xuan Thuan \NC Chaos and Harmony, Perspectives on Scientific
+ Revolutions of the Twentieth Century, \endgraf
+ Oxford University Press, 2001 \NC \NR
+\NC hawking.tex       \NC Stephen W. Hawking     \NC The Universe in a Nutshell, Bantam Books
+ Random House, 2001 \NC \NR
+\NC linden.tex \NC Eugene Linden \NC The Winds of Change, Climate, Weather, and the
+ Destruction of Civilizations, \endgraf
+ Simon \& Schuster, 2006, p.106 \NC \NR
+\NC weisman.tex \NC Alan Weisman \NC The World Without Us, \endgraf
+ Thomas Dunne Books, 2007, p.160 \NC \NR
+\NC montgomery.tex    \NC David R. Montgomery   \NC Dirt, The Erosion of Civilizations, \endgraf
+ University of California Press, 2007, p.199 \NC \NR
+\NC carrol.tex        \NC Sean B. Carroll       \NC The Making of the Fittest, \endgraf
+ Quercus, London, 2006 \NC \NR
+%NC jojomayer.tex \NC Jojo Mayer \NC Between Zero & One, www.youtube.com/watch?v=mSj298iBjBY \NC \NR
+%NC schwarzenegger.tex \NC Arnold Schwarzenegger \NC Several places on the World Wide Web. \NC \NR
+\stoptabulate
+
+% Tufte: This quote will always produce hyphenated text, apart from the content,
+% it's a pretty good test case for protruding.
+
+% Reich: This is a list of the voice samples from Steve Reich's 1995 composition City Life.
+
+% Ward: I should find a quote in the extremely well written Rare Earth as well. All Ward's
+% books excel.
+
+% A Short History of Nearly Everything: I wish that I had the memory to remember this book
+% verbatim.
+
+% Chaos and Harmony: very nice and well written book, but the typography is rather bad:
+% quite visible inter-character spacing in a text that can be typeset quite well by \TeX.
+
+% The Universe in a Nutshell: a beautifully designed book (companion of A Brief History
+% of Time).
+
+% The World Without Us: A properly typeset, very readable book. Read it and you'll look at
+% the world around you differently (and a bit more frightened).
+
+% Dirt, The Erosion of Civilizations: one of those books that you buy immediately after
+% reading a few sentences. Also one of those books that every politician should read.
+
+% The Making of the Fittest: nice sample for color and subsentence testing. A very
+% readable book but unfortunately it has inter-character spacing.
+
+% The Schwarzenegger letter was originally typeset at a width equivalent to 16.1cm in
+% a default ConTeXt setup.
+
+\stoptext
diff --git a/tex/context/test/pdf-a1b-2005.mkiv b/tex/context/test/pdf-a1b-2005.mkiv
index f980e3148..bc970c3f9 100644
--- a/tex/context/test/pdf-a1b-2005.mkiv
+++ b/tex/context/test/pdf-a1b-2005.mkiv
@@ -1,9 +1,9 @@
% PDF/A-1b:2005
-\enabletrackers[structure.tags,backend.tags]
+\enabletrackers[structure.tags,backend.tags,backend.xmp]
\setupbackend
- [format=PDF/A-1a:2005,
+ [format=PDF/A-1b:2005,
intent=sRGB IEC61966-2.1, % use entry here; otherwise problems with predefined default profile
profile=sRGB.icc, % use here
level=0]
@@ -20,8 +20,6 @@
Text is needed, otherwise tagging base entries are not applied.
-\stopchapter
-
\stoptextcolor
%\startTEXpage
diff --git a/tex/generic/context/luatex/luatex-basics-gen.lua b/tex/generic/context/luatex/luatex-basics-gen.lua
index 9cf5b9317..c4d653604 100644
--- a/tex/generic/context/luatex/luatex-basics-gen.lua
+++ b/tex/generic/context/luatex/luatex-basics-gen.lua
@@ -15,8 +15,13 @@ local dummyfunction = function()
end
local dummyreporter = function(c)
- return function(...)
- (texio.reporter or texio.write_nl)(c .. " : " .. string.formatters(...))
+ return function(f,...)
+ local r = texio.reporter or texio.write_nl
+ if f then
+ r(c .. " : " .. string.formatters(f,...))
+ else
+ r("")
+ end
end
end
@@ -254,6 +259,18 @@ function caches.loaddata(paths,name)
for i=1,#paths do
local data = false
local luaname, lucname = makefullname(paths[i],name)
+ if lucname and not lfs.isfile(lucname) and type(caches.compile) == "function" then
+ -- in case we used luatex and luajittex mixed ... lub or luc file
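+            -- (the two engines use incompatible bytecode, so when this engine has no luc
+            -- file yet we load the lua source and compile a fresh one)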
+ texio.write(string.format("(compiling luc: %s)",lucname))
+ data = loadfile(luaname)
+ if data then
+ data = data()
+ end
+ if data then
+ caches.compile(data,luaname,lucname)
+ return data
+ end
+ end
if lucname and lfs.isfile(lucname) then -- maybe also check for size
texio.write(string.format("(load luc: %s)",lucname))
data = loadfile(lucname)
@@ -339,5 +356,23 @@ end
--
function table.setmetatableindex(t,f)
+ if type(t) ~= "table" then
+ f = f or t
+ t = { }
+ end
setmetatable(t,{ __index = f })
+ return t
+end
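+
+-- A small usage sketch of the extended helper above (the names are just examples): when
+-- the first argument is not a table, a fresh table is created, so
+--
+--   local defaults = table.setmetatableindex(function(t,k) return 0 end)
+--   print(defaults.whatever) -- prints 0, 'whatever' being a made-up key
+--
+-- gives a table that resolves unknown keys via the given index function.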
+
+-- helper for plain:
+
+arguments = { }
+
+if arg then
+ for i=1,#arg do
+ local k, v = string.match(arg[i],"^%-%-([^=]+)=?(.-)$")
+ if k and v then
+ arguments[k] = v
+ end
+ end
end
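+
+-- A small illustrative sketch (the option names are made up): when the engine is started
+-- with "--jobname=test --verbose" somewhere on the command line, the loop above gives
+--
+--   arguments.jobname == "test"
+--   arguments.verbose == ""       -- a flag without a value ends up as an empty string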
diff --git a/tex/generic/context/luatex/luatex-basics-nod.lua b/tex/generic/context/luatex/luatex-basics-nod.lua
index 50af40193..1ec2895ba 100644
--- a/tex/generic/context/luatex/luatex-basics-nod.lua
+++ b/tex/generic/context/luatex/luatex-basics-nod.lua
@@ -45,7 +45,7 @@ attributes.private = attributes.private or function(name)
return number
end
--- Nodes:
+-- Nodes (a subset of context so that we don't get too much unused code):
nodes = { }
nodes.pool = { }
@@ -54,7 +54,7 @@ nodes.handlers = { }
local nodecodes = { } for k,v in next, node.types () do nodecodes[string.gsub(v,"_","")] = k end
local whatcodes = { } for k,v in next, node.whatsits() do whatcodes[string.gsub(v,"_","")] = k end
local glyphcodes = { [0] = "character", "glyph", "ligature", "ghost", "left", "right" }
-local disccodes = { [0] = "discretionary","explicit", "automatic", "regular", "first", "second" }
+local disccodes = { [0] = "discretionary", "explicit", "automatic", "regular", "first", "second" }
nodes.nodecodes = nodecodes
nodes.whatcodes = whatcodes
@@ -67,11 +67,20 @@ local remove_node = node.remove
local new_node = node.new
local traverse_id = node.traverse_id
-local math_code = nodecodes.math
-
nodes.handlers.protectglyphs = node.protect_glyphs
nodes.handlers.unprotectglyphs = node.unprotect_glyphs
+local math_code = nodecodes.math
+local end_of_math = node.end_of_math
+
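+-- The wrapper below is a bit more tolerant than the engine helper: when the given node
+-- itself is already an end-of-math node (subtype 1) it is returned as is, otherwise we
+-- fall back on the engine's own scanner.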
+function node.end_of_math(n)
+ if n.id == math_code and n.subtype == 1 then
+ return n
+ else
+ return end_of_math(n)
+ end
+end
+
function nodes.remove(head, current, free_too)
local t = current
head, current = remove_node(head,current)
@@ -96,10 +105,8 @@ function nodes.pool.kern(k)
return n
end
--- experimental
-
-local getfield = node.getfield or function(n,tag) return n[tag] end
-local setfield = node.setfield or function(n,tag,value) n[tag] = value end
+local getfield = node.getfield
+local setfield = node.setfield
nodes.getfield = getfield
nodes.setfield = setfield
@@ -107,17 +114,6 @@ nodes.setfield = setfield
nodes.getattr = getfield
nodes.setattr = setfield
-if node.getid then nodes.getid = node.getid else function nodes.getid (n) return getfield(n,"id") end end
-if node.getsubtype then nodes.getsubtype = node.getsubtype else function nodes.getsubtype(n) return getfield(n,"subtype") end end
-if node.getnext then nodes.getnext = node.getnext else function nodes.getnext (n) return getfield(n,"next") end end
-if node.getprev then nodes.getprev = node.getprev else function nodes.getprev (n) return getfield(n,"prev") end end
-if node.getchar then nodes.getchar = node.getchar else function nodes.getchar (n) return getfield(n,"char") end end
-if node.getfont then nodes.getfont = node.getfont else function nodes.getfont (n) return getfield(n,"font") end end
-if node.getlist then nodes.getlist = node.getlist else function nodes.getlist (n) return getfield(n,"list") end end
-
-function nodes.tonut (n) return n end
-function nodes.tonode(n) return n end
-
-- being lazy ... just copy a bunch ... not all needed in generic but we assume
-- nodes to be kind of private anyway
@@ -158,12 +154,95 @@ nodes.unset_attribute = node.unset_attribute
nodes.protect_glyphs = node.protect_glyphs
nodes.unprotect_glyphs = node.unprotect_glyphs
-nodes.kerning = node.kerning
-nodes.ligaturing = node.ligaturing
+-----.kerning = node.kerning
+-----.ligaturing = node.ligaturing
nodes.mlist_to_hlist = node.mlist_to_hlist
-- in generic code, at least for some time, we stay nodes, while in context
 -- we can go nuts (e.g. experimental); this split permits us to keep code
-- used elsewhere stable but at the same time play around in context
-nodes.nuts = nodes
+local direct = node.direct
+local nuts = { }
+nodes.nuts = nuts
+
+local tonode = direct.tonode
+local tonut = direct.todirect
+
+nodes.tonode = tonode
+nodes.tonut = tonut
+
+nuts.tonode = tonode
+nuts.tonut = tonut
+
+
+local getfield = direct.getfield
+local setfield = direct.setfield
+
+nuts.getfield = getfield
+nuts.setfield = setfield
+nuts.getnext = direct.getnext
+nuts.getprev = direct.getprev
+nuts.getid = direct.getid
+nuts.getattr = getfield
+nuts.setattr = setfield
+nuts.getfont = direct.getfont
+nuts.getsubtype = direct.getsubtype
+nuts.getchar = direct.getchar
+
+nuts.insert_before = direct.insert_before
+nuts.insert_after = direct.insert_after
+nuts.delete = direct.delete
+nuts.copy = direct.copy
+nuts.copy_list = direct.copy_list
+nuts.tail = direct.tail
+nuts.flush_list = direct.flush_list
+nuts.free = direct.free
+nuts.remove = direct.remove
+nuts.is_node = direct.is_node
+nuts.end_of_math = direct.end_of_math
+nuts.traverse = direct.traverse
+nuts.traverse_id = direct.traverse_id
+
+nuts.getprop = nuts.getattr
+nuts.setprop = nuts.setattr
+
+local new_nut = direct.new
+nuts.new = new_nut
+nuts.pool = { }
+
+function nuts.pool.kern(k)
+ local n = new_nut("kern",1)
+ setfield(n,"kern",k)
+ return n
+end
+
+-- properties as used in the (new) injector:
+
+local propertydata = direct.get_properties_table()
+nodes.properties = { data = propertydata }
+
+direct.set_properties_mode(true,true) -- needed for injection
+
+function direct.set_properties_mode() end -- we really need the set modes
+
+nuts.getprop = function(n,k)
+ local p = propertydata[n]
+ if p then
+ return p[k]
+ end
+end
+
+nuts.setprop = function(n,k,v)
+ if v then
+ local p = propertydata[n]
+ if p then
+ p[k] = v
+ else
+ propertydata[n] = { [k] = v }
+ end
+ end
+end
+
+nodes.setprop = nodes.setproperty
+nodes.getprop = nodes.getproperty
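+
+-- A small illustrative sketch of the properties mechanism defined above (the key and
+-- value are just examples):
+--
+--   local g = nuts.new("glyph")          -- or any node converted with tonut()
+--   nuts.setprop(g,"state","checked")    -- ends up in propertydata[g]
+--   print(nuts.getprop(g,"state"))       -- checked
+--
+-- Contrary to attributes, property values are not restricted to numbers.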
diff --git a/tex/generic/context/luatex/luatex-fonts-cbk.lua b/tex/generic/context/luatex/luatex-fonts-cbk.lua
index 9db94f65e..ce19c8811 100644
--- a/tex/generic/context/luatex/luatex-fonts-cbk.lua
+++ b/tex/generic/context/luatex/luatex-fonts-cbk.lua
@@ -18,14 +18,51 @@ local nodes = nodes
local traverse_id = node.traverse_id
local glyph_code = nodes.nodecodes.glyph
+local disc_code = nodes.nodecodes.disc
-function nodes.handlers.characters(head)
+-- from now on we apply ligaturing and kerning here because it might interfere with complex
+-- opentype discretionary handling where the base ligature pass expects some weird extra
+-- pointers (which then confuse the tail slider that has some checking built in)
+
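+-- A minimal usage sketch (illustrative only): with the base pass enabled, which is the
+-- default here, a direct call like
+--
+--   node.ligaturing(head) -- 'head' being some node list
+--
+-- triggers the one-time warning defined below, because the handler is then supposed to
+-- take care of ligaturing and kerning itself; a macro package can switch this off with
+-- nodes.handlers.setbasepass(false).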
+local ligaturing = node.ligaturing
+local kerning = node.kerning
+
+local basepass = true
+
+local function l_warning() texio.write_nl("warning: node.ligaturing called directly") l_warning = nil end
+local function k_warning() texio.write_nl("warning: node.kerning called directly") k_warning = nil end
+
+function node.ligaturing(...)
+ if basepass and l_warning then
+ l_warning()
+ end
+ return ligaturing(...)
+end
+
+function node.kerning(...)
+ if basepass and k_warning then
+ k_warning()
+ end
+ return kerning(...)
+end
+
+function nodes.handlers.setbasepass(v)
+ basepass = v
+end
+
+function nodes.handlers.nodepass(head)
local fontdata = fonts.hashes.identifiers
if fontdata then
- local usedfonts, done, prevfont = { }, false, nil
+ local usedfonts = { }
+ local basefonts = { }
+ local prevfont = nil
+ local basefont = nil
for n in traverse_id(glyph_code,head) do
local font = n.font
if font ~= prevfont then
+ if basefont then
+ basefont[2] = n.prev
+ end
prevfont = font
local used = usedfonts[font]
if not used then
@@ -36,18 +73,57 @@ function nodes.handlers.characters(head)
local processors = shared.processes
if processors and #processors > 0 then
usedfonts[font] = processors
- done = true
+ elseif basepass then
+ basefont = { n, nil }
+ basefonts[#basefonts+1] = basefont
+ end
+ end
+ end
+ end
+ end
+ end
+ for d in traverse_id(disc_code,head) do
+ local r = d.replace
+ if r then
+ for n in traverse_id(glyph_code,r) do
+ local font = n.font
+ if font ~= prevfont then
+ prevfont = font
+ local used = usedfonts[font]
+ if not used then
+ local tfmdata = fontdata[font] --
+ if tfmdata then
+ local shared = tfmdata.shared -- we need to check shared, only when same features
+ if shared then
+ local processors = shared.processes
+ if processors and #processors > 0 then
+ usedfonts[font] = processors
+ end
+ end
end
end
end
end
end
end
- if done then
+ if next(usedfonts) then
for font, processors in next, usedfonts do
for i=1,#processors do
- local h, d = processors[i](head,font,0)
- head, done = h or head, done or d
+ head = processors[i](head,font,0) or head
+ end
+ end
+ end
+ if basepass and #basefonts > 0 then
+ for i=1,#basefonts do
+ local range = basefonts[i]
+ local start = range[1]
+ local stop = range[2]
+ if stop then
+ start, stop = ligaturing(start,stop)
+ start, stop = kerning(start,stop)
+ elseif start then
+ start = ligaturing(start)
+ start = kerning(start)
end
end
end
@@ -57,12 +133,27 @@ function nodes.handlers.characters(head)
end
end
+function nodes.handlers.basepass(head)
+ if not basepass then
+ head = ligaturing(head)
+ head = kerning(head)
+ end
+ return head, true
+end
+
+local nodepass = nodes.handlers.nodepass
+local basepass = nodes.handlers.basepass
+local injectpass = nodes.injections.handler
+local protectpass = nodes.handlers.protectglyphs
+
function nodes.simple_font_handler(head)
--- lang.hyphenate(head)
- head = nodes.handlers.characters(head)
- nodes.injections.handler(head)
- nodes.handlers.protectglyphs(head)
- head = node.ligaturing(head)
- head = node.kerning(head)
- return head
+ if head then
+ head = nodepass(head)
+ head = injectpass(head)
+ head = basepass(head)
+ protectpass(head)
+ return head, true
+ else
+ return head, false
+ end
end
diff --git a/tex/generic/context/luatex/luatex-fonts-enc.lua b/tex/generic/context/luatex/luatex-fonts-enc.lua
index e20c3a03b..2e1c6a466 100644
--- a/tex/generic/context/luatex/luatex-fonts-enc.lua
+++ b/tex/generic/context/luatex/luatex-fonts-enc.lua
@@ -11,9 +11,10 @@ if context then
os.exit()
end
-local fonts = fonts
-fonts.encodings = { }
-fonts.encodings.agl = { }
+local fonts = fonts
+fonts.encodings = { }
+fonts.encodings.agl = { }
+fonts.encodings.known = { }
setmetatable(fonts.encodings.agl, { __index = function(t,k)
if k == "unicodes" then
diff --git a/tex/generic/context/luatex/luatex-fonts-inj.lua b/tex/generic/context/luatex/luatex-fonts-inj.lua
new file mode 100644
index 000000000..332e92033
--- /dev/null
+++ b/tex/generic/context/luatex/luatex-fonts-inj.lua
@@ -0,0 +1,1055 @@
+if not modules then modules = { } end modules ['font-inj'] = {
+ version = 1.001,
+ comment = "companion to font-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- This property-based variant is not faster but looks nicer than the attribute one. We
+-- need to use rawget (which is about 4 times slower than a direct access, but we cannot
+-- get/set that one for our purpose).
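+--
+-- The access pattern used all over this file looks as follows (a sketch, with 'n' being
+-- a node in its direct representation):
+--
+--   local p = rawget(properties,n)        -- don't trigger a possible __index
+--   if p then
+--       local i = rawget(p,"injections")  -- idem for the per-node subtable
+--       if i then
+--           -- use or update i.leftkern, i.rightkern, i.yoffset, ...
+--       end
+--   end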
+
+if not nodes.properties then return end
+
+local next, rawget = next, rawget
+local utfchar = utf.char
+local fastcopy = table.fastcopy
+
+local trace_injections = false trackers.register("fonts.injections", function(v) trace_injections = v end)
+
+local report_injections = logs.reporter("fonts","injections")
+
+local attributes, nodes, node = attributes, nodes, node
+
+fonts = fonts
+local fontdata = fonts.hashes.identifiers
+
+nodes.injections = nodes.injections or { }
+local injections = nodes.injections
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local kern_code = nodecodes.kern
+
+local nuts = nodes.nuts
+local nodepool = nuts.pool
+
+local newkern = nodepool.kern
+
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local traverse_id = nuts.traverse_id
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local find_tail = nuts.tail
+
+local properties = nodes.properties.data
+
+function injections.installnewkern(nk)
+ newkern = nk or newkern
+end
+
+local nofregisteredkerns = 0
+local nofregisteredpairs = 0
+local nofregisteredmarks = 0
+local nofregisteredcursives = 0
+----- markanchors = { } -- one base can have more marks
+local keepregisteredcounts = false
+
+function injections.keepcounts()
+ keepregisteredcounts = true
+end
+
+function injections.resetcounts()
+ nofregisteredkerns = 0
+ nofregisteredpairs = 0
+ nofregisteredmarks = 0
+ nofregisteredcursives = 0
+ keepregisteredcounts = false
+end
+
+-- We need to make sure that a possible metatable will not kick in
+-- unexpectedly.
+
+function injections.reset(n)
+ local p = rawget(properties,n)
+ if p and rawget(p,"injections") then
+ p.injections = nil
+ end
+end
+
+function injections.copy(target,source)
+ local sp = rawget(properties,source)
+ if sp then
+ local tp = rawget(properties,target)
+ local si = rawget(sp,"injections")
+ if si then
+ si = fastcopy(si)
+ if tp then
+ tp.injections = si
+ else
+                properties[target] = {
+ injections = si,
+ }
+ end
+ else
+ if tp then
+ tp.injections = nil
+ end
+ end
+ end
+end
+
+function injections.setligaindex(n,index)
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ i.ligaindex = index
+ else
+ p.injections = {
+ ligaindex = index
+ }
+ end
+ else
+ properties[n] = {
+ injections = {
+ ligaindex = index
+ }
+ }
+ end
+end
+
+function injections.getligaindex(n,default)
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ return i.ligaindex or default
+ end
+ end
+ return default
+end
+
+function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) -- hm: nuts or nodes
+ local dx = factor*(exit[1]-entry[1])
+ local dy = -factor*(exit[2]-entry[2])
+ local ws, wn = tfmstart.width, tfmnext.width
+ nofregisteredcursives = nofregisteredcursives + 1
+ if rlmode < 0 then
+ dx = -(dx + wn)
+ else
+ dx = dx - ws
+ end
+ --
+ local p = rawget(properties,start)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ i.cursiveanchor = true
+ else
+ p.injections = {
+ cursiveanchor = true,
+ }
+ end
+ else
+ properties[start] = {
+ injections = {
+ cursiveanchor = true,
+ },
+ }
+ end
+ local p = rawget(properties,nxt)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ i.cursivex = dx
+ i.cursivey = dy
+ else
+ p.injections = {
+ cursivex = dx,
+ cursivey = dy,
+ }
+ end
+ else
+ properties[nxt] = {
+ injections = {
+ cursivex = dx,
+ cursivey = dy,
+ },
+ }
+ end
+ return dx, dy, nofregisteredcursives
+end
+
+function injections.setpair(current,factor,rlmode,r2lflag,spec,injection) -- r2lflag & tfmchr not used
+ local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
+ if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then -- okay?
+ local yoffset = y - h
+ local leftkern = x -- both kerns are set in a pair kern compared
+ local rightkern = w - x -- to normal kerns where we set only leftkern
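+        -- So, for a scaled value pair { x, y, w, h }: the glyph is shifted up by y-h, gets
+        -- a kern of x in front of it and a kern of w-x after it (swapped in r2l mode).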
+ if leftkern ~= 0 or rightkern ~= 0 or yoffset ~= 0 then
+ nofregisteredpairs = nofregisteredpairs + 1
+ if rlmode and rlmode < 0 then
+ leftkern, rightkern = rightkern, leftkern
+ end
+ local p = rawget(properties,current)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ if leftkern ~= 0 then
+ i.leftkern = (i.leftkern or 0) + leftkern
+ end
+ if rightkern ~= 0 then
+ i.rightkern = (i.rightkern or 0) + rightkern
+ end
+ if yoffset ~= 0 then
+ i.yoffset = (i.yoffset or 0) + yoffset
+ end
+ elseif leftkern ~= 0 or rightkern ~= 0 then
+ p.injections = {
+ leftkern = leftkern,
+ rightkern = rightkern,
+ yoffset = yoffset,
+ }
+ else
+ p.injections = {
+ yoffset = yoffset,
+ }
+ end
+ elseif leftkern ~= 0 or rightkern ~= 0 then
+ properties[current] = {
+ injections = {
+ leftkern = leftkern,
+ rightkern = rightkern,
+ yoffset = yoffset,
+ },
+ }
+ else
+ properties[current] = {
+ injections = {
+ yoffset = yoffset,
+ },
+ }
+ end
+ return x, y, w, h, nofregisteredpairs
+ end
+ end
+ return x, y, w, h -- no bound
+end
+
+-- this needs checking for rl < 0 but it is unlikely that a r2l script
+-- uses kernclasses between glyphs so we're probably safe (KE has a
+-- problematic font where marks interfere with rl < 0 in the previous
+-- case)
+
+function injections.setkern(current,factor,rlmode,x,injection)
+ local dx = factor * x
+ if dx ~= 0 then
+ nofregisteredkerns = nofregisteredkerns + 1
+ local p = rawget(properties,current)
+ if not injection then
+ injection = "injections"
+ end
+ if p then
+ local i = rawget(p,injection)
+ if i then
+ i.leftkern = dx + (i.leftkern or 0)
+ else
+ p[injection] = {
+ leftkern = dx,
+ }
+ end
+ else
+ properties[current] = {
+ [injection] = {
+ leftkern = dx,
+ },
+ }
+ end
+ return dx, nofregisteredkerns
+ else
+ return 0, 0
+ end
+end
+
+function injections.setmark(start,base,factor,rlmode,ba,ma,tfmbase) -- ba=baseanchor, ma=markanchor
+ local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2])
+ nofregisteredmarks = nofregisteredmarks + 1
+ -- markanchors[nofregisteredmarks] = base
+ if rlmode >= 0 then
+ dx = tfmbase.width - dx -- see later commented ox
+ end
+ local p = rawget(properties,start)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ i.markx = dx
+ i.marky = dy
+ i.markdir = rlmode or 0
+ i.markbase = nofregisteredmarks
+ i.markbasenode = base
+ else
+ p.injections = {
+ markx = dx,
+ marky = dy,
+ markdir = rlmode or 0,
+ markbase = nofregisteredmarks,
+ markbasenode = base,
+ }
+ end
+ else
+ properties[start] = {
+ injections = {
+ markx = dx,
+ marky = dy,
+ markdir = rlmode or 0,
+ markbase = nofregisteredmarks,
+ markbasenode = base,
+ },
+ }
+ end
+ return dx, dy, nofregisteredmarks
+end
+
+local function dir(n)
+ return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
+end
+
+local function showchar(n,nested)
+ local char = getchar(n)
+ report_injections("%wfont %s, char %U, glyph %c",nested and 2 or 0,getfont(n),char,char)
+end
+
+local function show(n,what,nested,symbol)
+ if n then
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,what)
+ if i then
+ local leftkern = i.leftkern or 0
+ local rightkern = i.rightkern or 0
+ local yoffset = i.yoffset or 0
+ local markx = i.markx or 0
+ local marky = i.marky or 0
+ local markdir = i.markdir or 0
+ local markbase = i.markbase or 0 -- will be markbasenode
+ local cursivex = i.cursivex or 0
+ local cursivey = i.cursivey or 0
+ local ligaindex = i.ligaindex or 0
+ local margin = nested and 4 or 2
+ --
+ if rightkern ~= 0 or yoffset ~= 0 then
+ report_injections("%w%s pair: lx %p, rx %p, dy %p",margin,symbol,leftkern,rightkern,yoffset)
+ elseif leftkern ~= 0 then
+ report_injections("%w%s kern: dx %p",margin,symbol,leftkern)
+ end
+ if markx ~= 0 or marky ~= 0 or markbase ~= 0 then
+ report_injections("%w%s mark: dx %p, dy %p, dir %s, base %s",margin,symbol,markx,marky,markdir,markbase ~= 0 and "yes" or "no")
+ end
+ if cursivex ~= 0 or cursivey ~= 0 then
+ report_injections("%w%s curs: dx %p, dy %p",margin,symbol,cursivex,cursivey)
+ end
+ if ligaindex ~= 0 then
+ report_injections("%w%s liga: index %i",margin,symbol,ligaindex)
+ end
+ end
+ end
+ end
+end
+
+local function showsub(n,what,where)
+ report_injections("begin subrun: %s",where)
+ for n in traverse_id(glyph_code,n) do
+ showchar(n,where)
+ show(n,what,where," ")
+ end
+ report_injections("end subrun")
+end
+
+local function trace(head,where)
+ report_injections("begin run %s: %s kerns, %s pairs, %s marks and %s cursives registered",
+ where or "",nofregisteredkerns,nofregisteredpairs,nofregisteredmarks,nofregisteredcursives)
+ local n = head
+ while n do
+ local id = getid(n)
+ if id == glyph_code then
+ showchar(n)
+ show(n,"injections",false," ")
+ show(n,"preinjections",false,"<")
+ show(n,"postinjections",false,">")
+ show(n,"replaceinjections",false,"=")
+ elseif id == disc_code then
+ local pre = getfield(n,"pre")
+ local post = getfield(n,"post")
+ local replace = getfield(n,"replace")
+ if pre then
+ showsub(pre,"preinjections","pre")
+ end
+ if post then
+ showsub(post,"postinjections","post")
+ end
+ if replace then
+ showsub(replace,"replaceinjections","replace")
+ end
+ end
+ n = getnext(n)
+ end
+ report_injections("end run")
+end
+
+local function show_result(head)
+ local current = head
+ local skipping = false
+ while current do
+ local id = getid(current)
+ if id == glyph_code then
+ report_injections("char: %C, width %p, xoffset %p, yoffset %p",
+ getchar(current),getfield(current,"width"),getfield(current,"xoffset"),getfield(current,"yoffset"))
+ skipping = false
+ elseif id == kern_code then
+ report_injections("kern: %p",getfield(current,"kern"))
+ skipping = false
+ elseif not skipping then
+ report_injections()
+ skipping = true
+ end
+ current = getnext(current)
+ end
+end
+
+-- we could also check for marks here but maybe not all are registered (needs checking)
+
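+-- Two glyph collectors: the first one is used when pairs have been registered and
+-- therefore also applies the yoffsets stored by setpair, while the second one is the
+-- cheaper variant used when only marks, cursives and plain kerns are involved.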
+local function collect_glyphs_1(head)
+ local glyphs, nofglyphs = { }, 0
+ local marks, nofmarks = { }, 0
+ local nf, tm = nil, nil
+ for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts
+ if getsubtype(n) < 256 then
+ local f = getfont(n)
+ if f ~= nf then
+ nf = f
+ tm = fontdata[nf].resources.marks -- other hash in ctx
+ end
+ if tm and tm[getchar(n)] then
+ nofmarks = nofmarks + 1
+ marks[nofmarks] = n
+ else
+ nofglyphs = nofglyphs + 1
+ glyphs[nofglyphs] = n
+ end
+ -- yoffsets can influence curs steps
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"injections")
+ if i then
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ end
+ end
+ end
+ end
+ return glyphs, nofglyphs, marks, nofmarks
+end
+
+local function collect_glyphs_2(head)
+ local glyphs, nofglyphs = { }, 0
+ local marks, nofmarks = { }, 0
+ local nf, tm = nil, nil
+ for n in traverse_id(glyph_code,head) do
+ if getsubtype(n) < 256 then
+ local f = getfont(n)
+ if f ~= nf then
+ nf = f
+ tm = fontdata[nf].resources.marks -- other hash in ctx
+ end
+ if tm and tm[getchar(n)] then
+ nofmarks = nofmarks + 1
+ marks[nofmarks] = n
+ else
+ nofglyphs = nofglyphs + 1
+ glyphs[nofglyphs] = n
+ end
+ end
+ end
+ return glyphs, nofglyphs, marks, nofmarks
+end
+
+local function inject_marks(marks,nofmarks)
+ for i=1,nofmarks do
+ local n = marks[i]
+ local pn = rawget(properties,n)
+ if pn then
+ pn = rawget(pn,"injections")
+ if pn then
+ local p = pn.markbasenode
+ if p then
+ local px = getfield(p,"xoffset")
+ local ox = 0
+ local rightkern = nil
+ local pp = rawget(properties,p)
+ if pp then
+ pp = rawget(pp,"injections")
+ if pp then
+ rightkern = pp.rightkern
+ end
+ end
+ if rightkern then -- x and w ~= 0
+ if pn.markdir < 0 then
+ -- kern(w-x) glyph(p) kern(x) mark(n)
+ ox = px - pn.markx - rightkern
+ -- report_injections("r2l case 1: %p",ox)
+ else
+ -- kern(x) glyph(p) kern(w-x) mark(n)
+ -- ox = px - getfield(p,"width") + pn.markx - pp.leftkern
+ local leftkern = pp.leftkern
+ if leftkern then
+                            ox = px - pn.markx - leftkern
+                        else
+                            ox = px - pn.markx
+ end
+-- report_injections("l2r case 1: %p",ox)
+ end
+ else
+ -- we need to deal with fonts that have marks with width
+ -- if pn.markdir < 0 then
+ -- ox = px - pn.markx
+ -- -- report_injections("r2l case 3: %p",ox)
+ -- else
+ -- -- ox = px - getfield(p,"width") + pn.markx
+ ox = px - pn.markx
+ -- report_injections("l2r case 3: %p",ox)
+ -- end
+ local wn = getfield(n,"width") -- in arial marks have widths
+ if wn ~= 0 then
+ -- bad: we should center
+ -- insert_node_before(head,n,newkern(-wn/2))
+ -- insert_node_after(head,n,newkern(-wn/2))
+ pn.leftkern = -wn/2
+ pn.rightkern = -wn/2
+ -- wx[n] = { 0, -wn/2, 0, -wn }
+ end
+ -- so far
+ end
+ setfield(n,"xoffset",ox)
+ --
+ local py = getfield(p,"yoffset")
+ local oy = 0
+ if marks[p] then
+ oy = py + pn.marky
+ else
+ oy = getfield(n,"yoffset") + py + pn.marky
+ end
+ setfield(n,"yoffset",oy)
+ else
+ -- normally this can't happen (only when in trace mode which is a special case anyway)
+ -- report_injections("missing mark anchor %i",pn.markbase or 0)
+ end
+ end
+ end
+ end
+end
+
+local function inject_cursives(glyphs,nofglyphs)
+ local cursiveanchor, lastanchor = nil, nil
+ local minc, maxc, last = 0, 0, nil
+ for i=1,nofglyphs do
+ local n = glyphs[i]
+ local pn = rawget(properties,n)
+ if pn then
+ pn = rawget(pn,"injections")
+ end
+ if pn then
+ local cursivex = pn.cursivex
+ if cursivex then
+ if cursiveanchor then
+ if cursivex ~= 0 then
+ pn.leftkern = (pn.leftkern or 0) + cursivex
+ end
+ if lastanchor then
+ if maxc == 0 then
+ minc = lastanchor
+ end
+ maxc = lastanchor
+ properties[cursiveanchor].cursivedy = pn.cursivey
+ end
+ last = n
+ else
+ maxc = 0
+ end
+ elseif maxc > 0 then
+ local ny = getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti = glyphs[i]
+ ny = ny + properties[ti].cursivedy
+ setfield(ti,"yoffset",ny) -- why not add ?
+ end
+ maxc = 0
+ end
+ if pn.cursiveanchor then
+ cursiveanchor = n
+ lastanchor = i
+ else
+ cursiveanchor = nil
+ lastanchor = nil
+ if maxc > 0 then
+ local ny = getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti = glyphs[i]
+ ny = ny + properties[ti].cursivedy
+ setfield(ti,"yoffset",ny) -- why not add ?
+ end
+ maxc = 0
+ end
+ end
+ elseif maxc > 0 then
+ local ny = getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti = glyphs[i]
+ ny = ny + properties[ti].cursivedy
+ setfield(ti,"yoffset",getfield(ti,"yoffset") + ny) -- ?
+ end
+ maxc = 0
+ cursiveanchor = nil
+ lastanchor = nil
+ end
+ -- if maxc > 0 and not cursiveanchor then
+ -- local ny = getfield(n,"yoffset")
+ -- for i=maxc,minc,-1 do
+ -- local ti = glyphs[i]
+ -- ny = ny + properties[ti].cursivedy
+ -- setfield(ti,"yoffset",ny) -- why not add ?
+ -- end
+ -- maxc = 0
+ -- end
+ end
+ if last and maxc > 0 then
+ local ny = getfield(last,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti = glyphs[i]
+ ny = ny + properties[ti].cursivedy
+ setfield(ti,"yoffset",ny) -- why not add ?
+ end
+ end
+end
+
+local function inject_kerns(head,list,length)
+ -- todo: pre/post/replace
+ for i=1,length do
+ local n = list[i]
+ local pn = rawget(properties,n)
+ if pn then
+ local i = rawget(pn,"injections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ insert_node_before(head,n,newkern(leftkern)) -- type 0/2
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern)) -- type 0/2
+ end
+ end
+ end
+ end
+end
+
+local function inject_everything(head,where)
+ head = tonut(head)
+ if trace_injections then
+ trace(head,"everything")
+ end
+ local glyphs, nofglyphs, marks, nofmarks
+ if nofregisteredpairs > 0 then
+ glyphs, nofglyphs, marks, nofmarks = collect_glyphs_1(head)
+ else
+ glyphs, nofglyphs, marks, nofmarks = collect_glyphs_2(head)
+ end
+ if nofglyphs > 0 then
+ if nofregisteredcursives > 0 then
+ inject_cursives(glyphs,nofglyphs)
+ end
+ if nofregisteredmarks > 0 then -- and nofmarks > 0
+ inject_marks(marks,nofmarks)
+ end
+ inject_kerns(head,glyphs,nofglyphs)
+ end
+ if nofmarks > 0 then
+ inject_kerns(head,marks,nofmarks)
+ end
+ if keepregisteredcounts then
+ keepregisteredcounts = false
+ else
+ nofregisteredkerns = 0
+ nofregisteredpairs = 0
+ nofregisteredmarks = 0
+ nofregisteredcursives = 0
+ end
+ return tonode(head), true
+end
+
+local function inject_kerns_only(head,where)
+ head = tonut(head)
+ if trace_injections then
+ trace(head,"kerns")
+ end
+ local n = head
+ local p = nil
+ while n do
+ local id = getid(n)
+ if id == glyph_code then
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ if p then
+ local d = getfield(p,"post")
+ if d then
+ local i = rawget(pn,"postinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ local t = find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ end
+ end
+ local d = getfield(p,"replace")
+ if d then
+ local i = rawget(pn,"replaceinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ local t = find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ end
+ else
+ local i = rawget(pn,"injections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ setfield(p,"replace",newkern(leftkern))
+ end
+ end
+ end
+ else
+ local i = rawget(pn,"injections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ head = insert_node_before(head,n,newkern(leftkern))
+ end
+ end
+ end
+ end
+ else
+ break
+ end
+ p = nil
+ elseif id == disc_code then
+ local d = getfield(n,"pre")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ local i = rawget(pn,"preinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"pre",h)
+ end
+ end
+ local d = getfield(n,"post")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ local i = rawget(pn,"postinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"post",h)
+ end
+ end
+ local d = getfield(n,"replace")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n) -- why can it be empty { }
+ if pn then
+ local i = rawget(pn,"replaceinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"replace",h)
+ end
+ end
+ p = n
+ else
+ p = nil
+ end
+ n = getnext(n)
+ end
+ --
+ if keepregisteredcounts then
+ keepregisteredcounts = false
+ else
+ nofregisteredkerns = 0
+ end
+ return tonode(head), true
+end
+
+local function inject_pairs_only(head,where)
+ head = tonut(head)
+ if trace_injections then
+ trace(head,"pairs")
+ end
+ --
+ local n = head
+ local p = nil
+ while n do
+ local id = getid(n)
+ if id == glyph_code then
+ if getsubtype(n) < 256 then
+ local pn = rawget(properties,n)
+ if pn then
+ if p then
+ local d = getfield(p,"post")
+ if d then
+ local i = rawget(pn,"postinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ local t = find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ -- local rightkern = i.rightkern
+ -- if rightkern and rightkern ~= 0 then
+ -- insert_node_after(head,n,newkern(rightkern))
+ -- n = getnext(n) -- to be checked
+ -- end
+ end
+ end
+ local d = getfield(p,"replace")
+ if d then
+ local i = rawget(pn,"replaceinjections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ local t = find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ -- local rightkern = i.rightkern
+ -- if rightkern and rightkern ~= 0 then
+ -- insert_node_after(head,n,newkern(rightkern))
+ -- n = getnext(n) -- to be checked
+ -- end
+ end
+ else
+ local i = rawget(pn,"injections")
+ if i then
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ setfield(p,"replace",newkern(leftkern))
+ end
+ -- local rightkern = i.rightkern
+ -- if rightkern and rightkern ~= 0 then
+ -- insert_node_after(head,n,newkern(rightkern))
+ -- n = getnext(n) -- to be checked
+ -- end
+ end
+ end
+ else
+ -- this is the most common case
+ local i = rawget(pn,"injections")
+ if i then
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ insert_node_before(head,n,newkern(leftkern))
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n = getnext(n) -- to be checked
+ end
+ end
+ end
+ end
+ else
+ break
+ end
+ p = nil
+ elseif id == disc_code then
+ local d = getfield(n,"pre")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"preinjections")
+ if i then
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern = i.leftkern
+                            if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n = getnext(n) -- to be checked
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"pre",h)
+ end
+ end
+ local d = getfield(n,"post")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"postinjections")
+ if i then
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n = getnext(n) -- to be checked
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"post",h)
+ end
+ end
+ local d = getfield(n,"replace")
+ if d then
+ local h = d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n) < 256 then
+ local p = rawget(properties,n)
+ if p then
+ local i = rawget(p,"replaceinjections")
+ if i then
+ local yoffset = i.yoffset
+ if yoffset and yoffset ~= 0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern = i.leftkern
+ if leftkern and leftkern ~= 0 then
+ h = insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern = i.rightkern
+ if rightkern and rightkern ~= 0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n = getnext(n) -- to be checked
+ end
+ end
+ end
+ else
+ break
+ end
+ end
+ if h ~= d then
+ setfield(n,"replace",h)
+ end
+ end
+ p = n
+ else
+ p = nil
+ end
+ n = getnext(n)
+ end
+ --
+ if keepregisteredcounts then
+ keepregisteredcounts = false
+ else
+ nofregisteredpairs = 0
+ nofregisteredkerns = 0
+ end
+ return tonode(head), true
+end
+
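+-- The dispatcher: we only use the most complete (and most expensive) injector when it is
+-- really needed, depending on what has been registered since the last flush.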
+function injections.handler(head,where) -- optimize for n=1 ?
+ if nofregisteredmarks > 0 or nofregisteredcursives > 0 then
+ return inject_everything(head,where)
+ elseif nofregisteredpairs > 0 then
+ return inject_pairs_only(head,where)
+ elseif nofregisteredkerns > 0 then
+ return inject_kerns_only(head,where)
+ else
+ return head, false
+ end
+end
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index 24e49308c..81883b8b8 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 01/03/14 00:40:35
+-- merge date : 05/15/15 23:03:46
do -- begin closure to overcome local limits and interference
@@ -82,6 +82,16 @@ function optionalrequire(...)
return result
end
end
+if lua then
+ lua.mask=load([[τεχ = 1]]) and "utf" or "ascii"
+end
+local flush=io.flush
+if flush then
+ local execute=os.execute if execute then function os.execute(...) flush() return execute(...) end end
+ local exec=os.exec if exec then function os.exec (...) flush() return exec (...) end end
+ local spawn=os.spawn if spawn then function os.spawn (...) flush() return spawn (...) end end
+ local popen=io.popen if popen then function io.popen (...) flush() return popen (...) end end
+end
end -- closure
@@ -101,7 +111,9 @@ local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.forma
local floor=math.floor
local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
-setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+if setinspector then
+ setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+end
lpeg.patterns=lpeg.patterns or {}
local patterns=lpeg.patterns
local anything=P(1)
@@ -120,7 +132,7 @@ local uppercase=R("AZ")
local underscore=P("_")
local hexdigit=digit+lowercase+uppercase
local cr,lf,crlf=P("\r"),P("\n"),P("\r\n")
-local newline=crlf+S("\r\n")
+local newline=P("\r")*(P("\n")+P(true))+P("\n")
local escaped=P("\\")*anything
local squote=P("'")
local dquote=P('"')
@@ -142,8 +154,10 @@ patterns.utfbom_32_le=utfbom_32_le
patterns.utfbom_16_be=utfbom_16_be
patterns.utfbom_16_le=utfbom_16_le
patterns.utfbom_8=utfbom_8
-patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n")
-patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000")
+patterns.utf_16_be_nl=P("\000\r\000\n")+P("\000\r")+P("\000\n")
+patterns.utf_16_le_nl=P("\r\000\n\000")+P("\r\000")+P("\n\000")
+patterns.utf_32_be_nl=P("\000\000\000\r\000\000\000\n")+P("\000\000\000\r")+P("\000\000\000\n")
+patterns.utf_32_le_nl=P("\r\000\000\000\n\000\000\000")+P("\r\000\000\000")+P("\n\000\000\000")
patterns.utf8one=R("\000\127")
patterns.utf8two=R("\194\223")*utf8next
patterns.utf8three=R("\224\239")*utf8next*utf8next
@@ -170,10 +184,24 @@ patterns.spacer=spacer
patterns.whitespace=whitespace
patterns.nonspacer=nonspacer
patterns.nonwhitespace=nonwhitespace
-local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local fullstripper=whitespace^0*C((whitespace^0*nonwhitespace^1)^0)
local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
+local b_collapser=Cs(whitespace^0/""*(nonwhitespace^1+whitespace^1/" ")^0)
+local e_collapser=Cs((whitespace^1*P(-1)/""+nonwhitespace^1+whitespace^1/" ")^0)
+local m_collapser=Cs((nonwhitespace^1+whitespace^1/" ")^0)
+local b_stripper=Cs(spacer^0/""*(nonspacer^1+spacer^1/" ")^0)
+local e_stripper=Cs((spacer^1*P(-1)/""+nonspacer^1+spacer^1/" ")^0)
+local m_stripper=Cs((nonspacer^1+spacer^1/" ")^0)
patterns.stripper=stripper
+patterns.fullstripper=fullstripper
patterns.collapser=collapser
+patterns.b_collapser=b_collapser
+patterns.m_collapser=m_collapser
+patterns.e_collapser=e_collapser
+patterns.b_stripper=b_stripper
+patterns.m_stripper=m_stripper
+patterns.e_stripper=e_stripper
patterns.lowercase=lowercase
patterns.uppercase=uppercase
patterns.letter=patterns.lowercase+patterns.uppercase
@@ -210,9 +238,12 @@ patterns.integer=sign^-1*digit^1
patterns.unsigned=digit^0*period*digit^1
patterns.float=sign^-1*patterns.unsigned
patterns.cunsigned=digit^0*comma*digit^1
+patterns.cpunsigned=digit^0*(period+comma)*digit^1
patterns.cfloat=sign^-1*patterns.cunsigned
+patterns.cpfloat=sign^-1*patterns.cpunsigned
patterns.number=patterns.float+patterns.integer
patterns.cnumber=patterns.cfloat+patterns.integer
+patterns.cpnumber=patterns.cpfloat+patterns.integer
patterns.oct=zero*octdigit^1
patterns.octal=patterns.oct
patterns.HEX=zero*P("X")*(digit+uppercase)^1
@@ -395,7 +426,7 @@ function lpeg.replacer(one,two,makefunction,isutf)
return pattern
end
end
-function lpeg.finder(lst,makefunction)
+function lpeg.finder(lst,makefunction,isutf)
local pattern
if type(lst)=="table" then
pattern=P(false)
@@ -411,7 +442,11 @@ function lpeg.finder(lst,makefunction)
else
pattern=P(lst)
end
- pattern=(1-pattern)^0*pattern
+ if isutf then
+ pattern=((utf8char or 1)-pattern)^0*pattern
+ else
+ pattern=(1-pattern)^0*pattern
+ end
if makefunction then
return function(str)
return lpegmatch(pattern,str)
@@ -625,37 +660,139 @@ function lpeg.append(list,pp,delayed,checked)
end
return p
end
+local p_false=P(false)
+local p_true=P(true)
local function make(t)
- local p
+ local function making(t)
+ local p=p_false
+ local keys=sortedkeys(t)
+ for i=1,#keys do
+ local k=keys[i]
+ if k~="" then
+ local v=t[k]
+ if v==true then
+ p=p+P(k)*p_true
+ elseif v==false then
+ else
+ p=p+P(k)*making(v)
+ end
+ end
+ end
+ if t[""] then
+ p=p+p_true
+ end
+ return p
+ end
+ local p=p_false
local keys=sortedkeys(t)
for i=1,#keys do
local k=keys[i]
- local v=t[k]
- if not p then
- if next(v) then
- p=P(k)*make(v)
+ if k~="" then
+ local v=t[k]
+ if v==true then
+ p=p+P(k)*p_true
+ elseif v==false then
else
- p=P(k)
+ p=p+P(k)*making(v)
end
- else
- if next(v) then
- p=p+P(k)*make(v)
+ end
+ end
+ return p
+end
+local function collapse(t,x)
+ if type(t)~="table" then
+ return t,x
+ else
+ local n=next(t)
+ if n==nil then
+ return t,x
+ elseif next(t,n)==nil then
+ local k=n
+ local v=t[k]
+ if type(v)=="table" then
+ return collapse(v,x..k)
else
- p=p+P(k)
+ return v,x..k
+ end
+ else
+ local tt={}
+ for k,v in next,t do
+ local vv,kk=collapse(v,k)
+ tt[kk]=vv
end
+ return tt,x
end
end
- return p
end
function lpeg.utfchartabletopattern(list)
local tree={}
- for i=1,#list do
- local t=tree
- for c in gmatch(list[i],".") do
- if not t[c] then
- t[c]={}
+ local n=#list
+ if n==0 then
+ for s in next,list do
+ local t=tree
+ local p,pk
+ for c in gmatch(s,".") do
+ if t==true then
+ t={ [c]=true,[""]=true }
+ p[pk]=t
+ p=t
+ t=false
+ elseif t==false then
+ t={ [c]=false }
+ p[pk]=t
+ p=t
+ t=false
+ else
+ local tc=t[c]
+ if not tc then
+ tc=false
+ t[c]=false
+ end
+ p=t
+ t=tc
+ end
+ pk=c
+ end
+ if t==false then
+ p[pk]=true
+ elseif t==true then
+ else
+ t[""]=true
+ end
+ end
+ else
+ for i=1,n do
+ local s=list[i]
+ local t=tree
+ local p,pk
+ for c in gmatch(s,".") do
+ if t==true then
+ t={ [c]=true,[""]=true }
+ p[pk]=t
+ p=t
+ t=false
+ elseif t==false then
+ t={ [c]=false }
+ p[pk]=t
+ p=t
+ t=false
+ else
+ local tc=t[c]
+ if not tc then
+ tc=false
+ t[c]=false
+ end
+ p=t
+ t=tc
+ end
+ pk=c
+ end
+ if t==false then
+ p[pk]=true
+ elseif t==true then
+ else
+ t[""]=true
end
- t=t[c]
end
end
return make(tree)
@@ -695,6 +832,65 @@ local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"")
local number=digit^1*(case_1+case_2)
local stripper=Cs((number+1)^0)
lpeg.patterns.stripzeros=stripper
+local byte_to_HEX={}
+local byte_to_hex={}
+local byte_to_dec={}
+local hex_to_byte={}
+for i=0,255 do
+ local H=format("%02X",i)
+ local h=format("%02x",i)
+ local d=format("%03i",i)
+ local c=char(i)
+ byte_to_HEX[c]=H
+ byte_to_hex[c]=h
+ byte_to_dec[c]=d
+ hex_to_byte[h]=c
+ hex_to_byte[H]=c
+end
+local hextobyte=P(2)/hex_to_byte
+local bytetoHEX=P(1)/byte_to_HEX
+local bytetohex=P(1)/byte_to_hex
+local bytetodec=P(1)/byte_to_dec
+local hextobytes=Cs(hextobyte^0)
+local bytestoHEX=Cs(bytetoHEX^0)
+local bytestohex=Cs(bytetohex^0)
+local bytestodec=Cs(bytetodec^0)
+patterns.hextobyte=hextobyte
+patterns.bytetoHEX=bytetoHEX
+patterns.bytetohex=bytetohex
+patterns.bytetodec=bytetodec
+patterns.hextobytes=hextobytes
+patterns.bytestoHEX=bytestoHEX
+patterns.bytestohex=bytestohex
+patterns.bytestodec=bytestodec
+function string.toHEX(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestoHEX,s)
+ end
+end
+function string.tohex(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestohex,s)
+ end
+end
+function string.todec(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(bytestodec,s)
+ end
+end
+function string.tobytes(s)
+ if not s or s=="" then
+ return s
+ else
+ return lpegmatch(hextobytes,s)
+ end
+end
end -- closure
@@ -748,11 +944,15 @@ function string.limit(str,n,sentinel)
end
end
local stripper=patterns.stripper
+local fullstripper=patterns.fullstripper
local collapser=patterns.collapser
local longtostring=patterns.longtostring
function string.strip(str)
return lpegmatch(stripper,str) or ""
end
+function string.fullstrip(str)
+ return lpegmatch(fullstripper,str) or ""
+end
function string.collapsespaces(str)
return lpegmatch(collapser,str) or ""
end
@@ -841,7 +1041,7 @@ end
function table.keys(t)
if t then
local keys,k={},0
- for key,_ in next,t do
+ for key in next,t do
k=k+1
keys[k]=key
end
@@ -851,32 +1051,52 @@ function table.keys(t)
end
end
local function compare(a,b)
- local ta,tb=type(a),type(b)
- if ta==tb then
- return a1 then
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedindexonly(tab)
+ if tab then
+ local srt,s={},0
+ for key in next,tab do
+ if type(key)=="number" then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ if s>1 then
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
local function sortedhashkeys(tab,cmp)
if tab then
local srt,s={},0
- for key,_ in next,tab do
+ for key in next,tab do
if key then
s=s+1
srt[s]=key
end
end
- sort(srt,cmp)
+ if s>1 then
+ sort(srt,cmp)
+ end
return srt
else
return {}
@@ -904,13 +1160,15 @@ end
function table.allkeys(t)
local keys={}
for k,v in next,t do
- for k,v in next,v do
+ for k in next,v do
keys[k]=true
end
end
return sortedkeys(keys)
end
table.sortedkeys=sortedkeys
+table.sortedhashonly=sortedhashonly
+table.sortedindexonly=sortedindexonly
table.sortedhashkeys=sortedhashkeys
local function nothing() end
local function sortedhash(t,cmp)
@@ -921,19 +1179,21 @@ local function sortedhash(t,cmp)
else
s=sortedkeys(t)
end
- local n=0
local m=#s
- local function kv(s)
- if n<m then
+ local n=0
+ return function()
+ if n<m then
+ local nt=#t
+ if nt>0 then
local n=0
for _,v in next,t do
n=n+1
end
- if n==#t then
- local tt,nt={},0
- for i=1,#t do
+ if n==nt then
+ local tt={}
+ for i=1,nt do
local v=t[i]
local tv=type(v)
if tv=="number" then
- nt=nt+1
if hexify then
- tt[nt]=format("0x%04X",v)
+ tt[i]=format("0x%X",v)
else
- tt[nt]=tostring(v)
+ tt[i]=tostring(v)
end
elseif tv=="string" then
- nt=nt+1
- tt[nt]=format("%q",v)
+ tt[i]=format("%q",v)
elseif tv=="boolean" then
- nt=nt+1
- tt[nt]=v and "true" or "false"
+ tt[i]=v and "true" or "false"
else
- tt=nil
- break
+ return nil
end
end
return tt
@@ -1126,7 +1383,7 @@ local function do_serialize(root,name,depth,level,indexed)
local tn=type(name)
if tn=="number" then
if hexify then
- handle(format("%s[0x%04X]={",depth,name))
+ handle(format("%s[0x%X]={",depth,name))
else
handle(format("%s[%s]={",depth,name))
end
@@ -1143,7 +1400,7 @@ local function do_serialize(root,name,depth,level,indexed)
end
end
end
- if root and next(root) then
+ if root and next(root)~=nil then
local first,last=nil,0
if compact then
last=#root
@@ -1161,22 +1418,19 @@ local function do_serialize(root,name,depth,level,indexed)
for i=1,#sk do
local k=sk[i]
local v=root[k]
- local tv,tk=type(v),type(k)
+ local tv=type(v)
+ local tk=type(k)
if compact and first and tk=="number" and k>=first and k<=last then
if tv=="number" then
if hexify then
- handle(format("%s 0x%04X,",depth,v))
+ handle(format("%s 0x%X,",depth,v))
else
handle(format("%s %s,",depth,v))
end
elseif tv=="string" then
- if reduce and tonumber(v) then
- handle(format("%s %s,",depth,v))
- else
- handle(format("%s %q,",depth,v))
- end
+ handle(format("%s %q,",depth,v))
elseif tv=="table" then
- if not next(v) then
+ if next(v)==nil then
handle(format("%s {},",depth))
elseif inline then
local st=simple_table(v)
@@ -1206,64 +1460,48 @@ local function do_serialize(root,name,depth,level,indexed)
elseif tv=="number" then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
+ handle(format("%s [0x%X]=0x%X,",depth,k,v))
else
handle(format("%s [%s]=%s,",depth,k,v))
end
elseif tk=="boolean" then
if hexify then
- handle(format("%s [%s]=0x%04X,",depth,k and "true" or "false",v))
+ handle(format("%s [%s]=0x%X,",depth,k and "true" or "false",v))
else
handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
end
elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
if hexify then
- handle(format("%s %s=0x%04X,",depth,k,v))
+ handle(format("%s %s=0x%X,",depth,k,v))
else
handle(format("%s %s=%s,",depth,k,v))
end
else
if hexify then
- handle(format("%s [%q]=0x%04X,",depth,k,v))
+ handle(format("%s [%q]=0x%X,",depth,k,v))
else
handle(format("%s [%q]=%s,",depth,k,v))
end
end
elseif tv=="string" then
- if reduce and tonumber(v) then
- if tk=="number" then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v))
- end
- elseif tk=="boolean" then
- handle(format("%s [%s]=%s,",depth,k and "true" or "false",v))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%s,",depth,k,v))
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%X]=%q,",depth,k,v))
else
- handle(format("%s [%q]=%s,",depth,k,v))
+ handle(format("%s [%s]=%q,",depth,k,v))
end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,v))
else
- if tk=="number" then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,v))
- else
- handle(format("%s [%s]=%q,",depth,k,v))
- end
- elseif tk=="boolean" then
- handle(format("%s [%s]=%q,",depth,k and "true" or "false",v))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%q,",depth,k,v))
- else
- handle(format("%s [%q]=%q,",depth,k,v))
- end
+ handle(format("%s [%q]=%q,",depth,k,v))
end
elseif tv=="table" then
- if not next(v) then
+ if next(v)==nil then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]={},",depth,k))
+ handle(format("%s [0x%X]={},",depth,k))
else
handle(format("%s [%s]={},",depth,k))
end
@@ -1279,7 +1517,7 @@ local function do_serialize(root,name,depth,level,indexed)
if st then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
+ handle(format("%s [0x%X]={ %s },",depth,k,concat(st,", ")))
else
handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
end
@@ -1299,7 +1537,7 @@ local function do_serialize(root,name,depth,level,indexed)
elseif tv=="boolean" then
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v and "true" or "false"))
+ handle(format("%s [0x%X]=%s,",depth,k,v and "true" or "false"))
else
handle(format("%s [%s]=%s,",depth,k,v and "true" or "false"))
end
@@ -1315,7 +1553,7 @@ local function do_serialize(root,name,depth,level,indexed)
local f=getinfo(v).what=="C" and dump(dummy) or dump(v)
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=load(%q),",depth,k,f))
+ handle(format("%s [0x%X]=load(%q),",depth,k,f))
else
handle(format("%s [%s]=load(%q),",depth,k,f))
end
@@ -1330,7 +1568,7 @@ local function do_serialize(root,name,depth,level,indexed)
else
if tk=="number" then
if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
+ handle(format("%s [0x%X]=%q,",depth,k,tostring(v)))
else
handle(format("%s [%s]=%q,",depth,k,tostring(v)))
end
@@ -1354,7 +1592,6 @@ local function serialize(_handle,root,name,specification)
noquotes=specification.noquotes
hexify=specification.hexify
handle=_handle or specification.handle or print
- reduce=specification.reduce or false
functions=specification.functions
compact=specification.compact
inline=specification.inline and compact
@@ -1371,7 +1608,6 @@ local function serialize(_handle,root,name,specification)
noquotes=false
hexify=false
handle=_handle or print
- reduce=false
compact=true
inline=true
functions=true
@@ -1384,7 +1620,7 @@ local function serialize(_handle,root,name,specification)
end
elseif tname=="number" then
if hexify then
- handle(format("[0x%04X]={",name))
+ handle(format("[0x%X]={",name))
else
handle("["..name.."]={")
end
@@ -1402,7 +1638,7 @@ local function serialize(_handle,root,name,specification)
local dummy=root._w_h_a_t_e_v_e_r_
root._w_h_a_t_e_v_e_r_=nil
end
- if next(root) then
+ if next(root)~=nil then
do_serialize(root,name,"",0)
end
end
@@ -1531,14 +1767,25 @@ local function identical(a,b)
end
table.identical=identical
table.are_equal=are_equal
-function table.compact(t)
- if t then
- for k,v in next,t do
- if not next(v) then
- t[k]=nil
+local function sparse(old,nest,keeptables)
+ local new={}
+ for k,v in next,old do
+ if not (v=="" or v==false) then
+ if nest and type(v)=="table" then
+ v=sparse(v,nest)
+ if keeptables or next(v)~=nil then
+ new[k]=v
+ end
+ else
+ new[k]=v
end
end
end
+ return new
+end
+table.sparse=sparse
+function table.compact(t)
+ return sparse(t,true,true)
end
function table.contains(t,v)
if t then
@@ -1636,15 +1883,17 @@ function table.print(t,...)
serialize(print,t,...)
end
end
-setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+if setinspector then
+ setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+end
function table.sub(t,i,j)
return { unpack(t,i,j) }
end
function table.is_empty(t)
- return not t or not next(t)
+ return not t or next(t)==nil
end
function table.has_one_entry(t)
- return t and not next(t,next(t))
+ return t and next(t,next(t))==nil
end
function table.loweredkeys(t)
local l={}
@@ -1689,6 +1938,44 @@ function table.values(t,s)
return {}
end
end
+function table.filtered(t,pattern,sort,cmp)
+ if t and type(pattern)=="string" then
+ if sort then
+ local s
+ if cmp then
+ s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
+ else
+ s=sortedkeys(t)
+ end
+ local n=0
+ local m=#s
+ local function kv(s)
+ while n<m do
+ return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or format((a%s %% 1 == 0) and '%%i' or '%%.9f',a%s))",n,n,n,n)
+ else
+ return format("format((a%s %% 1 == 0) and '%%i' or '%%%sf',a%s)",n,f,n)
+ end
+end
local format_g=function(f)
n=n+1
return format("format('%%%sg',a%s)",f,n)
@@ -2948,7 +3304,7 @@ local builder=Cs { "start",
(
P("%")/""*(
V("!")
-+V("s")+V("q")+V("i")+V("d")+V("f")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
++V("s")+V("q")+V("i")+V("d")+V("f")+V("F")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
+V("c")+V("C")+V("S")
+V("Q")
+V("N")
@@ -2959,7 +3315,6 @@ local builder=Cs { "start",
+V("j")+V("J")
+V("m")+V("M")
+V("z")
-+V("*")
)+V("*")
)*(P(-1)+Carg(1))
)^0,
@@ -2968,6 +3323,7 @@ local builder=Cs { "start",
["i"]=(prefix_any*P("i"))/format_i,
["d"]=(prefix_any*P("d"))/format_d,
["f"]=(prefix_any*P("f"))/format_f,
+ ["F"]=(prefix_any*P("F"))/format_F,
["g"]=(prefix_any*P("g"))/format_g,
["G"]=(prefix_any*P("G"))/format_G,
["e"]=(prefix_any*P("e"))/format_e,
@@ -3002,11 +3358,12 @@ local builder=Cs { "start",
["a"]=(prefix_any*P("a"))/format_a,
["A"]=(prefix_any*P("A"))/format_A,
["*"]=Cs(((1-P("%"))^1+P("%%")/"%%")^1)/format_rest,
+ ["?"]=Cs(((1-P("%"))^1 )^1)/format_rest,
["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
}
local direct=Cs (
- P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1)
- )
+ P("%")*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*P(-1)/[[local format = string.format return function(str) return format("%0",str) end]]
+)
local function make(t,str)
local f
local p
@@ -3015,10 +3372,10 @@ local function make(t,str)
f=loadstripped(p)()
else
n=0
- p=lpegmatch(builder,str,1,"..",t._extensions_)
+ p=lpegmatch(builder,str,1,t._connector_,t._extensions_)
if n>0 then
p=format(template,preamble,t._preamble_,arguments[n],p)
- f=loadstripped(p)()
+ f=loadstripped(p,t._environment_)()
else
f=function() return str end
end
@@ -3030,10 +3387,22 @@ local function use(t,fmt,...)
return t[fmt](...)
end
strings.formatters={}
-function strings.formatters.new()
- local t={ _extensions_={},_preamble_="",_type_="formatter" }
- setmetatable(t,{ __index=make,__call=use })
- return t
+if _LUAVERSION<5.2 then
+ function strings.formatters.new(noconcat)
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_=preamble,_environment_={} }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
+else
+ function strings.formatters.new(noconcat)
+ local e={}
+ for k,v in next,environment do
+ e[k]=v
+ end
+ local t={ _type_="formatter",_connector_=noconcat and "," or "..",_extensions_={},_preamble_="",_environment_=e }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+ end
end
local formatters=strings.formatters.new()
string.formatters=formatters
@@ -3041,8 +3410,12 @@ string.formatter=function(str,...) return formatters[str](...) end
local function add(t,name,template,preamble)
if type(t)=="table" and t._type_=="formatter" then
t._extensions_[name]=template or "%s"
- if preamble then
+ if type(preamble)=="string" then
t._preamble_=preamble.."\n"..t._preamble_
+ elseif type(preamble)=="table" then
+ for k,v in next,preamble do
+ t._environment_[k]=v
+ end
end
end
end
@@ -3051,9 +3424,28 @@ patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;
patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
patterns.luaescape=Cs(((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0)
patterns.luaquoted=Cs(Cc('"')*((1-S('"\n'))^1+P('"')/'\\"'+P('\n')/'\\n"')^0*Cc('"'))
-add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
-add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
-add(formatters,"lua",[[lpegmatch(luaescape,%s)]],[[local luaescape = lpeg.patterns.luaescape]])
+if _LUAVERSION<5.2 then
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],"local luaescape = lpeg.patterns.luaescape")
+else
+ add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],{ xmlescape=lpeg.patterns.xmlescape })
+ add(formatters,"tex",[[lpegmatch(texescape,%s)]],{ texescape=lpeg.patterns.texescape })
+ add(formatters,"lua",[[lpegmatch(luaescape,%s)]],{ luaescape=lpeg.patterns.luaescape })
+end
+local dquote=patterns.dquote
+local equote=patterns.escaped+dquote/'\\"'+1
+local space=patterns.space
+local cquote=Cc('"')
+local pattern=Cs(dquote*(equote-P(-2))^0*dquote)
++Cs(cquote*(equote-space)^0*space*equote^0*cquote)
+function string.optionalquoted(str)
+ return lpegmatch(pattern,str) or str
+end
+local pattern=Cs((newline/os.newline+1)^0)
+function string.replacenewlines(str)
+ return lpegmatch(pattern,str)
+end
end -- closure
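-- Usage sketch (illustration only): formatters compile a template once into a
-- function; the %F and optionalquoted additions above plug into the same machinery.
local f = string.formatters["%s : %i"]
print(f("answer", 42))                      -- answer : 42
print(string.optionalquoted("two words"))   -- "two words" (quoted because of the space)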
@@ -3073,8 +3465,13 @@ end
local dummyfunction=function()
end
local dummyreporter=function(c)
- return function(...)
- (texio.reporter or texio.write_nl)(c.." : "..string.formatters(...))
+ return function(f,...)
+ local r=texio.reporter or texio.write_nl
+ if f then
+ r(c.." : "..string.formatters(f,...))
+ else
+ r("")
+ end
end
end
statistics={
@@ -3250,6 +3647,17 @@ function caches.loaddata(paths,name)
for i=1,#paths do
local data=false
local luaname,lucname=makefullname(paths[i],name)
+ if lucname and not lfs.isfile(lucname) and type(caches.compile)=="function" then
+ texio.write(string.format("(compiling luc: %s)",lucname))
+ data=loadfile(luaname)
+ if data then
+ data=data()
+ end
+ if data then
+ caches.compile(data,luaname,lucname)
+ return data
+ end
+ end
if lucname and lfs.isfile(lucname) then
texio.write(string.format("(load luc: %s)",lucname))
data=loadfile(lucname)
@@ -3303,7 +3711,21 @@ function caches.compile(data,luaname,lucname)
end
end
function table.setmetatableindex(t,f)
+ if type(t)~="table" then
+ f=f or t
+ t={}
+ end
setmetatable(t,{ __index=f })
+ return t
+end
+arguments={}
+if arg then
+ for i=1,#arg do
+ local k,v=string.match(arg[i],"^%-%-([^=]+)=?(.-)$")
+ if k and v then
+ arguments[k]=v
+ end
+ end
end
end -- closure
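-- Sketch of the new generic argument handling above: plain "--key=value" options
-- land in the global 'arguments' table, with an empty string when no value is given.
print(string.match("--mode=test", "^%-%-([^=]+)=?(.-)$"))   -- mode    test
print(string.match("--verbose",   "^%-%-([^=]+)=?(.-)$"))   -- verbose (second capture is "")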
@@ -3473,9 +3895,17 @@ local free_node=node.free
local remove_node=node.remove
local new_node=node.new
local traverse_id=node.traverse_id
-local math_code=nodecodes.math
nodes.handlers.protectglyphs=node.protect_glyphs
nodes.handlers.unprotectglyphs=node.unprotect_glyphs
+local math_code=nodecodes.math
+local end_of_math=node.end_of_math
+function node.end_of_math(n)
+ if n.id==math_code and n.subtype==1 then
+ return n
+ else
+ return end_of_math(n)
+ end
+end
function nodes.remove(head,current,free_too)
local t=current
head,current=remove_node(head,current)
@@ -3497,21 +3927,12 @@ function nodes.pool.kern(k)
n.kern=k
return n
end
-local getfield=node.getfield or function(n,tag) return n[tag] end
-local setfield=node.setfield or function(n,tag,value) n[tag]=value end
+local getfield=node.getfield
+local setfield=node.setfield
nodes.getfield=getfield
nodes.setfield=setfield
nodes.getattr=getfield
nodes.setattr=setfield
-if node.getid then nodes.getid=node.getid else function nodes.getid (n) return getfield(n,"id") end end
-if node.getsubtype then nodes.getsubtype=node.getsubtype else function nodes.getsubtype(n) return getfield(n,"subtype") end end
-if node.getnext then nodes.getnext=node.getnext else function nodes.getnext (n) return getfield(n,"next") end end
-if node.getprev then nodes.getprev=node.getprev else function nodes.getprev (n) return getfield(n,"prev") end end
-if node.getchar then nodes.getchar=node.getchar else function nodes.getchar (n) return getfield(n,"char") end end
-if node.getfont then nodes.getfont=node.getfont else function nodes.getfont (n) return getfield(n,"font") end end
-if node.getlist then nodes.getlist=node.getlist else function nodes.getlist (n) return getfield(n,"list") end end
-function nodes.tonut (n) return n end
-function nodes.tonode(n) return n end
nodes.tostring=node.tostring or tostring
nodes.copy=node.copy
nodes.copy_list=node.copy_list
@@ -3545,10 +3966,73 @@ nodes.set_attribute=node.set_attribute
nodes.unset_attribute=node.unset_attribute
nodes.protect_glyphs=node.protect_glyphs
nodes.unprotect_glyphs=node.unprotect_glyphs
-nodes.kerning=node.kerning
-nodes.ligaturing=node.ligaturing
nodes.mlist_to_hlist=node.mlist_to_hlist
-nodes.nuts=nodes
+local direct=node.direct
+local nuts={}
+nodes.nuts=nuts
+local tonode=direct.tonode
+local tonut=direct.todirect
+nodes.tonode=tonode
+nodes.tonut=tonut
+nuts.tonode=tonode
+nuts.tonut=tonut
+local getfield=direct.getfield
+local setfield=direct.setfield
+nuts.getfield=getfield
+nuts.setfield=setfield
+nuts.getnext=direct.getnext
+nuts.getprev=direct.getprev
+nuts.getid=direct.getid
+nuts.getattr=getfield
+nuts.setattr=setfield
+nuts.getfont=direct.getfont
+nuts.getsubtype=direct.getsubtype
+nuts.getchar=direct.getchar
+nuts.insert_before=direct.insert_before
+nuts.insert_after=direct.insert_after
+nuts.delete=direct.delete
+nuts.copy=direct.copy
+nuts.copy_list=direct.copy_list
+nuts.tail=direct.tail
+nuts.flush_list=direct.flush_list
+nuts.free=direct.free
+nuts.remove=direct.remove
+nuts.is_node=direct.is_node
+nuts.end_of_math=direct.end_of_math
+nuts.traverse=direct.traverse
+nuts.traverse_id=direct.traverse_id
+nuts.getprop=nuts.getattr
+nuts.setprop=nuts.setattr
+local new_nut=direct.new
+nuts.new=new_nut
+nuts.pool={}
+function nuts.pool.kern(k)
+ local n=new_nut("kern",1)
+ setfield(n,"kern",k)
+ return n
+end
+local propertydata=direct.get_properties_table()
+nodes.properties={ data=propertydata }
+direct.set_properties_mode(true,true)
+function direct.set_properties_mode() end
+nuts.getprop=function(n,k)
+ local p=propertydata[n]
+ if p then
+ return p[k]
+ end
+end
+nuts.setprop=function(n,k,v)
+ if v then
+ local p=propertydata[n]
+ if p then
+ p[k]=v
+ else
+ propertydata[n]={ [k]=v }
+ end
+ end
+end
+nodes.setprop=nodes.setproperty
+nodes.getprop=nodes.getproperty
end -- closure
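-- Sketch of the nuts/properties layer set up above (assumes a LuaTeX run where
-- node.direct and the properties table are available):
local nuts  = nodes.nuts
local glyph = nuts.tonut(node.new("glyph"))
nuts.setprop(glyph, "state", "checked")     -- stored in the shared properties table
print(nuts.getprop(glyph, "state"))         -- checked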
@@ -3574,7 +4058,7 @@ fonts.analyzers={}
fonts.readers={}
fonts.definers={ methods={} }
fonts.loggers={ register=function() end }
-fontloader.totable=fontloader.to_table
+fontloader.totable=fontloader.to_table
end -- closure
@@ -3607,7 +4091,8 @@ constructors.autocleanup=true
constructors.namemode="fullpath"
constructors.version=1.01
constructors.cache=containers.define("fonts","constructors",constructors.version,false)
-constructors.privateoffset=0xF0000
+constructors.privateoffset=0xF0000
+constructors.cacheintex=true
constructors.keys={
properties={
encodingbytes="number",
@@ -3769,14 +4254,15 @@ constructors.sharefonts=false
constructors.nofsharedfonts=0
local sharednames={}
function constructors.trytosharefont(target,tfmdata)
- if constructors.sharefonts then
+ if constructors.sharefonts then
local characters=target.characters
local n=1
local t={ target.psname }
local u=sortedkeys(characters)
for i=1,#u do
+ local k=u[i]
n=n+1;t[n]=k
- n=n+1;t[n]=characters[u[i]].index or k
+ n=n+1;t[n]=characters[k].index or k
end
local h=md5.HEX(concat(t," "))
local s=sharednames[h]
@@ -3859,7 +4345,7 @@ function constructors.scale(tfmdata,specification)
targetparameters.textsize=textsize
targetparameters.forcedsize=forcedsize
targetparameters.extrafactor=extrafactor
- local tounicode=resources.tounicode
+ local tounicode=fonts.mappings.tounicode
local defaultwidth=resources.defaultwidth or 0
local defaultheight=resources.defaultheight or 0
local defaultdepth=resources.defaultdepth or 0
@@ -3885,6 +4371,7 @@ function constructors.scale(tfmdata,specification)
target.tounicode=1
target.cidinfo=properties.cidinfo
target.format=properties.format
+ target.cache=constructors.cacheintex and "yes" or "renew"
local fontname=properties.fontname or tfmdata.fontname
local fullname=properties.fullname or tfmdata.fullname
local filename=properties.filename or tfmdata.filename
@@ -3939,7 +4426,9 @@ function constructors.scale(tfmdata,specification)
local autoitalicamount=properties.autoitalicamount
local stackmath=not properties.nostackmath
local nonames=properties.noglyphnames
- local nodemode=properties.mode=="node"
+ local haskerns=properties.haskerns or properties.mode=="base"
+ local hasligatures=properties.hasligatures or properties.mode=="base"
+ local realdimensions=properties.realdimensions
if changed and not next(changed) then
changed=false
end
@@ -4002,38 +4491,44 @@ function constructors.scale(tfmdata,specification)
constructors.beforecopyingcharacters(target,tfmdata)
local sharedkerns={}
for unicode,character in next,characters do
- local chr,description,index,touni
+ local chr,description,index
if changed then
local c=changed[unicode]
if c then
description=descriptions[c] or descriptions[unicode] or character
character=characters[c] or character
index=description.index or c
- if tounicode then
- touni=tounicode[index]
- if not touni then
- local d=descriptions[unicode] or characters[unicode]
- local i=d.index or unicode
- touni=tounicode[i]
- end
- end
else
description=descriptions[unicode] or character
index=description.index or unicode
- if tounicode then
- touni=tounicode[index]
- end
end
else
description=descriptions[unicode] or character
index=description.index or unicode
- if tounicode then
- touni=tounicode[index]
- end
end
local width=description.width
local height=description.height
local depth=description.depth
+ if realdimensions then
+ if not height or height==0 then
+ local bb=description.boundingbox
+ local ht=bb[4]
+ if ht~=0 then
+ height=ht
+ end
+ if not depth or depth==0 then
+ local dp=-bb[2]
+ if dp~=0 then
+ depth=dp
+ end
+ end
+ elseif not depth or depth==0 then
+ local dp=-description.boundingbox[2]
+ if dp~=0 then
+ depth=dp
+ end
+ end
+ end
if width then width=hdelta*width else width=scaledwidth end
if height then height=vdelta*height else height=scaledheight end
if depth and depth~=0 then
@@ -4070,8 +4565,10 @@ function constructors.scale(tfmdata,specification)
}
end
end
- if touni then
- chr.tounicode=touni
+ local isunicode=description.unicode
+ if isunicode then
+ chr.unicode=isunicode
+ chr.tounicode=tounicode(isunicode)
end
if hasquality then
local ve=character.expansion_factor
@@ -4164,7 +4661,7 @@ function constructors.scale(tfmdata,specification)
end
end
end
- if not nodemode then
+ if haskerns then
local vk=character.kerns
if vk then
local s=sharedkerns[vk]
@@ -4175,6 +4672,8 @@ function constructors.scale(tfmdata,specification)
end
chr.kerns=s
end
+ end
+ if hasligatures then
local vl=character.ligatures
if vl then
if true then
@@ -4331,6 +4830,7 @@ function constructors.finalize(tfmdata)
tfmdata.extend=nil
tfmdata.slant=nil
tfmdata.units_per_em=nil
+ tfmdata.cache=nil
properties.finalized=true
return tfmdata
end
@@ -4675,6 +5175,16 @@ function constructors.applymanipulators(what,tfmdata,features,trace,report)
end
end
end
+function constructors.addcoreunicodes(unicodes)
+ if not unicodes then
+ unicodes={}
+ end
+ unicodes.space=0x0020
+ unicodes.hyphen=0x002D
+ unicodes.zwj=0x200D
+ unicodes.zwnj=0x200C
+ return unicodes
+end
end -- closure
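-- Sketch of the new constructors.addcoreunicodes helper above: it only makes sure
-- the four core slots exist in a (possibly fresh) unicodes table.
local u = fonts.constructors.addcoreunicodes()   -- nil argument: a new table is created
print(("%04X %04X %04X %04X"):format(u.space, u.hyphen, u.zwj, u.zwnj))
-- 0020 002D 200D 200C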
@@ -4694,6 +5204,7 @@ end
local fonts=fonts
fonts.encodings={}
fonts.encodings.agl={}
+fonts.encodings.known={}
setmetatable(fonts.encodings.agl,{ __index=function(t,k)
if k=="unicodes" then
texio.write(" ")
@@ -4765,7 +5276,7 @@ local function loadcidfile(filename)
ordering=ordering,
filename=filename,
unicodes=unicodes,
- names=names
+ names=names,
}
end
end
@@ -4802,12 +5313,25 @@ function cid.getmap(specification)
local ordering=specification.ordering
local supplement=specification.supplement
local filename=format(registry,ordering,supplement)
- local found=cidmap[lower(filename)]
+ local lowername=lower(filename)
+ local found=cidmap[lowername]
if found then
return found
end
- if trace_loading then
- report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement)
+ if ordering=="Identity" then
+ local found={
+ supplement=supplement,
+ registry=registry,
+ ordering=ordering,
+ filename=filename,
+ unicodes={},
+ names={},
+ }
+ cidmap[lowername]=found
+ return found
+ end
+ if trace_loading then
+ report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement)
end
found=locate(registry,ordering,supplement)
if not found then
@@ -4856,17 +5380,19 @@ if not modules then modules={} end modules ['font-map']={
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local tonumber=tonumber
+local tonumber,next,type=tonumber,next,type
local match,format,find,concat,gsub,lower=string.match,string.format,string.find,table.concat,string.gsub,string.lower
local P,R,S,C,Ct,Cc,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.match
local utfbyte=utf.byte
local floor=math.floor
+local formatters=string.formatters
local trace_loading=false trackers.register("fonts.loading",function(v) trace_loading=v end)
local trace_mapping=false trackers.register("fonts.mapping",function(v) trace_unimapping=v end)
local report_fonts=logs.reporter("fonts","loading")
local fonts=fonts or {}
local mappings=fonts.mappings or {}
fonts.mappings=mappings
+local allocate=utilities.storage.allocate
local function loadlumtable(filename)
local lumname=file.replacesuffix(file.basename(filename),"lum")
local lumfile=resolvers.findfile(lumname,"map") or ""
@@ -4900,11 +5426,13 @@ local function makenameparser(str)
return p
end
end
+local f_single=formatters["%04X"]
+local f_double=formatters["%04X%04X"]
local function tounicode16(unicode,name)
if unicode<0x10000 then
- return format("%04X",unicode)
+ return f_single(unicode)
elseif unicode<0x1FFFFFFFFF then
- return format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
+ return f_double(floor(unicode/1024),unicode%1024+0xDC00)
else
report_fonts("can't convert %a in %a into tounicode",unicode,name)
end
@@ -4912,17 +5440,43 @@ end
local function tounicode16sequence(unicodes,name)
local t={}
for l=1,#unicodes do
- local unicode=unicodes[l]
- if unicode<0x10000 then
- t[l]=format("%04X",unicode)
+ local u=unicodes[l]
+ if u<0x10000 then
+ t[l]=f_single(u)
elseif unicode<0x1FFFFFFFFF then
- t[l]=format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
+ t[l]=f_double(floor(u/1024),u%1024+0xDC00)
else
- report_fonts ("can't convert %a in %a into tounicode",unicode,name)
+ report_fonts ("can't convert %a in %a into tounicode",u,name)
+ return
end
end
return concat(t)
end
+local function tounicode(unicode,name)
+ if type(unicode)=="table" then
+ local t={}
+ for l=1,#unicode do
+ local u=unicode[l]
+ if u<0x10000 then
+ t[l]=f_single(u)
+ elseif u<0x1FFFFFFFFF then
+ t[l]=f_double(floor(u/1024),u%1024+0xDC00)
+ else
+ report_fonts ("can't convert %a in %a into tounicode",u,name)
+ return
+ end
+ end
+ return concat(t)
+ else
+ if unicode<0x10000 then
+ return f_single(unicode)
+ elseif unicode<0x1FFFFFFFFF then
+ return f_double(floor(unicode/1024),unicode%1024+0xDC00)
+ else
+ report_fonts("can't convert %a in %a into tounicode",unicode,name)
+ end
+ end
+end
local function fromunicode16(str)
if #str==4 then
return tonumber(str,16)
@@ -4933,17 +5487,41 @@ local function fromunicode16(str)
end
mappings.loadlumtable=loadlumtable
mappings.makenameparser=makenameparser
+mappings.tounicode=tounicode
mappings.tounicode16=tounicode16
mappings.tounicode16sequence=tounicode16sequence
mappings.fromunicode16=fromunicode16
local ligseparator=P("_")
local varseparator=P(".")
local namesplitter=Ct(C((1-ligseparator-varseparator)^1)*(ligseparator*C((1-ligseparator-varseparator)^1))^0)
+local overloads=allocate {
+ IJ={ name="I_J",unicode={ 0x49,0x4A },mess=0x0132 },
+ ij={ name="i_j",unicode={ 0x69,0x6A },mess=0x0133 },
+ ff={ name="f_f",unicode={ 0x66,0x66 },mess=0xFB00 },
+ fi={ name="f_i",unicode={ 0x66,0x69 },mess=0xFB01 },
+ fl={ name="f_l",unicode={ 0x66,0x6C },mess=0xFB02 },
+ ffi={ name="f_f_i",unicode={ 0x66,0x66,0x69 },mess=0xFB03 },
+ ffl={ name="f_f_l",unicode={ 0x66,0x66,0x6C },mess=0xFB04 },
+ fj={ name="f_j",unicode={ 0x66,0x6A } },
+ fk={ name="f_k",unicode={ 0x66,0x6B } },
+}
+for k,v in next,overloads do
+ local name=v.name
+ local mess=v.mess
+ if name then
+ overloads[name]=v
+ end
+ if mess then
+ overloads[mess]=v
+ end
+end
+mappings.overloads=overloads
function mappings.addtounicode(data,filename)
local resources=data.resources
local properties=data.properties
local descriptions=data.descriptions
local unicodes=resources.unicodes
+ local lookuptypes=resources.lookuptypes
if not unicodes then
return
end
@@ -4952,18 +5530,10 @@ function mappings.addtounicode(data,filename)
unicodes['zwj']=unicodes['zwj'] or 0x200D
unicodes['zwnj']=unicodes['zwnj'] or 0x200C
local private=fonts.constructors.privateoffset
- local unknown=format("%04X",utfbyte("?"))
- local unicodevector=fonts.encodings.agl.unicodes
- local tounicode={}
- local originals={}
- resources.tounicode=tounicode
- resources.originals=originals
+ local unicodevector=fonts.encodings.agl.unicodes
+ local missing={}
local lumunic,uparser,oparser
local cidinfo,cidnames,cidcodes,usedmap
- if false then
- lumunic=loadlumtable(filename)
- lumunic=lumunic and lumunic.tounicode
- end
cidinfo=properties.cidinfo
usedmap=cidinfo and fonts.cid.getmap(cidinfo)
if usedmap then
@@ -4976,11 +5546,13 @@ function mappings.addtounicode(data,filename)
for unic,glyph in next,descriptions do
local index=glyph.index
local name=glyph.name
- if unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then
+ local r=overloads[name]
+ if r then
+ glyph.unicode=r.unicode
+ elseif unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then
local unicode=lumunic and lumunic[name] or unicodevector[name]
if unicode then
- originals[index]=unicode
- tounicode[index]=tounicode16(unicode,name)
+ glyph.unicode=unicode
ns=ns+1
end
if (not unicode) and usedmap then
@@ -4988,8 +5560,7 @@ function mappings.addtounicode(data,filename)
if foundindex then
unicode=cidcodes[foundindex]
if unicode then
- originals[index]=unicode
- tounicode[index]=tounicode16(unicode,name)
+ glyph.unicode=unicode
ns=ns+1
else
local reference=cidnames[foundindex]
@@ -4998,21 +5569,18 @@ function mappings.addtounicode(data,filename)
if foundindex then
unicode=cidcodes[foundindex]
if unicode then
- originals[index]=unicode
- tounicode[index]=tounicode16(unicode,name)
+ glyph.unicode=unicode
ns=ns+1
end
end
if not unicode or unicode=="" then
local foundcodes,multiple=lpegmatch(uparser,reference)
if foundcodes then
- originals[index]=foundcodes
+ glyph.unicode=foundcodes
if multiple then
- tounicode[index]=tounicode16sequence(foundcodes)
nl=nl+1
unicode=true
else
- tounicode[index]=tounicode16(foundcodes,name)
ns=ns+1
unicode=foundcodes
end
@@ -5050,39 +5618,157 @@ function mappings.addtounicode(data,filename)
end
if n==0 then
elseif n==1 then
- originals[index]=t[1]
- tounicode[index]=tounicode16(t[1],name)
+ glyph.unicode=t[1]
else
- originals[index]=t
- tounicode[index]=tounicode16sequence(t)
+ glyph.unicode=t
end
nl=nl+1
end
if not unicode or unicode=="" then
local foundcodes,multiple=lpegmatch(uparser,name)
if foundcodes then
+ glyph.unicode=foundcodes
if multiple then
- originals[index]=foundcodes
- tounicode[index]=tounicode16sequence(foundcodes,name)
nl=nl+1
unicode=true
else
- originals[index]=foundcodes
- tounicode[index]=tounicode16(foundcodes,name)
ns=ns+1
unicode=foundcodes
end
end
end
+ local r=overloads[unicode]
+ if r then
+ unicode=r.unicode
+ glyph.unicode=unicode
+ end
+ if not unicode then
+ missing[name]=true
+ end
+ end
+ end
+ if next(missing) then
+ local guess={}
+ local function check(gname,code,unicode)
+ local description=descriptions[code]
+ local variant=description.name
+ if variant==gname then
+ return
+ end
+ local unic=unicodes[variant]
+ if unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then
+ else
+ return
+ end
+ if descriptions[code].unicode then
+ return
+ end
+ local g=guess[variant]
+ if g then
+ g[gname]=unicode
+ else
+ guess[variant]={ [gname]=unicode }
+ end
+ end
+ for unicode,description in next,descriptions do
+ local slookups=description.slookups
+ if slookups then
+ local gname=description.name
+ for tag,data in next,slookups do
+ local lookuptype=lookuptypes[tag]
+ if lookuptype=="alternate" then
+ for i=1,#data do
+ check(gname,data[i],unicode)
+ end
+ elseif lookuptype=="substitution" then
+ check(gname,data,unicode)
+ end
+ end
+ end
+ local mlookups=description.mlookups
+ if mlookups then
+ local gname=description.name
+ for tag,list in next,mlookups do
+ local lookuptype=lookuptypes[tag]
+ if lookuptype=="alternate" then
+ for i=1,#list do
+ local data=list[i]
+ for i=1,#data do
+ check(gname,data[i],unicode)
+ end
+ end
+ elseif lookuptype=="substitution" then
+ for i=1,#list do
+ check(gname,list[i],unicode)
+ end
+ end
+ end
+ end
+ end
+ local done=true
+ while done do
+ done=false
+ for k,v in next,guess do
+ if type(v)~="number" then
+ for kk,vv in next,v do
+ if vv==-1 or vv>=private or (vv>=0xE000 and vv<=0xF8FF) or vv==0xFFFE or vv==0xFFFF then
+ local uu=guess[kk]
+ if type(uu)=="number" then
+ guess[k]=uu
+ done=true
+ end
+ else
+ guess[k]=vv
+ done=true
+ end
+ end
+ end
+ end
+ end
+ local orphans=0
+ local guessed=0
+ for k,v in next,guess do
+ if type(v)=="number" then
+ descriptions[unicodes[k]].unicode=descriptions[v].unicode or v
+ guessed=guessed+1
+ else
+ local t=nil
+ local l=lower(k)
+ local u=unicodes[l]
+ if not u then
+ orphans=orphans+1
+ elseif u==-1 or u>=private or (u>=0xE000 and u<=0xF8FF) or u==0xFFFE or u==0xFFFF then
+ local unicode=descriptions[u].unicode
+ if unicode then
+ descriptions[unicodes[k]].unicode=unicode
+ guessed=guessed+1
+ else
+ orphans=orphans+1
+ end
+ else
+ orphans=orphans+1
+ end
+ end
+ end
+ if trace_loading and orphans>0 or guessed>0 then
+ report_fonts("%s glyphs with no related unicode, %s guessed, %s orphans",guessed+orphans,guessed,orphans)
end
end
if trace_mapping then
for unic,glyph in table.sortedhash(descriptions) do
local name=glyph.name
local index=glyph.index
- local toun=tounicode[index]
- if toun then
- report_fonts("internal slot %U, name %a, unicode %U, tounicode %a",index,name,unic,toun)
+ local unicode=glyph.unicode
+ if unicode then
+ if type(unicode)=="table" then
+ local unicodes={}
+ for i=1,#unicode do
+ unicodes[i]=formatters("%U",unicode[i])
+ end
+ report_fonts("internal slot %U, name %a, unicode %U, tounicode % t",index,name,unic,unicodes)
+ else
+ report_fonts("internal slot %U, name %a, unicode %U, tounicode %U",index,name,unic,unicode)
+ end
else
report_fonts("internal slot %U, name %a, unicode %U",index,name,unic)
end
@@ -5224,14 +5910,15 @@ local function read_from_tfm(specification)
properties.fontname=tfmdata.fontname
properties.psname=tfmdata.psname
properties.filename=specification.filename
+ properties.format=fonts.formats.tfm
parameters.size=size
- shared.rawdata={}
- shared.features=features
- shared.processes=next(features) and tfm.setfeatures(tfmdata,features) or nil
tfmdata.properties=properties
tfmdata.resources=resources
tfmdata.parameters=parameters
tfmdata.shared=shared
+ shared.rawdata={}
+ shared.features=features
+ shared.processes=next(features) and tfm.setfeatures(tfmdata,features) or nil
parameters.slant=parameters.slant or parameters[1] or 0
parameters.space=parameters.space or parameters[2] or 0
parameters.space_stretch=parameters.space_stretch or parameters[3] or 0
@@ -5263,6 +5950,10 @@ local function read_from_tfm(specification)
features.encoding=encoding
end
end
+ properties.haskerns=true
+ properties.haslogatures=true
+ resources.unicodes={}
+ resources.lookuptags={}
return tfmdata
end
end
@@ -5318,15 +6009,20 @@ local trace_indexing=false trackers.register("afm.indexing",function(v) trace_in
local trace_loading=false trackers.register("afm.loading",function(v) trace_loading=v end)
local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
local report_afm=logs.reporter("fonts","afm loading")
+local setmetatableindex=table.setmetatableindex
local findbinfile=resolvers.findbinfile
local definers=fonts.definers
local readers=fonts.readers
local constructors=fonts.constructors
+local fontloader=fontloader
+local font_to_table=fontloader.to_table
+local open_font=fontloader.open
+local close_font=fontloader.close
local afm=constructors.newhandler("afm")
local pfb=constructors.newhandler("pfb")
local afmfeatures=constructors.newfeatures("afm")
local registerafmfeature=afmfeatures.register
-afm.version=1.410
+afm.version=1.500
afm.cache=containers.define("fonts","afm",afm.version,true)
afm.autoprefixed=true
afm.helpdata={}
@@ -5334,6 +6030,7 @@ afm.syncspace=true
afm.addligatures=true
afm.addtexligatures=true
afm.addkerns=true
+local overloads=fonts.mappings.overloads
local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes
local function setmode(tfmdata,value)
if value then
@@ -5436,10 +6133,10 @@ local function get_variables(data,fontmetrics)
end
local function get_indexes(data,pfbname)
data.resources.filename=resolvers.unresolve(pfbname)
- local pfbblob=fontloader.open(pfbname)
+ local pfbblob=open_font(pfbname)
if pfbblob then
local characters=data.characters
- local pfbdata=fontloader.to_table(pfbblob)
+ local pfbdata=font_to_table(pfbblob)
if pfbdata then
local glyphs=pfbdata.glyphs
if glyphs then
@@ -5464,7 +6161,7 @@ local function get_indexes(data,pfbname)
elseif trace_loading then
report_afm("no data in pfb file %a",pfbname)
end
- fontloader.close(pfbblob)
+ close_font(pfbblob)
elseif trace_loading then
report_afm("invalid pfb file %a",pfbname)
end
@@ -5521,7 +6218,7 @@ local function readafm(filename)
return nil
end
end
-local addkerns,addligatures,addtexligatures,unify,normalize
+local addkerns,addligatures,addtexligatures,unify,normalize,fixnames
function afm.load(filename)
filename=resolvers.findfile(filename,'afm') or ""
if filename~="" and not fonts.names.ignoredfile(filename) then
@@ -5564,6 +6261,7 @@ function afm.load(filename)
addkerns(data)
end
normalize(data)
+ fixnames(data)
report_afm("add tounicode data")
fonts.mappings.addtounicode(data,filename)
data.size=size
@@ -5571,6 +6269,7 @@ function afm.load(filename)
data.pfbsize=pfbsize
data.pfbtime=pfbtime
report_afm("saving %a in cache",name)
+ data.resources.unicodes=nil
data=containers.write(afm.cache,name,data)
data=containers.read(afm.cache,name)
end
@@ -5620,7 +6319,6 @@ unify=function(data,filename)
if unicode then
krn[unicode]=kern
else
- print(unicode,name)
end
end
description.kerns=krn
@@ -5631,18 +6329,30 @@ unify=function(data,filename)
local filename=resources.filename or file.removesuffix(file.basename(filename))
resources.filename=resolvers.unresolve(filename)
resources.unicodes=unicodes
- resources.marks={}
- resources.names=names
+ resources.marks={}
resources.private=private
end
normalize=function(data)
end
+fixnames=function(data)
+ for k,v in next,data.descriptions do
+ local n=v.name
+ local r=overloads[n]
+ if r then
+ local name=r.name
+ if trace_indexing then
+ report_afm("renaming characters %a to %a",n,name)
+ end
+ v.name=name
+ v.unicode=r.unicode
+ end
+ end
+end
local addthem=function(rawdata,ligatures)
if ligatures then
local descriptions=rawdata.descriptions
local resources=rawdata.resources
local unicodes=resources.unicodes
- local names=resources.names
for ligname,ligdata in next,ligatures do
local one=descriptions[unicodes[ligname]]
if one then
@@ -5775,8 +6485,8 @@ local function copytotfm(data)
local filename=constructors.checkedfilename(resources)
local fontname=metadata.fontname or metadata.fullname
local fullname=metadata.fullname or metadata.fontname
- local endash=unicodes['space']
- local emdash=unicodes['emdash']
+ local endash=0x0020
+ local emdash=0x2014
local spacer="space"
local spaceunits=500
local monospaced=metadata.isfixedpitch
@@ -5830,7 +6540,7 @@ local function copytotfm(data)
if charxheight then
parameters.x_height=charxheight
else
- local x=unicodes['x']
+ local x=0x0078
if x then
local x=descriptions[x]
if x then
@@ -5877,7 +6587,34 @@ function afm.setfeatures(tfmdata,features)
return {}
end
end
-local function checkfeatures(specification)
+local function addtables(data)
+ local resources=data.resources
+ local lookuptags=resources.lookuptags
+ local unicodes=resources.unicodes
+ if not lookuptags then
+ lookuptags={}
+ resources.lookuptags=lookuptags
+ end
+ setmetatableindex(lookuptags,function(t,k)
+ local v=type(k)=="number" and ("lookup "..k) or k
+ t[k]=v
+ return v
+ end)
+ if not unicodes then
+ unicodes={}
+ resources.unicodes=unicodes
+ setmetatableindex(unicodes,function(t,k)
+ setmetatableindex(unicodes,nil)
+ for u,d in next,data.descriptions do
+ local n=d.name
+ if n then
+ t[n]=u
+ end
+ end
+ return rawget(t,k)
+ end)
+ end
+ constructors.addcoreunicodes(unicodes)
end
local function afmtotfm(specification)
local afmname=specification.filename or specification.name
@@ -5904,6 +6641,7 @@ local function afmtotfm(specification)
if not tfmdata then
local rawdata=afm.load(afmname)
if rawdata and next(rawdata) then
+ addtables(rawdata)
adddimensions(rawdata)
tfmdata=copytotfm(rawdata)
if tfmdata and next(tfmdata) then
@@ -5938,6 +6676,7 @@ end
local function prepareligatures(tfmdata,ligatures,value)
if value then
local descriptions=tfmdata.descriptions
+ local hasligatures=false
for unicode,character in next,tfmdata.characters do
local description=descriptions[unicode]
local dligatures=description.ligatures
@@ -5953,8 +6692,10 @@ local function prepareligatures(tfmdata,ligatures,value)
type=0
}
end
+ hasligatures=true
end
end
+ tfmdata.properties.hasligatures=hasligatures
end
end
local function preparekerns(tfmdata,kerns,value)
@@ -5963,6 +6704,7 @@ local function preparekerns(tfmdata,kerns,value)
local resources=rawdata.resources
local unicodes=resources.unicodes
local descriptions=tfmdata.descriptions
+ local haskerns=false
for u,chr in next,tfmdata.characters do
local d=descriptions[u]
local newkerns=d[kerns]
@@ -5978,8 +6720,10 @@ local function preparekerns(tfmdata,kerns,value)
kerns[uk]=v
end
end
+ haskerns=true
end
end
+ tfmdata.properties.haskerns=haskerns
end
end
local list={
@@ -6405,10 +7149,12 @@ local type,next,tonumber,tostring=type,next,tonumber,tostring
local abs=math.abs
local insert=table.insert
local lpegmatch=lpeg.match
-local reversed,concat,remove=table.reversed,table.concat,table.remove
+local reversed,concat,remove,sortedkeys=table.reversed,table.concat,table.remove,table.sortedkeys
local ioflush=io.flush
local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive
local formatters=string.formatters
+local P,R,S,C,Ct,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.match
+local setmetatableindex=table.setmetatableindex
local allocate=utilities.storage.allocate
local registertracker=trackers.register
local registerdirective=directives.register
@@ -6423,26 +7169,28 @@ local trace_dynamics=false registertracker("otf.dynamics",function(v) trace_dyna
local trace_sequences=false registertracker("otf.sequences",function(v) trace_sequences=v end)
local trace_markwidth=false registertracker("otf.markwidth",function(v) trace_markwidth=v end)
local trace_defining=false registertracker("fonts.defining",function(v) trace_defining=v end)
+local compact_lookups=true registertracker("otf.compactlookups",function(v) compact_lookups=v end)
+local purge_names=true registertracker("otf.purgenames",function(v) purge_names=v end)
local report_otf=logs.reporter("fonts","otf loading")
local fonts=fonts
local otf=fonts.handlers.otf
otf.glists={ "gsub","gpos" }
-otf.version=2.749
+otf.version=2.803
otf.cache=containers.define("fonts","otf",otf.version,true)
-local fontdata=fonts.hashes.identifiers
+local hashes=fonts.hashes
+local definers=fonts.definers
+local readers=fonts.readers
+local constructors=fonts.constructors
+local fontdata=hashes and hashes.identifiers
local chardata=characters and characters.data
-local otffeatures=fonts.constructors.newfeatures("otf")
+local otffeatures=constructors.newfeatures("otf")
local registerotffeature=otffeatures.register
local enhancers=allocate()
otf.enhancers=enhancers
local patches={}
enhancers.patches=patches
-local definers=fonts.definers
-local readers=fonts.readers
-local constructors=fonts.constructors
local forceload=false
local cleanup=0
-local usemetatables=false
local packdata=true
local syncspace=true
local forcenotdef=false
@@ -6451,7 +7199,11 @@ local overloadkerns=false
local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes
local wildcard="*"
local default="dflt"
-local fontloaderfields=fontloader.fields
+local fontloader=fontloader
+local open_font=fontloader.open
+local close_font=fontloader.close
+local font_fields=fontloader.fields
+local apply_featurefile=fontloader.apply_featurefile
local mainfields=nil
local glyphfields=nil
local formats=fonts.formats
@@ -6461,7 +7213,6 @@ formats.ttc="truetype"
formats.dfont="truetype"
registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end)
registerdirective("fonts.otf.loader.force",function(v) forceload=v end)
-registerdirective("fonts.otf.loader.usemetatables",function(v) usemetatables=v end)
registerdirective("fonts.otf.loader.pack",function(v) packdata=v end)
registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end)
registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end)
@@ -6493,7 +7244,7 @@ local function load_featurefile(raw,featurefile)
if trace_loading then
report_otf("using featurefile %a",featurefile)
end
- fontloader.apply_featurefile(raw,featurefile)
+ apply_featurefile(raw,featurefile)
end
end
local function showfeatureorder(rawdata,filename)
@@ -6553,7 +7304,6 @@ local valid_fields=table.tohash {
"extrema_bound",
"familyname",
"fontname",
- "fontname",
"fontstyle_id",
"fontstyle_name",
"fullname",
@@ -6579,6 +7329,7 @@ local valid_fields=table.tohash {
"upos",
"use_typo_metrics",
"uwidth",
+ "validation_state",
"version",
"vert_base",
"weight",
@@ -6590,7 +7341,6 @@ local ordered_enhancers={
"prepare lookups",
"analyze glyphs",
"analyze math",
- "prepare tounicode",
"reorganize lookups",
"reorganize mark classes",
"reorganize anchor classes",
@@ -6603,9 +7353,12 @@ local ordered_enhancers={
"check glyphs",
"check metadata",
"check extra features",
+ "prepare tounicode",
"check encoding",
"add duplicates",
"cleanup tables",
+ "compact lookups",
+ "purge names",
}
local actions=allocate()
local before=allocate()
@@ -6742,12 +7495,12 @@ function otf.load(filename,sub,featurefile)
report_otf("loading %a, hash %a",filename,hash)
local fontdata,messages
if sub then
- fontdata,messages=fontloader.open(filename,sub)
+ fontdata,messages=open_font(filename,sub)
else
- fontdata,messages=fontloader.open(filename)
+ fontdata,messages=open_font(filename)
end
if fontdata then
- mainfields=mainfields or (fontloaderfields and fontloaderfields(fontdata))
+ mainfields=mainfields or (font_fields and font_fields(fontdata))
end
if trace_loading and messages and #messages>0 then
if type(messages)=="string" then
@@ -6787,6 +7540,7 @@ function otf.load(filename,sub,featurefile)
},
lookuptypes={},
},
+ warnings={},
metadata={
},
properties={
@@ -6795,7 +7549,7 @@ function otf.load(filename,sub,featurefile)
goodies={},
helpers={
tounicodelist=splitter,
- tounicodetable=lpeg.Ct(splitter),
+ tounicodetable=Ct(splitter),
},
}
starttiming(data)
@@ -6820,7 +7574,7 @@ function otf.load(filename,sub,featurefile)
report_otf("preprocessing and caching time %s, packtime %s",
elapsedtime(data),packdata and elapsedtime(packtime) or 0)
end
- fontloader.close(fontdata)
+ close_font(fontdata)
if cleanup>3 then
collectgarbage("collect")
end
@@ -6838,6 +7592,34 @@ function otf.load(filename,sub,featurefile)
report_otf("loading from cache using hash %a",hash)
end
enhance("unpack",data,filename,nil,false)
+ local resources=data.resources
+ local lookuptags=resources.lookuptags
+ local unicodes=resources.unicodes
+ if not lookuptags then
+ lookuptags={}
+ resources.lookuptags=lookuptags
+ end
+ setmetatableindex(lookuptags,function(t,k)
+ local v=type(k)=="number" and ("lookup "..k) or k
+ t[k]=v
+ return v
+ end)
+ if not unicodes then
+ unicodes={}
+ resources.unicodes=unicodes
+ setmetatableindex(unicodes,function(t,k)
+ setmetatableindex(unicodes,nil)
+ for u,d in next,data.descriptions do
+ local n=d.name
+ if n then
+ t[n]=u
+ else
+ end
+ end
+ return rawget(t,k)
+ end)
+ end
+ constructors.addcoreunicodes(unicodes)
if applyruntimefixes then
applyruntimefixes(filename,data)
end
@@ -6874,26 +7656,16 @@ actions["add dimensions"]=function(data,filename)
local defaultheight=resources.defaultheight or 0
local defaultdepth=resources.defaultdepth or 0
local basename=trace_markwidth and file.basename(filename)
- if usemetatables then
- for _,d in next,descriptions do
- local wd=d.width
- if not wd then
- d.width=defaultwidth
- elseif trace_markwidth and wd~=0 and d.class=="mark" then
- report_otf("mark %a with width %b found in %a",d.name or "",wd,basename)
- end
- setmetatable(d,mt)
+ for _,d in next,descriptions do
+ local bb,wd=d.boundingbox,d.width
+ if not wd then
+ d.width=defaultwidth
+ elseif trace_markwidth and wd~=0 and d.class=="mark" then
+ report_otf("mark %a with width %b found in %a",d.name or "",wd,basename)
end
- else
- for _,d in next,descriptions do
- local bb,wd=d.boundingbox,d.width
- if not wd then
- d.width=defaultwidth
- elseif trace_markwidth and wd~=0 and d.class=="mark" then
- report_otf("mark %a with width %b found in %a",d.name or "",wd,basename)
- end
- if bb then
- local ht,dp=bb[4],-bb[2]
+ if bb then
+ local ht=bb[4]
+ local dp=-bb[2]
if ht==0 or ht<0 then
else
d.height=ht
@@ -6902,7 +7674,6 @@ actions["add dimensions"]=function(data,filename)
else
d.depth=dp
end
- end
end
end
end
@@ -6969,17 +7740,26 @@ actions["prepare glyphs"]=function(data,filename,raw)
local glyph=cidglyphs[index]
if glyph then
local unicode=glyph.unicode
+ if unicode>=0x00E000 and unicode<=0x00F8FF then
+ unicode=-1
+ elseif unicode>=0x0F0000 and unicode<=0x0FFFFD then
+ unicode=-1
+ elseif unicode>=0x100000 and unicode<=0x10FFFD then
+ unicode=-1
+ end
local name=glyph.name or cidnames[index]
- if not unicode or unicode==-1 or unicode>=criterium then
+ if not unicode or unicode==-1 then
unicode=cidunicodes[index]
end
if unicode and descriptions[unicode] then
- report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode)
+ if trace_private then
+ report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode)
+ end
unicode=-1
end
- if not unicode or unicode==-1 or unicode>=criterium then
+ if not unicode or unicode==-1 then
if not name then
- name=format("u%06X",private)
+ name=format("u%06X.ctx",private)
end
unicode=private
unicodes[name]=private
@@ -6990,7 +7770,7 @@ actions["prepare glyphs"]=function(data,filename,raw)
nofnames=nofnames+1
else
if not name then
- name=format("u%06X",unicode)
+ name=format("u%06X.ctx",unicode)
end
unicodes[name]=unicode
nofunicodes=nofunicodes+1
@@ -7023,7 +7803,7 @@ actions["prepare glyphs"]=function(data,filename,raw)
if glyph then
local unicode=glyph.unicode
local name=glyph.name
- if not unicode or unicode==-1 or unicode>=criterium then
+ if not unicode or unicode==-1 then
unicode=private
unicodes[name]=private
if trace_private then
@@ -7031,12 +7811,29 @@ actions["prepare glyphs"]=function(data,filename,raw)
end
private=private+1
else
+ if unicode>criterium then
+ local taken=descriptions[unicode]
+ if taken then
+ if unicode>=private then
+ private=unicode+1
+ else
+ private=private+1
+ end
+ descriptions[private]=taken
+ unicodes[taken.name]=private
+ indices[taken.index]=private
+ if trace_private then
+ report_otf("slot %U is moved to %U due to private in font",unicode)
+ end
+ else
+ if unicode>=private then
+ private=unicode+1
+ end
+ end
+ end
unicodes[name]=unicode
end
indices[index]=unicode
- if not name then
- name=format("u%06X",unicode)
- end
descriptions[unicode]={
boundingbox=glyph.boundingbox,
name=name,
@@ -7045,7 +7842,6 @@ actions["prepare glyphs"]=function(data,filename,raw)
}
local altuni=glyph.altuni
if altuni then
- local d
for i=1,#altuni do
local a=altuni[i]
local u=a.unicode
@@ -7058,15 +7854,8 @@ actions["prepare glyphs"]=function(data,filename,raw)
vv={ [u]=unicode }
variants[v]=vv
end
- elseif d then
- d[#d+1]=u
- else
- d={ u }
end
end
- if d then
- duplicates[unicode]=d
- end
end
else
report_otf("potential problem: glyph %U is used but empty",index)
@@ -7084,47 +7873,45 @@ actions["check encoding"]=function(data,filename,raw)
local duplicates=resources.duplicates
local mapdata=raw.map or {}
local unicodetoindex=mapdata and mapdata.map or {}
+ local indextounicode=mapdata and mapdata.backmap or {}
local encname=lower(data.enc_name or mapdata.enc_name or "")
- local criterium=0xFFFF
+ local criterium=0xFFFF
+ local privateoffset=constructors.privateoffset
if find(encname,"unicode") then
if trace_loading then
report_otf("checking embedded unicode map %a",encname)
end
- for unicode,index in next,unicodetoindex do
- if unicode<=criterium and not descriptions[unicode] then
- local parent=indices[index]
- if not parent then
- report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
+ local reported={}
+ for maybeunicode,index in next,unicodetoindex do
+ if descriptions[maybeunicode] then
+ else
+ local unicode=indices[index]
+ if not unicode then
+ elseif maybeunicode==unicode then
+ elseif unicode>privateoffset then
else
- local parentdescription=descriptions[parent]
- if parentdescription then
- local altuni=parentdescription.altuni
- if not altuni then
- altuni={ { unicode=unicode } }
- parentdescription.altuni=altuni
- duplicates[parent]={ unicode }
+ local d=descriptions[unicode]
+ if d then
+ local c=d.copies
+ if c then
+ c[maybeunicode]=true
else
- local done=false
- for i=1,#altuni do
- if altuni[i].unicode==unicode then
- done=true
- break
- end
- end
- if not done then
- insert(altuni,{ unicode=unicode })
- insert(duplicates[parent],unicode)
- end
+ d.copies={ [maybeunicode]=true }
end
- if trace_loading then
- report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
- end
- else
- report_otf("weird, unicode %U points to %U with index %H",unicode,index)
+ elseif index and not reported[index] then
+ report_otf("missing index %i",index)
+ reported[index]=true
end
end
end
end
+ for unicode,data in next,descriptions do
+ local d=data.copies
+ if d then
+ duplicates[unicode]=sortedkeys(d)
+ data.copies=nil
+ end
+ end
elseif properties.cidinfo then
report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
else
@@ -7132,6 +7919,7 @@ actions["check encoding"]=function(data,filename,raw)
end
if mapdata then
mapdata.map={}
+ mapdata.backmap={}
end
end
actions["add duplicates"]=function(data,filename,raw)
@@ -7142,28 +7930,37 @@ actions["add duplicates"]=function(data,filename,raw)
local indices=resources.indices
local duplicates=resources.duplicates
for unicode,d in next,duplicates do
- for i=1,#d do
- local u=d[i]
- if not descriptions[u] then
- local description=descriptions[unicode]
- local duplicate=table.copy(description)
- duplicate.comment=format("copy of U+%05X",unicode)
- descriptions[u]=duplicate
- local n=0
- for _,description in next,descriptions do
- if kerns then
+ local nofduplicates=#d
+ if nofduplicates>4 then
+ if trace_loading then
+ report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates)
+ end
+ else
+ for i=1,nofduplicates do
+ local u=d[i]
+ if not descriptions[u] then
+ local description=descriptions[unicode]
+ local n=0
+ for _,description in next,descriptions do
local kerns=description.kerns
- for _,k in next,kerns do
- local ku=k[unicode]
- if ku then
- k[u]=ku
- n=n+1
+ if kerns then
+ for _,k in next,kerns do
+ local ku=k[unicode]
+ if ku then
+ k[u]=ku
+ n=n+1
+ end
end
end
end
- end
- if trace_loading then
- report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
+ if u>0 then
+ local duplicate=table.copy(description)
+ duplicate.comment=format("copy of U+%05X",unicode)
+ descriptions[u]=duplicate
+ if trace_loading then
+ report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
+ end
+ end
end
end
end
@@ -7358,10 +8155,16 @@ actions["reorganize subtables"]=function(data,filename,raw)
report_otf("skipping weird lookup number %s",k)
elseif features then
local f={}
+ local o={}
for i=1,#features do
local df=features[i]
local tag=strip(lower(df.tag))
- local ft=f[tag] if not ft then ft={} f[tag]=ft end
+ local ft=f[tag]
+ if not ft then
+ ft={}
+ f[tag]=ft
+ o[#o+1]=tag
+ end
local dscripts=df.scripts
for i=1,#dscripts do
local d=dscripts[i]
@@ -7381,6 +8184,7 @@ actions["reorganize subtables"]=function(data,filename,raw)
subtables=subtables,
markclass=markclass,
features=f,
+ order=o,
}
else
lookups[name]={
@@ -7433,9 +8237,14 @@ local function t_hashed(t,cache)
local ti=t[i]
local tih=cache[ti]
if not tih then
- tih={}
- for i=1,#ti do
- tih[ti[i]]=true
+ local tn=#ti
+ if tn==1 then
+ tih={ [ti[1]]=true }
+ else
+ tih={}
+ for i=1,tn do
+ tih[ti[i]]=true
+ end
end
cache[ti]=tih
end
@@ -7448,12 +8257,17 @@ local function t_hashed(t,cache)
end
local function s_hashed(t,cache)
if t then
- local ht={}
local tf=t[1]
- for i=1,#tf do
- ht[i]={ [tf[i]]=true }
+ local nf=#tf
+ if nf==1 then
+ return { [tf[1]]=true }
+ else
+ local ht={}
+ for i=1,nf do
+ ht[i]={ [tf[i]]=true }
+ end
+ return ht
end
- return ht
else
return nil
end
@@ -7603,12 +8417,12 @@ actions["reorganize lookups"]=function(data,filename,raw)
local fore=glyphs.fore
if fore and fore~="" then
fore=s_uncover(splitter,s_u_cache,fore)
- rule.before=s_hashed(fore,s_h_cache)
+ rule.after=s_hashed(fore,s_h_cache)
end
local back=glyphs.back
if back then
back=s_uncover(splitter,s_u_cache,back)
- rule.after=s_hashed(back,s_h_cache)
+ rule.before=s_hashed(back,s_h_cache)
end
local names=glyphs.names
if names then
@@ -7616,6 +8430,14 @@ actions["reorganize lookups"]=function(data,filename,raw)
rule.current=s_hashed(names,s_h_cache)
end
rule.glyphs=nil
+ local lookups=rule.lookups
+ if lookups then
+ for i=1,#names do
+ if not lookups[i] then
+ lookups[i]=""
+ end
+ end
+ end
end
end
end
@@ -7632,7 +8454,9 @@ local function check_variants(unicode,the_variants,splitter,unicodes)
for i=1,#glyphs do
local g=glyphs[i]
if done[g] then
- report_otf("skipping cyclic reference %U in math variant %U",g,unicode)
+ if i>1 then
+ report_otf("skipping cyclic reference %U in math variant %U",g,unicode)
+ end
else
if n==0 then
n=1
@@ -7871,6 +8695,10 @@ actions["check glyphs"]=function(data,filename,raw)
description.glyph=nil
end
end
+local valid=(R("\x00\x7E")-S("(){}[]<>%/ \n\r\f\v"))^0*P(-1)
+local function valid_ps_name(str)
+ return str and str~="" and #str<64 and lpegmatch(valid,str) and true or false
+end
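-- A standalone sketch of the same validity test (assuming only stock lpeg, as
-- bundled with LuaTeX); a usable PostScript name is non-empty, shorter than 64
-- bytes, plain ASCII and free of delimiters and whitespace:
local lpeg = lpeg or require("lpeg")
local P, R, S = lpeg.P, lpeg.R, lpeg.S
local okname = (R("\x00\x7E") - S("(){}[]<>%/ \n\r\f\v"))^0 * P(-1)
local function is_valid_ps_name(str) -- helper name made up for this sketch
    return str and str ~= "" and #str < 64 and lpeg.match(okname,str) and true or false
end
print(is_valid_ps_name("LatinModernRoman-Bold")) -- true
print(is_valid_ps_name("Latin Modern (Roman)"))  -- false: space and parentheses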
actions["check metadata"]=function(data,filename,raw)
local metadata=data.metadata
for _,k in next,mainfields do
@@ -7887,10 +8715,51 @@ actions["check metadata"]=function(data,filename,raw)
ttftables[i].data="deleted"
end
end
+ if metadata.validation_state and table.contains(metadata.validation_state,"bad_ps_fontname") then
+ local function valid(what)
+ local names=raw.names
+ for i=1,#names do
+ local list=names[i]
+ local names=list.names
+ if names then
+ local name=names[what]
+ if name and valid_ps_name(name) then
+ return name
+ end
+ end
+ end
+ end
+ local function check(what)
+ local oldname=metadata[what]
+ if valid_ps_name(oldname) then
+ report_otf("ignoring warning %a because %s %a is proper ASCII","bad_ps_fontname",what,oldname)
+ else
+ local newname=valid(what)
+ if not newname then
+ newname=formatters["bad-%s-%s"](what,file.nameonly(filename))
+ end
+ local warning=formatters["overloading %s from invalid ASCII name %a to %a"](what,oldname,newname)
+ data.warnings[#data.warnings+1]=warning
+ report_otf(warning)
+ metadata[what]=newname
+ end
+ end
+ check("fontname")
+ check("fullname")
+ end
end
actions["cleanup tables"]=function(data,filename,raw)
+ local duplicates=data.resources.duplicates
+ if duplicates then
+ for k,v in next,duplicates do
+ if #v==1 then
+ duplicates[k]=v[1]
+ end
+ end
+ end
data.resources.indices=nil
- data.helpers=nil
+ data.resources.unicodes=nil
+ data.helpers=nil
end
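-- The cleanup above stores a single duplicate as a plain number and several as
-- a list, so later consumers (for instance the expansion in otftotfm further
-- down) have to accept both shapes. A tiny sketch of that convention (helper
-- name made up):
local function withduplicates(list,action)
    if type(list) == "table" then
        for i=1,#list do
            action(list[i])
        end
    elseif list then
        action(list)
    end
end
withduplicates(0x00A0,print)             -- one duplicate, stored as a number
withduplicates({ 0x2010, 0x2011 },print) -- several, stored as a list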
actions["reorganize glyph lookups"]=function(data,filename,raw)
local resources=data.resources
@@ -7972,6 +8841,7 @@ actions["reorganize glyph lookups"]=function(data,filename,raw)
end
end
end
+local zero={ 0,0 }
actions["reorganize glyph anchors"]=function(data,filename,raw)
local descriptions=data.descriptions
for unicode,description in next,descriptions do
@@ -7980,14 +8850,32 @@ actions["reorganize glyph anchors"]=function(data,filename,raw)
for class,data in next,anchors do
if class=="baselig" then
for tag,specification in next,data do
- for i=1,#specification do
- local si=specification[i]
- specification[i]={ si.x or 0,si.y or 0 }
+ local n=0
+ for k,v in next,specification do
+ if k>n then
+ n=k
+ end
+ local x,y=v.x,v.y
+ if x or y then
+ specification[k]={ x or 0,y or 0 }
+ else
+ specification[k]=zero
+ end
+ end
+ local t={}
+ for i=1,n do
+ t[i]=specification[i] or zero
end
+ data[tag]=t
end
else
for tag,specification in next,data do
- data[tag]={ specification.x or 0,specification.y or 0 }
+ local x,y=specification.x,specification.y
+ if x or y then
+ data[tag]={ x or 0,y or 0 }
+ else
+ data[tag]=zero
+ end
end
end
end
@@ -7995,6 +8883,142 @@ actions["reorganize glyph anchors"]=function(data,filename,raw)
end
end
end
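-- Ligature base anchors ("baselig") can come in as a sparse hash with gaps, and
-- the loop above rebuilds them as a dense array, sharing one { 0, 0 } table for
-- empty slots. A plain Lua sketch of that densification:
local zero = { 0, 0 }
local function densify(specification)
    local n = 0
    for k, v in pairs(specification) do
        if k > n then n = k end
        local x, y = v.x, v.y
        specification[k] = (x or y) and { x or 0, y or 0 } or zero
    end
    local t = { }
    for i=1,n do
        t[i] = specification[i] or zero
    end
    return t
end
local dense = densify { [1] = { x = 10 }, [3] = { y = -20 } }
print(#dense, dense[2] == zero) -- 3  true: the gap shares the zero table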
+local bogusname=(P("uni")+P("u"))*R("AF","09")^4+(P("index")+P("glyph")+S("Ii")*P("dentity")*P(".")^0)*R("09")^1
+local uselessname=(1-bogusname)^0*bogusname
+actions["purge names"]=function(data,filename,raw)
+ if purge_names then
+ local n=0
+ for u,d in next,data.descriptions do
+ if lpegmatch(uselessname,d.name) then
+ n=n+1
+ d.name=nil
+ end
+ end
+ if n>0 then
+ report_otf("%s bogus names removed",n)
+ end
+ end
+end
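-- A small illustration of which glyph names the purge above treats as bogus:
-- autogenerated names like uniXXXX, uXXXX, glyph123, index42 or Identity.5 can
-- be reconstructed and only bloat the cache (pattern copied from the code,
-- wrapped in a plain predicate for testing):
local lpeg = lpeg or require("lpeg")
local P, R, S = lpeg.P, lpeg.R, lpeg.S
local bogus = (P("uni") + P("u")) * R("AF","09")^4
            + (P("index") + P("glyph") + S("Ii") * P("dentity") * P(".")^0) * R("09")^1
local useless = (1 - bogus)^0 * bogus
local function is_useless(name)
    return name and lpeg.match(useless,name) ~= nil
end
print(is_useless("uni0041"))   -- true: synthetic unicode name
print(is_useless("glyph1234")) -- true: index based name
print(is_useless("a.sc"))      -- false: a real (small caps) name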
+actions["compact lookups"]=function(data,filename,raw)
+ if not compact_lookups then
+ report_otf("not compacting")
+ return
+ end
+ local last=0
+ local tags=table.setmetatableindex({},
+ function(t,k)
+ last=last+1
+ t[k]=last
+ return last
+ end
+ )
+ local descriptions=data.descriptions
+ local resources=data.resources
+ for u,d in next,descriptions do
+ local slookups=d.slookups
+ if type(slookups)=="table" then
+ local s={}
+ for k,v in next,slookups do
+ s[tags[k]]=v
+ end
+ d.slookups=s
+ end
+ local mlookups=d.mlookups
+ if type(mlookups)=="table" then
+ local m={}
+ for k,v in next,mlookups do
+ m[tags[k]]=v
+ end
+ d.mlookups=m
+ end
+ local kerns=d.kerns
+ if type(kerns)=="table" then
+ local t={}
+ for k,v in next,kerns do
+ t[tags[k]]=v
+ end
+ d.kerns=t
+ end
+ end
+ local lookups=data.lookups
+ if lookups then
+ local l={}
+ for k,v in next,lookups do
+ local rules=v.rules
+ if rules then
+ for i=1,#rules do
+ local l=rules[i].lookups
+ if type(l)=="table" then
+ for i=1,#l do
+ l[i]=tags[l[i]]
+ end
+ end
+ end
+ end
+ l[tags[k]]=v
+ end
+ data.lookups=l
+ end
+ local lookups=resources.lookups
+ if lookups then
+ local l={}
+ for k,v in next,lookups do
+ local s=v.subtables
+ if type(s)=="table" then
+ for i=1,#s do
+ s[i]=tags[s[i]]
+ end
+ end
+ l[tags[k]]=v
+ end
+ resources.lookups=l
+ end
+ local sequences=resources.sequences
+ if sequences then
+ for i=1,#sequences do
+ local s=sequences[i]
+ local n=s.name
+ if n then
+ s.name=tags[n]
+ end
+ local t=s.subtables
+ if type(t)=="table" then
+ for i=1,#t do
+ t[i]=tags[t[i]]
+ end
+ end
+ end
+ end
+ local lookuptypes=resources.lookuptypes
+ if lookuptypes then
+ local l={}
+ for k,v in next,lookuptypes do
+ l[tags[k]]=v
+ end
+ resources.lookuptypes=l
+ end
+ local anchor_to_lookup=resources.anchor_to_lookup
+ if anchor_to_lookup then
+ for anchor,lookups in next,anchor_to_lookup do
+ local l={}
+ for lookup,value in next,lookups do
+ l[tags[lookup]]=value
+ end
+ anchor_to_lookup[anchor]=l
+ end
+ end
+ local lookup_to_anchor=resources.lookup_to_anchor
+ if lookup_to_anchor then
+ local l={}
+ for lookup,value in next,lookup_to_anchor do
+ l[tags[lookup]]=value
+ end
+ resources.lookup_to_anchor=l
+ end
+ tags=table.swapped(tags)
+ report_otf("%s lookup tags compacted",#tags)
+ resources.lookuptags=tags
+end
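-- The compaction above replaces long lookup name strings by small integers that
-- a self filling table hands out on first access; at the end the table is
-- swapped so that lookuptags can map numbers back to names for tracing. A plain
-- Lua sketch of that idiom (ConTeXt wraps it in table.setmetatableindex and
-- table.swapped):
local last = 0
local tags = setmetatable({ }, {
    __index = function(t,k)
        last = last + 1
        t[k] = last
        return last
    end,
})
print(tags["s_s_0"]) -- 1, assigned on first access
print(tags["s_s_1"]) -- 2
print(tags["s_s_0"]) -- 1, stable on later accesses
local swapped = { }  -- what ends up in resources.lookuptags
for name, n in pairs(tags) do
    swapped[n] = name
end
print(swapped[2])    -- s_s_1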
function otf.setfeatures(tfmdata,features)
local okay=constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf)
if okay then
@@ -8006,6 +9030,7 @@ end
local function copytotfm(data,cache_id)
if data then
local metadata=data.metadata
+ local warnings=data.warnings
local resources=data.resources
local properties=derivetable(data.properties)
local descriptions=derivetable(data.descriptions)
@@ -8080,6 +9105,7 @@ local function copytotfm(data,cache_id)
local filename=constructors.checkedfilename(resources)
local fontname=metadata.fontname
local fullname=metadata.fullname or fontname
+ local psname=fontname or fullname
local units=metadata.units_per_em or 1000
if units==0 then
units=1000
@@ -8094,8 +9120,8 @@ local function copytotfm(data,cache_id)
parameters.italicangle=italicangle
parameters.charwidth=charwidth
parameters.charxheight=charxheight
- local space=0x0020
- local emdash=0x2014
+ local space=0x0020
+ local emdash=0x2014
if monospaced then
if descriptions[space] then
spaceunits,spacer=descriptions[space].width,"space"
@@ -8142,7 +9168,7 @@ local function copytotfm(data,cache_id)
if charxheight then
parameters.x_height=charxheight
else
- local x=0x78
+ local x=0x0078
if x then
local x=descriptions[x]
if x then
@@ -8161,8 +9187,16 @@ local function copytotfm(data,cache_id)
properties.filename=filename
properties.fontname=fontname
properties.fullname=fullname
- properties.psname=fontname or fullname
+ properties.psname=psname
properties.name=filename or fullname
+ if warnings and #warnings>0 then
+ report_otf("warnings for font: %s",filename)
+ report_otf()
+ for i=1,#warnings do
+ report_otf(" %s",warnings[i])
+ end
+ report_otf()
+ end
return {
characters=characters,
descriptions=descriptions,
@@ -8171,6 +9205,7 @@ local function copytotfm(data,cache_id)
resources=resources,
properties=properties,
goodies=goodies,
+ warnings=warnings,
}
end
end
@@ -8184,6 +9219,33 @@ local function otftotfm(specification)
local features=specification.features.normal
local rawdata=otf.load(filename,sub,features and features.featurefile)
if rawdata and next(rawdata) then
+ local descriptions=rawdata.descriptions
+ local duplicates=rawdata.resources.duplicates
+ if duplicates then
+ local nofduplicates,nofduplicated=0,0
+ for parent,list in next,duplicates do
+ if type(list)=="table" then
+ local n=#list
+ for i=1,n do
+ local unicode=list[i]
+ if not descriptions[unicode] then
+ descriptions[unicode]=descriptions[parent]
+ nofduplicated=nofduplicated+1
+ end
+ end
+ nofduplicates=nofduplicates+n
+ else
+ if not descriptions[list] then
+ descriptions[list]=descriptions[parent]
+ nofduplicated=nofduplicated+1
+ end
+ nofduplicates=nofduplicates+1
+ end
+ end
+ if trace_otf and nofduplicated~=nofduplicates then
+ report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates)
+ end
+ end
rawdata.lookuphash={}
tfmdata=copytotfm(rawdata,cache_id)
if tfmdata and next(tfmdata) then
@@ -8309,7 +9371,7 @@ if not modules then modules={} end modules ['font-otb']={
}
local concat=table.concat
local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
-local type,next,tonumber,tostring=type,next,tonumber,tostring
+local type,next,tonumber,tostring,rawget=type,next,tonumber,tostring,rawget
local lpegmatch=lpeg.match
local utfchar=utf.char
local trace_baseinit=false trackers.register("otf.baseinit",function(v) trace_baseinit=v end)
@@ -8341,13 +9403,14 @@ local function gref(descriptions,n)
return f_unicode(n)
end
elseif n then
- local num,nam={},{}
- for i=2,#n do
+ local num,nam,j={},{},0
+ for i=1,#n do
local ni=n[i]
if tonumber(ni) then
+ j=j+1
local di=descriptions[ni]
- num[i]=f_unicode(ni)
- nam[i]=di and di.name or "-"
+ num[j]=f_unicode(ni)
+ nam[j]=di and di.name or "-"
end
end
return f_unilist(num,nam)
@@ -8355,36 +9418,36 @@ local function gref(descriptions,n)
return ""
end
end
-local function cref(feature,lookupname)
+local function cref(feature,lookuptags,lookupname)
if lookupname then
- return formatters["feature %a, lookup %a"](feature,lookupname)
+ return formatters["feature %a, lookup %a"](feature,lookuptags[lookupname])
else
return formatters["feature %a"](feature)
end
end
-local function report_alternate(feature,lookupname,descriptions,unicode,replacement,value,comment)
+local function report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,comment)
report_prepare("%s: base alternate %s => %s (%S => %S)",
- cref(feature,lookupname),
+ cref(feature,lookuptags,lookupname),
gref(descriptions,unicode),
replacement and gref(descriptions,replacement),
value,
comment)
end
-local function report_substitution(feature,lookupname,descriptions,unicode,substitution)
+local function report_substitution(feature,lookuptags,lookupname,descriptions,unicode,substitution)
report_prepare("%s: base substitution %s => %S",
- cref(feature,lookupname),
+ cref(feature,lookuptags,lookupname),
gref(descriptions,unicode),
gref(descriptions,substitution))
end
-local function report_ligature(feature,lookupname,descriptions,unicode,ligature)
+local function report_ligature(feature,lookuptags,lookupname,descriptions,unicode,ligature)
report_prepare("%s: base ligature %s => %S",
- cref(feature,lookupname),
+ cref(feature,lookuptags,lookupname),
gref(descriptions,ligature),
gref(descriptions,unicode))
end
-local function report_kern(feature,lookupname,descriptions,unicode,otherunicode,value)
+local function report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,value)
report_prepare("%s: base kern %s + %s => %S",
- cref(feature,lookupname),
+ cref(feature,lookuptags,lookupname),
gref(descriptions,unicode),
gref(descriptions,otherunicode),
value)
@@ -8421,7 +9484,7 @@ local function finalize_ligatures(tfmdata,ligatures)
local characters=tfmdata.characters
local descriptions=tfmdata.descriptions
local resources=tfmdata.resources
- local unicodes=resources.unicodes
+ local unicodes=resources.unicodes
local private=resources.private
local alldone=false
while not alldone do
@@ -8430,8 +9493,8 @@ local function finalize_ligatures(tfmdata,ligatures)
local ligature=ligatures[i]
if ligature then
local unicode,lookupdata=ligature[1],ligature[2]
- if trace then
- trace_ligatures_detail("building % a into %a",lookupdata,unicode)
+ if trace_ligatures_detail then
+ report_prepare("building % a into %a",lookupdata,unicode)
end
local size=#lookupdata
local firstcode=lookupdata[1]
@@ -8443,8 +9506,8 @@ local function finalize_ligatures(tfmdata,ligatures)
local firstdata=characters[firstcode]
if not firstdata then
firstcode=private
- if trace then
- trace_ligatures_detail("defining %a as %a",firstname,firstcode)
+ if trace_ligatures_detail then
+ report_prepare("defining %a as %a",firstname,firstcode)
end
unicodes[firstname]=firstcode
firstdata={ intermediate=true,ligatures={} }
@@ -8457,18 +9520,18 @@ local function finalize_ligatures(tfmdata,ligatures)
local secondname=firstname.."_"..secondcode
if i==size-1 then
target=unicode
- if not unicodes[secondname] then
+ if not rawget(unicodes,secondname) then
unicodes[secondname]=unicode
end
okay=true
else
- target=unicodes[secondname]
+ target=rawget(unicodes,secondname)
if not target then
break
end
end
- if trace then
- trace_ligatures_detail("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target)
+ if trace_ligatures_detail then
+ report_prepare("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target)
end
local firstligs=firstdata.ligatures
if firstligs then
@@ -8479,6 +9542,8 @@ local function finalize_ligatures(tfmdata,ligatures)
firstcode=target
firstname=secondname
end
+ elseif trace_ligatures_detail then
+ report_prepare("no glyph (%a,%a) for building %a",firstname,firstcode,target)
end
if okay then
ligatures[i]=false
@@ -8488,62 +9553,66 @@ local function finalize_ligatures(tfmdata,ligatures)
end
alldone=done==0
end
- if trace then
- for k,v in next,characters do
- if v.ligatures then table.print(v,k) end
+ if trace_ligatures_detail then
+ for k,v in table.sortedhash(characters) do
+ if v.ligatures then
+ table.print(v,k)
+ end
end
end
- tfmdata.resources.private=private
+ resources.private=private
+ return true
end
end
local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
local characters=tfmdata.characters
local descriptions=tfmdata.descriptions
local resources=tfmdata.resources
+ local properties=tfmdata.properties
local changed=tfmdata.changed
- local unicodes=resources.unicodes
local lookuphash=resources.lookuphash
local lookuptypes=resources.lookuptypes
+ local lookuptags=resources.lookuptags
local ligatures={}
- local alternate=tonumber(value)
+ local alternate=tonumber(value) or true and 1
local defaultalt=otf.defaultbasealternate
local trace_singles=trace_baseinit and trace_singles
local trace_alternatives=trace_baseinit and trace_alternatives
local trace_ligatures=trace_baseinit and trace_ligatures
local actions={
- substitution=function(lookupdata,lookupname,description,unicode)
+ substitution=function(lookupdata,lookuptags,lookupname,description,unicode)
if trace_singles then
- report_substitution(feature,lookupname,descriptions,unicode,lookupdata)
+ report_substitution(feature,lookuptags,lookupname,descriptions,unicode,lookupdata)
end
changed[unicode]=lookupdata
end,
- alternate=function(lookupdata,lookupname,description,unicode)
+ alternate=function(lookupdata,lookuptags,lookupname,description,unicode)
local replacement=lookupdata[alternate]
if replacement then
changed[unicode]=replacement
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal")
end
elseif defaultalt=="first" then
replacement=lookupdata[1]
changed[unicode]=replacement
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
end
elseif defaultalt=="last" then
replacement=lookupdata[#data]
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
end
else
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown")
end
end
end,
- ligature=function(lookupdata,lookupname,description,unicode)
+ ligature=function(lookupdata,lookuptags,lookupname,description,unicode)
if trace_ligatures then
- report_ligature(feature,lookupname,descriptions,unicode,lookupdata)
+ report_ligature(feature,lookuptags,lookupname,descriptions,unicode,lookupdata)
end
ligatures[#ligatures+1]={ unicode,lookupdata }
end,
@@ -8559,7 +9628,7 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
local lookuptype=lookuptypes[lookupname]
local action=actions[lookuptype]
if action then
- action(lookupdata,lookupname,description,unicode)
+ action(lookupdata,lookuptags,lookupname,description,unicode)
end
end
end
@@ -8574,22 +9643,24 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
local action=actions[lookuptype]
if action then
for i=1,#lookuplist do
- action(lookuplist[i],lookupname,description,unicode)
+ action(lookuplist[i],lookuptags,lookupname,description,unicode)
end
end
end
end
end
end
- finalize_ligatures(tfmdata,ligatures)
+ properties.hasligatures=finalize_ligatures(tfmdata,ligatures)
end
local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
local characters=tfmdata.characters
local descriptions=tfmdata.descriptions
local resources=tfmdata.resources
- local unicodes=resources.unicodes
+ local properties=tfmdata.properties
+ local lookuptags=resources.lookuptags
local sharedkerns={}
local traceindeed=trace_baseinit and trace_kerns
+ local haskerns=false
for unicode,character in next,characters do
local description=descriptions[unicode]
local rawkerns=description.kerns
@@ -8611,13 +9682,13 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist
newkerns={ [otherunicode]=value }
done=true
if traceindeed then
- report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
+ report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value)
end
elseif not newkerns[otherunicode] then
newkerns[otherunicode]=value
done=true
if traceindeed then
- report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
+ report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value)
end
end
end
@@ -8626,12 +9697,14 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist
if done then
sharedkerns[rawkerns]=newkerns
character.kerns=newkerns
+ haskerns=true
else
sharedkerns[rawkerns]=false
end
end
end
end
+ properties.haskerns=haskerns
end
basemethods.independent={
preparesubstitutions=preparesubstitutions,
@@ -8657,13 +9730,13 @@ local function make_1(present,tree,name)
end
end
end
-local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookupname)
+local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookuptags,lookupname)
for k,v in next,tree do
if k=="ligature" then
local character=characters[preceding]
if not character then
if trace_baseinit then
- report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookupname,v,preceding)
+ report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookuptags[lookupname],v,preceding)
end
character=makefake(tfmdata,name,present)
end
@@ -8684,7 +9757,7 @@ local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,don
else
local code=present[name] or unicode
local name=name.."_"..k
- make_2(present,tfmdata,characters,v,name,code,k,done,lookupname)
+ make_2(present,tfmdata,characters,v,name,code,k,done,lookuptags,lookupname)
end
end
end
@@ -8695,8 +9768,9 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
local changed=tfmdata.changed
local lookuphash=resources.lookuphash
local lookuptypes=resources.lookuptypes
+ local lookuptags=resources.lookuptags
local ligatures={}
- local alternate=tonumber(value)
+ local alternate=tonumber(value) or true and 1
local defaultalt=otf.defaultbasealternate
local trace_singles=trace_baseinit and trace_singles
local trace_alternatives=trace_baseinit and trace_alternatives
@@ -8708,7 +9782,7 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
for unicode,data in next,lookupdata do
if lookuptype=="substitution" then
if trace_singles then
- report_substitution(feature,lookupname,descriptions,unicode,data)
+ report_substitution(feature,lookuptags,lookupname,descriptions,unicode,data)
end
changed[unicode]=data
elseif lookuptype=="alternate" then
@@ -8716,28 +9790,28 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
if replacement then
changed[unicode]=replacement
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal")
end
elseif defaultalt=="first" then
replacement=data[1]
changed[unicode]=replacement
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
end
elseif defaultalt=="last" then
replacement=data[#data]
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
end
else
if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
+ report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown")
end
end
elseif lookuptype=="ligature" then
ligatures[#ligatures+1]={ unicode,data,lookupname }
if trace_ligatures then
- report_ligature(feature,lookupname,descriptions,unicode,data)
+ report_ligature(feature,lookuptags,lookupname,descriptions,unicode,data)
end
end
end
@@ -8755,7 +9829,7 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
for i=1,nofligatures do
local ligature=ligatures[i]
local unicode,tree,lookupname=ligature[1],ligature[2],ligature[3]
- make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookupname)
+ make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookuptags,lookupname)
end
end
end
@@ -8763,7 +9837,9 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist
local characters=tfmdata.characters
local descriptions=tfmdata.descriptions
local resources=tfmdata.resources
+ local properties=tfmdata.properties
local lookuphash=resources.lookuphash
+ local lookuptags=resources.lookuptags
local traceindeed=trace_baseinit and trace_kerns
for l=1,#lookuplist do
local lookupname=lookuplist[l]
@@ -8779,7 +9855,7 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist
for otherunicode,kern in next,data do
if not kerns[otherunicode] and kern~=0 then
kerns[otherunicode]=kern
- report_kern(feature,lookup,descriptions,unicode,otherunicode,kern)
+ report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,kern)
end
end
else
@@ -8803,8 +9879,9 @@ basemethods.shared={
basemethod="independent"
local function featuresinitializer(tfmdata,value)
if true then
- local t=trace_preparing and os.clock()
+ local starttime=trace_preparing and os.clock()
local features=tfmdata.shared.features
+ local fullname=tfmdata.properties.fullname or "?"
if features then
applybasemethod("initializehashes",tfmdata)
local collectlookups=otf.collectlookups
@@ -8814,26 +9891,35 @@ local function featuresinitializer(tfmdata,value)
local language=properties.language
local basesubstitutions=rawdata.resources.features.gsub
local basepositionings=rawdata.resources.features.gpos
- if basesubstitutions then
- for feature,data in next,basesubstitutions do
- local value=features[feature]
- if value then
- local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
- if validlookups then
- applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
- registerbasefeature(feature,value)
- end
- end
- end
- end
- if basepositionings then
- for feature,data in next,basepositionings do
- local value=features[feature]
- if value then
- local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
- if validlookups then
- applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
- registerbasefeature(feature,value)
+ if basesubstitutions or basepositionings then
+ local sequences=tfmdata.resources.sequences
+ for s=1,#sequences do
+ local sequence=sequences[s]
+ local sfeatures=sequence.features
+ if sfeatures then
+ local order=sequence.order
+ if order then
+ for i=1,#order do
+ local feature=order[i]
+ local value=features[feature]
+ if value then
+ local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
+ if not validlookups then
+ elseif basesubstitutions and basesubstitutions[feature] then
+ if trace_preparing then
+ report_prepare("filtering base %s feature %a for %a with value %a","sub",feature,fullname,value)
+ end
+ applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ elseif basepositionings and basepositionings[feature] then
+ if trace_preparing then
+ report_prepare("filtering base %a feature %a for %a with value %a","pos",feature,fullname,value)
+ end
+ applybasemethod("preparepositionings",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
end
end
end
@@ -8841,7 +9927,7 @@ local function featuresinitializer(tfmdata,value)
registerbasehash(tfmdata)
end
if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %a",os.clock()-t,tfmdata.properties.fullname)
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,fullname)
end
end
end
@@ -8863,17 +9949,19 @@ end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules={} end modules ['node-inj']={
+if not modules then modules={} end modules ['font-inj']={
version=1.001,
- comment="companion to node-ini.mkiv",
+ comment="companion to font-lib.mkiv",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files",
}
-local next=next
+if not nodes.properties then return end
+local next,rawget=next,rawget
local utfchar=utf.char
-local trace_injections=false trackers.register("nodes.injections",function(v) trace_injections=v end)
-local report_injections=logs.reporter("nodes","injections")
+local fastcopy=table.fastcopy
+local trace_injections=false trackers.register("fonts.injections",function(v) trace_injections=v end)
+local report_injections=logs.reporter("fonts","injections")
local attributes,nodes,node=attributes,nodes,node
fonts=fonts
local fontdata=fonts.hashes.identifiers
@@ -8889,140 +9977,339 @@ local newkern=nodepool.kern
local tonode=nuts.tonode
local tonut=nuts.tonut
local getfield=nuts.getfield
+local setfield=nuts.setfield
local getnext=nuts.getnext
local getprev=nuts.getprev
local getid=nuts.getid
-local getattr=nuts.getattr
local getfont=nuts.getfont
local getsubtype=nuts.getsubtype
local getchar=nuts.getchar
-local setfield=nuts.setfield
-local setattr=nuts.setattr
local traverse_id=nuts.traverse_id
local insert_node_before=nuts.insert_before
local insert_node_after=nuts.insert_after
-local a_kernpair=attributes.private('kernpair')
-local a_ligacomp=attributes.private('ligacomp')
-local a_markbase=attributes.private('markbase')
-local a_markmark=attributes.private('markmark')
-local a_markdone=attributes.private('markdone')
-local a_cursbase=attributes.private('cursbase')
-local a_curscurs=attributes.private('curscurs')
-local a_cursdone=attributes.private('cursdone')
+local find_tail=nuts.tail
+local properties=nodes.properties.data
function injections.installnewkern(nk)
newkern=nk or newkern
end
-local cursives={}
-local marks={}
-local kerns={}
-function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
- local dx,dy=factor*(exit[1]-entry[1]),factor*(exit[2]-entry[2])
- local ws,wn=tfmstart.width,tfmnext.width
- local bound=#cursives+1
- setattr(start,a_cursbase,bound)
- setattr(nxt,a_curscurs,bound)
- cursives[bound]={ rlmode,dx,dy,ws,wn }
- return dx,dy,bound
+local nofregisteredkerns=0
+local nofregisteredpairs=0
+local nofregisteredmarks=0
+local nofregisteredcursives=0
+local keepregisteredcounts=false
+function injections.keepcounts()
+ keepregisteredcounts=true
+end
+function injections.resetcounts()
+ nofregisteredkerns=0
+ nofregisteredpairs=0
+ nofregisteredmarks=0
+ nofregisteredcursives=0
+ keepregisteredcounts=false
+end
+function injections.reset(n)
+ local p=rawget(properties,n)
+ if p and rawget(p,"injections") then
+ p.injections=nil
+ end
+end
+function injections.copy(target,source)
+ local sp=rawget(properties,source)
+ if sp then
+ local tp=rawget(properties,target)
+ local si=rawget(sp,"injections")
+ if si then
+ si=fastcopy(si)
+ if tp then
+ tp.injections=si
+ else
+ propertydata[target]={
+ injections=si,
+ }
+ end
+ else
+ if tp then
+ tp.injections=nil
+ end
+ end
+ end
end
-function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
- local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4]
- if x~=0 or w~=0 or y~=0 or h~=0 then
- local bound=getattr(current,a_kernpair)
- if bound then
- local kb=kerns[bound]
- kb[2],kb[3],kb[4],kb[5]=(kb[2] or 0)+x,(kb[3] or 0)+y,(kb[4] or 0)+w,(kb[5] or 0)+h
+function injections.setligaindex(n,index)
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ i.ligaindex=index
+ else
+ p.injections={
+ ligaindex=index
+ }
+ end
+ else
+ properties[n]={
+ injections={
+ ligaindex=index
+ }
+ }
+ end
+end
+function injections.getligaindex(n,default)
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ return i.ligaindex or default
+ end
+ end
+ return default
+end
+function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
+ local dx=factor*(exit[1]-entry[1])
+ local dy=-factor*(exit[2]-entry[2])
+ local ws,wn=tfmstart.width,tfmnext.width
+ nofregisteredcursives=nofregisteredcursives+1
+ if rlmode<0 then
+ dx=-(dx+wn)
+ else
+ dx=dx-ws
+ end
+ local p=rawget(properties,start)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ i.cursiveanchor=true
+ else
+ p.injections={
+ cursiveanchor=true,
+ }
+ end
+ else
+ properties[start]={
+ injections={
+ cursiveanchor=true,
+ },
+ }
+ end
+ local p=rawget(properties,nxt)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ i.cursivex=dx
+ i.cursivey=dy
else
- bound=#kerns+1
- setattr(current,a_kernpair,bound)
- kerns[bound]={ rlmode,x,y,w,h,r2lflag,tfmchr.width }
+ p.injections={
+ cursivex=dx,
+ cursivey=dy,
+ }
end
- return x,y,w,h,bound
+ else
+ properties[nxt]={
+ injections={
+ cursivex=dx,
+ cursivey=dy,
+ },
+ }
+ end
+ return dx,dy,nofregisteredcursives
+end
+function injections.setpair(current,factor,rlmode,r2lflag,spec,injection)
+ local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4]
+ if x~=0 or w~=0 or y~=0 or h~=0 then
+ local yoffset=y-h
+ local leftkern=x
+ local rightkern=w-x
+ if leftkern~=0 or rightkern~=0 or yoffset~=0 then
+ nofregisteredpairs=nofregisteredpairs+1
+ if rlmode and rlmode<0 then
+ leftkern,rightkern=rightkern,leftkern
+ end
+ local p=rawget(properties,current)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ if leftkern~=0 then
+ i.leftkern=(i.leftkern or 0)+leftkern
+ end
+ if rightkern~=0 then
+ i.rightkern=(i.rightkern or 0)+rightkern
+ end
+ if yoffset~=0 then
+ i.yoffset=(i.yoffset or 0)+yoffset
+ end
+ elseif leftkern~=0 or rightkern~=0 then
+ p.injections={
+ leftkern=leftkern,
+ rightkern=rightkern,
+ yoffset=yoffset,
+ }
+ else
+ p.injections={
+ yoffset=yoffset,
+ }
+ end
+ elseif leftkern~=0 or rightkern~=0 then
+ properties[current]={
+ injections={
+ leftkern=leftkern,
+ rightkern=rightkern,
+ yoffset=yoffset,
+ },
+ }
+ else
+ properties[current]={
+ injections={
+ yoffset=yoffset,
+ },
+ }
+ end
+ return x,y,w,h,nofregisteredpairs
+ end
end
return x,y,w,h
end
-function injections.setkern(current,factor,rlmode,x,tfmchr)
+function injections.setkern(current,factor,rlmode,x,injection)
local dx=factor*x
if dx~=0 then
- local bound=#kerns+1
- setattr(current,a_kernpair,bound)
- kerns[bound]={ rlmode,dx }
- return dx,bound
+ nofregisteredkerns=nofregisteredkerns+1
+ local p=rawget(properties,current)
+ if not injection then
+ injection="injections"
+ end
+ if p then
+ local i=rawget(p,injection)
+ if i then
+ i.leftkern=dx+(i.leftkern or 0)
+ else
+ p[injection]={
+ leftkern=dx,
+ }
+ end
+ else
+ properties[current]={
+ [injection]={
+ leftkern=dx,
+ },
+ }
+ end
+ return dx,nofregisteredkerns
else
return 0,0
end
end
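-- The rewritten injector no longer allocates attribute bound numbers; amounts
-- are accumulated in per node property tables and only the counters above keep
-- track of what was registered, so the handler can later pick the cheapest
-- pass. A plain Lua sketch of the accumulation idiom (in the real code the
-- outer table is nodes.properties.data, indexed by the node itself):
local properties = { }
local function addleftkern(n,dx)
    local p = properties[n]
    if p then
        local i = p.injections
        if i then
            i.leftkern = (i.leftkern or 0) + dx
        else
            p.injections = { leftkern = dx }
        end
    else
        properties[n] = { injections = { leftkern = dx } }
    end
end
addleftkern("some node",100)
addleftkern("some node",50)
print(properties["some node"].injections.leftkern) -- 150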
-function injections.setmark(start,base,factor,rlmode,ba,ma,index,baseismark)
- local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2])
- local bound=getattr(base,a_markbase)
- local index=1
- if bound then
- local mb=marks[bound]
- if mb then
- index=#mb+1
- mb[index]={ dx,dy,rlmode }
- setattr(start,a_markmark,bound)
- setattr(start,a_markdone,index)
- return dx,dy,bound
+function injections.setmark(start,base,factor,rlmode,ba,ma,tfmbase)
+ local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2])
+ nofregisteredmarks=nofregisteredmarks+1
+ if rlmode>=0 then
+ dx=tfmbase.width-dx
+ end
+ local p=rawget(properties,start)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ i.markx=dx
+ i.marky=dy
+ i.markdir=rlmode or 0
+ i.markbase=nofregisteredmarks
+ i.markbasenode=base
else
- report_injections("possible problem, %U is base mark without data (id %a)",getchar(base),bound)
+ p.injections={
+ markx=dx,
+ marky=dy,
+ markdir=rlmode or 0,
+ markbase=nofregisteredmarks,
+ markbasenode=base,
+ }
end
+ else
+ properties[start]={
+ injections={
+ markx=dx,
+ marky=dy,
+ markdir=rlmode or 0,
+ markbase=nofregisteredmarks,
+ markbasenode=base,
+ },
+ }
end
- index=index or 1
- bound=#marks+1
- setattr(base,a_markbase,bound)
- setattr(start,a_markmark,bound)
- setattr(start,a_markdone,index)
- marks[bound]={ [index]={ dx,dy,rlmode,baseismark } }
- return dx,dy,bound
+ return dx,dy,nofregisteredmarks
end
local function dir(n)
return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
end
-local function trace(head)
- report_injections("begin run")
- for n in traverse_id(glyph_code,head) do
- if getsubtype(n)<256 then
- local kp=getattr(n,a_kernpair)
- local mb=getattr(n,a_markbase)
- local mm=getattr(n,a_markmark)
- local md=getattr(n,a_markdone)
- local cb=getattr(n,a_cursbase)
- local cc=getattr(n,a_curscurs)
- local char=getchar(n)
- report_injections("font %s, char %U, glyph %c",getfont(n),char,char)
- if kp then
- local k=kerns[kp]
- if k[3] then
- report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5])
- else
- report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2])
- end
- end
- if mb then
- report_injections(" markbase: bound %a",mb)
- end
- if mm then
- local m=marks[mm]
- if mb then
- local m=m[mb]
- if m then
- report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2])
- else
- report_injections(" markmark: bound %a, missing index",mm)
- end
- else
- m=m[1]
- report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2])
- end
+local function showchar(n,nested)
+ local char=getchar(n)
+ report_injections("%wfont %s, char %U, glyph %c",nested and 2 or 0,getfont(n),char,char)
+end
+local function show(n,what,nested,symbol)
+ if n then
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,what)
+ if i then
+ local leftkern=i.leftkern or 0
+ local rightkern=i.rightkern or 0
+ local yoffset=i.yoffset or 0
+ local markx=i.markx or 0
+ local marky=i.marky or 0
+ local markdir=i.markdir or 0
+ local markbase=i.markbase or 0
+ local cursivex=i.cursivex or 0
+ local cursivey=i.cursivey or 0
+ local ligaindex=i.ligaindex or 0
+ local margin=nested and 4 or 2
+ if rightkern~=0 or yoffset~=0 then
+ report_injections("%w%s pair: lx %p, rx %p, dy %p",margin,symbol,leftkern,rightkern,yoffset)
+ elseif leftkern~=0 then
+ report_injections("%w%s kern: dx %p",margin,symbol,leftkern)
+ end
+ if markx~=0 or marky~=0 or markbase~=0 then
+ report_injections("%w%s mark: dx %p, dy %p, dir %s, base %s",margin,symbol,markx,marky,markdir,markbase~=0 and "yes" or "no")
+ end
+ if cursivex~=0 or cursivey~=0 then
+ report_injections("%w%s curs: dx %p, dy %p",margin,symbol,cursivex,cursivey)
+ end
+ if ligaindex~=0 then
+ report_injections("%w%s liga: index %i",margin,symbol,ligaindex)
+ end
+ end
+ end
+ end
+end
+local function showsub(n,what,where)
+ report_injections("begin subrun: %s",where)
+ for n in traverse_id(glyph_code,n) do
+ showchar(n,where)
+ show(n,what,where," ")
+ end
+ report_injections("end subrun")
+end
+local function trace(head,where)
+ report_injections("begin run %s: %s kerns, %s pairs, %s marks and %s cursives registered",
+ where or "",nofregisteredkerns,nofregisteredpairs,nofregisteredmarks,nofregisteredcursives)
+ local n=head
+ while n do
+ local id=getid(n)
+ if id==glyph_code then
+ showchar(n)
+ show(n,"injections",false," ")
+ show(n,"preinjections",false,"<")
+ show(n,"postinjections",false,">")
+ show(n,"replaceinjections",false,"=")
+ elseif id==disc_code then
+ local pre=getfield(n,"pre")
+ local post=getfield(n,"post")
+ local replace=getfield(n,"replace")
+ if pre then
+ showsub(pre,"preinjections","pre")
end
- if cb then
- report_injections(" cursbase: bound %a",cb)
+ if post then
+ showsub(post,"postinjections","post")
end
- if cc then
- local c=cursives[cc]
- report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3])
+ if replace then
+ showsub(replace,"replaceinjections","replace")
end
end
+ n=getnext(n)
end
report_injections("end run")
end
@@ -9042,309 +10329,577 @@ local function show_result(head)
report_injections()
skipping=true
end
- current=getnext(current)
+ current=getnext(current)
+ end
+end
+local function collect_glyphs_1(head)
+ local glyphs,nofglyphs={},0
+ local marks,nofmarks={},0
+ local nf,tm=nil,nil
+ for n in traverse_id(glyph_code,head) do
+ if getsubtype(n)<256 then
+ local f=getfont(n)
+ if f~=nf then
+ nf=f
+ tm=fontdata[nf].resources.marks
+ end
+ if tm and tm[getchar(n)] then
+ nofmarks=nofmarks+1
+ marks[nofmarks]=n
+ else
+ nofglyphs=nofglyphs+1
+ glyphs[nofglyphs]=n
+ end
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"injections")
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ end
+ end
+ end
+ end
+ return glyphs,nofglyphs,marks,nofmarks
+end
+local function collect_glyphs_2(head)
+ local glyphs,nofglyphs={},0
+ local marks,nofmarks={},0
+ local nf,tm=nil,nil
+ for n in traverse_id(glyph_code,head) do
+ if getsubtype(n)<256 then
+ local f=getfont(n)
+ if f~=nf then
+ nf=f
+ tm=fontdata[nf].resources.marks
+ end
+ if tm and tm[getchar(n)] then
+ nofmarks=nofmarks+1
+ marks[nofmarks]=n
+ else
+ nofglyphs=nofglyphs+1
+ glyphs[nofglyphs]=n
+ end
+ end
+ end
+ return glyphs,nofglyphs,marks,nofmarks
+end
+local function inject_marks(marks,nofmarks)
+ for i=1,nofmarks do
+ local n=marks[i]
+ local pn=rawget(properties,n)
+ if pn then
+ pn=rawget(pn,"injections")
+ if pn then
+ local p=pn.markbasenode
+ if p then
+ local px=getfield(p,"xoffset")
+ local ox=0
+ local rightkern=nil
+ local pp=rawget(properties,p)
+ if pp then
+ pp=rawget(pp,"injections")
+ if pp then
+ rightkern=pp.rightkern
+ end
+ end
+ if rightkern then
+ if pn.markdir<0 then
+ ox=px-pn.markx-rightkern
+ else
+ local leftkern=pp.leftkern
+ if leftkern then
+ ox=px-pn.markx
+ else
+ ox=px-pn.markx-leftkern
+ end
+ end
+ else
+ ox=px-pn.markx
+ local wn=getfield(n,"width")
+ if wn~=0 then
+ pn.leftkern=-wn/2
+ pn.rightkern=-wn/2
+ end
+ end
+ setfield(n,"xoffset",ox)
+ local py=getfield(p,"yoffset")
+ local oy=0
+ if marks[p] then
+ oy=py+pn.marky
+ else
+ oy=getfield(n,"yoffset")+py+pn.marky
+ end
+ setfield(n,"yoffset",oy)
+ else
+ end
+ end
+ end
+ end
+end
+local function inject_cursives(glyphs,nofglyphs)
+ local cursiveanchor,lastanchor=nil,nil
+ local minc,maxc,last=0,0,nil
+ for i=1,nofglyphs do
+ local n=glyphs[i]
+ local pn=rawget(properties,n)
+ if pn then
+ pn=rawget(pn,"injections")
+ end
+ if pn then
+ local cursivex=pn.cursivex
+ if cursivex then
+ if cursiveanchor then
+ if cursivex~=0 then
+ pn.leftkern=(pn.leftkern or 0)+cursivex
+ end
+ if lastanchor then
+ if maxc==0 then
+ minc=lastanchor
+ end
+ maxc=lastanchor
+ properties[cursiveanchor].cursivedy=pn.cursivey
+ end
+ last=n
+ else
+ maxc=0
+ end
+ elseif maxc>0 then
+ local ny=getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",ny)
+ end
+ maxc=0
+ end
+ if pn.cursiveanchor then
+ cursiveanchor=n
+ lastanchor=i
+ else
+ cursiveanchor=nil
+ lastanchor=nil
+ if maxc>0 then
+ local ny=getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",ny)
+ end
+ maxc=0
+ end
+ end
+ elseif maxc>0 then
+ local ny=getfield(n,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",getfield(ti,"yoffset")+ny)
+ end
+ maxc=0
+ cursiveanchor=nil
+ lastanchor=nil
+ end
+ end
+ if last and maxc>0 then
+ local ny=getfield(last,"yoffset")
+ for i=maxc,minc,-1 do
+ local ti=glyphs[i]
+ ny=ny+properties[ti].cursivedy
+ setfield(ti,"yoffset",ny)
+ end
+ end
+end
+local function inject_kerns(head,list,length)
+ for i=1,length do
+ local n=list[i]
+ local pn=rawget(properties,n)
+ if pn then
+ local i=rawget(pn,"injections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ insert_node_before(head,n,newkern(leftkern))
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
+ end
+ end
+ end
+ end
+end
+local function inject_everything(head,where)
+ head=tonut(head)
+ if trace_injections then
+ trace(head,"everything")
+ end
+ local glyphs,nofglyphs,marks,nofmarks
+ if nofregisteredpairs>0 then
+ glyphs,nofglyphs,marks,nofmarks=collect_glyphs_1(head)
+ else
+ glyphs,nofglyphs,marks,nofmarks=collect_glyphs_2(head)
+ end
+ if nofglyphs>0 then
+ if nofregisteredcursives>0 then
+ inject_cursives(glyphs,nofglyphs)
+ end
+ if nofregisteredmarks>0 then
+ inject_marks(marks,nofmarks)
+ end
+ inject_kerns(head,glyphs,nofglyphs)
end
+ if nofmarks>0 then
+ inject_kerns(head,marks,nofmarks)
+ end
+ if keepregisteredcounts then
+ keepregisteredcounts=false
+ else
+ nofregisteredkerns=0
+ nofregisteredpairs=0
+ nofregisteredmarks=0
+ nofregisteredcursives=0
+ end
+ return tonode(head),true
end
-function injections.handler(head,where,keep)
+local function inject_kerns_only(head,where)
head=tonut(head)
- local has_marks,has_cursives,has_kerns=next(marks),next(cursives),next(kerns)
- if has_marks or has_cursives then
- if trace_injections then
- trace(head)
- end
- local done,ky,rl,valid,cx,wx,mk,nofvalid=false,{},{},{},{},{},{},0
- if has_kerns then
- local nf,tm=nil,nil
- for n in traverse_id(glyph_code,head) do
- if getsubtype(n)<256 then
- nofvalid=nofvalid+1
- valid[nofvalid]=n
- local f=getfont(n)
- if f~=nf then
- nf=f
- tm=fontdata[nf].resources.marks
- end
- if tm then
- mk[n]=tm[getchar(n)]
- end
- local k=getattr(n,a_kernpair)
- if k then
- local kk=kerns[k]
- if kk then
- local x,y,w,h=kk[2] or 0,kk[3] or 0,kk[4] or 0,kk[5] or 0
- local dy=y-h
- if dy~=0 then
- ky[n]=dy
+ if trace_injections then
+ trace(head,"kerns")
+ end
+ local n=head
+ local p=nil
+ while n do
+ local id=getid(n)
+ if id==glyph_code then
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ if p then
+ local d=getfield(p,"post")
+ if d then
+ local i=rawget(pn,"postinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ local t=find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
+ end
+ end
+ local d=getfield(p,"replace")
+ if d then
+ local i=rawget(pn,"replaceinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ local t=find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
end
- if w~=0 or x~=0 then
- wx[n]=kk
+ else
+ local i=rawget(pn,"injections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ setfield(p,"replace",newkern(leftkern))
+ end
+ end
+ end
+ else
+ local i=rawget(pn,"injections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ head=insert_node_before(head,n,newkern(leftkern))
end
- rl[n]=kk[1]
end
end
end
+ else
+ break
end
- else
- local nf,tm=nil,nil
- for n in traverse_id(glyph_code,head) do
- if getsubtype(n)<256 then
- nofvalid=nofvalid+1
- valid[nofvalid]=n
- local f=getfont(n)
- if f~=nf then
- nf=f
- tm=fontdata[nf].resources.marks
- end
- if tm then
- mk[n]=tm[getchar(n)]
+ p=nil
+ elseif id==disc_code then
+ local d=getfield(n,"pre")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ local i=rawget(pn,"preinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
+ end
+ end
+ end
+ else
+ break
end
end
- end
- end
- if nofvalid>0 then
- local cx={}
- if has_kerns and next(ky) then
- for n,k in next,ky do
- setfield(n,"yoffset",k)
+ if h~=d then
+ setfield(n,"pre",h)
end
end
- if has_cursives then
- local p_cursbase,p=nil,nil
- local t,d,maxt={},{},0
- for i=1,nofvalid do
- local n=valid[i]
- if not mk[n] then
- local n_cursbase=getattr(n,a_cursbase)
- if p_cursbase then
- local n_curscurs=getattr(n,a_curscurs)
- if p_cursbase==n_curscurs then
- local c=cursives[n_curscurs]
- if c then
- local rlmode,dx,dy,ws,wn=c[1],c[2],c[3],c[4],c[5]
- if rlmode>=0 then
- dx=dx-ws
- else
- dx=dx+wn
- end
- if dx~=0 then
- cx[n]=dx
- rl[n]=rlmode
- end
- dy=-dy
- maxt=maxt+1
- t[maxt]=p
- d[maxt]=dy
- else
- maxt=0
+ local d=getfield(n,"post")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ local i=rawget(pn,"postinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
end
end
- elseif maxt>0 then
- local ny=getfield(n,"yoffset")
- for i=maxt,1,-1 do
- ny=ny+d[i]
- local ti=t[i]
- setfield(ti,"yoffset",getfield(ti,"yoffset")+ny)
- end
- maxt=0
- end
- if not n_cursbase and maxt>0 then
- local ny=getfield(n,"yoffset")
- for i=maxt,1,-1 do
- ny=ny+d[i]
- local ti=t[i]
- setfield(ti,"yoffset",ny)
- end
- maxt=0
end
- p_cursbase,p=n_cursbase,n
+ else
+ break
end
end
- if maxt>0 then
- local ny=getfield(n,"yoffset")
- for i=maxt,1,-1 do
- ny=ny+d[i]
- local ti=t[i]
- setfield(ti,"yoffset",ny)
- end
- maxt=0
- end
- if not keep then
- cursives={}
- end
- end
- if has_marks then
- for i=1,nofvalid do
- local p=valid[i]
- local p_markbase=getattr(p,a_markbase)
- if p_markbase then
- local mrks=marks[p_markbase]
- local nofmarks=#mrks
- for n in traverse_id(glyph_code,getnext(p)) do
- local n_markmark=getattr(n,a_markmark)
- if p_markbase==n_markmark then
- local index=getattr(n,a_markdone) or 1
- local d=mrks[index]
- if d then
- local rlmode=d[3]
- local k=wx[p]
- local px=getfield(p,"xoffset")
- local ox=0
- if k then
- local x=k[2]
- local w=k[4]
- if w then
- if rlmode and rlmode>=0 then
- ox=px-getfield(p,"width")+d[1]-(w-x)
- else
- ox=px-d[1]-x
- end
- else
- if rlmode and rlmode>=0 then
- ox=px-getfield(p,"width")+d[1]
- else
- ox=px-d[1]-x
- end
- end
- else
- local wp=getfield(p,"width")
- local wn=getfield(n,"width")
- if rlmode and rlmode>=0 then
- ox=px-wp+d[1]
- else
- ox=px-d[1]
- end
- if wn~=0 then
- insert_node_before(head,n,newkern(-wn/2))
- insert_node_after(head,n,newkern(-wn/2))
- end
- end
- setfield(n,"xoffset",ox)
- local py=getfield(p,"yoffset")
- local oy=0
- if mk[p] then
- oy=py+d[2]
- else
- oy=getfield(n,"yoffset")+py+d[2]
- end
- setfield(n,"yoffset",oy)
- if nofmarks==1 then
- break
- else
- nofmarks=nofmarks-1
- end
+ if h~=d then
+ setfield(n,"post",h)
+ end
+ end
+ local d=getfield(n,"replace")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ local i=rawget(pn,"replaceinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
end
- elseif not n_markmark then
- break
- else
end
end
+ else
+ break
end
end
- if not keep then
- marks={}
+ if h~=d then
+ setfield(n,"replace",h)
end
end
- if next(wx) then
- for n,k in next,wx do
- local x=k[2]
- local w=k[4]
- if w then
- local rl=k[1]
- local wx=w-x
- if rl<0 then
- if wx~=0 then
- insert_node_before(head,n,newkern(wx))
+ p=n
+ else
+ p=nil
+ end
+ n=getnext(n)
+ end
+ if keepregisteredcounts then
+ keepregisteredcounts=false
+ else
+ nofregisteredkerns=0
+ end
+ return tonode(head),true
+end
+local function inject_pairs_only(head,where)
+ head=tonut(head)
+ if trace_injections then
+ trace(head,"pairs")
+ end
+ local n=head
+ local p=nil
+ while n do
+ local id=getid(n)
+ if id==glyph_code then
+ if getsubtype(n)<256 then
+ local pn=rawget(properties,n)
+ if pn then
+ if p then
+ local d=getfield(p,"post")
+ if d then
+ local i=rawget(pn,"postinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ local t=find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
end
- if x~=0 then
- insert_node_after (head,n,newkern(x))
+ end
+ local d=getfield(p,"replace")
+ if d then
+ local i=rawget(pn,"replaceinjections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ local t=find_tail(d)
+ insert_node_after(d,t,newkern(leftkern))
+ end
end
else
- if x~=0 then
- insert_node_before(head,n,newkern(x))
+ local i=rawget(pn,"injections")
+ if i then
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ setfield(p,"replace",newkern(leftkern))
+ end
+ end
+ end
+ else
+ local i=rawget(pn,"injections")
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ insert_node_before(head,n,newkern(leftkern))
end
- if wx~=0 then
- insert_node_after (head,n,newkern(wx))
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n=getnext(n)
end
end
- elseif x~=0 then
- insert_node_before(head,n,newkern(x))
end
end
+ else
+ break
end
- if next(cx) then
- for n,k in next,cx do
- if k~=0 then
- local rln=rl[n]
- if rln and rln<0 then
- insert_node_before(head,n,newkern(-k))
- else
- insert_node_before(head,n,newkern(k))
+ p=nil
+ elseif id==disc_code then
+ local d=getfield(n,"pre")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"preinjections")
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern=i.leftkern
+ if leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n=getnext(n)
+ end
+ end
end
+ else
+ break
end
end
+ if h~=d then
+ setfield(n,"pre",h)
+ end
end
- if not keep then
- kerns={}
- end
-head=tonode(head)
- return head,true
- elseif not keep then
- kerns,cursives,marks={},{},{}
- end
- elseif has_kerns then
- if trace_injections then
- trace(head)
- end
- for n in traverse_id(glyph_code,head) do
- if getsubtype(n)<256 then
- local k=getattr(n,a_kernpair)
- if k then
- local kk=kerns[k]
- if kk then
- local rl,x,y,w=kk[1],kk[2] or 0,kk[3],kk[4]
- if y and y~=0 then
- setfield(n,"yoffset",y)
- end
- if w then
- local wx=w-x
- if rl<0 then
- if wx~=0 then
- insert_node_before(head,n,newkern(wx))
- end
- if x~=0 then
- insert_node_after (head,n,newkern(x))
+ local d=getfield(n,"post")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"postinjections")
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
end
- else
- if x~=0 then
- insert_node_before(head,n,newkern(x))
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
end
- if wx~=0 then
- insert_node_after(head,n,newkern(wx))
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n=getnext(n)
end
end
- else
- if x~=0 then
- insert_node_before(head,n,newkern(x))
+ end
+ else
+ break
+ end
+ end
+ if h~=d then
+ setfield(n,"post",h)
+ end
+ end
+ local d=getfield(n,"replace")
+ if d then
+ local h=d
+ for n in traverse_id(glyph_code,d) do
+ if getsubtype(n)<256 then
+ local p=rawget(properties,n)
+ if p then
+ local i=rawget(p,"replaceinjections")
+ if i then
+ local yoffset=i.yoffset
+ if yoffset and yoffset~=0 then
+ setfield(n,"yoffset",yoffset)
+ end
+ local leftkern=i.leftkern
+ if leftkern and leftkern~=0 then
+ h=insert_node_before(h,n,newkern(leftkern))
+ end
+ local rightkern=i.rightkern
+ if rightkern and rightkern~=0 then
+ insert_node_after(head,n,newkern(rightkern))
+ n=getnext(n)
+ end
end
end
+ else
+ break
end
end
+ if h~=d then
+ setfield(n,"replace",h)
+ end
end
+ p=n
+ else
+ p=nil
end
- if not keep then
- kerns={}
- end
- return tonode(head),true
+ n=getnext(n)
+ end
+ if keepregisteredcounts then
+ keepregisteredcounts=false
+ else
+ nofregisteredpairs=0
+ nofregisteredkerns=0
+ end
+ return tonode(head),true
+end
+function injections.handler(head,where)
+ if nofregisteredmarks>0 or nofregisteredcursives>0 then
+ return inject_everything(head,where)
+ elseif nofregisteredpairs>0 then
+ return inject_pairs_only(head,where)
+ elseif nofregisteredkerns>0 then
+ return inject_kerns_only(head,where)
else
+ return head,false
end
- return tonode(head),false
end
end -- closure
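-- The handler above picks the cheapest injection pass, based on what was
-- registered while the positioning features were applied: marks or cursives
-- force the full pass, otherwise pairs, otherwise plain kerns. A minimal,
-- self-contained sketch of that counter-based dispatch (the names below are
-- illustrative stand-ins, not the module's actual API):
--
local registered = { marks = 0, cursives = 0, pairs = 0, kerns = 0 }

local function inject_everything(head) return head, true end -- stand-in pass
local function inject_pairs_only(head) return head, true end -- stand-in pass
local function inject_kerns_only(head) return head, true end -- stand-in pass

local function handler(head)
    if registered.marks > 0 or registered.cursives > 0 then
        return inject_everything(head)
    elseif registered.pairs > 0 then
        return inject_pairs_only(head)
    elseif registered.kerns > 0 then
        return inject_kerns_only(head)
    else
        return head, false -- nothing was registered, nothing to do
    end
end

registered.kerns = 2
print(select(2, handler({ }))) -- true: only the kern pass would run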
do -- begin closure to overcome local limits and interference
-if not modules then modules={} end modules ['font-ota']={
+if not modules then modules={} end modules ['font-otx']={
version=1.001,
comment="companion to font-otf.lua (analysing)",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -9363,13 +10918,24 @@ analyzers.initializers=initializers
analyzers.methods=methods
analyzers.useunicodemarks=false
local a_state=attributes.private('state')
+local nuts=nodes.nuts
+local tonut=nuts.tonut
+local getfield=nuts.getfield
+local getnext=nuts.getnext
+local getprev=nuts.getprev
+local getid=nuts.getid
+local getprop=nuts.getprop
+local setprop=nuts.setprop
+local getfont=nuts.getfont
+local getsubtype=nuts.getsubtype
+local getchar=nuts.getchar
+local traverse_id=nuts.traverse_id
+local traverse_node_list=nuts.traverse
+local end_of_math=nuts.end_of_math
local nodecodes=nodes.nodecodes
local glyph_code=nodecodes.glyph
local disc_code=nodecodes.disc
local math_code=nodecodes.math
-local traverse_id=node.traverse_id
-local traverse_node_list=node.traverse
-local end_of_math=node.end_of_math
local fontdata=fonts.hashes.identifiers
local categories=characters and characters.categories or {}
local otffeatures=fonts.constructors.newfeatures("otf")
@@ -9411,51 +10977,52 @@ function analyzers.setstate(head,font)
local tfmdata=fontdata[font]
local descriptions=tfmdata.descriptions
local first,last,current,n,done=nil,nil,head,0,false
+ current=tonut(current)
while current do
- local id=current.id
- if id==glyph_code and current.font==font then
+ local id=getid(current)
+ if id==glyph_code and getfont(current)==font then
done=true
- local char=current.char
+ local char=getchar(current)
local d=descriptions[char]
if d then
if d.class=="mark" or (useunicodemarks and categories[char]=="mn") then
done=true
- current[a_state]=s_mark
+ setprop(current,a_state,s_mark)
elseif n==0 then
first,last,n=current,current,1
- current[a_state]=s_init
+ setprop(current,a_state,s_init)
else
last,n=current,n+1
- current[a_state]=s_medi
+ setprop(current,a_state,s_medi)
end
else
if first and first==last then
- last[a_state]=s_isol
+ setprop(last,a_state,s_isol)
elseif last then
- last[a_state]=s_fina
+ setprop(last,a_state,s_fina)
end
first,last,n=nil,nil,0
end
elseif id==disc_code then
- current[a_state]=s_medi
+ setprop(current,a_state,s_medi)
last=current
else
if first and first==last then
- last[a_state]=s_isol
+ setprop(last,a_state,s_isol)
elseif last then
- last[a_state]=s_fina
+ setprop(last,a_state,s_fina)
end
first,last,n=nil,nil,0
if id==math_code then
current=end_of_math(current)
end
end
- current=current.next
+ current=getnext(current)
end
if first and first==last then
- last[a_state]=s_isol
+ setprop(last,a_state,s_isol)
elseif last then
- last[a_state]=s_fina
+ setprop(last,a_state,s_fina)
end
return head,done
end
@@ -9511,6 +11078,7 @@ local isolated={
[0x0856]=true,[0x0858]=true,[0x0857]=true,
[0x07FA]=true,
[zwnj]=true,
+ [0x08AD]=true,
}
local final={
[0x0622]=true,[0x0623]=true,[0x0624]=true,[0x0625]=true,
@@ -9528,15 +11096,16 @@ local final={
[0x06D3]=true,[0x06D5]=true,[0x06EE]=true,[0x06EF]=true,
[0x0759]=true,[0x075A]=true,[0x075B]=true,[0x076B]=true,
[0x076C]=true,[0x0771]=true,[0x0773]=true,[0x0774]=true,
- [0x0778]=true,[0x0779]=true,
+ [0x0778]=true,[0x0779]=true,
[0x08AA]=true,[0x08AB]=true,[0x08AC]=true,
[0xFEF5]=true,[0xFEF7]=true,[0xFEF9]=true,[0xFEFB]=true,
- [0x0710]=true,[0x0715]=true,[0x0716]=true,[0x0717]=true,
- [0x0718]=true,[0x0719]=true,[0x0728]=true,[0x072A]=true,
- [0x072C]=true,[0x071E]=true,
+ [0x0710]=true,[0x0715]=true,[0x0716]=true,[0x0717]=true,
+ [0x0718]=true,[0x0719]=true,[0x0728]=true,[0x072A]=true,
+ [0x072C]=true,[0x071E]=true,
[0x072F]=true,[0x074D]=true,
[0x0840]=true,[0x0849]=true,[0x0854]=true,[0x0846]=true,
- [0x084F]=true
+ [0x084F]=true,
+ [0x08AE]=true,[0x08B1]=true,[0x08B2]=true,
}
local medial={
[0x0626]=true,[0x0628]=true,[0x062A]=true,[0x062B]=true,
@@ -9596,12 +11165,12 @@ local medial={
[0x07D2]=true,[0x07D0]=true,[0x07CF]=true,[0x07CD]=true,
[0x07CB]=true,[0x07D3]=true,[0x07E4]=true,[0x07D5]=true,
[0x07E6]=true,
- [tatweel]=true,
- [zwj]=true,
+ [tatweel]=true,[zwj]=true,
+ [0x08A1]=true,[0x08AF]=true,[0x08B0]=true,
}
local arab_warned={}
local function warning(current,what)
- local char=current.char
+ local char=getchar(current)
if not arab_warned[char] then
log.report("analyze","arab: character %C has no %a class",char,what)
arab_warned[char]=true
@@ -9610,30 +11179,30 @@ end
local function finish(first,last)
if last then
if first==last then
- local fc=first.char
+ local fc=getchar(first)
if medial[fc] or final[fc] then
- first[a_state]=s_isol
+ setprop(first,a_state,s_isol)
else
warning(first,"isol")
- first[a_state]=s_error
+ setprop(first,a_state,s_error)
end
else
- local lc=last.char
+ local lc=getchar(last)
if medial[lc] or final[lc] then
- last[a_state]=s_fina
+ setprop(last,a_state,s_fina)
else
warning(last,"fina")
- last[a_state]=s_error
+ setprop(last,a_state,s_error)
end
end
first,last=nil,nil
elseif first then
- local fc=first.char
+ local fc=getchar(first)
if medial[fc] or final[fc] then
- first[a_state]=s_isol
+ setprop(first,a_state,s_isol)
else
warning(first,"isol")
- first[a_state]=s_error
+ setprop(first,a_state,s_error)
end
first=nil
end
@@ -9644,38 +11213,39 @@ function methods.arab(head,font,attr)
local tfmdata=fontdata[font]
local marks=tfmdata.resources.marks
local first,last,current,done=nil,nil,head,false
+ current=tonut(current)
while current do
- local id=current.id
- if id==glyph_code and current.font==font and current.subtype<256 and not current[a_state] then
+ local id=getid(current)
+ if id==glyph_code and getfont(current)==font and getsubtype(current)<256 and not getprop(current,a_state) then
done=true
- local char=current.char
+ local char=getchar(current)
if marks[char] or (useunicodemarks and categories[char]=="mn") then
- current[a_state]=s_mark
+ setprop(current,a_state,s_mark)
elseif isolated[char] then
first,last=finish(first,last)
- current[a_state]=s_isol
+ setprop(current,a_state,s_isol)
first,last=nil,nil
elseif not first then
if medial[char] then
- current[a_state]=s_init
+ setprop(current,a_state,s_init)
first,last=first or current,current
elseif final[char] then
- current[a_state]=s_isol
+ setprop(current,a_state,s_isol)
first,last=nil,nil
else
first,last=finish(first,last)
end
elseif medial[char] then
first,last=first or current,current
- current[a_state]=s_medi
+ setprop(current,a_state,s_medi)
elseif final[char] then
- if not last[a_state]==s_init then
- last[a_state]=s_medi
+ if getprop(last,a_state)~=s_init then
+ setprop(last,a_state,s_medi)
end
- current[a_state]=s_fina
+ setprop(current,a_state,s_fina)
first,last=nil,nil
elseif char>=0x0600 and char<=0x06FF then
- current[a_state]=s_rest
+ setprop(current,a_state,s_rest)
first,last=finish(first,last)
else
first,last=finish(first,last)
@@ -9688,7 +11258,7 @@ function methods.arab(head,font,attr)
current=end_of_math(current)
end
end
- current=current.next
+ current=getnext(current)
end
if first or last then
finish(first,last)
@@ -9745,6 +11315,7 @@ local report_chain=logs.reporter("fonts","otf chain")
local report_process=logs.reporter("fonts","otf process")
local report_prepare=logs.reporter("fonts","otf prepare")
local report_warning=logs.reporter("fonts","otf warning")
+local report_run=logs.reporter("fonts","otf run")
registertracker("otf.verbose_chain",function(v) otf.setcontextchain(v and "verbose") end)
registertracker("otf.normal_chain",function(v) otf.setcontextchain(v and "normal") end)
registertracker("otf.replacements","otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
@@ -9756,21 +11327,29 @@ local nuts=nodes.nuts
local tonode=nuts.tonode
local tonut=nuts.tonut
local getfield=nuts.getfield
+local setfield=nuts.setfield
local getnext=nuts.getnext
local getprev=nuts.getprev
local getid=nuts.getid
local getattr=nuts.getattr
+local setattr=nuts.setattr
+local getprop=nuts.getprop
+local setprop=nuts.setprop
local getfont=nuts.getfont
local getsubtype=nuts.getsubtype
local getchar=nuts.getchar
-local setfield=nuts.setfield
-local setattr=nuts.setattr
+local insert_node_before=nuts.insert_before
local insert_node_after=nuts.insert_after
local delete_node=nuts.delete
+local remove_node=nuts.remove
local copy_node=nuts.copy
+local copy_node_list=nuts.copy_list
local find_node_tail=nuts.tail
local flush_node_list=nuts.flush_list
+local free_node=nuts.free
local end_of_math=nuts.end_of_math
+local traverse_nodes=nuts.traverse
+local traverse_id=nuts.traverse_id
local setmetatableindex=table.setmetatableindex
local zwnj=0x200C
local zwj=0x200D
@@ -9788,25 +11367,22 @@ local math_code=nodecodes.math
local dir_code=whatcodes.dir
local localpar_code=whatcodes.localpar
local discretionary_code=disccodes.discretionary
+local regular_code=disccodes.regular
+local automatic_code=disccodes.automatic
local ligature_code=glyphcodes.ligature
local privateattribute=attributes.private
local a_state=privateattribute('state')
-local a_markbase=privateattribute('markbase')
-local a_markmark=privateattribute('markmark')
-local a_markdone=privateattribute('markdone')
-local a_cursbase=privateattribute('cursbase')
-local a_curscurs=privateattribute('curscurs')
-local a_cursdone=privateattribute('cursdone')
-local a_kernpair=privateattribute('kernpair')
-local a_ligacomp=privateattribute('ligacomp')
+local a_cursbase=privateattribute('cursbase')
local injections=nodes.injections
local setmark=injections.setmark
local setcursive=injections.setcursive
local setkern=injections.setkern
local setpair=injections.setpair
-local markonce=true
+local resetinjection=injections.reset
+local copyinjection=injections.copy
+local setligaindex=injections.setligaindex
+local getligaindex=injections.getligaindex
local cursonce=true
-local kernonce=true
local fonthashes=fonts.hashes
local fontdata=fonthashes.identifiers
local otffeatures=fonts.constructors.newfeatures("otf")
@@ -9822,6 +11398,7 @@ local currentfont=false
local lookuptable=false
local anchorlookups=false
local lookuptypes=false
+local lookuptags=false
local handlers={}
local rlmode=0
local featurevalue=false
@@ -9866,29 +11443,32 @@ local function gref(n)
end
local function cref(kind,chainname,chainlookupname,lookupname,index)
if index then
- return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index)
+ return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index)
elseif lookupname then
- return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname)
+ return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname])
elseif chainlookupname then
- return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname)
+ return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname])
elseif chainname then
- return formatters["feature %a, chain %a"](kind,chainname)
+ return formatters["feature %a, chain %a"](kind,lookuptags[chainname])
else
return formatters["feature %a"](kind)
end
end
local function pref(kind,lookupname)
- return formatters["feature %a, lookup %a"](kind,lookupname)
+ return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname])
end
local function copy_glyph(g)
local components=getfield(g,"components")
if components then
setfield(g,"components",nil)
local n=copy_node(g)
+ copyinjection(n,g)
setfield(g,"components",components)
return n
else
- return copy_node(g)
+ local n=copy_node(g)
+ copyinjection(n,g)
+ return n
end
end
local function markstoligature(kind,lookupname,head,start,stop,char)
@@ -9903,6 +11483,7 @@ local function markstoligature(kind,lookupname,head,start,stop,char)
if head==start then
head=base
end
+ resetinjection(base)
setfield(base,"char",char)
setfield(base,"subtype",ligature_code)
setfield(base,"components",start)
@@ -9936,6 +11517,7 @@ local function getcomponentindex(start)
end
local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound)
if start==stop and getchar(start)==char then
+ resetinjection(start)
setfield(start,"char",char)
return head,start
end
@@ -9947,6 +11529,7 @@ local function toligature(kind,lookupname,head,start,stop,char,markflag,discfoun
if start==head then
head=base
end
+ resetinjection(base)
setfield(base,"char",char)
setfield(base,"subtype",ligature_code)
setfield(base,"components",start)
@@ -9971,9 +11554,9 @@ local function toligature(kind,lookupname,head,start,stop,char,markflag,discfoun
baseindex=baseindex+componentindex
componentindex=getcomponentindex(start)
elseif not deletemarks then
- setattr(start,a_ligacomp,baseindex+(getattr(start,a_ligacomp) or componentindex))
+ setligaindex(start,baseindex+getligaindex(start,componentindex))
if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getattr(start,a_ligacomp))
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
end
head,current=insert_node_after(head,current,copy_node(start))
elseif trace_marks then
@@ -9985,9 +11568,9 @@ local function toligature(kind,lookupname,head,start,stop,char,markflag,discfoun
while start and getid(start)==glyph_code do
local char=getchar(start)
if marks[char] then
- setattr(start,a_ligacomp,baseindex+(getattr(start,a_ligacomp) or componentindex))
+ setligaindex(start,baseindex+getligaindex(start,componentindex))
if trace_marks then
- logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getattr(start,a_ligacomp))
+ logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
end
else
break
@@ -10001,6 +11584,7 @@ function handlers.gsub_single(head,start,kind,lookupname,replacement)
if trace_singles then
logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement))
end
+ resetinjection(start)
setfield(start,"char",replacement)
return head,start,true
end
@@ -10038,11 +11622,13 @@ end
local function multiple_glyphs(head,start,multiple,ignoremarks)
local nofmultiples=#multiple
if nofmultiples>0 then
+ resetinjection(start)
setfield(start,"char",multiple[1])
if nofmultiples>1 then
local sn=getnext(start)
for k=2,nofmultiples do
local n=copy_node(start)
+ resetinjection(n)
setfield(n,"char",multiple[k])
setfield(n,"next",sn)
setfield(n,"prev",start)
@@ -10068,6 +11654,7 @@ function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence
if trace_alternatives then
logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment)
end
+ resetinjection(start)
setfield(start,"char",choice)
else
if trace_alternatives then
@@ -10155,6 +11742,7 @@ function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
end
else
+ resetinjection(start)
setfield(start,"char",lig)
if trace_ligatures then
logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
@@ -10200,7 +11788,7 @@ function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence
if al[anchor] then
local ma=markanchors[anchor]
if ma then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -10246,7 +11834,7 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
end
end
end
- local index=getattr(start,a_ligacomp)
+ local index=getligaindex(start)
local baseanchors=descriptions[basechar]
if baseanchors then
baseanchors=baseanchors.anchors
@@ -10260,7 +11848,7 @@ function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequ
if ma then
ba=ba[index]
if ba then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
@@ -10294,10 +11882,10 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence
local markchar=getchar(start)
if marks[markchar] then
local base=getprev(start)
- local slc=getattr(start,a_ligacomp)
+ local slc=getligaindex(start)
if slc then
while base do
- local blc=getattr(base,a_ligacomp)
+ local blc=getligaindex(base)
if blc and blc~=slc then
base=getprev(base)
else
@@ -10318,7 +11906,7 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence
if al[anchor] then
local ma=markanchors[anchor]
if ma then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -10344,7 +11932,7 @@ function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence
return head,start,false
end
function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence)
- local alreadydone=cursonce and getattr(start,a_cursbase)
+ local alreadydone=cursonce and getprop(start,a_cursbase)
if not alreadydone then
local done=false
local startchar=getchar(start)
@@ -10485,6 +12073,7 @@ function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,loo
if trace_singles then
logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
end
+ resetinjection(start)
setfield(start,"char",replacement)
return head,start,true
else
@@ -10516,6 +12105,7 @@ function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lo
if trace_singles then
logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
end
+ resetinjection(current)
setfield(current,"char",replacement)
end
end
@@ -10575,6 +12165,7 @@ function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext
if trace_alternatives then
logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment)
end
+ resetinjection(start)
setfield(start,"char",choice)
else
if trace_alternatives then
@@ -10704,7 +12295,7 @@ function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext
if al[anchor] then
local ma=markanchors[anchor]
if ma then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
@@ -10758,7 +12349,7 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
end
end
end
- local index=getattr(start,a_ligacomp)
+ local index=getligaindex(start)
local baseanchors=descriptions[basechar].anchors
if baseanchors then
local baseanchors=baseanchors['baselig']
@@ -10770,7 +12361,7 @@ function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcon
if ma then
ba=ba[index]
if ba then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
if trace_marks then
logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy)
@@ -10799,63 +12390,63 @@ end
function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
local markchar=getchar(start)
if marks[markchar] then
- local subtables=currentlookup.subtables
- local lookupname=subtables[1]
- local markanchors=lookuphash[lookupname]
- if markanchors then
- markanchors=markanchors[markchar]
- end
- if markanchors then
- local base=getprev(start)
- local slc=getattr(start,a_ligacomp)
- if slc then
- while base do
- local blc=getattr(base,a_ligacomp)
- if blc and blc~=slc then
- base=getprev(base)
- else
- break
- end
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=getprev(start)
+ local slc=getligaindex(start)
+ if slc then
+ while base do
+ local blc=getligaindex(base)
+ if blc and blc~=slc then
+ base=getprev(base)
+ else
+ break
end
end
- if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
- local basechar=getchar(base)
- local baseanchors=descriptions[basechar].anchors
+ end
+ if base and getid(base)==glyph_code and getfont(base)==currentfont and getsubtype(base)<256 then
+ local basechar=getchar(base)
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ baseanchors=baseanchors['basemark']
if baseanchors then
- baseanchors=baseanchors['basemark']
- if baseanchors then
- local al=anchorlookups[lookupname]
- for anchor,ba in next,baseanchors do
- if al[anchor] then
- local ma=markanchors[anchor]
- if ma then
- local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,true)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head,start,true
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
end
+ return head,start,true
end
end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
end
end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
end
elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
elseif trace_bugs then
logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
end
return head,start,false
end
function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local alreadydone=cursonce and getattr(start,a_cursbase)
+ local alreadydone=cursonce and getprop(start,a_cursbase)
if not alreadydone then
local startchar=getchar(start)
local subtables=currentlookup.subtables
@@ -11131,15 +12722,8 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
break
end
end
- elseif f==2 then
- match=seq[1][32]
else
- for n=f-1,1 do
- if not seq[n][32] then
- match=false
- break
- end
- end
+ match=false
end
end
if match and s>l then
@@ -11189,15 +12773,8 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
break
end
end
- elseif s-l==1 then
- match=seq[s][32]
else
- for n=l+1,s do
- if not seq[n][32] then
- match=false
- break
- end
- end
+ match=false
end
end
end
@@ -11235,7 +12812,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
else
local i=1
- repeat
+ while true do
if skipped then
while true do
local char=getchar(start)
@@ -11272,11 +12849,13 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq
end
end
end
- if start then
+ if i>nofchainlookups then
+ break
+ elseif start then
start=getnext(start)
else
end
- until i>nofchainlookups
+ end
end
else
local replacements=ck[7]
@@ -11352,14 +12931,20 @@ local autofeatures=fonts.analyzers.features
local function initialize(sequence,script,language,enabled)
local features=sequence.features
if features then
- for kind,scripts in next,features do
- local valid=enabled[kind]
- if valid then
- local languages=scripts[script] or scripts[wildcard]
- if languages and (languages[language] or languages[wildcard]) then
- return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence }
+ local order=sequence.order
+ if order then
+ for i=1,#order do
+ local kind=order[i]
+ local valid=enabled[kind]
+ if valid then
+ local scripts=features[kind]
+ local languages=scripts[script] or scripts[wildcard]
+ if languages and (languages[language] or languages[wildcard]) then
+ return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence }
+ end
end
end
+ else
end
end
return false
@@ -11386,12 +12971,12 @@ function otf.dataset(tfmdata,font)
}
rs[language]=rl
local sequences=tfmdata.resources.sequences
-for s=1,#sequences do
- local v=enabled and initialize(sequences[s],script,language,enabled)
- if v then
- rl[#rl+1]=v
- end
-end
+ for s=1,#sequences do
+ local v=enabled and initialize(sequences[s],script,language,enabled)
+ if v then
+ rl[#rl+1]=v
+ end
+ end
end
return rl
end
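-- The reworked initializer above walks sequence.order, so features are tested
-- in the order the font loader recorded them rather than in Lua's undefined
-- hash order. A standalone sketch of that idea with made-up data (wildcard
-- handling reduced to "*"):

local features = { liga = { latn = { dflt = true } }, kern = { latn = { dflt = true } } }
local order    = { "liga", "kern" }              -- order recorded at loading time
local enabled  = { kern = true, liga = true }

local function initialize(order, features, enabled, script, language)
    local actions = { }
    for i = 1, #order do                          -- deterministic, font-defined order
        local kind = order[i]
        if enabled[kind] then
            local scripts   = features[kind]
            local languages = scripts and (scripts[script] or scripts["*"])
            if languages and (languages[language] or languages["*"]) then
                actions[#actions+1] = kind
            end
        end
    end
    return actions
end

print(table.concat(initialize(order, features, enabled, "latn", "dflt"), " ")) -- liga kern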
@@ -11412,6 +12997,7 @@ local function featuresprocessor(head,font,attr)
anchorlookups=resources.lookup_to_anchor
lookuptable=resources.lookups
lookuptypes=resources.lookuptypes
+ lookuptags=resources.lookuptags
currentfont=font
rlmode=0
local sequences=resources.sequences
@@ -11488,9 +13074,9 @@ local function featuresprocessor(head,font,attr)
if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then
local a=getattr(start,0)
if a then
- a=(a==attr) and (not attribute or getattr(start,a_state)==attribute)
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
else
- a=not attribute or getattr(start,a_state)==attribute
+ a=not attribute or getprop(start,a_state)==attribute
end
if a then
local lookupmatch=lookupcache[getchar(start)]
@@ -11521,9 +13107,9 @@ local function featuresprocessor(head,font,attr)
setfield(prev,"next",next)
local a=getattr(prev,0)
if a then
- a=(a==attr) and (not attribute or getattr(prev,a_state)==attribute)
+ a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute)
else
- a=not attribute or getattr(prev,a_state)==attribute
+ a=not attribute or getprop(prev,a_state)==attribute
end
if a then
local lookupmatch=lookupcache[getchar(prev)]
@@ -11545,9 +13131,9 @@ local function featuresprocessor(head,font,attr)
if getfont(start)==font and getsubtype(start)<256 then
local a=getattr(start,0)
if a then
- a=(a==attr) and (not attribute or getattr(start,a_state)==attribute)
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
else
- a=not attribute or getattr(start,a_state)==attribute
+ a=not attribute or getprop(start,a_state)==attribute
end
if a then
local lookupmatch=lookupcache[getchar(start)]
@@ -11638,9 +13224,9 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
if id==glyph_code and getfont(start)==font and getsubtype(start)<256 then
local a=getattr(start,0)
if a then
- a=(a==attr) and (not attribute or getattr(start,a_state)==attribute)
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
else
- a=not attribute or getattr(start,a_state)==attribute
+ a=not attribute or getprop(start,a_state)==attribute
end
if a then
for i=1,ns do
@@ -11682,9 +13268,9 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
setfield(prev,"next",next)
local a=getattr(prev,0)
if a then
- a=(a==attr) and (not attribute or getattr(prev,a_state)==attribute)
+ a=(a==attr) and (not attribute or getprop(prev,a_state)==attribute)
else
- a=not attribute or getattr(prev,a_state)==attribute
+ a=not attribute or getprop(prev,a_state)==attribute
end
if a then
for i=1,ns do
@@ -11714,9 +13300,9 @@ elseif typ=="gpos_single" or typ=="gpos_pair" then
if getfont(start)==font and getsubtype(start)<256 then
local a=getattr(start,0)
if a then
- a=(a==attr) and (not attribute or getattr(start,a_state)==attribute)
+ a=(a==attr) and (not attribute or getprop(start,a_state)==attribute)
else
- a=not attribute or getattr(start,a_state)==attribute
+ a=not attribute or getprop(start,a_state)==attribute
end
if a then
for i=1,ns do
@@ -11944,6 +13530,7 @@ local function prepare_contextchains(tfmdata)
local rawdata=tfmdata.shared.rawdata
local resources=rawdata.resources
local lookuphash=resources.lookuphash
+ local lookuptags=resources.lookuptags
local lookups=rawdata.lookups
if lookups then
for lookupname,lookupdata in next,rawdata.lookups do
@@ -11956,7 +13543,7 @@ local function prepare_contextchains(tfmdata)
if not validformat then
report_prepare("unsupported format %a",format)
elseif not validformat[lookuptype] then
- report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname)
+ report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname])
else
local contexts=lookuphash[lookupname]
if not contexts then
@@ -12005,7 +13592,7 @@ local function prepare_contextchains(tfmdata)
else
end
else
- report_prepare("missing lookuptype for lookupname %a",lookupname)
+ report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname])
end
end
end
@@ -12420,6 +14007,14 @@ local function packdata(data)
features[script]=pack_normal(feature)
end
end
+ local order=sequence.order
+ if order then
+ sequence.order=pack_indexed(order)
+ end
+ local markclass=sequence.markclass
+ if markclass then
+ sequence.markclass=pack_boolean(markclass)
+ end
end
end
local lookups=resources.lookups
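-- packdata above (and unpackdata further down) replace repeated subtables,
-- such as sequence.order and sequence.markclass, by an index into a shared
-- pool and resolve them back on loading, which keeps the cached font table
-- small. A standalone sketch of that sharing idea (simplified: keys are built
-- with a plain concat, which is good enough for flat arrays):

local function pack(list)
    local pool, index = { }, { }
    for i = 1, #list do
        local t   = list[i]
        local key = table.concat(t, " ")
        local n   = index[key]
        if not n then
            pool[#pool+1] = t
            n             = #pool
            index[key]    = n
        end
        list[i] = n                 -- store an index instead of the table
    end
    return list, pool
end

local function unpack(list, pool)
    for i = 1, #list do
        list[i] = pool[list[i]]     -- resolve the index back to the shared table
    end
    return list
end

local packed, pool = pack { { "liga", "kern" }, { "liga", "kern" }, { "smcp" } }
print(#pool)                        -- 2: the duplicated order table is shared
unpack(packed, pool)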
@@ -12749,27 +14344,6 @@ local function unpackdata(data)
rule.replacements=tv
end
end
- local fore=rule.fore
- if fore then
- local tv=tables[fore]
- if tv then
- rule.fore=tv
- end
- end
- local back=rule.back
- if back then
- local tv=tables[back]
- if tv then
- rule.back=tv
- end
- end
- local names=rule.names
- if names then
- local tv=tables[names]
- if tv then
- rule.names=tv
- end
- end
local lookups=rule.lookups
if lookups then
local tv=tables[lookups]
@@ -12832,6 +14406,20 @@ local function unpackdata(data)
end
end
end
+ local order=feature.order
+ if order then
+ local tv=tables[order]
+ if tv then
+ feature.order=tv
+ end
+ end
+ local markclass=feature.markclass
+ if markclass then
+ local tv=tables[markclass]
+ if tv then
+ feature.markclass=tv
+ end
+ end
end
end
local lookups=resources.lookups
@@ -12876,6 +14464,7 @@ if otf.enhancers.register then
otf.enhancers.register("unpack",unpackdata)
end
otf.enhancers.unpack=unpackdata
+otf.enhancers.pack=packdata
end -- closure
@@ -13231,8 +14820,8 @@ function definers.read(specification,size,id)
elseif trace_defining and type(tfmdata)=="table" then
local properties=tfmdata.properties or {}
local parameters=tfmdata.parameters or {}
- report_defining("using %s font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a",
- properties.format,id,properties.name,parameters.size,properties.encodingbytes,
+ report_defining("using %a font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a",
+ properties.format or "unknown",id,properties.name,parameters.size,properties.encodingbytes,
properties.encodingname,properties.fullname,file.basename(properties.filename))
end
statistics.stoptiming(fonts)
@@ -13563,13 +15152,40 @@ local fonts=fonts
local nodes=nodes
local traverse_id=node.traverse_id
local glyph_code=nodes.nodecodes.glyph
-function nodes.handlers.characters(head)
+local disc_code=nodes.nodecodes.disc
+local ligaturing=node.ligaturing
+local kerning=node.kerning
+local basepass=true
+local function l_warning() texio.write_nl("warning: node.ligaturing called directly") l_warning=nil end
+local function k_warning() texio.write_nl("warning: node.kerning called directly") k_warning=nil end
+function node.ligaturing(...)
+ if basepass and l_warning then
+ l_warning()
+ end
+ return ligaturing(...)
+end
+function node.kerning(...)
+ if basepass and k_warning then
+ k_warning()
+ end
+ return kerning(...)
+end
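-- The two overrides above warn once when node.ligaturing or node.kerning is
-- called directly while the handler's own base pass is enabled, then silence
-- themselves. A generic, standalone sketch of that warn-once wrapper (print
-- stands in for texio.write_nl here, and the basepass check is left out):

local function warnonce(message)
    local warned = false
    return function()
        if not warned then
            warned = true
            print(message)          -- complain the first time only
        end
    end
end

local function wrap(fn, warning)
    return function(...)
        warning()
        return fn(...)
    end
end

local ligaturing = wrap(function(head) return head end, warnonce("warning: ligaturing called directly"))
ligaturing("head")                  -- prints the warning
ligaturing("head")                  -- silent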
+function nodes.handlers.setbasepass(v)
+ basepass=v
+end
+function nodes.handlers.nodepass(head)
local fontdata=fonts.hashes.identifiers
if fontdata then
- local usedfonts,done,prevfont={},false,nil
+ local usedfonts={}
+ local basefonts={}
+ local prevfont=nil
+ local basefont=nil
for n in traverse_id(glyph_code,head) do
local font=n.font
if font~=prevfont then
+ if basefont then
+ basefont[2]=n.prev
+ end
prevfont=font
local used=usedfonts[font]
if not used then
@@ -13580,18 +15196,57 @@ function nodes.handlers.characters(head)
local processors=shared.processes
if processors and #processors>0 then
usedfonts[font]=processors
- done=true
+ elseif basepass then
+ basefont={ n,nil }
+ basefonts[#basefonts+1]=basefont
+ end
+ end
+ end
+ end
+ end
+ end
+ for d in traverse_id(disc_code,head) do
+ local r=d.replace
+ if r then
+ for n in traverse_id(glyph_code,r) do
+ local font=n.font
+ if font~=prevfont then
+ prevfont=font
+ local used=usedfonts[font]
+ if not used then
+ local tfmdata=fontdata[font]
+ if tfmdata then
+ local shared=tfmdata.shared
+ if shared then
+ local processors=shared.processes
+ if processors and #processors>0 then
+ usedfonts[font]=processors
+ end
+ end
end
end
end
end
end
end
- if done then
+ if next(usedfonts) then
for font,processors in next,usedfonts do
for i=1,#processors do
- local h,d=processors[i](head,font,0)
- head,done=h or head,done or d
+ head=processors[i](head,font,0) or head
+ end
+ end
+ end
+ if basepass and #basefonts>0 then
+ for i=1,#basefonts do
+ local range=basefonts[i]
+ local start=range[1]
+ local stop=range[2]
+ if stop then
+ start,stop=ligaturing(start,stop)
+ start,stop=kerning(start,stop)
+ elseif start then
+ start=ligaturing(start)
+ start=kerning(start)
end
end
end
@@ -13600,13 +15255,27 @@ function nodes.handlers.characters(head)
return head,false
end
end
+function nodes.handlers.basepass(head)
+ if not basepass then
+ head=ligaturing(head)
+ head=kerning(head)
+ end
+ return head,true
+end
+local nodepass=nodes.handlers.nodepass
+local basepass=nodes.handlers.basepass
+local injectpass=nodes.injections.handler
+local protectpass=nodes.handlers.protectglyphs
function nodes.simple_font_handler(head)
- head=nodes.handlers.characters(head)
- nodes.injections.handler(head)
- nodes.handlers.protectglyphs(head)
- head=node.ligaturing(head)
- head=node.kerning(head)
- return head
+ if head then
+ head=nodepass(head)
+ head=injectpass(head)
+ head=basepass(head)
+ protectpass(head)
+ return head,true
+ else
+ return head,false
+ end
end
end -- closure
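-- simple_font_handler above chains four passes: the node-mode font pass, the
-- injection pass, the base-mode ligaturing/kerning pass and the protection
-- pass. A minimal standalone sketch of such a pass pipeline (the pass bodies
-- below are placeholders, not the real node-list passes):

local function makepipeline(passes)
    return function(head)
        if not head then
            return head, false
        end
        for i = 1, #passes do
            head = passes[i](head) or head        -- a pass may hand back a new head
        end
        return head, true
    end
end

local function nodepass(head)    print("node pass")    return head end
local function injectpass(head)  print("inject pass")  return head end
local function basepass(head)    print("base pass")    return head end
local function protectpass(head) print("protect pass") return head end

local handler = makepipeline { nodepass, injectpass, basepass, protectpass }
handler({ })                                      -- any non-nil head will do here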
diff --git a/tex/generic/context/luatex/luatex-fonts-ota.lua b/tex/generic/context/luatex/luatex-fonts-ota.lua
new file mode 100644
index 000000000..f083fe09e
--- /dev/null
+++ b/tex/generic/context/luatex/luatex-fonts-ota.lua
@@ -0,0 +1,459 @@
+if not modules then modules = { } end modules ['font-otx'] = {
+ version = 1.001,
+ comment = "companion to font-otf.lua (analysing)",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type = type
+
+if not trackers then trackers = { register = function() end } end
+
+----- trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end)
+
+local fonts, nodes, node = fonts, nodes, node
+
+local allocate = utilities.storage.allocate
+
+local otf = fonts.handlers.otf
+
+local analyzers = fonts.analyzers
+local initializers = allocate()
+local methods = allocate()
+
+analyzers.initializers = initializers
+analyzers.methods = methods
+analyzers.useunicodemarks = false
+
+local a_state = attributes.private('state')
+
+local nuts = nodes.nuts
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getprop = nuts.getprop
+local setprop = nuts.setprop
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local traverse_id = nuts.traverse_id
+local traverse_node_list = nuts.traverse
+local end_of_math = nuts.end_of_math
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local math_code = nodecodes.math
+
+local fontdata = fonts.hashes.identifiers
+local categories = characters and characters.categories or { } -- sorry, only in context
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+--[[ldx--
+Analyzers run per script and/or language and are needed in order to
+process features right.
+--ldx]]--
+
+-- never use these numbers directly
+
+local s_init = 1 local s_rphf = 7
+local s_medi = 2 local s_half = 8
+local s_fina = 3 local s_pref = 9
+local s_isol = 4 local s_blwf = 10
+local s_mark = 5 local s_pstf = 11
+local s_rest = 6
+
+local states = {
+ init = s_init,
+ medi = s_medi,
+ fina = s_fina,
+ isol = s_isol,
+ mark = s_mark,
+ rest = s_rest,
+ rphf = s_rphf,
+ half = s_half,
+ pref = s_pref,
+ blwf = s_blwf,
+ pstf = s_pstf,
+}
+
+local features = {
+ init = s_init,
+ medi = s_medi,
+ fina = s_fina,
+ isol = s_isol,
+ -- mark = s_mark,
+ -- rest = s_rest,
+ rphf = s_rphf,
+ half = s_half,
+ pref = s_pref,
+ blwf = s_blwf,
+ pstf = s_pstf,
+}
+
+analyzers.states = states
+analyzers.features = features
+
+-- todo: analyzers per script/lang, cross font, so we need a font id hash -> script
+-- e.g. latin -> hyphenate, arab -> 1/2/3 analyze -- its own namespace
+
+function analyzers.setstate(head,font)
+ local useunicodemarks = analyzers.useunicodemarks
+ local tfmdata = fontdata[font]
+ local descriptions = tfmdata.descriptions
+ local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
+ current = tonut(current)
+ while current do
+ local id = getid(current)
+ if id == glyph_code and getfont(current) == font then
+ done = true
+ local char = getchar(current)
+ local d = descriptions[char]
+ if d then
+ if d.class == "mark" or (useunicodemarks and categories[char] == "mn") then
+ done = true
+ setprop(current,a_state,s_mark)
+ elseif n == 0 then
+ first, last, n = current, current, 1
+ setprop(current,a_state,s_init)
+ else
+ last, n = current, n+1
+ setprop(current,a_state,s_medi)
+ end
+ else -- finish
+ if first and first == last then
+ setprop(last,a_state,s_isol)
+ elseif last then
+ setprop(last,a_state,s_fina)
+ end
+ first, last, n = nil, nil, 0
+ end
+ elseif id == disc_code then
+ -- always in the middle
+ setprop(current,a_state,s_medi)
+ last = current
+ else -- finish
+ if first and first == last then
+ setprop(last,a_state,s_isol)
+ elseif last then
+ setprop(last,a_state,s_fina)
+ end
+ first, last, n = nil, nil, 0
+ if id == math_code then
+ current = end_of_math(current)
+ end
+ end
+ current = getnext(current)
+ end
+ if first and first == last then
+ setprop(last,a_state,s_isol)
+ elseif last then
+ setprop(last,a_state,s_fina)
+ end
+ return head, done
+end
+
+-- in the future we will use language/script attributes instead of the
+-- font related value, but then we also need dynamic features which is
+-- somewhat slower; and .. we need a chain of them
+
+local function analyzeinitializer(tfmdata,value) -- attr
+ local script, language = otf.scriptandlanguage(tfmdata) -- attr
+ local action = initializers[script]
+ if not action then
+ -- skip
+ elseif type(action) == "function" then
+ return action(tfmdata,value)
+ else
+ local action = action[language]
+ if action then
+ return action(tfmdata,value)
+ end
+ end
+end
+
+local function analyzeprocessor(head,font,attr)
+ local tfmdata = fontdata[font]
+ local script, language = otf.scriptandlanguage(tfmdata,attr)
+ local action = methods[script]
+ if not action then
+ -- skip
+ elseif type(action) == "function" then
+ return action(head,font,attr)
+ else
+ action = action[language]
+ if action then
+ return action(head,font,attr)
+ end
+ end
+ return head, false
+end
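-- Both analyzeinitializer and analyzeprocessor above resolve an action in two
-- steps: first by script, then, when the entry is a table rather than a
-- function, by language. A self-contained sketch of that dispatch (the scripts
-- and actions here are made up for illustration):

local methods = {
    latn = function(head) return head, "latin analysis" end,
    arab = {
        dflt = function(head) return head, "arabic analysis" end,
    },
}

local function analyze(head, script, language)
    local action = methods[script]
    if not action then
        return head, false                   -- no analyzer for this script
    elseif type(action) == "function" then
        return action(head)                  -- script-wide analyzer
    else
        action = action[language]
        if action then
            return action(head)              -- language-specific analyzer
        end
    end
    return head, false
end

print(select(2, analyze({ }, "arab", "dflt"))) -- arabic analysis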
+
+registerotffeature {
+ name = "analyze",
+ description = "analysis of character classes",
+ default = true,
+ initializers = {
+ node = analyzeinitializer,
+ },
+ processors = {
+ position = 1,
+ node = analyzeprocessor,
+ }
+}
+
+-- latin
+
+methods.latn = analyzers.setstate
+
+-- This info eventually can go into char-def and we will have a state
+-- table for generic then (unicode recognizes all states, but in practice
+-- it only has the following):
+--
+-- isolated : isol
+-- final : isol_fina
+-- medial : isol_fina_medi_init
+--
+-- so in practice, without an analyzer it's rather useless info, which is
+-- why having it in char-def only makes sense for special purposes like
+-- tracing cq. visualizing.
+
+local tatweel = 0x0640
+local zwnj = 0x200C
+local zwj = 0x200D
+
+local isolated = { -- isol
+ [0x0600] = true, [0x0601] = true, [0x0602] = true, [0x0603] = true,
+ [0x0604] = true,
+ [0x0608] = true, [0x060B] = true, [0x0621] = true, [0x0674] = true,
+ [0x06DD] = true,
+ -- mandaic
+ [0x0856] = true, [0x0858] = true, [0x0857] = true,
+ -- n'ko
+ [0x07FA] = true,
+ -- also here:
+ [zwnj] = true,
+ -- 7
+ [0x08AD] = true,
+}
+
+local final = { -- isol_fina
+ [0x0622] = true, [0x0623] = true, [0x0624] = true, [0x0625] = true,
+ [0x0627] = true, [0x0629] = true, [0x062F] = true, [0x0630] = true,
+ [0x0631] = true, [0x0632] = true, [0x0648] = true, [0x0671] = true,
+ [0x0672] = true, [0x0673] = true, [0x0675] = true, [0x0676] = true,
+ [0x0677] = true, [0x0688] = true, [0x0689] = true, [0x068A] = true,
+ [0x068B] = true, [0x068C] = true, [0x068D] = true, [0x068E] = true,
+ [0x068F] = true, [0x0690] = true, [0x0691] = true, [0x0692] = true,
+ [0x0693] = true, [0x0694] = true, [0x0695] = true, [0x0696] = true,
+ [0x0697] = true, [0x0698] = true, [0x0699] = true, [0x06C0] = true,
+ [0x06C3] = true, [0x06C4] = true, [0x06C5] = true, [0x06C6] = true,
+ [0x06C7] = true, [0x06C8] = true, [0x06C9] = true, [0x06CA] = true,
+ [0x06CB] = true, [0x06CD] = true, [0x06CF] = true, [0x06D2] = true,
+ [0x06D3] = true, [0x06D5] = true, [0x06EE] = true, [0x06EF] = true,
+ [0x0759] = true, [0x075A] = true, [0x075B] = true, [0x076B] = true,
+ [0x076C] = true, [0x0771] = true, [0x0773] = true, [0x0774] = true,
+ [0x0778] = true, [0x0779] = true,
+ [0x08AA] = true, [0x08AB] = true, [0x08AC] = true,
+ [0xFEF5] = true, [0xFEF7] = true, [0xFEF9] = true, [0xFEFB] = true,
+ -- syriac
+ [0x0710] = true, [0x0715] = true, [0x0716] = true, [0x0717] = true,
+ [0x0718] = true, [0x0719] = true, [0x0728] = true, [0x072A] = true,
+ [0x072C] = true, [0x071E] = true,
+ [0x072F] = true, [0x074D] = true,
+ -- mandaic
+ [0x0840] = true, [0x0849] = true, [0x0854] = true, [0x0846] = true,
+ [0x084F] = true,
+ -- 7
+ [0x08AE] = true, [0x08B1] = true, [0x08B2] = true,
+}
+
+local medial = { -- isol_fina_medi_init
+ [0x0626] = true, [0x0628] = true, [0x062A] = true, [0x062B] = true,
+ [0x062C] = true, [0x062D] = true, [0x062E] = true, [0x0633] = true,
+ [0x0634] = true, [0x0635] = true, [0x0636] = true, [0x0637] = true,
+ [0x0638] = true, [0x0639] = true, [0x063A] = true, [0x063B] = true,
+ [0x063C] = true, [0x063D] = true, [0x063E] = true, [0x063F] = true,
+ [0x0641] = true, [0x0642] = true, [0x0643] = true,
+ [0x0644] = true, [0x0645] = true, [0x0646] = true, [0x0647] = true,
+ [0x0649] = true, [0x064A] = true, [0x066E] = true, [0x066F] = true,
+ [0x0678] = true, [0x0679] = true, [0x067A] = true, [0x067B] = true,
+ [0x067C] = true, [0x067D] = true, [0x067E] = true, [0x067F] = true,
+ [0x0680] = true, [0x0681] = true, [0x0682] = true, [0x0683] = true,
+ [0x0684] = true, [0x0685] = true, [0x0686] = true, [0x0687] = true,
+ [0x069A] = true, [0x069B] = true, [0x069C] = true, [0x069D] = true,
+ [0x069E] = true, [0x069F] = true, [0x06A0] = true, [0x06A1] = true,
+ [0x06A2] = true, [0x06A3] = true, [0x06A4] = true, [0x06A5] = true,
+ [0x06A6] = true, [0x06A7] = true, [0x06A8] = true, [0x06A9] = true,
+ [0x06AA] = true, [0x06AB] = true, [0x06AC] = true, [0x06AD] = true,
+ [0x06AE] = true, [0x06AF] = true, [0x06B0] = true, [0x06B1] = true,
+ [0x06B2] = true, [0x06B3] = true, [0x06B4] = true, [0x06B5] = true,
+ [0x06B6] = true, [0x06B7] = true, [0x06B8] = true, [0x06B9] = true,
+ [0x06BA] = true, [0x06BB] = true, [0x06BC] = true, [0x06BD] = true,
+ [0x06BE] = true, [0x06BF] = true, [0x06C1] = true, [0x06C2] = true,
+ [0x06CC] = true, [0x06CE] = true, [0x06D0] = true, [0x06D1] = true,
+ [0x06FA] = true, [0x06FB] = true, [0x06FC] = true, [0x06FF] = true,
+ [0x0750] = true, [0x0751] = true, [0x0752] = true, [0x0753] = true,
+ [0x0754] = true, [0x0755] = true, [0x0756] = true, [0x0757] = true,
+ [0x0758] = true, [0x075C] = true, [0x075D] = true, [0x075E] = true,
+ [0x075F] = true, [0x0760] = true, [0x0761] = true, [0x0762] = true,
+ [0x0763] = true, [0x0764] = true, [0x0765] = true, [0x0766] = true,
+ [0x0767] = true, [0x0768] = true, [0x0769] = true, [0x076A] = true,
+ [0x076D] = true, [0x076E] = true, [0x076F] = true, [0x0770] = true,
+ [0x0772] = true, [0x0775] = true, [0x0776] = true, [0x0777] = true,
+ [0x077A] = true, [0x077B] = true, [0x077C] = true, [0x077D] = true,
+ [0x077E] = true, [0x077F] = true,
+ [0x08A0] = true, [0x08A2] = true, [0x08A4] = true, [0x08A5] = true,
+ [0x08A6] = true, [0x0620] = true, [0x08A8] = true, [0x08A9] = true,
+ [0x08A7] = true, [0x08A3] = true,
+ -- syriac
+ [0x0712] = true, [0x0713] = true, [0x0714] = true, [0x071A] = true,
+ [0x071B] = true, [0x071C] = true, [0x071D] = true, [0x071F] = true,
+ [0x0720] = true, [0x0721] = true, [0x0722] = true, [0x0723] = true,
+ [0x0724] = true, [0x0725] = true, [0x0726] = true, [0x0727] = true,
+ [0x0729] = true, [0x072B] = true, [0x072D] = true, [0x072E] = true,
+ [0x074E] = true, [0x074F] = true,
+ -- mandaic
+ [0x0841] = true, [0x0842] = true, [0x0843] = true, [0x0844] = true,
+ [0x0845] = true, [0x0847] = true, [0x0848] = true, [0x0855] = true,
+ [0x0851] = true, [0x084E] = true, [0x084D] = true, [0x084A] = true,
+ [0x084B] = true, [0x084C] = true, [0x0850] = true, [0x0852] = true,
+ [0x0853] = true,
+ -- n'ko
+ [0x07D7] = true, [0x07E8] = true, [0x07D9] = true, [0x07EA] = true,
+ [0x07CA] = true, [0x07DB] = true, [0x07CC] = true, [0x07DD] = true,
+ [0x07CE] = true, [0x07DF] = true, [0x07D4] = true, [0x07E5] = true,
+ [0x07E9] = true, [0x07E7] = true, [0x07E3] = true, [0x07E2] = true,
+ [0x07E0] = true, [0x07E1] = true, [0x07DE] = true, [0x07DC] = true,
+ [0x07D1] = true, [0x07DA] = true, [0x07D8] = true, [0x07D6] = true,
+ [0x07D2] = true, [0x07D0] = true, [0x07CF] = true, [0x07CD] = true,
+ [0x07CB] = true, [0x07D3] = true, [0x07E4] = true, [0x07D5] = true,
+ [0x07E6] = true,
+ -- also here:
+ [tatweel]= true, [zwj] = true,
+ -- 7
+ [0x08A1] = true, [0x08AF] = true, [0x08B0] = true,
+}
+
+local arab_warned = { }
+
+-- todo: gref
+
+local function warning(current,what)
+ local char = getchar(current)
+ if not arab_warned[char] then
+ log.report("analyze","arab: character %C has no %a class",char,what)
+ arab_warned[char] = true
+ end
+end
+
+-- potential optimization: local medial_final = table.merged(medial,final)
+
+local function finish(first,last)
+ if last then
+ if first == last then
+ local fc = getchar(first)
+ if medial[fc] or final[fc] then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
+ else
+ local lc = getchar(last)
+ if medial[lc] or final[lc] then
+ -- if laststate == 1 or laststate == 2 or laststate == 4 then
+ setprop(last,a_state,s_fina)
+ else
+ warning(last,"fina")
+ setprop(last,a_state,s_error)
+ end
+ end
+ first, last = nil, nil
+ elseif first then
+ -- first and last are either both set or both nil, so we should never get here
+ local fc = getchar(first)
+ if medial[fc] or final[fc] then
+ setprop(first,a_state,s_isol)
+ else
+ warning(first,"isol")
+ setprop(first,a_state,s_error)
+ end
+ first = nil
+ end
+ return first, last
+end
+
+function methods.arab(head,font,attr)
+ local useunicodemarks = analyzers.useunicodemarks
+ local tfmdata = fontdata[font]
+ local marks = tfmdata.resources.marks
+ local first, last, current, done = nil, nil, head, false
+ current = tonut(current)
+ while current do
+ local id = getid(current)
+ if id == glyph_code and getfont(current) == font and getsubtype(current)<256 and not getprop(current,a_state) then
+ done = true
+ local char = getchar(current)
+ if marks[char] or (useunicodemarks and categories[char] == "mn") then
+ setprop(current,a_state,s_mark)
+ elseif isolated[char] then -- can be zwj or zwnj too
+ first, last = finish(first,last)
+ setprop(current,a_state,s_isol)
+ first, last = nil, nil
+ elseif not first then
+ if medial[char] then
+ setprop(current,a_state,s_init)
+ first, last = first or current, current
+ elseif final[char] then
+ setprop(current,a_state,s_isol)
+ first, last = nil, nil
+ else -- no arab
+ first, last = finish(first,last)
+ end
+ elseif medial[char] then
+ first, last = first or current, current
+ setprop(current,a_state,s_medi)
+ elseif final[char] then
+ if getprop(last,a_state) ~= s_init then
+ -- tricky, we need to check what last may be !
+ setprop(last,a_state,s_medi)
+ end
+ setprop(current,a_state,s_fina)
+ first, last = nil, nil
+ elseif char >= 0x0600 and char <= 0x06FF then -- needs checking
+ setprop(current,a_state,s_rest)
+ first, last = finish(first,last)
+ else -- no
+ first, last = finish(first,last)
+ end
+ else
+ if first or last then
+ first, last = finish(first,last)
+ end
+ if id == math_code then
+ current = end_of_math(current)
+ end
+ end
+ current = getnext(current)
+ end
+ if first or last then
+ finish(first,last)
+ end
+ return head, done
+end
+
+methods.syrc = methods.arab
+methods.mand = methods.arab
+methods.nko = methods.arab
+
+directives.register("otf.analyze.useunicodemarks",function(v)
+ analyzers.useunicodemarks = v
+end)
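-- methods.arab above walks the glyph list and tags each character as isolated,
-- initial, medial or final, using the isolated/final/medial class tables above.
-- A compact standalone sketch of the same classification over a plain array of
-- code points (the class tables here are heavily reduced, for illustration only):

local medial = { [0x0628] = true, [0x062A] = true, [0x0633] = true } -- joins on both sides
local final  = { [0x0627] = true, [0x062F] = true }                  -- joins to the preceding character only

local function classify(chars)
    local states, first, last = { }, nil, nil
    local function finish()
        if first and first == last then
            states[first] = "isol"           -- a run of one letter stays isolated
        elseif last then
            states[last] = "fina"            -- close the run on its last letter
        end
        first, last = nil, nil
    end
    for i = 1, #chars do
        local c = chars[i]
        if medial[c] then
            states[i] = first and "medi" or "init"
            first, last = first or i, i
        elseif final[c] then
            states[i] = first and "fina" or "isol"
            first, last = nil, nil
        else
            finish()                          -- not a joining character
            states[i] = "none"
        end
    end
    finish()
    return states
end

-- beh (dual joining), alef (right joining), beh again: init fina isol
for i, s in ipairs(classify { 0x0628, 0x0627, 0x0628 }) do print(i, s) end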
diff --git a/tex/generic/context/luatex/luatex-fonts-otn.lua b/tex/generic/context/luatex/luatex-fonts-otn.lua
new file mode 100644
index 000000000..dd3aa6153
--- /dev/null
+++ b/tex/generic/context/luatex/luatex-fonts-otn.lua
@@ -0,0 +1,2893 @@
+if not modules then modules = { } end modules ['font-otn'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- todo: looks like we have a leak somewhere (probably in ligatures)
+-- todo: copy attributes to disc
+
+-- this is a context version which can contain experimental code, but when we
+-- have serious patches we also need to change the other two font-otn files
+
+-- preprocessors = { "nodes" }
+
+-- anchor class : mark, mkmk, curs, mklg (todo)
+-- anchor type : mark, basechar, baselig, basemark, centry, cexit, max (todo)
+
+-- this is still somewhat preliminary and it will get better in due time;
+-- much functionality could only be implemented thanks to the husayni font
+-- of Idris Samawi Hamid to whom we dedicate this module.
+
+-- in retrospect it always looks easy but believe it or not, it took a lot
+-- of work to get proper open type support done: buggy fonts, fuzzy specs,
+-- specially made test fonts, many skype sessions between taco, idris and me,
+-- torture tests etc etc ... unfortunately the code does not show how much
+-- time it took ...
+
+-- todo:
+--
+-- extension infrastructure (for usage out of context)
+-- sorting features according to vendors/renderers
+-- alternative loop quitters
+-- check cursive and r2l
+-- find out where ignore-mark-classes went
+-- default features (per language, script)
+-- handle positions (we need example fonts)
+-- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere)
+-- mark (to mark) code is still not what it should be (too messy but we need some more extreme husayni tests)
+-- remove some optimizations (when I have a faster machine)
+--
+-- maybe redo the lot some way (more context specific)
+
+--[[ldx--
+This module is a bit more split up than I'd like but since we also want to test
+with plain TeX it has to be so. This module is part of ConTeXt and discussion
+about improvements and functionality mostly happens on the ConTeXt mailing list.
+
+The specification of OpenType is kind of vague. Apart from the lack of a proper
+free specification there's also the problem that Microsoft and Adobe
+may have their own interpretation of how and in what order to apply features.
+In general the Microsoft website has more detailed specifications and is a
+better reference. There is also some information in the FontForge help files.
+
+Because there is so much possible, fonts might contain bugs and/or be made to
+work with certain renderers. These may evolve over time which may have the side
+effect that suddenly fonts behave differently.
+
+After a lot of experiments (mostly by Taco, me and Idris) we're now at yet another
+implementation. Of course all errors are mine and of course the code can be
+improved. There are quite some optimizations going on here and processing speed
+is currently acceptable. Not all functions are implemented yet, often because I
+lack the fonts for testing. Many scripts are not yet supported either, but I will
+look into them as soon as users ask for it.
+
+Because there are different interpretations possible, I will extend the code
+with more (configurable) variants. I can also add hooks for users so that they can
+write their own extensions.
+
+Glyphs are indexed not by unicode but in their own way. This is because there is no
+relationship with unicode at all, apart from the fact that a font might cover certain
+ranges of characters. One character can have multiple shapes. However, at the
+TeX end we use unicode, and all extra glyphs are mapped into a private
+space. This is needed because we need to access them and TeX has to include
+them in the output eventually.
+
+The raw table as it comes from FontForge gets reorganized to fit our needs.
+In ConTeXt that table is packed (similar tables are shared) and cached on disk
+so that successive runs can use the optimized table (after loading, the table is
+unpacked). The flattening code used later is a prelude to an even more compact table
+format (and as such it keeps evolving).
+
+This module is sparsely documented because it is a moving target. The table format
+of the reader changes and we experiment a lot with different methods for supporting
+features.
+
+As with the FontForge code, we may decide to store more information in the
+OTF table.
+
+Incrementing the version number will force a re-cache. We jump the number by one
+when there's a fix in the FontForge library or Lua code that
+results in different tables.
+--ldx]]--
+
+-- action handler chainproc chainmore comment
+--
+-- gsub_single ok ok ok
+-- gsub_multiple ok ok not implemented yet
+-- gsub_alternate ok ok not implemented yet
+-- gsub_ligature ok ok ok
+-- gsub_context ok --
+-- gsub_contextchain ok --
+-- gsub_reversecontextchain ok --
+-- chainsub -- ok
+-- reversesub -- ok
+-- gpos_mark2base ok ok
+-- gpos_mark2ligature ok ok
+-- gpos_mark2mark ok ok
+-- gpos_cursive ok untested
+-- gpos_single ok ok
+-- gpos_pair ok ok
+-- gpos_context ok --
+-- gpos_contextchain ok --
+--
+-- todo: contextpos and contextsub and class stuff
+--
+-- actions:
+--
+-- handler : actions triggered by lookup
+-- chainproc : actions triggered by contextual lookup
+-- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij)
+--
+-- remark: the 'not implemented yet' variants will be done when we have fonts that use them
+-- remark: we need to check what to do with discretionaries
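+
+-- As a rough sketch of the calling conventions used below (an orientation aid only;
+-- 'lookupdata' is just a placeholder for the type specific data such as a replacement,
+-- a ligature tree or a kern table):
+--
+--   handlers  [type](head,start,kind,lookupname,lookupdata,sequence)
+--   chainprocs[type](head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+--   chainmores[type](head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,n)
+--
+-- all of them return head, start and a boolean that indicates if something was done
+-- (the chained ligature builder also returns the number of replacements).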
+
+-- We used to have independent hashes for lookups but as the tags are unique
+-- we now use only one hash. If needed we can have multiple again but in that
+-- case I will probably prefix (i.e. rename) the lookups in the cached font file.
+
+-- Todo: make plugin feature that operates on char/glyphnode arrays
+
+local concat, insert, remove = table.concat, table.insert, table.remove
+local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
+local type, next, tonumber, tostring = type, next, tonumber, tostring
+local lpegmatch = lpeg.match
+local random = math.random
+local formatters = string.formatters
+
+local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes
+
+local registertracker = trackers.register
+
+local fonts = fonts
+local otf = fonts.handlers.otf
+
+local trace_lookups = false registertracker("otf.lookups", function(v) trace_lookups = v end)
+local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end)
+local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end)
+local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end)
+local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end)
+local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end)
+local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end)
+local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end)
+local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end)
+local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end)
+local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end)
+local trace_details = false registertracker("otf.details", function(v) trace_details = v end)
+local trace_applied = false registertracker("otf.applied", function(v) trace_applied = v end)
+local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end)
+local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end)
+local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end)
+
+local report_direct = logs.reporter("fonts","otf direct")
+local report_subchain = logs.reporter("fonts","otf subchain")
+local report_chain = logs.reporter("fonts","otf chain")
+local report_process = logs.reporter("fonts","otf process")
+local report_prepare = logs.reporter("fonts","otf prepare")
+local report_warning = logs.reporter("fonts","otf warning")
+local report_run = logs.reporter("fonts","otf run")
+
+registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
+registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
+
+registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
+registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
+registertracker("otf.actions","otf.replacements,otf.positions")
+registertracker("otf.injections","nodes.injections")
+
+registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
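+
+-- For reference (an illustration, not part of the mechanism): these flags are normally
+-- toggled at runtime, for instance with
+--
+--   trackers.enable("otf.ligatures")
+--
+-- at the Lua end or \enabletrackers[otf.ligatures] at the TeX end.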
+
+local nuts = nodes.nuts
+local tonode = nuts.tonode
+local tonut = nuts.tonut
+
+local getfield = nuts.getfield
+local setfield = nuts.setfield
+local getnext = nuts.getnext
+local getprev = nuts.getprev
+local getid = nuts.getid
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getprop = nuts.getprop
+local setprop = nuts.setprop
+local getfont = nuts.getfont
+local getsubtype = nuts.getsubtype
+local getchar = nuts.getchar
+
+local insert_node_before = nuts.insert_before
+local insert_node_after = nuts.insert_after
+local delete_node = nuts.delete
+local remove_node = nuts.remove
+local copy_node = nuts.copy
+local copy_node_list = nuts.copy_list
+local find_node_tail = nuts.tail
+local flush_node_list = nuts.flush_list
+local free_node = nuts.free
+local end_of_math = nuts.end_of_math
+local traverse_nodes = nuts.traverse
+local traverse_id = nuts.traverse_id
+
+local setmetatableindex = table.setmetatableindex
+
+local zwnj = 0x200C
+local zwj = 0x200D
+local wildcard = "*"
+local default = "dflt"
+
+local nodecodes = nodes.nodecodes
+local whatcodes = nodes.whatcodes
+local glyphcodes = nodes.glyphcodes
+local disccodes = nodes.disccodes
+
+local glyph_code = nodecodes.glyph
+local glue_code = nodecodes.glue
+local disc_code = nodecodes.disc
+local whatsit_code = nodecodes.whatsit
+local math_code = nodecodes.math
+
+local dir_code = whatcodes.dir
+local localpar_code = whatcodes.localpar
+
+local discretionary_code = disccodes.discretionary
+local regular_code = disccodes.regular
+local automatic_code = disccodes.automatic
+
+local ligature_code = glyphcodes.ligature
+
+local privateattribute = attributes.private
+
+-- Something is messed up: we have two mark / ligature indices, one at the injection
+-- end and one here ... this is based on KE's patches but there is something fishy
+-- there as I'm pretty sure that for husayni we need some connection (as it's much
+-- more complex than an average font) but I need proper examples of all cases, not
+-- of only some.
+
+local a_state = privateattribute('state')
+local a_cursbase = privateattribute('cursbase') -- to be checked, probably can go
+
+local injections = nodes.injections
+local setmark = injections.setmark
+local setcursive = injections.setcursive
+local setkern = injections.setkern
+local setpair = injections.setpair
+local resetinjection = injections.reset
+local copyinjection = injections.copy
+local setligaindex = injections.setligaindex
+local getligaindex = injections.getligaindex
+
+local cursonce = true
+
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+local onetimemessage = fonts.loggers.onetimemessage or function() end
+
+otf.defaultnodealternate = "none" -- first last
+
+-- we share some vars here, after all, we have no nested lookups and less code
+
+local tfmdata = false
+local characters = false
+local descriptions = false
+local resources = false
+local marks = false
+local currentfont = false
+local lookuptable = false
+local anchorlookups = false
+local lookuptypes = false
+local lookuptags = false
+local handlers = { }
+local rlmode = 0
+local featurevalue = false
+
+-- head is always a whatsit so we can safely assume that head is not changed
+
+-- we use this for special testing and documentation
+
+local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
+local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
+local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_direct(...)
+end
+
+local function logwarning(...)
+ report_direct(...)
+end
+
+local f_unicode = formatters["%U"]
+local f_uniname = formatters["%U (%s)"]
+local f_unilist = formatters["% t (% t)"]
+
+local function gref(n) -- currently the same as in font-otb
+ if type(n) == "number" then
+ local description = descriptions[n]
+ local name = description and description.name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local num, nam = { }, { }
+ for i=1,#n do
+ local ni = n[i]
+ if tonumber(ni) then -- later we will start at 2
+ local di = descriptions[ni]
+ num[i] = f_unicode(ni)
+ nam[i] = di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return ""
+ end
+end
+
+local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_
+ if index then
+ return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index)
+ elseif lookupname then
+ return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname])
+ elseif chainlookupname then
+ return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname])
+ elseif chainname then
+ return formatters["feature %a, chain %a"](kind,lookuptags[chainname])
+ else
+ return formatters["feature %a"](kind)
+ end
+end
+
+local function pref(kind,lookupname)
+ return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname])
+end
+
+-- We can assume that languages that use marks are not hyphenated. We can also assume
+-- that at most one discretionary is present.
+
+-- We do need components in funny kerning mode but maybe I can better reconstruct them
+-- as we do have the font components info available; removing components makes the
+-- previous code much simpler. Also, later on copying and freeing becomes easier.
+-- However, for Arabic we need to keep them around for the sake of mark placement
+-- and indices.
+
+local function copy_glyph(g) -- next and prev are untouched !
+ local components = getfield(g,"components")
+ if components then
+ setfield(g,"components",nil)
+ local n = copy_node(g)
+ copyinjection(n,g) -- we need to preserve the lig indices
+ setfield(g,"components",components)
+ return n
+ else
+ local n = copy_node(g)
+ copyinjection(n,g) -- we need to preserve the lig indices
+ return n
+ end
+end
+
+--
+
+-- start is a mark and we need to keep that one
+
+local function markstoligature(kind,lookupname,head,start,stop,char)
+ if start == stop and getchar(start) == char then
+ return head, start
+ else
+ local prev = getprev(start)
+ local next = getnext(stop)
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
+ local base = copy_glyph(start)
+ if head == start then
+ head = base
+ end
+ resetinjection(base)
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",start)
+ if prev then
+ setfield(prev,"next",base)
+ end
+ if next then
+ setfield(next,"prev",base)
+ end
+ setfield(base,"next",next)
+ setfield(base,"prev",prev)
+ return head, base
+ end
+end
+
+-- The next code is somewhat complicated by the fact that some fonts can have ligatures
+-- made from ligatures that themselves have marks. This was identified by Kai in, for
+-- instance, arabtype: KAF LAM SHADDA ALEF FATHA (0x0643 0x0644 0x0651 0x0627 0x064E).
+-- This becomes KAF LAM-ALEF with a SHADDA on the first and a FATHA on the second
+-- component. In the next iteration this becomes a KAF-LAM-ALEF with a SHADDA on the
+-- second and a FATHA on the third component.
+
+local function getcomponentindex(start)
+ if getid(start) ~= glyph_code then
+ return 0
+ elseif getsubtype(start) == ligature_code then
+ local i = 0
+ local components = getfield(start,"components")
+ while components do
+ i = i + getcomponentindex(components)
+ components = getnext(components)
+ end
+ return i
+ elseif not marks[getchar(start)] then
+ return 1
+ else
+ return 0
+ end
+end
+
+-- eventually we will do positioning in another way (needs additional w/h/d fields)
+
+local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head
+ if start == stop and getchar(start) == char then
+ resetinjection(start)
+ setfield(start,"char",char)
+ return head, start
+ end
+ local prev = getprev(start)
+ local next = getnext(stop)
+ setfield(start,"prev",nil)
+ setfield(stop,"next",nil)
+ local base = copy_glyph(start)
+ if start == head then
+ head = base
+ end
+ resetinjection(base)
+ setfield(base,"char",char)
+ setfield(base,"subtype",ligature_code)
+ setfield(base,"components",start) -- start can have components
+ if prev then
+ setfield(prev,"next",base)
+ end
+ if next then
+ setfield(next,"prev",base)
+ end
+ setfield(base,"next",next)
+ setfield(base,"prev",prev)
+ if not discfound then
+ local deletemarks = markflag ~= "mark"
+ local components = start
+ local baseindex = 0
+ local componentindex = 0
+ local head = base
+ local current = base
+ -- first we loop over the glyphs in start .. stop
+ while start do
+ local char = getchar(start)
+ if not marks[char] then
+ baseindex = baseindex + componentindex
+ componentindex = getcomponentindex(start)
+ elseif not deletemarks then -- quite fishy
+ setligaindex(start,baseindex + getligaindex(start,componentindex))
+ if trace_marks then
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
+ end
+ head, current = insert_node_after(head,current,copy_node(start)) -- unlikely that mark has components
+ elseif trace_marks then
+ logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char))
+ end
+ start = getnext(start)
+ end
+ -- we can have one accent as part of a lookup and another following
+ -- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari was added)
+ local start = getnext(current)
+ while start and getid(start) == glyph_code do
+ local char = getchar(start)
+ if marks[char] then
+ setligaindex(start,baseindex + getligaindex(start,componentindex))
+ if trace_marks then
+ logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
+ end
+ else
+ break
+ end
+ start = getnext(start)
+ end
+ end
+ return head, base
+end
+
+function handlers.gsub_single(head,start,kind,lookupname,replacement)
+ if trace_singles then
+ logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement))
+ end
+ resetinjection(start)
+ setfield(start,"char",replacement)
+ return head, start, true
+end
+
+local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
+ local n = #alternatives
+ if value == "random" then
+ local r = random(1,n)
+ return alternatives[r], trace_alternatives and formatters["value %a, taking %a"](value,r)
+ elseif value == "first" then
+ return alternatives[1], trace_alternatives and formatters["value %a, taking %a"](value,1)
+ elseif value == "last" then
+ return alternatives[n], trace_alternatives and formatters["value %a, taking %a"](value,n)
+ else
+ value = tonumber(value)
+ if type(value) ~= "number" then
+ return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif value > n then
+ local defaultalt = otf.defaultnodealternate
+ if defaultalt == "first" then
+                return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif defaultalt == "last" then
+                return alternatives[n], trace_alternatives and formatters["invalid value %s, taking %a"](value,n)
+ else
+ return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
+ end
+ elseif value == 0 then
+ return getchar(start), trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
+ elseif value < 1 then
+ return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
+ else
+ return alternatives[value], trace_alternatives and formatters["value %a, taking %a"](value,value)
+ end
+ end
+end
+
+local function multiple_glyphs(head,start,multiple,ignoremarks)
+ local nofmultiples = #multiple
+ if nofmultiples > 0 then
+ resetinjection(start)
+ setfield(start,"char",multiple[1])
+ if nofmultiples > 1 then
+ local sn = getnext(start)
+ for k=2,nofmultiples do -- todo: use insert_node
+-- untested:
+--
+-- while ignoremarks and marks[getchar(sn)] do
+--     sn = getnext(sn)
+-- end
+ local n = copy_node(start) -- ignore components
+ resetinjection(n)
+ setfield(n,"char",multiple[k])
+ setfield(n,"next",sn)
+ setfield(n,"prev",start)
+ if sn then
+ setfield(sn,"prev",n)
+ end
+ setfield(start,"next",n)
+ start = n
+ end
+ end
+ return head, start, true
+ else
+ if trace_multiples then
+ logprocess("no multiple for %s",gref(getchar(start)))
+ end
+ return head, start, false
+ end
+end
+
+function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
+ local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
+ local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment)
+ end
+ resetinjection(start)
+ setfield(start,"char",choice)
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment)
+ end
+ end
+ return head, start, true
+end
+
+function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple))
+ end
+ return multiple_glyphs(head,start,multiple,sequence.flags[1])
+end
+
+function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
+ local s, stop, discfound = getnext(start), nil, false
+ local startchar = getchar(start)
+ if marks[startchar] then
+ while s do
+ local id = getid(s)
+ if id == glyph_code and getfont(s) == currentfont and getsubtype(s)<256 then
+ local lg = ligature[getchar(s)]
+ if lg then
+ stop = s
+ ligature = lg
+ s = getnext(s)
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ if stop then
+ local lig = ligature.ligature
+ if lig then
+ if trace_ligatures then
+ local stopchar = getchar(stop)
+ head, start = markstoligature(kind,lookupname,head,start,stop,lig)
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
+ else
+ head, start = markstoligature(kind,lookupname,head,start,stop,lig)
+ end
+ return head, start, true
+ else
+ -- ok, goto next lookup
+ end
+ end
+ else
+ local skipmark = sequence.flags[1]
+ while s do
+ local id = getid(s)
+ if id == glyph_code and getsubtype(s)<256 then
+ if getfont(s) == currentfont then
+ local char = getchar(s)
+ if skipmark and marks[char] then
+ s = getnext(s)
+ else
+ local lg = ligature[char]
+ if lg then
+ stop = s
+ ligature = lg
+ s = getnext(s)
+ else
+ break
+ end
+ end
+ else
+ break
+ end
+ elseif id == disc_code then
+ discfound = true
+ s = getnext(s)
+ else
+ break
+ end
+ end
+ local lig = ligature.ligature
+ if lig then
+ if stop then
+ if trace_ligatures then
+ local stopchar = getchar(stop)
+ head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
+ else
+ head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ end
+ else
+ -- weird but happens (in some arabic font)
+ resetinjection(start)
+ setfield(start,"char",lig)
+ if trace_ligatures then
+ logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
+ end
+ end
+ return head, start, true
+ else
+ -- weird but happens
+ end
+ end
+ return head, start, false
+end
+
+--[[ldx--
+We get hits on a mark, but we're not sure if it has to be applied so
+we need to explicitly test for basechar, baselig and basemark entries.
+--ldx]]--
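+
+-- A sketch of the data consulted here (field names taken from the code below; the
+-- anchor names and values are whatever the loader stored, so treat this as an
+-- illustration only):
+--
+--   descriptions[basechar].anchors = {
+--       basechar = { ["SomeAnchor"] = <base attachment point> },
+--       baselig  = { ["SomeAnchor"] = { <one point per ligature component> } },
+--       basemark = { ["SomeAnchor"] = <mark to mark attachment point> },
+--       centry   = { ["SomeAnchor"] = <cursive entry point> },
+--   }
+--
+-- while markanchors[anchor] holds the matching attachment point of the mark.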
+
+function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
+ local markchar = getchar(start)
+ if marks[markchar] then
+ local base = getprev(start) -- [glyph] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
+ if marks[basechar] then
+ while true do
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+ end
+ end
+ end
+ local baseanchors = descriptions[basechar]
+ if baseanchors then
+ baseanchors = baseanchors.anchors
+ end
+ if baseanchors then
+ local baseanchors = baseanchors['basechar']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
+ -- check chainpos variant
+ local markchar = getchar(start)
+ if marks[markchar] then
+ local base = getprev(start) -- [glyph] [optional marks] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
+ if marks[basechar] then
+ while true do
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+ end
+ end
+ end
+ local index = getligaindex(start)
+ local baseanchors = descriptions[basechar]
+ if baseanchors then
+ baseanchors = baseanchors.anchors
+ if baseanchors then
+ local baseanchors = baseanchors['baselig']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor, ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ ba = ba[index]
+ if ba then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) -- index
+ if trace_marks then
+ logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head, start, true
+ else
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index)
+ end
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
+ local markchar = getchar(start)
+ if marks[markchar] then
+ local base = getprev(start) -- [glyph] [basemark] [start=mark]
+ local slc = getligaindex(start)
+ if slc then -- a rather messy loop ... needs checking with husayni
+ while base do
+ local blc = getligaindex(base)
+ if blc and blc ~= slc then
+ base = getprev(base)
+ else
+ break
+ end
+ end
+ end
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
+ local basechar = getchar(base)
+ local baseanchors = descriptions[basechar]
+ if baseanchors then
+ baseanchors = baseanchors.anchors
+ if baseanchors then
+ baseanchors = baseanchors['basemark']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked
+ local alreadydone = cursonce and getprop(start,a_cursbase)
+ if not alreadydone then
+ local done = false
+ local startchar = getchar(start)
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt = getnext(start)
+ while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
+ local nextchar = getchar(nxt)
+ if marks[nextchar] then
+ -- should not happen (maybe warning)
+ nxt = getnext(nxt)
+ else
+ local entryanchors = descriptions[nextchar]
+ if entryanchors then
+ entryanchors = entryanchors.anchors
+ if entryanchors then
+ entryanchors = entryanchors['centry']
+ if entryanchors then
+ local al = anchorlookups[lookupname]
+ for anchor, entry in next, entryanchors do
+ if al[anchor] then
+ local exit = exitanchors[anchor]
+ if exit then
+ local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done = true
+ break
+ end
+ end
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head, start, done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
+ end
+ return head, start, false
+ end
+end
+
+function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
+ local startchar = getchar(start)
+ local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
+ end
+ return head, start, false
+end
+
+function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
+ -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
+ -- todo: kerns in components of ligatures
+ local snext = getnext(start)
+ if not snext then
+ return head, start, false
+ else
+ local prev, done = start, false
+ local factor = tfmdata.parameters.factor
+ local lookuptype = lookuptypes[lookupname]
+ while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
+ local nextchar = getchar(snext)
+ local krn = kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev = snext
+ snext = getnext(snext)
+ else
+ if not krn then
+ -- skip
+ elseif type(krn) == "table" then
+ if lookuptype == "pair" then -- probably not needed
+ local a, b = krn[2], krn[3]
+ if a and #a > 0 then
+ local startchar = getchar(start)
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b > 0 then
+ local startchar = getchar(start)
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else -- wrong ... position has different entries
+ report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
+ -- local a, b = krn[2], krn[6]
+ -- if a and a ~= 0 then
+ -- local k = setkern(snext,factor,rlmode,a)
+ -- if trace_kerns then
+ -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
+ -- end
+ -- end
+ -- if b and b ~= 0 then
+ -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor)
+ -- end
+ end
+ done = true
+ elseif krn ~= 0 then
+ local k = setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
+ end
+ done = true
+ end
+ break
+ end
+ end
+ return head, start, done
+ end
+end
+
+--[[ldx--
+I will implement multiple chain replacements once I run into a font that uses
+them. It's not that complex to handle.
+--ldx]]--
+
+local chainmores = { }
+local chainprocs = { }
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_subchain(...)
+end
+
+local logwarning = report_subchain
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_chain(...)
+end
+
+local logwarning = report_chain
+
+-- We could share functions but that would lead to extra function calls with many
+-- arguments, redundant tests and confusing messages.
+
+function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
+ logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head, start, false
+end
+
+function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n)
+ logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head, start, false
+end
+
+-- The reversesub is a special case, which is why we need to store the replacements
+-- in a bit weird way. There is no lookup and the replacement comes from the lookup
+-- itself. It is meant mostly for dealing with Urdu.
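+--
+-- Schematically (an illustration only; keys and values are character codes as
+-- prepared by the loader):
+--
+--   replacements = { [unicode] = replacementunicode, ... }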
+
+function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
+ local char = getchar(start)
+ local replacement = replacements[char]
+ if replacement then
+ if trace_singles then
+ logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
+ end
+ resetinjection(start)
+ setfield(start,"char",replacement)
+ return head, start, true
+ else
+ return head, start, false
+ end
+end
+
+--[[ldx--
+This chain stuff is somewhat tricky since we can have a sequence of actions to be
+applied: single, alternate, multiple or ligature where ligature can be an invalid
+one in the sense that it will replace multiple by one but not necessarily one that
+looks like the combination (i.e. it is the counterpart of multiple then). For
+example, the following is valid:
+
+xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx
+
+Therefore we don't really do the replacement here already unless we have the
+single lookup case. The efficiency of the replacements can be improved by deleting
+as little as needed but that would also make the code even more messy.
+--ldx]]--
+
+-- local function delete_till_stop(head,start,stop,ignoremarks) -- keeps start
+-- local n = 1
+-- if start == stop then
+-- -- done
+-- elseif ignoremarks then
+-- repeat -- start x x m x x stop => start m
+-- local next = getnext(start)
+-- if not marks[getchar(next)] then
+-- local components = getfield(next,"components")
+-- if components then -- probably not needed
+-- flush_node_list(components)
+-- end
+-- head = delete_node(head,next)
+-- end
+-- n = n + 1
+-- until next == stop
+-- else -- start x x x stop => start
+-- repeat
+-- local next = getnext(start)
+-- local components = getfield(next,"components")
+-- if components then -- probably not needed
+-- flush_node_list(components)
+-- end
+-- head = delete_node(head,next)
+-- n = n + 1
+-- until next == stop
+-- end
+-- return head, n
+-- end
+
+--[[ldx--
+Here we replace start by a single variant. First we delete the rest of the
+match.
+--ldx]]--
+
+function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ -- todo: marks ?
+ local current = start
+ local subtables = currentlookup.subtables
+ if #subtables > 1 then
+ logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
+ end
+ while current do
+ if getid(current) == glyph_code then
+ local currentchar = getchar(current)
+ local lookupname = subtables[1] -- only 1
+ local replacement = lookuphash[lookupname]
+ if not replacement then
+ if trace_bugs then
+ logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ replacement = replacement[currentchar]
+ if not replacement or replacement == "" then
+ if trace_bugs then
+ logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
+ end
+ else
+ if trace_singles then
+ logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
+ end
+ resetinjection(current)
+ setfield(current,"char",replacement)
+ end
+ end
+ return head, start, true
+ elseif current == stop then
+ break
+ else
+ current = getnext(current)
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gsub_single = chainprocs.gsub_single
+
+--[[ldx--
+Here we replace start by a sequence of new glyphs. First we delete the rest of
+the match.
+--ldx]]--
+
+function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ -- local head, n = delete_till_stop(head,start,stop)
+ local startchar = getchar(start)
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local replacements = lookuphash[lookupname]
+ if not replacements then
+ if trace_bugs then
+ logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ replacements = replacements[startchar]
+        if not replacements or replacements == "" then
+ if trace_bugs then
+ logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
+ end
+ else
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
+ end
+ return multiple_glyphs(head,start,replacements,currentlookup.flags[1])
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gsub_multiple = chainprocs.gsub_multiple
+
+--[[ldx--
+Here we replace start by a new glyph. First we delete the rest of the match.
+--ldx]]--
+
+-- char_1 mark_1 -> char_x mark_1 (ignore marks)
+-- char_1 mark_1 -> char_x
+
+-- to be checked: do we always have just one glyph?
+-- we can also have alternates for marks
+-- marks come last anyway
+-- are there cases where we need to delete the mark
+
+function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local current = start
+ local subtables = currentlookup.subtables
+ local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
+ while current do
+ if getid(current) == glyph_code then -- is this check needed?
+ local currentchar = getchar(current)
+ local lookupname = subtables[1]
+ local alternatives = lookuphash[lookupname]
+ if not alternatives then
+ if trace_bugs then
+ logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ alternatives = alternatives[currentchar]
+ if alternatives then
+ local choice, comment = get_alternative_glyph(current,alternatives,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+                            logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),choice,gref(choice),comment)
+ end
+ resetinjection(start)
+ setfield(start,"char",choice)
+ else
+ if trace_alternatives then
+                            logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(currentchar),comment)
+ end
+ end
+ elseif trace_bugs then
+                    logwarning("%s: no alternative for %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar))
+ end
+ end
+ return head, start, true
+ elseif current == stop then
+ break
+ else
+ current = getnext(current)
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gsub_alternate = chainprocs.gsub_alternate
+
+--[[ldx--
+When we replace ligatures we use a helper that handles the marks. I might change
+this function (move code inline and handle the marks by a separate function). We
+assume rather stupid ligatures (no complex disc nodes).
+--ldx]]--
+
+function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ local startchar = getchar(start)
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local ligatures = lookuphash[lookupname]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ ligatures = ligatures[startchar]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ end
+ else
+ local s = getnext(start)
+ local discfound = false
+ local last = stop
+ local nofreplacements = 0
+ local skipmark = currentlookup.flags[1]
+ while s do
+ local id = getid(s)
+ if id == disc_code then
+ s = getnext(s)
+ discfound = true
+ else
+ local schar = getchar(s)
+ if skipmark and marks[schar] then -- marks
+ s = getnext(s)
+ else
+ local lg = ligatures[schar]
+ if lg then
+ ligatures, last, nofreplacements = lg, s, nofreplacements + 1
+ if s == stop then
+ break
+ else
+ s = getnext(s)
+ end
+ else
+ break
+ end
+ end
+ end
+ end
+ local l2 = ligatures.ligature
+ if l2 then
+ if chainindex then
+ stop = last
+ end
+ if trace_ligatures then
+ if start == stop then
+ logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
+ else
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2))
+ end
+ end
+ head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
+ return head, start, true, nofreplacements
+ elseif trace_bugs then
+ if start == stop then
+ logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ else
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)))
+ end
+ end
+ end
+ end
+ return head, start, false, 0
+end
+
+chainmores.gsub_ligature = chainprocs.gsub_ligature
+
+function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar = getchar(start)
+ if marks[markchar] then
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local markanchors = lookuphash[lookupname]
+ if markanchors then
+ markanchors = markanchors[markchar]
+ end
+ if markanchors then
+ local base = getprev(start) -- [glyph] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
+ if marks[basechar] then
+ while true do
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head, start, false
+ end
+ end
+ end
+ local baseanchors = descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors = baseanchors['basechar']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar = getchar(start)
+ if marks[markchar] then
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local markanchors = lookuphash[lookupname]
+ if markanchors then
+ markanchors = markanchors[markchar]
+ end
+ if markanchors then
+ local base = getprev(start) -- [glyph] [optional marks] [start=mark]
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ local basechar = getchar(base)
+ if marks[basechar] then
+ while true do
+ base = getprev(base)
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
+ basechar = getchar(base)
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
+ end
+ return head, start, false
+ end
+ end
+ end
+ -- todo: like marks a ligatures hash
+ local index = getligaindex(start)
+ local baseanchors = descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors = baseanchors['baselig']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ ba = ba[index]
+ if ba then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+                                                cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar = getchar(start)
+ if marks[markchar] then
+ -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local markanchors = lookuphash[lookupname]
+ if markanchors then
+ markanchors = markanchors[markchar]
+ end
+ if markanchors then
+ local base = getprev(start) -- [glyph] [basemark] [start=mark]
+ local slc = getligaindex(start)
+ if slc then -- a rather messy loop ... needs checking with husayni
+ while base do
+ local blc = getligaindex(base)
+ if blc and blc ~= slc then
+ base = getprev(base)
+ else
+ break
+ end
+ end
+ end
+ if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
+ local basechar = getchar(base)
+ local baseanchors = descriptions[basechar].anchors
+ if baseanchors then
+ baseanchors = baseanchors['basemark']
+ if baseanchors then
+ local al = anchorlookups[lookupname]
+ for anchor,ba in next, baseanchors do
+ if al[anchor] then
+ local ma = markanchors[anchor]
+ if ma then
+ local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head, start, true
+ end
+ end
+ end
+ if trace_bugs then
+                            logwarning("%s: no matching anchors for mark %s and basemark %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local alreadydone = cursonce and getprop(start,a_cursbase)
+ if not alreadydone then
+ local startchar = getchar(start)
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local exitanchors = lookuphash[lookupname]
+ if exitanchors then
+ exitanchors = exitanchors[startchar]
+ end
+ if exitanchors then
+ local done = false
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt = getnext(start)
+ while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
+ local nextchar = getchar(nxt)
+ if marks[nextchar] then
+ -- should not happen (maybe warning)
+ nxt = getnext(nxt)
+ else
+ local entryanchors = descriptions[nextchar]
+ if entryanchors then
+ entryanchors = entryanchors.anchors
+ if entryanchors then
+ entryanchors = entryanchors['centry']
+ if entryanchors then
+ local al = anchorlookups[lookupname]
+ for anchor, entry in next, entryanchors do
+ if al[anchor] then
+ local exit = exitanchors[anchor]
+ if exit then
+ local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done = true
+ break
+ end
+ end
+ end
+ end
+ end
+ elseif trace_bugs then
+ -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head, start, done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
+ end
+ return head, start, false
+ end
+ end
+ return head, start, false
+end
+
+function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ -- untested .. needs checking for the new model
+ local startchar = getchar(start)
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local kerns = lookuphash[lookupname]
+ if kerns then
+ kerns = kerns[startchar] -- needed ?
+ if kerns then
+ local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
+ end
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gpos_single = chainprocs.gpos_single -- okay?
+
+-- when machines become faster i will make a shared function
+
+function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ local snext = getnext(start)
+ if snext then
+ local startchar = getchar(start)
+ local subtables = currentlookup.subtables
+ local lookupname = subtables[1]
+ local kerns = lookuphash[lookupname]
+ if kerns then
+ kerns = kerns[startchar]
+ if kerns then
+ local lookuptype = lookuptypes[lookupname]
+ local prev, done = start, false
+ local factor = tfmdata.parameters.factor
+ while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
+ local nextchar = getchar(snext)
+ local krn = kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev = snext
+ snext = getnext(snext)
+ else
+ if not krn then
+ -- skip
+ elseif type(krn) == "table" then
+ if lookuptype == "pair" then
+ local a, b = krn[2], krn[3]
+ if a and #a > 0 then
+ local startchar = getchar(start)
+ local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b > 0 then
+ local startchar = getchar(start)
+ local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
+ local a, b = krn[2], krn[6]
+ if a and a ~= 0 then
+ local k = setkern(snext,factor,rlmode,a)
+ if trace_kerns then
+ logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
+ end
+ end
+ if b and b ~= 0 then
+ logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
+ end
+ end
+ done = true
+ elseif krn ~= 0 then
+ local k = setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
+ end
+ done = true
+ end
+ break
+ end
+ end
+ return head, start, done
+ end
+ end
+ end
+ return head, start, false
+end
+
+chainmores.gpos_pair = chainprocs.gpos_pair -- okay?
+
+-- what pointer to return, spec says stop
+-- to be discussed ... is bidi changer a space?
+-- elseif char == zwnj and sequence[n][32] then -- brrr
+
+-- somehow l or f is global
+-- we don't need to pass the currentcontext, saves a bit
+-- make a slow variant that can be activated but with more tracing
+
+local function show_skip(kind,chainname,char,ck,class)
+ if ck[9] then
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
+ else
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
+ end
+end
+
+local quit_on_no_replacement = true
+
+directives.register("otf.chain.quitonnoreplacement",function(value) -- maybe per font
+ quit_on_no_replacement = value
+end)
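+
+-- As an illustration of usage (not part of the logic here): the directive can be
+-- toggled with
+--
+--   directives.disable("otf.chain.quitonnoreplacement")
+--
+-- at the Lua end, or with \disabledirectives[otf.chain.quitonnoreplacement] at the
+-- TeX end.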
+
+local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
+ -- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6]
+ local flags = sequence.flags
+ local done = false
+ local skipmark = flags[1]
+ local skipligature = flags[2]
+ local skipbase = flags[3]
+ local someskip = skipmark or skipligature or skipbase -- could be stored in flags for a fast test (hm, flags could be false !)
+ local markclass = sequence.markclass -- todo, first we need a proper test
+ local skipped = false
+ for k=1,#contexts do
+ local match = true
+ local current = start
+ local last = start
+ local ck = contexts[k]
+ local seq = ck[3]
+ local s = #seq
+ -- f..l = mid string
+ if s == 1 then
+ -- never happens
+ match = getid(current) == glyph_code and getfont(current) == currentfont and getsubtype(current)<256 and seq[1][getchar(current)]
+ else
+ -- maybe we need a better space check (maybe check for glue or category or combination)
+ -- we cannot optimize for n=2 because there can be disc nodes
+ local f, l = ck[4], ck[5]
+ -- current match
+ if f == 1 and f == l then -- current only
+ -- already a hit
+ -- match = true
+ else -- before/current/after | before/current | current/after
+ -- no need to test first hit (to be optimized)
+ if f == l then -- new, else last out of sync (f is > 1)
+ -- match = true
+ else
+ local n = f + 1
+ last = getnext(last)
+ while n <= l do
+ if last then
+ local id = getid(last)
+ if id == glyph_code then
+ if getfont(last) == currentfont and getsubtype(last)<256 then
+ local char = getchar(last)
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ last = getnext(last)
+ elseif seq[n][char] then
+ if n < l then
+ last = getnext(last)
+ end
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ elseif id == disc_code then
+ last = getnext(last)
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ end
+ end
+ end
+ -- before
+ if match and f > 1 then
+ local prev = getprev(start)
+ if prev then
+ local n = f-1
+ while n >= 1 do
+ if prev then
+ local id = getid(prev)
+ if id == glyph_code then
+ if getfont(prev) == currentfont and getsubtype(prev)<256 then -- normal char
+ local char = getchar(prev)
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n = n -1
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ elseif id == disc_code then
+                                -- skip it
+ elseif seq[n][32] then
+ n = n -1
+ else
+ match = false
+ break
+ end
+ prev = getprev(prev)
+ elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
+ n = n -1
+ else
+ match = false
+ break
+ end
+ end
+ else
+ match = false
+ end
+ end
+ -- after
+ if match and s > l then
+ local current = last and getnext(last)
+ if current then
+ -- removed optimization for s-l == 1, we have to deal with marks anyway
+ local n = l + 1
+ while n <= s do
+ if current then
+ local id = getid(current)
+ if id == glyph_code then
+ if getfont(current) == currentfont and getsubtype(current)<256 then -- normal char
+ local char = getchar(current)
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ skipped = true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ else
+ match = false
+ break
+ end
+ elseif id == disc_code then
+                                -- skip it
+ elseif seq[n][32] then -- brrr
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ current = getnext(current)
+ elseif seq[n][32] then
+ n = n + 1
+ else
+ match = false
+ break
+ end
+ end
+ else
+ match = false
+ end
+ end
+ end
+ if match then
+ -- ck == currentcontext
+ if trace_contexts then
+ local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
+ local char = getchar(start)
+ if ck[9] then
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
+ else
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
+ end
+ end
+ local chainlookups = ck[6]
+ if chainlookups then
+ local nofchainlookups = #chainlookups
+ -- we can speed this up if needed
+ if nofchainlookups == 1 then
+ local chainlookupname = chainlookups[1]
+ local chainlookup = lookuptable[chainlookupname]
+ if chainlookup then
+ local cp = chainprocs[chainlookup.type]
+ if cp then
+ local ok
+ head, start, ok = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ if ok then
+ done = true
+ end
+ else
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ end
+ else -- shouldn't happen
+ logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
+ end
+ else
+ local i = 1
+ while true do
+ if skipped then
+ while true do
+ local char = getchar(start)
+ local ccd = descriptions[char]
+ if ccd then
+ local class = ccd.class
+ if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ start = getnext(start)
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ end
+ local chainlookupname = chainlookups[i]
+ local chainlookup = lookuptable[chainlookupname]
+ if not chainlookup then
+ -- okay, n matches, < n replacements
+ i = i + 1
+ else
+ local cp = chainmores[chainlookup.type]
+ if not cp then
+ -- actually an error
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ i = i + 1
+ else
+ local ok, n
+ head, start, ok, n = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
+ -- messy since last can be changed !
+ if ok then
+ done = true
+ -- skip next one(s) if ligature
+ i = i + (n or 1)
+ else
+ i = i + 1
+ end
+ end
+ end
+ if i > nofchainlookups then
+ break
+ elseif start then
+ start = getnext(start)
+ else
+ -- weird
+ end
+ end
+ end
+ else
+ local replacements = ck[7]
+ if replacements then
+ head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence
+ else
+ done = quit_on_no_replacement -- can be meant to be skipped / quite inconsistent in fonts
+ if trace_contexts then
+ logprocess("%s: skipping match",cref(kind,chainname))
+ end
+ end
+ end
+ end
+ end
+ return head, start, done
+end
+
+-- Because we want to keep this elsewhere (and because speed is less of an issue) we
+-- pass the font id so that the verbose variant can access the relevant helper tables.
+
+local verbose_handle_contextchain = function(font,...)
+ logwarning("no verbose handler installed, reverting to 'normal'")
+ otf.setcontextchain()
+ return normal_handle_contextchain(...)
+end
+
+otf.chainhandlers = {
+ normal = normal_handle_contextchain,
+ verbose = verbose_handle_contextchain,
+}
+
+function otf.setcontextchain(method)
+ if not method or method == "normal" or not otf.chainhandlers[method] then
+ if handlers.contextchain then -- no need for a message while making the format
+ logwarning("installing normal contextchain handler")
+ end
+ handlers.contextchain = normal_handle_contextchain
+ else
+ logwarning("installing contextchain handler %a",method)
+ local handler = otf.chainhandlers[method]
+ handlers.contextchain = function(...)
+ return handler(currentfont,...) -- hm, get rid of ...
+ end
+ end
+ handlers.gsub_context = handlers.contextchain
+ handlers.gsub_contextchain = handlers.contextchain
+ handlers.gsub_reversecontextchain = handlers.contextchain
+ handlers.gpos_contextchain = handlers.contextchain
+ handlers.gpos_context = handlers.contextchain
+end
+
+otf.setcontextchain()
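+
+-- A hedged sketch of how an extra handler could be hooked in (the handler name and
+-- the tracing below are invented for illustration); as noted above, a non-normal
+-- handler registered this way receives the font id as its first argument:
+--
+--   otf.chainhandlers.tracing = function(font,head,start,...)
+--       logwarning("contextchain run for font %a",font)
+--       return otf.chainhandlers.normal(head,start,...)
+--   end
+--
+--   otf.setcontextchain("tracing")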
+
+local missing = { } -- we only report once
+
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_process(...)
+end
+
+local logwarning = report_process
+
+local function report_missing_cache(typ,lookup)
+ local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end
+ local t = f[typ] if not t then t = { } f[typ] = t end
+ if not t[lookup] then
+ t[lookup] = true
+ logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname)
+ end
+end
+
+local resolved = { } -- we only resolve a font,script,language pair once
+
+-- todo: pass all these 'locals' in a table
+
+local lookuphashes = { }
+
+setmetatableindex(lookuphashes, function(t,font)
+ local lookuphash = fontdata[font].resources.lookuphash
+ if not lookuphash or not next(lookuphash) then
+ lookuphash = false
+ end
+ t[font] = lookuphash
+ return lookuphash
+end)
+
+-- fonts.hashes.lookups = lookuphashes
+
+local autofeatures = fonts.analyzers.features -- was: constants
+
+local function initialize(sequence,script,language,enabled)
+ local features = sequence.features
+ if features then
+ local order = sequence.order
+ if order then
+ for i=1,#order do --
+ local kind = order[i] --
+ local valid = enabled[kind]
+ if valid then
+ local scripts = features[kind] --
+ local languages = scripts[script] or scripts[wildcard]
+ if languages and (languages[language] or languages[wildcard]) then
+ return { valid, autofeatures[kind] or false, sequence.chain or 0, kind, sequence }
+ end
+ end
+ end
+ else
+ -- can't happen
+ end
+ end
+ return false
+end
+
+function otf.dataset(tfmdata,font) -- generic variant, overloaded in context
+ local shared = tfmdata.shared
+ local properties = tfmdata.properties
+ local language = properties.language or "dflt"
+ local script = properties.script or "dflt"
+ local enabled = shared.features
+ local res = resolved[font]
+ if not res then
+ res = { }
+ resolved[font] = res
+ end
+ local rs = res[script]
+ if not rs then
+ rs = { }
+ res[script] = rs
+ end
+ local rl = rs[language]
+ if not rl then
+ rl = {
+ -- indexed but we can also add specific data by key
+ }
+ rs[language] = rl
+ local sequences = tfmdata.resources.sequences
+ for s=1,#sequences do
+ local v = enabled and initialize(sequences[s],script,language,enabled)
+ if v then
+ rl[#rl+1] = v
+ end
+ end
+ end
+ return rl
+end
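+
+-- For reference, a hedged sketch of one resolved dataset entry as returned above
+-- (see 'initialize'; the concrete values are invented for illustration):
+--
+--   {
+--       true,     -- [1] feature value, used as 'featurevalue' by the processor
+--       false,    -- [2] analyzer attribute (or false when the feature has none)
+--       0,        -- [3] chain: 0 for normal lookups, negative for reverse chains
+--       "liga",   -- [4] the feature (kind) name
+--       sequence, -- [5] the sequence table itself (type, flags, subtables, ...)
+--   }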
+
+-- elseif id == glue_code then
+-- if p[5] then -- chain
+-- local pc = pp[32]
+-- if pc then
+-- start, ok = start, false -- p[1](start,kind,p[2],pc,p[3],p[4])
+-- if ok then
+-- done = true
+-- end
+-- if start then start = getnext(start) end
+-- else
+-- start = getnext(start)
+-- end
+-- else
+-- start = getnext(start)
+-- end
+
+-- there will be a new direction parser (pre-parsed etc)
+
+-- less bytecode: 290 -> 254
+--
+-- attr = attr or false
+--
+-- local a = getattr(start,0)
+-- if (a == attr and (not attribute or getprop(start,a_state) == attribute)) or (not attribute or getprop(start,a_state) == attribute) then
+-- -- the action
+-- end
+
+local function featuresprocessor(head,font,attr)
+
+ local lookuphash = lookuphashes[font] -- we can also check sequences here
+
+ if not lookuphash then
+ return head, false
+ end
+
+ head = tonut(head)
+
+ if trace_steps then
+ checkstep(head)
+ end
+
+ tfmdata = fontdata[font]
+ descriptions = tfmdata.descriptions
+ characters = tfmdata.characters
+ resources = tfmdata.resources
+
+ marks = resources.marks
+ anchorlookups = resources.lookup_to_anchor
+ lookuptable = resources.lookups
+ lookuptypes = resources.lookuptypes
+ lookuptags = resources.lookuptags
+
+ currentfont = font
+ rlmode = 0
+
+ local sequences = resources.sequences
+ local done = false
+ local datasets = otf.dataset(tfmdata,font,attr)
+
+ local dirstack = { } -- could move outside function
+
+    -- We could work on sub start-stop ranges instead but I wonder if there is that
+    -- much speed gain (experiments showed that it didn't make much difference) and
+    -- we need to keep track of directions anyway. Also at some point I want to play
+    -- with font interactions and then we do need the full sweeps.
+
+    -- Keeping track of the headnode is needed for devanagari (I generalized it a bit
+    -- so that multiple cases are also covered).
+
+ -- todo: retain prev
+
+ for s=1,#datasets do
+ local dataset = datasets[s]
+ featurevalue = dataset[1] -- todo: pass to function instead of using a global
+
+ local sequence = dataset[5] -- sequences[s] -- also dataset[5]
+ local rlparmode = 0
+ local topstack = 0
+ local success = false
+ local attribute = dataset[2]
+ local chain = dataset[3] -- sequence.chain or 0
+ local typ = sequence.type
+ local subtables = sequence.subtables
+ if chain < 0 then
+ -- this is a limited case, no special treatments like 'init' etc
+ local handler = handlers[typ]
+ -- we need to get rid of this slide! probably no longer needed in latest luatex
+ local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
+ while start do
+ local id = getid(start)
+ if id == glyph_code then
+ if getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = a == attr
+ else
+ a = true
+ end
+ if a then
+ for i=1,#subtables do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[getchar(start)]
+ if lookupmatch then
+ head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if success then
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start = getprev(start) end
+ else
+ start = getprev(start)
+ end
+ else
+ start = getprev(start)
+ end
+ else
+ start = getprev(start)
+ end
+ end
+ else
+ local handler = handlers[typ]
+ local ns = #subtables
+ local start = head -- local ?
+ rlmode = 0 -- to be checked ?
+ if ns == 1 then -- happens often
+ local lookupname = subtables[1]
+ local lookupcache = lookuphash[lookupname]
+ if not lookupcache then -- also check for empty cache
+ report_missing_cache(typ,lookupname)
+ else
+
+ local function subrun(start)
+ -- mostly for gsub, gpos would demand a more clever approach
+ local head = start
+ local done = false
+ while start do
+ local id = getid(start)
+ if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[getchar(start)]
+ if lookupmatch then
+                                        -- sequence can go (redundant argument)
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done = true
+ end
+ end
+ if start then start = getnext(start) end
+ else
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
+ end
+ end
+ if done then
+ success = true
+ return head
+ end
+ end
+
+ local function kerndisc(disc) -- we can assume that prev and next are glyphs
+ local prev = getprev(disc)
+ local next = getnext(disc)
+ if prev and next then
+ setfield(prev,"next",next)
+ -- setfield(next,"prev",prev)
+ local a = getattr(prev,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute)
+ else
+ a = not attribute or getprop(prev,a_state) == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[getchar(prev)]
+ if lookupmatch then
+                                    -- sequence can go (redundant argument)
+ local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ done = true
+ success = true
+ end
+ end
+ end
+ setfield(prev,"next",disc)
+ -- setfield(next,"prev",disc)
+ end
+ return next
+ end
+
+ while start do
+ local id = getid(start)
+ if id == glyph_code then
+ if getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ local lookupmatch = lookupcache[getchar(start)]
+ if lookupmatch then
+                                        -- sequence can go (redundant argument)
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ success = true
+ end
+ end
+ if start then start = getnext(start) end
+ else
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
+ end
+ elseif id == disc_code then
+ -- mostly for gsub
+ if getsubtype(start) == discretionary_code then
+ local pre = getfield(start,"pre")
+ if pre then
+ local new = subrun(pre)
+ if new then setfield(start,"pre",new) end
+ end
+ local post = getfield(start,"post")
+ if post then
+ local new = subrun(post)
+ if new then setfield(start,"post",new) end
+ end
+ local replace = getfield(start,"replace")
+ if replace then
+ local new = subrun(replace)
+ if new then setfield(start,"replace",new) end
+ end
+                            elseif typ == "gpos_single" or typ == "gpos_pair" then
+                                kerndisc(start)
+ end
+ start = getnext(start)
+ elseif id == whatsit_code then -- will be function
+ local subtype = getsubtype(start)
+ if subtype == dir_code then
+ local dir = getfield(start,"dir")
+ if dir == "+TRT" or dir == "+TLT" then
+ topstack = topstack + 1
+ dirstack[topstack] = dir
+ elseif dir == "-TRT" or dir == "-TLT" then
+ topstack = topstack - 1
+ end
+ local newdir = dirstack[topstack]
+ if newdir == "+TRT" then
+ rlmode = -1
+ elseif newdir == "+TLT" then
+ rlmode = 1
+ else
+ rlmode = rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype == localpar_code then
+ local dir = getfield(start,"dir")
+ if dir == "TRT" then
+ rlparmode = -1
+ elseif dir == "TLT" then
+ rlparmode = 1
+ else
+ rlparmode = 0
+ end
+ -- one might wonder if the par dir should be looked at, so we might as well drop the next line
+ rlmode = rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start = getnext(start)
+ elseif id == math_code then
+ start = getnext(end_of_math(start))
+ else
+ start = getnext(start)
+ end
+ end
+ end
+ else
+
+ local function subrun(start)
+ -- mostly for gsub, gpos would demand a more clever approach
+ local head = start
+ local done = false
+ while start do
+ local id = getid(start)
+ if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[getchar(start)]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done = true
+ break
+ elseif not start then
+ -- don't ask why ... shouldn't happen
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start = getnext(start) end
+ else
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
+ end
+ end
+ if done then
+ success = true
+ return head
+ end
+ end
+
+ local function kerndisc(disc) -- we can assume that prev and next are glyphs
+ local prev = getprev(disc)
+ local next = getnext(disc)
+ if prev and next then
+ setfield(prev,"next",next)
+ -- setfield(next,"prev",prev)
+ local a = getattr(prev,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute)
+ else
+ a = not attribute or getprop(prev,a_state) == attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[getchar(prev)]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local h, d, ok = handler(head,prev,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ done = true
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ end
+ setfield(prev,"next",disc)
+ -- setfield(next,"prev",disc)
+ end
+ return next
+ end
+
+ while start do
+ local id = getid(start)
+ if id == glyph_code then
+ if getfont(start) == font and getsubtype(start) < 256 then
+ local a = getattr(start,0)
+ if a then
+ a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ else
+ a = not attribute or getprop(start,a_state) == attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname = subtables[i]
+ local lookupcache = lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch = lookupcache[getchar(start)]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ success = true
+ break
+ elseif not start then
+ -- don't ask why ... shouldn't happen
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start = getnext(start) end
+ else
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
+ end
+ elseif id == disc_code then
+ -- mostly for gsub
+ if getsubtype(start) == discretionary_code then
+ local pre = getfield(start,"pre")
+ if pre then
+ local new = subrun(pre)
+ if new then setfield(start,"pre",new) end
+ end
+ local post = getfield(start,"post")
+ if post then
+ local new = subrun(post)
+ if new then setfield(start,"post",new) end
+ end
+ local replace = getfield(start,"replace")
+ if replace then
+ local new = subrun(replace)
+ if new then setfield(start,"replace",new) end
+ end
+                            elseif typ == "gpos_single" or typ == "gpos_pair" then
+                                kerndisc(start)
+ end
+ start = getnext(start)
+ elseif id == whatsit_code then
+ local subtype = getsubtype(start)
+ if subtype == dir_code then
+ local dir = getfield(start,"dir")
+ if dir == "+TRT" or dir == "+TLT" then
+ topstack = topstack + 1
+ dirstack[topstack] = dir
+ elseif dir == "-TRT" or dir == "-TLT" then
+ topstack = topstack - 1
+ end
+ local newdir = dirstack[topstack]
+ if newdir == "+TRT" then
+ rlmode = -1
+ elseif newdir == "+TLT" then
+ rlmode = 1
+ else
+ rlmode = rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype == localpar_code then
+ local dir = getfield(start,"dir")
+ if dir == "TRT" then
+ rlparmode = -1
+ elseif dir == "TLT" then
+ rlparmode = 1
+ else
+ rlparmode = 0
+ end
+ rlmode = rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start = getnext(start)
+ elseif id == math_code then
+ start = getnext(end_of_math(start))
+ else
+ start = getnext(start)
+ end
+ end
+ end
+ end
+ if success then
+ done = true
+ end
+ if trace_steps then -- ?
+ registerstep(head)
+ end
+
+ end
+
+ head = tonode(head)
+
+ return head, done
+end
+
+local function generic(lookupdata,lookupname,unicode,lookuphash)
+ local target = lookuphash[lookupname]
+ if target then
+ target[unicode] = lookupdata
+ else
+ lookuphash[lookupname] = { [unicode] = lookupdata }
+ end
+end
+
+local action = {
+
+ substitution = generic,
+ multiple = generic,
+ alternate = generic,
+ position = generic,
+
+ ligature = function(lookupdata,lookupname,unicode,lookuphash)
+ local target = lookuphash[lookupname]
+ if not target then
+ target = { }
+ lookuphash[lookupname] = target
+ end
+ for i=1,#lookupdata do
+ local li = lookupdata[i]
+ local tu = target[li]
+ if not tu then
+ tu = { }
+ target[li] = tu
+ end
+ target = tu
+ end
+ target.ligature = unicode
+ end,
+
+ pair = function(lookupdata,lookupname,unicode,lookuphash)
+ local target = lookuphash[lookupname]
+ if not target then
+ target = { }
+ lookuphash[lookupname] = target
+ end
+ local others = target[unicode]
+ local paired = lookupdata[1]
+ if others then
+ others[paired] = lookupdata
+ else
+ others = { [paired] = lookupdata }
+ target[unicode] = others
+ end
+ end,
+
+}
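+
+-- A small sketch of the tree that the 'ligature' action above builds (lookup name
+-- and unicodes invented for illustration): for a lookup that turns f + i into the
+-- fi ligature, the per-lookup hash is keyed by the component characters:
+--
+--   lookuphash["my_ligature_lookup"] = {
+--       [0x66] = {                 -- f
+--           [0x69] = {             -- i
+--               ligature = 0xFB01, -- the fi ligature glyph
+--           },
+--       },
+--   }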
+
+local function prepare_lookups(tfmdata)
+
+ local rawdata = tfmdata.shared.rawdata
+ local resources = rawdata.resources
+ local lookuphash = resources.lookuphash
+ local anchor_to_lookup = resources.anchor_to_lookup
+ local lookup_to_anchor = resources.lookup_to_anchor
+ local lookuptypes = resources.lookuptypes
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+
+    -- we cannot free the entries in the descriptions as sometimes we access
+    -- them directly (for instance anchors) ... selectively freeing doesn't save
+    -- much memory as it's only a reference to a table and the slot in the
+    -- description hash is not freed anyway
+
+ for unicode, character in next, characters do -- we cannot loop over descriptions !
+
+ local description = descriptions[unicode]
+
+ if description then
+
+ local lookups = description.slookups
+ if lookups then
+ for lookupname, lookupdata in next, lookups do
+ action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+
+ local lookups = description.mlookups
+ if lookups then
+ for lookupname, lookuplist in next, lookups do
+ local lookuptype = lookuptypes[lookupname]
+ for l=1,#lookuplist do
+ local lookupdata = lookuplist[l]
+ action[lookuptype](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+ end
+
+ local list = description.kerns
+ if list then
+ for lookup, krn in next, list do -- ref to glyph, saves lookup
+ local target = lookuphash[lookup]
+ if target then
+ target[unicode] = krn
+ else
+ lookuphash[lookup] = { [unicode] = krn }
+ end
+ end
+ end
+
+ local list = description.anchors
+ if list then
+ for typ, anchors in next, list do -- types
+ if typ == "mark" or typ == "cexit" then -- or entry?
+ for name, anchor in next, anchors do
+ local lookups = anchor_to_lookup[name]
+ if lookups then
+ for lookup, _ in next, lookups do
+ local target = lookuphash[lookup]
+ if target then
+ target[unicode] = anchors
+ else
+ lookuphash[lookup] = { [unicode] = anchors }
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ end
+
+ end
+
+end
+
+local function split(replacement,original)
+ local result = { }
+ for i=1,#replacement do
+ result[original[i]] = replacement[i]
+ end
+ return result
+end
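+
+-- For example (unicodes invented for illustration), split({ 0x61, 0x62 },
+-- { 0x41, 0x42 }) gives { [0x41] = 0x61, [0x42] = 0x62 }: the replacement glyph
+-- indexed by the original one, which is presumably what the reverse chain
+-- substitution code expects.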
+
+local valid = {
+ coverage = { chainsub = true, chainpos = true, contextsub = true },
+ reversecoverage = { reversesub = true },
+ glyphs = { chainsub = true, chainpos = true },
+}
+
+local function prepare_contextchains(tfmdata)
+ local rawdata = tfmdata.shared.rawdata
+ local resources = rawdata.resources
+ local lookuphash = resources.lookuphash
+ local lookuptags = resources.lookuptags
+ local lookups = rawdata.lookups
+ if lookups then
+ for lookupname, lookupdata in next, rawdata.lookups do
+ local lookuptype = lookupdata.type
+ if lookuptype then
+ local rules = lookupdata.rules
+ if rules then
+ local format = lookupdata.format
+ local validformat = valid[format]
+ if not validformat then
+ report_prepare("unsupported format %a",format)
+ elseif not validformat[lookuptype] then
+ -- todo: dejavu-serif has one (but i need to see what use it has)
+ report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname])
+ else
+ local contexts = lookuphash[lookupname]
+ if not contexts then
+ contexts = { }
+ lookuphash[lookupname] = contexts
+ end
+ local t, nt = { }, 0
+ for nofrules=1,#rules do
+ local rule = rules[nofrules]
+ local current = rule.current
+ local before = rule.before
+ local after = rule.after
+ local replacements = rule.replacements
+ local sequence = { }
+ local nofsequences = 0
+                            -- Eventually we can store start, stop and sequence in the cached
+                            -- file, but then less sharing takes place, so best not to do that
+                            -- without a lot of profiling; let's forget about it for now.
+ if before then
+ for n=1,#before do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = before[n]
+ end
+ end
+ local start = nofsequences + 1
+ for n=1,#current do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = current[n]
+ end
+ local stop = nofsequences
+ if after then
+ for n=1,#after do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = after[n]
+ end
+ end
+ if sequence[1] then
+ -- Replacements only happen with reverse lookups as they are single only. We
+ -- could pack them into current (replacement value instead of true) and then
+ -- use sequence[start] instead but it's somewhat ugly.
+ nt = nt + 1
+ t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
+ for unic, _ in next, sequence[start] do
+ local cu = contexts[unic]
+ if not cu then
+ contexts[unic] = t
+ end
+ end
+ end
+ end
+ end
+ else
+ -- no rules
+ end
+ else
+ report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname])
+ end
+ end
+ end
+end
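+
+-- The tables stored above are the 'ck' tuples that normal_handle_contextchain picks
+-- up per character; a summary of the indices as they are built here:
+--
+--   ck[1] : rule number within the lookup        (nofrules)
+--   ck[2] : lookuptype                           ("chainsub", "chainpos", ...)
+--   ck[3] : the full before..current..after sequence of covered character sets
+--   ck[4] : index of the first 'current' element (f)
+--   ck[5] : index of the last  'current' element (l)
+--   ck[6] : the chained lookups                  (rule.lookups)
+--   ck[7] : replacements                         (reverse coverage only)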
+
+-- we can consider lookuphash == false (initialized but empty) vs lookuphash == table
+
+local function featuresinitializer(tfmdata,value)
+ if true then -- value then
+ -- beware we need to use the topmost properties table
+ local rawdata = tfmdata.shared.rawdata
+ local properties = rawdata.properties
+ if not properties.initialized then
+ local starttime = trace_preparing and os.clock()
+ local resources = rawdata.resources
+ resources.lookuphash = resources.lookuphash or { }
+ prepare_contextchains(tfmdata)
+ prepare_lookups(tfmdata)
+ properties.initialized = true
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname)
+ end
+ end
+ end
+end
+
+registerotffeature {
+ name = "features",
+ description = "features",
+ default = true,
+ initializers = {
+ position = 1,
+ node = featuresinitializer,
+ },
+ processors = {
+ node = featuresprocessor,
+ }
+}
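+
+-- A hedged usage note: with this feature registered, a font that is loaded in node
+-- mode, for instance with a request along the lines of
+--
+--   \font\test = file:somefont.otf:mode=node;+liga;+kern at 10pt
+--
+-- gets 'featuresinitializer' run once at definition time (preparing the lookup
+-- hashes) and 'featuresprocessor' applied to its node lists at typesetting time.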
+
+-- This can be used for extra handlers, but should be used with care!
+
+otf.handlers = handlers
diff --git a/tex/generic/context/luatex/luatex-fonts.lua b/tex/generic/context/luatex/luatex-fonts.lua
index 7995be33e..c81e8cd1a 100644
--- a/tex/generic/context/luatex/luatex-fonts.lua
+++ b/tex/generic/context/luatex/luatex-fonts.lua
@@ -27,6 +27,17 @@ if not modules then modules = { } end modules ['luatex-fonts'] = {
-- also add more helper code here, but that depends on to what extent metatex (sidetrack of context)
-- evolves into a low level layer (depends on time, as usual).
+texio.write_nl("")
+texio.write_nl("--------------------------------------------------------------------------------")
+texio.write_nl("The font code has been brought in sync with the context version of 2014.12.21 so")
+texio.write_nl("if things don't work out as expected the interfacing needs to be checked. When")
+texio.write_nl("this works as expected a second upgrade will happen that gives a more complete")
+texio.write_nl("support and another sync with the context code (that new code is currently being")
+texio.write_nl("tested. The base pass is now integrated in the main pass. The results can differ")
+texio.write_nl("from those in context because there we integrate some mechanisms differently.")
+texio.write_nl("--------------------------------------------------------------------------------")
+texio.write_nl("")
+
utf = utf or unicode.utf8
-- We have some (global) hooks (for latex):
@@ -210,12 +221,12 @@ if non_generic_context.luatex_fonts.skip_loading ~= true then
loadmodule('font-oti.lua')
loadmodule('font-otf.lua')
loadmodule('font-otb.lua')
- loadmodule('node-inj.lua') -- will be replaced (luatex >= .70)
- loadmodule('font-ota.lua')
- loadmodule('font-otn.lua')
- loadmodule('font-otp.lua') -- optional
+ loadmodule('luatex-fonts-inj.lua')
+ loadmodule('luatex-fonts-ota.lua')
+ loadmodule('luatex-fonts-otn.lua')
+ loadmodule('font-otp.lua')
loadmodule('luatex-fonts-lua.lua')
- loadmodule('font-def.lua')
+ loadmodule('font-def.lua') -- this code (stripped) might end up in luatex-fonts-def.lua
loadmodule('luatex-fonts-def.lua')
loadmodule('luatex-fonts-ext.lua') -- some extensions
diff --git a/tex/generic/context/luatex/luatex-math.tex b/tex/generic/context/luatex/luatex-math.tex
index ab304b974..604b4a1f8 100644
--- a/tex/generic/context/luatex/luatex-math.tex
+++ b/tex/generic/context/luatex/luatex-math.tex
@@ -19,15 +19,6 @@
% a bunch of fonts:
-\font\tenrm = file:lmroman10-regular.otf:+liga;+kern;+tlig;+trep at 10pt
-\font\sevenrm = file:lmroman7-regular.otf:+liga;+kern;+tlig;+trep at 7pt
-\font\fiverm = file:lmroman5-regular.otf:+liga;+kern;+tlig;+trep at 5pt
-
-\font\tentt = file:lmmono10-regular.otf at 10pt
-\font\tensl = file:lmromanslant10-regular.otf:+liga;+kern;+tlig;+trep at 10pt
-\font\tenit = file:lmroman10-italic.otf:+liga;+kern;+tlig;+trep at 10pt
-\font\tenbi = file:lmroman10-bolditalic.otf:+liga;+kern;+tlig;+trep at 10pt
-
\let \teni = \relax
\let \seveni = \relax
\let \fivei = \relax
@@ -35,19 +26,58 @@
\let \sevensy = \relax
\let \fivesy = \relax
\let \tenex = \relax
-\let \tenbf = \relax
\let \sevenbf = \relax
\let \fivebf = \relax
-\tenrm
+\def\latinmodern
+ {\font\tenrm = file:lmroman10-regular.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\sevenrm = file:lmroman7-regular.otf:+liga;+kern;+tlig;+trep at 7pt
+ \font\fiverm = file:lmroman5-regular.otf:+liga;+kern;+tlig;+trep at 5pt
+ %
+ \font\tentt = file:lmmono10-regular.otf at 10pt
+ \font\tensl = file:lmromanslant10-regular.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\tenit = file:lmroman10-italic.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\tenbf = file:lmroman10-bold.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\tenbi = file:lmroman10-bolditalic.otf:+liga;+kern;+tlig;+trep at 10pt
+ %
+ \font\mathfonttextupright = file:latinmodern-math.otf:ssty=0;fixmath=yes at 10pt
+ \font\mathfontscriptupright = file:latinmodern-math.otf:ssty=1;fixmath=yes at 7pt
+ \font\mathfontscriptscriptupright = file:latinmodern-math.otf:ssty=2;fixmath=yes at 5pt
+ %
+ \textfont 0 = \mathfonttextupright
+ \scriptfont 0 = \mathfontscriptupright
+ \scriptscriptfont 0 = \mathfontscriptscriptupright
+ %
+ \tenrm}
-\font\mathfonttextupright = file:latinmodern-math.otf:ssty=0;fixmath=yes at 10pt
-\font\mathfontscriptupright = file:latinmodern-math.otf:ssty=1;fixmath=yes at 7pt
-\font\mathfontscriptscriptupright = file:latinmodern-math.otf:ssty=2;fixmath=yes at 5pt
+\def\lucidabright
+ {\font\tenrm = file:lucidabrightot.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\sevenrm = file:lucidabrightot.otf:+liga;+kern;+tlig;+trep at 7pt
+ \font\fiverm = file:lucidabrightot.otf:+liga;+kern;+tlig;+trep at 5pt
+ %
+ \font\tentt = file:lucidabrightot.otf at 10pt
+   \font\tensl = file:lucidabrightot.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\tenit = file:lucidabrightot-italic.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\tenbf = file:lucidabrightot-demi.otf:+liga;+kern;+tlig;+trep at 10pt
+ \font\tenbi = file:lucidabrightot-demiitalic.otf:+liga;+kern;+tlig;+trep at 10pt
+ %
+ \font\mathfonttextupright = file:lucidabrightmathot.otf:ssty=0;fixmath=yes at 10pt
+ \font\mathfontscriptupright = file:lucidabrightmathot.otf:ssty=1;fixmath=yes at 7pt
+ \font\mathfontscriptscriptupright = file:lucidabrightmathot.otf:ssty=2;fixmath=yes at 5pt
+ %
+ \textfont 0 = \mathfonttextupright
+ \scriptfont 0 = \mathfontscriptupright
+ \scriptscriptfont 0 = \mathfontscriptscriptupright
+ %
+ \tenrm}
-\textfont 0 = \mathfonttextupright
-\scriptfont 0 = \mathfontscriptupright
-\scriptscriptfont 0 = \mathfontscriptscriptupright
+\directlua {
+ if arguments["mtx:lucidabright"] then
+ tex.print("\string\\lucidabright")
+ else
+ tex.print("\string\\latinmodern")
+ end
+}
\newtoks\everymathrm
\newtoks\everymathmit
@@ -58,12 +88,12 @@
\newtoks\everymathbi
\newtoks\everymathtt
-\def\rm{\fam0\relax\the\everymathmrm\relax\tenrm\relax}
-\def\it{\fam0\relax\the\everymathit \relax\tenit\relax}
-\def\sl{\fam0\relax\the\everymathsl \relax\tensl\relax}
-\def\bf{\fam0\relax\the\everymathbf \relax\tenbf\relax}
-\def\bi{\fam0\relax\the\everymathbi \relax\tenbi\relax}
-\def\tt{\fam0\relax\the\everymathtt \relax\tentt\relax}
+\def\rm{\fam0\relax\the\everymathrm\relax\tenrm\relax}
+\def\it{\fam0\relax\the\everymathit\relax\tenit\relax}
+\def\sl{\fam0\relax\the\everymathsl\relax\tensl\relax}
+\def\bf{\fam0\relax\the\everymathbf\relax\tenbf\relax}
+\def\bi{\fam0\relax\the\everymathbi\relax\tenbi\relax}
+\def\tt{\fam0\relax\the\everymathtt\relax\tentt\relax}
\let\mit \relax % use names or \Uchar or define a vector
\let\cal \relax % idem, i'm not in the mood for this now
@@ -1799,7 +1829,8 @@
% a few definitions:
-\def\sqrt{\Uroot "0 "221A }
+\def\sqrt {\Uroot "0 "221A{}}
+\def\root#1\of{\Uroot "0 "221A{#1}}
% \skewchar\teni='177 \skewchar\seveni='177 \skewchar\fivei='177
% \skewchar\tensy='60 \skewchar\sevensy='60 \skewchar\fivesy='60
diff --git a/tex/generic/context/luatex/luatex-mplib.tex b/tex/generic/context/luatex/luatex-mplib.tex
index 8af9f2d8a..09dd179f3 100644
--- a/tex/generic/context/luatex/luatex-mplib.tex
+++ b/tex/generic/context/luatex/luatex-mplib.tex
@@ -61,6 +61,7 @@
%D Now load the needed \LUA\ code.
\directlua{dofile(kpse.find_file('luatex-mplib.lua'))}
+% \directlua{dofile(resolvers.findfile('luatex-mplib.lua'))}
%D The following code takes care of encapsulating the literals:
diff --git a/tex/generic/context/luatex/luatex-plain.tex b/tex/generic/context/luatex/luatex-plain.tex
index 1ea8558e9..c9a9e36cf 100644
--- a/tex/generic/context/luatex/luatex-plain.tex
+++ b/tex/generic/context/luatex/luatex-plain.tex
@@ -20,6 +20,7 @@
\input {luatex-math}%
\input {luatex-languages}%
\input {luatex-mplib}%
+ % \input {luatex-gadgets}%
}
\edef\fmtversion{\fmtversion+luatex}
diff --git a/tex/generic/context/luatex/luatex-test.tex b/tex/generic/context/luatex/luatex-test.tex
index fbf8ce3cf..6f48e0ced 100644
--- a/tex/generic/context/luatex/luatex-test.tex
+++ b/tex/generic/context/luatex/luatex-test.tex
@@ -35,14 +35,16 @@
\font\gothic=msgothic(ms-gothic) {\gothic whatever}
-\font\testy=file:IranNastaliq.ttf:mode=node;script=arab;language=dflt;+calt;+ccmp;+init;+isol;+medi;+fina;+liga;+rlig;+kern;+mark;+mkmk at 14pt
-\testy این یک متن نمونه است با قلم ذر که درست آمده است.
+\bgroup
-\pdfprotrudechars2 \pdfadjustspacing2
+ \pdfprotrudechars2
+ \pdfadjustspacing2
-\font\testb=file:lmroman12-regular:+liga;extend=1.5 at 12pt \testb \input tufte \par
-\font\testb=file:lmroman12-regular:+liga;slant=0.8 at 12pt \testb \input tufte \par
-\font\testb=file:lmroman12-regular:+liga;protrusion=default at 12pt \testb \input tufte \par
+ \font\testb=file:lmroman12-regular:+liga;extend=1.5 at 12pt \testb \input tufte \par
+ \font\testb=file:lmroman12-regular:+liga;slant=0.8 at 12pt \testb \input tufte \par
+ \font\testb=file:lmroman12-regular:+liga;protrusion=default at 12pt \testb \input tufte \par
+
+\egroup
\setmplibformat{plain}
@@ -64,13 +66,12 @@
\font\test=dejavuserif:+kern at 10pt \test
-\hsize 1mm
-\noindent Циолковский
+\bgroup \hsize 1mm \noindent Циолковский \par \egroup
\loadpatterns{ru}
-\noindent Циолковский
+\bgroup \hsize 1mm \noindent Циолковский \par \egroup
a bit of math
@@ -84,4 +85,30 @@ $$\sqrt {2} { { {1} \over { {1} \over {x} } } } $$
\cows Hello World!
+% math test
+
+\latinmodern
+
+\def\sqrt{\Uroot "0 "221A{}}
+
+\def\root#1\of{\Uroot "0 "221A{#1}}
+
+Inline $\sqrt{x}{1.2}$ math. % same for $\root n of x$
+
+$\root3\of x$
+
+$\sin{x}$
+
+\lucidabright
+
+\def\sqrt{\Uroot "0 "221A{}}
+
+\def\root#1\of{\Uroot "0 "221A{#1}}
+
+Inline $\sqrt{x}{1.2}$ math. % same for $\root n of x$
+
+$\root3\of x$
+
+$\sin{x}$
+
\end