author     Hans Hagen <pragma@wxs.nl>  2014-01-14 15:03:00 +0100
committer  Hans Hagen <pragma@wxs.nl>  2014-01-14 15:03:00 +0100
commit     9e6ed699c77bb2d08ab2c294fdb644046da2a6e8 (patch)
tree       dc994a7080f954e9c5fc1269c8529db754f42805 /tex
parent     c346c3825d2d63e307b0d9bb5c548b96c25c38d9 (diff)
download   context-9e6ed699c77bb2d08ab2c294fdb644046da2a6e8.tar.gz
beta 2014.01.14 15:03
Diffstat (limited to 'tex')
-rw-r--r--  tex/context/base/bibl-tra.lua                       |    2
-rw-r--r--  tex/context/base/context-version.pdf                |  bin 4086 -> 4085 bytes
-rw-r--r--  tex/context/base/context.mkiv                       |   13
-rw-r--r--  tex/context/base/font-map.lua                       |   30
-rw-r--r--  tex/context/base/m-oldbibtex.mkiv                   |   16
-rw-r--r--  tex/context/base/mult-fun.lua                       |    2
-rw-r--r--  tex/context/base/publ-aut.lua                       |  465
-rw-r--r--  tex/context/base/publ-dat.lua                       |  510
-rw-r--r--  tex/context/base/publ-imp-apa.mkiv                  |  547
-rw-r--r--  tex/context/base/publ-imp-cite.mkiv                 |   74
-rw-r--r--  tex/context/base/publ-imp-commands.mkiv             |   15
-rw-r--r--  tex/context/base/publ-imp-definitions.mkiv          |   68
-rw-r--r--  tex/context/base/publ-ini.lua                       | 1222
-rw-r--r--  tex/context/base/publ-ini.mkiv                      |  849
-rw-r--r--  tex/context/base/publ-oth.lua                       |  146
-rw-r--r--  tex/context/base/publ-tra.lua                       |  130
-rw-r--r--  tex/context/base/publ-tra.mkiv                      |   26
-rw-r--r--  tex/context/base/publ-usr.lua                       |   91
-rw-r--r--  tex/context/base/publ-usr.mkiv                      |    2
-rw-r--r--  tex/context/base/publ-xml.mkiv                      |  114
-rw-r--r--  tex/context/base/s-abr-01.tex                       |    1
-rw-r--r--  tex/context/base/status-files.pdf                   |  bin 24538 -> 24760 bytes
-rw-r--r--  tex/context/base/status-lua.pdf                     |  bin 228348 -> 229094 bytes
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua  |    2
24 files changed, 4301 insertions, 24 deletions
diff --git a/tex/context/base/bibl-tra.lua b/tex/context/base/bibl-tra.lua
index 82f8dc2aa..75dc3e86f 100644
--- a/tex/context/base/bibl-tra.lua
+++ b/tex/context/base/bibl-tra.lua
@@ -10,7 +10,7 @@ if not modules then modules = { } end modules ['bibl-tra'] = {
-- temporary hack, needed for transition
-if not punlications then
+if not publications then
local hacks = utilities.storage.allocate()
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf
index c54d5b971..4e4621e23 100644
--- a/tex/context/base/context-version.pdf
+++ b/tex/context/base/context-version.pdf
Binary files differ
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index ccb237732..847290b0f 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -28,7 +28,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2014.01.11 23:58}
+\edef\contextversion{2014.01.14 15:03}
\edef\contextkind {beta}
%D For those who want to use this:
@@ -479,14 +479,15 @@
% old bibtex support: (will be m-oldbibtex.mkiv)
-\loadmarkfile{bibl-bib}
-\loadmarkfile{bibl-tra}
+% \loadmarkfile{bibl-bib}
+% \loadmarkfile{bibl-tra}
% new bibtex support:
-% \loadmarkfile{publ-ini}
-% \loadmarkfile{publ-tra}
-% \loadmarkfile{publ-xml}
+\loadmarkfile{publ-ini}
+\loadmarkfile{publ-tra}
+\loadmarkfile{publ-xml}
+\loadmarkfile{publ-old}
%loadmarkfile{x-xtag} % no longer preloaded
diff --git a/tex/context/base/font-map.lua b/tex/context/base/font-map.lua
index ce724b973..f74e13e81 100644
--- a/tex/context/base/font-map.lua
+++ b/tex/context/base/font-map.lua
@@ -66,21 +66,6 @@ local function makenameparser(str)
end
end
--- local parser = makenameparser("Japan1")
--- local parser = makenameparser()
--- local function test(str)
--- local b, a = lpegmatch(parser,str)
--- print((a and table.serialize(b)) or b)
--- end
--- test("a.sc")
--- test("a")
--- test("uni1234")
--- test("uni1234.xx")
--- test("uni12349876")
--- test("u123400987600")
--- test("index1234")
--- test("Japan1.123")
-
local function tounicode16(unicode,name)
if unicode < 0x10000 then
return format("%04X",unicode)
@@ -346,3 +331,18 @@ function mappings.addtounicode(data,filename)
report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns)
end
end
+
+-- local parser = makenameparser("Japan1")
+-- local parser = makenameparser()
+-- local function test(str)
+-- local b, a = lpegmatch(parser,str)
+-- print((a and table.serialize(b)) or b)
+-- end
+-- test("a.sc")
+-- test("a")
+-- test("uni1234")
+-- test("uni1234.xx")
+-- test("uni12349876")
+-- test("u123400987600")
+-- test("index1234")
+-- test("Japan1.123")
diff --git a/tex/context/base/m-oldbibtex.mkiv b/tex/context/base/m-oldbibtex.mkiv
new file mode 100644
index 000000000..08c23e7cc
--- /dev/null
+++ b/tex/context/base/m-oldbibtex.mkiv
@@ -0,0 +1,16 @@
+%D \module
+%D [ file=m-oldbibtex,
+%D version=2013.12.12, % based on bibl-apa.tex and later xml variant
+%D title=Fallback on old method,
+%D subtitle=Publications,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is therefore copyrighted
+%D by \PRAGMA. See mreadme.pdf for details.
+
+\loadmarkfile{bibl-bib}
+\loadmarkfile{bibl-tra}
+
+\endinput
diff --git a/tex/context/base/mult-fun.lua b/tex/context/base/mult-fun.lua
index 2101b95e9..0f5bd8ace 100644
--- a/tex/context/base/mult-fun.lua
+++ b/tex/context/base/mult-fun.lua
@@ -17,7 +17,7 @@ return {
--
"sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian",
"tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos",
- "invsin", "invcos", "acosh", "asinh", "sinh", "cosh",
+ "invsin", "invcos", "invtan", "acosh", "asinh", "sinh", "cosh",
"paired", "tripled",
"unitcircle", "fulldiamond", "unitdiamond", "fullsquare",
-- "halfcircle", "quartercircle",
diff --git a/tex/context/base/publ-aut.lua b/tex/context/base/publ-aut.lua
new file mode 100644
index 000000000..57abd3f32
--- /dev/null
+++ b/tex/context/base/publ-aut.lua
@@ -0,0 +1,465 @@
+if not modules then modules = { } end modules ['publ-aut'] = {
+ version = 1.001,
+ comment = "this module part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if not characters then
+ dofile(resolvers.findfile("char-def.lua"))
+ dofile(resolvers.findfile("char-ini.lua"))
+end
+
+local chardata = characters.data
+
+local concat = table.concat
+local lpeg = lpeg
+local utfchar = utf.char
+
+local publications = publications or { }
+
+local datasets = publications.datasets or { }
+publications.datasets = datasets
+
+publications.authors = publications.authors or { }
+local authors = publications.authors
+
+local P, C, V, Cs, Ct, lpegmatch, lpegpatterns = lpeg.P, lpeg.C, lpeg.V, lpeg.Cs, lpeg.Ct, lpeg.match, lpeg.patterns
+
+-- local function makesplitter(separator)
+-- return Ct { "start",
+-- start = (Cs((V("outer") + (1-separator))^1) + separator^1)^1,
+-- start = Cs(V("outer")) + (Cs((V("inner") + (1-separator))^1) + separator^1)^1,
+-- outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^0) * (P("}")/""),
+-- inner = P("{") * ((V("inner") + P(1-P("}")))^0) * P("}"),
+-- }
+-- end
+
+local space = P(" ")
+local comma = P(",")
+local firstcharacter = lpegpatterns.utf8byte
+
+-- local andsplitter = lpeg.tsplitat(space^1 * "and" * space^1)
+-- local commasplitter = lpeg.tsplitat(space^0 * comma * space^0)
+-- local spacesplitter = lpeg.tsplitat(space^1)
+
+local p_and = space^1 * "and" * space^1
+local p_comma = space^0 * comma * space^0
+local p_space = space^1
+
+local andsplitter = Ct { "start",
+ start = (Cs((V("inner") + (1-p_and))^1) + p_and)^1,
+ inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
+}
+
+local commasplitter = Ct { "start",
+ start = Cs(V("outer")) + (Cs((V("inner") + (1-p_comma))^1) + p_comma)^1,
+ outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^1) * (P("}")/""),
+ inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
+}
+
+local spacesplitter = Ct { "start",
+ start = Cs(V("outer")) + (Cs((V("inner") + (1-p_space))^1) + p_space)^1,
+ outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^1) * (P("}")/""),
+ inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
+}
+
+local function is_upper(str)
+ local first = lpegmatch(firstcharacter,str)
+ local okay = chardata[first]
+ return okay and okay.category == "lu"
+end
+
+local cache = { } -- 33% reuse on tugboat.bib
+local nofhits = 0
+local nofused = 0
+
+local function splitauthorstring(str)
+ if not str then
+ return
+ end
+ nofused = nofused + 1
+ local authors = cache[str]
+ if authors then
+ -- hit 1
+ -- print("hit 1",author,nofhits,nofused,math.round(100*nofhits/nofused))
+ return { authors } -- we assume one author
+ end
+ local authors = lpegmatch(andsplitter,str)
+ for i=1,#authors do
+ local author = authors[i]
+ local detail = cache[author]
+ if detail then
+ -- hit 2
+ -- print("hit 2",author,nofhits,nofused,math.round(100*nofhits/nofused))
+ end
+ if not detail then
+ local firstnames, vons, surnames, initials, juniors, words
+ local split = lpegmatch(commasplitter,author)
+ local n = #split
+ if n == 1 then
+ -- First von Last
+ words = lpegmatch(spacesplitter,author)
+-- inspect(words)
+ firstnames, vons, surnames = { }, { }, { }
+ local i, n = 1, #words
+ while i <= n do
+ local w = words[i]
+ if is_upper(w) then
+ firstnames[#firstnames+1], i = w, i + 1
+ else
+ break
+ end
+ end
+ while i <= n do
+ local w = words[i]
+ if is_upper(w) then
+ break
+ else
+ vons[#vons+1], i = w, i + 1
+ end
+ end
+ if i < n then
+ while i <= n do
+ surnames[#surnames+1], i = words[i], i + 1
+ end
+ elseif #vons == 0 then
+ surnames[1] = firstnames[#firstnames]
+ firstnames[#firstnames] = nil
+ else
+ -- mess
+ end
+ -- safeguard
+ if #surnames == 0 then
+ firstnames = { }
+ vons = { }
+ surnames = { author }
+ end
+ elseif n == 2 then
+ -- von Last, First
+ words = lpegmatch(spacesplitter,split[2])
+ surnames = lpegmatch(spacesplitter,split[1])
+ firstnames, vons = { }, { }
+ local i, n = 1, #words
+ while i <= n do
+ local w = words[i]
+ if is_upper(w) then
+ firstnames[#firstnames+1], i = w, i + 1
+ else
+ break
+ end
+ end
+ while i <= n do
+ vons[#vons+1], i = words[i], i + 1
+ end
+ else
+ -- von Last, Jr ,First
+ firstnames = lpegmatch(spacesplitter,split[1])
+ juniors = lpegmatch(spacesplitter,split[2])
+ surnames = lpegmatch(spacesplitter,split[3])
+ if n > 3 then
+ -- error
+ end
+ end
+ if #surnames == 0 then
+ surnames[1] = firstnames[#firstnames]
+ firstnames[#firstnames] = nil
+ end
+ if firstnames then
+ initials = { }
+ for i=1,#firstnames do
+ initials[i] = utfchar(lpegmatch(firstcharacter,firstnames[i]))
+ end
+ end
+ detail = {
+ original = author,
+ firstnames = firstnames,
+ vons = vons,
+ surnames = surnames,
+ initials = initials,
+ juniors = juniors,
+ }
+ cache[author] = detail
+ nofhits = nofhits + 1
+ end
+ authors[i] = detail
+ end
+ return authors
+end
+
+-- local function splitauthors(dataset,tag,field)
+-- local entries = datasets[dataset]
+-- local luadata = entries.luadata
+-- if not luadata then
+-- return { }
+-- end
+-- local entry = luadata[tag]
+-- if not entry then
+-- return { }
+-- end
+-- return splitauthorstring(entry[field])
+-- end
+
+local function the_initials(initials,symbol)
+ local t, symbol = { }, symbol or "."
+ for i=1,#initials do
+ t[i] = initials[i] .. symbol
+ end
+ return t
+end
+
+-- authors
+
+local settings = { }
+
+-- local defaultsettings = {
+-- firstnamesep = " ",
+-- vonsep = " ",
+-- surnamesep = " ",
+-- juniorsep = " ",
+-- surnamejuniorsep = ", ",
+-- juniorjuniorsep = ", ",
+-- surnamefirstnamesep = ", ",
+-- surnameinitialsep = ", ",
+-- namesep = ", ",
+-- lastnamesep = " and ",
+-- finalnamesep = " and ",
+-- etallimit = 1000,
+-- etaldisplay = 1000,
+-- etaltext = "",
+-- }
+
+local defaultsettings = {
+ firstnamesep = [[\btxlistvariantparameter{firstnamesep}]],
+ vonsep = [[\btxlistvariantparameter{vonsep}]],
+ surnamesep = [[\btxlistvariantparameter{surnamesep}]],
+ juniorsep = [[\btxlistvariantparameter{juniorsep}]],
+ surnamejuniorsep = [[\btxlistvariantparameter{surnamejuniorsep}]],
+ juniorjuniorsep = [[\btxlistvariantparameter{juniorjuniorsep}]],
+ surnamefirstnamesep = [[\btxlistvariantparameter{surnamefirstnamesep}]],
+ surnameinitialsep = [[\btxlistvariantparameter{surnameinitialsep}]],
+ namesep = [[\btxlistvariantparameter{namesep}]],
+ lastnamesep = [[\btxlistvariantparameter{lastnamesep}]],
+ finalnamesep = [[\btxlistvariantparameter{finalnamesep}]],
+ --
+ etaltext = [[\btxlistvariantparameter{etaltext}]],
+ --
+ etallimit = 1000,
+ etaldisplay = 1000,
+}
+
+function authors.setsettings(s)
+end
+
+authors.splitstring = splitauthorstring
+
+-- [firstnames] [firstnamesep] [vons] [vonsep] [surnames] [juniors] [surnamesep] (Taco, von Hoekwater, jr)
+
+function authors.normal(author,settings)
+ local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors
+ local result, settings = { }, settings or defaultsettings
+ if firstnames and #firstnames > 0 then
+ result[#result+1] = concat(firstnames," ")
+ result[#result+1] = settings.firstnamesep or defaultsettings.firstnamesep
+ end
+ if vons and #vons > 0 then
+ result[#result+1] = concat(vons," ")
+ result[#result+1] = settings.vonsep or defaultsettings.vonsep
+ end
+ if surnames and #surnames > 0 then
+ result[#result+1] = concat(surnames," ")
+ if juniors and #juniors > 0 then
+ result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
+ result[#result+1] = concat(juniors," ")
+ end
+ elseif juniors and #juniors > 0 then
+ result[#result+1] = concat(juniors," ")
+ end
+ return concat(result)
+end
+
+-- [initials] [initialsep] [vons] [vonsep] [surnames] [juniors] [surnamesep] (T, von Hoekwater, jr)
+
+function authors.normalshort(author,settings)
+ local initials, vons, surnames, juniors = author.initials, author.vons, author.surnames, author.juniors
+ local result, settings = { }, settings or defaultsettings
+ if initials and #initials > 0 then
+ result[#result+1] = concat(initials," ")
+ result[#result+1] = settings.initialsep or defaultsettings.initialsep
+ end
+ if vons and #vons > 0 then
+ result[#result+1] = concat(vons," ")
+ result[#result+1] = settings.vonsep or defaultsettings.vonsep
+ end
+ if surnames and #surnames > 0 then
+ result[#result+1] = concat(surnames," ")
+ if juniors and #juniors > 0 then
+ result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
+ result[#result+1] = concat(juniors," ")
+ end
+ elseif juniors and #juniors > 0 then
+ result[#result+1] = concat(juniors," ")
+ end
+ return concat(result)
+end
+
+-- vons surnames juniors, firstnames
+
+-- [vons] [vonsep] [surnames] [surnamejuniorsep] [juniors] [surnamefirstnamesep] [firstnames] (von Hoekwater jr, Taco)
+
+function authors.inverted(author,settings)
+ local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors
+ local result, settings = { }, settings or defaultsettings
+ if vons and #vons > 0 then
+ result[#result+1] = concat(vons," ")
+ result[#result+1] = settings.vonsep or defaultsettings.vonsep
+ end
+ if surnames and #surnames > 0 then
+ result[#result+1] = concat(surnames," ")
+ if juniors and #juniors > 0 then
+ result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
+ result[#result+1] = concat(juniors," ")
+ end
+ elseif juniors and #juniors > 0 then
+ result[#result+1] = concat(juniors," ")
+ end
+ if firstnames and #firstnames > 0 then
+ result[#result+1] = settings.surnamefirstnamesep or defaultsettings.surnamefirstnamesep
+ result[#result+1] = concat(firstnames," ")
+ end
+ return concat(result)
+end
+
+-- [vons] [vonsep] [surnames] [surnamejuniorsep] [juniors] [surnamefirstnamesep] [initials] (von Hoekwater jr, T)
+
+function authors.invertedshort(author,settings)
+ local vons, surnames, initials, juniors = author.vons, author.surnames, author.initials, author.juniors
+ local result, settings = { }, settings or defaultsettings
+ if vons and #vons > 0 then
+ result[#result+1] = concat(vons," ")
+ result[#result+1] = settings.vonsep or defaultsettings.vonsep
+ end
+ if surnames then
+ result[#result+1] = concat(surnames," ")
+ end
+ if surnames and #surnames > 0 then
+ result[#result+1] = concat(surnames," ")
+ if juniors and #juniors > 0 then
+ result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
+ result[#result+1] = concat(juniors," ")
+ end
+ elseif juniors and #juniors > 0 then
+ result[#result+1] = concat(juniors," ")
+ end
+ if initials and #initials > 0 then
+ result[#result+1] = settings.surnameinitialsep or defaultsettings.surnameinitialsep
+ result[#result+1] = concat(the_initials(initials)," ")
+ end
+ return concat(result)
+end
+
+local lastconcatsize = 1
+
+local function concatnames(t,settings)
+ local namesep = settings.namesep
+ local lastnamesep = settings.lastnamesep
+ local finalnamesep = settings.finalnamesep
+ local lastconcatsize = #t
+ if lastconcatsize > 2 then
+ local s = { }
+ for i=1,lastconcatsize-2 do
+ s[i] = t[i] .. namesep
+ end
+ s[lastconcatsize-1], s[lastconcatsize] = t[lastconcatsize-1] .. finalnamesep, t[lastconcatsize]
+ return concat(s)
+ elseif lastconcatsize > 1 then
+ return concat(t,lastnamesep)
+ elseif lastconcatsize > 0 then
+ return t[1]
+ else
+ return ""
+ end
+end
+
+function authors.concat(dataset,tag,field,settings)
+ table.setmetatableindex(settings,defaultsettings)
+ local combined = settings.combiner
+ if not combiner or type(combiner) == "string" then
+ combiner = authors[combiner or "normal"] or authors.normal
+ end
+ local split = datasets[dataset].details[tag][field]
+ local etallimit = settings.etallimit or 1000
+ local etaldisplay = settings.etaldisplay or etallimit
+ local max = split and #split or 0
+ if max == 0 then
+ -- error
+ end
+ if max > etallimit and etaldisplay < max then
+ max = etaldisplay
+ end
+ local combined = { }
+ for i=1,max do
+ combined[i] = combiner(split[i],settings)
+ end
+ local result = concatnames(combined,settings)
+ if #combined <= max then
+ return result
+ else
+ return result .. settings.etaltext
+ end
+end
+
+function commands.btxauthor(...)
+ context(authors.concat(...))
+end
+
+function authors.short(author,year)
+ -- todo
+-- local result = { }
+-- if author then
+-- local authors = splitauthors(author)
+-- for a=1,#authors do
+-- local aa = authors[a]
+-- local initials = aa.initials
+-- for i=1,#initials do
+-- result[#result+1] = initials[i]
+-- end
+-- local surnames = aa.surnames
+-- for s=1,#surnames do
+-- result[#result+1] = utfchar(lpegmatch(firstcharacter,surnames[s]))
+-- end
+-- end
+-- end
+-- if year then
+-- result[#result+1] = year
+-- end
+-- return concat(result)
+end
+
+-- We can consider creating a hashtable key -> entry but I wonder if it
+-- pays off.
+
+-- local function test(sample)
+-- local authors = splitauthors(sample)
+-- print(table.serialize(authors))
+-- for i=1,#authors do
+-- local author = authors[i]
+-- print(normalauthor (author,settings))
+-- print(normalshortauthor (author,settings))
+-- print(invertedauthor (author,settings))
+-- print(invertedshortauthor(author,settings))
+-- end
+-- print(concatauthors(sample,settings,normalauthor))
+-- print(concatauthors(sample,settings,normalshortauthor))
+-- print(concatauthors(sample,settings,invertedauthor))
+-- print(concatauthors(sample,settings,invertedshortauthor))
+-- end
+
+-- local sample_a = "Hagen, Hans and Hoekwater, Taco Whoever T. Ex. and Henkel Hut, Hartmut Harald von der"
+-- local sample_b = "Hans Hagen and Taco Whoever T. Ex. Hoekwater and Hartmut Harald von der Henkel Hut"
+
+-- test(sample_a)
+-- test(sample_b)
+
+
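A minimal sketch (not part of the commit) of how the name splitter above behaves;
it assumes a ConTeXt MkIV run in which these publ-* modules are loaded, for
instance inside \startluacode ... \stopluacode:

    local split = publications.authors.splitstring

    inspect(split("Hans Hagen"))
    -- one "First Last" author: firstnames = { "Hans" }, surnames = { "Hagen" }

    inspect(split("Hoekwater, Taco"))
    -- "von Last, First" form: surnames = { "Hoekwater" }, firstnames = { "Taco" }

    inspect(split("Henkel Hut, Hartmut Harald von der"))
    -- after the comma, capitalized words become first names ("Hartmut", "Harald")
    -- and the lowercase remainder ends up in vons ("von", "der")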
diff --git a/tex/context/base/publ-dat.lua b/tex/context/base/publ-dat.lua
new file mode 100644
index 000000000..b1bf34265
--- /dev/null
+++ b/tex/context/base/publ-dat.lua
@@ -0,0 +1,510 @@
+if not modules then modules = { } end modules ['publ-dat'] = {
+ version = 1.001,
+ comment = "this module part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: strip the @ in the lpeg instead of on do_definition and do_shortcut
+-- todo: store bibroot and bibrootdt
+
+--[[ldx--
+<p>This is a prelude to integrated bibliography support. This file just loads
+bibtex files and converts them to xml so that we can access the content
+in a convenient way. Actually handling the data takes place elsewhere.</p>
+--ldx]]--
+
+if not characters then
+ dofile(resolvers.findfile("char-def.lua"))
+ dofile(resolvers.findfile("char-ini.lua"))
+ dofile(resolvers.findfile("char-tex.lua"))
+end
+
+local chardata = characters.data
+local lowercase = characters.lower
+
+local lower, gsub, concat = string.lower, string.gsub, table.concat
+local next, type = next, type
+local utfchar = utf.char
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local textoutf = characters and characters.tex.toutf
+local settings_to_hash, settings_to_array = utilities.parsers.settings_to_hash, utilities.parsers.settings_to_array
+local formatters = string.formatters
+local sortedkeys, sortedhash = table.sortedkeys, table.sortedhash
+local xmlcollected, xmltext, xmlconvert = xml.collected, xml.text, xmlconvert
+local setmetatableindex = table.setmetatableindex
+
+-- todo: more allocate
+
+local P, R, S, V, C, Cc, Cs, Ct, Carg = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Carg
+
+local trace = false trackers.register("publications", function(v) trace = v end)
+local report = logs.reporter("publications")
+
+publications = publications or { }
+local publications = publications
+
+local datasets = publications.datasets or { }
+publications.datasets = datasets
+
+publications.statistics = publications.statistics or { }
+local publicationsstats = publications.statistics
+
+publicationsstats.nofbytes = 0
+publicationsstats.nofdefinitions = 0
+publicationsstats.nofshortcuts = 0
+publicationsstats.nofdatasets = 0
+
+local xmlplaceholder = "<?xml version='1.0' standalone='yes'?>\n<bibtex></bibtex>"
+
+local defaultshortcuts = {
+ jan = "1",
+ feb = "2",
+ mar = "3",
+ apr = "4",
+ may = "5",
+ jun = "6",
+ jul = "7",
+ aug = "8",
+ sep = "9",
+ oct = "10",
+ nov = "11",
+ dec = "12",
+}
+
+function publications.new(name)
+ publicationsstats.nofdatasets = publicationsstats.nofdatasets + 1
+ local dataset = {
+ name = name or "dataset " .. publicationsstats.nofdatasets,
+ nofentries = 0,
+ shortcuts = { },
+ luadata = { },
+ xmldata = xmlconvert(xmlplaceholder),
+ -- details = { },
+ nofbytes = 0,
+ entries = nil, -- empty == all
+ sources = { },
+ loaded = { },
+ fields = { },
+ userdata = { },
+ used = { },
+ commands = { }, -- for statistical purposes
+ status = {
+ resources = false,
+ userdata = false,
+ },
+ }
+ setmetatableindex(dataset,function(t,k)
+ -- will become a plugin
+ if k == "details" and publications.enhance then
+ dataset.details = { }
+ publications.enhance(dataset.name)
+ return dataset.details
+ end
+ end)
+ return dataset
+end
+
+function publications.markasupdated(name)
+ if name == "string" then
+ datasets[name].details = nil
+ else
+ datasets.details = nil
+ end
+end
+
+setmetatableindex(datasets,function(t,k)
+ local v = publications.new(k)
+ datasets[k] = v
+ return v
+end)
+
+-- we apply some normalization
+
+----- command = P("\\") * Cc("btxcmd{") * (R("az","AZ")^1) * Cc("}")
+local command = P("\\") * (Carg(1) * C(R("az","AZ")^1) / function(list,c) list[c] = (list[c] or 0) + 1 return "btxcmd{" .. c .. "}" end)
+local somemath = P("$") * ((1-P("$"))^1) * P("$") -- let's not assume nested math
+local any = P(1)
+local done = P(-1)
+local one_l = P("{") / ""
+local one_r = P("}") / ""
+local two_l = P("{{") / ""
+local two_r = P("}}") / ""
+local special = P("#") / "\\letterhash"
+
+local filter_0 = S('\\{}')
+local filter_1 = (1-filter_0)^0 * filter_0
+local filter_2 = Cs(
+-- {{...}} ... {{...}}
+-- two_l * (command + special + any - two_r - done)^0 * two_r * done +
+-- one_l * (command + special + any - one_r - done)^0 * one_r * done +
+ (somemath + command + special + any )^0
+)
+
+-- Currently we expand shortcuts and for large ones (like the acknowledgements
+-- in tugboat.bib) this is not that efficient. However, eventually strings get
+-- hashed again.
+
+local function do_shortcut(tag,key,value,shortcuts)
+ publicationsstats.nofshortcuts = publicationsstats.nofshortcuts + 1
+ tag = lowercase(tag)
+ if tag == "@string" then
+ shortcuts[key] = value
+ end
+end
+
+local function getindex(dataset,luadata,tag)
+ local found = luadata[tag]
+ if found then
+ return found.index or 0
+ else
+ local index = dataset.nofentries + 1
+ dataset.nofentries = index
+ return index
+ end
+end
+
+publications.getindex = getindex
+
+local function do_definition(category,tag,tab,dataset)
+ publicationsstats.nofdefinitions = publicationsstats.nofdefinitions + 1
+ local fields = dataset.fields
+ local luadata = dataset.luadata
+ local found = luadata[tag]
+ local index = getindex(dataset,luadata,tag)
+ local entries = {
+ category = gsub(lower(category),"^@",""),
+ tag = tag,
+ index = index,
+ }
+ for i=1,#tab,2 do
+ local original = tab[i]
+ local normalized = fields[original]
+ if not normalized then
+ normalized = lower(original) -- we assume ascii fields
+ fields[original] = normalized
+ end
+ local value = tab[i+1]
+ value = textoutf(value)
+ if lpegmatch(filter_1,value) then
+ value = lpegmatch(filter_2,value,1,dataset.commands) -- we need to start at 1 for { }
+ end
+ if normalized == "crossref" then
+ local parent = luadata[value]
+ if parent then
+ setmetatableindex(entries,parent)
+ else
+ -- warning
+ end
+ end
+ entries[normalized] = value
+ end
+ luadata[tag] = entries
+end
+
+local function resolve(s,dataset)
+ return dataset.shortcuts[s] or defaultshortcuts[s] or s -- can be number
+end
+
+local percent = P("%")
+local start = P("@")
+local comma = P(",")
+local hash = P("#")
+local escape = P("\\")
+local single = P("'")
+local double = P('"')
+local left = P('{')
+local right = P('}')
+local both = left + right
+local lineending = S("\n\r")
+local space = S(" \t\n\r\f") -- / " "
+local spacing = space^0
+local equal = P("=")
+----- collapsed = (space^1)/ " "
+local collapsed = (lpegpatterns.whitespace^1)/ " "
+
+----- balanced = lpegpatterns.balanced
+local balanced = P {
+ [1] = ((escape * (left+right)) + (collapsed + 1 - (left+right)) + V(2))^0,
+ [2] = left * V(1) * right
+}
+
+local keyword = C((R("az","AZ","09") + S("@_:-"))^1) -- C((1-space)^1)
+local s_quoted = ((escape*single) + collapsed + (1-single))^0
+local d_quoted = ((escape*double) + collapsed + (1-double))^0
+
+local b_value = (left /"") * balanced * (right /"")
+local s_value = (single/"") * (b_value + s_quoted) * (single/"")
+local d_value = (double/"") * (b_value + d_quoted) * (double/"")
+local r_value = keyword * Carg(1) /resolve
+
+local somevalue = s_value + d_value + b_value + r_value
+local value = Cs((somevalue * ((spacing * hash * spacing)/"" * somevalue)^0))
+
+local assignment = spacing * keyword * spacing * equal * spacing * value * spacing
+local shortcut = keyword * spacing * left * spacing * (assignment * comma^0)^0 * spacing * right * Carg(1)
+local definition = keyword * spacing * left * spacing * keyword * comma * Ct((assignment * comma^0)^0) * spacing * right * Carg(1)
+local comment = keyword * spacing * left * (1-right)^0 * spacing * right
+local forget = percent^1 * (1-lineending)^0
+
+-- todo \%
+
+local bibtotable = (space + forget + shortcut/do_shortcut + definition/do_definition + comment + 1)^0
+
+-- loadbibdata -> dataset.luadata
+-- loadtexdata -> dataset.luadata
+-- loadluadata -> dataset.luadata
+
+-- converttoxml -> dataset.xmldata from dataset.luadata
+
+function publications.loadbibdata(dataset,content,source,kind)
+ statistics.starttiming(publications)
+ publicationsstats.nofbytes = publicationsstats.nofbytes + #content
+ dataset.nofbytes = dataset.nofbytes + #content
+ if source then
+ table.insert(dataset.sources, { filename = source, checksum = md5.HEX(content) })
+ dataset.loaded[source] = kind or true
+ end
+ dataset.newtags = #dataset.luadata > 0 and { } or dataset.newtags
+ publications.markasupdated(dataset)
+ lpegmatch(bibtotable,content or "",1,dataset)
+ statistics.stoptiming(publications)
+end
+
+-- we could use xmlescape again
+
+local cleaner_0 = S('<>&')
+local cleaner_1 = (1-cleaner_0)^0 * cleaner_0
+local cleaner_2 = Cs ( (
+ P("<") / "&lt;" +
+ P(">") / "&gt;" +
+ P("&") / "&amp;" +
+ P(1)
+)^0)
+
+local compact = false -- can be a directive but then we also need to deal with newlines ... not now
+
+function publications.converttoxml(dataset,nice) -- we have fields !
+ local luadata = dataset and dataset.luadata
+ if luadata then
+ statistics.starttiming(publications)
+ statistics.starttiming(xml)
+ --
+ local result, r = { }, 0
+ --
+ r = r + 1 ; result[r] = "<?xml version='1.0' standalone='yes'?>"
+ r = r + 1 ; result[r] = "<bibtex>"
+ --
+ if nice then
+ local f_entry_start = formatters[" <entry tag='%s' category='%s' index='%s'>"]
+ local f_entry_stop = " </entry>"
+ local f_field = formatters[" <field name='%s'>%s</field>"]
+ for tag, entry in sortedhash(luadata) do
+ r = r + 1 ; result[r] = f_entry_start(tag,entry.category,entry.index)
+ for key, value in sortedhash(entry) do
+ if key ~= "tag" and key ~= "category" and key ~= "index" then
+ if lpegmatch(cleaner_1,value) then
+ value = lpegmatch(cleaner_2,value)
+ end
+ if value ~= "" then
+ r = r + 1 ; result[r] = f_field(key,value)
+ end
+ end
+ end
+ r = r + 1 ; result[r] = f_entry_stop
+ end
+ else
+ local f_entry_start = formatters["<entry tag='%s' category='%s' index='%s'>"]
+ local f_entry_stop = "</entry>"
+ local f_field = formatters["<field name='%s'>%s</field>"]
+ for tag, entry in next, luadata do
+ r = r + 1 ; result[r] = f_entry_start(entry.tag,entry.category,entry.index)
+ for key, value in next, entry do
+ if key ~= "tag" and key ~= "category" and key ~= "index" then
+ if lpegmatch(cleaner_1,value) then
+ value = lpegmatch(cleaner_2,value)
+ end
+ if value ~= "" then
+ r = r + 1 ; result[r] = f_field(key,value)
+ end
+ end
+ end
+ r = r + 1 ; result[r] = f_entry_stop
+ end
+ end
+ --
+ r = r + 1 ; result[r] = "</bibtex>"
+ --
+ result = concat(result,nice and "\n" or nil)
+ --
+ dataset.xmldata = xmlconvert(result, {
+ resolve_entities = true,
+ resolve_predefined_entities = true, -- in case we have escaped entities
+ -- unify_predefined_entities = true, -- &#038; -> &amp;
+ utfize_entities = true,
+ } )
+ --
+ statistics.stoptiming(xml)
+ statistics.stoptiming(publications)
+ if lxml then
+ lxml.register(formatters["btx:%s"](dataset.name),dataset.xmldata)
+ end
+ end
+end
+
+local loaders = publications.loaders or { }
+publications.loaders = loaders
+
+function loaders.bib(dataset,filename,kind)
+ local data = io.loaddata(filename) or ""
+ if data == "" then
+ report("empty file %a, nothing loaded",filename)
+ elseif trace then
+ report("loading file",filename)
+ end
+ publications.loadbibdata(dataset,data,filename,kind)
+end
+
+function loaders.lua(dataset,filename) -- if filename is a table we load that one
+ if type(dataset) == "table" then
+ dataset = datasets[dataset]
+ end
+ local data = type(filename) == "table" and filename or table.load(filename)
+ if data then
+ local luadata = dataset.luadata
+ for tag, entry in next, data do
+ if type(entry) == "table" then
+ entry.index = getindex(dataset,luadata,tag)
+ luadata[tag] = entry -- no cleaning yet
+ end
+ end
+ end
+end
+
+function loaders.xml(dataset,filename)
+ local luadata = dataset.luadata
+ local root = xml.load(filename)
+ for entry in xmlcollected(root,"/bibtex/entry") do
+ local attributes = entry.at
+ local tag = attributes.tag
+ local entry = {
+ category = attributes.category
+ }
+ for field in xmlcollected(entry,"/field") do
+ -- entry[field.at.name] = xmltext(field)
+ entry[field.at.name] = field.dt[1] -- no cleaning yet
+ end
+ -- local edt = entry.dt
+ -- for i=1,#edt do
+ -- local e = edt[i]
+ -- local a = e.at
+ -- if a and a.name then
+ -- t[a.name] = e.dt[1] -- no cleaning yet
+ -- end
+ -- end
+ entry.index = getindex(dataset,luadata,tag)
+ luadata[tag] = entry
+ end
+end
+
+setmetatableindex(loaders,function(t,filetype)
+ local v = function(dataset,filename)
+ report("no loader for file %a with filetype %a",filename,filetype)
+ end
+ t[k] = v
+ return v
+end)
+
+function publications.load(dataset,filename,kind)
+ statistics.starttiming(publications)
+ local files = settings_to_array(filename)
+ for i=1,#files do
+ local filetype, filename = string.splitup(files[i],"::")
+ if not filename then
+ filename = filetype
+ filetype = file.suffix(filename)
+ end
+ local fullname = resolvers.findfile(filename,"bib")
+ if dataset.loaded[fullname] then -- will become better
+ -- skip
+ elseif fullname == "" then
+ report("no file %a",fullname)
+ else
+ loaders[filetype](dataset,fullname)
+ end
+ if kind then
+ dataset.loaded[fullname] = kind
+ end
+ end
+ statistics.stoptiming(publications)
+end
+
+local checked = function(s,d) d[s] = (d[s] or 0) + 1 end
+local checktex = ( (1-P("\\"))^1 + P("\\") * ((C(R("az","AZ")^1) * Carg(1))/checked))^0
+
+function publications.analyze(dataset)
+ local data = dataset.luadata
+ local categories = { }
+ local fields = { }
+ local commands = { }
+ for k, v in next, data do
+ categories[v.category] = (categories[v.category] or 0) + 1
+ for k, v in next, v do
+ fields[k] = (fields[k] or 0) + 1
+ lpegmatch(checktex,v,1,commands)
+ end
+ end
+ dataset.analysis = {
+ categories = categories,
+ fields = fields,
+ commands = commands,
+ }
+end
+
+-- str = [[
+-- @COMMENT { CRAP }
+-- @STRING{ hans = "h a n s" }
+-- @STRING{ taco = "t a c o" }
+-- @SOMETHING{ key1, abc = "t a c o" , def = "h a n s" }
+-- @SOMETHING{ key2, abc = hans # taco }
+-- @SOMETHING{ key3, abc = "hans" # taco }
+-- @SOMETHING{ key4, abc = hans # "taco" }
+-- @SOMETHING{ key5, abc = hans # taco # "hans" # "taco"}
+-- @SOMETHING{ key6, abc = {oeps {oeps} oeps} }
+-- ]]
+
+-- local dataset = publications.new()
+-- publications.tolua(dataset,str)
+-- publications.toxml(dataset)
+-- publications.toxml(dataset)
+-- print(dataset.xmldata)
+-- inspect(dataset.luadata)
+-- inspect(dataset.xmldata)
+-- inspect(dataset.shortcuts)
+-- print(dataset.nofbytes,statistics.elapsedtime(publications))
+
+-- local dataset = publications.new()
+-- publications.load(dataset,"IEEEabrv.bib")
+-- publications.load(dataset,"IEEEfull.bib")
+-- publications.load(dataset,"IEEEexample.bib")
+-- publications.toxml(dataset)
+-- print(dataset.nofbytes,statistics.elapsedtime(publications))
+
+-- local dataset = publications.new()
+-- publications.load(dataset,"gut.bib")
+-- publications.load(dataset,"komoedie.bib")
+-- publications.load(dataset,"texbook1.bib")
+-- publications.load(dataset,"texbook2.bib")
+-- publications.load(dataset,"texbook3.bib")
+-- publications.load(dataset,"texgraph.bib")
+-- publications.load(dataset,"texjourn.bib")
+-- publications.load(dataset,"texnique.bib")
+-- publications.load(dataset,"tugboat.bib")
+-- publications.toxml(dataset)
+-- print(dataset.nofbytes,statistics.elapsedtime(publications))
+
+-- print(table.serialize(dataset.luadata))
+-- print(table.serialize(dataset.xmldata))
+-- print(table.serialize(dataset.shortcuts))
+-- print(xml.serialize(dataset.xmldata))
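A minimal sketch (not part of the commit) that exercises the loader entry points
defined above under their current names (the commented tests above refer to
tolua/toxml, which are not the names defined in this file); "test.bib" is a
placeholder filename:

    local dataset = publications.datasets["example"]  -- autocreated by the metatable index
    publications.load(dataset,"test.bib","current")   -- suffix "bib" dispatches to loaders.bib
    publications.converttoxml(dataset,true)           -- builds dataset.xmldata (nice = indented)
    inspect(dataset.luadata)                          -- the raw entries, keyed by tag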
diff --git a/tex/context/base/publ-imp-apa.mkiv b/tex/context/base/publ-imp-apa.mkiv
new file mode 100644
index 000000000..3df33ce63
--- /dev/null
+++ b/tex/context/base/publ-imp-apa.mkiv
@@ -0,0 +1,547 @@
+%D \module
+%D [ file=publ-imp-apa,
+%D version=2013.12.12, % based on bibl-apa.tex and later xml variant
+%D title=APA bibliography style,
+%D subtitle=Publications,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is therefore copyrighted
+%D by \PRAGMA. See mreadme.pdf for details.
+
+% common
+
+\loadbtxdefinitionfile[def]
+
+\startsetups btx:apa:common:wherefrom
+ \btxdoifelse {address} {
+ \getvariable{btx:temp}{left}
+ \btxdoifelse {country} {
+ \btxdoifelse {\getvariable{btx:temp}{label}} {
+ \btxflush{address}\btxcomma\btxflush{country}: \btxflush{\getvariable{btx:temp}{label}}
+ } {
+ \btxflush{address}\btxcomma\btxflush{country}
+ }
+ } {
+ \btxdoifelse {\getvariable{btx:temp}{label}} {
+ \btxflush{address}\btxcomma\btxflush{\getvariable{btx:temp}{label}}
+ } {
+ \btxflush{address}
+ }
+ }
+ \getvariable{btx:temp}{right}
+ } {
+ \btxdoifelse {country} {
+ \getvariable{btx:temp}{left}
+ \btxdoifelse {\getvariable{btx:temp}{label}} {
+ \btxflush{country}: \btxflush{\getvariable{btx:temp}{label}}
+ } {
+ \btxflush{country}
+ }
+ \getvariable{btx:temp}{right}
+ } {
+ \btxdoifelse {\getvariable{btx:temp}{label}} {
+ \getvariable{btx:temp}{left}
+ \btxflush{\getvariable{btx:temp}{label}}
+ \getvariable{btx:temp}{right}
+ } {
+ \getvariable{btx:temp}{otherwise}
+ }
+ }
+ }
+\stopsetups
+
+% \setvariables[btx:temp][label=,left=,right=,otherwise=]
+
+\startsetups btx:apa:common:publisher
+ \begingroup
+ \setvariables[btx:temp][label=publisher]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:organization
+ \begingroup
+ \setvariables[btx:temp][label=organization]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:school
+ \begingroup
+ \setvariables[btx:temp][label=school]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:institution
+ \begingroup
+ \setvariables[btx:temp][label=institution]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:school:subsentence
+ \begingroup
+ \setvariables[btx:temp][label=school,left=\btxcomma,right=\btxperiod,otherwise=\btxperiod]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:institution:subsentence
+ \begingroup
+ \setvariables[btx:temp][label=institution,left=\btxcomma,right=\btxperiod,otherwise=\btxperiod]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:publisher:sentence
+ \begingroup
+ \setvariables[btx:temp][label=publisher,left=\btxspace,right=\btxperiod]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:organization:sentence
+ \begingroup
+ \setvariables[btx:temp][label=organization,left=\btxspace,right=\btxperiod]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:title-and-series
+ \btxdoif {title} {
+ \btxflush{title}
+ \btxdoif {series} {
+ \btxlparent\btxflush{series}\btxrparent
+ }
+ \btxperiod
+ }
+\stopsetups
+
+\startsetups btx:apa:common:title-it-and-series
+ \btxdoif {title} {
+ \bgroup\it\btxflush{title}\/\egroup
+ \btxdoif {series} {
+ \btxlparent\btxflush{series}\btxrparent
+ }
+ \btxperiod
+ }
+\stopsetups
+
+\disablemode[btx:apa:edited-book]
+
+\startsetups btx:apa:common:author-and-year
+ \btxdoif {author} {
+ \btxflushauthor{author}
+ }
+ \btxdoif {year} {
+ \btxlparent\btxflush{year}\btxrparent
+ }
+ \btxperiod
+\stopsetups
+
+\startsetups btx:apa:common:author-or-key-and-year
+ \btxdoifelse {author} {
+ \btxflushauthor{author}
+ } {
+ \btxdoif {key} {
+ \btxlbracket\btxsetup{btx:format:key}\btxrbracket
+ }
+ }
+ \btxdoif {year} {
+ \btxlparent\btxflush{year}\btxrparent
+ }
+ \btxperiod
+\stopsetups
+
+\startsetups btx:apa:common:author-editors-crossref-year
+ \btxdoif {author} {
+ \btxflushauthor{author}
+ } {
+ \btxdoifelse {editor} {
+ \enablemode[btx:apa:edited-book]
+ \btxflushauthor{editor}
+ \btxcomma\btxsingularplural{editor}{editors}
+ } {
+ % weird period
+ \btxdoif {crossref} {
+ \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket\btxperiod
+ }
+ }
+ }
+ \btxdoif {year} {
+ \btxlparent\btxflush{year}\btxrparent
+ }
+ \btxperiod
+\stopsetups
+
+\startsetups btx:apa:common:editor-or-key-and-year
+ \btxdoifelse {editor} {
+ \enablemode[btx:apa:edited-book]
+ \btxflushauthor{editor}
+ \btxcomma\btxsingularplural{editor}{editors}
+ } {
+ \btxdoif {key} {
+ \btxlbracket\btxsetup{btx:format:key}\btxrbracket
+ }
+ }
+ \btxspace
+ \btxdoif {year} {
+ \btxlparent\btxflush{year}\btxrparent
+ }
+ \btxperiod
+\stopsetups
+
+\startsetups btx:apa:common:note
+ \btxdoif {note} {
+ \btxspace\btxflush{note}\btxperiod
+ }
+\stopsetups
+
+\startsetups btx:apa:common:comment
+ \btxdoif {comment} {
+ \btxspace\btxflush{comment}\btxperiod
+ }
+\stopsetups
+
+\startsetups btx:apa:common:pages:p
+ \btxdoif {pages} {
+ \btxspace\btxflush{pages}\btxspace p\btxperiod
+ }
+\stopsetups
+
+\startsetups btx:apa:common:pages:pp
+ \btxdoif {pages} {
+ \btxspace\btxflush{pages}\btxspace pp\btxperiod
+ }
+\stopsetups
+
+\startsetups btx:apa:common:pages:pages
+ \btxdoif {pages} {
+ \btxcomma pages~\btxflush{pages}
+ }
+\stopsetups
+
+\startsetups btx:apa:common:edition:sentense
+ \btxdoif {edition} {
+ \btxspace\btxflush{edition}\btxspace edition\btxperiod
+ }
+\stopsetups
+
+% check when the next is used (no period)
+
+% \startsetups btx:apa:common:edition
+% \btxdoif {edition} {
+% \btxspace\btxflush{edition}\btxspace edition
+% }
+% \stopsetups
+
+% we can share more, todo
+
+% specific
+
+\startsetups btx:apa:article
+ \btxsetup{btx:apa:common:author-or-key-and-year}
+ \btxdoif {title} {
+ \btxflush{title}\btxperiod
+ }
+ \btxdoifelse {journal} {
+ \bgroup\it\btxflush{journal}\/\egroup
+ } {
+ \btxdoif {crossref} {
+ In\btxspace\btxflush{crossref}
+ }
+ }
+ \btxdoifelse {volume} {
+ \btxcomma\bgroup\it\btxflush{volume}\/\egroup
+ \btxdoif {issue} {
+ \btxlparent\btxflush{issue}\btxrparent
+ }
+ \btxdoif {pages} {
+ \btxcomma\btxflush{pages}
+ }
+ \btxperiod
+ } {
+ \btxsetup{btx:apa:common:pages:pp}
+ }
+ \btxsetup{btx:apa:common:note}
+ \btxsetup{btx:apa:common:comment}
+\stopsetups
+
+\startsetups btx:apa:book
+ \btxsetup{btx:apa:common:author-editors-crossref-year}
+ \btxdoif {title} {
+ \bgroup\it\btxflush{title}\/\egroup
+ \doifmodeelse {btx:apa:edited-book} {
+ \btxdoifelse {volume} {
+ \btxspace Number\nonbreakablespace\btxflush{volume}
+ \btxdoifelse {series} {
+ \btxspace in\nonbreakablespace\btxflush{series}\btxperiod
+ } {
+ \btxdoifelse {crossref} {
+ \btxspace in\btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
+ } {
+ \btxperiod
+ }
+ }
+ } {
+ \btxdoif {series} {
+ \btxspace\btxflush{series}
+ }
+ \btxperiod
+ }
+ } {
+ \btxdoifelse {crossref} {
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxperiod
+ \btxdoif {volume} {
+ Volume\nonbreakablespace\btxflush{volume}\btxspace of\nonbreakablespace
+ }
+ } {
+ \btxdoif {volume} {
+ \btxcomma volume\nonbreakablespace\btxflush{volume}
+ \btxdoif {series} {
+ \btxspace of\nonbreakablespace\bgroup\it\btxflush{series}\/\egroup
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxperiod
+ }
+ }
+ }
+ }
+ \btxsetup{btx:apa:common:edition:sentence}
+ \btxsetup{btx:apa:common:publisher:sentence}
+ \btxsetup{btx:apa:common:pages:p}% twice?
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:inbook
+ \btxsetup{btx:apa:common:author-editors-crossref-year}
+ \btxdoifelse {title} {
+ \bgroup\it\btxflush{title}\/\egroup
+ } {
+ \doifmodeelse {btx:apa:edited-book} {
+ \btxdoifelse {volume} {
+ \btxspace number\nonbreakablespace\btxflush{volume}
+ \btxdoifelse {series} {
+ \btxspace in\nonbreakablespace\btxflush{series}\btxperiod
+ } {
+ \btxdoifelse {crossref} {
+ \btxspace in\btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
+ } {
+ \btxperiod
+ }
+ }
+ } {
+ \btxdoif {series} {
+ \btxspace\btxflush{series}\btxperiod
+ }
+ }
+ } {
+ \btxdoifelse {crossref} {
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxdoif {volume} {
+ Volume\nonbreakablespace\btxflush{volume}\btxspace of\nonbreakablespace
+ }
+ \btxdoif {crossref} {
+ \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
+ }
+ } {
+ \btxdoif {volume} {
+ \btxcomma volume\nonbreakablespace\btxflush{volume}
+ \btxdoif {series} {
+ \btxspace of\nonbreakablespace\bgroup\it\btxflush{series}\/\egroup
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxperiod
+ }
+ }
+ }
+ }
+ \btxspace
+ \btxsetup{btx:apa:common:edition:sentence}
+ \btxsetup{btx:apa:common:publisher}
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:booklet
+ \btxsetup{btx:apa:common:author-or-key-and-year}
+ \btxsetup{btx:apa:common:title-it-and-series}
+ \btxsetup{btx:apa:common:edition:sentence}
+ \btxsetup{btx:apa:common:publication:sentence}
+ \btxsetup{btx:apa:common:pages:p}
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:manual
+ \btxsetup{btx:apa:common:author-or-key-and-year}
+ \btxsetup{btx:apa:common:title-it-and-series}
+ \btxsetup{btx:apa:common:edition:sentence}
+ \btxsetup{btx:apa:common:organization:sentence}
+ \btxsetup{btx:apa:common:pages:p}
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:incollection
+ \btxsetup{btx:apa:common:author-and-year}
+ \btxdoif {arttitle} {
+ \btxflush{arttitle}\btxperiod
+ }
+ In\btxspace
+ \btxdoifelse {title} {
+ \btxflushauthor{editor}\btxcomma
+ \bgroup\it\btxflush{title}\/\egroup
+ \btxdoif {series} {
+ \btxdoif {volume} {
+ \btxcomma number\btxspace\btxflush{volume}\btxspace in
+ }
+ \btxspace\btxflush{series}
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}\btxspace
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxdoif {edition} {
+ \btxspace\btxflush{edition}\btxspace edition
+ }
+ \btxsetup{btx:apa:common:publisher:sentence}
+ } {
+ \btxdoif {crossref} {
+ \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}
+ }
+ \btxspace
+ \btxsetup{btx:apa:common:pages:pages}
+ }
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:inproceedings
+ \btxsetup{btx:apa:common:author-and-year}
+ \btxdoif {arttitle} {
+ \btxflush{arttitle}\btxperiod
+ }
+ In\btxspace
+ \btxdoifelse {title} {
+ \btxdoif {editors} {
+ \btxflush{btx:apa:format:editors}
+ \btxcomma\btxsingularplural{editor}{editors}\btxcomma
+ }
+ \bgroup\it\btxflush{title}\/\egroup
+ \btxdoif {series} {
+ \btxdoif {volume} {
+ \btxcomma number~\btxflush{volume} in
+ }
+ \btxspace
+ \btxflush{series}
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}\btxspace
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxperiod
+ \btxsetup{btx:apa:common:organization:sentence}
+ } {
+ \btxdoif {crossref} {
+ \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}\btxspace
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxperiod
+ }
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:proceedings
+ \btxsetup{btx:apa:common:editor-or-key-and-year}
+ \btxdoif {title} {
+ \bgroup\it\btxflush{title}\/\egroup
+ \btxdoif {volume} {
+ \btxcomma number\btxspace\btxflush{volume}\btxspace in\btxspace
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}\btxspace
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxperiod
+ \btxsetup{btx:apa:common:organization:sentence}
+ }
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:common:thesis
+ \btxsetup{btx:apa:common:author-and-year}
+ \btxsetup{btx:apa:common:title-it-and-series}
+ \btxdoifelse {type} {
+ \btxflush{type}
+ } {
+ \getvariable{btx:temp}{label}
+ }
+ \btxsetup{btx:apa:common:school:subsentence}
+ \btxsetup{btx:apa:common:pages:p}
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:mastersthesis
+ \setvariables[btx:temp][label=Master's thesis]
+ \btxsetup{btx:apa:common:thesis}
+\stopsetups
+
+\startsetups btx:apa:phdthesis
+ \setvariables[btx:temp][label=PhD thesis]
+ \btxsetup{btx:apa:common:thesis}
+\stopsetups
+
+\startsetups btx:apa:techreport
+ \btxsetup{btx:apa:common:author-and-year}
+ \btxsetup{btx:apa:common:title-and-series}
+ \btxdoifelse {type} {
+ \btxflush{type}
+ \btxdoif {volume} {
+ \btxspace\btxflush{volume}
+ }
+ } {
+ \btxspace Technical Report
+ }
+ \btxsetup{btx:apa:common:institution:subsentence}
+ \btxsetup{btx:apa:common:pages:p}
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:misc
+ \btxsetup{btx:apa:common:author-and-year}
+ \btxsetup{btx:apa:common:title-and-series}
+ \btxsetup{btx:apa:common:publisher:sentence}
+ \btxsetup{btx:apa:common:pages:p}
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:unpublished
+ \btxsetup{btx:apa:common:author-and-year}
+ \btxsetup{btx:apa:common:title-and-series}
+ \btxsetup{btx:apa:common:pages:p}
+ \btxdoif {type} {
+ \btxlparent\btxflush{type}\btxrparent
+ }
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\endinput
diff --git a/tex/context/base/publ-imp-cite.mkiv b/tex/context/base/publ-imp-cite.mkiv
new file mode 100644
index 000000000..d64c2132c
--- /dev/null
+++ b/tex/context/base/publ-imp-cite.mkiv
@@ -0,0 +1,74 @@
+%D \module
+%D [ file=publ-imp-cite,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=XML,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\startsetups btx:cite:author
+ \btxcitevariant{author}
+\stopsetups
+
+\startsetups btx:cite:authoryear
+ \btxcitevariant{authoryear}
+\stopsetups
+
+\startsetups btx:cite:authoryears
+ \btxcitevariant{authoryears}
+\stopsetups
+
+% \startsetups btx:cite:authornum
+% \btxcitevariant{author}
+% \btxcitevariantparameter\c!inbetween
+% \btxcitevariant{num}
+% \stopsetups
+
+\startsetups btx:cite:authornum
+ \btxcitevariant{authornum}
+\stopsetups
+
+\startsetups btx:cite:year
+ \btxcitevariant{year}
+\stopsetups
+
+\startsetups btx:cite:short
+ \btxcitevariant{short}
+\stopsetups
+
+\startsetups btx:cite:serial
+ \btxcitevariant{serial}
+\stopsetups
+
+\startsetups btx:cite:key
+ \currentbtxtag % \btxcitevariant{tag}
+\stopsetups
+
+\startsetups btx:cite:doi
+ todo: \btxcitevariant{doi}
+\stopsetups
+
+\startsetups btx:cite:url
+ todo: \btxcitevariant{url}
+\stopsetups
+
+\startsetups btx:cite:type
+ \btxcitevariant{category}
+\stopsetups
+
+\startsetups btx:cite:page
+ \btxcitevariant{page}
+\stopsetups
+
+\startsetups btx:cite:none
+ % dummy
+\stopsetups
+
+\startsetups btx:cite:num
+ \btxcitevariant{num}
+\stopsetups
diff --git a/tex/context/base/publ-imp-commands.mkiv b/tex/context/base/publ-imp-commands.mkiv
new file mode 100644
index 000000000..14e2dbae1
--- /dev/null
+++ b/tex/context/base/publ-imp-commands.mkiv
@@ -0,0 +1,15 @@
+\unprotect
+
+% for tugboat
+
+\definebtxcommand\hbox {\hbox}
+\definebtxcommand\vbox {\vbox}
+\definebtxcommand\llap {\llap}
+\definebtxcommand\rlap {\rlap}
+\definebtxcommand\url #1{\hyphenatedurl{#1}}
+\definebtxcommand\acro #1{\dontleavehmode{\smallcaps#1}}
+
+\let\<<
+\let\<>
+
+\protect \endinput
diff --git a/tex/context/base/publ-imp-definitions.mkiv b/tex/context/base/publ-imp-definitions.mkiv
new file mode 100644
index 000000000..2cf2e3e8e
--- /dev/null
+++ b/tex/context/base/publ-imp-definitions.mkiv
@@ -0,0 +1,68 @@
+%D \module
+%D [ file=publ-imp-def,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Definitions,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D Here we collect some helper setups. We assume that checking of a field
+%D happens in the calling setup, if only because that is the place where
+%D fences are also dealt with.
+
+\unprotect
+
+\startxmlsetups btx:format:crossref
+ \cite[\btxfield{crossref}]
+\stopxmlsetups
+
+\startxmlsetups btx:format:key
+ \btxfield{short}
+\stopxmlsetups
+
+\startxmlsetups btx:format:doi
+ \edef\currentbtxfielddoi{\btxfield{doi}}
+ \ifx\currentbtxfielddoi\empty
+ {\tttf no-doi}
+ \else\ifconditional\btxinteractive
+ \goto{\hyphenatedurl{\currentbtxfielddoi}}[url(http://dx.doi.org/\currentbtxfielddoi)]
+ \else
+ \hyphenatedurl{\currentbtxfielddoi}
+ \fi\fi
+\stopxmlsetups
+
+\startxmlsetups btx:format:url
+ \edef\currentbtxfieldurl{\btxfield{url}}
+ \ifx\currentbtxfieldurl\empty
+ {\tttf no-url}
+ \else\ifconditional\btxinteractive
+ \goto{\hyphenatedurl{\currentbtxfieldurl}}[url(\currentbtxfieldurl)]
+ \else
+ \hyphenatedurl{\currentbtxfieldurl}
+ \fi\fi
+\stopxmlsetups
+
+\startxmlsetups btx:format:month
+ \edef\currentbtxfieldmonth{\btxfield{month}}
+ \ifx\currentbtxfieldmonth\empty
+ {\tttf no-month}
+ \else
+ \edef\p_monthconversion{\btxlistvariantparameter\c!monthconversion}
+ \ifx\p_monthconversion\empty % month month:mnem
+ \currentbtxfieldmonth
+ \else
+ \doifnumberelse \currentbtxfieldmonth {
+ \convertnumber\p_monthconversion\currentbtxfieldmonth
+ } {
+ \currentbtxfieldmonth
+ }
+ \fi
+ \fi
+\stopxmlsetups
+
+\protect
diff --git a/tex/context/base/publ-ini.lua b/tex/context/base/publ-ini.lua
new file mode 100644
index 000000000..a791f4726
--- /dev/null
+++ b/tex/context/base/publ-ini.lua
@@ -0,0 +1,1222 @@
+if not modules then modules = { } end modules ['publ-ini'] = {
+ version = 1.001,
+ comment = "this module part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- use: for rest in gmatch(reference,"[^, ]+") do
+
+local next, rawget, type = next, rawget, type
+local match, gmatch, format, gsub = string.match, string.gmatch, string.format, string.gsub
+local concat, sort = table.concat, table.sort
+local utfsub = utf.sub
+local formatters = string.formatters
+local allocate = utilities.storage.allocate
+local settings_to_array = utilities.parsers.settings_to_array
+local sortedkeys, sortedhash = table.sortedkeys, table.sortedhash
+local lpegmatch = lpeg.match
+
+local report = logs.reporter("publications")
+local trace = false trackers.register("publications", function(v) trace = v end)
+
+local context = context
+
+local datasets = publications.datasets
+
+local variables = interfaces.variables
+
+local v_local = variables["local"]
+local v_global = variables["global"]
+
+local v_force = variables.force
+local v_standard = variables.standard
+local v_start = variables.start
+local v_none = variables.none
+local v_left = variables.left
+local v_right = variables.right
+local v_middle = variables.middle
+local v_inbetween = variables.inbetween
+
+local v_short = variables.short
+local v_cite = variables.cite
+local v_default = variables.default
+local v_reference = variables.default
+local v_dataset = variables.dataset
+
+local numbertochar = converters.characters
+
+local logsnewline = logs.newline
+local logspushtarget = logs.pushtarget
+local logspoptarget = logs.poptarget
+local csname_id = token.csname_id
+
+statistics.register("publications load time", function()
+ local publicationsstats = publications.statistics
+ local nofbytes = publicationsstats.nofbytes
+ if nofbytes > 0 then
+ return string.format("%s seconds, %s bytes, %s definitions, %s shortcuts",
+ statistics.elapsedtime(publications),nofbytes,publicationsstats.nofdefinitions,publicationsstats.nofshortcuts)
+ else
+ return nil
+ end
+end)
+
+luatex.registerstopactions(function()
+ logspushtarget("logfile")
+ logsnewline()
+ report("start used btx commands")
+ logsnewline()
+ local undefined = csname_id("undefined*crap")
+ for name, dataset in sortedhash(datasets) do
+ for command, n in sortedhash(dataset.commands) do
+ local c = csname_id(command)
+ if c and c ~= undefined then
+ report("%-20s %-20s % 5i %s",name,command,n,"known")
+ else
+ local u = csname_id(utf.upper(command))
+ if u and u ~= undefined then
+ report("%-20s %-20s % 5i %s",name,command,n,"KNOWN")
+ else
+ report("%-20s %-20s % 5i %s",name,command,n,"unknown")
+ end
+ end
+ end
+ end
+ logsnewline()
+ report("stop used btxcommands")
+ logsnewline()
+ logspoptarget()
+end)
+
+-- multipass, we need to sort because hashing is random per run and not per
+-- version (not the best changed feature of lua)
+
+local collected = allocate()
+local tobesaved = allocate()
+
+-- we use a dedicated (and efficient, as it knows what it deals with) serializer,
+-- also because we need to ignore the 'details' field
+
+local function serialize(t)
+ local f_key_table = formatters[" [%q] = {"]
+ local f_key_string = formatters[" %s = %q,"]
+ local r = { "return {" }
+ local m = 1
+ for tag, entry in sortedhash(t) do
+ m = m + 1
+ r[m] = f_key_table(tag)
+ local s = sortedkeys(entry)
+ for i=1,#s do
+ local k = s[i]
+ -- if k ~= "details" then
+ m = m + 1
+ r[m] = f_key_string(k,entry[k])
+ -- end
+ end
+ m = m + 1
+ r[m] = " },"
+ end
+ r[m] = "}"
+ return concat(r,"\n")
+end
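+
+-- A rough sketch of what the serializer is expected to produce (the tag and
+-- fields below are made up, just for illustration):
+--
+-- serialize { ["knuth1984"] = { category = "book", year = "1984" } }
+--
+-- return {
+--  ["knuth1984"] = {
+--   category = "book",
+--   year = "1984",
+--  },
+-- }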
+
+local function finalizer()
+ local prefix = tex.jobname -- or environment.jobname
+ local setnames = sortedkeys(datasets)
+ for i=1,#setnames do
+ local name = setnames[i]
+ local dataset = datasets[name]
+ local userdata = dataset.userdata
+ local checksum = nil
+ local username = file.addsuffix(file.robustname(formatters["%s-btx-%s"](prefix,name)),"lua")
+ if userdata and next(userdata) then
+ if job.passes.first then
+ local newdata = serialize(userdata)
+ checksum = md5.HEX(newdata)
+ io.savedata(username,newdata)
+ end
+ else
+ os.remove(username)
+ username = nil
+ end
+ local loaded = dataset.loaded
+ local sources = dataset.sources
+ local used = { }
+ for i=1,#sources do
+ local source = sources[i]
+ if loaded[source.filename] ~= "previous" then -- or loaded[source.filename] == "current"
+ used[#used+1] = source
+ end
+ end
+ tobesaved[name] = {
+ usersource = {
+ filename = username,
+ checksum = checksum,
+ },
+ datasources = used,
+ }
+ end
+end
+
+local function initializer()
+ statistics.starttiming(publications)
+collected = publications.collected or collected -- for the moment as we load runtime
+ for name, state in next, collected do
+ local dataset = datasets[name]
+ local datasources = state.datasources
+ local usersource = state.usersource
+ if datasources then
+ for i=1,#datasources do
+ local filename = datasources[i].filename
+ publications.load(dataset,filename,"previous")
+ end
+ end
+ if usersource then
+ dataset.userdata = table.load(usersource.filename) or { }
+ end
+ end
+ statistics.stoptiming(publications)
+ function initializer() end -- will go, for now, runtime loaded
+end
+
+job.register('publications.collected',tobesaved,initializer,finalizer)
+
+if not publications.authors then
+ initializer() -- for now, runtime loaded
+end
+
+-- basic access
+
+local function getfield(dataset,tag,name)
+ local d = datasets[dataset].luadata[tag]
+ return d and d[name]
+end
+
+local function getdetail(dataset,tag,name)
+ local d = datasets[dataset].details[tag]
+ return d and d[name]
+end
+
+-- basic loading
+
+function commands.usebtxdataset(name,filename)
+ publications.load(datasets[name],filename,"current")
+end
+
+function commands.convertbtxdatasettoxml(name,nice)
+ publications.converttoxml(datasets[name],nice)
+end
+
+-- enhancing
+
+local splitauthorstring = publications.authors.splitstring
+
+local pagessplitter = lpeg.splitat(lpeg.P("-")^1)
+
+-- maybe not redo when already done
+
+function publications.enhance(dataset) -- for the moment split runs (maybe publications.enhancers)
+ statistics.starttiming(publications)
+ if type(dataset) == "string" then
+ dataset = datasets[dataset]
+ end
+ local luadata = dataset.luadata
+ local details = dataset.details
+ -- author, editor
+ for tag, entry in next, luadata do
+ local author = entry.author
+ local editor = entry.editor
+ details[tag] = {
+ author = author and splitauthorstring(author),
+ editor = editor and splitauthorstring(editor),
+ }
+ end
+ -- short
+ local shorts = { }
+ for tag, entry in next, luadata do
+ local author = details[tag].author
+ if author then
+ -- number depends on sort order
+ local t = { }
+ if #author == 0 then
+ -- what
+ else
+ local n = #author == 1 and 3 or 1
+ for i=1,#author do
+ local surnames = author[i].surnames
+ if not surnames or #surnames == 0 then
+ -- error
+ else
+ t[#t+1] = utfsub(surnames[1],1,n)
+ end
+ end
+ end
+ local year = tonumber(entry.year) or 0
+ local short = formatters["%t%02i"](t,math.mod(year,100))
+ local s = shorts[short]
+ if not s then
+ shorts[short] = tag
+ elseif type(s) == "string" then
+ shorts[short] = { s, tag }
+ else
+ s[#s+1] = tag
+ end
+ else
+ --
+ end
+ end
+ for short, tags in next, shorts do
+ if type(tags) == "table" then
+ table.sort(tags)
+ for i=1,#tags do
+ details[tags[i]].short = short .. numbertochar(i)
+ end
+ else
+ details[tags].short = short
+ end
+ end
+ -- pages
+ for tag, entry in next, luadata do
+ local pages = entry.pages
+ if pages then
+ local first, last = lpegmatch(pagessplitter,pages)
+ details[tag].pages = first and last and { first, last } or pages
+ end
+ end
+ statistics.stoptiming(publications)
+end
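+
+-- A sketch of the derived details (tags and values are made up): a single
+-- author contributes three letters of the surname, multiple authors one letter
+-- each, followed by the year modulo 100; clashing shorts get a trailing letter;
+-- a pages field like "123-135" is split into a pair.
+--
+-- publications.enhance("standard")
+--
+-- datasets.standard.details["knuth1984"].short  -- "Knu84" (single author: 3 letters + year)
+-- datasets.standard.details["hh-tk-2010"].short -- "HH10"  (multiple authors: 1 letter each)
+-- datasets.standard.details["knuth1984"].pages  -- { "123", "135" } (split on the dash)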
+
+function commands.addbtxentry(name,settings,content)
+ local dataset = datasets[name]
+ if dataset then
+ publications.addtexentry(dataset,settings,content)
+ end
+end
+
+function commands.setbtxdataset(name)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ context(name)
+ else
+ report("unknown dataset %a",name)
+ end
+end
+
+function commands.setbtxentry(name,tag)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ if dataset.luadata[tag] then
+ context(tag)
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+end
+
+-- rendering of fields
+
+function commands.btxflush(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local fields = dataset.luadata[tag]
+ if fields then
+ local value = fields[field]
+ if type(value) == "string" then
+ context(value)
+ return
+ end
+ local details = dataset.details[tag]
+ if details then
+ local value = details[field]
+ if type(value) == "string" then
+ context(value)
+ return
+ end
+ end
+ report("unknown field %a of tag %a in dataset %a",field,tag,name)
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+end
+
+function commands.btxdetail(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local details = dataset.details[tag]
+ if details then
+ local value = details[field]
+ if type(value) == "string" then
+ context(value)
+ else
+ report("unknown detail %a of tag %a in dataset %a",field,tag,name)
+ end
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+end
+
+function commands.btxfield(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local fields = dataset.luadata[tag]
+ if fields then
+ local value = fields[field]
+ if type(value) == "string" then
+ context(value)
+ else
+ report("unknown field %a of tag %a in dataset %a",field,tag,name)
+ end
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+end
+
+-- testing: to be sped up with a testcase
+
+function commands.btxdoifelse(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local data = dataset.luadata[tag]
+ local value = data and data[field]
+ if value and value ~= "" then
+ context.firstoftwoarguments()
+ return
+ end
+ end
+ context.secondoftwoarguments()
+end
+
+function commands.btxdoif(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local data = dataset.luadata[tag]
+ local value = data and data[field]
+ if value and value ~= "" then
+ context.firstofoneargument()
+ return
+ end
+ end
+ context.gobbleoneargument()
+end
+
+function commands.btxdoifnot(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local data = dataset.luadata[tag]
+ local value = data and data[field]
+ if value and value ~= "" then
+ context.gobbleoneargument()
+ return
+ end
+ end
+ context.firstofoneargument()
+end
+
+-- -- alternative approach: keep data at the tex end
+
+function publications.listconcat(t)
+ local n = #t
+ if n > 0 then
+ context(t[1])
+ if n > 1 then
+ if n > 2 then
+ for i=2,n-1 do
+ context.btxlistparameter("sep")
+ context(t[i])
+ end
+ context.btxlistparameter("finalsep")
+ else
+ context.btxlistparameter("lastsep")
+ end
+ context(t[n])
+ end
+ end
+end
+
+function publications.citeconcat(t)
+ local n = #t
+ if n > 0 then
+ context(t[1])
+ if n > 1 then
+ if n > 2 then
+ for i=2,n-1 do
+ context.btxcitevariantparameter("sep")
+ context(t[i])
+ end
+ context.btxcitevariantparameter("finalsep")
+ else
+ context.btxcitevariantparameter("lastsep")
+ end
+ context(t[n])
+ end
+ end
+end
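+
+-- For example (a sketch, assuming sep ", ", finalsep " and " and lastsep " and "):
+--
+-- publications.citeconcat { "a", "b", "c" } -- a, b and c
+-- publications.citeconcat { "a", "b" }      -- a and b
+-- publications.citeconcat { "a" }           -- a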
+
+function publications.singularorplural(singular,plural)
+ if lastconcatsize and lastconcatsize > 1 then
+ context(plural)
+ else
+ context(singular)
+ end
+end
+
+function commands.makebibauthorlist(settings)
+ if not settings then
+ return
+ end
+ local dataset = datasets[settings.dataset]
+ if not dataset or dataset == "" then
+ return
+ end
+ local tag = settings.tag
+ if not tag or tag == "" then
+ return
+ end
+ local asked = settings_to_array(tag)
+ if #asked == 0 then
+ return
+ end
+ local compress = settings.compress
+ local interaction = settings.interaction == v_start
+ local limit = tonumber(settings.limit)
+ local found = { }
+ local hash = { }
+ local total = 0
+ local luadata = dataset.luadata
+ for i=1,#asked do
+ local tag = asked[i]
+ local data = luadata[tag]
+ if data then
+ local author = data.a or "Xxxxxxxxxx"
+ local year = data.y or "0000"
+ if not compress or not hash[author] then
+ local t = {
+ author = author,
+ name = name, -- first
+ year = { [year] = name },
+ }
+ total = total + 1
+ found[total] = t
+ hash[author] = t
+ else
+ hash[author].year[year] = name
+ end
+ end
+ end
+ for i=1,total do
+ local data = found[i]
+ local author = data.author
+ local year = table.keys(data.year)
+ table.sort(year)
+ if interaction then
+ for i=1,#year do
+ year[i] = string.formatters["\\bibmaybeinteractive{%s}{%s}"](data.year[year[i]],year[i])
+ end
+ end
+ context.setvalue("currentbibyear",concat(year,","))
+ if author == "" then
+ context.setvalue("currentbibauthor","")
+ else -- needs checking
+ local authors = settings_to_array(author) -- {{}{}},{{}{}}
+ local nofauthors = #authors
+ if nofauthors == 1 then
+ if interaction then
+ author = string.formatters["\\bibmaybeinteractive{%s}{%s}"](data.name,author)
+ end
+ context.setvalue("currentbibauthor",author)
+ else
+ limit = limit or nofauthors
+ if interaction then
+ for i=1,#authors do
+ authors[i] = string.formatters["\\bibmaybeinteractive{%s}{%s}"](data.name,authors[i])
+ end
+ end
+ if limit == 1 then
+ context.setvalue("currentbibauthor",authors[1] .. "\\bibalternative{otherstext}")
+ elseif limit == 2 and nofauthors == 2 then
+ context.setvalue("currentbibauthor",concat(authors,"\\bibalternative{andtext}"))
+ else
+ for i=1,limit-1 do
+ authors[i] = authors[i] .. "\\bibalternative{namesep}"
+ end
+ if limit < nofauthors then
+ authors[limit+1] = "\\bibalternative{otherstext}"
+ context.setvalue("currentbibauthor",concat(authors,"",1,limit+1))
+ else
+ authors[limit-1] = authors[limit-1] .. "\\bibalternative{andtext}"
+ context.setvalue("currentbibauthor",concat(authors))
+ end
+ end
+ end
+ end
+ -- the following commands use currentbibauthor and currentbibyear
+ if i == 1 then
+ context.ixfirstcommand()
+ elseif i == total then
+ context.ixlastcommand()
+ else
+ context.ixsecondcommand()
+ end
+ end
+end
+
+local patterns = { "publ-imp-%s.mkiv", "publ-imp-%s.tex" }
+
+local function failure(name)
+ report("unknown library %a",name)
+end
+
+local function action(name,foundname)
+ context.input(foundname)
+end
+
+function commands.loadbtxdefinitionfile(name) -- a more specific name
+ commands.uselibrary {
+ name = gsub(name,"^publ%-",""),
+ patterns = patterns,
+ action = action,
+ failure = failure,
+ onlyonce = false,
+ }
+end
+
+-- lists:
+
+publications.lists = publications.lists or { }
+local lists = publications.lists
+
+local context = context
+local structures = structures
+
+local references = structures.references
+local sections = structures.sections
+
+-- per rendering
+
+local renderings = { } --- per dataset
+
+table.setmetatableindex(renderings,function(t,k)
+ local v = {
+ list = { },
+ done = { },
+ alldone = { },
+ used = { },
+ registered = { },
+ ordered = { },
+ shorts = { },
+ method = v_none,
+ currentindex = 0,
+ }
+ t[k] = v
+ return v
+end)
+
+-- why shorts vs tags: only for sorting
+
+function lists.register(dataset,tag,short)
+ local r = renderings[dataset]
+ if not short or short == "" then
+ short = tag
+ end
+ if trace then
+ report("registering publication entry %a with shortcut %a",tag,short)
+ end
+ local top = #r.registered + 1
+ -- do we really need these
+ r.registered[top] = tag
+ r.ordered [tag] = top
+ r.shorts [tag] = short
+end
+
+function lists.nofregistered(dataset)
+ return #renderings[dataset].registered
+end
+
+function lists.setmethod(dataset,method)
+ local r = renderings[dataset]
+ r.method = method or v_none
+ r.list = { }
+ r.done = { }
+end
+
+function lists.collectentries(specification)
+ local dataset = specification.btxdataset
+ if not dataset then
+ return
+ end
+ local rendering = renderings[dataset]
+ local method = rendering.method
+ if method == v_none then
+ return
+ end
+-- method=v_local --------------------
+ local result = structures.lists.filter(specification)
+ lists.result = result
+ local section = sections.currentid()
+ local list = rendering.list
+ local done = rendering.done
+ local alldone = rendering.alldone
+ if method == v_local then
+ for listindex=1,#result do
+ local r = result[listindex]
+ local u = r.userdata
+ if u and u.btxset == dataset then
+ local tag = u.btxref
+ if tag and done[tag] ~= section then
+ done[tag] = section
+ alldone[tag] = true
+ list[#list+1] = { tag, listindex }
+ end
+ end
+ end
+ elseif method == v_global then
+ for listindex=1,#result do
+ local r = result[listindex]
+ local u = r.userdata
+ if u and u.btxset == dataset then
+ local tag = u.btxref
+ if tag and not alldone[tag] and done[tag] ~= section then
+ done[tag] = section
+ alldone[tag] = true
+ list[#list+1] = { tag, listindex }
+ end
+ end
+ end
+ elseif method == v_force then
+ -- only for checking, can have duplicates, todo: collapse page numbers, although
+ -- we then also need deferred writes
+ for listindex=1,#result do
+ local r = result[listindex]
+ local u = r.userdata
+ if u and u.btxset == dataset then
+ local tag = u.btxref
+ if tag then
+ list[#list+1] = { tag, listindex }
+ end
+ end
+ end
+ end
+end
+
+lists.sorters = {
+ [v_short] = function(dataset,rendering,list)
+ local shorts = rendering.shorts
+ return function(a,b)
+ local aa, bb = a and a[1], b and b[1]
+ if aa and bb then
+ aa, bb = shorts[aa], shorts[bb]
+ return aa and bb and aa < bb
+ end
+ return false
+ end
+ end,
+ [v_reference] = function(dataset,rendering,list)
+ return function(a,b)
+ local aa, bb = a and a[1], b and b[1]
+ if aa and bb then
+ return aa and bb and aa < bb
+ end
+ return false
+ end
+ end,
+ [v_dataset] = function(dataset,rendering,list)
+ return function(a,b)
+ local aa, bb = a and a[1], b and b[1]
+ if aa and bb then
+ aa, bb = list[aa].index or 0, list[bb].index or 0
+ return aa and bb and aa < bb
+ end
+ return false
+ end
+ end,
+ [v_default] = function(dataset,rendering,list) -- not really needed
+ local ordered = rendering.ordered
+ return function(a,b)
+ local aa, bb = a and a[1], b and b[1]
+ if aa and bb then
+ aa, bb = ordered[aa], ordered[bb]
+ return aa and bb and aa < bb
+ end
+ return false
+ end
+ end,
+}
+
+function lists.flushentries(dataset,sortvariant)
+ local rendering = renderings[dataset]
+ local list = rendering.list
+ local compare = lists.sorters[sortvariant] or lists.sorters[v_default]
+ compare = type(compare) == "function" and compare(dataset,rendering,list)
+ if compare then
+ sort(list,compare)
+ end
+ for i=1,#list do
+ context.setvalue("currentbtxindex",i)
+ context.btxhandlelistentry(list[i][1]) -- we can pass i here too ... more efficient to avoid the setvalue
+ end
+end
+
+function lists.fetchentries(dataset)
+ local list = renderings[dataset].list
+ for i=1,#list do
+ context.setvalue("currentbtxindex",i)
+ context.btxchecklistentry(list[i][1])
+ end
+end
+
+function lists.filterall(dataset)
+ local r = renderings[dataset]
+ local list = r.list
+ local registered = r.registered
+ for i=1,#registered do
+ list[i] = { registered[i], i }
+ end
+end
+
+function lists.registerplaced(dataset,tag)
+ renderings[dataset].used[tag] = true
+end
+
+function lists.doifalreadyplaced(dataset,tag)
+ commands.doifelse(renderings[dataset].used[tag])
+end
+
+-- we ask for <n>:tag but when we can't find it we go back
+-- to look for previous definitions, and when not found again
+-- we look forward
+
+local function compare(a,b)
+ local aa, bb = a and a[3], b and b[3]
+ return aa and bb and aa < bb
+end
+
+-- maybe hash subsets
+-- how efficient is this? old leftovers?
+
+-- rendering ?
+
+local f_reference = formatters["r:%s:%s:%s"] -- dataset, instance, tag
+local f_destination = formatters["d:%s:%s:%s"] -- dataset, instance, tag
+
+function lists.resolve(dataset,reference) -- maybe already feed it split
+ -- needs checking (the prefix in relation to components)
+ local subsets = nil
+ local block = tex.count.btxblock
+ local collected = references.collected
+ local prefix = nil -- todo: dataset ?
+ if prefix and prefix ~= "" then
+ subsets = { collected[prefix] or collected[""] }
+ else
+ local components = references.productdata.components
+ local subset = collected[""]
+ if subset then
+ subsets = { subset }
+ else
+ subsets = { }
+ end
+ for i=1,#components do
+ local subset = collected[components[i]]
+ if subset then
+ subsets[#subsets+1] = subset
+ end
+ end
+ end
+ if #subsets > 0 then
+ local result, nofresult, done = { }, 0, { }
+ for i=1,#subsets do
+ local subset = subsets[i]
+ for rest in gmatch(reference,"[^, ]+") do
+ local blk, tag, found = block, nil, nil
+ if block then
+-- tag = blk .. ":" .. rest
+ tag = dataset .. ":" .. blk .. ":" .. rest
+ found = subset[tag]
+ if not found then
+ for i=block-1,1,-1 do
+ tag = i .. ":" .. rest
+ found = subset[tag]
+ if found then
+ blk = i
+ break
+ end
+ end
+ end
+ end
+ if not found then
+ blk = "*"
+ tag = dataset .. ":" .. blk .. ":" .. rest
+ found = subset[tag]
+ end
+ if found then
+ local current = tonumber(found.entries and found.entries.text) -- tonumber needed
+ if current and not done[current] then
+ nofresult = nofresult + 1
+ result[nofresult] = { blk, rest, current }
+ done[current] = true
+ end
+ end
+ end
+ end
+ local first, last, firsti, lasti, firstr, lastr
+ local collected, nofcollected = { }, 0
+ for i=1,nofresult do
+ local r = result[i]
+ local current = r[3]
+ if not first then
+ first, last, firsti, lasti, firstr, lastr = current, current, i, i, r, r
+ elseif current == last + 1 then
+ last, lasti, lastr = current, i, r
+ else
+ if last > first + 1 then
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = { firstr, lastr }
+ else
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = firstr
+ if last > first then
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = lastr
+ end
+ end
+ first, last, firsti, lasti, firstr, lastr = current, current, i, i, r, r
+ end
+ end
+ if first and last then
+ if last > first + 1 then
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = { firstr, lastr }
+ else
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = firstr
+ if last > first then
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = lastr
+ end
+ end
+ end
+ if nofcollected > 0 then
+ for i=1,nofcollected do
+ local c = collected[i]
+ if i == nofcollected then
+ context.btxlistvariantparameter("lastpubsep")
+ elseif i > 1 then
+ context.btxlistvariantparameter("pubsep")
+ end
+ if #c == 3 then -- a single entry (a range is a pair of entries)
+ context.btxdirectlink(f_reference(dataset,c[1],c[2]),c[3])
+ else
+ local f, l = c[1], c[2]
+ context.btxdirectlink(f_reference(dataset,f[1],f[2]),f[3])
+ context.endash() -- to do
+ context.btxdirectlink(f_reference(dataset,l[1],l[2]),l[3])
+ end
+ end
+ else
+ context("[btx error 1]")
+ end
+ else
+ context("[btx error 2]")
+ end
+end
+
+local done = { }
+
+function commands.btxreference(dataset,block,tag,data)
+ local ref = f_reference(dataset,block,tag)
+ if not done[ref] then
+ done[ref] = true
+ context.dodirectfullreference(ref,data)
+ end
+end
+
+local done = { }
+
+function commands.btxdestination(dataset,block,tag,data)
+ local ref = f_destination(dataset,block,tag)
+ if not done[ref] then
+ done[ref] = true
+ context.dodirectfullreference(ref,data)
+ end
+end
+
+commands.btxsetlistmethod = lists.setmethod
+commands.btxresolvelistreference = lists.resolve
+commands.btxregisterlistentry = lists.registerplaced
+commands.btxaddtolist = lists.addentry
+commands.btxcollectlistentries = lists.collectentries
+commands.btxfetchlistentries = lists.fetchentries
+commands.btxflushlistentries = lists.flushentries
+commands.btxdoifelselistentryplaced = lists.doifalreadyplaced
+
+local citevariants = { }
+publications.citevariants = citevariants
+
+-- helper
+
+local compare = sorters.comparers.basic -- (a,b)
+local strip = sorters.strip
+local splitter = sorters.splitters.utf
+
+local function sortedtags(dataset,list,sorttype)
+ local luadata = datasets[dataset].luadata
+ local valid = { }
+ for i=1,#list do
+ local tag = list[i]
+ local entry = luadata[tag]
+ if entry then
+ local key = entry[sorttype]
+ if key then
+ valid[#valid+1] = {
+ tag = tag,
+ split = splitter(strip(key))
+ }
+ else
+ end
+ end
+ end
+ if #valid == 0 or #valid ~= #list then
+ return list
+ else
+ table.sort(valid,sorters.comparers.basic)
+ for i=1,#valid do
+ valid[i] = valid[i].tag
+ end
+ return valid
+ end
+end
+
+-- todo: standard : current
+
+local splitter = lpeg.splitat("::")
+
+function commands.btxhandlecite(dataset,tag,mark,variant,sorttype,setup) -- variant for tracing
+ local prefix, rest = lpegmatch(splitter,tag)
+ if rest then
+ dataset = prefix
+ else
+ rest = tag
+ end
+ context.setvalue("currentbtxdataset",dataset)
+ local tags = settings_to_array(rest)
+ if #tags > 0 then
+ if sorttype and sorttype ~= "" then
+ tags = sortedtags(dataset,tags,sorttype)
+ end
+ context.btxcitevariantparameter(v_left)
+ for i=1,#tags do
+ local tag = tags[i]
+ context.setvalue("currentbtxtag",tag)
+ if i > 1 then
+ context.btxcitevariantparameter(v_middle)
+ end
+ if mark then
+ context.dobtxmarkcitation(dataset,tag)
+ end
+ context.formatted.directsetup(setup) -- cite can become alternative
+ end
+ context.btxcitevariantparameter(v_right)
+ else
+ -- error
+ end
+end
+
+function commands.btxhandlenocite(dataset,tag)
+ local prefix, rest = lpegmatch(splitter,tag)
+ if rest then
+ dataset = prefix
+ else
+ rest = tag
+ end
+ context.setvalue("currentbtxdataset",dataset)
+ local tags = settings_to_array(rest)
+ for i=1,#tags do
+ context.dobtxmarkcitation(dataset,tags[i])
+ end
+end
+
+function commands.btxcitevariant(dataset,block,tags,variant)
+ local action = citevariants[variant] or citevariants.default
+ if action then
+ action(dataset,tags,variant)
+ end
+end
+
+function citevariants.default(dataset,tags,variant)
+ local content = getfield(dataset,tags,variant)
+ if content then
+ context(content)
+ end
+end
+
+-- todo : sort
+-- todo : choose between publications or commands namespace
+-- todo : use details.author
+-- todo : sort details.author
+
+local function collectauthoryears(dataset,tags)
+ local luadata = datasets[dataset].luadata
+ local list = settings_to_array(tags)
+ local found = { }
+ local result = { }
+ local order = { }
+ for i=1,#list do
+ local tag = list[i]
+ local entry = luadata[tag]
+ if entry then
+ local year = entry.year
+ local author = entry.author
+ if author and year then
+ local a = found[author]
+ if not a then
+ a = { }
+ found[author] = a
+ order[#order+1] = author
+ end
+ local y = a[year]
+ if not y then
+ y = { }
+ a[year] = y
+ end
+ y[#y+1] = tag
+ end
+ end
+ end
+ -- found = { author = { year_1 = { e1, e2, e3 } } }
+ for i=1,#order do
+ local author = order[i]
+ local years = found[author]
+ local yrs = { }
+ for year, entries in next, years do
+ if subyears then
+ -- -- add letters to all entries of an author and if so shouldn't
+ -- -- we tag all years of an author as soon as we do this?
+ -- if #entries > 1 then
+ -- for i=1,#years do
+ -- local entry = years[i]
+ -- -- years[i] = year .. string.char(i + string.byte("0") - 1)
+ -- end
+ -- end
+ else
+ yrs[#yrs+1] = year
+ end
+ end
+ result[i] = { author = author, years = yrs }
+ end
+ return result, order
+end
+
+-- (name, name and name) .. how names? how sorted?
+-- todo: we loop at the tex end .. why not here
+-- \cite[{hh,afo},kvm]
+
+-- maybe we will move this to tex anyway
+
+function citevariants.author(dataset,tags)
+ local result, order = collectauthoryears(dataset,tags,method,what) -- we can have a collectauthors
+ publications.citeconcat(order)
+end
+
+local function authorandyear(dataset,tags,formatter)
+ local result, order = collectauthoryears(dataset,tags,method,what) -- we can have a collectauthors
+ for i=1,#result do
+ local r = result[i]
+ order[i] = formatter(r.author,r.years) -- reuse order
+ end
+ publications.citeconcat(order)
+end
+
+function citevariants.authoryear(dataset,tags)
+ authorandyear(dataset,tags,formatters["%s (%, t)"])
+end
+
+function citevariants.authoryears(dataset,tags)
+ authorandyear(dataset,tags,formatters["%s, %, t"])
+end
+
+function citevariants.authornum(dataset,tags)
+ local result, order = collectauthoryears(dataset,tags,method,what) -- we can have a collectauthors
+ publications.citeconcat(order)
+ context.btxcitevariantparameter(v_inbetween)
+ lists.resolve(dataset,tags) -- left/right ?
+end
+
+function citevariants.short(dataset,tags)
+ local short = getdetail(dataset,tags,"short")
+ if short then
+ context(short)
+ end
+end
+
+function citevariants.page(dataset,tags)
+ local pages = getdetail(dataset,tags,"pages")
+ if not pages then
+ -- nothing
+ elseif type(pages) == "table" then
+ context(pages[1])
+ context.btxcitevariantparameter(v_inbetween)
+ context(pages[2])
+ else
+ context(pages)
+ end
+end
+
+function citevariants.num(dataset,tags)
+ lists.resolve(dataset,tags)
+end
+
+function citevariants.serial(dataset,tags) -- the traditional fieldname is "serial" and not "index"
+ local index = getfield(dataset,tags,"index")
+ if index then
+ context(index)
+ end
+end
+
+-- List variants
+
+local listvariants = { }
+publications.listvariants = listvariants
+
+-- function commands.btxhandlelist(dataset,block,tag,variant,setup)
+-- if sorttype and sorttype ~= "" then
+-- tags = sortedtags(dataset,tags,sorttype)
+-- end
+-- context.setvalue("currentbtxtag",tag)
+-- context.btxlistvariantparameter(v_left)
+-- context.formatted.directsetup(setup)
+-- context.btxlistvariantparameter(v_right)
+-- end
+
+function commands.btxlistvariant(dataset,block,tags,variant,listindex)
+ local action = listvariants[variant] or listvariants.default
+ if action then
+ action(dataset,block,tags,variant,tonumber(listindex) or 0)
+ end
+end
+
+function listvariants.default(dataset,block,tags,variant)
+ context("?")
+end
+
+function listvariants.num(dataset,block,tags,variant,listindex)
+ context.btxdirectlink(f_destination(dataset,block,tags),listindex) -- not okay yet
+end
+
+function listvariants.short(dataset,block,tags,variant,listindex)
+ local short = getdetail(dataset,tags,variant)
+ if short then
+ context(short)
+ end
+end
diff --git a/tex/context/base/publ-ini.mkiv b/tex/context/base/publ-ini.mkiv
new file mode 100644
index 000000000..dafb0dca3
--- /dev/null
+++ b/tex/context/base/publ-ini.mkiv
@@ -0,0 +1,849 @@
+%D \module
+%D [ file=publ-ini,
+%D version=2013.05.12,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Initialization,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% todo: we cannot use 'default' as this wipes metadata names (maybe no longer do that)
+% todo: \v!cite => \s!cite
+% todo: interface with (ml)bibtex (export -> call -> import)
+
+% \definecolor[btx:field] [darkred]
+% \definecolor[btx:crossref][darkblue]
+% \definecolor[btx:key] [darkgreen]
+% \definecolor[btx:todo] [darkyellow]
+
+%D We operate on several axes:
+%D
+%D \startitemize[packed]
+%D \startitem we can have several databases (or combinations) \stopitem
+%D \startitem we can add entries to them if needed (coded in tex) \stopitem
+%D \startitem we can have several lists each using one of the databases \stopitem
+%D \startitem we can render each list or citation independently \stopitem
+%D \stopitemize
+%D
+%D We assume that the rendering of a list entry is consistent in a document,
+%D although one can redefine properties if needed. Adding more granularity would
+%D complicate the user interface beyond comprehension.
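+%D
+%D A minimal sketch of how these axes combine (the filename and tag are
+%D placeholders):
+%D
+%D \starttyping
+%D \usebtxdataset[standard][mybibs.bib]
+%D \cite[authoryear][knuth1984]
+%D \completebtxrendering[standard][alternative=apa]
+%D \stoptyping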
+
+\writestatus{loading}{ConTeXt Publication Support / Initialization}
+
+\registerctxluafile{publ-dat}{1.001}
+\registerctxluafile{publ-aut}{1.001}
+\registerctxluafile{publ-usr}{1.001}
+\registerctxluafile{publ-ini}{1.001}
+\registerctxluafile{publ-oth}{1.001} % this could become an option
+
+\unprotect
+
+\installcorenamespace {btxdataset}
+\installcorenamespace {btxlistvariant}
+\installcorenamespace {btxcitevariant}
+\installcorenamespace {btxrendering}
+\installcorenamespace {btxcommand}
+\installcorenamespace {btxnumbering}
+
+\installcommandhandler \??btxdataset {btxdataset} \??btxdataset
+\installcommandhandler \??btxlistvariant {btxlistvariant} \??btxlistvariant
+\installcommandhandler \??btxcitevariant {btxcitevariant} \??btxcitevariant
+\installcommandhandler \??btxrendering {btxrendering} \??btxrendering
+
+\unexpanded\def\usebtxdataset
+ {\dodoubleargument\publ_use_dataset}
+
+\def\publ_use_dataset[#1][#2]%
+ {\ifsecondargument
+ \ctxcommand{usebtxdataset("#1","#2")}%
+ \else
+ \ctxcommand{usebtxdataset("\v!standard","#1")}%
+ \fi}
+
+\definebtxdataset
+ [\v!standard]
+
+% \usebtxdataset
+% [standard]
+% [mybibs.bib]
+
+\unexpanded\def\startpublication
+ {\dodoubleempty\publ_set_publication}
+
+\let\stoppublication\relax
+
+\def\publ_set_publication[#1][#2]%
+ {\begingroup
+ \catcode\commentasciicode\othercatcode
+ \ifsecondargument
+ \expandafter\publ_set_publication_indeed
+ \else\iffirstargument
+ \doubleexpandafter\publ_set_publication_checked
+ \else
+ \doubleexpandafter\publ_set_publication_default
+ \fi\fi{#1}{#2}}
+
+\def\publ_set_publication_default#1#2%
+ {\publ_set_publication_indeed\v!standard{#1}}
+
+\def\publ_set_publication_checked#1#2%
+ {\doifassignmentelse{#1}
+ {\publ_set_publication_indeed\v!standard{#1}}
+ {\publ_set_publication_indeed{#1}{}}}
+
+\def\publ_set_publication_indeed#1#2#3\stoppublication
+ {\ctxcommand{addbtxentry("#1",\!!bs#2\!!es,\!!bs\detokenize{#3}\!!es)}%
+ \endgroup
+ \ignorespaces}
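+
+% A sketch of coding an entry at the \TEX\ end (the tag, category and field
+% syntax are only illustrative; the content is handled by publ-usr):
+%
+% \startpublication[standard][tag=knuth1984,category=book]
+%     ...
+% \stoppublication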
+
+% commands
+
+\unexpanded\def\btxcommand#1%
+ {\ifcsname\??btxcommand#1\endcsname
+ \expandafter\publ_command_yes
+ \else
+ \expandafter\publ_command_nop
+ \fi{#1}}
+
+\let\btxcmd\btxcommand
+
+\def\publ_command_yes#1%
+ {\csname\??btxcommand#1\endcsname}
+
+\def\publ_command_nop#1%
+ {\ifcsname#1\endcsname
+ \writestatus\m!publications{unknown command: #1, using built-in context variant #1}%
+ %\setuxvalue{\??btxcommand#1}{\expandafter\noexpand\csname#1\endcsname}%
+ \global\expandafter\let\csname\??btxcommand#1\expandafter\endcsname\csname#1\endcsname
+ \else\ifcsname\utfupper{#1}\endcsname
+ \writestatus\m!publications{unknown command: #1, using built-in context variant \utfupper{#1}}%
+ %\setuxvalue{\??btxcommand#1}{\expandafter\noexpand\csname\utfupper{#1}\endcsname}%
+ \global\expandafter\let\csname\??btxcommand#1\expandafter\endcsname\csname\utfupper{#1}\endcsname
+ \else
+ \writestatus\m!publications{unknown command: #1}%
+ \setugvalue{\??btxcommand#1}{\underbar{\tttf#1}}%
+ \fi\fi
+ \publ_command_yes{#1}}
+
+\unexpanded\def\definebtxcommand#1% {body} #1..#n{body}
+ {\setuvalue{\??btxcommand\strippedcsname#1}}%
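+
+% For instance (a sketch; \MyRevision is a made up database command), commands
+% found in entries can be mapped onto something meaningful:
+%
+% \definebtxcommand\TUB{TUGboat}
+% \definebtxcommand\MyRevision#1{revision #1}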
+
+% access
+
+\let\currentbtxtag \empty
+\let\currentbtxdataset\v!standard
+
+\unexpanded\def\setbtxdataset[#1]%
+ {\edef\currentbtxdataset{\ctxcommand{setbtxdataset("#1")}}}
+
+\unexpanded\def\setbtxentry[#1]%
+ {\edef\currentbtxtag{\ctxcommand{setbtxentry("\currentbtxdataset","#1")}}}
+
+% \let\btxsetdataset\setbtxdataset
+% \let\btxsetentry \setbtxentry
+
+\def\btxfield #1{\ctxcommand{btxfield("\currentbtxdataset","\currentbtxtag","#1")}}
+\def\btxdetail #1{\ctxcommand{btxdetail("\currentbtxdataset","\currentbtxtag","#1")}}
+\def\btxflush #1{\ctxcommand{btxflush("\currentbtxdataset","\currentbtxtag","#1")}}
+%def\btxrendering#1{\ctxcommand{btxrendering("\currentbtxdataset","\currentbtxtag","#1","\btxrenderingparameter\c!interaction")}}
+\def\btxdoifelse #1{\ctxcommand{btxdoifelse("\currentbtxdataset","\currentbtxtag","#1")}}
+\def\btxdoif #1{\ctxcommand{btxdoif("\currentbtxdataset","\currentbtxtag","#1")}}
+\def\btxdoifnot #1{\ctxcommand{btxdoifnot("\currentbtxdataset","\currentbtxtag","#1")}}
+
+\let\btxsetup \directsetup
+
+%D How complex will we go? Can we assume that e.g. an apa style will not be mixed
+%D with another one? I think this assumption is okay. For manuals we might want to
+%D mix but we can work around it.
+
+%D Rendering.
+
+\unexpanded\def\btxspace {\removeunwantedspaces\space}
+\unexpanded\def\btxperiod {\removeunwantedspaces.\space}
+\unexpanded\def\btxcomma {\removeunwantedspaces,\space}
+\unexpanded\def\btxlparent {\removeunwantedspaces\space(}
+\unexpanded\def\btxrparent {\removeunwantedspaces)\space}
+\unexpanded\def\btxlbracket{\removeunwantedspaces\space[}
+\unexpanded\def\btxrbracket{\removeunwantedspaces]\space}
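+
+% A sketch of how a definition file might use these accessors in a category
+% setup (the setup name and fields are only illustrative):
+%
+% \startsetups btx:apa:book
+%     \btxdoif {author} {\btxflushauthor{author}\btxperiod}
+%     \btxdoif {title}  {\btxfield{title}\btxperiod}
+%     \btxdoif {year}   {(\btxfield{year})}
+% \stopsetups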
+
+%D Rendering lists and citations.
+
+\newconditional\c_btx_trace
+
+\installtextracker
+ {btxrendering}
+ {\settrue \c_btx_trace}
+ {\setfalse\c_btx_trace}
+
+\unexpanded\def\startbtxrendering
+ {\begingroup
+ \dosingleempty\btx_start_rendering}
+
+\def\btx_start_rendering[#1]%
+ {\edef\currentbtxrendering{#1}}
+
+\unexpanded\def\stopbtxrendering
+ {\endgroup}
+
+\unexpanded\def\btxtodo#1%
+ {[#1]}
+
+%D Specific rendering definitions (like apa):
+
+\unexpanded\def\loadbtxdefinitionfile[#1]%
+ {\ctxcommand{loadbtxdefinitionfile("#1")}}
+
+%D Lists:
+
+\newdimen\d_publ_number_width
+\newdimen\d_publ_number_distance
+
+\ifdefined\btxblock \else \newcount\btxblock \fi \btxblock\plusone
+\ifdefined\btxcounter \else \newcount\btxcounter \fi
+
+\newtoks \everysetupbtxlistplacement % name will change
+\newtoks \everysetupbtxciteplacement % name will change
+
+\def\btxlistnumberbox
+ {\hbox \ifzeropt\d_publ_number_width\else to \d_publ_number_width\fi}
+
+% \def\publ_list_processor % bibref -> btx (old method, keep as reference)
+% {\ctxcommand{btxaddtolist("\currentbtxrendering",\currentlistindex,"btxref")}}
+
+\definelist
+ [btx]
+ [\c!before=,
+ %\c!inbetween=,
+ \c!after=]
+
+\appendtoks
+ \definelist
+ [btx:\currentbtxrendering]%
+ [btx]
+ \setuplist
+ [btx:\currentbtxrendering]%
+ [\c!state=\v!start]%
+ % \installstructurelistprocessor
+ % {\currentbtxrendering:userdata}%
+ % {\publ_list_processor}%
+\to \everydefinebtxrendering
+
+\unexpanded\def\btx_entry_inject
+ {\begingroup
+ \edef\currentbtxcategory{\btxfield{category}}%
+ \ignorespaces
+ \directsetup{btx:\currentbtxalternative:\currentbtxcategory}%
+ \removeunwantedspaces
+ \endgroup}
+
+\unexpanded\def\completebtxrendering{\dodoubleempty\publ_place_list_complete}
+\unexpanded\def\placebtxrendering {\dodoubleempty\publ_place_list_standard}
+
+\let\completelistofpublications\completebtxrendering
+\let\placelistofpublications \placebtxrendering
+
+\def\publ_place_list_check_criterium
+ {\edef\p_criterium{\btxrenderingparameter\c!criterium} % \v!cite will become \s!cite
+ \ifx\p_criterium\empty
+ \letlistparameter\c!criterium\v!previous
+ \else\ifx\p_criterium\v_cite
+ \letlistparameter\c!criterium\v!here
+ \else
+ \letlistparameter\c!criterium\v_cite
+ \fi\fi}
+
+\def\publ_place_list_complete[#1][#2]% title might become obsolete, just headtext
+ {\begingroup
+ \edef\currentbtxrendering{#1}%
+ \setupcurrentbtxrendering[#2]%
+ \edef\currentlist{btx:\currentbtxrendering}%
+ \publ_place_list_check_criterium
+ \edef\currentbtxrenderingtitle{\btxrenderingparameter\c!title}%
+ \ifx\currentbtxrenderingtitle\empty
+ \normalexpanded{\startnamedsection[\v!chapter][\c!reference=\currentbtxrendering,\c!title={\headtext{\currentbtxrendering}}]}%
+ \else
+ \normalexpanded{\startnamedsection[\v!chapter][\c!reference=\currentbtxrendering,\c!title={\currentbtxrenderingtitle}]}%
+ \fi
+ \publ_place_list_indeed
+ \stopnamedsection
+ \endgroup}
+
+\def\publ_place_list_standard[#1][#2]%
+ {\begingroup
+ \edef\currentbtxrendering{#1}%
+ \setupcurrentbtxrendering[#2]%
+ \edef\currentlist{btx:\currentbtxrendering}%
+ \publ_place_list_check_criterium
+ \publ_place_list_indeed
+ \endgroup}
+
+\newconditional\c_publ_place_all
+\newconditional\c_publ_place_register % to be interfaced
+\newconditional\c_publ_place_check % to be interfaced
+
+\appendtoks
+ \ifx\currentbtxcriterium\v!all % move this check to lua ... easier to test there anyway
+ \settrue\c_publ_place_all
+ \else
+ \setfalse\c_publ_place_all
+ \fi
+\to \everysetupbtxlistplacement
+
+\def\publ_place_list_indeed
+ {\startbtxrendering[\currentbtxrendering]%
+ \directsetup{\btxrenderingparameter\c!setups}%
+ % \determinelistcharacteristics[\currentbtxrendering]%
+ \edef\currentbtxalternative{\btxrenderingparameter\c!alternative}%
+ \edef\currentbtxdataset{\btxrenderingparameter\c!dataset}%
+ \edef\currentlist{btx:\currentbtxrendering}%
+ \the\everysetupbtxlistplacement
+ \forgetall
+ \ctxcommand{btxsetlistmethod("\currentbtxdataset","\btxrenderingparameter\c!method")}%
+ \startpacked[\v!blank]%
+ % here we just collect items
+ % \strc_lists_analyze
+ % {btx}%
+ % {\currentbtxcriterium}%
+ % {\namedlistparameter\currentbtxrendering\c!number}%
+ \ctxcommand{btxcollectlistentries {
+ names = "btx",
+ criterium = "\currentbtxcriterium",
+ number = "\namedlistparameter\currentbtxrendering\c!number",
+ btxdataset = "\currentbtxdataset",
+ }}%
+ % next we analyze the width
+ \ifx\btx_reference_inject_indeed\relax
+ \d_publ_number_width \zeropoint
+ \d_publ_number_distance\zeropoint
+ \else
+ \edef\p_width{\btxrenderingparameter\c!width}%
+ \ifx\p_width\v!auto
+ \scratchcounter\btxcounter
+ \setbox\scratchbox\vbox{\ctxcommand{btxfetchlistentries("\currentbtxdataset")}}%
+ \d_publ_number_width\wd\scratchbox
+ \global\btxcounter\scratchcounter
+ \else
+ \d_publ_number_width\p_width
+ \fi
+ \d_publ_number_distance\btxrenderingparameter\c!distance
+ \fi
+ \letlistparameter\c!width \d_publ_number_width
+ \letlistparameter\c!distance\d_publ_number_distance
+ % this actually typesets them
+ \letlistparameter\c!interaction\v!none
+ \ctxcommand{btxflushlistentries("\currentbtxdataset","\btxrenderingparameter\c!sorttype")}%
+ \stoppacked
+ \stopbtxrendering
+ \global\advance\btxblock\plusone}
+
+\def\currentbtxblock{\number\btxblock}
+
+\def\publ_place_list_entry_checked
+ {\ctxcommand{btxdoifelselistentryplaced("\currentbtxdataset","\currentbtxtag")}\donothing\publ_place_list_entry}
+
+\def\publ_place_list_entry_register
+ {\ctxcommand{btxregisterlistentry("\currentbtxdataset","\currentbtxtag")}}
+
+\unexpanded\def\btxhandlelistentry#1% called at the lua end
+ {\begingroup
+ \edef\currentbtxtag{#1}%
+ \ifconditional\c_publ_place_all
+ \publ_place_list_entry
+ \else\ifconditional\c_publ_place_check
+ \publ_place_list_entry_checked
+ \else
+ \publ_place_list_entry
+ \fi\fi
+ \endgroup}
+
+\unexpanded\def\publ_place_list_entry
+ {%\dontleavehmode
+ %\begstrut
+ \global\advance\btxcounter\plusone
+ \ifconditional\c_publ_place_register
+ \publ_place_list_entry_register
+ \fi
+ \edef\currentlist {btx:\currentbtxrendering}%
+ \let\currentlistentrynumber \btx_reference_inject
+ \let\currentlistentrytitle \btx_entry_inject
+ \let\currentlistentrypagenumber\empty
+ \strc_lists_apply_renderingsetup
+ }%\endstrut}
+
+\unexpanded\def\btxchecklistentry#1% called at the lua end
+ {\begingroup
+ \edef\currentbtxtag{#1}%
+ \ifx\currentbtxcriterium\v!all % move this check to lua ... easier to test there anyway
+ \publ_check_list_entry
+ \else
+ \ctxcommand{btxdoifelselistentryplaced("\currentbtxdataset","\currentbtxtag")}\donothing\publ_check_list_entry
+ \fi
+ \endgroup}
+
+\unexpanded\def\publ_check_list_entry
+ {\global\advance\btxcounter\plusone
+ % todo, switch to font
+ \hbox{\btx_reference_inject}%
+ \par}
+
+\unexpanded\def\btx_reference_inject % we can use a faster \reference
+ {\btxlistnumberbox\bgroup
+% \btxlistvariantparameter\c!left
+% {\tttf d>\currentbtxdataset:\currentbtxblock:\currentbtxtag}%
+ \ctxcommand{btxdestination("\currentbtxdataset","\currentbtxblock","\currentbtxtag","\number\btxcounter")}%
+ \btx_reference_inject_indeed
+% \btxlistvariantparameter\c!right
+ \egroup}
+
+\setuvalue{\??btxnumbering\v!short }{\btxlistvariant{short}} % these will be setups
+\setuvalue{\??btxnumbering\v!bib }{\btxlistvariant{num}} % these will be setups
+\setuvalue{\??btxnumbering\s!unknown}{\btxlistvariant{num}} % these will be setups
+\setuvalue{\??btxnumbering\v!yes }{\btxlistvariant{num}} % these will be setups
+
+\appendtoks
+ \edef\p_btx_numbering{\btxrenderingparameter\c!numbering}%
+ \letlistparameter\c!numbercommand\firstofoneargument % for the moment, no doubling needed
+ \ifx\p_btx_numbering\v!no
+ \letlistparameter\c!textcommand\outdented % needed? we can use titlealign
+ \letlistparameter\c!symbol \v!none
+ \letlistparameter\c!aligntitle \v!yes
+ \let\btx_reference_inject_indeed\relax
+ \else
+ \ifcsname\??btxnumbering\p_btx_numbering\endcsname \else
+ \let\p_btx_numbering\s!unknown
+ \fi
+ \letlistparameter\c!headnumber\v!always
+ \expandafter\let\expandafter\btx_reference_inject_indeed\csname\??btxnumbering\p_btx_numbering\endcsname
+ \fi
+\to \everysetupbtxlistplacement
+
+\appendtoks
+ \edef\currentbtxcriterium{\btxrenderingparameter\c!criterium}%
+\to \everysetupbtxlistplacement
+
+\unexpanded\def\btxflushauthor
+ {\doifnextoptionalelse\btx_flush_author_yes\btx_flush_author_nop}
+
+\def\btx_flush_author_yes[#1]{\btx_flush_author{#1}}
+\def\btx_flush_author_nop {\btx_flush_author{\btxlistvariantparameter\c!author}}
+
+\unexpanded\def\btx_flush_author#1#2%
+ {\edef\currentbtxfield{#2}%
+ \let\currentbtxlistvariant\currentbtxfield
+ \ctxcommand{btxauthor("\currentbtxdataset","\currentbtxtag","\currentbtxfield",{
+ combiner = "#1",
+ etallimit = \number\btxlistvariantparameter\c!etallimit,
+ etaldisplay = \number\btxlistvariantparameter\c!etaldisplay,
+ })}}
+
+\unexpanded\def\btxflushauthornormal {\btx_flush_author{normal}} % #1
+\unexpanded\def\btxflushauthornormalshort {\btx_flush_author{normalshort}} % #1
+\unexpanded\def\btxflushauthorinverted {\btx_flush_author{inverted}} % #1
+\unexpanded\def\btxflushauthorinvertedshort{\btx_flush_author{invertedshort}} % #1
+
+% \btxflushauthor{author}
+% \btxflushauthor{artauthor}
+% \btxflushauthor{editor}
+%
+% \btxflushauthor[normal]{author}
+% \btxflushauthor[normalshort]{author}
+% \btxflushauthor[inverted]{author}
+% \btxflushauthor[invertedshort]{author}
+
+% Interaction
+
+\newconditional\btxinteractive
+
+\unexpanded\def\btxdoifelseinteraction
+ {\iflocation
+ \edef\p_interaction{\btxcitevariantparameter\c!interaction}%
+ \ifx\p_interaction\v!stop
+ \doubleexpandafter\secondoftwoarguments
+ \else
+ \doubleexpandafter\firstoftwoarguments
+ \fi
+ \else
+ \expandafter\secondoftwoarguments
+ \fi}
+
+\appendtoks
+ \iflocation
+ \edef\p_interaction{\btxlistvariantparameter\c!interaction}%
+ \ifx\p_interaction\v!stop
+ \let\doifelsebtxinteractionelse\secondoftwoarguments
+ \setfalse\btxinteractive
+ \else
+ \let\doifelsebtxinteractionelse\firstoftwoarguments
+ \settrue\btxinteractive
+ \fi
+ \else
+ \let\doifelsebtxinteractionelse\secondoftwoarguments
+ \setfalse\btxinteractive
+ \fi
+\to \everysetupbtxlistplacement
+
+% bib -> btx
+
+\unexpanded\def\btxgotolink#1[#2]{\doifreferencefoundelse{\bibrefprefix#2}{\goto{#1}[\bibrefprefix#2]}{#1}}
+\unexpanded\def\btxatlink [#1]{\doifreferencefoundelse{\bibrefprefix#1}{\at [\bibrefprefix#1]}{#1}}
+\unexpanded\def\btxinlink [#1]{\doifreferencefoundelse{\bibrefprefix#1}{\expanded{\goto{\currentreferencetext}}[\bibrefprefix#1]}{#1}}
+
+%\unexpanded\def\btxdirectlink#1#2{\goto{#2 {\tttf[#1]}}[#1]} % tracing variant
+\unexpanded\def\btxdirectlink#1#2{\goto{#2}[#1]}
+
+\let\gotobiblink\btxgotolink
+\let\atbiblink \btxatlink
+\let\inbiblink \btxinlink
+
+\unexpanded\def\btxnumberedreference[#1]% \bibtexnumref (replaced by \cite[num])
+ {\dontleavehmode
+ \begingroup
+ \btxcitevariantparameter\v!left
+ \penalty\plustenthousand % todo
+ \ctxcommand{btxresolvelistreference("\currentbtxdataset","#1")}% todo: split dataset from #1, so another call
+ \btxcitevariantparameter\v!right
+ \endgroup}
+
+% \def\btxnumberedplaceholder[#1]% \nobibtexnumref
+% {[#1]}
+
+\appendtoks
+ % for old times sake, for a while at least
+ \let\maybeyear\gobbleoneargument
+ \let\noopsort \gobbleoneargument
+\to \everysetupbtxlistplacement
+
+\appendtoks
+ % for old times sake, for a while at least
+ \let\maybeyear\gobbleoneargument
+ \let\noopsort \gobbleoneargument
+\to \everysetupbtxciteplacement
+
+\appendtoks
+ \doifnot{\btxlistvariantparameter\c!option}\v!continue
+ {\global\btxcounter\zerocount}%
+\to \everysetupbtxlistplacement
+
+%D When a publication is cited, we need to signal that somehow. This is done with the
+%D following (non||user) command. We could tag without injecting a node but this way
+%D we also store the location, which makes it possible to ask for local lists.
+
+\newconditional\c_publ_cite_write
+
+% for reference, but split now done at the lua end
+%
+% \def\publ_cite_write#1% not used
+% {\splitstring#1\at::\to\askedbtxrendering\and\askedbtxtag
+% \ifx\askedbtxtag\empty
+% \let\currentbtxtag \askedbtxrendering
+% \else
+% \let\currentbtxtag \askedbtxtag
+% \let\currentbtxrendering\askedbtxrendering
+% \fi
+% \iftrialtypesetting \else
+% \processcommacommand[\currentbtxtag]{\publ_cite_indeed\currentbtxrendering}%
+% \fi}
+
+\def\publ_cite_indeed#1#2%
+ {\expanded{\writedatatolist[btx][btxset=#1,btxref=#2]}}
+
+\def\dobtxmarkcitation#1#2% called from lua end
+ {\iftrialtypesetting \else
+ \writedatatolist[btx][btxset=#1,btxref=#2]% \c!location=\v!here
+ \fi}
+
+%D \macros{cite,nocite,citation,nocitation,usecitation}
+%D
+%D The inline \type {\cite} command creates an (often) short reference to a publication
+%D and for historic reasons uses a strict test for brackets. This means, at least
+%D in the default case, that spaces are ignored in the argument scanner. The \type
+%D {\citation} command is more liberal but also gobbles following spaces. Both
+%D commands insert a reference as well as a visual clue.
+%D
+%D The \type {no} commands all do the same (they are synonyms): they make sure that
+%D a reference is injected but show nothing. However, they do create a node, so it is
+%D best to attach them to some text in order to avoid spacing interferences. A slightly
+%D less efficient alternative is \type {\cite[none][tag]}.
+
+% [tags]
+% [settings|variant][tags]
+% [base::tags]
+% [settings|variant][base::tags]
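+
+% For example (a sketch, tags and dataset name are made up):
+%
+% \cite[knuth1984]
+% \cite[authoryear][knuth1984,knuth1986]
+% \cite[alternative=num,interaction=stop][knuth1984]
+% \cite[secondset::knuth1984]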
+
+\unexpanded\def\btxcite
+ {\dontleavehmode
+ \begingroup
+ \strictdoifnextoptionalelse\publ_cite_tags_options\publ_cite_tags_indeed}
+
+\unexpanded\def\publ_cite_tags_indeed#1%
+ {\letinteractionparameter\c!style\empty
+ \edef\currentbtxcitevariant{\btxcitevariantparameter\c!alternative}%
+ \edef\currentbtxcitetag{#1}%
+ \publ_cite_variant
+ \endgroup}
+
+\let\publ_citation_tags_indeed\publ_cite_tags_indeed
+
+\unexpanded\def\publ_cite_tags_options[#1]%
+ {\strictdoifnextoptionalelse{\publ_cite_tags_options_indeed{#1}}{\publ_cite_tags_indeed{#1}}}
+
+\unexpanded\def\publ_cite_tags_options_indeed#1[#2]%
+ {\edef\currentbtxcitetag{#2}%
+ \doifassignmentelse{#1}
+ {\publ_cite_tags_settings_indeed{#1}}
+ {\publ_cite_tags_variants_indeed{#1}}}
+
+\def\publ_cite_tags_settings_indeed#1%
+ {\letinteractionparameter\c!style\empty
+ %\letinteractionparameter\c!color\empty
+ \getdummyparameters[\c!alternative=,\c!extras=,#1]%
+ \edef\p_alternative{\dummyparameter\c!alternative}%
+ \ifx\p_alternative\empty \else
+ \let\currentbtxcitevariant\p_alternative
+ \fi
+ \setupcurrentbtxcitevariantparameters[#1]%
+ \edef\p_extras{\dummyparameter\c!extras}%
+ \ifx\p_extras\empty \else
+ \edef\p_right{\btxcitevariantparameter\c!right}%
+ \ifx\p_right\empty \else
+ \setexpandedbtxcitevariantparameter\p_right{\p_extras\p_right}%
+ \fi
+ \fi
+ \publ_cite_variant
+ \endgroup}
+
+\def\publ_cite_tags_variants_indeed#1%
+ {\letinteractionparameter\c!style\empty
+ \edef\currentbtxcitevariant{#1}%
+ \publ_cite_variant
+ \endgroup}
+
+\newconditional\btxcitecompress
+
+\def\publ_cite_variant
+ {\edef\p_compress{\btxcitevariantparameter\c!compress}%
+ % \ifx\p_compress\v!no
+ % \setfalse\btxcitecompress
+ % \else
+ % \settrue\btxcitecompress
+ % \fi
+ \begingroup
+ \settrue\c_publ_cite_write
+ \publ_cite_handle_variant_indeed[\currentbtxcitetag]}
+
+\unexpanded\def\publ_cite_handle_variant#1%
+ {\begingroup
+ \the\everysetupbtxciteplacement
+ \edef\currentbtxcitevariant{#1}%
+ \dosingleargument\publ_cite_handle_variant_indeed}
+
+\def\publ_cite_handle_variant_indeed[#1]%
+ {\usebtxcitevariantstyleandcolor\c!style\c!color
+ \letbtxcitevariantparameter\c!alternative\currentbtxcitevariant
+ \ctxcommand{btxhandlecite(%
+ "\currentbtxdataset",%
+ "#1",%
+ \iftrialtypesetting false\else true\fi,%
+ "\currentbtxcitevariant",%
+ "\btxcitevariantparameter\c!sorttype",%
+ "\btxcitevariantparameter\c!setups"%
+ )}%
+ \endgroup}
+
+\unexpanded\def\btxcitation
+ {\dontleavehmode
+ \begingroup
+ \dodoubleempty\publ_citation}
+
+\def\publ_citation[#1][#2]% could be made more efficient but not now
+ {\ifsecondargument
+ \publ_cite_tags_options_indeed{#1}[#2]%
+ \else
+ \publ_cite_tags_indeed{#1}%
+ \fi}
+
+\unexpanded\def\btxnocite
+ {\dosingleempty\publ_cite_no}
+
+\unexpanded\def\publ_cite_no[#1]%
+ {\iftrialtypesetting \else
+ \ctxcommand{btxhandlenocite("\currentbtxdataset","#1")}%
+ \fi}
+
+%D Compatibility:
+
+\let\cite \btxcite
+\let\citation \btxcitation
+\let\nocite \btxnocite
+\let\nocitation \btxnocite
+\let\usepublication\btxnocite
+
+%D Cite: helpers
+
+\unexpanded\def\btxcitevariant#1%
+ {\ctxcommand{btxcitevariant("\currentbtxdataset","\currentbtxblock","\currentbtxtag","#1")}}
+
+%D List: helpers
+
+\def\currentbtxindex{0}
+
+\unexpanded\def\btxlistvariant#1%
+ {\ctxcommand{btxlistvariant("\currentbtxdataset","\currentbtxblock","\currentbtxtag","#1","\currentbtxindex")}} % some can go
+
+%D Loading variants:
+
+\appendtoks
+ \loadbtxdefinitionfile[\btxrenderingparameter\c!alternative]
+\to \everysetupbtxrendering
+
+%D Defaults:
+
+\setupbtxrendering
+ [\c!dataset=\v!standard,
+ \c!method=\v!global,
+ \c!setups=btx:rendering:\btxrenderingparameter\c!alternative,
+ \c!alternative=apa,
+ \c!sorttype=,
+ \c!criterium=,
+ \c!refcommand=authoryears, % todo
+ \c!numbering=\v!yes,
+% \c!autohang=\v!no,
+ \c!width=\v!auto,
+ \c!distance=1.5\emwidth]
+
+\definebtxrendering
+ [\v!standard]
+
+\setupbtxcitevariant
+ [\c!interaction=\v!start,
+ \c!setups=btx:cite:\btxcitevariantparameter\c!alternative,
+ \c!alternative=num,
+ \c!andtext={ and },
+ \c!otherstext={ et al.},
+ \c!pubsep={, },
+ \c!lastpubsep={ and },
+ \c!compress=\v!no,
+ \c!inbetween={ },
+ \c!left=,
+ \c!right=]
+
+\definebtxcitevariant
+ [author]
+ [%c!sorttype=,
+ \c!left={(},
+ \c!middle={, },
+ \c!right={)}]
+
+\definebtxcitevariant
+ [authoryear]
+ [\c!compress=\v!yes,
+ \c!inbetween={, },
+ \c!left={(},
+ \c!middle={, },
+ \c!right={)}]
+
+\definebtxcitevariant
+ [authoryears]
+ [authoryear]
+
+\definebtxcitevariant
+ [authornum]
+ [author]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [year]
+ [\c!left={(},
+ \c!right={)}]
+
+\definebtxcitevariant
+ [key]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [serial]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [page]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [short]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [type]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [doi]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [url]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [page]
+ [\c!left=,
+ \c!right=,
+ \c!inbetween=\endash]
+
+\definebtxcitevariant
+ [num]
+ [\c!compress=\v!yes,
+ \c!inbetween={--},
+ \c!left={[},
+ \c!right={]}]
+
+% \c!artauthor=invertedshort % todo
+% \c!editor =invertedshort % todo
+% \c!author =invertedshort % todo
+
+\setupbtxlistvariant
+ [\c!namesep={, },
+ \c!lastnamesep={ and },
+ \c!finalnamesep={ and },
+ \c!firstnamesep={ },
+ \c!juniorsep={ },
+ \c!vonsep={ },
+ \c!surnamesep={, },
+ \c!etallimit=5,
+ \c!etaldisplay=5,
+ \c!etaltext={ et al.},
+ \c!monthconversion=\v!number,
+ \c!authorconversion=\v!normal]
+
+\definebtxlistvariant
+ [author]
+
+\definebtxlistvariant
+ [editor]
+ [author]
+
+\definebtxlistvariant
+ [artauthor]
+ [author]
+
+% Do we want these in the format? Loading them delayed is somewhat messy.
+
+\loadbtxdefinitionfile[apa]
+\loadbtxdefinitionfile[cite]
+\loadbtxdefinitionfile[commands]
+\loadbtxdefinitionfile[definitions]
+
+\protect
diff --git a/tex/context/base/publ-oth.lua b/tex/context/base/publ-oth.lua
new file mode 100644
index 000000000..14da19f9c
--- /dev/null
+++ b/tex/context/base/publ-oth.lua
@@ -0,0 +1,146 @@
+if not modules then modules = { } end modules ['publ-oth'] = {
+ version = 1.001,
+ comment = "this module part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local P, S, C, Ct, Cf, Cg, Cmt, Carg = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.Cmt, lpeg.Carg
+local lpegmatch = lpeg.match
+
+local p_endofline = lpeg.patterns.newline
+
+local loaders = publications.loaders
+local getindex = publications.getindex
+
+local function addfield(t,k,v,fields)
+ k = fields[k]
+ if k then
+ local tk = t[k]
+ if tk then
+ t[k] = tk .. " and " .. v
+ else
+ t[k] = v
+ end
+ end
+ return t
+end
+
+local function checkfield(_,_,t,categories,all)
+ local tag = t.tag
+ if tag then
+ local category = t.category
+ t.tag = nil
+ t.category = categories[category] or category
+ all[tag] = t
+ end
+ return true
+end
+
+-- endnotes --
+
+local fields = {
+ ["@"] = "tag",
+ ["0"] = "category",
+ ["A"] = "author",
+ ["E"] = "editor",
+ ["T"] = "title",
+ ["D"] = "year",
+ ["I"] = "publisher",
+}
+
+local categories = {
+ ["Journal Article"] = "article",
+}
+
+local entry = P("%") * Cg(C(1) * (S(" \t")^1) * C((1-p_endofline)^0) * Carg(1)) * p_endofline
+local record = Cf(Ct("") * (entry^1), addfield)
+local records = (Cmt(record * Carg(2) * Carg(3), checkfield) * P(1))^1
+
+function publications.endnotes_to_btx(data)
+ local all = { }
+ lpegmatch(records,data,1,fields,categories,all)
+ return all
+end
+
+function loaders.endnote(dataset,filename)
+ -- we could combine the next into checkfield but let's not create too messy code
+ loaders.lua(dataset,publications.endnotes_to_btx(io.loaddata(filename) or ""))
+end
+
+-- refman --
+
+local entry = Cg(C((1-lpeg.S(" \t")-p_endofline)^1) * (S(" \t-")^1) * C((1-p_endofline)^0) * Carg(1)) * p_endofline
+local record = Cf(Ct("") * (entry^1), addfield)
+local records = (Cmt(record * Carg(2) * Carg(3), checkfield) * P(1))^1
+
+local fields = {
+ ["SN"] = "tag",
+ ["TY"] = "category",
+ ["A1"] = "author",
+ ["E1"] = "editor",
+ ["T1"] = "title",
+ ["Y1"] = "year",
+ ["PB"] = "publisher",
+}
+
+local categories = {
+ ["JOUR"] = "article",
+}
+
+function publications.refman_to_btx(data)
+ local all = { }
+ lpegmatch(records,data,1,fields,categories,all)
+ return all
+end
+
+function loaders.refman(dataset,filename)
+ -- we could combine the next into checkfield but let's not make the code too messy
+ loaders.lua(dataset,publications.refman_to_btx(io.loaddata(filename) or ""))
+end
+
+-- test --
+
+-- local endnote = [[
+-- %0 Journal Article
+-- %T Scientific Visualization, Overviews, Methodologies, and Techniques
+-- %A Nielson, Gregory M
+-- %A Hagen, Hans
+-- %A Müller, Heinrich
+-- %@ 0818677776
+-- %D 1994
+-- %I IEEE Computer Society
+--
+-- %0 Journal Article
+-- %T Scientific Visualization, Overviews, Methodologies, and Techniques
+-- %A Nielson, Gregory M
+-- %A Hagen, Hans
+-- %A Müller, Heinrich
+-- %@ 0818677775
+-- %D 1994
+-- %I IEEE Computer Society
+-- ]]
+--
+-- local refman = [[
+-- TY - JOUR
+-- T1 - Scientific Visualization, Overviews, Methodologies, and Techniques
+-- A1 - Nielson, Gregory M
+-- A1 - Hagen, Hans
+-- A1 - Müller, Heinrich
+-- SN - 0818677776
+-- Y1 - 1994
+-- PB - IEEE Computer Society
+--
+-- TY - JOUR
+-- T1 - Scientific Visualization, Overviews, Methodologies, and Techniques
+-- A1 - Nielson, Gregory M
+-- A1 - Hagen, Hans
+-- A1 - Müller, Heinrich
+-- SN - 0818677775
+-- Y1 - 1994
+-- PB - IEEE Computer Society
+-- ]]
+--
+-- inspect(publications.endnotes_to_btx(endnote))
+-- inspect(publications.refman_to_btx(refman))
diff --git a/tex/context/base/publ-tra.lua b/tex/context/base/publ-tra.lua
new file mode 100644
index 000000000..708795727
--- /dev/null
+++ b/tex/context/base/publ-tra.lua
@@ -0,0 +1,130 @@
+if not modules then modules = { } end modules ['publ-tra'] = {
+ version = 1.001,
+ comment = "this module part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local sortedhash = table.sortedhash
+
+local tracers = { }
+publications.tracers = tracers
+
+local NC, NR, bold = context.NC, context.NR, context.bold
+
+publications.tracers.fields = table.sorted {
+ "abstract",
+ "address",
+ "annotate",
+ "author",
+ "booktitle",
+ "chapter",
+ "comment",
+ "country",
+ "doi",
+ "edition",
+ "editor",
+ "eprint",
+ "howpublished",
+ "institution",
+ "isbn",
+ "issn",
+ "journal",
+ "key",
+ "keyword",
+ "keywords",
+ "language",
+ "lastchecked",
+ "month",
+ "names",
+ "note",
+ "notes",
+ "number",
+ "organization",
+ "pages",
+ "publisher",
+ "school",
+ "series",
+ "size",
+ "title",
+ "type",
+ "url",
+ "volume",
+ "year",
+ "nationality",
+ "assignee",
+ "bibnumber",
+ "day",
+ "dayfiled",
+ "monthfiled",
+ "yearfiled",
+ "revision",
+}
+
+publications.tracers.citevariants = table.sorted {
+ "author",
+ "authoryear",
+ "authoryears",
+ "authornum",
+ "year",
+ "short",
+ "serial",
+ "key",
+ "doi",
+ "url",
+ "type",
+ "page",
+ "none",
+ "num",
+}
+
+publications.tracers.listvariants = table.sorted {
+ "author",
+ "editor",
+ "artauthor",
+}
+
+publications.tracers.categories = table.sorted {
+ "article",
+ "book",
+ "booklet",
+ "conference",
+ "inbook",
+ "incollection",
+ "inproceedings",
+ "manual",
+ "mastersthesis",
+ "misc",
+ "phdthesis",
+ "proceedings",
+ "techreport",
+ "unpublished",
+}
+
+function tracers.showdatasetfields(name)
+ if name and name ~= "" then
+ local luadata = publications.datasets[name].luadata
+ if next(luadata) then
+ context.starttabulate { "|lT|lT|pT|" }
+ NC() bold("tag")
+ NC() bold("category")
+ NC() bold("fields")
+ NC() NR() context.FL() -- HL()
+ for k, v in sortedhash(luadata) do
+ NC() context(k)
+ NC() context(v.category)
+ NC()
+ for k, v in sortedhash(v) do
+ if k ~= "details" and k ~= "tag" and k ~= "category" then
+ context("%s ",k)
+ end
+ end
+ NC() NR()
+ end
+ context.stoptabulate()
+ end
+ end
+end
+
+commands.showbtxdatasetfields = tracers.showdatasetfields
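+
+-- at the tex end this tracer is exposed as \showbtxdatasetfields, defined in
+-- publ-tra.mkiv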
diff --git a/tex/context/base/publ-tra.mkiv b/tex/context/base/publ-tra.mkiv
new file mode 100644
index 000000000..70db634fe
--- /dev/null
+++ b/tex/context/base/publ-tra.mkiv
@@ -0,0 +1,26 @@
+%D \module
+%D [ file=publ-tra,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Tracing,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Publication Support / Tracing}
+
+\registerctxluafile{publ-tra}{1.001}
+
+\unprotect
+
+\unexpanded\def\showbtxdatasetfields
+ {\dosingleempty\publ_dataset_show_fields}
+
+\def\publ_dataset_show_fields[#1]%
+ {\ctxcommand{showbtxdatasetfields("\iffirstargument#1\else\currentbtxdataset\fi")}}
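+
+% A usage sketch, assuming a dataset loaded with \usebtxdataset from
+% publ-ini.mkiv (the names are only illustrative):
+%
+%   \usebtxdataset[standard][tugboat.bib]
+%   \showbtxdatasetfields[standard]
+%
+% which tabulates for each entry its tag, category and the fields present.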
+
+\protect \endinput
diff --git a/tex/context/base/publ-usr.lua b/tex/context/base/publ-usr.lua
new file mode 100644
index 000000000..6bb93ebee
--- /dev/null
+++ b/tex/context/base/publ-usr.lua
@@ -0,0 +1,91 @@
+if not modules then modules = { } end modules ['publ-usr'] = {
+ version = 1.001,
+ comment = "this module part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- local chardata = characters.data
+
+-- local str = [[
+-- \startpublication[k=Berdnikov:TB21-2-129,t=article,a={{Berdnikov},{}},y=2000,n=2257,s=BHHJ00]
+-- \artauthor[]{Alexander}[A.]{}{Berdnikov}
+-- \artauthor[]{Hans}[H.]{}{Hagen}
+-- \artauthor[]{Taco}[T.]{}{Hoekwater}
+-- \artauthor[]{Bogus{\l}aw}[B.]{}{Jackowski}
+-- \pubyear{2000}
+-- \arttitle{{Even more MetaFun with \MP: A request for permission}}
+-- \journal{TUGboat}
+-- \issn{0896-3207}
+-- \volume{21}
+-- \issue{2}
+-- \pages{129--130}
+-- \month{6}
+-- \stoppublication
+-- ]]
+
+local remapped = {
+ artauthor = "author",
+ arttitle = "title",
+}
+
+local P, Cs, R, Cc, Carg = lpeg.P, lpeg.Cs, lpeg.R, lpeg.Cc, lpeg.Carg
+
+local function register(target,key,a,b,c,d,e)
+ key = remapped[key] or key
+ if b and d and e then
+ local s = nil
+ if b ~= "" and b then
+ s = s and s .. " " .. b or b
+ end
+ if d ~= "" and d then
+ s = s and s .. " " .. d or d
+ end
+ if e ~= "" and e then
+ s = s and s .. " " .. e or e
+ end
+ if a ~= "" and a then
+ s = s and s .. " " .. a or a
+ end
+ local value = target[key]
+ if s then
+ if value then
+ target[key] = value .. " and " .. s
+ else
+ target[key] = s
+ end
+ else
+ if not value then
+ target[key] = s
+ end
+ end
+ else
+ target[key] = b
+ end
+end
+
+local leftbrace = P("{")
+local rightbrace = P("}")
+local leftbracket = P("[")
+local rightbracket = P("]")
+
+local key = P("\\") * Cs(R("az","AZ")^1) * lpeg.patterns.space^0
+local mandate = leftbrace * Cs(lpeg.patterns.balanced) * rightbrace + Cc(false)
+local optional = leftbracket * Cs((1-rightbracket)^0) * rightbracket + Cc(false)
+local value = optional^-1 * mandate^-1 * optional^-1 * mandate^-2
+
+local pattern = ((Carg(1) * key * value) / register + P(1))^0
+
+function publications.addtexentry(dataset,settings,content)
+ settings = utilities.parsers.settings_to_hash(settings)
+ local data = {
+ tag = settings.tag or settings.k or "no tag",
+ category = settings.category or settings.t or "article",
+ }
+ lpeg.match(pattern,content,1,data) -- can set tag too
+ dataset.userdata[data.tag] = data
+ dataset.luadata[data.tag] = data
+ publications.markasupdated(dataset)
+ return data
+end
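+
+-- The commented \startpublication block at the top of this file shows the kind
+-- of content this parser expects: the k= and t= settings provide the tag and
+-- category, and commands like \artauthor and \arttitle are remapped onto their
+-- bibtex field names.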
diff --git a/tex/context/base/publ-usr.mkiv b/tex/context/base/publ-usr.mkiv
new file mode 100644
index 000000000..cb078f424
--- /dev/null
+++ b/tex/context/base/publ-usr.mkiv
@@ -0,0 +1,2 @@
+% todo
+
diff --git a/tex/context/base/publ-xml.mkiv b/tex/context/base/publ-xml.mkiv
new file mode 100644
index 000000000..007f9bb27
--- /dev/null
+++ b/tex/context/base/publ-xml.mkiv
@@ -0,0 +1,114 @@
+%D \module
+%D [ file=publ-xml,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=XML,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Publication Support / XML}
+
+\unprotect
+
+\unexpanded\def\convertbtxdatasettoxml
+ {\dosingleempty\publ_convert_to_xml}
+
+\def\publ_convert_to_xml[#1]%
+ {\ctxcommand{convertbtxdatasettoxml("\iffirstargument#1\else\v!standard\fi",true)}} % or current when not empty
+
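+% So, assuming that a dataset "standard" has been filled, something like
+%
+%   \convertbtxdatasettoxml[standard]
+%
+% should expose that dataset as an xml tree that the btx:* setups below can
+% act upon.
+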
+% \startxmlsetups btx:initialize
+% \xmlregistereddocumentsetups{#1}{}
+% \xmlsetsetup{#1}{bibtex|entry|field}{btx:*}
+% \xmlmain{#1}
+% \stopxmlsetups
+
+\startxmlsetups btx:initialize
+ \xmlsetsetup{#1}{bibtex|entry|field}{btx:*}
+ \xmlmain{#1}
+\stopxmlsetups
+
+% \startxmlsetups btx:entry
+% \xmlflush{#1}
+% \stopxmlsetups
+
+\startxmlsetups btx:field
+ \xmlflushcontext{#1}
+\stopxmlsetups
+
+\protect \endinput
+
+% \startxmlsetups bibtex:entry:getkeys
+% \xmladdsortentry{bibtex}{#1}{\xmlfilter{#1}{/field[@name='author']/text()}}
+% \xmladdsortentry{bibtex}{#1}{\xmlfilter{#1}{/field[@name='year' ]/text()}}
+% \xmladdsortentry{bibtex}{#1}{\xmlatt{#1}{tag}}
+% \stopxmlsetups
+
+% \startbuffer
+% \startxmlsetups xml:bibtex:sorter
+% \xmlresetsorter{bibtex}
+% % \xmlfilter{#1}{entry/command(bibtex:entry:getkeys)}
+% \xmlfilter{#1}{
+% bibtex
+% /entry[@category='article']
+% /field[@name='author' and find(text(),'Knuth')]
+% /../command(bibtex:entry:getkeys)}
+% \xmlsortentries{bibtex}
+% \xmlflushsorter{bibtex}{bibtex:entry:flush}
+% \stopxmlsetups
+% \stopbuffer
+
+% \bgroup
+% \setups[bibtex-commands]
+% \getbuffer
+% \egroup
+
+% \startxmlsetups bibtex:entry:flush
+% \xmlfilter{#1}{/field[@name='author']/context()} / %
+% \xmlfilter{#1}{/field[@name='year' ]/context()} / %
+% \xmlatt{#1}{tag}\par
+% \stopxmlsetups
+
+% \startpacked
+% \getbuffer
+% \stoppacked
+
+
+% \unexpanded\def\btx_xml_list_handle_entry
+% {\begingroup
+% \ignorespaces
+% \xmlfilter{btx:\currentbtxrendering}{/bibtex/entry[@tag='\currentbtxtag']/command(btx:format)}%
+% \removeunwantedspaces
+% \endgroup}
+
+% \startxmlsetups btx:format
+% \btxlistparameter\c!before\relax % prevents lookahead
+% \edef\currentbibxmlnode {#1}
+% \edef\currentbibxmltag {\xmlatt{#1}{tag}}
+% \edef\currentbtxcategory{\xmlatt{#1}{category}}
+% \ignorespaces
+% \xmlcommand{#1}{.}{btx:\currentbtxformat:\currentbibxmlcategory}
+% \removeunwantedspaces
+% \btxlistparameter\c!after\relax % prevents lookahead
+% \stopxmlsetups
+
+% \startxmlsetups btx:list
+% \xmlfilter{#1}{/bibtex/entry/command(bibtex:format)}
+% \stopxmlsetups
+
+% \startxmlsetups btx:btx
+% \xmlfilter{#1}{/entry/command(btx:format)}
+% \stopxmlsetups
+
+% \unexpanded\def\btx_xml_doifelse#1{\xmldoifelse\currentbibxmlnode{/field[@name='#1']}}
+% \unexpanded\def\btx_xml_doif #1{\xmldoif \currentbibxmlnode{/field[@name='#1']}}
+% \unexpanded\def\btx_xml_doifnot #1{\xmldoifnot \currentbibxmlnode{/field[@name='#1']}}
+% \def\btx_xml_flush #1{\xmlcontext \currentbibxmlnode{/field[@name='#1']}}
+% \def\btx_xml_setup {\xmlsetup \currentbibxmlnode} % {#1}
+% \unexpanded\def\btx_xml_todo #1{[#1]}
+
+% \xmlfilter{#1}{/field[@name='\currentbtxfield']/btxconcat('\currentbtxfield')}
diff --git a/tex/context/base/s-abr-01.tex b/tex/context/base/s-abr-01.tex
index 386f2ba57..e9ea6393b 100644
--- a/tex/context/base/s-abr-01.tex
+++ b/tex/context/base/s-abr-01.tex
@@ -48,6 +48,7 @@
\logo [ASCIITEX] {ascii\TeX}
\logo [BACHOTEX] {Bacho\TeX}
\logo [BIBTEX] {bib\TeX}
+\logo [MLBIBTEX] {MLbib\TeX}
\logo [BLUESKY] {BlueSky}
\logo [BMP] {bmp}
\logo [BSD] {bsd}
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index 7061e0103..28b0d63f7 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index 054b088ad..1e51302f7 100644
--- a/tex/context/base/status-lua.pdf
+++ b/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index aea0c2e69..c6d88290e 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 01/11/14 23:58:35
+-- merge date : 01/14/14 15:03:49
do -- begin closure to overcome local limits and interference