summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--doc/context/scripts/mkiv/mtx-bibtex.html2
-rw-r--r--doc/context/scripts/mkiv/mtx-bibtex.man3
-rw-r--r--doc/context/scripts/mkiv/mtx-bibtex.xml2
-rw-r--r--scripts/context/lua/mtx-bibtex.lua52
-rw-r--r--tex/context/base/cont-new.mkiv2
-rw-r--r--tex/context/base/context-version.pdfbin4377 -> 4393 bytes
-rw-r--r--tex/context/base/context.mkiv2
-rw-r--r--tex/context/base/core-con.lua21
-rw-r--r--tex/context/base/core-con.mkiv9
-rw-r--r--tex/context/base/lang-def.mkiv31
-rw-r--r--tex/context/base/mult-def.mkiv3
-rw-r--r--tex/context/base/publ-aut.lua96
-rw-r--r--tex/context/base/publ-dat.lua1173
-rw-r--r--tex/context/base/publ-fnd.lua8
-rw-r--r--tex/context/base/publ-imp-apa.lua22
-rw-r--r--tex/context/base/publ-ini.lua3248
-rw-r--r--tex/context/base/publ-ini.mkiv94
-rw-r--r--tex/context/base/publ-jrn.lua15
-rw-r--r--tex/context/base/publ-oth.lua6
-rw-r--r--tex/context/base/publ-reg.lua131
-rw-r--r--tex/context/base/publ-tra.lua23
-rw-r--r--tex/context/base/publ-usr.lua23
-rw-r--r--tex/context/base/status-files.pdfbin24717 -> 24691 bytes
-rw-r--r--tex/context/base/status-lua.pdfbin342244 -> 342436 bytes
-rw-r--r--tex/context/base/status-mkiv.lua6
-rw-r--r--tex/generic/context/luatex/luatex-fonts-merged.lua2
26 files changed, 2600 insertions, 2374 deletions
diff --git a/doc/context/scripts/mkiv/mtx-bibtex.html b/doc/context/scripts/mkiv/mtx-bibtex.html
index ba1591b4b..61a679716 100644
--- a/doc/context/scripts/mkiv/mtx-bibtex.html
+++ b/doc/context/scripts/mkiv/mtx-bibtex.html
@@ -41,12 +41,14 @@
<tr><th/><td/><td/></tr>
<tr><th>--toxml</th><td></td><td>convert bibtex database(s) to xml</td></tr>
<tr><th>--tolua</th><td></td><td>convert bibtex database(s) to lua</td></tr>
+ <tr><th>--search</th><td></td><td>search bibtex database(s)</td></tr>
</table>
<br/>
<h1>Example</h1>
<tt>mtxrun --script bibtex --tolua bibl-001.bib</tt>
<br/><tt>mtxrun --script bibtex --tolua --simple bibl-001.bib</tt>
<br/><tt>mtxrun --script bibtex --toxml bibl-001.bib bibl-002.bib bibl-003.bib biblio.xml</tt>
+<br/><tt>mtxrun --script bibtex --search --list --pattern=match(author:foo) bar.bib</tt>
<br/><br/> </div>
</div>
</body>
diff --git a/doc/context/scripts/mkiv/mtx-bibtex.man b/doc/context/scripts/mkiv/mtx-bibtex.man
index cedf41b8b..ef44fc87d 100644
--- a/doc/context/scripts/mkiv/mtx-bibtex.man
+++ b/doc/context/scripts/mkiv/mtx-bibtex.man
@@ -16,6 +16,9 @@ convert bibtex database(s) to xml
.TP
.B --tolua
convert bibtex database(s) to lua
+.TP
+.B --search
search bibtex database(s)
.SH AUTHOR
More information about ConTeXt and the tools that come with it can be found at:
diff --git a/doc/context/scripts/mkiv/mtx-bibtex.xml b/doc/context/scripts/mkiv/mtx-bibtex.xml
index b33e1809c..5c28b946a 100644
--- a/doc/context/scripts/mkiv/mtx-bibtex.xml
+++ b/doc/context/scripts/mkiv/mtx-bibtex.xml
@@ -10,6 +10,7 @@
<subcategory>
<flag name="toxml"><short>convert bibtex database(s) to xml</short></flag>
<flag name="tolua"><short>convert bibtex database(s) to lua</short></flag>
+ <flag name="search"><short>search bibtex database(s)</short></flag>
</subcategory>
</category>
</flags>
@@ -20,6 +21,7 @@
<example><command>mtxrun --script bibtex --tolua bibl-001.bib</command></example>
<example><command>mtxrun --script bibtex --tolua --simple bibl-001.bib</command></example>
<example><command>mtxrun --script bibtex --toxml bibl-001.bib bibl-002.bib bibl-003.bib biblio.xml</command></example>
+ <example><command>mtxrun --script bibtex --search --list --pattern=match(author:foo) bar.bib</command></example>
</subcategory>
</category>
</examples>
diff --git a/scripts/context/lua/mtx-bibtex.lua b/scripts/context/lua/mtx-bibtex.lua
index c81fd596f..92036e3a5 100644
--- a/scripts/context/lua/mtx-bibtex.lua
+++ b/scripts/context/lua/mtx-bibtex.lua
@@ -19,6 +19,7 @@ local helpinfo = [[
<subcategory>
<flag name="toxml"><short>convert bibtex database(s) to xml</short></flag>
<flag name="tolua"><short>convert bibtex database(s) to lua</short></flag>
+ <flag name="search"><short>search bibtex database(s)</short></flag>
</subcategory>
</category>
</flags>
@@ -29,6 +30,7 @@ local helpinfo = [[
<example><command>mtxrun --script bibtex --tolua bibl-001.bib</command></example>
<example><command>mtxrun --script bibtex --tolua --simple bibl-001.bib</command></example>
<example><command>mtxrun --script bibtex --toxml bibl-001.bib bibl-002.bib bibl-003.bib biblio.xml</command></example>
+ <example><command>mtxrun --script bibtex --search --list --pattern=match(author:foo) bar.bib</command></example>
</subcategory>
</category>
</examples>
@@ -43,7 +45,9 @@ local application = logs.application {
local report = application.report
+require("util-seq")
require("publ-dat")
+require("publ-fnd")
scripts = scripts or { }
scripts.bibtex = scripts.bibtex or { }
@@ -57,7 +61,7 @@ function scripts.bibtex.toxml(files)
if filetype == "xml" then
target = filename
elseif filetype == "bib" then
- bibtex.load(instance,filename)
+ bibtex.load { dataset = instance, filename = filename }
else
-- not supported
end
@@ -77,7 +81,8 @@ function scripts.bibtex.tolua(files)
if filetype == "lua" then
target = filename
elseif filetype == "bib" then
- bibtex.load(instance,filename)
+ bibtex.load { dataset = instance, filename = filename }
+
else
-- not supported
end
@@ -92,7 +97,48 @@ function scripts.bibtex.tolua(files)
end
end
-if environment.arguments.toxml then
+function scripts.bibtex.search(files,pattern,list)
+ if pattern then
+ local dataset = publications.datasets["whatever"]
+ for i=1,#files do
+ local filename = resolvers.findfile(files[i])
+ if filename and filename ~= "" then
+ publications.load { dataset = "whatever", filename = filename }
+ end
+ end
+ local found = publications.search(dataset,pattern)
+ local tags = table.sortedkeys(found)
+ if #tags == 0 then
+ report("no match")
+ elseif list then
+ report("%s matches:",#tags)
+ local result = { }
+ local luadata = dataset.luadata
+ for i=1,#tags do
+ local tag = tags[i]
+ local entry = luadata[tag]
+ result[i] = {
+ tag,
+ entry.year,
+ entry.author,
+ entry.title,
+ }
+ end
+ utilities.formatters.formatcolumns(result)
+ logs.newline()
+ for i=1,#result do
+ texio.write_nl(result[i])
+ end
+ logs.newline()
+ else
+ report("%s matches: % t",#tags,tags)
+ end
+ end
+end
+
+if environment.arguments.search then
+ scripts.bibtex.search(environment.files,environment.arguments.pattern,environment.arguments.list)
+elseif environment.arguments.toxml then
scripts.bibtex.toxml(environment.files)
elseif environment.arguments.tolua then
scripts.bibtex.tolua(environment.files)
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index e3ce4e077..dcabceb93 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2014.11.11 12:12}
+\newcontextversion{2014.11.12 11:56}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf
index 2391c5443..5c4a0262d 100644
--- a/tex/context/base/context-version.pdf
+++ b/tex/context/base/context-version.pdf
Binary files differ
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index 179709972..d625767ee 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -28,7 +28,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2014.11.11 12:12}
+\edef\contextversion{2014.11.12 11:56}
\edef\contextkind {beta}
%D For those who want to use this:
diff --git a/tex/context/base/core-con.lua b/tex/context/base/core-con.lua
index dd9f50dc8..e805b23e9 100644
--- a/tex/context/base/core-con.lua
+++ b/tex/context/base/core-con.lua
@@ -60,6 +60,14 @@ local counters = allocate {
0x006F, 0x0070, 0x0072, 0x0073, 0x0161,
0x0074, 0x0075, 0x0076, 0x007A, 0x017E
},
+ ['spanish'] = {
+ 0x0061, 0x0062, 0x0063, 0x0064, 0x0065,
+ 0x0066, 0x0067, 0x0068, 0x0069, 0x006A,
+ 0x006B, 0x006C, 0x006D, 0x006E, 0x00F1,
+ 0x006F, 0x0070, 0x0071, 0x0072, 0x0073,
+ 0x0074, 0x0075, 0x0076, 0x0077, 0x0078,
+ 0x0079, 0x007A
+ },
['greek'] = { -- this should be the lowercase table
-- 0x0391, 0x0392, 0x0393, 0x0394, 0x0395,
-- 0x0396, 0x0397, 0x0398, 0x0399, 0x039A,
@@ -131,6 +139,7 @@ counters['ar'] = counters['arabic']
counters['gr'] = counters['greek']
counters['g'] = counters['greek']
counters['sl'] = counters['slovenian']
+counters['es'] = counters['spanish']
counters['kr'] = counters['korean']
counters['kr-p'] = counters['korean-parent']
counters['kr-c'] = counters['korean-circle']
@@ -191,6 +200,8 @@ converters.maxchrs = maxchrs
local lowercharacter = characters.lcchars
local uppercharacter = characters.ucchars
+local defaultcounter = counters.default
+
local function do_alphabetic(n,mapping,mapper,t) -- todo: make zero based variant (initial n + 1)
if not t then
t = { }
@@ -208,11 +219,11 @@ local function do_alphabetic(n,mapping,mapper,t) -- todo: make zero based varian
end
function converters.alphabetic(n,code)
- return do_alphabetic(n,counters[code] or counters.default,lowercharacter)
+ return do_alphabetic(n,code and counters[code] or defaultcounter,lowercharacter)
end
function converters.Alphabetic(n,code)
- return do_alphabetic(n,counters[code] or counters.default,uppercharacter)
+ return do_alphabetic(n,code and counters[code] or defaultcounter,uppercharacter)
end
local lower_offset = 96
@@ -228,8 +239,8 @@ converters['A'] = converters.Characters
converters['AK'] = converters.Characters
converters['KA'] = converters.Characters
-function commands.alphabetic(n,c) context(do_alphabetic(n,counters[c],lowercharacter)) end
-function commands.Alphabetic(n,c) context(do_alphabetic(n,counters[c],uppercharacter)) end
+function commands.alphabetic(n,c) context(do_alphabetic(n,c and counters[c] or defaultcounter,lowercharacter)) end
+function commands.Alphabetic(n,c) context(do_alphabetic(n,c and counters[c] or defaultcounter,uppercharacter)) end
function commands.character (n) context(chr (n,lower_offset)) end
function commands.Character (n) context(chr (n,upper_offset)) end
function commands.characters(n) context(chrs(n,lower_offset)) end
@@ -893,7 +904,7 @@ local words = {
[900] = "novecientos",
[1000] = "mil",
[1000^2] = "millón",
- [1000^3] = "mil millónes",
+ [1000^3] = "mil millones",
[1000^4] = "billón",
}
diff --git a/tex/context/base/core-con.mkiv b/tex/context/base/core-con.mkiv
index 47c7c88d5..34ab90839 100644
--- a/tex/context/base/core-con.mkiv
+++ b/tex/context/base/core-con.mkiv
@@ -79,6 +79,9 @@
\def\languagecharacters#1{\ctxcommand{alphabetic(\number#1,"\currentlanguage")}} % new
\def\languageCharacters#1{\ctxcommand{Alphabetic(\number#1,"\currentlanguage")}} % new
+\def\alphabeticnumerals#1{\ctxcommand{alphabetic(\number#1)}}
+\def\Alphabeticnumerals#1{\ctxcommand{Alphabetic(\number#1)}}
+
% we could use an auxiliary macro to save some bytes in the format
%
% \def\dolanguagecharacters#1#2{\ctxcommand{alphabetic(\number#2,"#1")}}
@@ -627,6 +630,9 @@
\defineconversion [AK] [\smallcappedcharacters]
\defineconversion [KA] [\smallcappedcharacters]
+\defineconversion [\v!alphabetic] [\alphabeticnumerals]
+\defineconversion [\v!Alphabetic] [\Alphabeticnumerals]
+
\defineconversion [\v!number] [\numbers]
\defineconversion [\v!numbers] [\numbers]
\defineconversion [\v!Numbers] [\Numbers]
@@ -659,6 +665,9 @@
\defineconversion [\v!greek] [\greeknumerals]
\defineconversion [\v!Greek] [\Greeknumerals]
+\defineconversion [ñ] [\spanishnumerals]
+\defineconversion [Ñ] [\Spanishnumerals]
+
\defineconversion [g] [\greeknumerals]
\defineconversion [G] [\Greeknumerals]
diff --git a/tex/context/base/lang-def.mkiv b/tex/context/base/lang-def.mkiv
index 088f86eb8..2d9c88a18 100644
--- a/tex/context/base/lang-def.mkiv
+++ b/tex/context/base/lang-def.mkiv
@@ -271,11 +271,6 @@
\installlanguage [\s!slovenian] [\s!sl]
\installlanguage [slovene] [\s!sl] % both possible (mojca: still needed?)
-\def\doconvertsloveniancharacters{\dodoconvertcharacters{25}}
-
-\def\sloveniancharacters{\doconvertsloveniancharacters\sloveniancharacter}
-\def\slovenianCharacters{\doconvertsloveniancharacters\slovenianCharacter}
-
%D Define general-purpose macros for Slovenian character enumerations:
\defineconversion [sloveniancharacter] [\sloveniancharacter]
@@ -284,13 +279,24 @@
\defineconversion [sloveniancharacters] [\sloveniancharacters]
\defineconversion [slovenianCharacters] [\slovenianCharacters]
+\defineconversion [spanishcharacter] [\spanishcharacter]
+\defineconversion [spanishCharacter] [\spanishCharacter]
+
+\defineconversion [spanishcharacters] [\spanishcharacters]
+\defineconversion [spanishCharacters] [\spanishCharacters]
+
%D Define these as the general character enumeration when
%D language is Slovenian. If you feel uncomfortable with this,
%D mail Mojca, since she promised to to take the heat.
-\defineconversion [\s!sl] [character] [\sloveniancharacter]
-\defineconversion [\s!sl] [Character] [\slovenianCharacter]
+\def\sloveniancharacters#1{\ctxcommand{alphabetic(\number#1,"sl")}}
+\def\slovenianCharacters#1{\ctxcommand{Alphabetic(\number#1,"sl")}}
+
+\def\spanishcharacters #1{\ctxcommand{alphabetic(\number#1,"es")}}
+\def\spanishCharacters #1{\ctxcommand{Alphabetic(\number#1,"es")}}
+\defineconversion [\s!sl] [character] [\sloveniancharacters]
+\defineconversion [\s!sl] [Character] [\slovenianCharacters]
\defineconversion [\s!sl] [characters] [\sloveniancharacters]
\defineconversion [\s!sl] [Characters] [\slovenianCharacters]
@@ -299,8 +305,15 @@
\defineconversion [\s!sl] [AK] [\smallcapped\sloveniancharacters]
\defineconversion [\s!sl] [KA] [\smallcapped\sloveniancharacters]
-\def\sloveniancharacters#1{\ctxcommand{alphabetic(\number#1,"sl")}}
-\def\slovenianCharacters#1{\ctxcommand{Alphabetic(\number#1,"sl")}}
+\defineconversion [\s!es] [character] [\spanishcharacters]
+\defineconversion [\s!es] [Character] [\spanishCharacters]
+\defineconversion [\s!es] [characters] [\spanishcharacters]
+\defineconversion [\s!es] [Characters] [\spanishCharacters]
+
+\defineconversion [\s!es] [a] [\spanishcharacters]
+\defineconversion [\s!es] [A] [\spanishCharacters]
+\defineconversion [\s!es] [AK] [\smallcapped\spanishcharacters]
+\defineconversion [\s!es] [KA] [\smallcapped\spanishcharacters]
% Cyrillic Languages
diff --git a/tex/context/base/mult-def.mkiv b/tex/context/base/mult-def.mkiv
index 560faf6e0..7791200f9 100644
--- a/tex/context/base/mult-def.mkiv
+++ b/tex/context/base/mult-def.mkiv
@@ -34,6 +34,9 @@
% start todo in muld-def.lua:
+\def\v!alphabetic {alphabetic}
+\def\v!Alphabetic {Alphabetic}
+
\def\c!svgstyle {svgstyle}
\def\c!nextleft {nextleft}
diff --git a/tex/context/base/publ-aut.lua b/tex/context/base/publ-aut.lua
index 7eef49f72..a449e25b4 100644
--- a/tex/context/base/publ-aut.lua
+++ b/tex/context/base/publ-aut.lua
@@ -27,6 +27,8 @@ local publications = publications
local datasets = publications.datasets
local writers = publications.writers
local authors = publications.authors
+local detailed = publications.detailed
+local casters = publications.casters
local chardata = characters.data
@@ -210,6 +212,7 @@ local function splitauthorstring(str)
end
authors.splitstring = splitauthorstring
+casters.author = splitauthorstring
local function the_initials(initials,symbol,connector)
if not symbol then
@@ -307,18 +310,22 @@ function commands.btxauthorfield(i,field)
end
function commands.btxauthor(dataset,tag,field,settings)
- local ds = datasets[dataset]
- if not ds then
+ local current = datasets[dataset]
+ if not current then
return f_invalid("dataset",dataset)
end
- local dt = ds.details[tag]
- if not dt then
- return f_invalid("details",tag)
+ local entry = current.luadata[tag]
+ if not entry then
+ return f_invalid("entry",tag)
end
- local split = dt[field]
- if not split then
+ local value = entry[field]
+ if not value then
return f_invalid("field",field)
end
+ local split = detailed.author[value]
+ if type(split) ~= "table" then
+ return f_invalid("cast",value)
+ end
local max = split and #split or 0
if max == 0 then
return
@@ -410,7 +417,6 @@ local function components(snippet,short)
local initials = snippet.initials
local firstnames = not short and snippet.firstnames
local juniors = snippet.juniors
--- inspect(initials)
return
vons and #vons > 0 and concat(vons, " ") or "",
surnames and #surnames > 0 and concat(surnames, " ") or "",
@@ -463,32 +469,26 @@ local default = { "author" }
function authors.getauthor(dataset,tag,categories)
local current = datasets[dataset]
local luadata = current.luadata
- if not luadata then
- report("invalid dataset %a",dataset)
- end
- local entry = luadata[tag]
+ local entry = luadata and luadata[tag]
if entry then
local category = entry.category
- local detail = current.details[tag]
- if detail then
- local list
- if categories then
- local c = categories[category]
- if c then
- local sets = c.sets
- list = sets and sets.author and sets.authors or default
- else
- list = default
- end
+ local list
+ if categories then
+ local c = categories[category]
+ if c then
+ local sets = c.sets
+ list = sets and sets.author and sets.authors or default
else
list = default
end
- for i=1,#list do
- local l = list[i]
- local d = detail[l]
- if d then
- return d, l
- end
+ else
+ list = default
+ end
+ for i=1,#list do
+ local l = list[i]
+ local v = entry[l]
+ if v then
+ return detailed.author[v], l
end
end
end
@@ -510,23 +510,30 @@ end
-- first : key author editor publisher title journal volume number pages
-- second: year suffix title month day journal volume number
+local function directget(dataset,entry,field)
+ local value = entry[field]
+ if value then
+ return detailed.author[value]
+ end
+end
+
local function byauthor(dataset,list,method)
- local luadata = datasets[dataset].luadata
- local details = datasets[dataset].details
+ local current = datasets[dataset]
+ local luadata = current.luadata
local result = { }
local splitted = newsplitter(splitter) -- saves mem
local snippets = { } -- saves mem
+ local get = publications.directget or directget
+ local field = "author" -- todo
for i=1,#list do
-- either { tag, tag, ... } or { { tag, index }, { tag, index } }
local li = list[i]
local tag = type(li) == "string" and li or li[1]
- local entry = luadata[tag]
- local detail = details[tag]
local index = tostring(i)
- if entry and detail then
--- todo pluggable
- local mainkey = writer(detail.author or detail.editor or entry.publisher or entry.title or "",snippets)
- -- we could store the mainkey in details for tracing
+ local entry = luadata[tag]
+ if entry then
+ local value = get(current,entry,field) or ""
+ local mainkey = writer(value,snippets)
result[i] = {
index = i,
split = {
@@ -582,18 +589,3 @@ function authors.sorted(dataset,list,sorttype) -- experimental
return valid
end
end
-
--- local dataset = publications.datasets.test
---
--- local function add(str)
--- dataset.details[str] = { author = publications.authors.splitstring(str) }
--- end
---
--- add("Hagen, Hans and Hoekwater, Taco Whoever T. Ex. and Henkel Hut, Hartmut Harald von der")
--- add("Hans Hagen and Taco Whoever T. Ex. Hoekwater and Hartmut Harald von der Henkel Hut")
--- add("de Gennes, P. and Gennes, P. de")
--- add("van't Hoff, J. H. and {van't Hoff}, J. H.")
---
--- local list = table.keys(dataset.details)
--- local sort = publications.authors.sorted("test",list,"author")
--- local test = { } for i=1,#sort do test[i] = dataset.details[list[sort[i]]] end
diff --git a/tex/context/base/publ-dat.lua b/tex/context/base/publ-dat.lua
index 0026c2c1f..b11ddf215 100644
--- a/tex/context/base/publ-dat.lua
+++ b/tex/context/base/publ-dat.lua
@@ -9,6 +9,7 @@ if not modules then modules = { } end modules ['publ-dat'] = {
-- todo: strip the @ in the lpeg instead of on do_definition and do_shortcut
-- todo: store bibroot and bibrootdt
-- todo: dataset = datasets[dataset] => current = datasets[dataset]
+-- todo: maybe split this file
--[[ldx--
<p>This is a prelude to integrated bibliography support. This file just loads
@@ -68,15 +69,31 @@ publications.tables = tables
publications.statistics = publications.statistics or { }
local publicationsstats = publications.statistics
+local loaders = publications.loaders or { }
+publications.loaders = loaders
+
+local casters = { }
+publications.casters = casters
+
+local enhancers = publications.enhancers or { }
+publications.enhancers = enhancers
+
+local enhancer = publications.enhancer or utilities.sequencers.new { arguments = "dataset" }
+publications.enhancer = enhancer
+
+utilities.sequencers.appendgroup(enhancer,"system") -- private
+
publicationsstats.nofbytes = 0
publicationsstats.nofdefinitions = 0
publicationsstats.nofshortcuts = 0
publicationsstats.nofdatasets = 0
local privates = allocate {
- category = true,
- tag = true,
- index = true,
+ category = true,
+ tag = true,
+ index = true,
+ suffix = true,
+ specification = true,
}
local specials = allocate {
@@ -87,39 +104,6 @@ local specials = allocate {
comment = true,
}
-tables.privates = privates
-tables.specials = specials
-
-if not publications.usedentries then
- function publications.usedentries()
- return { }
- end
-end
-
-local v_all = interfaces and interfaces.variables.all or "all"
-
-local xmlplaceholder = "<?xml version='1.0' standalone='yes'?>\n<bibtex></bibtex>"
-
-local defaultshortcuts = allocate {
- jan = "1",
- feb = "2",
- mar = "3",
- apr = "4",
- may = "5",
- jun = "6",
- jul = "7",
- aug = "8",
- sep = "9",
- oct = "10",
- nov = "11",
- dec = "12",
-}
-
-local space = p_whitespace^0
-local separator = space * "+" * space
-local l_splitter = lpeg.tsplitat(separator)
-local d_splitter = lpeg.splitat (separator)
-
local implicits = allocate {
category = "implicit",
tag = "implicit",
@@ -148,12 +132,50 @@ local defaulttypes = allocate {
author = "author",
editor = "author",
publisher = "author",
+ page = "pagenumber",
+ pages = "pagenumber",
+ keywords = "keyword",
}
tables.implicits = implicits
tables.origins = origins
tables.virtuals = virtuals
tables.types = defaulttypes
+tables.privates = privates
+tables.specials = specials
+
+local variables = interfaces and interfaces.variables or setmetatableindex("self")
+
+local v_all = variables.all
+local v_standard = variables.standard
+
+if not publications.usedentries then
+ function publications.usedentries()
+ return { }
+ end
+end
+
+local xmlplaceholder = "<?xml version='1.0' standalone='yes'?>\n<bibtex></bibtex>"
+
+local defaultshortcuts = allocate {
+ jan = "1",
+ feb = "2",
+ mar = "3",
+ apr = "4",
+ may = "5",
+ jun = "6",
+ jul = "7",
+ aug = "8",
+ sep = "9",
+ oct = "10",
+ nov = "11",
+ dec = "12",
+}
+
+local space = p_whitespace^0
+local separator = space * "+" * space
+local l_splitter = lpeg.tsplitat(separator)
+local d_splitter = lpeg.splitat (separator)
local unknownfield = function(t,k)
local v = "extra"
@@ -280,7 +302,8 @@ function publications.parenttag(dataset,tag)
elseif find(tag,"%+") then
local tags = lpegmatch(l_splitter,tag)
local parent = tags[1]
- local luadata = datasets[dataset].luadata
+ local current = datasets[dataset]
+ local luadata = current.luadata
local first = luadata[parent]
if first then
local combined = first.combined
@@ -318,8 +341,8 @@ function publications.new(name)
luadata = { },
suffixes = { },
xmldata = xmlconvert(xmlplaceholder),
- -- details = { },
- -- ordered = { },
+ details = { },
+ ordered = { },
nofbytes = 0,
entries = nil, -- empty == all
sources = { },
@@ -332,39 +355,16 @@ function publications.new(name)
resources = false,
userdata = false,
},
+ specifications = {
+ -- used specifications
+ },
}
-- we delay details till we need it (maybe we just delay the
-- individual fields but that is tricky as there can be some
-- depedencies)
- setmetatableindex(dataset,function(t,k)
- -- will become a plugin
- if k == "details" then
- if publications.enhance then
- dataset.details = { }
- publications.enhance(dataset.name)
- return dataset.details
- end
- elseif k == "ordered" then
- local luadata = dataset.luadata
- local ordered = sortedkeys(luadata)
- for i=1,#ordered do
- ordered[i] = luadata[ordered[i]]
- end
- dataset.ordered = ordered
- return ordered
- end
- end)
return dataset
end
-function publications.markasupdated(name)
- if type(name) == "string" then
- rawset(datasets[name],"details",nil)
- else
- rawset(name,"details",nil)
- end
-end
-
setmetatableindex(datasets,function(t,k)
if type(k) == "table" then
return k -- so we can use this accessor as checker
@@ -375,40 +375,6 @@ setmetatableindex(datasets,function(t,k)
end
end)
--- we apply some normalization
-
-local space = S(" \t\n\r\f") -- / " "
-
------ command = P("\\") * Cc("btxcmd{") * (R("az","AZ")^1) * Cc("}")
------ command = P("\\") * (Carg(1) * C(R("az","AZ")^1) / function(list,c) list[c] = (list[c] or 0) + 1 return "btxcmd{" .. c .. "}" end)
-local command = P("\\") * (Carg(1) * C(R("az","AZ")^1) * space^0 / function(list,c) list[c] = (list[c] or 0) + 1 return "btxcmd{" .. c .. "}" end)
-local somemath = P("$") * ((1-P("$"))^1) * P("$") -- let's not assume nested math
-local any = P(1)
-local done = P(-1)
-local one_l = P("{") / ""
-local one_r = P("}") / ""
-local two_l = P("{{") / ""
-local two_r = P("}}") / ""
-local special = P("#") / "\\letterhash"
-
-local filter_0 = S('\\{}')
-local filter_1 = (1-filter_0)^0 * filter_0
-local filter_2 = Cs(
--- {{...}} ... {{...}}
--- two_l * (command + special + any - two_r - done)^0 * two_r * done +
--- one_l * (command + special + any - one_r - done)^0 * one_r * done +
- (somemath + command + special + any )^0
-)
-
--- Currently we expand shortcuts and for large ones (like the acknowledgements
--- in tugboat.bib) this is not that efficient. However, eventually strings get
--- hashed again.
-
-local function do_shortcut(key,value,dataset)
- publicationsstats.nofshortcuts = publicationsstats.nofshortcuts + 1
- dataset.shortcuts[key] = value
-end
-
local function getindex(dataset,luadata,tag)
local found = luadata[tag]
if found then
@@ -422,480 +388,517 @@ end
publications.getindex = getindex
--- todo: categories : metatable that lowers and also counts
--- todo: fields : metatable that lowers
-
-local tags = table.setmetatableindex("table")
-
-local function do_definition(category,tag,tab,dataset)
- publicationsstats.nofdefinitions = publicationsstats.nofdefinitions + 1
- local fields = dataset.fields
- local luadata = dataset.luadata
- if luadata[tag] then
- local t = tags[tag]
- local d = dataset.name
- local n = (t[n] or 0) + 1
- t[d] = n
- if trace_duplicates then
- local p = { }
- for k, v in sortedhash(t) do
- p[#p+1] = formatters["%s:%s"](k,v)
- end
- report_duplicates("tag %a is present multiple times: % t",tag,p)
- end
- else
- local found = luadata[tag]
- local index = getindex(dataset,luadata,tag)
- local entries = {
- category = lower(category),
- tag = tag,
- index = index,
- }
- for i=1,#tab,2 do
- local original = tab[i]
- local normalized = fields[original]
- if not normalized then
- normalized = lower(original) -- we assume ascii fields
- fields[original] = normalized
- end
- -- if entries[normalized] then
- if rawget(entries,normalized) then
- if trace_duplicates then
- report_duplicates("redundant field %a is ignored for tag %a in dataset %a",normalized,tag,dataset.name)
+do
+
+ -- we apply some normalization
+
+ local space = S(" \t\n\r\f") -- / " "
+
+ ----- command = P("\\") * Cc("btxcmd{") * (R("az","AZ")^1) * Cc("}")
+ ----- command = P("\\") * (Carg(1) * C(R("az","AZ")^1) / function(list,c) list[c] = (list[c] or 0) + 1 return "btxcmd{" .. c .. "}" end)
+ local command = P("\\") * (Carg(1) * C(R("az","AZ")^1) * space^0 / function(list,c) list[c] = (list[c] or 0) + 1 return "btxcmd{" .. c .. "}" end)
+ local somemath = P("$") * ((1-P("$"))^1) * P("$") -- let's not assume nested math
+ local any = P(1)
+ local done = P(-1)
+ local one_l = P("{") / ""
+ local one_r = P("}") / ""
+ local two_l = P("{{") / ""
+ local two_r = P("}}") / ""
+ local special = P("#") / "\\letterhash"
+
+ local filter_0 = S('\\{}')
+ local filter_1 = (1-filter_0)^0 * filter_0
+ local filter_2 = Cs(
+ -- {{...}} ... {{...}}
+ -- two_l * (command + special + any - two_r - done)^0 * two_r * done +
+ -- one_l * (command + special + any - one_r - done)^0 * one_r * done +
+ (somemath + command + special + any )^0
+ )
+
+ -- Currently we expand shortcuts and for large ones (like the acknowledgements
+ -- in tugboat.bib) this is not that efficient. However, eventually strings get
+ -- hashed again.
+
+ local function do_shortcut(key,value,dataset)
+ publicationsstats.nofshortcuts = publicationsstats.nofshortcuts + 1
+ dataset.shortcuts[key] = value
+ end
+
+ -- todo: categories : metatable that lowers and also counts
+ -- todo: fields : metatable that lowers
+
+ local tags = table.setmetatableindex("table")
+
+ local function do_definition(category,tag,tab,dataset)
+ publicationsstats.nofdefinitions = publicationsstats.nofdefinitions + 1
+ local fields = dataset.fields
+ local luadata = dataset.luadata
+ if luadata[tag] then
+ local t = tags[tag]
+ local d = dataset.name
+            local n = (t[d] or 0) + 1
+ t[d] = n
+ if trace_duplicates then
+ local p = { }
+ for k, v in sortedhash(t) do
+ p[#p+1] = formatters["%s:%s"](k,v)
end
- else
- local value = tab[i+1]
- value = textoutf(value)
- if lpegmatch(filter_1,value) then
- value = lpegmatch(filter_2,value,1,dataset.commands) -- we need to start at 1 for { }
+ report_duplicates("tag %a is present multiple times: % t",tag,p)
+ end
+ else
+ local found = luadata[tag]
+ local index = getindex(dataset,luadata,tag)
+ local entries = {
+ category = lower(category),
+ tag = tag,
+ index = index,
+ }
+ for i=1,#tab,2 do
+ local original = tab[i]
+ local normalized = fields[original]
+ if not normalized then
+ normalized = lower(original) -- we assume ascii fields
+ fields[original] = normalized
end
- if normalized == "crossref" then
- local parent = luadata[value]
- if parent then
- setmetatableindex(entries,parent)
- else
- -- warning
+ -- if entries[normalized] then
+ if rawget(entries,normalized) then
+ if trace_duplicates then
+ report_duplicates("redundant field %a is ignored for tag %a in dataset %a",normalized,tag,dataset.name)
+ end
+ else
+ local value = tab[i+1]
+ value = textoutf(value)
+ if lpegmatch(filter_1,value) then
+ value = lpegmatch(filter_2,value,1,dataset.commands) -- we need to start at 1 for { }
+ end
+ if normalized == "crossref" then
+ local parent = luadata[value]
+ if parent then
+ setmetatableindex(entries,parent)
+ else
+ -- warning
+ end
end
+ entries[normalized] = value
end
- entries[normalized] = value
end
+ luadata[tag] = entries
end
- luadata[tag] = entries
end
-end
-local function resolve(s,dataset)
- return dataset.shortcuts[s] or defaultshortcuts[s] or s -- can be number
-end
+ local function resolve(s,dataset)
+ return dataset.shortcuts[s] or defaultshortcuts[s] or s -- can be number
+ end
-local pattern = p_whitespace^0
- * C(P("message") + P("warning") + P("error") + P("comment")) * p_whitespace^0 * P(":")
- * p_whitespace^0
- * C(P(1)^1)
-
-local function do_comment(s,dataset)
- local how, what = lpegmatch(pattern,s)
- if how and what then
- local t = string.splitlines(utilities.strings.striplines(what))
- local b = file.basename(dataset.fullname or dataset.name or "unset")
- for i=1,#t do
- report("%s > %s : %s",b,how,t[i])
+ local pattern = p_whitespace^0
+ * C(P("message") + P("warning") + P("error") + P("comment")) * p_whitespace^0 * P(":")
+ * p_whitespace^0
+ * C(P(1)^1)
+
+ local function do_comment(s,dataset)
+ local how, what = lpegmatch(pattern,s)
+ if how and what then
+ local t = string.splitlines(utilities.strings.striplines(what))
+ local b = file.basename(dataset.fullname or dataset.name or "unset")
+ for i=1,#t do
+ report("%s > %s : %s",b,how,t[i])
+ end
end
end
-end
-local percent = P("%")
-local start = P("@")
-local comma = P(",")
-local hash = P("#")
-local escape = P("\\")
-local single = P("'")
-local double = P('"')
-local left = P('{')
-local right = P('}')
-local both = left + right
-local lineending = S("\n\r")
-local space = S(" \t\n\r\f") -- / " "
-local spacing = space^0
-local equal = P("=")
------ collapsed = (space^1)/ " "
-local collapsed = (p_whitespace^1)/" "
-
------ balanced = lpegpatterns.balanced
-
-local balanced = P {
--- [1] = ((escape * (left+right)) + (collapsed + 1 - (left+right)) + V(2))^0,
- [1] = ((escape * (left+right)) + collapsed + (1 - (left+right))^1 + V(2))^0,
- [2] = left * V(1) * right,
-}
+ local percent = P("%")
+ local start = P("@")
+ local comma = P(",")
+ local hash = P("#")
+ local escape = P("\\")
+ local single = P("'")
+ local double = P('"')
+ local left = P('{')
+ local right = P('}')
+ local both = left + right
+ local lineending = S("\n\r")
+ local space = S(" \t\n\r\f") -- / " "
+ local spacing = space^0
+ local equal = P("=")
+ ----- collapsed = (space^1)/ " "
+ local collapsed = (p_whitespace^1)/" "
+
+ ----- balanced = lpegpatterns.balanced
+
+ local balanced = P {
+ -- [1] = ((escape * (left+right)) + (collapsed + 1 - (left+right)) + V(2))^0,
+ [1] = ((escape * (left+right)) + collapsed + (1 - (left+right))^1 + V(2))^0,
+ [2] = left * V(1) * right,
+ }
-local unbalanced = P {
- [1] = left * V(2) * right,
- [2] = ((escape * (left+right)) + collapsed + (1 - (left+right))^1 + V(1))^0,
-}
+ local unbalanced = P {
+ [1] = left * V(2) * right,
+ [2] = ((escape * (left+right)) + collapsed + (1 - (left+right))^1 + V(1))^0,
+ }
-local keyword = C((R("az","AZ","09") + S("@_:-"))^1)
-local key = C((1-space-equal)^1)
-local tag = C((1-space-comma)^1)
-local reference = keyword
-local category = C((1-space-left)^1)
-local s_quoted = ((escape*single) + collapsed + (1-single))^0
-local d_quoted = ((escape*double) + collapsed + (1-double))^0
+ local keyword = C((R("az","AZ","09") + S("@_:-"))^1)
+ local key = C((1-space-equal)^1)
+ local tag = C((1-space-comma)^1)
+ local reference = keyword
+ local category = C((1-space-left)^1)
+ local s_quoted = ((escape*single) + collapsed + (1-single))^0
+ local d_quoted = ((escape*double) + collapsed + (1-double))^0
-local b_value = (left /"") * balanced * (right /"")
-local u_value = (left /"") * unbalanced * (right /"") -- get rid of outer { }
-local s_value = (single/"") * (u_value + s_quoted) * (single/"")
-local d_value = (double/"") * (u_value + d_quoted) * (double/"")
-local r_value = reference * Carg(1) /resolve
+ local b_value = (left /"") * balanced * (right /"")
+ local u_value = (left /"") * unbalanced * (right /"") -- get rid of outer { }
+ local s_value = (single/"") * (u_value + s_quoted) * (single/"")
+ local d_value = (double/"") * (u_value + d_quoted) * (double/"")
+ local r_value = reference * Carg(1) /resolve
-local somevalue = d_value + b_value + s_value + r_value
-local value = Cs((somevalue * ((spacing * hash * spacing)/"" * somevalue)^0))
+ local somevalue = d_value + b_value + s_value + r_value
+ local value = Cs((somevalue * ((spacing * hash * spacing)/"" * somevalue)^0))
-local forget = percent^1 * (1-lineending)^0
-local spacing = spacing * forget^0 * spacing
-local assignment = spacing * key * spacing * equal * spacing * value * spacing
-local definition = category * spacing * left * spacing * tag * spacing * comma * Ct((assignment * comma^0)^0) * spacing * right * Carg(1) / do_definition
+ local forget = percent^1 * (1-lineending)^0
+ local spacing = spacing * forget^0 * spacing
+ local assignment = spacing * key * spacing * equal * spacing * value * spacing
+ local definition = category * spacing * left * spacing * tag * spacing * comma * Ct((assignment * comma^0)^0) * spacing * right * Carg(1) / do_definition
-local crapword = C((1-space-left)^1)
-local shortcut = Cmt(crapword,function(_,p,s) return lower(s) == "string" and p end) * spacing * left * ((assignment * Carg(1))/do_shortcut * comma^0)^0 * spacing * right
-local comment = Cmt(crapword,function(_,p,s) return lower(s) == "comment" and p end) * spacing * lpegpatterns.argument * Carg(1) / do_comment
+ local crapword = C((1-space-left)^1)
+ local shortcut = Cmt(crapword,function(_,p,s) return lower(s) == "string" and p end) * spacing * left * ((assignment * Carg(1))/do_shortcut * comma^0)^0 * spacing * right
+ local comment = Cmt(crapword,function(_,p,s) return lower(s) == "comment" and p end) * spacing * lpegpatterns.argument * Carg(1) / do_comment
-local casecrap = #S("sScC") * (shortcut + comment)
+ local casecrap = #S("sScC") * (shortcut + comment)
-local bibtotable = (space + forget + P("@") * (casecrap + definition) + 1)^0
+ local bibtotable = (space + forget + P("@") * (casecrap + definition) + 1)^0
--- todo \%
+ -- todo \%
--- loadbibdata -> dataset.luadata
--- loadtexdata -> dataset.luadata
--- loadluadata -> dataset.luadata
+ -- loadbibdata -> dataset.luadata
+ -- loadtexdata -> dataset.luadata
+ -- loadluadata -> dataset.luadata
--- converttoxml -> dataset.xmldata from dataset.luadata
+ -- converttoxml -> dataset.xmldata from dataset.luadata
-function publications.loadbibdata(dataset,content,source,kind)
- if not source then
- report("invalid source for dataset %a",dataset)
- return
+ function publications.loadbibdata(dataset,content,source,kind)
+ if not source then
+ report("invalid source for dataset %a",dataset)
+ return
+ end
+ local current = datasets[dataset]
+ local size = #content
+ if size == 0 then
+ report("empty source %a for dataset %a",source,current.name)
+ else
+ report("adding bib data to set %a from source %a",current.name,source)
+ end
+ statistics.starttiming(publications)
+ publicationsstats.nofbytes = publicationsstats.nofbytes + size
+ current.nofbytes = current.nofbytes + size
+ if source then
+ table.insert(current.sources, { filename = source, checksum = md5.HEX(content) })
+ current.loaded[source] = kind or true
+ end
+ current.newtags = #current.luadata > 0 and { } or current.newtags
+ lpegmatch(bibtotable,content or "",1,current)
+ statistics.stoptiming(publications)
end
- dataset = datasets[dataset]
- local size = #content
- if size == 0 then
- report("empty source %a for dataset %a",source,dataset.name)
- else
- report("adding bib data to set %a from source %a",dataset.name,source)
- end
- statistics.starttiming(publications)
- publicationsstats.nofbytes = publicationsstats.nofbytes + size
- dataset.nofbytes = dataset.nofbytes + size
- if source then
- table.insert(dataset.sources, { filename = source, checksum = md5.HEX(content) })
- dataset.loaded[source] = kind or true
- end
- dataset.newtags = #dataset.luadata > 0 and { } or dataset.newtags
- publications.markasupdated(dataset)
- lpegmatch(bibtotable,content or "",1,dataset)
- statistics.stoptiming(publications)
-end
-
--- we could use xmlescape again
-local cleaner_0 = S('<>&')
-local cleaner_1 = (1-cleaner_0)^0 * cleaner_0
-local cleaner_2 = Cs ( (
- P("<") / "&lt;" +
- P(">") / "&gt;" +
- P("&") / "&amp;" +
- P(1)
-)^0)
-
-local compact = false -- can be a directive but then we also need to deal with newlines ... not now
+end
-function publications.converttoxml(dataset,nice,dontstore,usedonly) -- we have fields !
- current = datasets[dataset]
- local luadata = current and current.luadata
- if luadata then
- statistics.starttiming(publications)
- --
- local result, r, n = { }, 0, 0
- local usedonly = usedonly and publications.usedentries(dataset)
- --
- r = r + 1 ; result[r] = "<?xml version='1.0' standalone='yes'?>"
- r = r + 1 ; result[r] = "<bibtex>"
- --
- if nice then
- local f_entry_start = formatters[" <entry tag='%s' category='%s' index='%s'>"]
- local s_entry_stop = " </entry>"
- local f_field = formatters[" <field name='%s'>%s</field>"]
- for tag, entry in sortedhash(luadata) do
- if not usedonly or usedonly[tag] then
- r = r + 1 ; result[r] = f_entry_start(tag,entry.category,entry.index)
- for key, value in sortedhash(entry) do
- if key ~= "tag" and key ~= "category" and key ~= "index" then
- if lpegmatch(cleaner_1,value) then
- value = lpegmatch(cleaner_2,value)
- end
- if value ~= "" then
- r = r + 1 ; result[r] = f_field(key,value)
+do
+
+ -- we could use xmlescape again
+
+ local cleaner_0 = S('<>&')
+ local cleaner_1 = (1-cleaner_0)^0 * cleaner_0
+ local cleaner_2 = Cs ( (
+ P("<") / "&lt;" +
+ P(">") / "&gt;" +
+ P("&") / "&amp;" +
+ P(1)
+ )^0)
+
+ local compact = false -- can be a directive but then we also need to deal with newlines ... not now
+
+ function publications.converttoxml(dataset,nice,dontstore,usedonly) -- we have fields !
+ local current = datasets[dataset]
+ local luadata = current and current.luadata
+ if luadata then
+ statistics.starttiming(publications)
+ --
+ local result, r, n = { }, 0, 0
+ local usedonly = usedonly and publications.usedentries()
+ --
+ r = r + 1 ; result[r] = "<?xml version='1.0' standalone='yes'?>"
+ r = r + 1 ; result[r] = "<bibtex>"
+ --
+ if nice then
+ local f_entry_start = formatters[" <entry tag='%s' category='%s' index='%s'>"]
+ local s_entry_stop = " </entry>"
+ local f_field = formatters[" <field name='%s'>%s</field>"]
+ for tag, entry in sortedhash(luadata) do
+ if not usedonly or usedonly[tag] then
+ r = r + 1 ; result[r] = f_entry_start(tag,entry.category,entry.index)
+ for key, value in sortedhash(entry) do
+ if key ~= "tag" and key ~= "category" and key ~= "index" then
+ if lpegmatch(cleaner_1,value) then
+ value = lpegmatch(cleaner_2,value)
+ end
+ if value ~= "" then
+ r = r + 1 ; result[r] = f_field(key,value)
+ end
end
end
+ r = r + 1 ; result[r] = s_entry_stop
+ n = n + 1
end
- r = r + 1 ; result[r] = s_entry_stop
- n = n + 1
end
- end
- else
- local f_entry_start = formatters["<entry tag='%s' category='%s' index='%s'>"]
- local s_entry_stop = "</entry>"
- local f_field = formatters["<field name='%s'>%s</field>"]
- for tag, entry in next, luadata do
- if not usedonly or usedonly[tag] then
- r = r + 1 ; result[r] = f_entry_start(entry.tag,entry.category,entry.index)
- for key, value in next, entry do
- if key ~= "tag" and key ~= "category" and key ~= "index" then
- if lpegmatch(cleaner_1,value) then
- value = lpegmatch(cleaner_2,value)
- end
- if value ~= "" then
- r = r + 1 ; result[r] = f_field(key,value)
+ else
+ local f_entry_start = formatters["<entry tag='%s' category='%s' index='%s'>"]
+ local s_entry_stop = "</entry>"
+ local f_field = formatters["<field name='%s'>%s</field>"]
+ for tag, entry in next, luadata do
+ if not usedonly or usedonly[tag] then
+ r = r + 1 ; result[r] = f_entry_start(entry.tag,entry.category,entry.index)
+ for key, value in next, entry do
+ if key ~= "tag" and key ~= "category" and key ~= "index" then
+ if lpegmatch(cleaner_1,value) then
+ value = lpegmatch(cleaner_2,value)
+ end
+ if value ~= "" then
+ r = r + 1 ; result[r] = f_field(key,value)
+ end
end
end
+ r = r + 1 ; result[r] = s_entry_stop
+ n = n + 1
end
- r = r + 1 ; result[r] = s_entry_stop
- n = n + 1
end
end
- end
- --
- r = r + 1 ; result[r] = "</bibtex>"
- --
- result = concat(result,nice and "\n" or nil)
- --
- if dontstore then
- -- indeed
- else
- statistics.starttiming(xml)
- current.xmldata = xmlconvert(result, {
- resolve_entities = true,
- resolve_predefined_entities = true, -- in case we have escaped entities
- -- unify_predefined_entities = true, -- &#038; -> &amp;
- utfize_entities = true,
- } )
- statistics.stoptiming(xml)
- if lxml then
- lxml.register(formatters["btx:%s"](current.name),current.xmldata)
+ --
+ r = r + 1 ; result[r] = "</bibtex>"
+ --
+ result = concat(result,nice and "\n" or nil)
+ --
+ if dontstore then
+ -- indeed
+ else
+ statistics.starttiming(xml)
+ current.xmldata = xmlconvert(result, {
+ resolve_entities = true,
+ resolve_predefined_entities = true, -- in case we have escaped entities
+ -- unify_predefined_entities = true, -- &#038; -> &amp;
+ utfize_entities = true,
+ } )
+ statistics.stoptiming(xml)
+ if lxml then
+ lxml.register(formatters["btx:%s"](current.name),current.xmldata)
+ end
end
+ statistics.stoptiming(publications)
+ return result, n
end
- statistics.stoptiming(publications)
- return result, n
end
+
end
-local loaders = publications.loaders or { }
-publications.loaders = loaders
+do
-local function resolvedname(dataset,filename)
- dataset = datasets[dataset]
- if type(filename) ~= "string" then
- report("invalid filename %a",tostring(filename))
- end
- local fullname = resolvers.findfile(filename,"bib")
- if fullname == "" then
- fullname = resolvers.findfile(filename) -- let's not be too picky
- end
- if not fullname or fullname == "" then
- report("no file %a",filename)
- dataset.fullname = filename
- return dataset, false
- else
- dataset.fullname = fullname
- return dataset, fullname
+ local function resolvedname(dataset,filename)
+ local current = datasets[dataset]
+ if type(filename) ~= "string" then
+ report("invalid filename %a",tostring(filename))
+ end
+ local fullname = resolvers.findfile(filename,"bib")
+ if fullname == "" then
+ fullname = resolvers.findfile(filename) -- let's not be too picky
+ end
+ if not fullname or fullname == "" then
+ report("no file %a",filename)
+ current.fullname = filename
+ return current, false
+ else
+ current.fullname = fullname
+ return current, fullname
+ end
end
-end
-publications.resolvedname = resolvedname
+ publications.resolvedname = resolvedname
-function loaders.bib(dataset,filename,kind)
- local dataset, fullname = resolvedname(dataset,filename)
- if not fullname then
- return
+ function loaders.bib(dataset,filename,kind)
+ local dataset, fullname = resolvedname(dataset,filename)
+ if not fullname then
+ return
+ end
+        local data = io.loaddata(fullname) or ""
+ if data == "" then
+ report("empty file %a, nothing loaded",fullname)
+ return
+ end
+ if trace then
+            report("loading file %a",fullname)
+ end
+ publications.loadbibdata(dataset,data,fullname,kind)
end
- local data = io.loaddata(filename) or ""
- if data == "" then
- report("empty file %a, nothing loaded",fullname)
- return
+
+ function loaders.lua(dataset,filename) -- if filename is a table we load that one
+ local current, data, fullname
+ if type(filename) == "table" then
+ current = datasets[dataset]
+ data = filename
+ else
+ dataset, fullname = resolvedname(dataset,filename)
+ if not fullname then
+ return
+ end
+ current = datasets[dataset]
+            data = table.load(fullname)
+ end
+ if data then
+ local luadata = current.luadata
+ for tag, entry in next, data do
+ if type(entry) == "table" then
+ entry.index = getindex(current,luadata,tag)
+ entry.tag = tag
+ luadata[tag] = entry -- no cleaning yet
+ end
+ end
+ end
end
- if trace then
- report("loading file",fullname)
+
+ function loaders.buffer(dataset,name) -- if filename is a table we load that one
+ local current = datasets[dataset]
+ local barename = file.removesuffix(name)
+ local data = buffers.getcontent(barename) or ""
+ if data == "" then
+ report("empty buffer %a, nothing loaded",barename)
+ return
+ end
+ if trace then
+            report("loading buffer %a",barename)
+ end
+ publications.loadbibdata(current,data,barename,"bib")
end
- publications.loadbibdata(dataset,data,fullname,kind)
-end
-function loaders.lua(dataset,filename) -- if filename is a table we load that one
- local data, fullname
- if type(filename) == "table" then
- dataset = datasets[dataset]
- data = filename
- else
- dataset, fullname = resolvedname(dataset,filename)
+ function loaders.xml(dataset,filename)
+ local dataset, fullname = resolvedname(dataset,filename)
if not fullname then
return
end
- data = table.load(filename)
- end
- if data then
- local luadata = dataset.luadata
- for tag, entry in next, data do
- if type(entry) == "table" then
- entry.index = getindex(dataset,luadata,tag)
- entry.tag = tag
- luadata[tag] = entry -- no cleaning yet
+ local current = datasets[dataset]
+ local luadata = current.luadata
+        local root = xml.load(fullname)
+ for bibentry in xmlcollected(root,"/bibtex/entry") do
+ local attributes = bibentry.at
+ local tag = attributes.tag
+ local entry = {
+ category = attributes.category,
+ tag = tag, -- afterwards also set, to prevent overload
+ index = 0, -- prelocated
+ }
+ for field in xmlcollected(bibentry,"/field") do
+ entry[field.at.name] = field.dt[1] -- no cleaning yet | xmltext(field)
end
+ entry.index = getindex(current,luadata,tag)
+ entry.tag = tag
+ luadata[tag] = entry
end
end
-end
-function loaders.buffer(dataset,name) -- if filename is a table we load that one
- dataset = datasets[dataset]
- name = file.removesuffix(name)
- local data = buffers.getcontent(name) or ""
- if data == "" then
- report("empty buffer %a, nothing loaded",name)
- return
- end
- if trace then
- report("loading buffer",name)
- end
- publications.loadbibdata(dataset,data,name,"bib")
-end
+ setmetatableindex(loaders,function(t,filetype)
+ local v = function(dataset,filename)
+ report("no loader for file %a with filetype %a",filename,filetype)
+ end
+        t[filetype] = v
+ return v
+ end)
-function loaders.xml(dataset,filename)
- local dataset, fullname = resolvedname(dataset,filename)
- if not fullname then
- return
- end
- local luadata = dataset.luadata
- local root = xml.load(filename)
- for bibentry in xmlcollected(root,"/bibtex/entry") do
- local attributes = bibentry.at
- local tag = attributes.tag
- local entry = {
- category = attributes.category,
- tag = tag, -- afterwards also set, to prevent overload
- index = 0, -- prelocated
- }
- for field in xmlcollected(bibentry,"/field") do
- entry[field.at.name] = field.dt[1] -- no cleaning yet | xmltext(field)
+ function publications.load(specification)
+ local current = datasets[specification.dataset or v_standard]
+ local files = settings_to_array(specification.filename)
+ local kind = specification.kind
+ local dataspec = specification.specification
+ statistics.starttiming(publications)
+ for i=1,#files do
+ local filetype, filename = string.splitup(files[i],"::")
+ if not filename then
+ filename = filetype
+ filetype = file.suffix(filename)
+ end
+ if filename then
+ if not filetype or filetype == "" then
+ filetype = "bib"
+ end
+ if file.suffix(filename) == "" then
+ file.addsuffix(filename,filetype)
+ end
+ loaders[filetype](current,filename)
+ if kind then
+ current.loaded[current.fullname or filename] = kind
+ end
+ if dataspec then
+ current.specifications[dataspec] = true
+ end
+ end
+ end
+ local runner = enhancer.runner
+ if runner then
+ runner(current)
end
- entry.index = getindex(dataset,luadata,tag)
- entry.tag = tag
- luadata[tag] = entry
+ statistics.stoptiming(publications)
+ return current
end
+
end
-setmetatableindex(loaders,function(t,filetype)
- local v = function(dataset,filename)
- report("no loader for file %a with filetype %a",filename,filetype)
- end
- t[k] = v
- return v
-end)
+do
-function publications.load(dataset,filename,kind)
- dataset = datasets[dataset]
- statistics.starttiming(publications)
- local files = settings_to_array(filename)
- for i=1,#files do
- local filetype, filename = string.splitup(files[i],"::")
- if not filename then
- filename = filetype
- filetype = file.suffix(filename)
+ function enhancers.order(dataset)
+ local luadata = dataset.luadata
+ local ordered = sortedkeys(luadata)
+ local total = #ordered
+ for i=1,total do
+ ordered[i] = luadata[ordered[i]]
end
- if filename then
- if not filetype or filetype == "" then
- filetype = "bib"
- end
- if file.suffix(filename) == "" then
- file.addsuffix(filename,filetype)
- end
- loaders[filetype](dataset,filename)
- if kind then
- dataset.loaded[dataset.fullname or filename] = kind
+ dataset.ordered = ordered
+ end
+
+ function enhancers.details(dataset)
+ local luadata = dataset.luadata
+ local details = dataset.details
+ for tag, entry in next, luadata do
+ if not details[tag] then
+ details[tag] = { }
end
end
end
- statistics.stoptiming(publications)
- return dataset
+
+ utilities.sequencers.appendaction(enhancer,"system","publications.enhancers.order")
+ utilities.sequencers.appendaction(enhancer,"system","publications.enhancers.details")
+
end
-local checked = function(s,d) d[s] = (d[s] or 0) + 1 end
-local checktex = ( (1-P("\\"))^1 + P("\\") * ((C(R("az","AZ")^1) * Carg(1))/checked))^0
-
-function publications.analyze(dataset)
- dataset = datasets[dataset]
- local data = dataset.luadata
- local categories = { }
- local fields = { }
- local commands = { }
- for k, v in next, data do
- categories[v.category] = (categories[v.category] or 0) + 1
- for k, v in next, v do
- fields[k] = (fields[k] or 0) + 1
- lpegmatch(checktex,v,1,commands)
+do
+
+ local checked = function(s,d) d[s] = (d[s] or 0) + 1 end
+ local checktex = ( (1-P("\\"))^1 + P("\\") * ((C(R("az","AZ")^1) * Carg(1))/checked))^0
+
+ function publications.analyze(dataset)
+ local current = datasets[dataset]
+ local data = current.luadata
+ local categories = { }
+ local fields = { }
+ local commands = { }
+ for k, v in next, data do
+ categories[v.category] = (categories[v.category] or 0) + 1
+ for k, v in next, v do
+ fields[k] = (fields[k] or 0) + 1
+ lpegmatch(checktex,v,1,commands)
+ end
end
+ current.analysis = {
+ categories = categories,
+ fields = fields,
+ commands = commands,
+ }
end
- dataset.analysis = {
- categories = categories,
- fields = fields,
- commands = commands,
- }
-end
--- str = [[
--- @COMMENT { CRAP }
--- @STRING{ hans = "h a n s" }
--- @STRING{ taco = "t a c o" }
--- @SOMETHING{ key1, abc = "t a c o" , def = "h a n s" }
--- @SOMETHING{ key2, abc = hans # taco }
--- @SOMETHING{ key3, abc = "hans" # taco }
--- @SOMETHING{ key4, abc = hans # "taco" }
--- @SOMETHING{ key5, abc = hans # taco # "hans" # "taco"}
--- @SOMETHING{ key6, abc = {oeps {oeps} oeps} }
--- ]]
-
--- local dataset = publications.new()
--- publications.tolua(dataset,str)
--- publications.toxml(dataset)
--- publications.toxml(dataset)
--- print(dataset.xmldata)
--- inspect(dataset.luadata)
--- inspect(dataset.xmldata)
--- inspect(dataset.shortcuts)
--- print(dataset.nofbytes,statistics.elapsedtime(publications))
-
--- local dataset = publications.new()
--- publications.load(dataset,"IEEEabrv.bib")
--- publications.load(dataset,"IEEEfull.bib")
--- publications.load(dataset,"IEEEexample.bib")
--- publications.toxml(dataset)
--- print(dataset.nofbytes,statistics.elapsedtime(publications))
-
--- local dataset = publications.new()
--- publications.load(dataset,"gut.bib")
--- publications.load(dataset,"komoedie.bib")
--- publications.load(dataset,"texbook1.bib")
--- publications.load(dataset,"texbook2.bib")
--- publications.load(dataset,"texbook3.bib")
--- publications.load(dataset,"texgraph.bib")
--- publications.load(dataset,"texjourn.bib")
--- publications.load(dataset,"texnique.bib")
--- publications.load(dataset,"tugboat.bib")
--- publications.toxml(dataset)
--- print(dataset.nofbytes,statistics.elapsedtime(publications))
-
--- print(table.serialize(dataset.luadata))
--- print(table.serialize(dataset.xmldata))
--- print(table.serialize(dataset.shortcuts))
--- print(xml.serialize(dataset.xmldata))
+end
-- a helper:
@@ -915,91 +918,115 @@ end
-- savers
-local savers = { }
+do
-local s_preamble = [[
-% this is an export from context mkiv
+ local savers = { }
-@preamble{
- \ifdefined\btxcmd
- % we're probably in context
- \else
- \def\btxcmd#1{\csname#1\endcsname}
- \fi
-}
+ local s_preamble = [[
+ % this is an export from context mkiv
-]]
-
-function savers.bib(dataset,filename,usedonly)
- local current = datasets[dataset]
- local luadata = current.luadata or { }
- local usedonly = usedonly and publications.usedentries(dataset)
- local f_start = formatters["@%s{%s,\n"]
- local f_field = formatters[" %s = {%s},\n"]
- local s_stop = "}\n\n"
- local result = { s_preamble }
- local n, r = 0, 1
- for tag, data in sortedhash(luadata) do
- if not usedonly or usedonly[tag] then
- r = r + 1 ; result[r] = f_start(data.category or "article",tag)
- for key, value in sortedhash(data) do
- if privates[key] then
- -- skip
- else
- r = r + 1 ; result[r] = f_field(key,value)
+ @preamble{
+ \ifdefined\btxcmd
+ % we're probably in context
+ \else
+ \def\btxcmd#1{\csname#1\endcsname}
+ \fi
+ }
+
+ ]]
+
+ function savers.bib(dataset,filename,usedonly)
+ local current = datasets[dataset]
+ local luadata = current.luadata or { }
+ local usedonly = usedonly and publications.usedentries()
+ local f_start = formatters["@%s{%s,\n"]
+ local f_field = formatters[" %s = {%s},\n"]
+ local s_stop = "}\n\n"
+ local result = { s_preamble }
+ local n, r = 0, 1
+ for tag, data in sortedhash(luadata) do
+ if not usedonly or usedonly[tag] then
+ r = r + 1 ; result[r] = f_start(data.category or "article",tag)
+ for key, value in sortedhash(data) do
+ if privates[key] then
+ -- skip
+ else
+ r = r + 1 ; result[r] = f_field(key,value)
+ end
end
+ r = r + 1 ; result[r] = s_stop
+ n = n + 1
end
- r = r + 1 ; result[r] = s_stop
- n = n + 1
end
+ report("%s entries from dataset %a saved in %a",n,dataset,filename)
+ io.savedata(filename,concat(result))
end
- report("%s entries from dataset %a saved in %a",n,dataset,filename)
- io.savedata(filename,concat(result))
-end
-function savers.lua(dataset,filename,usedonly)
- local current = datasets[dataset]
- local luadata = current.luadata or { }
- local usedonly = usedonly and publications.usedentries(dataset)
- if usedonly then
- local list = { }
- for k, v in next, luadata do
- if usedonly[k] then
- list[k] = v
+ function savers.lua(dataset,filename,usedonly)
+ local current = datasets[dataset]
+ local luadata = current.luadata or { }
+ local usedonly = usedonly and publications.usedentries()
+ if usedonly then
+ local list = { }
+ for k, v in next, luadata do
+ if usedonly[k] then
+ list[k] = v
+ end
end
+ luadata = list
end
- luadata = list
+ report("%s entries from dataset %a saved in %a",table.count(luadata),dataset,filename)
+ table.save(filename,luadata)
end
- report("%s entries from dataset %a saved in %a",table.count(luadata),dataset,filename)
- table.save(filename,luadata)
-end
-function savers.xml(dataset,filename,usedonly)
- local result, n = publications.converttoxml(dataset,true,true,usedonly)
- report("%s entries from dataset %a saved in %a",n,dataset,filename)
- io.savedata(filename,result)
-end
-
-function publications.save(dataset,filename,kind,usedonly)
- statistics.starttiming(publications)
- if not kind or kind == "" then
- kind = file.suffix(filename)
+ function savers.xml(dataset,filename,usedonly)
+ local result, n = publications.converttoxml(dataset,true,true,usedonly)
+ report("%s entries from dataset %a saved in %a",n,dataset,filename)
+ io.savedata(filename,result)
end
- local saver = savers[kind]
- if saver then
- usedonly = usedonly ~= v_all
- saver(dataset,filename,usedonly)
- else
- report("unknown format %a for saving %a",kind,dataset)
+
+ function publications.save(dataset,filename,kind,usedonly)
+ statistics.starttiming(publications)
+ if not kind or kind == "" then
+ kind = file.suffix(filename)
+ end
+ local saver = savers[kind]
+ if saver then
+ usedonly = usedonly ~= v_all
+ saver(dataset,filename,usedonly)
+ else
+ report("unknown format %a for saving %a",kind,dataset)
+ end
+ statistics.stoptiming(publications)
+ return dataset
end
- statistics.stoptiming(publications)
- return dataset
+
+ commands.btxsavedataset = publications.save
+
end
-commands.btxsavedataset = publications.save
+-- casters
+
+do
--- loaders.bib("test",resolvers.findfile("mkiv-publications.bib","bibtex"))
---
--- publications.save("test","e:/tmp/foo.bib")
--- publications.save("test","e:/tmp/foo.lua")
--- publications.save("test","e:/tmp/foo.xml")
+ publications.detailed = setmetatableindex(function(detailed,kind)
+ local values = setmetatableindex(function(values,value)
+ local caster = casters[kind]
+ local cast = caster and caster(value) or value
+ values[value] = cast
+ return cast
+ end)
+ detailed[kind] = values
+ return values
+ end)
+
+ casters.keyword = utilities.parsers.settings_to_set
+
+ local pagessplitter = lpeg.splitat(P("-")^1)
+
+ casters.pagenumber = function(str)
+ local first, last = lpegmatch(pagessplitter,str)
+ return first and last and { first, last } or str
+ end
+
+end
diff --git a/tex/context/base/publ-fnd.lua b/tex/context/base/publ-fnd.lua
index 739ca9d29..5308302cb 100644
--- a/tex/context/base/publ-fnd.lua
+++ b/tex/context/base/publ-fnd.lua
@@ -13,11 +13,9 @@ end
-- this tracker is only for real debugging and not for the average user
-local trace_match = false trackers.register("publications.cite.match", function(v) trace_match = v end)
+local trace_match = false trackers.register("publications.match", function(v) trace_match = v end)
-if not publications then
- publications = { }
-end
+local publications = publications
local tonumber, next, type = tonumber, next, type
local find = string.find
@@ -29,7 +27,7 @@ local concat = table.concat
local formatters = string.formatters
local lowercase = characters.lower
-local report = logs.reporter("publications","match")
+local report = logs.reporter("publications","match")
local colon = P(":")
local dash = P("-")
diff --git a/tex/context/base/publ-imp-apa.lua b/tex/context/base/publ-imp-apa.lua
index 3ee67f6d1..9b68fdc9b 100644
--- a/tex/context/base/publ-imp-apa.lua
+++ b/tex/context/base/publ-imp-apa.lua
@@ -181,7 +181,7 @@ categories.inbook = {
"volume", "number", "series",
"edition", "month",
"address",
- "note", "isbn",
+ "note", "isbn"
},
}
@@ -199,7 +199,7 @@ categories.booklet = {
"subtitle", "type", "file",
"address",
"howpublished",
- "note", "isbn",
+ "note", "isbn"
},
}
@@ -278,8 +278,22 @@ categories.thesis = {
},
}
-categories.mastersthesis = categories.thesis
-categories.phdthesis = categories.thesis
+categories.mastersthesis = {
+ required = {
+ "author",
+ "title",
+ "school",
+ "year"
+ },
+ optional = {
+ "type",
+ "subtitle", "file",
+ "month",
+ "address",
+ "note"
+ },
+}
+categories.phdthesis = categories.mastersthesis
-- a report published by a school or other institution, usually numbered within a series.
diff --git a/tex/context/base/publ-ini.lua b/tex/context/base/publ-ini.lua
index 188995b0a..b7f164610 100644
--- a/tex/context/base/publ-ini.lua
+++ b/tex/context/base/publ-ini.lua
@@ -6,6 +6,9 @@ if not modules then modules = { } end modules ['publ-ini'] = {
license = "see context related readme files"
}
+-- bah .. this 200 locals limit again ... so we need to split it as adding more
+-- do ... ends makes it messier
+
-- plug the list sorted in the list mechanism (specification.sortorder)
-- todo: delay details till alternative is known so that potential author
@@ -18,7 +21,7 @@ if not modules then modules = { } end modules ['publ-ini'] = {
-- load big bib files many times and even then ... fonts are larger.
local next, rawget, type, tostring, tonumber = next, rawget, type, tostring, tonumber
-local match, gmatch, format, gsub, find = string.match, string.gmatch, string.format, string.gsub, string.find
+local match, find = string.match, string.find
local concat, sort, tohash = table.concat, table.sort, table.tohash
local utfsub = utf.sub
local mod = math.mod
@@ -38,10 +41,15 @@ local trace = false trackers.register("publications",
local trace_cite = false trackers.register("publications.cite", function(v) trace_cite = v end)
local trace_missing = false trackers.register("publications.cite.missing", function(v) trace_missing = v end)
local trace_references = false trackers.register("publications.cite.references", function(v) trace_references = v end)
+local trace_detail = false trackers.register("publications.detail", function(v) trace_detail = v end)
publications = publications or { }
local datasets = publications.datasets
local writers = publications.writers
+local casters = publications.casters
+local detailed = publications.detailed
+local enhancer = publications.enhancer
+local enhancers = publications.enhancers
local tracers = publications.tracers or { }
publications.tracers = tracers
@@ -162,17 +170,12 @@ function commands.registerbtxlistvariant(name,parent)
registeredlistvariants[name] = parent or ""
end
-local specifications = publications.specifications
-local currentspecification = specifications[false]
------ currentspecificationfields = currentspecification.fields
-local currentspecificationcategories = currentspecification.categories
-
-local ignoredfields = { }
+local specifications = publications.specifications
+local currentspecification = specifications[false]
+local ignoredfields = { }
local function setspecification(name)
- currentspecification = specifications[name]
- -- currentspecificationfields = currentspecification.fields
- currentspecificationcategories = currentspecification.categories
+ currentspecification = specifications[name]
if trace then
report("setting specification %a",type(name) == "string" and name or "anything")
end
@@ -238,9 +241,6 @@ end)
local collected = allocate()
local tobesaved = allocate()
--- we use a a dedicated (and efficient as it know what it deals with) serializer,
--- also because we need to ignore the 'details' field
-
do
local function serialize(t)
@@ -312,7 +312,11 @@ do
if datasources then
for i=1,#datasources do
local filename = datasources[i].filename
- publications.load(dataset,filename,"previous")
+ publications.load {
+ dataset = dataset,
+ filename = filename,
+ kind = "previous"
+ }
end
end
if usersource then
@@ -327,21 +331,19 @@ do
end
-if not publications.authors then
- initializer() -- for now, runtime loaded
-end
-
-- we want to minimize references as there can be many (at least
-- when testing)
-local initialized = false
-local usedentries = allocate { }
-local citetolist = allocate { }
-local listtocite = allocate { }
local nofcitations = 0
+local usedentries = nil
+local citetolist = nil
+local listtocite = nil
-setmetatableindex(usedentries,function(t,k)
- if not initialized then
+do
+
+ local initialize = nil
+
+ initialize = function(t)
usedentries = allocate { }
citetolist = allocate { }
listtocite = allocate { }
@@ -434,12 +436,20 @@ setmetatableindex(usedentries,function(t,k)
end
end
end
- return usedentries[k]
+ initialize = nil
+ end
+
+ usedentries = setmetatableindex(function(_,k) if initialize then initialize() end return usedentries[k] end)
+ citetolist = setmetatableindex(function(_,k) if initialize then initialize() end return citetolist [k] end)
+ listtocite = setmetatableindex(function(_,k) if initialize then initialize() end return listtocite [k] end)
+
+ function publications.usedentries()
+ if initialize then
+ initialize()
+ end
+ return usedentries
end
-end)
-function publications.usedentries(dataset)
- return usedentries[dataset]
end
-- match:
@@ -451,103 +461,108 @@ end
-- by prefix
-- by dataset
-local reported = { }
-local finder = publications.finder
-
-local function findallused(dataset,reference,internal)
- local finder = publications.finder -- for the moment, not yet in all betas
- local find = finder and finder(reference)
- local tags = not find and settings_to_array(reference)
- local todo = { }
- local okay = { } -- only if mark
- local set = usedentries[dataset]
- local valid = datasets[dataset].luadata
- local ordered = datasets[dataset].ordered
- if set then
- local function register(tag)
- local entry = set[tag]
- if entry then
- -- only once in a list but at some point we can have more (if we
- -- decide to duplicate)
- if #entry == 1 then
- entry = entry[1]
- else
- -- same block and section
- local done = false
- if internal and internal > 0 then
- -- first following in list
- for i=1,#entry do
- local e = entry[i]
- if e.references.internal > internal then
- done = e
- break
- end
- end
- if not done then
- -- last preceding in list
+local findallused do
+
+ local reported = { }
+ local finder = publications.finder
+
+ findallused = function(dataset,reference,internal)
+ local finder = publications.finder -- for the moment, not yet in all betas
+ local find = finder and finder(reference)
+ local tags = not find and settings_to_array(reference)
+ local todo = { }
+ local okay = { } -- only if mark
+ local set = usedentries[dataset]
+ local current = datasets[dataset]
+ local valid = current.luadata
+ local ordered = current.ordered
+ if set then
+ local function register(tag)
+ local entry = set[tag]
+ if entry then
+ -- only once in a list but at some point we can have more (if we
+ -- decide to duplicate)
+ if #entry == 1 then
+ entry = entry[1]
+ else
+ -- same block and section
+ local done = false
+ if internal and internal > 0 then
+ -- first following in list
for i=1,#entry do
local e = entry[i]
- if e.references.internal < internal then
+ if e.references.internal > internal then
done = e
- else
break
end
end
+ if not done then
+ -- last preceding in list
+ for i=1,#entry do
+ local e = entry[i]
+ if e.references.internal < internal then
+ done = e
+ else
+ break
+ end
+ end
+ end
+ end
+ if done then
+ entry = done
+ else
+ entry = entry[1]
end
end
- if done then
- entry = done
- else
- entry = entry[1]
- end
- end
- okay[#okay+1] = entry
- end
- todo[tag] = true
- end
- if find then
- tags = { }
- for i=1,#ordered do
- local entry = ordered[i]
- if find(entry) then
- local tag = entry.tag
- register(tag)
- tags[#tags+1] = tag
+ okay[#okay+1] = entry
end
+ todo[tag] = true
end
- else
- for i=1,#tags do
- local tag = tags[i]
- if valid[tag] then
- register(tag)
- elseif not reported[tag] then
- reported[tag] = true
- report_cite("non-existent entry %a in %a",tag,dataset)
+ if find then
+ tags = { }
+ for i=1,#ordered do
+ local entry = ordered[i]
+ if find(entry) then
+ local tag = entry.tag
+ register(tag)
+ tags[#tags+1] = tag
+ end
end
- end
- end
- else
- if find then
- tags = { }
- for i=1,#ordered do
- local entry = ordered[i]
- if find(entry) then
- tags[#tags+1] = entry.tag
+ else
+ for i=1,#tags do
+ local tag = tags[i]
+ if valid[tag] then
+ register(tag)
+ elseif not reported[tag] then
+ reported[tag] = true
+ report_cite("non-existent entry %a in %a",tag,dataset)
+ end
end
end
else
- for i=1,#tags do
- local tag = tags[i]
- if valid[tag] then
- todo[tag] = true
- elseif not reported[tag] then
- reported[tag] = true
- report_cite("non-existent entry %a in %a",tag,dataset)
+ if find then
+ tags = { }
+ for i=1,#ordered do
+ local entry = ordered[i]
+ if find(entry) then
+ tags[#tags+1] = entry.tag
+ end
+ end
+ else
+ for i=1,#tags do
+ local tag = tags[i]
+ if valid[tag] then
+ todo[tag] = true
+ elseif not reported[tag] then
+ reported[tag] = true
+ report_cite("non-existent entry %a in %a",tag,dataset)
+ end
end
end
end
+ return okay, todo, tags
end
- return okay, todo, tags
+
end
local function unknowncite(reference)
@@ -671,201 +686,132 @@ end
-- basic loading
-function commands.usebtxdataset(name,filename)
- publications.load(datasets[name],filename,"current")
+function commands.usebtxdataset(specification)
+ specification.kind = "current"
+ publications.load(specification)
end
function commands.convertbtxdatasettoxml(name,nice)
- publications.converttoxml(datasets[name],nice)
+ publications.converttoxml(name,nice)
end
-- enhancing
-local splitauthorstring = publications.authors.splitstring
-
-local pagessplitter = lpeg.splitat(P("-")^1)
-
--- maybe not redo when already done
+do
-local function shortsorter(a,b)
- local ay, by = a[2], b[2]
- if ay ~= by then
- return ay < by
- end
- local ay, by = a[3], b[3]
- if ay ~= by then
- return ay < by
- end
- return a[4] < b[4]
-end
+ -- maybe not redo when already done
-function publications.enhance(dataset) -- for the moment split runs (maybe publications.enhancers)
- statistics.starttiming(publications)
- if type(dataset) == "string" then
- dataset = datasets[dataset]
- else
- -- assume table, otherwise maybe issue an error
- end
- local used = usedentries[dataset.name] or { }
- local luadata = dataset.luadata
- local details = dataset.details
- local ordered = dataset.ordered
- local types = currentspecification.types -- or { author = "author", editor = "author" }
- local authors = { }
- --
- for k, v in next, types do
- if v == "author" then
- authors[#authors+1] = k
+ local function shortsorter(a,b)
+ local ay, by = a[2], b[2]
+ if ay ~= by then
+ return ay < by
end
- end
- --
- for tag, entry in next, luadata do
- local detail = { }
- details[tag] = detail
- for i=1,#authors do
- local key = authors[i]
- local value = entry[key]
- if value then
- detail[key] = splitauthorstring(value)
- end
+ local ay, by = a[3], b[3]
+ if ay ~= by then
+ return ay < by
end
+ return a[4] < b[4]
end
- -- short
- local shorts = { }
- for i=1,#ordered do
- local entry = ordered[i]
- if entry then
- local tag = entry.tag
- if tag then
- local detail = details[tag]
- if detail then
- local author = detail.author
- if author then
- -- number depends on sort order
- local t = { }
- if #author == 0 then
- -- what
- else
- local n = #author == 1 and 3 or 1
- for i=1,#author do
- local surnames = author[i].surnames
- if not surnames or #surnames == 0 then
- -- error
+
+ -- We could avoid loops by combining enhancers but that makes it only
+ -- more messy and for documents that use publications the few extra milli
+ -- seconds are irrelevant (there is for sure more to gain by proper coding
+ -- of the source and or style).
+
+ function publications.enhancers.suffixes(dataset)
+ local used = usedentries[dataset.name]
+ local luadata = dataset.luadata
+ local details = dataset.details
+ local ordered = dataset.ordered
+ local caster = casters.author
+ local getter = publications.directget
+ local shorts = { }
+ for i=1,#ordered do
+ local entry = ordered[i]
+ if entry then
+ local tag = entry.tag
+ if tag then
+ local use = used[tag]
+ if use then
+ -- use is a table of used list entries (so there can be more) and we just look at
+ -- the first one for btx properties
+ local listentry = use[1]
+ local userdata = listentry.userdata
+ local btxspc = userdata and userdata.btxspc
+ if btxspc then
+ local author = getter(dataset,entry,"author",specifications[btxspc])
+ if author then
+ author = caster(author)
+ -- number depends on sort order
+ local t = { }
+ if #author > 0 then
+ local n = #author == 1 and 3 or 1
+ for i=1,#author do
+ local surnames = author[i].surnames
+ if not surnames or #surnames == 0 then
+ -- error
+ else
+ t[#t+1] = utfsub(surnames[1],1,n)
+ end
+ end
+ end
+ local year = tonumber(entry.year) or 0
+ local short = formatters["%t%02i"](t,mod(year,100))
+ local s = shorts[short]
+ -- we could also sort on reference i.e. entries.text
+ local u = listentry.entries
+ u = u and u.text -- hm
+ if not u then
+ u = "0"
+ end
+ if not s then
+ shorts[short] = { { tag, year, u, i } }
else
- t[#t+1] = utfsub(surnames[1],1,n)
+ s[#s+1] = { tag, year, u, i }
end
end
- end
- local year = tonumber(entry.year) or 0
- local short = formatters["%t%02i"](t,mod(year,100))
- local s = shorts[short]
- -- we could also sort on reference i.e. entries.text
- local u = used[tag]
- if u then
- u = u[1].entries.text -- hm
else
- u = "0"
+ --- no spec so let's forget about it
end
- if not s then
- shorts[short] = { { tag, year, u, i } }
- else
- s[#s+1] = { tag, year, u, i }
- end
- else
- --
end
- else
- report("internal error, no detail for tag %s",tag)
- end
- --
- local pages = entry.pages or entry.page
- if pages then
- local first, last = lpegmatch(pagessplitter,pages)
- detail.pages = first and last and { first, last } or pages
- end
- --
- local keyword = entry.keyword
- if keyword then
- detail.keyword = settings_to_set(keyword)
end
- --
- if category == "inbook" then
- detail.maintitle = entry.chapter or entry.title
- elseif category == "incollection" then
- detail.maintitle = entry.title or entry.booktitle
- else
- detail.maintitle = entry.title or entry.chapter or entry.booktitle
- end
- else
- report("internal error, no tag at index %s",i)
end
- else
- report("internal error, no entry at index %s",i)
- end
- end
- for short, tags in next, shorts do -- ordered ?
- local done = #tags > 0
- -- we only use suffixes when there are multiple references to same years
- -- so we check for used first
- if done then
- local n = 0
- for i=1,#tags do
- local tag = tags[i][1]
- if used[tag] then
- n = n + 1
- if n > 1 then
- break
+ end
+ for short, tags in next, shorts do -- ordered ?
+ local done = #tags > 0
+ -- now we assign the suffixes, unless we have only one reference
+ if done then
+ sort(tags,shortsorter)
+ local n = #tags
+ if n > 1 then
+ for i=1,n do
+ local tag = tags[i][1]
+ local detail = details[tag]
+ local suffix = numbertochar(i)
+ local entry = luadata[tag]
+ local year = entry.year
+ detail.short = short
+ detail.suffix = suffix
+ if year then
+ detail.suffixedyear = year .. suffix
+ end
end
end
- end
- done = n > 1
- end
- -- now we assign the suffixes, unless we have only one reference
- if done then
- sort(tags,shortsorter)
- local n = 0
- for i=1,#tags do
- local tag = tags[i][1]
+ else
+ local tag = tags[1][1]
local detail = details[tag]
+ local entry = luadata[tag]
+ local year = entry.year
detail.short = short
- if used[tag] then
- n = n + 1
- local suffix = numbertochar(n)
- detail.suffix = suffix
- local entry = luadata[tag]
- local year = entry.year
- if year then
- detail.suffixedyear = year .. suffix
- end
+ if year then
+ detail.suffixedyear = year
end
end
- -- for i=1,#tags do
- -- local tag = tags[i][1]
- -- local detail = details[tag]
- -- if not detail.suffix then
- -- n = n + 1
- -- local suffix = numbertochar(n)
- -- detail.suffix = suffix
- -- local entry = luadata[tag]
- -- local year = entry.year
- -- if year then
- -- detail.suffixedyear = year .. suffix
- -- end
- -- end
- -- end
- else
- local tag = tags[1][1]
- local detail = details[tag]
- detail.short = short
- local entry = luadata[tag]
- local year = entry.year
- if year then
- detail.suffixedyear = year
- end
end
end
- dataset.enhanced = true
- statistics.stoptiming(publications)
+
+ utilities.sequencers.appendaction(enhancer,"system","publications.enhancers.suffixes")
+
end
function commands.addbtxentry(name,settings,content)
@@ -940,23 +886,28 @@ do
-- when we have a special kind of database
local function permitted(category,field)
- local catspec = currentspecificationcategories[category]
+ local catspec = currentspecification.categories[category]
if not catspec then
- report("invalid category %a, %s",category,"no specification")
+ report("invalid category %a, %s",category,"no specification") -- can't happen
return false
end
local fields = catspec.fields
if not fields then
- report("invalid category %a, %s",category,"no fields")
+ report("invalid category %a, %s",category,"no fields") -- can't happen
return false
end
- local kind = fields[field]
if ignoredfields and ignoredfields[field] then
return false
- else
- --- just "return true" as it's already in fields
- local sets = catspec.sets
- return sets and sets[field] or true
+ end
+ local sets = catspec.sets
+ if sets then
+ local set = sets[field]
+ if set then
+ return set
+ end
+ end
+ if fields[field] then
+ return true
end
end
@@ -968,36 +919,36 @@ do
return field, okay
end
local details = dataset.details[tag]
- local okay = details[field]
- if okay then
- return field, okay
+ local value = details[field]
+ if value then
+ return field, value
end
elseif valid then
-- local fields = dataset.luadata[tag]
for i=1,#valid do
local field = valid[i]
- local okay = fields[field]
- if okay then
- return field, okay
+ local value = fields[field]
+ if value then
+ return field, value
end
end
local details = dataset.details[tag]
for i=1,#valid do
- local okay = details[field]
- if okay then
- return field, okay
+ local value = details[field]
+ if value then
+ return field, value
end
end
end
end
- local function get(name,tag,field,what,check)
- local dataset = rawget(datasets,name)
+ local function get(dataset,tag,field,what,check,catspec)
+ dataset = rawget(datasets,dataset)
if dataset then
local data = dataset.luadata[tag]
if data then
local category = data.category
- local catspec = currentspecificationcategories[category]
+ local catspec = (catspec or currentspecification).categories[category]
if not catspec then
return false
end
@@ -1036,6 +987,32 @@ do
return ""
end
+ publications.get = get
+
+ function publications.directget(dataset,data,field,catspec)
+ local catspec = (catspec or currentspecification).categories[data.category]
+ if not catspec then
+ return false
+ end
+ local fields = catspec.fields
+ if fields then
+ local sets = catspec.sets
+ if sets then
+ local set = sets[field]
+ if set then
+ for i=1,#set do
+ local field = set[i]
+ local value = fields[field] and data[field] -- redundant check
+ if value then
+ return value
+ end
+ end
+ end
+ end
+ return fields[field] and data[field] or nil -- redundant check
+ end
+ end
+
function commands.btxfieldname(name,tag,field) context(get(name,tag,field,false,false)) end
function commands.btxfieldtype(name,tag,field) context(get(name,tag,field,true, false)) end
function commands.btxfoundname(name,tag,field) context(get(name,tag,field,false,true )) end
@@ -1053,10 +1030,10 @@ do
local name, value = found(dataset,tag,field,valid,fields)
if value then
typesetters[currentspecification.types[name]](field,value,manipulator)
- else
+ elseif trace_detail then
report("%s %s %a in category %a for tag %a in dataset %a","unknown","entry",field,category,tag,name)
end
- else
+ elseif trace_detail then
report("%s %s %a in category %a for tag %a in dataset %a","invalid","entry",field,category,tag,name)
end
else
@@ -1078,10 +1055,10 @@ do
local value = fields[field]
if value then
typesetters[currentspecification.types[field]](field,value,manipulator)
- else
+ elseif trace_detail then
report("%s %s %a in category %a for tag %a in dataset %a","unknown","field",field,category,tag,name)
end
- else
+ elseif trace_detail then
report("%s %s %a in category %a for tag %a in dataset %a","invalid","field",field,category,tag,name)
end
else
@@ -1105,10 +1082,10 @@ do
local value = details[field]
if value then
typesetters[currentspecification.types[field]](field,value,manipulator)
- else
+ elseif trace_detail then
report("%s %s %a in category %a for tag %a in dataset %a","unknown","detail",field,category,tag,name)
end
- else
+ elseif trace_detail then
report("%s %s %a in category %a for tag %a in dataset %a","invalid","detail",field,category,tag,name)
end
else
@@ -1157,392 +1134,410 @@ function publications.singularorplural(singular,plural)
end
end
-local patterns = { "publ-imp-%s.mkvi", "publ-imp-%s.mkiv", "publ-imp-%s.tex" }
+-- loading
-local function failure(name)
- report("unknown library %a",name)
-end
+do
-local function action(name,foundname)
- context.input(foundname)
-end
+ local patterns = { "publ-imp-%s.mkvi", "publ-imp-%s.mkiv", "publ-imp-%s.tex" }
+
+ local function failure(name)
+ report("unknown library %a",name)
+ end
+
+ local function action(name,foundname)
+ context.input(foundname)
+ end
+
+ function commands.loadbtxdefinitionfile(name) -- a more specific name
+ commands.uselibrary {
+ name = string.gsub(name,"^publ%-",""),
+ patterns = patterns,
+ action = action,
+ failure = failure,
+ onlyonce = true,
+ }
+ end
-function commands.loadbtxdefinitionfile(name) -- a more specific name
- commands.uselibrary {
- name = gsub(name,"^publ%-",""),
- patterns = patterns,
- action = action,
- failure = failure,
- onlyonce = true,
- }
end
--- lists:
+-- lists
-publications.lists = publications.lists or { }
-local lists = publications.lists
+do
-local context = context
-local structures = structures
+ publications.lists = publications.lists or { }
+ local lists = publications.lists
-local references = structures.references
-local sections = structures.sections
+ local context = context
+ local structures = structures
--- per rendering
+ local references = structures.references
+ local sections = structures.sections
-local renderings = { } --- per dataset
+ -- per rendering
-setmetatableindex(renderings,function(t,k)
- local v = {
- list = { },
- done = { },
- alldone = { },
- used = { },
- registered = { },
- ordered = { },
- shorts = { },
- method = v_none,
- texts = setmetatableindex("table"),
- currentindex = 0,
- }
- t[k] = v
- return v
-end)
+ local renderings = { } --- per dataset
--- helper
-
--- local function sortedtags(dataset,list,sorttype)
--- local luadata = datasets[dataset].luadata
--- local valid = { }
--- for i=1,#list do
--- local tag = list[i]
--- local entry = luadata[tag]
--- if entry then
--- local key = entry[sorttype]
--- if key then
--- valid[#valid+1] = {
--- tag = tag,
--- split = sortsplitter(sortstripper(key))
--- }
--- end
--- end
--- end
--- if #valid == 0 or #valid ~= #list then
--- return list
--- else
--- sorters.sort(valid,basicsorter)
--- for i=1,#valid do
--- valid[i] = valid[i].tag
--- end
--- return valid
--- end
--- end
---
--- if sorttype and sorttype ~= "" then
--- tags = sortedtags(dataset,tags,sorttype)
--- end
+ setmetatableindex(renderings,function(t,k)
+ local v = {
+ list = { },
+ done = { },
+ alldone = { },
+ used = { },
+ registered = { },
+ ordered = { },
+ shorts = { },
+ method = v_none,
+ texts = setmetatableindex("table"),
+ currentindex = 0,
+ }
+ t[k] = v
+ return v
+ end)
--- why shorts vs tags: only for sorting
+ -- helper
+
+ -- local function sortedtags(dataset,list,sorttype)
+ -- local luadata = datasets[dataset].luadata
+ -- local valid = { }
+ -- for i=1,#list do
+ -- local tag = list[i]
+ -- local entry = luadata[tag]
+ -- if entry then
+ -- local key = entry[sorttype]
+ -- if key then
+ -- valid[#valid+1] = {
+ -- tag = tag,
+ -- split = sortsplitter(sortstripper(key))
+ -- }
+ -- end
+ -- end
+ -- end
+ -- if #valid == 0 or #valid ~= #list then
+ -- return list
+ -- else
+ -- sorters.sort(valid,basicsorter)
+ -- for i=1,#valid do
+ -- valid[i] = valid[i].tag
+ -- end
+ -- return valid
+ -- end
+ -- end
+ --
+ -- if sorttype and sorttype ~= "" then
+ -- tags = sortedtags(dataset,tags,sorttype)
+ -- end
-function lists.register(dataset,tag,short) -- needs checking now that we split
- local r = renderings[dataset]
- if not short or short == "" then
- short = tag
- end
- if trace then
- report("registering publication entry %a with shortcut %a",tag,short)
+ -- why shorts vs tags: only for sorting
+
+ function lists.register(dataset,tag,short) -- needs checking now that we split
+ local r = renderings[dataset]
+ if not short or short == "" then
+ short = tag
+ end
+ if trace then
+ report("registering publication entry %a with shortcut %a",tag,short)
+ end
+ local top = #r.registered + 1
+ -- do we really need these
+ r.registered[top] = tag
+ r.ordered [tag] = top
+ r.shorts [tag] = short
end
- local top = #r.registered + 1
- -- do we really need these
- r.registered[top] = tag
- r.ordered [tag] = top
- r.shorts [tag] = short
-end
-function lists.nofregistered(dataset)
- return #renderings[dataset].registered
-end
+ function lists.nofregistered(dataset)
+ return #renderings[dataset].registered
+ end
-local function validkeyword(dataset,tag,keyword)
- local ds = datasets[dataset]
- if not ds then
- report("unknown dataset %a",dataset)
- return
- end
- local dt = ds.details[tag]
- if not dt then
- report("no details for tag %a",tag)
- return
- end
- local kw = dt.keyword
- if kw then
- for k in next, keyword do
- if kw[k] then
- return true
+ local function validkeyword(dataset,entry,keyword)
+ local kw = fastget(dataset,entry,"keywords") -- hard coded for the moment
+ if kw then
+ for k in next, keyword do
+ if kw[k] then
+ return true
+ end
end
end
end
-end
-local function registerpage(pages,tag,result,listindex)
- local p = pages[tag]
- local r = result[listindex].references
- if p then
- local last = p[#p][2]
- local real = last.realpage
- if real ~= r.realpage then
- p[#p+1] = { listindex, r }
+ local function registerpage(pages,tag,result,listindex)
+ local p = pages[tag]
+ local r = result[listindex].references
+ if p then
+ local last = p[#p][2]
+ local real = last.realpage
+ if real ~= r.realpage then
+ p[#p+1] = { listindex, r }
+ end
+ else
+ pages[tag] = { { listindex, r } }
end
- else
- pages[tag] = { { listindex, r } }
end
-end
-local methods = { }
-lists.methods = methods
+ local methods = { }
+ lists.methods = methods
-methods[v_dataset] = function(dataset,rendering,keyword)
- -- why only once unless criterium=all?
- local luadata = datasets[dataset].luadata
- local list = rendering.list
- for tag, data in sortedhash(luadata) do
- if not keyword or validkeyword(dataset,tag,keyword) then
- list[#list+1] = { tag, false, 0, false, false }
+ methods[v_dataset] = function(dataset,rendering,keyword)
+ -- why only once unless criterium=all?
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ local list = rendering.list
+ for tag, data in sortedhash(luadata) do
+ if not keyword or validkeyword(dataset,data,keyword) then
+ list[#list+1] = { tag, false, 0, false, false }
+ end
end
end
-end
-methods[v_force] = function (dataset,rendering,keyword)
- -- only for checking, can have duplicates, todo: collapse page numbers, although
- -- we then also needs deferred writes
- local result = structures.lists.filter(rendering.specification) or { }
- local list = rendering.list
- for listindex=1,#result do
- local r = result[listindex]
- local u = r.userdata
- if u and u.btxset == dataset then
- local tag = u.btxref
- if tag and (not keyword or validkeyword(dataset,tag,keyword)) then
- list[#list+1] = { tag, listindex, 0, u, u.btxint }
+ methods[v_force] = function (dataset,rendering,keyword)
+ -- only for checking, can have duplicates, todo: collapse page numbers, although
+ -- we then also needs deferred writes
+ local result = structures.lists.filter(rendering.specification) or { }
+ local list = rendering.list
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ for listindex=1,#result do
+ local r = result[listindex]
+ local u = r.userdata
+ if u and u.btxset == dataset then
+ local tag = u.btxref
+ if tag and (not keyword or validkeyword(dataset,luadata[tag],keyword)) then
+ list[#list+1] = { tag, listindex, 0, u, u.btxint }
+ end
end
end
- end
- lists.result = result
-end
-
--- local : if tag and done[tag] ~= section then ...
--- global : if tag and not alldone[tag] and done[tag] ~= section then ...
-
-methods[v_local] = function(dataset,rendering,keyword)
- local result = structures.lists.filter(rendering.specification) or { }
- local section = sections.currentid()
- local list = rendering.list
- local repeated = rendering.repeated == v_yes
- local r_done = rendering.done
- local r_alldone = rendering.alldone
- local done = repeated and { } or r_done
- local alldone = repeated and { } or r_alldone
- local doglobal = rendering.method == v_global
- local traced = { } -- todo: only if interactive (backlinks) or when tracing
- local pages = { }
- for listindex=1,#result do
- local r = result[listindex]
- local u = r.userdata
- if u and u.btxset == dataset then
- local tag = u.btxref
- if not tag then
- -- problem
- elseif done[tag] == section then -- a bit messy for global and all and so
- -- skip
- elseif doglobal and alldone[tag] then
- -- skip
- elseif not keyword or validkeyword(dataset,tag,keyword) then
- if traced then
- local l = traced[tag]
- if l then
- l[#l+1] = u.btxint
+ lists.result = result
+ end
+
+ -- local : if tag and done[tag] ~= section then ...
+ -- global : if tag and not alldone[tag] and done[tag] ~= section then ...
+
+ methods[v_local] = function(dataset,rendering,keyword)
+ local result = structures.lists.filter(rendering.specification) or { }
+ local section = sections.currentid()
+ local list = rendering.list
+ local repeated = rendering.repeated == v_yes
+ local r_done = rendering.done
+ local r_alldone = rendering.alldone
+ local done = repeated and { } or r_done
+ local alldone = repeated and { } or r_alldone
+ local doglobal = rendering.method == v_global
+ local traced = { } -- todo: only if interactive (backlinks) or when tracing
+ local pages = { }
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ for listindex=1,#result do
+ local r = result[listindex]
+ local u = r.userdata
+ if u and u.btxset == dataset then
+ local tag = u.btxref
+ if not tag then
+ -- problem
+ elseif done[tag] == section then -- a bit messy for global and all and so
+ -- skip
+ elseif doglobal and alldone[tag] then
+ -- skip
+ elseif not keyword or validkeyword(dataset,luadata[tag],keyword) then
+ if traced then
+ local l = traced[tag]
+ if l then
+ l[#l+1] = u.btxint
+ else
+ local l = { tag, listindex, 0, u, u.btxint }
+ list[#list+1] = l
+ traced[tag] = l
+ end
else
- local l = { tag, listindex, 0, u, u.btxint }
- list[#list+1] = l
- traced[tag] = l
+ done[tag] = section
+ alldone[tag] = true
+ list[#list+1] = { tag, listindex, 0, u, u.btxint }
end
- else
- done[tag] = section
- alldone[tag] = true
- list[#list+1] = { tag, listindex, 0, u, u.btxint }
end
+ registerpage(pages,tag,result,listindex)
end
- registerpage(pages,tag,result,listindex)
end
- end
- if traced then
- for tag in next, traced do
- done[tag] = section
- alldone[tag] = true
+ if traced then
+ for tag in next, traced do
+ done[tag] = section
+ alldone[tag] = true
+ end
end
+ lists.result = result
+ structures.lists.result = result
+ rendering.pages = pages -- or list.pages
+ -- inspect(pages)
+ end
+
+ methods[v_global] = methods[v_local]
+
+ function lists.collectentries(specification)
+ local dataset = specification.btxdataset
+ if not dataset then
+ return
+ end
+ local rendering = renderings[dataset]
+ if not rendering then
+ return
+ end
+ local method = specification.method or v_none
+ local ignored = specification.ignored or ""
+ rendering.method = method
+ rendering.ignored = ignored ~= "" and settings_to_set(ignored) or nil
+ rendering.list = { }
+ rendering.done = { }
+ rendering.sorttype = specification.sorttype or v_default
+ rendering.criterium = specification.criterium or v_none
+ rendering.repeated = specification.repeated or v_no
+ rendering.specification = specification
+ local filtermethod = methods[method]
+ if not filtermethod then
+ return
+ end
+ lists.result = { } -- kind of reset
+ local keyword = specification.keyword
+ if keyword and keyword ~= "" then
+ keyword = settings_to_set(keyword)
+ else
+ keyword = nil
+ end
+ filtermethod(dataset,rendering,keyword)
end
- lists.result = result
- structures.lists.result = result
- rendering.pages = pages -- or list.pages
- -- inspect(pages)
-end
-
-methods[v_global] = methods[v_local]
-
-function lists.collectentries(specification)
- local dataset = specification.btxdataset
- if not dataset then
- return
- end
- local rendering = renderings[dataset]
- if not rendering then
- return
- end
- local method = specification.method or v_none
- local ignored = specification.ignored or ""
- rendering.method = method
- rendering.ignored = ignored ~= "" and settings_to_set(ignored) or nil
- rendering.list = { }
- rendering.done = { }
- rendering.sorttype = specification.sorttype or v_default
- rendering.criterium = specification.criterium or v_none
- rendering.repeated = specification.repeated or v_no
- rendering.specification = specification
- local filtermethod = methods[method]
- if not filtermethod then
- return
- end
- lists.result = { } -- kind of reset
- local keyword = specification.keyword
- if keyword and keyword ~= "" then
- keyword = settings_to_set(keyword)
- else
- keyword = nil
- end
- filtermethod(dataset,rendering,keyword)
-end
-
--- experiment
-local splitspec = lpeg.splitat(S(":."))
-local splitter = sorters.splitters.utf
-local strip = sorters.strip
+ -- experiment
-local function newsplitter(splitter)
- return setmetatableindex({},function(t,k) -- could be done in the sorter but seldom that many shared
- local v = splitter(k,true) -- in other cases
- t[k] = v
- return v
- end)
-end
+ local splitspec = lpeg.splitat(S(":."))
+ local splitter = sorters.splitters.utf
+ local strip = sorters.strip
-local template = [[
- local strip = sorters.strip
- local writers = publications.writers
- return function(entry,detail,splitted,i) -- snippets
- return {
- index = i,
- split = { %s, splitted[tostring(i)] }
- }
+ local function newsplitter(splitter)
+ return setmetatableindex({},function(t,k) -- could be done in the sorter but seldom that many shared
+ local v = splitter(k,true) -- in other cases
+ t[k] = v
+ return v
+ end)
end
-]]
-
-local function byspec(dataset,list,method) -- todo: yearsuffix
- local luadata = datasets[dataset].luadata
- local details = datasets[dataset].details
- local result = { }
- local splitted = newsplitter(splitter) -- saves mem
- -- local snippets = { } -- saves mem
- local fields = settings_to_array(method)
- for i=1,#fields do
- local f = settings_to_array(fields[i])
- local r = { }
- for i=1,#f do
- local a, b = lpegmatch(splitspec,f[i])
- if b then
- if a == "detail" or a == "entry" then
- local t = currentspecification.types[b]
- local w = t and writers[t]
- if w then
- r[#r+1] = formatters["(%s.%s and writers[%q](%s.%s))"](a,b,t,a,b)
- else
- r[#r+1] = formatters["%s.%s"](a,b,a,b)
+
+ local template = [[
+ local strip = sorters.strip
+ local writers = publications.writers
+ return function(entry,detail,splitted,i) -- snippets
+ return {
+ index = i,
+ split = { %s, splitted[tostring(i)] }
+ }
+ end
+ ]]
+
+ local function byspec(dataset,list,method) -- todo: yearsuffix
+ local luadata = datasets[dataset].luadata
+ local details = datasets[dataset].details
+ local result = { }
+ local splitted = newsplitter(splitter) -- saves mem
+ -- local snippets = { } -- saves mem
+ local fields = settings_to_array(method)
+ for i=1,#fields do
+ local f = settings_to_array(fields[i])
+ local r = { }
+ for i=1,#f do
+ local a, b = lpegmatch(splitspec,f[i])
+ if b then
+ if a == "detail" or a == "entry" then
+ local t = currentspecification.types[b]
+ local w = t and writers[t]
+ if w then
+ r[#r+1] = formatters["(%s.%s and writers[%q](%s.%s))"](a,b,t,a,b)
+ else
+ r[#r+1] = formatters["%s.%s"](a,b,a,b)
+ end
end
+ elseif a then
+ r[#r+1] = formatters["%s"](a)
end
- elseif a then
- r[#r+1] = formatters["%s"](a)
end
- end
- r[#r+1] = '""'
- fields[i] = "splitted[strip(" .. concat(r," or ") .. ")]"
- end
- local action = formatters[template](concat(fields,", "))
- local prepare = loadstring(action)
- if prepare then
- prepare = prepare()
- local dummy = { }
- for i=1,#list do
- -- either { tag, tag, ... } or { { tag, index }, { tag, index } }
- local li = list[i]
- local tag = type(li) == "string" and li or li[1]
- local entry = luadata[tag]
- local detail = details[tag]
- if entry and detail then
- result[i] = prepare(entry,detail,splitted,i) -- ,snippets)
- else
- result[i] = prepare(dummy,dummy,splitted,i) -- ,snippets)
+ r[#r+1] = '""'
+ fields[i] = "splitted[strip(" .. concat(r," or ") .. ")]"
+ end
+ local action = formatters[template](concat(fields,", "))
+ local prepare = loadstring(action)
+ if prepare then
+ prepare = prepare()
+ local dummy = { }
+ for i=1,#list do
+ -- either { tag, tag, ... } or { { tag, index }, { tag, index } }
+ local li = list[i]
+ local tag = type(li) == "string" and li or li[1]
+ local entry = luadata[tag]
+ local detail = details[tag]
+ if entry and detail then
+ result[i] = prepare(entry,detail,splitted,i) -- ,snippets)
+ else
+ result[i] = prepare(dummy,dummy,splitted,i) -- ,snippets)
+ end
end
end
+ return result
end
- return result
-end
-lists.sorters = {
- [v_short] = function(dataset,rendering,list)
- local shorts = rendering.shorts
- local function compare(a,b)
- local aa, bb = a and a[1], b and b[1]
- if aa and bb then
- aa, bb = shorts[aa], shorts[bb]
- return aa and bb and aa < bb
- end
- return false
- end
- sort(list,compare)
- end,
- [v_reference] = function(dataset,rendering,list)
- local function compare(a,b)
- local aa, bb = a and a[1], b and b[1]
- if aa and bb then
- return aa and bb and aa < bb
+ lists.sorters = {
+ [v_short] = function(dataset,rendering,list)
+ local shorts = rendering.shorts
+ local function compare(a,b)
+ local aa, bb = a and a[1], b and b[1]
+ if aa and bb then
+ aa, bb = shorts[aa], shorts[bb]
+ return aa and bb and aa < bb
+ end
+ return false
end
- return false
- end
- sort(list,compare)
- end,
- [v_dataset] = function(dataset,rendering,list)
- local function compare(a,b)
- local aa, bb = a and a[1], b and b[1]
- if aa and bb then
- aa, bb = list[aa].index or 0, list[bb].index or 0
- return aa and bb and aa < bb
+ sort(list,compare)
+ end,
+ [v_reference] = function(dataset,rendering,list)
+ local function compare(a,b)
+ local aa, bb = a and a[1], b and b[1]
+ if aa and bb then
+ return aa and bb and aa < bb
+ end
+ return false
end
- return false
- end
- sort(list,compare)
- end,
- [v_default] = function(dataset,rendering,list,sorttype) -- experimental
- if sorttype == "" or sorttype == v_default then
+ sort(list,compare)
+ end,
+ [v_dataset] = function(dataset,rendering,list)
local function compare(a,b)
- local aa, bb = a and a[3], b and b[3]
+ local aa, bb = a and a[1], b and b[1]
if aa and bb then
+ aa, bb = list[aa].index or 0, list[bb].index or 0
return aa and bb and aa < bb
end
return false
end
sort(list,compare)
- else
- local valid = byspec(dataset,list,sorttype)
+ end,
+ [v_default] = function(dataset,rendering,list,sorttype) -- experimental
+ if sorttype == "" or sorttype == v_default then
+ local function compare(a,b)
+ local aa, bb = a and a[3], b and b[3]
+ if aa and bb then
+ return aa and bb and aa < bb
+ end
+ return false
+ end
+ sort(list,compare)
+ else
+ local valid = byspec(dataset,list,sorttype)
+ if #valid == 0 or #valid ~= #list then
+ -- nothing to sort
+ else
+ -- if needed we can wrap compare and use the list directly but this is cleaner
+ sorters.sort(valid,sortcomparer)
+ for i=1,#valid do
+ local v = valid[i]
+ valid[i] = list[v.index]
+ end
+ return valid
+ end
+ end
+ end,
+ [v_author] = function(dataset,rendering,list)
+ local valid = publications.authors.sorters.author(dataset,list)
if #valid == 0 or #valid ~= #list then
-- nothing to sort
else
@@ -1554,1181 +1549,1176 @@ lists.sorters = {
end
return valid
end
- end
- end,
- [v_author] = function(dataset,rendering,list)
- local valid = publications.authors.sorters.author(dataset,list)
- if #valid == 0 or #valid ~= #list then
- -- nothing to sort
- else
- -- if needed we can wrap compare and use the list directly but this is cleaner
- sorters.sort(valid,sortcomparer)
- for i=1,#valid do
- local v = valid[i]
- valid[i] = list[v.index]
- end
- return valid
- end
- end,
-}
+ end,
+ }
--- for determining width
-
-local lastreferencenumber = 0 -- document wide
-
-function lists.prepareentries(dataset)
- local rendering = renderings[dataset]
- local list = rendering.list
- local used = rendering.used
- local forceall = rendering.criterium == v_all
- local repeated = rendering.repeated == v_yes
- local sorttype = rendering.sorttype or v_default
- local sorter = lists.sorters[sorttype] or lists.sorters[v_default]
- local current = datasets[dataset]
- local luadata = current.luadata
- local details = current.details
- local newlist = { }
- for i=1,#list do
- local li = list[i]
- local tag = li[1]
- local entry = luadata[tag]
- if entry then
- if forceall or repeated or not used[tag] then
- newlist[#newlist+1] = li
- -- already here:
- if not repeated then
- used[tag] = true -- beware we keep the old state (one can always use criterium=all)
- end
- local detail = details[tag]
- if detail then
- local referencenumber = detail.referencenumber
- if not referencenumber then
- lastreferencenumber = lastreferencenumber + 1
- referencenumber = lastreferencenumber
- detail.referencenumber = lastreferencenumber
+ -- for determining width
+
+ local lastreferencenumber = 0 -- document wide
+
+ function lists.prepareentries(dataset)
+ local rendering = renderings[dataset]
+ local list = rendering.list
+ local used = rendering.used
+ local forceall = rendering.criterium == v_all
+ local repeated = rendering.repeated == v_yes
+ local sorttype = rendering.sorttype or v_default
+ local sorter = lists.sorters[sorttype] or lists.sorters[v_default]
+ local current = datasets[dataset]
+ local luadata = current.luadata
+ local details = current.details
+ local newlist = { }
+ for i=1,#list do
+ local li = list[i]
+ local tag = li[1]
+ local entry = luadata[tag]
+ if entry then
+ if forceall or repeated or not used[tag] then
+ newlist[#newlist+1] = li
+ -- already here:
+ if not repeated then
+ used[tag] = true -- beware we keep the old state (one can always use criterium=all)
+ end
+ local detail = details[tag]
+ if detail then
+ local referencenumber = detail.referencenumber
+ if not referencenumber then
+ lastreferencenumber = lastreferencenumber + 1
+ referencenumber = lastreferencenumber
+ detail.referencenumber = lastreferencenumber
+ end
+ li[3] = referencenumber
+ else
+ report("missing details for tag %a in dataset %a (enhanced: %s)",tag,dataset,current.enhanced and "yes" or "no")
+ -- weird, this shouldn't happen .. all have a detail
+ lastreferencenumber = lastreferencenumber + 1
+ details[tag] = { referencenumber = lastreferencenumber }
+ li[3] = lastreferencenumber
end
- li[3] = referencenumber
- else
- report("missing details for tag %a in dataset %a (enhanced: %s)",tag,dataset,current.enhanced and "yes" or "no")
- -- weird, this shouldn't happen .. all have a detail
- lastreferencenumber = lastreferencenumber + 1
- details[tag] = { referencenumber = lastreferencenumber }
- li[3] = lastreferencenumber
end
end
end
+ rendering.list = type(sorter) == "function" and sorter(dataset,rendering,newlist,sorttype) or newlist
end
- rendering.list = type(sorter) == "function" and sorter(dataset,rendering,newlist,sorttype) or newlist
-end
-function lists.fetchentries(dataset)
- local rendering = renderings[dataset]
- local list = rendering.list
- for i=1,#list do
- local li = list[i]
- ctx_btxsettag(li[1])
- ctx_btxsetnumber(li[3])
- ctx_btxchecklistentry()
+ function lists.fetchentries(dataset)
+ local rendering = renderings[dataset]
+ local list = rendering.list
+ for i=1,#list do
+ local li = list[i]
+ ctx_btxsettag(li[1])
+ ctx_btxsetnumber(li[3])
+ ctx_btxchecklistentry()
+ end
end
-end
--- for rendering
-
--- setspecification
-
-function commands.btxflushpages(dataset,tag)
- -- todo: interaction
- local rendering = renderings[dataset]
- local pages = rendering.pages[tag]
- if not pages then
- return
- end
- local nofpages = #pages
- if nofpages == 0 then
- return
- end
- local first_p = nil
- local first_r = nil
- local last_p = nil
- local last_r = nil
- local ranges = { }
- local nofdone = 0
- local function flush()
- if last_r and first_r ~= last_r then
- ranges[#ranges+1] = { first_p, last_p }
- else
- ranges[#ranges+1] = { first_p }
- end
- end
- for i=1,nofpages do
- local next_p = pages[i]
- local next_r = next_p[2].realpage
- if not first_r then
- first_p = next_p
- first_r = next_r
- elseif last_r + 1 == next_r then
- -- continue
- elseif first_r then
- flush()
- first_p = next_p
- first_r = next_r
+ -- for rendering
+
+ -- setspecification
+
+ function commands.btxflushpages(dataset,tag)
+ -- todo: interaction
+ local rendering = renderings[dataset]
+ local pages = rendering.pages[tag]
+ if not pages then
+ return
end
- last_p = next_p
- last_r = next_r
- end
- if first_r then
- flush()
- end
- local nofranges = #ranges
- for i=1,nofranges do
- local r = ranges[i]
- ctx_btxsetconcat(concatstate(i,nofranges))
- local first, last = r[1], r[2]
- ctx_btxsetfirstinternal(first[2].internal)
- ctx_btxsetfirstpage(first[1])
- if last then
- ctx_btxsetlastinternal(last[2].internal)
- ctx_btxsetlastpage(last[1])
+ local nofpages = #pages
+ if nofpages == 0 then
+ return
end
- ctx_btxpagesetup()
- end
-end
-
-function lists.flushentries(dataset,textmode)
- local rendering = renderings[dataset]
- local list = rendering.list
- local luadata = datasets[dataset].luadata
- -- maybe a startflushing here
- ignoredfields = rendering.ignored or { }
- --
- for i=1,#list do
- local li = list[i]
- local tag = li[1]
- local n = li[3]
- local entry = luadata[tag]
- local combined = entry.combined
- local language = entry.language
- if combined then
- ctx_btxsetcombis(concat(combined,","))
- end
- ctx_btxsetcategory(entry.category or "unknown")
- ctx_btxsettag(tag)
- ctx_btxsetnumber(n)
- if language then
- ctx_btxsetlanguage(language)
- end
- local bl = li[5]
- if bl and bl ~= "" then
- ctx_btxsetbacklink(bl)
- ctx_btxsetbacktrace(concat(li," ",5))
- local uc = citetolist[tonumber(bl)]
- if uc then
- ctx_btxsetinternal(uc.references.internal or "")
+ local first_p = nil
+ local first_r = nil
+ local last_p = nil
+ local last_r = nil
+ local ranges = { }
+ local nofdone = 0
+ local function flush()
+ if last_r and first_r ~= last_r then
+ ranges[#ranges+1] = { first_p, last_p }
+ else
+ ranges[#ranges+1] = { first_p }
end
- else
- -- nothing
end
- local userdata = li[4]
- if userdata then
- local b = userdata.btxbtx
- local a = userdata.btxatx
- if b then
- ctx_btxsetbefore(b)
- end
- if a then
- ctx_btxsetafter(a)
+ for i=1,nofpages do
+ local next_p = pages[i]
+ local next_r = next_p[2].realpage
+ if not first_r then
+ first_p = next_p
+ first_r = next_r
+ elseif last_r + 1 == next_r then
+ -- continue
+ elseif first_r then
+ flush()
+ first_p = next_p
+ first_r = next_r
end
+ last_p = next_p
+ last_r = next_r
end
- rendering.userdata = userdata
- if textmode then
- ctx_btxhandlelisttextentry()
- else
- ctx_btxhandlelistentry()
+ if first_r then
+ flush()
+ end
+ local nofranges = #ranges
+ for i=1,nofranges do
+ local r = ranges[i]
+ ctx_btxsetconcat(concatstate(i,nofranges))
+ local first, last = r[1], r[2]
+ ctx_btxsetfirstinternal(first[2].internal)
+ ctx_btxsetfirstpage(first[1])
+ if last then
+ ctx_btxsetlastinternal(last[2].internal)
+ ctx_btxsetlastpage(last[1])
+ end
+ ctx_btxpagesetup()
end
end
- context(function()
- -- wrapup
- ignoredfields = nil
- setspecification(false)
- end)
-end
-local function getuserdata(dataset,key)
- local rendering = renderings[dataset]
- if rendering then
- local userdata = rendering.userdata
- if userdata then
- local value = userdata[key]
- if value and value ~= "" then
- return value
+ function lists.flushentries(dataset,textmode)
+ local rendering = renderings[dataset]
+ local list = rendering.list
+ local luadata = datasets[dataset].luadata
+ -- maybe a startflushing here
+ ignoredfields = rendering.ignored or { }
+ --
+ for i=1,#list do
+ local li = list[i]
+ local tag = li[1]
+ local n = li[3]
+ local entry = luadata[tag]
+ local combined = entry.combined
+ local language = entry.language
+ if combined then
+ ctx_btxsetcombis(concat(combined,","))
+ end
+ ctx_btxsetcategory(entry.category or "unknown")
+ ctx_btxsettag(tag)
+ ctx_btxsetnumber(n)
+ if language then
+ ctx_btxsetlanguage(language)
+ end
+ local bl = li[5]
+ if bl and bl ~= "" then
+ ctx_btxsetbacklink(bl)
+ ctx_btxsetbacktrace(concat(li," ",5))
+ local uc = citetolist[tonumber(bl)]
+ if uc then
+ ctx_btxsetinternal(uc.references.internal or "")
+ end
+ else
+ -- nothing
+ end
+ local userdata = li[4]
+ if userdata then
+ local b = userdata.btxbtx
+ local a = userdata.btxatx
+ if b then
+ ctx_btxsetbefore(b)
+ end
+ if a then
+ ctx_btxsetafter(a)
+ end
+ end
+ rendering.userdata = userdata
+ if textmode then
+ ctx_btxhandlelisttextentry()
+ else
+ ctx_btxhandlelistentry()
+ end
+ end
+ context(function()
+ -- wrapup
+ ignoredfields = nil
+ setspecification(false)
+ end)
+ end
+
+ local function getuserdata(dataset,key)
+ local rendering = renderings[dataset]
+ if rendering then
+ local userdata = rendering.userdata
+ if userdata then
+ local value = userdata[key]
+ if value and value ~= "" then
+ return value
+ end
end
end
end
-end
-lists.uservariable = getuserdata
+ lists.uservariable = getuserdata
-function commands.btxuservariable(dataset,key)
- local value = getuserdata(dataset,key)
- if value then
- context(value)
+ function commands.btxuservariable(dataset,key)
+ local value = getuserdata(dataset,key)
+ if value then
+ context(value)
+ end
end
-end
-function commands.btxdoifelseuservariable(dataset,key)
- if getuserdata(dataset,key) then
- ctx_firstoftwoarguments()
- else
- ctx_secondoftwoarguments()
+ function commands.btxdoifelseuservariable(dataset,key)
+ if getuserdata(dataset,key) then
+ ctx_firstoftwoarguments()
+ else
+ ctx_secondoftwoarguments()
+ end
end
-end
-function lists.filterall(dataset)
- local r = renderings[dataset]
- local list = r.list
- local registered = r.registered
- for i=1,#registered do
- list[i] = { registered[i], i, 0, false, false }
+ function lists.filterall(dataset)
+ local r = renderings[dataset]
+ local list = r.list
+ local registered = r.registered
+ for i=1,#registered do
+ list[i] = { registered[i], i, 0, false, false }
+ end
end
-end
-
-commands.btxresolvelistreference = lists.resolve
-commands.btxaddtolist = lists.addentry
-commands.btxcollectlistentries = lists.collectentries
-commands.btxpreparelistentries = lists.prepareentries
-commands.btxfetchlistentries = lists.fetchentries
-commands.btxflushlistentries = lists.flushentries
-commands.btxflushlistentry = lists.flushentry
-local citevariants = { }
-publications.citevariants = citevariants
+ commands.btxresolvelistreference = lists.resolve
+ commands.btxaddtolist = lists.addentry
+ commands.btxcollectlistentries = lists.collectentries
+ commands.btxpreparelistentries = lists.prepareentries
+ commands.btxfetchlistentries = lists.fetchentries
+ commands.btxflushlistentries = lists.flushentries
+ commands.btxflushlistentry = lists.flushentry
-function commands.btxhandlecite(specification)
- local dataset = specification.dataset or "" -- standard
- local reference = specification.reference
- local variant = specification.variant or defaultvariant
- if not reference or reference == "" then
- return
- end
- --
- specification.variant = variant
- specification.compress = specification.compress == v_yes
- specification.markentry = specification.markentry ~= false
- --
- local prefix, rest = lpegmatch(prefixsplitter,reference)
- if prefix and rest then
- specification.dataset = prefix
- specification.reference = rest
- end
- --
- --
- if trace_cite then
- report_cite("inject, dataset: %s, tag: %s, variant: %s, compressed",
- specification.dataset or "-",
- specification.reference,
- specification.variant
- )
- end
- --
- ctx_setvalue("currentbtxdataset",dataset)
- --
- citevariants[variant](specification) -- we always fall back on default
end
+do
-function commands.btxhandlenocite(specification)
- local dataset = specification.dataset or "" -- standard
- local reference = specification.reference
- if not reference or reference == "" then
- return
- end
- --
- local markentry = specification.markentry ~= false
- local internal = specification.internal or ""
- --
- local prefix, rest = lpegmatch(prefixsplitter,reference)
- if rest then
- dataset = prefix
- reference = rest
- end
- --
- if trace_cite then
- report_cite("mark, dataset: %s, tags: %s",dataset or "-",reference)
- end
- --
- local reference = publications.parenttag(dataset,reference)
- --
- local found, todo, list = findallused(dataset,reference,internal)
- --
- tobemarked = markentry and todo
- if found and tobemarked then
- flushmarked(dataset,list)
- commands.flushmarked() -- here (could also be done in caller)
- end
-end
-
--- function commands.btxcitevariant(dataset,block,tags,variant) -- uses? specification ?
--- local action = citevariants[variant]
--- if action then
--- action(dataset,tags,variant)
--- end
--- end
-
--- sorter
-
-local keysorter = function(a,b) return a.sortkey < b.sortkey end
-
--- local suffix = 0
--- local function setsuffix(entry,suffix,sortfld)
--- entry.suffix = suffix
--- local dataset = datasets[entry.dataset]
--- if dataset then
--- local suffixes = dataset.suffixes[entry.tag]
--- if suffixes then
--- suffixes[sortfld] = suffix
--- else
--- dataset.suffixes[entry.tag] = { [sortfld] = suffix }
--- end
--- end
--- end
--- for i=1,#source do
--- local entry = source[i]
--- local sortfld = entry.sortfld
--- if sortfld then
--- local value = entry.sortkey
--- if value == oldvalue then
--- if suffix == 0 then
--- suffix = 1
--- local entry = source[i-1]
--- setsuffix(entry,suffix,sortfld)
--- end
--- suffix = suffix + 1
--- setsuffix(entry,suffix,sortfld)
--- else
--- oldvalue = value
--- suffix = 0
--- end
--- else
--- break
--- end
--- end
-
-local function compresslist(source)
- for i=1,#source do
- if type(source[i].sortkey) ~= "number" then
- return source
- end
- end
- local first, last, firstr, lastr
- local target, noftarget, tags = { }, 0, { }
- sort(source,keysorter)
- local oldvalue = nil
- local function flushrange()
- noftarget = noftarget + 1
- if last > first + 1 then
- target[noftarget] = {
- first = firstr,
- last = lastr,
- tags = tags,
- }
- else
- target[noftarget] = firstr
- if last > first then
- noftarget = noftarget + 1
- target[noftarget] = lastr
+ local citevariants = { }
+ publications.citevariants = citevariants
+
+ function commands.btxhandlecite(specification)
+ local dataset = specification.dataset or "" -- standard
+ local reference = specification.reference
+ local variant = specification.variant or defaultvariant
+ if not reference or reference == "" then
+ return
+ end
+ --
+ specification.variant = variant
+ specification.compress = specification.compress == v_yes
+ specification.markentry = specification.markentry ~= false
+ --
+ local prefix, rest = lpegmatch(prefixsplitter,reference)
+ if prefix and rest then
+ specification.dataset = prefix
+ specification.reference = rest
+ end
+ --
+ --
+ if trace_cite then
+ report_cite("inject, dataset: %s, tag: %s, variant: %s, compressed",
+ specification.dataset or "-",
+ specification.reference,
+ specification.variant
+ )
+ end
+ --
+ ctx_setvalue("currentbtxdataset",dataset)
+ --
+ citevariants[variant](specification) -- we always fall back on default
+ end
+
+
+ function commands.btxhandlenocite(specification)
+ local dataset = specification.dataset or "" -- standard
+ local reference = specification.reference
+ if not reference or reference == "" then
+ return
+ end
+ --
+ local markentry = specification.markentry ~= false
+ local internal = specification.internal or ""
+ --
+ local prefix, rest = lpegmatch(prefixsplitter,reference)
+ if rest then
+ dataset = prefix
+ reference = rest
+ end
+ --
+ if trace_cite then
+ report_cite("mark, dataset: %s, tags: %s",dataset or "-",reference)
+ end
+ --
+ local reference = publications.parenttag(dataset,reference)
+ --
+ local found, todo, list = findallused(dataset,reference,internal)
+ --
+ tobemarked = markentry and todo
+ if found and tobemarked then
+ flushmarked(dataset,list)
+ commands.flushmarked() -- here (could also be done in caller)
+ end
+ end
+
+ -- function commands.btxcitevariant(dataset,block,tags,variant) -- uses? specification ?
+ -- local action = citevariants[variant]
+ -- if action then
+ -- action(dataset,tags,variant)
+ -- end
+ -- end
+
+ -- sorter
+
+ local keysorter = function(a,b) return a.sortkey < b.sortkey end
+
+ -- local suffix = 0
+ -- local function setsuffix(entry,suffix,sortfld)
+ -- entry.suffix = suffix
+ -- local dataset = datasets[entry.dataset]
+ -- if dataset then
+ -- local suffixes = dataset.suffixes[entry.tag]
+ -- if suffixes then
+ -- suffixes[sortfld] = suffix
+ -- else
+ -- dataset.suffixes[entry.tag] = { [sortfld] = suffix }
+ -- end
+ -- end
+ -- end
+ -- for i=1,#source do
+ -- local entry = source[i]
+ -- local sortfld = entry.sortfld
+ -- if sortfld then
+ -- local value = entry.sortkey
+ -- if value == oldvalue then
+ -- if suffix == 0 then
+ -- suffix = 1
+ -- local entry = source[i-1]
+ -- setsuffix(entry,suffix,sortfld)
+ -- end
+ -- suffix = suffix + 1
+ -- setsuffix(entry,suffix,sortfld)
+ -- else
+ -- oldvalue = value
+ -- suffix = 0
+ -- end
+ -- else
+ -- break
+ -- end
+ -- end
+
+ local function compresslist(source)
+ for i=1,#source do
+ if type(source[i].sortkey) ~= "number" then
+ return source
end
end
- tags = { }
- end
- for i=1,#source do
- local entry = source[i]
- local current = entry.sortkey
- if not first then
- first, last, firstr, lastr = current, current, entry, entry
- elseif current == last + 1 then
- last, lastr = current, entry
- else
+ local first, last, firstr, lastr
+ local target, noftarget, tags = { }, 0, { }
+ sort(source,keysorter)
+ local oldvalue = nil
+ local function flushrange()
+ noftarget = noftarget + 1
+ if last > first + 1 then
+ target[noftarget] = {
+ first = firstr,
+ last = lastr,
+ tags = tags,
+ }
+ else
+ target[noftarget] = firstr
+ if last > first then
+ noftarget = noftarget + 1
+ target[noftarget] = lastr
+ end
+ end
+ tags = { }
+ end
+ for i=1,#source do
+ local entry = source[i]
+ local current = entry.sortkey
+ if not first then
+ first, last, firstr, lastr = current, current, entry, entry
+ elseif current == last + 1 then
+ last, lastr = current, entry
+ else
+ flushrange()
+ first, last, firstr, lastr = current, current, entry, entry
+ end
+ tags[#tags+1] = entry.tag
+ end
+ if first and last then
flushrange()
- first, last, firstr, lastr = current, current, entry, entry
end
- tags[#tags+1] = entry.tag
+ return target
end
- if first and last then
- flushrange()
- end
- return target
-end
--- local source = {
--- { tag = "one", internal = 1, value = "foo", page = 1 },
--- { tag = "two", internal = 2, value = "bar", page = 2 },
--- { tag = "three", internal = 3, value = "gnu", page = 3 },
--- }
---
--- local target = compresslist(source)
-
-local numberonly = R("09")^1 / tonumber + P(1)^0
-local f_missing = formatters["<%s>"]
+ -- local source = {
+ -- { tag = "one", internal = 1, value = "foo", page = 1 },
+ -- { tag = "two", internal = 2, value = "bar", page = 2 },
+ -- { tag = "three", internal = 3, value = "gnu", page = 3 },
+ -- }
+ --
+ -- local target = compresslist(source)
--- maybe also sparse (e.g. pages)
+ local numberonly = R("09")^1 / tonumber + P(1)^0
+ local f_missing = formatters["<%s>"]
--- a bit redundant access to datasets
+ -- maybe also sparse (e.g. pages)
-local function processcite(presets,specification)
- --
- if specification then
- setmetatableindex(specification,presets)
- else
- specification = presets
- end
- --
- local dataset = specification.dataset
- local reference = specification.reference
- local internal = specification.internal
- local setup = specification.variant
- local compress = specification.compress
- local getter = specification.getter
- local setter = specification.setter
- local compressor = specification.compressor
- --
+ -- a bit redundant access to datasets
- local reference = publications.parenttag(dataset,reference)
- --
- local found, todo, list = findallused(dataset,reference,internal)
- tobemarked = specification.markentry and todo
- --
- if found and setup then
- local source = { }
- local badkey = false
- local luadata = datasets[dataset].luadata
- for i=1,#found do
- local entry = found[i]
- local tag = entry.userdata.btxref
- -- we can probably move the test into the flush
- -- local category = luadata[tag].category
- -- if currentspecificationfields[category][setup] then
- local internal = entry.references.internal
- local data = setter(dataset,tag,entry,internal)
- if compress and not compressor then
- local sortkey = data.sortkey
- if sortkey then
- local key = lpegmatch(numberonly,sortkey)
- if key then
- data.sortkey = key
+ local function processcite(presets,specification)
+ --
+ if specification then
+ setmetatableindex(specification,presets)
+ else
+ specification = presets
+ end
+ --
+ local dataset = specification.dataset
+ local reference = specification.reference
+ local internal = specification.internal
+ local setup = specification.variant
+ local compress = specification.compress
+ local getter = specification.getter
+ local setter = specification.setter
+ local compressor = specification.compressor
+ --
+
+ local reference = publications.parenttag(dataset,reference)
+ --
+ local found, todo, list = findallused(dataset,reference,internal)
+ tobemarked = specification.markentry and todo
+ --
+ if found and setup then
+ local source = { }
+ local badkey = false
+ local luadata = datasets[dataset].luadata
+ for i=1,#found do
+ local entry = found[i]
+ local tag = entry.userdata.btxref
+ -- we can probably move the test into the flush
+ -- local category = luadata[tag].category
+ -- if currentspecificationfields[category][setup] then
+ local internal = entry.references.internal
+ local data = setter(dataset,tag,entry,internal)
+ if compress and not compressor then
+ local sortkey = data.sortkey
+ if sortkey then
+ local key = lpegmatch(numberonly,sortkey)
+ if key then
+ data.sortkey = key
+ else
+ badkey = true
+ end
else
badkey = true
end
+ end
+ if type(data) == "table" then
+ source[#source+1] = data
else
- badkey = true
+ report("error in cite rendering %a",setup or "?")
end
- end
- if type(data) == "table" then
- source[#source+1] = data
- else
- report("error in cite rendering %a",setup or "?")
- end
- -- else
- -- report("cite rendering %a is not available for %a",setup,category)
- -- end
- end
+ -- else
+ -- report("cite rendering %a is not available for %a",setup,category)
+ -- end
+ end
- local lefttext = specification.lefttext
- local righttext = specification.righttext
- local before = specification.before
- local after = specification.after
+ local lefttext = specification.lefttext
+ local righttext = specification.righttext
+ local before = specification.before
+ local after = specification.after
- if lefttext and lefttext ~= "" then lefttext = settings_to_array(lefttext) end
- if righttext and righttext ~= "" then righttext = settings_to_array(righttext) end
- if before and before ~= "" then before = settings_to_array(before) end
- if after and after ~= "" then after = settings_to_array(after) end
+ if lefttext and lefttext ~= "" then lefttext = settings_to_array(lefttext) end
+ if righttext and righttext ~= "" then righttext = settings_to_array(righttext) end
+ if before and before ~= "" then before = settings_to_array(before) end
+ if after and after ~= "" then after = settings_to_array(after) end
- local function flush(i,n,entry,last)
- local tag = entry.tag
- local currentcitation = markcite(dataset,tag)
- --
- ctx_btxstartcite()
- ctx_btxsettag(tag)
- --
- if lefttext then ctx_btxsetlefttext (lefttext [i] or #lefttext == 1 and lefttext [1] or "") end
- if righttext then ctx_btxsetrighttext(righttext[i] or #righttext == 1 and righttext[1] or "") end
- if before then ctx_btxsetbefore (before [i] or #before == 1 and before [1] or "") end
- if after then ctx_btxsetafter (after [i] or #after == 1 and after [1] or "") end
- --
- ctx_btxsetbacklink(currentcitation)
- local bl = listtocite[currentcitation]
- if bl then
- -- we refer to a coming list entry
- ctx_btxsetinternal(bl.references.internal or "")
- else
- -- we refer to a previous list entry
- ctx_btxsetinternal(entry.internal or "")
- end
- local language = entry.language
- if language then
- ctx_btxsetlanguage(language)
- end
- if not getter(entry,last) then
- ctx_btxsetfirst(f_missing(tag))
+ local function flush(i,n,entry,last)
+ local tag = entry.tag
+ local currentcitation = markcite(dataset,tag)
+ --
+ ctx_btxstartcite()
+ ctx_btxsettag(tag)
+ --
+ if lefttext then ctx_btxsetlefttext (lefttext [i] or #lefttext == 1 and lefttext [1] or "") end
+ if righttext then ctx_btxsetrighttext(righttext[i] or #righttext == 1 and righttext[1] or "") end
+ if before then ctx_btxsetbefore (before [i] or #before == 1 and before [1] or "") end
+ if after then ctx_btxsetafter (after [i] or #after == 1 and after [1] or "") end
+ --
+ ctx_btxsetbacklink(currentcitation)
+ local bl = listtocite[currentcitation]
+ if bl then
+ -- we refer to a coming list entry
+ ctx_btxsetinternal(bl.references.internal or "")
+ else
+ -- we refer to a previous list entry
+ ctx_btxsetinternal(entry.internal or "")
+ end
+ local language = entry.language
+ if language then
+ ctx_btxsetlanguage(language)
+ end
+ if not getter(entry,last) then
+ ctx_btxsetfirst(f_missing(tag))
+ end
+ ctx_btxsetconcat(concatstate(i,n))
+ ctx_btxcitesetup(setup)
+ ctx_btxstopcite()
end
- ctx_btxsetconcat(concatstate(i,n))
- ctx_btxcitesetup(setup)
- ctx_btxstopcite()
- end
- if compress and not badkey then
- local target = (compressor or compresslist)(source)
- local nofcollected = #target
- if nofcollected == 0 then
- unknowncite(reference)
- else
- for i=1,nofcollected do
- local entry = target[i]
- local first = entry.first
- if first then
- flush(i,nofcollected,first,entry.last)
- else
- flush(i,nofcollected,entry)
+ if compress and not badkey then
+ local target = (compressor or compresslist)(source)
+ local nofcollected = #target
+ if nofcollected == 0 then
+ unknowncite(reference)
+ else
+ for i=1,nofcollected do
+ local entry = target[i]
+ local first = entry.first
+ if first then
+ flush(i,nofcollected,first,entry.last)
+ else
+ flush(i,nofcollected,entry)
+ end
end
end
- end
- else
- local nofcollected = #source
- if nofcollected == 0 then
- unknowncite(reference)
else
- for i=1,nofcollected do
- flush(i,nofcollected,source[i])
+ local nofcollected = #source
+ if nofcollected == 0 then
+ unknowncite(reference)
+ else
+ for i=1,nofcollected do
+ flush(i,nofcollected,source[i])
+ end
end
end
end
+ if tobemarked then
+ flushmarked(dataset,list)
+ commands.flushmarked() -- here (could also be done in caller)
+ end
end
- if tobemarked then
- flushmarked(dataset,list)
- commands.flushmarked() -- here (could also be done in caller)
- end
-end
-local function simplegetter(first,last,field)
- local value = first[field]
- if value then
- ctx_btxsetfirst(value)
- if last then
- ctx_btxsetsecond(last[field])
+ local function simplegetter(first,last,field)
+ local value = first[field]
+ if value then
+ ctx_btxsetfirst(value)
+ if last then
+ ctx_btxsetsecond(last[field])
+ end
+ return true
end
- return true
end
-end
-local setters = setmetatableindex({},function(t,k)
- local v = function(dataset,tag,entry,internal)
- local value = getfield(dataset,tag,k)
- return {
- tag = tag,
- internal = internal,
- [k] = value,
- sortkey = value,
- sortfld = k,
- }
- end
- t[k] = v
- return v
-end)
+ local setters = setmetatableindex({},function(t,k)
+ local v = function(dataset,tag,entry,internal)
+ local value = getfield(dataset,tag,k)
+ return {
+ tag = tag,
+ internal = internal,
+ [k] = value,
+ sortkey = value,
+ sortfld = k,
+ }
+ end
+ t[k] = v
+ return v
+ end)
-local getters = setmetatableindex({},function(t,k)
- local v = function(first,last)
- return simplegetter(first,last,k)
- end
- t[k] = v
- return v
-end)
+ local getters = setmetatableindex({},function(t,k)
+ local v = function(first,last)
+ return simplegetter(first,last,k)
+ end
+ t[k] = v
+ return v
+ end)
--- todo: just a sort key and then fetch normal by fieldname
+ -- todo: just a sort key and then fetch normal by fieldname
--- setmetatableindex(citevariants,function(t,k)
--- local p = registeredcitevariants[k]
--- local v = p and p ~= k and rawget(t,p) or defaultvariant
--- t[k] = v
--- return v
--- end)
+ -- setmetatableindex(citevariants,function(t,k)
+ -- local p = registeredcitevariants[k]
+ -- local v = p and p ~= k and rawget(t,p) or defaultvariant
+ -- t[k] = v
+ -- return v
+ -- end)
-setmetatableindex(citevariants,function(t,k)
- local p = registeredcitevariants[k]
- local v = nil
- if p and p ~= "" then
- v = rawget(t,p)
- end
- if not v then
- p = defaultvariant or "default"
- v = rawget(t,p)
+ setmetatableindex(citevariants,function(t,k)
+ local p = registeredcitevariants[k]
+ local v = nil
+ if p and p ~= "" then
+ v = rawget(t,p)
+ end
+ if not v then
+ p = defaultvariant or "default"
+ v = rawget(t,p)
+ end
+ report_cite("variant %a falls back on %a",k,p)
+ t[k] = v
+ return v
+ end)
+
+ function citevariants.default(presets) -- no longer used
+ local variant = presets.variant
+ processcite(presets,{
+ setter = setters[variant],
+ getter = getters[variant],
+ })
end
- report_cite("variant %a falls back on %a",k,p)
- t[k] = v
- return v
-end)
-function citevariants.default(presets) -- no longer used
- local variant = presets.variant
- processcite(presets,{
- setter = setters[variant],
- getter = getters[variant],
- })
-end
+ -- entry
--- entry
+ do
-do
+ local function setter(dataset,tag,entry,internal)
+ return {
+ tag = tag,
+ internal = internal,
+ }
+ end
- local function setter(dataset,tag,entry,internal)
- return {
- tag = tag,
- internal = internal,
- }
- end
+ local function getter(first,last) -- last not used
+ ctx_btxsetfirst(first.tag)
+ end
- local function getter(first,last) -- last not used
- ctx_btxsetfirst(first.tag)
- end
+ function citevariants.entry(presets)
+ processcite(presets,{
+ compress = false,
+ -- variant = presets.variant or "entry",
+ setter = setter,
+ getter = getter,
+ })
+ end
- function citevariants.entry(presets)
- processcite(presets,{
- compress = false,
- -- variant = presets.variant or "entry",
- setter = setter,
- getter = getter,
- })
end
-end
-
--- short
+ -- short
-do
+ do
- local function setter(dataset,tag,entry,internal)
- return {
- tag = tag,
- internal = internal,
- short = getdetail(dataset,tag,"short"),
- suffix = getdetail(dataset,tag,"suffix"),
- }
- end
+ local function setter(dataset,tag,entry,internal)
+ return {
+ tag = tag,
+ internal = internal,
+ short = getdetail(dataset,tag,"short"),
+ suffix = getdetail(dataset,tag,"suffix"),
+ }
+ end
- local function getter(first,last) -- last not used
- local short = first.short
- if short then
- local suffix = first.suffix
- if suffix then
- ctx_btxsetfirst(short .. suffix)
- else
- ctx_btxsetfirst(short)
+ local function getter(first,last) -- last not used
+ local short = first.short
+ if short then
+ local suffix = first.suffix
+ if suffix then
+ ctx_btxsetfirst(short .. suffix)
+ else
+ ctx_btxsetfirst(short)
+ end
+ return true
end
- return true
end
- end
- function citevariants.short(presets)
- processcite(presets,{
- compress = false,
- -- variant = presets.variant or "short",
- setter = setter,
- getter = getter,
- })
- end
+ function citevariants.short(presets)
+ processcite(presets,{
+ compress = false,
+ -- variant = presets.variant or "short",
+ setter = setter,
+ getter = getter,
+ })
+ end
-end
+ end
--- pages (no compress)
+ -- pages (no compress)
-do
+ do
- local function setter(dataset,tag,entry,internal)
- return {
- dataset = dataset,
- tag = tag,
- internal = internal,
- pages = getdetail(dataset,tag,"pages"),
- }
- end
+ local function setter(dataset,tag,entry,internal)
+ return {
+ dataset = dataset,
+ tag = tag,
+ internal = internal,
+ pages = getdetail(dataset,tag,"pages"),
+ }
+ end
- local function getter(first,last)
- local pages = first.pages
- if pages then
- if type(pages) == "table" then
- ctx_btxsetfirst(pages[1])
- ctx_btxsetsecond(pages[2])
- else
- ctx_btxsetfirst(pages)
+ local function getter(first,last)
+ local pages = first.pages
+ if pages then
+ if type(pages) == "table" then
+ ctx_btxsetfirst(pages[1])
+ ctx_btxsetsecond(pages[2])
+ else
+ ctx_btxsetfirst(pages)
+ end
+ return true
end
- return true
end
- end
- function citevariants.page(presets)
- processcite(presets,{
- -- variant = presets.variant or "page",
- setter = setter,
- getter = getter,
- })
+ function citevariants.page(presets)
+ processcite(presets,{
+ -- variant = presets.variant or "page",
+ setter = setter,
+ getter = getter,
+ })
+ end
+
end
-end
+ -- num
--- num
+ do
-do
+ local function setter(dataset,tag,entry,internal)
+ local entries = entry.entries
+ local text = entries and entries.text or "?"
+ return {
+ tag = tag,
+ internal = internal,
+ num = text,
+ sortkey = text,
+ }
+ end
- local function setter(dataset,tag,entry,internal)
- local entries = entry.entries
- local text = entries and entries.text or "?"
- return {
- tag = tag,
- internal = internal,
- num = text,
- sortkey = text,
- }
- end
+ local function getter(first,last)
+ return simplegetter(first,last,"num")
+ end
- local function getter(first,last)
- return simplegetter(first,last,"num")
- end
+ function citevariants.num(presets)
+ processcite(presets,{
+ -- variant = presets.variant or "num",
+ setter = setter,
+ getter = getter,
+ })
+ end
- function citevariants.num(presets)
- processcite(presets,{
- -- variant = presets.variant or "num",
- setter = setter,
- getter = getter,
- })
end
-end
+ -- year
--- year
+ do
-do
+ local function setter(dataset,tag,entry,internal)
+ return {
+ dataset = dataset,
+ tag = tag,
+ internal = internal,
+ year = getfield(dataset,tag,"year"),
+ suffix = getdetail(dataset,tag,"suffix"),
+ sortkey = getdetail(dataset,tag,"suffixedyear"),
+ }
+ end
- local function setter(dataset,tag,entry,internal)
- return {
- dataset = dataset,
- tag = tag,
- internal = internal,
- year = getfield(dataset,tag,"year"),
- suffix = getdetail(dataset,tag,"suffix"),
- sortkey = getdetail(dataset,tag,"suffixedyear"),
- }
- end
+ local function getter(first,last)
+ return simplegetter(first,last,"year")
+ end
- local function getter(first,last)
- return simplegetter(first,last,"year")
- end
+ function citevariants.year(presets)
+ processcite(presets,{
+ -- variant = presets.variant or "year",
+ setter = setter,
+ getter = getter,
+ })
+ end
- function citevariants.year(presets)
- processcite(presets,{
- -- variant = presets.variant or "year",
- setter = setter,
- getter = getter,
- })
end
-end
+ -- index | serial
--- index | serial
+ do
-do
+ local function setter(dataset,tag,entry,internal)
+ local index = getfield(dataset,tag,"index")
+ return {
+ dataset = dataset,
+ tag = tag,
+ internal = internal,
+ index = index,
+ sortkey = index,
+ }
+ end
- local function setter(dataset,tag,entry,internal)
- local index = getfield(dataset,tag,"index")
- return {
- dataset = dataset,
- tag = tag,
- internal = internal,
- index = index,
- sortkey = index,
- }
- end
+ local function getter(first,last)
+ return simplegetter(first,last,"index")
+ end
- local function getter(first,last)
- return simplegetter(first,last,"index")
- end
+ function citevariants.index(presets)
+ processcite(presets,{
+ -- variant = presets.variant or "index",
+ setter = setter,
+ getter = getter,
+ })
+ end
- function citevariants.index(presets)
- processcite(presets,{
- -- variant = presets.variant or "index",
- setter = setter,
- getter = getter,
- })
- end
+ function citevariants.serial(presets)
+ processcite(presets,{
+ -- variant = presets.variant or "serial",
+ setter = setter,
+ getter = getter,
+ })
+ end
- function citevariants.serial(presets)
- processcite(presets,{
- -- variant = presets.variant or "serial",
- setter = setter,
- getter = getter,
- })
end
-end
+ -- category | type
--- category | type
+ do
-do
+ local function setter(dataset,tag,entry,internal)
+ return {
+ dataset = dataset,
+ tag = tag,
+ internal = internal,
+ category = getfield(dataset,tag,"category"),
+ }
+ end
- local function setter(dataset,tag,entry,internal)
- return {
- dataset = dataset,
- tag = tag,
- internal = internal,
- category = getfield(dataset,tag,"category"),
- }
- end
+ local function getter(first,last)
+ return simplegetter(first,last,"category")
+ end
- local function getter(first,last)
- return simplegetter(first,last,"category")
- end
+ function citevariants.category(presets)
+ processcite(presets,{
+ -- variant = presets.variant or "serial",
+ setter = setter,
+ getter = getter,
+ })
+ end
- function citevariants.category(presets)
- processcite(presets,{
- -- variant = presets.variant or "serial",
- setter = setter,
- getter = getter,
- })
- end
+ function citevariants.type(presets)
+ processcite(presets,{
+ -- variant = presets.variant or "type",
+ setter = setter,
+ getter = getter,
+ })
+ end
- function citevariants.type(presets)
- processcite(presets,{
- -- variant = presets.variant or "type",
- setter = setter,
- getter = getter,
- })
end
-end
+ -- key | tag
--- key | tag
+ do
-do
+ local function setter(dataset,tag,entry,internal)
+ return {
+ dataset = dataset,
+ tag = tag,
+ internal = internal,
+ }
+ end
- local function setter(dataset,tag,entry,internal)
- return {
- dataset = dataset,
- tag = tag,
- internal = internal,
- }
- end
+ local function getter(first,last)
+ ctx_btxsetfirst(first.tag)
+ return true
+ end
- local function getter(first,last)
- ctx_btxsetfirst(first.tag)
- return true
- end
+ function citevariants.key(presets)
+ return processcite(presets,{
+ variant = "key",
+ setter = setter,
+ getter = getter,
+ })
+ end
- function citevariants.key(presets)
- return processcite(presets,{
- variant = "key",
- setter = setter,
- getter = getter,
- })
- end
+ function citevariants.tag(presets)
+ return processcite(presets,{
+ variant = "tag",
+ setter = setter,
+ getter = getter,
+ })
+ end
- function citevariants.tag(presets)
- return processcite(presets,{
- variant = "tag",
- setter = setter,
- getter = getter,
- })
end
-end
+ -- todo : sort
+ -- todo : choose between publications or commands namespace
+ -- todo : use details.author
+ -- todo : sort details.author
+ -- (name, name and name) .. how names? how sorted?
+ -- todo: we loop at the tex end .. why not here
+ -- \cite[{hh,afo},kvm]
--- todo : sort
--- todo : choose between publications or commands namespace
--- todo : use details.author
--- todo : sort details.author
--- (name, name and name) .. how names? how sorted?
--- todo: we loop at the tex end .. why not here
--- \cite[{hh,afo},kvm]
+ -- common
--- common
+ do
-do
+ local getauthor = publications.authors.getauthor
- local getauthor = publications.authors.getauthor
-
- local currentbtxciteauthor = function()
- context.currentbtxciteauthor()
- return true -- needed?
- end
+ local currentbtxciteauthor = function()
+ context.currentbtxciteauthor()
+ return true -- needed?
+ end
- local function authorcompressor(found)
- local result = { }
- local entries = { }
- for i=1,#found do
- local entry = found[i]
- local author = entry.author
- if author then
- local aentries = entries[author]
- if aentries then
- aentries[#aentries+1] = entry
- else
- entries[author] = { entry }
+ local function authorcompressor(found)
+ local result = { }
+ local entries = { }
+ for i=1,#found do
+ local entry = found[i]
+ local author = entry.author
+ if author then
+ local aentries = entries[author]
+ if aentries then
+ aentries[#aentries+1] = entry
+ else
+ entries[author] = { entry }
+ end
end
end
- end
- for i=1,#found do
- local entry = found[i]
- local author = entry.author
- if author then
- local aentries = entries[author]
- if not aentries then
- result[#result+1] = entry
- elseif aentries == true then
- -- already done
- else
- result[#result+1] = entry
- entry.entries = aentries
- entries[author] = true
+ for i=1,#found do
+ local entry = found[i]
+ local author = entry.author
+ if author then
+ local aentries = entries[author]
+ if not aentries then
+ result[#result+1] = entry
+ elseif aentries == true then
+ -- already done
+ else
+ result[#result+1] = entry
+ entry.entries = aentries
+ entries[author] = true
+ end
end
end
+ -- todo: add letters (should we then tag all?)
+ return result
end
- -- todo: add letters (should we then tag all?)
- return result
- end
- local function authorconcat(target,key,setup)
- ctx_btxstartsubcite(setup)
- local nofcollected = #target
- if nofcollected == 0 then
- unknowncite(tag)
- else
- for i=1,nofcollected do
- local entry = target[i]
- local first = entry.first
- local tag = entry.tag
- local currentcitation = markcite(entry.dataset,tag)
- ctx_btxstartciteauthor()
- ctx_btxsettag(tag)
- ctx_btxsetbacklink(currentcitation)
- local bl = listtocite[currentcitation]
- ctx_btxsetinternal(bl and bl.references.internal or "")
- if first then
- ctx_btxsetfirst(first[key] or f_missing(first.tag))
- local suffix = entry.suffix
- local value = entry.last[key]
- if value then
- ctx_btxsetsecond(value)
- end
- if suffix then
- ctx_btxsetthird(suffix)
- end
- else
- local suffix = entry.suffix
- local value = entry[key] or f_missing(tag)
- ctx_btxsetfirst(value)
- if suffix then
- ctx_btxsetthird(suffix)
+ local function authorconcat(target,key,setup)
+ ctx_btxstartsubcite(setup)
+ local nofcollected = #target
+ if nofcollected == 0 then
+ unknowncite(tag)
+ else
+ for i=1,nofcollected do
+ local entry = target[i]
+ local first = entry.first
+ local tag = entry.tag
+ local currentcitation = markcite(entry.dataset,tag)
+ ctx_btxstartciteauthor()
+ ctx_btxsettag(tag)
+ ctx_btxsetbacklink(currentcitation)
+ local bl = listtocite[currentcitation]
+ ctx_btxsetinternal(bl and bl.references.internal or "")
+ if first then
+ ctx_btxsetfirst(first[key] or f_missing(first.tag))
+ local suffix = entry.suffix
+ local value = entry.last[key]
+ if value then
+ ctx_btxsetsecond(value)
+ end
+ if suffix then
+ ctx_btxsetthird(suffix)
+ end
+ else
+ local suffix = entry.suffix
+ local value = entry[key] or f_missing(tag)
+ ctx_btxsetfirst(value)
+ if suffix then
+ ctx_btxsetthird(suffix)
+ end
end
+ ctx_btxsetconcat(concatstate(i,nofcollected))
+ ctx_btxcitesetup(setup)
+ ctx_btxstopciteauthor()
end
- ctx_btxsetconcat(concatstate(i,nofcollected))
- ctx_btxcitesetup(setup)
- ctx_btxstopciteauthor()
- end
- end
- ctx_btxstopsubcite()
- end
-
- local function authorsingle(entry,key,setup)
- ctx_btxstartsubcite(setup)
- ctx_btxstartciteauthor()
- local tag = entry.tag
- ctx_btxsettag(tag)
- -- local currentcitation = markcite(entry.dataset,tag)
- -- ctx_btxsetbacklink(currentcitation)
- -- local bl = listtocite[currentcitation]
- -- ctx_btxsetinternal(bl and bl.references.internal or "")
- ctx_btxsetfirst(entry[key] or f_missing(tag))
- ctx_btxsetthird(entry.suffix)
- ctx_btxcitesetup(setup)
- ctx_btxstopciteauthor()
- ctx_btxstopsubcite()
- end
-
- local partialinteractive = false
-
- local function authorgetter(first,last,key,setup) -- only first
- -- ctx_btxsetfirst(first.author) -- unformatted
- ctx_btxsetfirst(currentbtxciteauthor) -- formatter (much slower)
- local entries = first.entries
- -- alternatively we can use a concat with one ... so that we can only make the
- -- year interactive, as with the concat
- if partialinteractive and not entries then
- entries = { first }
- end
- if entries then
- local c = compresslist(entries)
- local f = function() authorconcat(c,key,setup) return true end -- indeed return true?
- ctx_btxsetcount(#c)
- ctx_btxsetsecond(f)
- else
- local f = function() authorsingle(first,key,setup) return true end -- indeed return true?
- ctx_btxsetcount(0)
- ctx_btxsetsecond(f)
+ end
+ ctx_btxstopsubcite()
end
- return true
- end
- -- author
+ local function authorsingle(entry,key,setup)
+ ctx_btxstartsubcite(setup)
+ ctx_btxstartciteauthor()
+ local tag = entry.tag
+ ctx_btxsettag(tag)
+ -- local currentcitation = markcite(entry.dataset,tag)
+ -- ctx_btxsetbacklink(currentcitation)
+ -- local bl = listtocite[currentcitation]
+ -- ctx_btxsetinternal(bl and bl.references.internal or "")
+ ctx_btxsetfirst(entry[key] or f_missing(tag))
+ ctx_btxsetthird(entry.suffix)
+ ctx_btxcitesetup(setup)
+ ctx_btxstopciteauthor()
+ ctx_btxstopsubcite()
+ end
- local function setter(dataset,tag,entry,internal)
- return {
- dataset = dataset,
- tag = tag,
- internal = internal,
- author = getauthor(dataset,tag,currentspecificationcategories), -- todo: list
- }
- end
+ local partialinteractive = false
- local function getter(first,last,_,setup)
- -- ctx_btxsetfirst(first.author) -- unformatted
- ctx_btxsetfirst(currentbtxciteauthor) -- formatter (much slower)
- return true
- end
+ local function authorgetter(first,last,key,setup) -- only first
+ -- ctx_btxsetfirst(first.author) -- unformatted
+ ctx_btxsetfirst(currentbtxciteauthor) -- formatter (much slower)
+ local entries = first.entries
+ -- alternatively we can use a concat with one ... so that we can only make the
+ -- year interactive, as with the concat
+ if partialinteractive and not entries then
+ entries = { first }
+ end
+ if entries then
+ local c = compresslist(entries)
+ local f = function() authorconcat(c,key,setup) return true end -- indeed return true?
+ ctx_btxsetcount(#c)
+ ctx_btxsetsecond(f)
+ else
+ local f = function() authorsingle(first,key,setup) return true end -- indeed return true?
+ ctx_btxsetcount(0)
+ ctx_btxsetsecond(f)
+ end
+ return true
+ end
- function citevariants.author(presets)
- processcite(presets,{
- compress = false,
- variant = "author",
- setter = setter,
- getter = getter,
- })
- end
+ -- author
- -- authornum
+ local function setter(dataset,tag,entry,internal)
+ return {
+ dataset = dataset,
+ tag = tag,
+ internal = internal,
+ author = getauthor(dataset,tag,currentspecification.categories), -- todo: list
+ }
+ end
- local function setter(dataset,tag,entry,internal)
- local text = entry.entries.text
- return {
- dataset = dataset,
- tag = tag,
- internal = internal,
- author = getauthor(dataset,tag,currentspecificationcategories), -- todo: list
- num = text,
- sortkey = text and lpegmatch(numberonly,text),
- }
- end
+ local function getter(first,last,_,setup)
+ -- ctx_btxsetfirst(first.author) -- unformatted
+ ctx_btxsetfirst(currentbtxciteauthor) -- formatter (much slower)
+ return true
+ end
- local function getter(first,last)
- authorgetter(first,last,"num","author:num")
- return true
- end
+ function citevariants.author(presets)
+ processcite(presets,{
+ compress = false,
+ variant = "author",
+ setter = setter,
+ getter = getter,
+ })
+ end
- function citevariants.authornum(presets)
- processcite(presets,{
- variant = "authornum",
- setter = setter,
- getter = getter,
- compressor = authorcompressor,
- })
- end
+ -- authornum
- -- authoryear | authoryears
+ local function setter(dataset,tag,entry,internal)
+ local text = entry.entries.text
+ return {
+ dataset = dataset,
+ tag = tag,
+ internal = internal,
+ author = getauthor(dataset,tag,currentspecification.categories), -- todo: list
+ num = text,
+ sortkey = text and lpegmatch(numberonly,text),
+ }
+ end
- local function setter(dataset,tag,entry,internal)
- return {
- dataset = dataset,
- tag = tag,
- internal = internal,
- author = getauthor(dataset,tag,currentspecificationcategories), -- todo: list
- year = getfield(dataset,tag,"year"),
- suffix = getdetail(dataset,tag,"suffix"),
- sortkey = getdetail(dataset,tag,"suffixedyear"),
- }
- end
+ local function getter(first,last)
+ authorgetter(first,last,"num","author:num")
+ return true
+ end
- local function getter(first,last)
- authorgetter(first,last,"year","author:year")
- return true
- end
+ function citevariants.authornum(presets)
+ processcite(presets,{
+ variant = "authornum",
+ setter = setter,
+ getter = getter,
+ compressor = authorcompressor,
+ })
+ end
- function citevariants.authoryear(presets)
- processcite(presets,{
- variant = "authoryear",
- setter = setter,
- getter = getter,
- compressor = authorcompressor,
- })
- end
+ -- authoryear | authoryears
- local function getter(first,last)
- authorgetter(first,last,"year","author:years")
- return true
- end
+ local function setter(dataset,tag,entry,internal)
+ return {
+ dataset = dataset,
+ tag = tag,
+ internal = internal,
+ author = getauthor(dataset,tag,currentspecification.categories), -- todo: list
+ year = getfield(dataset,tag,"year"),
+ suffix = getdetail(dataset,tag,"suffix"),
+ sortkey = getdetail(dataset,tag,"suffixedyear"),
+ }
+ end
- function citevariants.authoryears(presets)
- processcite(presets,{
- variant = "authoryears",
- setter = setter,
- getter = getter,
- compressor = authorcompressor,
- })
- end
+ local function getter(first,last)
+ authorgetter(first,last,"year","author:year")
+ return true
+ end
-end
+ function citevariants.authoryear(presets)
+ processcite(presets,{
+ variant = "authoryear",
+ setter = setter,
+ getter = getter,
+ compressor = authorcompressor,
+ })
+ end
--- List variants
+ local function getter(first,last)
+ authorgetter(first,last,"year","author:years")
+ return true
+ end
-local listvariants = { }
-publications.listvariants = listvariants
+ function citevariants.authoryears(presets)
+ processcite(presets,{
+ variant = "authoryears",
+ setter = setter,
+ getter = getter,
+ compressor = authorcompressor,
+ })
+ end
-function commands.btxlistvariant(dataset,block,tag,variant,listindex)
- local action = listvariants[variant] or listvariants.default
- if action then
- action(dataset,block,tag,variant,tonumber(listindex) or 0)
end
-end
-function listvariants.default(dataset,block,tag,variant)
- ctx_btxsetfirst("?")
- ctx_btxlistsetup(variant)
end
-function listvariants.num(dataset,block,tag,variant,listindex)
- ctx_btxsetfirst(listindex)
- ctx_btxlistsetup(variant)
-end
+-- List variants
+
+do
-listvariants[v_yes] = listvariants.num
-listvariants.bib = listvariants.num
+ local listvariants = { }
+ publications.listvariants = listvariants
-function listvariants.short(dataset,block,tag,variant,listindex)
- local short = getdetail(dataset,tag,"short","short")
- local suffix = getdetail(dataset,tag,"suffix","suffix")
- if short then
- ctx_btxsetfirst(short)
+ function commands.btxlistvariant(dataset,block,tag,variant,listindex)
+ local action = listvariants[variant] or listvariants.default
+ if action then
+ action(dataset,block,tag,variant,tonumber(listindex) or 0)
+ end
end
- if suffix then
- ctx_btxsetthird(suffix)
+
+ function listvariants.default(dataset,block,tag,variant)
+ ctx_btxsetfirst("?")
+ ctx_btxlistsetup(variant)
+ end
+
+ function listvariants.num(dataset,block,tag,variant,listindex)
+ ctx_btxsetfirst(listindex)
+ ctx_btxlistsetup(variant)
end
- ctx_btxlistsetup(variant)
-end
-function listvariants.page(dataset,block,tag,variant,listindex)
- local rendering = renderings[dataset]
- local specification = rendering.list[listindex]
- for i=3,#specification do
- local backlink = tonumber(specification[i])
- if backlink then
- local citation = citetolist[backlink]
- if citation then
- local references = citation.references
- if references then
- local internal = references.internal
- local realpage = references.realpage
- if internal and realpage then
- ctx_btxsetconcat(i-2)
- ctx_btxsetfirst(realpage)
- ctx_btxsetsecond(backlink)
- ctx_btxlistsetup(variant)
+ listvariants[v_yes] = listvariants.num
+ listvariants.bib = listvariants.num
+
+ function listvariants.short(dataset,block,tag,variant,listindex)
+ local short = getdetail(dataset,tag,"short","short")
+ local suffix = getdetail(dataset,tag,"suffix","suffix")
+ if short then
+ ctx_btxsetfirst(short)
+ end
+ if suffix then
+ ctx_btxsetthird(suffix)
+ end
+ ctx_btxlistsetup(variant)
+ end
+
+ function listvariants.page(dataset,block,tag,variant,listindex)
+ local rendering = renderings[dataset]
+ local specification = rendering.list[listindex]
+ for i=3,#specification do
+ local backlink = tonumber(specification[i])
+ if backlink then
+ local citation = citetolist[backlink]
+ if citation then
+ local references = citation.references
+ if references then
+ local internal = references.internal
+ local realpage = references.realpage
+ if internal and realpage then
+ ctx_btxsetconcat(i-2)
+ ctx_btxsetfirst(realpage)
+ ctx_btxsetsecond(backlink)
+ ctx_btxlistsetup(variant)
+ end
end
end
end
end
end
+
end
diff --git a/tex/context/base/publ-ini.mkiv b/tex/context/base/publ-ini.mkiv
index f2a45584b..2c81f294e 100644
--- a/tex/context/base/publ-ini.mkiv
+++ b/tex/context/base/publ-ini.mkiv
@@ -65,6 +65,7 @@
\def\s!btxrtx {btxrtx}
\def\s!btxatx {btxatx}
\def\s!btxbtx {btxbtx}
+\def\s!btxspc {btxspc}
\definelabelclass[btxlabel][2]
@@ -309,6 +310,58 @@
\installcommandhandler \??btxregister {btxregister} \??btxregister
\installcommandhandler \??btxrendering {btxrendering} \??btxrendering
+%D The following two helpers permit us to use prefixes (if we want):
+%D
+%D \startbuffer
+%D \let\btxciteparameter\btxspecificationciteparameter
+%D \let\btxlistparameter\btxspecificationlistparameter
+%D
+%D \edef\currentbtxspecification {apa}
+%D \edef\currentbtxcitealternative{author}
+%D
+%D \setupbtxcitevariant [crap=crap]
+%D \definebtxcitevariant [author] [check=author,extra=author]
+%D \definebtxcitevariant [authoryear] [author] [check=authoryear]
+%D \definebtxcitevariant [authoryears] [authoryear] [check=authoryears]
+%D \setupbtxcitevariant [author] [apa:check=apa-author]
+%D \setupbtxcitevariant [authoryear] [apa:check=apa-authoryear]
+%D
+%D \starttabulate[|lT|lT|]
+%D \NC \bf check \EQ \btxciteparameter{check} \NC\NR
+%D \NC \bf extra \EQ \btxciteparameter{extra} \NC\NR
+%D \NC \bf crap \EQ \btxciteparameter{crap} \NC\NR
+%D \stoptabulate
+%D \stopbuffer
+%D
+%D \typebuffer \start \getbuffer \stop
+
+\def\btxspecificationciteparameter#1%
+ {\csname
+ \??btxcitevariant
+ \ifcsname\??btxcitevariant\currentbtxcitealternative:\currentbtxspecification:#1\endcsname
+ \currentbtxcitealternative:\currentbtxspecification:#1%
+ \else\ifcsname\??btxcitevariant\currentbtxcitealternative:#1\endcsname
+ \currentbtxcitealternative:#1%
+ \else
+ :#1% we assume defined variants
+ \fi\fi
+ \endcsname}
+
+\def\btxspecificationlistparameter#1%
+ {\csname
+ \??btxlistvariant
+ \ifcsname\??btxlistvariant\currentbtxlistalternative:\currentbtxspecification:#1\endcsname
+ \currentbtxlistalternative:\currentbtxspecification:#1%
+ \else\ifcsname\??btxlistvariant\currentbtxlistalternative:#1\endcsname
+ \currentbtxlistalternative:#1%
+ \else
+ :#1% we assume defined variants
+ \fi\fi
+ \endcsname}
+
+% \let\btxciteparameter\btxspecificationciteparameter
+% \let\btxlistparameter\btxspecificationlistparameter
+
\appendtoks
\ifx\currentbtxlistvariant\empty \else
\ctxcommand{registerbtxlistvariant("\currentbtxlistvariant","\currentbtxlistvariantparent")}%
@@ -322,14 +375,25 @@
\to \everydefinebtxcitevariant
\unexpanded\def\usebtxdataset
- {\dodoubleargument\publ_use_dataset}
+ {\begingroup
+ \dotripleempty\publ_use_dataset}
-\def\publ_use_dataset[#1][#2]%
- {\ifsecondargument
- \ctxcommand{usebtxdataset("#1","#2")}%
- \else
- \ctxcommand{usebtxdataset("\v!standard","#1")}%
- \fi}
+\def\publ_use_dataset[#1][#2][#3]%
+ {\getdummyparameters[\c!specification=\currentbtxspecification,#3]%
+ \ifsecondargument
+ \ctxcommand{usebtxdataset{
+ specification = "\dummyparameter\c!specification",
+ dataset = "#1",
+ filename = "#2",
+ }}%
+ \else\iffirstargument
+ \ctxcommand{usebtxdataset{
+ specification = "\dummyparameter\c!specification",
+ dataset = "\v!standard",
+ filename = "#1",
+ }}%
+ \fi\fi
+ \endgroup}
\definebtxdataset
[\v!standard]
@@ -862,6 +926,7 @@
\strc_references_direct_full_user
{\s!btxset="\currentbtxdataset",%
\s!btxref="\currentbtxtag",%
+ \s!btxspc="\currentbtxspecification",%
\ifx\currentbtxbefore\empty\else\s!btxbtx={\currentbtxbefore},\fi%
\ifx\currentbtxafter \empty\else\s!btxatx={\currentbtxafter },\fi%
\ifx\currentbtxbacklink\currentbtxbacktrace\s!btxint="\currentbtxbacklink"\else\s!btxbck="\currentbtxbacktrace"\fi}%
@@ -1351,13 +1416,14 @@
\unexpanded\def\publ_registers_set
{\ifx\currentbtxregister\empty \else
\ctxcommand{setbtxregister {
- name = "\currentbtxregister",
- state = "\btxregisterparameter\c!state",
- dataset = "\btxregisterparameter\c!dataset",
- field = "\btxregisterparameter\c!field",
- register = "\btxregisterparameter\c!register",
- method = "\btxregisterparameter\c!method",
- alternative = "\btxregisterparameter\c!alternative",
+ specification = "\currentbtxspecification",
+ name = "\currentbtxregister",
+ state = "\btxregisterparameter\c!state",
+ dataset = "\btxregisterparameter\c!dataset",
+ field = "\btxregisterparameter\c!field",
+ register = "\btxregisterparameter\c!register",
+ method = "\btxregisterparameter\c!method",
+ alternative = "\btxregisterparameter\c!alternative",
}}%
\fi}
diff --git a/tex/context/base/publ-jrn.lua b/tex/context/base/publ-jrn.lua
index de98608c7..91df21904 100644
--- a/tex/context/base/publ-jrn.lua
+++ b/tex/context/base/publ-jrn.lua
@@ -16,30 +16,31 @@ if not modules then modules = { } end modules ['publ-jrn'] = {
-- Abhandlungen der Naturforschenden Gesellschaft in Zürich = Abh. Nat.forsch. Ges. Zür.
-- Abhandlungen des Naturwissenschaftlichen Vereins zu Bremen = Abh. Nat.wiss. Ver. Bremen
-if not characters then require("char-utf") end
+local context = context
+local commands = commands
local find = string.find
local P, C, S, Cs, lpegmatch, lpegpatterns = lpeg.P, lpeg.C, lpeg.S, lpeg.Cs, lpeg.match, lpeg.patterns
-local lower = characters.lower
-
local report_journals = logs.reporter("publications","journals")
-publications = publications or { }
+local publications = publications
local journals = { }
publications.journals = journals
+local lowercase = characters.lower
+
local expansions = { }
local abbreviations = { }
local nofexpansions = 0
local nofabbreviations = 0
-local valid = 1 - S([[ ."':;,-]])
-local pattern = Cs((valid^1 + P(1)/"")^1)
+local valid = 1 - S([[ ."':;,-]])
+local pattern = Cs((valid^1 + P(1)/"")^1)
local function simplify(name)
-- we have utf but it doesn't matter much if we lower the bytes
- return name and lower(lpegmatch(pattern,name)) or name
+ return name and lowercase(lpegmatch(pattern,name)) or name
end
local function add(expansion,abbreviation)
diff --git a/tex/context/base/publ-oth.lua b/tex/context/base/publ-oth.lua
index ff7e0e29c..55c62c31e 100644
--- a/tex/context/base/publ-oth.lua
+++ b/tex/context/base/publ-oth.lua
@@ -11,8 +11,10 @@ local lpegmatch = lpeg.match
local p_endofline = lpeg.patterns.newline
-local loaders = publications.loaders
-local getindex = publications.getindex
+local publications = publications
+
+local loaders = publications.loaders
+local getindex = publications.getindex
local function addfield(t,k,v,fields)
k = fields[k]
diff --git a/tex/context/base/publ-reg.lua b/tex/context/base/publ-reg.lua
index 7ff284d49..ef4ed06a3 100644
--- a/tex/context/base/publ-reg.lua
+++ b/tex/context/base/publ-reg.lua
@@ -21,10 +21,14 @@ local v_standard = variables.standard
local v_stop = variables.stop
local v_all = variables.all
+local publications = publications
local datasets = publications.datasets
-local specifications = { }
+local specifications = publications.specifications
+local detailed = publications.detailed
+
+local registrations = { }
local sequence = { }
-local flushers = table.setmetatableindex(function(t,k) t[k] = default return default end)
+local flushers = table.setmetatableindex(function(t,k) local v = t.default t[k] = v return v end)
function commands.setbtxregister(specification)
local name = specification.name
@@ -38,10 +42,10 @@ function commands.setbtxregister(specification)
dataset = v_all
end
-- could be metatable magic
- local s = specifications[register]
+ local s = registrations[register]
if not s then
s = { }
- specifications[register] = s
+ registrations[register] = s
end
local d = s[dataset]
if not d then
@@ -51,17 +55,24 @@ function commands.setbtxregister(specification)
--
-- check all
--
+ local processors = name ~= register and name or ""
+ if processor == "" then
+ processor = nil
+ elseif processor then
+ processor = "btx:r:" .. processor
+ end
+ --
d.active = specification.state ~= v_stop
d.once = specification.method == v_once or false
d.field = field
- d.processor = name ~= register and name or ""
+ d.processor = processor
d.alternative = d.alternative or specification.alternative
d.register = register
d.dataset = dataset
d.done = d.done or { }
--
sequence = { }
- for register, s in sortedhash(specifications) do
+ for register, s in sortedhash(registrations) do
for dataset, d in sortedhash(s) do
if d.active then
sequence[#sequence+1] = d
@@ -70,21 +81,50 @@ function commands.setbtxregister(specification)
end
end
+----- getter = publications.directget
+
+local function getter(current,tag,step) -- todo: detail
+ local data = current.luadata[tag]
+ if data then
+ local catspec = specifications[step.specification].categories[data.category]
+ if catspec then
+ local fields = catspec.fields
+ if fields then
+ local field = step.field
+ local sets = catspec.sets
+ if sets then
+ local set = sets[field]
+ if set then
+ for i=1,#set do
+ local field = set[i]
+ local value = fields[field] and data[field] -- redundant check
+ if value then
+ return field, value, catspec.types[field] or "string"
+ end
+ end
+ end
+ end
+ local value = fields[field] and data[field]
+ if value then
+ return field, value, catspec.types[field] or "string"
+ end
+ end
+ end
+ end
+end
+
function commands.btxtoregister(dataset,tag)
+ local current = datasets[dataset]
for i=1,#sequence do
local step = sequence[i]
local dset = step.dataset
if dset == v_all or dset == dataset then
local done = step.done
if not done[tag] then
- local current = datasets[dataset]
- local entry = current.luadata[tag]
- if entry then
- local processor = step.processor
- if processor and processor ~= "" then
- step.processor = "btx:r:" .. processor
- end
- flushers[step.field or "default"](step,dataset,tag,current,entry,current.details[tag])
+ local field, value, kind = getter(current,tag,step)
+ if value then
+ local cast = detailed[kind][value] or value
+ flushers[kind](step,field,value,cast)
end
done[tag] = true
end
@@ -102,53 +142,44 @@ end
local ctx_dosetfastregisterentry = context.dosetfastregisterentry -- register entry key
-local p_keywords = lpeg.tsplitat(lpeg.patterns.whitespace^0 * lpeg.P(";") * lpeg.patterns.whitespace^0)
-local serialize = publications.serializeauthor
-local components = publications.authorcomponents
-local f_author = formatters[ [[\btxindexedauthor{%s}{%s}{%s}{%s}{%s}{%s}]] ]
+local p_keywords = lpeg.tsplitat(lpeg.patterns.whitespace^0 * lpeg.P(";") * lpeg.patterns.whitespace^0)
+local serialize = publications.serializeauthor
+local components = publications.authorcomponents
+local f_author = formatters[ [[\btxindexedauthor{%s}{%s}{%s}{%s}{%s}{%s}]] ]
-function flushers.default(specification,dataset,tag,current,entry,detail)
- local field = specification.field
- local k = detail[field] or entry[field]
- if k then
- ctx_dosetfastregisterentry(specification.register,k,"",specification.processor,"")
+function flushers.string(step,field,value,cast)
+ if value and value ~= "" then
+ ctx_dosetfastregisterentry(step.register,type(cast) == "string" and cast or value,"",step.processor or "","")
end
end
+flushers.default = flushers.string
+
local shorts = {
- normalshort = true,
- invertedshort = true,
+ normalshort = "normalshort",
+ invertedshort = "invertedshort",
}
-function flushers.author(specification,dataset,tag,current,entry,detail)
- if detail then
- local field = specification.field
- local author = detail[field]
- if author then
- local alternative = specification.alternative or "invertedshort"
- local short = shorts[alternative]
- local register = specification.register
- local processor = specification.processor
- for i=1,#author do
- local a = author[i]
- local k = serialize(a)
- local e = f_author(alternative,components(a,short))
- ctx_dosetfastregisterentry(register,e,k,processor,"")
- end
+function flushers.author(step,field,value,cast)
+ if cast and #cast > 0 then
+ local register = step.register
+ local processor = step.processor
+ local alternative = shorts[step.alternative or "invertedshort"] or "invertedshort"
+ for i=1,#cast do
+ local a = cast[i]
+ local k = serialize(a)
+ local e = f_author(alternative,components(a,short))
+ ctx_dosetfastregisterentry(register,e,k,processor or "","")
end
end
end
-function flushers.keywords(specification,dataset,tag,current,entry,detail)
- if entry then
- local value = entry[specification.field]
- if value then
- local keywords = lpegmatch(p_keywords,value)
- local register = specification.register
- local processor = specification.processor
- for i=1,#keywords do
- ctx_dosetfastregisterentry(register,keywords[i],"",processor,"")
- end
+function flushers.keywords(step,field,value,cast)
+ if cast and #cast > 0 then
+ local register = step.register
+ local processor = step.processor
+ for i=1,#cast do
+ ctx_dosetfastregisterentry(register,cast[i],"",processor or "","")
end
end
end
diff --git a/tex/context/base/publ-tra.lua b/tex/context/base/publ-tra.lua
index 09dab888a..d3bc05ea1 100644
--- a/tex/context/base/publ-tra.lua
+++ b/tex/context/base/publ-tra.lua
@@ -12,18 +12,21 @@ local sortedhash, sortedkeys = table.sortedhash, table.sortedkeys
local settings_to_array = utilities.parsers.settings_to_array
local formatters = string.formatters
-local tracers = publications.tracers or { }
+local context = context
+local commands = commands
+
+local publications = publications
+local tracers = publications.tracers
+local tables = publications.tables
local datasets = publications.datasets
local specifications = publications.specifications
-local context = context
-
local ctx_NC, ctx_NR, ctx_HL, ctx_FL, ctx_ML, ctx_LL = context.NC, context.NR, context.HL, context.FL, context.ML, context.LL
local ctx_bold, ctx_monobold, ctx_rotate, ctx_llap, ctx_rlap = context.bold, context.formatted.monobold, context.rotate, context.llap, context.rlap
local ctx_starttabulate, ctx_stoptabulate = context.starttabulate, context.stoptabulate
-local privates = publications.tables.privates
-local specials = publications.tables.specials
+local privates = tables.privates
+local specials = tables.specials
local report = logs.reporter("publications","tracers")
@@ -35,7 +38,7 @@ function tracers.showdatasetfields(settings)
local fielddata = specification and specifications[specification] or specifications.apa
local categories = fielddata.categories
if next(luadata) then
- ctx_starttabulate { "|lT|lT|pT|" }
+ ctx_starttabulate { "|lT|lT|pTl|" }
ctx_NC() ctx_bold("tag")
ctx_NC() ctx_bold("category")
ctx_NC() ctx_bold("fields")
@@ -58,9 +61,9 @@ function tracers.showdatasetfields(settings)
if kind == "required" then
context("{\\darkgreen %s} ",key)
elseif kind == "optional" then
- context("{\\darkyellow %s} ",key)
- else
context("%s ",key)
+ else
+ context("{\\darkyellow %s} ",key)
end
end
end
@@ -81,7 +84,7 @@ function tracers.showdatasetcompleteness(settings)
local lpegmatch = lpeg.match
local texescape = lpeg.patterns.texescape
- local preamble = { "|lTBw(5em)|lBTp(10em)|p|" }
+ local preamble = { "|lTBw(5em)|lBTp(10em)|pl|" }
local function identified(tag,category,crossref)
ctx_NC()
@@ -284,7 +287,7 @@ function tracers.showfields(settings)
end
function tracers.showtables(settings)
- for name, list in sortedhash(publications.tables) do
+ for name, list in sortedhash(tables) do
ctx_starttabulate { "|Tl|Tl|" }
ctx_FL()
ctx_NC()
diff --git a/tex/context/base/publ-usr.lua b/tex/context/base/publ-usr.lua
index e2100a289..4650401ee 100644
--- a/tex/context/base/publ-usr.lua
+++ b/tex/context/base/publ-usr.lua
@@ -7,9 +7,11 @@ if not modules then modules = { } end modules ['publ-usr'] = {
}
local P, Cs, R, Cc, Carg = lpeg.P, lpeg.Cs, lpeg.R, lpeg.Cc, lpeg.Carg
-
+local lpegmatch = lpeg.match
local settings_to_hash = utilities.parsers.settings_to_hash
--- local chardata = characters.data
+
+local publications = publications
+local datasets = publications.datasets
-- local str = [[
-- \startpublication[k=Berdnikov:TB21-2-129,t=article,a={{Berdnikov},{}},y=2000,n=2257,s=BHHJ00]
@@ -70,8 +72,10 @@ local leftbrace = P("{")
local rightbrace = P("}")
local leftbracket = P("[")
local rightbracket = P("]")
+local backslash = P("\\")
+local letter = R("az","AZ")
-local key = P("\\") * Cs(R("az","AZ")^1) * lpeg.patterns.space^0
+local key = backslash * Cs(letter^1) * lpeg.patterns.space^0
local mandate = leftbrace * Cs(lpeg.patterns.balanced) * rightbrace + Cc(false)
local optional = leftbracket * Cs((1-rightbracket)^0) * rightbracket + Cc(false)
local value = optional^-1 * mandate^-1 * optional^-1 * mandate^-2
@@ -79,14 +83,17 @@ local value = optional^-1 * mandate^-1 * optional^-1 * mandate^-2
local pattern = ((Carg(1) * key * value) / register + P(1))^0
function publications.addtexentry(dataset,settings,content)
- settings = settings_to_hash(settings)
+ local current = datasets[dataset]
+ local settings = settings_to_hash(settings)
local data = {
tag = settings.tag or settings.k or "no tag",
category = settings.category or settings.t or "article",
}
- lpeg.match(pattern,content,1,data) -- can set tag too
- dataset.userdata[data.tag] = data
- dataset.luadata[data.tag] = data
- publications.markasupdated(dataset)
+ lpegmatch(pattern,content,1,data) -- can set tag too
+ current.userdata[data.tag] = data
+ current.luadata[data.tag] = data
+ publications.markasupdated(current)
return data
end
+
+commands.addbtxentry = publications.addtexentry
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index 0cd6b92cb..4f2bb757c 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index 5182fb633..c08793164 100644
--- a/tex/context/base/status-lua.pdf
+++ b/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/status-mkiv.lua b/tex/context/base/status-mkiv.lua
index 3c1866f6f..65df5dd40 100644
--- a/tex/context/base/status-mkiv.lua
+++ b/tex/context/base/status-mkiv.lua
@@ -2572,6 +2572,12 @@ return {
},
{
category = "mkiv",
+ filename = "publ-jrn",
+ loading = "always",
+ status = "pending",
+ },
+ {
+ category = "mkiv",
filename = "publ-xml",
loading = "always",
status = "pending",
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index dc2bb35bf..317ab6d92 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 11/11/14 12:12:33
+-- merge date : 11/12/14 11:56:31
do -- begin closure to overcome local limits and interference