author    Marius <mariausol@gmail.com>  2014-01-14 16:20:28 +0200
committer Marius <mariausol@gmail.com>  2014-01-14 16:20:28 +0200
commit    314b29513ffe652c5169d54aff4de001475eb1ea (patch)
tree      c9aec2f65c753beff17d4eb92d20ab352d13c1c0
parent    4a0e1a196924d87dd4b148e20236bff7ee9e4ca2 (diff)
download  context-314b29513ffe652c5169d54aff4de001475eb1ea.tar.gz
beta 2014.01.14 15:03
-rw-r--r--  context/data/scite/lexers/data/scite-context-data-metafun.lua | 2
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-mps.lua | 2
-rw-r--r--  context/data/scite/scite-context-data-metafun.properties | 86
-rw-r--r--  doc/context/manuals/allkind/mkiv-publications.bib | 34
-rw-r--r--  doc/context/manuals/allkind/mkiv-publications.tex | 1135
-rw-r--r--  doc/context/manuals/allkind/publications-en.xml | 301
-rw-r--r--  doc/context/scripts/mkiv/mtx-bibtex.html | 53
-rw-r--r--  doc/context/scripts/mkiv/mtx-bibtex.man | 30
-rw-r--r--  doc/context/scripts/mkiv/mtx-bibtex.xml | 26
-rw-r--r--  metapost/context/base/mp-tool.mpiv | 21
-rw-r--r--  scripts/context/lua/mtx-bibtex.lua | 106
-rw-r--r--  tex/context/base/bibl-tra.lua | 2
-rw-r--r--  tex/context/base/cont-new.mkiv | 2
-rw-r--r--  tex/context/base/context-version.pdf | bin 4086 -> 4085 bytes
-rw-r--r--  tex/context/base/context.mkiv | 13
-rw-r--r--  tex/context/base/font-map.lua | 30
-rw-r--r--  tex/context/base/m-oldbibtex.mkiv | 16
-rw-r--r--  tex/context/base/mult-fun.lua | 2
-rw-r--r--  tex/context/base/publ-aut.lua | 465
-rw-r--r--  tex/context/base/publ-dat.lua | 510
-rw-r--r--  tex/context/base/publ-imp-apa.mkiv | 547
-rw-r--r--  tex/context/base/publ-imp-cite.mkiv | 74
-rw-r--r--  tex/context/base/publ-imp-commands.mkiv | 15
-rw-r--r--  tex/context/base/publ-imp-definitions.mkiv | 68
-rw-r--r--  tex/context/base/publ-ini.lua | 1222
-rw-r--r--  tex/context/base/publ-ini.mkiv | 849
-rw-r--r--  tex/context/base/publ-oth.lua | 146
-rw-r--r--  tex/context/base/publ-tra.lua | 130
-rw-r--r--  tex/context/base/publ-tra.mkiv | 26
-rw-r--r--  tex/context/base/publ-usr.lua | 91
-rw-r--r--  tex/context/base/publ-usr.mkiv | 2
-rw-r--r--  tex/context/base/publ-xml.mkiv | 114
-rw-r--r--  tex/context/base/s-abr-01.tex | 1
-rw-r--r--  tex/context/base/status-files.pdf | bin 24538 -> 24760 bytes
-rw-r--r--  tex/context/base/status-lua.pdf | bin 228348 -> 229094 bytes
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua | 2
36 files changed, 6041 insertions, 82 deletions
diff --git a/context/data/scite/lexers/data/scite-context-data-metafun.lua b/context/data/scite/lexers/data/scite-context-data-metafun.lua
index 1ca02de97..50b9ecec4 100644
--- a/context/data/scite/lexers/data/scite-context-data-metafun.lua
+++ b/context/data/scite/lexers/data/scite-context-data-metafun.lua
@@ -1,4 +1,4 @@
return {
- ["commands"]={ "sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian", "tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos", "invsin", "invcos", "acosh", "asinh", "sinh", "cosh", "paired", "tripled", "unitcircle", "fulldiamond", "unitdiamond", "fullsquare", "llcircle", "lrcircle", "urcircle", "ulcircle", "tcircle", "bcircle", "lcircle", "rcircle", "lltriangle", "lrtriangle", "urtriangle", "ultriangle", "smoothed", "cornered", "superellipsed", "randomized", "squeezed", "enlonged", "shortened", "punked", "curved", "unspiked", "simplified", "blownup", "stretched", "enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged", "crossed", "laddered", "randomshifted", "interpolated", "paralleled", "cutends", "peepholed", "llenlarged", "lrenlarged", "urenlarged", "ulenlarged", "llmoved", "lrmoved", "urmoved", "ulmoved", "rightarrow", "leftarrow", "centerarrow", "boundingbox", "innerboundingbox", "outerboundingbox", "pushboundingbox", "popboundingbox", "bottomboundary", "leftboundary", "topboundary", "rightboundary", "xsized", "ysized", "xysized", "sized", "xyscaled", "intersection_point", "intersection_found", "penpoint", "bbwidth", "bbheight", "withshade", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor", "withshading", "shadedinto", "withcircularshade", "withlinearshade", "cmyk", "spotcolor", "multitonecolor", "namedcolor", "drawfill", "undrawfill", "inverted", "uncolored", "softened", "grayed", "greyed", "onlayer", "along", "graphictext", "loadfigure", "externalfigure", "withmask", "figure", "register", "bitmapimage", "colordecimals", "ddecimal", "dddecimal", "ddddecimal", "textext", "thetextext", "rawtextext", "textextoffset", "verbatim", "thelabel", "label", "autoalign", "transparent", "withtransparency", "property", "properties", "withproperties", "asgroup", "infont", "set_linear_vector", "linear_shade", "define_linear_shade", "define_circular_linear_shade", "define_sampled_linear_shade", "set_circular_vector", "circular_shade", "define_circular_shade", "define_circular_linear_shade", "define_sampled_circular_shade", "space", "CRLF", "grayscale", "greyscale", "withgray", "withgrey", "colorpart", "readfile", "clearxy", "unitvector", "center", "epsed", "anchored", "originpath", "infinite", "break", "xstretched", "ystretched", "snapped", "pathconnectors", "function", "constructedpath", "constructedpairs", "punkedfunction", "curvedfunction", "tightfunction", "punkedpath", "curvedpath", "tightpath", "punkedpairs", "curvedpairs", "tightpairs", "evenly", "oddly", "condition", "pushcurrentpicture", "popcurrentpicture", "arrowpath", "tensecircle", "roundedsquare", "colortype", "whitecolor", "blackcolor", "normalfill", "normaldraw", "visualizepaths", "naturalizepaths", "drawboundary", "drawwholepath", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox", "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels", "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions", "undashed", "decorated", "redecorated", "undecorated", "passvariable", "passarrayvariable", "tostring", "format", "formatted", "startpassingvariable", "stoppassingvariable" },
+ ["commands"]={ "sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian", "tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos", "invsin", "invcos", "invtan", "acosh", "asinh", "sinh", "cosh", "paired", "tripled", "unitcircle", "fulldiamond", "unitdiamond", "fullsquare", "llcircle", "lrcircle", "urcircle", "ulcircle", "tcircle", "bcircle", "lcircle", "rcircle", "lltriangle", "lrtriangle", "urtriangle", "ultriangle", "smoothed", "cornered", "superellipsed", "randomized", "squeezed", "enlonged", "shortened", "punked", "curved", "unspiked", "simplified", "blownup", "stretched", "enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged", "crossed", "laddered", "randomshifted", "interpolated", "paralleled", "cutends", "peepholed", "llenlarged", "lrenlarged", "urenlarged", "ulenlarged", "llmoved", "lrmoved", "urmoved", "ulmoved", "rightarrow", "leftarrow", "centerarrow", "boundingbox", "innerboundingbox", "outerboundingbox", "pushboundingbox", "popboundingbox", "bottomboundary", "leftboundary", "topboundary", "rightboundary", "xsized", "ysized", "xysized", "sized", "xyscaled", "intersection_point", "intersection_found", "penpoint", "bbwidth", "bbheight", "withshade", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor", "withshading", "shadedinto", "withcircularshade", "withlinearshade", "cmyk", "spotcolor", "multitonecolor", "namedcolor", "drawfill", "undrawfill", "inverted", "uncolored", "softened", "grayed", "greyed", "onlayer", "along", "graphictext", "loadfigure", "externalfigure", "withmask", "figure", "register", "bitmapimage", "colordecimals", "ddecimal", "dddecimal", "ddddecimal", "textext", "thetextext", "rawtextext", "textextoffset", "verbatim", "thelabel", "label", "autoalign", "transparent", "withtransparency", "property", "properties", "withproperties", "asgroup", "infont", "set_linear_vector", "linear_shade", "define_linear_shade", "define_circular_linear_shade", "define_sampled_linear_shade", "set_circular_vector", "circular_shade", "define_circular_shade", "define_circular_linear_shade", "define_sampled_circular_shade", "space", "CRLF", "grayscale", "greyscale", "withgray", "withgrey", "colorpart", "readfile", "clearxy", "unitvector", "center", "epsed", "anchored", "originpath", "infinite", "break", "xstretched", "ystretched", "snapped", "pathconnectors", "function", "constructedpath", "constructedpairs", "punkedfunction", "curvedfunction", "tightfunction", "punkedpath", "curvedpath", "tightpath", "punkedpairs", "curvedpairs", "tightpairs", "evenly", "oddly", "condition", "pushcurrentpicture", "popcurrentpicture", "arrowpath", "tensecircle", "roundedsquare", "colortype", "whitecolor", "blackcolor", "normalfill", "normaldraw", "visualizepaths", "naturalizepaths", "drawboundary", "drawwholepath", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox", "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels", "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions", "undashed", "decorated", "redecorated", "undecorated", "passvariable", "passarrayvariable", "tostring", "format", "formatted", "startpassingvariable", "stoppassingvariable" },
["internals"]={ "nocolormodel", "greycolormodel", "graycolormodel", "rgbcolormodel", "cmykcolormodel", "shadefactor", "textextoffset", "normaltransparent", "multiplytransparent", "screentransparent", "overlaytransparent", "softlighttransparent", "hardlighttransparent", "colordodgetransparent", "colorburntransparent", "darkentransparent", "lightentransparent", "differencetransparent", "exclusiontransparent", "huetransparent", "saturationtransparent", "colortransparent", "luminositytransparent", "metapostversion", "maxdimensions" },
} \ No newline at end of file
diff --git a/context/data/scite/lexers/scite-context-lexer-mps.lua b/context/data/scite/lexers/scite-context-lexer-mps.lua
index 96c5e9c3c..f0d88eb3b 100644
--- a/context/data/scite/lexers/scite-context-lexer-mps.lua
+++ b/context/data/scite/lexers/scite-context-lexer-mps.lua
@@ -98,7 +98,7 @@ local number = token('number', number)
local grouping = token('grouping', S("()[]{}")) -- can be an option
local special = token('special', S("#()[]{}<>=:\"")) -- or else := <> etc split
local texlike = token('warning', P("\\") * cstokentex^1)
-local extra = token('extra', S("`~%^&_-+*/\'|\\"))
+local extra = token('extra', P("+-+") + P("++") + S("`~%^&_-+*/\'|\\"))
local nested = P { leftbrace * (V(1) + (1-rightbrace))^0 * rightbrace }
local texlike = token('embedded', P("\\") * (P("MP") + P("mp")) * mptoken^1)
diff --git a/context/data/scite/scite-context-data-metafun.properties b/context/data/scite/scite-context-data-metafun.properties
index c0b080982..9381b4f8d 100644
--- a/context/data/scite/scite-context-data-metafun.properties
+++ b/context/data/scite/scite-context-data-metafun.properties
@@ -3,49 +3,49 @@ sqr log ln exp \
inv pow pi radian tand \
cotd sin cos tan cot \
atan asin acos invsin invcos \
-acosh asinh sinh cosh paired \
-tripled unitcircle fulldiamond unitdiamond fullsquare \
-llcircle lrcircle urcircle ulcircle tcircle \
-bcircle lcircle rcircle lltriangle lrtriangle \
-urtriangle ultriangle smoothed cornered superellipsed \
-randomized squeezed enlonged shortened punked \
-curved unspiked simplified blownup stretched \
-enlarged leftenlarged topenlarged rightenlarged bottomenlarged \
-crossed laddered randomshifted interpolated paralleled \
-cutends peepholed llenlarged lrenlarged urenlarged \
-ulenlarged llmoved lrmoved urmoved ulmoved \
-rightarrow leftarrow centerarrow boundingbox innerboundingbox \
-outerboundingbox pushboundingbox popboundingbox bottomboundary leftboundary \
-topboundary rightboundary xsized ysized xysized \
-sized xyscaled intersection_point intersection_found penpoint \
-bbwidth bbheight withshade withlinearshading withcircularshading \
-withfromshadecolor withtoshadecolor withshading shadedinto withcircularshade \
-withlinearshade cmyk spotcolor multitonecolor namedcolor \
-drawfill undrawfill inverted uncolored softened \
-grayed greyed onlayer along graphictext \
-loadfigure externalfigure withmask figure register \
-bitmapimage colordecimals ddecimal dddecimal ddddecimal \
-textext thetextext rawtextext textextoffset verbatim \
-thelabel label autoalign transparent withtransparency \
-property properties withproperties asgroup infont \
-set_linear_vector linear_shade define_linear_shade define_circular_linear_shade define_sampled_linear_shade \
-set_circular_vector circular_shade define_circular_shade define_circular_linear_shade define_sampled_circular_shade \
-space CRLF grayscale greyscale withgray \
-withgrey colorpart readfile clearxy unitvector \
-center epsed anchored originpath infinite \
-break xstretched ystretched snapped pathconnectors \
-function constructedpath constructedpairs punkedfunction curvedfunction \
-tightfunction punkedpath curvedpath tightpath punkedpairs \
-curvedpairs tightpairs evenly oddly condition \
-pushcurrentpicture popcurrentpicture arrowpath tensecircle roundedsquare \
-colortype whitecolor blackcolor normalfill normaldraw \
-visualizepaths naturalizepaths drawboundary drawwholepath visualizeddraw \
-visualizedfill draworigin drawboundingbox drawpath drawpoint \
-drawpoints drawcontrolpoints drawcontrollines drawpointlabels drawlineoptions \
-drawpointoptions drawcontroloptions drawlabeloptions draworiginoptions drawboundoptions \
-drawpathoptions resetdrawoptions undashed decorated redecorated \
-undecorated passvariable passarrayvariable tostring format \
-formatted startpassingvariable stoppassingvariable
+invtan acosh asinh sinh cosh \
+paired tripled unitcircle fulldiamond unitdiamond \
+fullsquare llcircle lrcircle urcircle ulcircle \
+tcircle bcircle lcircle rcircle lltriangle \
+lrtriangle urtriangle ultriangle smoothed cornered \
+superellipsed randomized squeezed enlonged shortened \
+punked curved unspiked simplified blownup \
+stretched enlarged leftenlarged topenlarged rightenlarged \
+bottomenlarged crossed laddered randomshifted interpolated \
+paralleled cutends peepholed llenlarged lrenlarged \
+urenlarged ulenlarged llmoved lrmoved urmoved \
+ulmoved rightarrow leftarrow centerarrow boundingbox \
+innerboundingbox outerboundingbox pushboundingbox popboundingbox bottomboundary \
+leftboundary topboundary rightboundary xsized ysized \
+xysized sized xyscaled intersection_point intersection_found \
+penpoint bbwidth bbheight withshade withlinearshading \
+withcircularshading withfromshadecolor withtoshadecolor withshading shadedinto \
+withcircularshade withlinearshade cmyk spotcolor multitonecolor \
+namedcolor drawfill undrawfill inverted uncolored \
+softened grayed greyed onlayer along \
+graphictext loadfigure externalfigure withmask figure \
+register bitmapimage colordecimals ddecimal dddecimal \
+ddddecimal textext thetextext rawtextext textextoffset \
+verbatim thelabel label autoalign transparent \
+withtransparency property properties withproperties asgroup \
+infont set_linear_vector linear_shade define_linear_shade define_circular_linear_shade \
+define_sampled_linear_shade set_circular_vector circular_shade define_circular_shade define_circular_linear_shade \
+define_sampled_circular_shade space CRLF grayscale greyscale \
+withgray withgrey colorpart readfile clearxy \
+unitvector center epsed anchored originpath \
+infinite break xstretched ystretched snapped \
+pathconnectors function constructedpath constructedpairs punkedfunction \
+curvedfunction tightfunction punkedpath curvedpath tightpath \
+punkedpairs curvedpairs tightpairs evenly oddly \
+condition pushcurrentpicture popcurrentpicture arrowpath tensecircle \
+roundedsquare colortype whitecolor blackcolor normalfill \
+normaldraw visualizepaths naturalizepaths drawboundary drawwholepath \
+visualizeddraw visualizedfill draworigin drawboundingbox drawpath \
+drawpoint drawpoints drawcontrolpoints drawcontrollines drawpointlabels \
+drawlineoptions drawpointoptions drawcontroloptions drawlabeloptions draworiginoptions \
+drawboundoptions drawpathoptions resetdrawoptions undashed decorated \
+redecorated undecorated passvariable passarrayvariable tostring \
+format formatted startpassingvariable stoppassingvariable
keywordclass.metafun.internals=\
nocolormodel greycolormodel graycolormodel rgbcolormodel \
diff --git a/doc/context/manuals/allkind/mkiv-publications.bib b/doc/context/manuals/allkind/mkiv-publications.bib
new file mode 100644
index 000000000..e94f43202
--- /dev/null
+++ b/doc/context/manuals/allkind/mkiv-publications.bib
@@ -0,0 +1,34 @@
+@book{demo-001,
+ author = "Hans Hagen",
+ title = "\BIBTEX, the \CONTEXT\ way",
+ year = "2013",
+}
+
+@book{demo-002,
+ crossref = "demo-001"
+ year = "2014",
+}
+
+@book{demo-003,
+ author = "Hans Hagen and Ton Otten",
+ title = "Typesetting education documents",
+ year = "1996",
+ comment = "a non-existing document",
+}
+
+@book{demo-004,
+ author = "Luigi Scarso",
+ title = "Designing high speed trains",
+ year = "2021",
+ comment = "still to be published",
+}
+
+@book{demo-005,
+ author = "author",
+ title = "title",
+ year = "year",
+ serial = "serial",
+ doi = "doi",
+ url = "url",
+ pages = "pages"
+}
diff --git a/doc/context/manuals/allkind/mkiv-publications.tex b/doc/context/manuals/allkind/mkiv-publications.tex
new file mode 100644
index 000000000..a92b2b287
--- /dev/null
+++ b/doc/context/manuals/allkind/mkiv-publications.tex
@@ -0,0 +1,1135 @@
+% language=uk engine=luajittex
+
+% criterium: all + sorttype=cite => citex before rest
+% criterium: all + sorttype=database => database order
+% criterium: used
+%
+% numbering: label, short, indexinlist, indexused
+%
+% maybeyear
+%
+% \cite[data][whatever]
+
+% \showframe
+
+\usemodule[abr-02]
+\usemodule[set-11]
+
+\loadsetups[publications-en.xml] \enablemode[interface:setup:defaults]
+
+% \input publ-tmp.mkiv
+
+\setupbodyfont
+ [dejavu,10pt]
+
+\setuphead
+ [chapter]
+ [header=high,
+ style=\bfc,
+ color=darkmagenta]
+
+\setuplayout
+ [topspace=2cm,
+ bottomspace=1cm,
+ header=0cm,
+ width=middle,
+ height=middle]
+
+\setupwhitespace
+ [big]
+
+\setuptyping
+ [color=darkmagenta]
+
+\setuptyping
+ [keeptogether=yes]
+
+\setuptype
+ [color=darkcyan]
+
+\setupfootertexts
+ [pagenumber]
+
+\setupMPgraphics
+ [mpy=\jobname.mpy]
+
+\setupinteraction
+ [state=start,
+ color=darkcyan,
+ contrastcolor=darkyellow]
+
+\starttext
+
+\startMPpage
+
+ StartPage ;
+
+ % input "mkiv-publications.mpy" ;
+
+ picture pic ; pic := image (
+ path pth ; pth := ((0,0) for i=1 step 2 until 20 : -- (i,1) -- (i+1,0) endfor) ;
+ for i=0 upto 9 : draw pth shifted (0,2*i) ; endfor ;
+ ) ;
+
+ % picture btx ; btx := textext("\ssbf BIBTEX") ;
+ % picture ctx ; ctx := textext("\ssbf THE CONTEXT WAY") ;
+ picture btx ; btx := image(graphictext("\ssbf BIBTEX") withfillcolor white) ;
+ picture ctx ; ctx := image(graphictext("\ssbf THE CONTEXT WAY") withfillcolor white) ;
+
+ pic := pic shifted - llcorner pic ;
+ btx := btx shifted - llcorner btx ;
+ ctx := ctx shifted - llcorner ctx ;
+
+ pic := pic xysized (PaperWidth,PaperHeight) ;
+ btx := btx xsized (2PaperWidth/3) shifted (.25PaperWidth,.15PaperHeight) ;
+ ctx := ctx xsized (2PaperWidth/3) shifted (.25PaperWidth,.075PaperHeight) ;
+
+ fill Page withcolor \MPcolor{darkcyan} ;
+
+ draw pic withcolor \MPcolor{darkmagenta} ;
+ draw btx withcolor \MPcolor{lightgray} ;
+ draw ctx withcolor \MPcolor{lightgray} ;
+
+ % draw boundingbox btx ;
+ % draw boundingbox ctx ;
+
+ StopPage ;
+
+\stopMPpage
+
+
+\startfrontmatter
+
+\starttitle[title=Contents]
+ \placelist[chapter,section][color=black]
+\stoptitle
+
+\startchapter[title=Introduction]
+
+This manual describes how \MKIV\ handles bibliographies. Support in \CONTEXT\
+started in \MKII for \BIBTEX, using a module written by Taco Hoekwater. Later his
+code was adapted to \MKIV, but because users demanded more, I decided that
+reimplementing made more sense than patching. In particular, through the use of
+\LUA, the \BIBTEX\ data files can easily be parsed directly, thus liberating
+\CONTEXT\ from the dependency on an external \BIBTEX\ executable. The CritEd
+project (by Thomas Schmitz, Alan Braslau, Luigi Scarso and myself) was a good
+reason to undertake this rewrite. As part of that project users were invited to
+come up with ideas about extensions. Not all of them are (yet) honored, but the
+rewrite makes more functionality possible.
+
+This manual is dedicated to Taco Hoekwater who in a previous century implemented
+the first \BIBTEX\ module and saw it morph into a \TEX||\LUA\ hybrid in this
+century. The fact that there was support for bibliographies made it possible for
+users to use \CONTEXT\ in an academic environment, dominated by bibliographic
+databases encoded in the \BIBTEX\ format.
+
+\startlines
+Hans Hagen
+PRAGMA ADE
+Hasselt NL
+\stoplines
+
+\stopchapter
+
+\stopfrontmatter
+
+\startbodymatter
+
+\startchapter[title=The database]
+
+The \BIBTEX\ format is rather popular in the \TEX\ community and even with its
+shortcomings it will stay around for a while. Many publication websites can
+export and many tools are available to work with this database format. It is
+rather simple and looks a bit like a \LUA\ table. Unfortunately the content can
+be polluted with non|-|standardized \TEX\ commands, which complicates pre- or
+postprocessing outside \TEX. In that sense a \BIBTEX\ database is often not coded
+neutrally. Some limitations, like the use of commands to encode accented
+characters, have their roots in the \ASCII\ world and can be bypassed by using
+\UTF\ instead (as handled somewhat in \LATEX\ through extensions such as \type
+{bibtex8}).
+
+The normal way to deal with a bibliography is to refer to entries using a unique
+tag or key. When a list of entries is typeset, this reference can be used for
+linking purposes. The typeset list can be processed and sorted using the \type
+{bibtex} program that converts the database into something more \TEX\ friendly (a
+\type {.bbl} file). I never used the program myself (nor bibliographies) so I
+will not go into too much detail here, if only because all I say can be wrong.
+
+In \CONTEXT\ we no longer use the \type {bibtex} program: we just use
+database files and deal with the necessary manipulations directly in \CONTEXT.
+One or more such databases can be used and combined with additional entries
+defined within the document. We can have several such datasets active at the same
+time.
+
+A \BIBTEX\ file looks like this:
+
+\starttyping
+@Article{sometag,
+ author = "An Author and Another One",
+ title = "A hopefully meaningful title",
+ journal = maps,
+ volume = "25",
+ number = "2",
+ pages = "5--9",
+ month = mar,
+ year = "2013",
+ ISSN = "1234-5678",
+}
+\stoptyping
+
+Normally a value is given between quotes (or curly brackets) but single words are
+also OK (there is no real benefit in not using quotes, so we advise using them
+consistently). There can be many more fields and instead of strings one can use
+predefined shortcuts. The title, for example, quite often contains \TEX\ macros.
+Some fields, like \type {pages}, have funny characters such as the endash
+(typically entered as \type {--}), so we have a mixture of data and typesetting
+directives. If you are covering non||English references, you often need
+characters that are not in the \ASCII\ subset, but \CONTEXT\ is quite happy with
+\UTF. If your database file uses old|-|fashioned \TEX\ accent commands then these
+will automatically be converted to \UTF\ internally. Commands (macros) are
+converted to an indirect call, which is quite robust.
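+
+For instance, these two (made|-|up) author fields end up identical internally,
+because the accent commands in the first one are converted to \UTF:
+
+\starttyping
+author = {Rub\'en D{\"u}rer},
+author = {Rubén Dürer},
+\stoptyping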
+
+The \BIBTEX\ files are loaded into memory as a \LUA\ table. They can also be
+converted to \XML\ so that we can access them in a more flexible way, but that
+is a subject for specialists.
+
+In the old \MKII\ setup we have two kinds of entries: the ones that come from the
+\BIBTEX\ run and user supplied ones. We no longer rely on \BIBTEX\ output but we
+do still support the user supplied definitions. These were in fact prepared in a
+way that suits the processing of \BIBTEX\ generated entries. The next variant
+reflects the \CONTEXT\ recoding of the old \BIBTEX\ output.
+
+\starttyping
+\startpublication[k=Hagen:Second,t=article,a={Hans Hagen},y=2013,s=HH01]
+ \artauthor[]{Hans}[H.]{}{Hagen}
+ \arttitle{Who knows more?}
+ \journal{MyJournal}
+ \pubyear{2013}
+ \month{8}
+ \volume{1}
+ \issue{3}
+ \issn{1234-5678}
+ \pages{123--126}
+\stoppublication
+\stoptyping
+
+The split \type {\artauthor} fields are collapsed into a single \type {author}
+field as we deal with the splitting later when it gets parsed in \LUA. The \type
+{\artauthor} syntax is only kept around for backward compatibility with the
+previous use of \BIBTEX.
+
+In the new setup we support these variants as well:
+
+\starttyping
+\startpublication[k=Hagen:Third,t=article]
+ \author{Hans Hagen}
+ \title{Who knows who?}
+ ...
+\stoppublication
+\stoptyping
+
+and
+
+\starttyping
+\startpublication[tag=Hagen:Third,category=article]
+ \author{Hans Hagen}
+ \title{Who knows who?}
+ ...
+\stoppublication
+\stoptyping
+
+and
+
+\starttyping
+\startpublication
+ \tag{Hagen:Third}
+ \category{article}
+ \author{Hans Hagen}
+ \title{Who knows who?}
+ ...
+\stoppublication
+\stoptyping
+
+Because internally the entries are \LUA\ tables, we also support loading of \LUA\
+based definitions:
+
+\starttyping
+return {
+ ["Hagen:First"] = {
+ author = "Hans Hagen",
+ category = "article",
+ issn = "1234-5678",
+ issue = "3",
+ journal = "MyJournal",
+ month = "8",
+ pages = "123--126",
+ tag = "Hagen:First",
+ title = "Who knows nothing?",
+ volume = "1",
+ year = "2013",
+ },
+}
+\stoptyping
+
+Files set up like this can be loaded too. The following \XML\ input is rather
+close to this, and is also accepted as input.
+
+\starttyping
+<?xml version="2.0" standalone="yes" ?>
+<bibtex>
+ <entry tag="Hagen:First" category="article">
+ <field name="author">Hans Hagen</field>
+ <field name="category">article</field>
+ <field name="issn">1234-5678</field>
+ <field name="issue">3</field>
+ <field name="journal">MyJournal</field>
+ <field name="month">8</field>
+ <field name="pages">123--126</field>
+ <field name="tag">Hagen:First</field>
+ <field name="title">Who knows nothing?</field>
+ <field name="volume">1</field>
+ <field name="year">2013</field>
+ </entry>
+</bibtex>
+\stoptyping
+
+{\em Todo: Add some remarks about loading EndNote and RIS formats, but first we
+need to complete the tag mapping (on Alan's plate).}
+
+So the user has a rather wide choice of formats for bibliography database
+files.
+
+You can load more data than you actually need. Only entries that are referred to
+explicitly through the \type {\cite} and \type {\nocite} commands will be shown
+in lists. We will cover these details later.
+
+\stopchapter
+
+\startchapter[title=Commands in entries]
+
+One unfortunate aspect commonly found in \BIBTEX\ files is that they often
+contain \TEX\ commands. Even worse is that there is no standard on what these
+commands can be and what they mean, at least not formally, as \BIBTEX\ is a
+program intended to be used with many variants of \TEX\ style: plain, \LATEX, and
+others. This means that we need to define our use of these typesetting commands.
+However, in most cases, they are just abbreviations or font switches and these
+are often known. Therefore, \CONTEXT\ will try to resolve them before reporting
+an issue. In the log file there is a list of commands that have been seen in the
+loaded databases. For instance, loading \type {tugboat.bib} gives a long list of
+commands of which we show a small set here:
+
+\starttyping
+publications > start used btx commands
+
+publications > standard CONTEXT 1 known
+publications > standard ConTeXt 4 known
+publications > standard TeXLive 3 KNOWN
+publications > standard eTeX 1 known
+publications > standard hbox 6 known
+publications > standard sltt 1 unknown
+
+publications > stop used btxcommands
+\stoptyping
+
+You can define unknown commands, or overload existing definitions in the
+following way:
+
+\starttyping
+\definebtxcommand\TUB {TUGboat}
+\definebtxcommand\sltt{\tt}
+\definebtxcommand\<#1>{\type{#1}}
+\stoptyping
+
+Unknown commands do not stall processing, but their names are then typeset in a
+mono|-|spaced font, so they should stand out when proofreading. You can
+access the commands with \type {\btxcommand {...}}, as in:
+
+\startbuffer
+commands like \btxcommand{MySpecialCommand} are handled in an indirect way
+\stopbuffer
+
+\typebuffer
+
+As this is an undefined command we get: \quotation {\inlinebuffer}.
+
+??
+
+\stopchapter
+
+\startchapter[title=Datasets]
+
+Normally in a document you will use only one bibliographic database, whether or
+not distributed over multiple files. Nevertheless we also support multiple
+databases, which is why we talk of datasets instead. A dataset is loaded with the
+\type {\usebtxdataset} command. Although currently it is not necessary to define
+a (default) dataset, it is best to do so because in the future we might provide
+more options. Here are some examples:
+
+\starttyping
+\definebtxdataset[standard]
+
+\usebtxdataset[standard][tugboat.bib]
+\usebtxdataset[standard][mtx-bibtex-output.xml]
+\usebtxdataset[standard][test-001-btx-standard.lua]
+\stoptyping
+
+These three suffixes are understood by the loader. Here the dataset has the name
+\type {standard} and the three database files are merged, where later entries
+with the same tag overload previous ones. Definitions in the document source
+(coded in \TEX\ speak) are also added, and they are saved for successive runs.
+This means that if you load and define entries, they will already be known at the
+next run, so that references to them are independent of when loading and defining
+take place.
+
+\showsetup{setupbtxdataset}
+
+\showsetup{definebtxdataset}
+
+\showsetup{usebtxdataset}
+
+In this document we use some example databases, so let's load one of them now:
+
+\startbuffer
+\definebtxdataset[example]
+
+\usebtxdataset[example][mkiv-publications.bib]
+\stopbuffer
+
+\typebuffer \getbuffer
+
+You can ask for an overview of entries in a dataset with:
+
+\startbuffer
+\showbtxdatasetfields[example]
+\stopbuffer
+
+\typebuffer
+
+This gives:
+
+\getbuffer
+
+You can set the current active dataset with
+
+\starttyping
+\setbtxdataset[standard]
+\stoptyping
+
+but most publication|-|related commands accept optional arguments that denote the
+dataset, and references to entries can be prefixed with a dataset identifier. More
+about that later.
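+
+For instance, a citation can mention the dataset explicitly, as we will also see
+in the chapter on citations:
+
+\starttyping
+\cite[authoryear][example::demo-001]
+\stoptyping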
+
+\stopchapter
+
+\startchapter[title=Renderings]
+
+A list of publications can be rendered at any place in the document. A
+database can be much larger than needed for a document. The same is true for
+the fields that make up an entry. Here is the list of fields that are currently
+handled, but of course there can be additional ones:
+
+\startalignment[flushleft,verytolerant,nothyphenated]
+\startluacode
+local fields = publications.tracers.fields
+
+for i=1,#fields do
+ if i > 1 then
+ context(", ")
+ end
+ context.type(fields[i])
+end
+\stopluacode
+\stopalignment
+
+If you want to see what publications are in the database, the easiest way is to
+ask for a complete list:
+
+\startbuffer
+\definebtxrendering
+ [example]
+ [dataset=example,
+ method=local,
+ alternative=apa]
+\placelistofpublications % \placebtxrendering
+ [example]
+ [criterium=all]
+\stopbuffer
+
+\typebuffer
+
+This gives:
+
+\getbuffer
+
+The rendering itself is somewhat complex to set up because we have not only many
+different standards but also many fields that can be set up. This means that
+there are several commands involved. Often there is a prescribed style to render
+bibliographic descriptions, for example \type {apa}. A rendering is set up and
+defined with:
+
+\showsetup[setupbtxrendering]
+%showrootvalues[btxrendering]
+\showsetup[definebtxrendering]
+
+And a list of such descriptions is generated with:
+
+\showsetup[placebtxrendering]
+
+A dataset can have all kinds of entries:
+
+\startalignment[flushleft,verytolerant,nothyphenated]
+\startluacode
+ local categories = publications.tracers.categories
+
+ for i=1,#categories do
+ if i > 1 then
+ context(", ")
+ end
+ context.type(categories[i])
+ end
+\stopluacode
+\stopalignment
+
+Each has its own rendering variant. To keep things simple we have their settings
+separated. However, these settings are shared by all rendering alternatives. In
+practice this is seldom a problem in a publication, as only one rendering
+alternative will be active. If this is not sufficient, you can always group local
+settings in a setup and hook that into the specific rendering.
+
+\showsetup[setupbtxlistvariant]
+%showrootvalues[btxlistvariant]
+\showsetup[definebtxlistvariant]
+
+Examples of list variants are:
+
+\startluacode
+ local variants = publications.tracers.listvariants
+
+ for i=1,#variants do
+ context.showinstancevalues( { "btxlistvariant" }, { variants[i] })
+ end
+\stopluacode
+
+The exact rendering of list entries is determined by the \type {alternative} key
+and defaults to \type {apa} which uses definitions from \type
+{publ-imp-apa.mkiv}. If you look at that file you will see that each category has
+its own setup. You may also notice that additional tests are needed to make sure
+that empty fields don't trigger separators and such.
+
+% \showsetup{setuplists}
+
+There are a couple of accessors and helpers to get the job done. When you want to
+fetch a field from the current entry you use \type {\btxfield}. In most cases
+you want to make sure this field has a value, for instance because you don't want
+fences or punctuation that belongs to a field.
+
+\starttyping
+\btxdoif {title} {
+ \bold{\btxfield{title}},
+}
+\stoptyping
+
+There are three test macros:
+
+\starttyping
+\btxdoifelse{fieldname}{action when found}{action when not found}
+\btxdoif {fieldname}{action when found}
+\btxdoifnot {fieldname} {action when not found}
+\stoptyping
+
+An extra conditional is available for testing interactivity:
+
+\starttyping
+\btxdoifelseinteraction{action when true}{action when false}
+\stoptyping
+
+In addition there is also a conditional \type {\btxinteractive} which is
+more efficient, although in practice efficiency is not so important here.
+
+There are three commands to flush data:
+
+\starttabulate[|||] % Funny usage here! Couldn't tabulate work without
+                    % even specifying the number of columns?
+\NC \type {\btxfield} \NC fetch an explicit field (e.g. \type {year}) \NC \NR
+\NC \type {\btxdetail} \NC fetch a derived field (e.g. \type {short}) \NC \NR
+\NC \type {\btxflush} \NC fetch a derived or explicit field \NC \NR
+\stoptabulate
+
+Normally you can use \type {\btxfield} or \type {\btxflush}; derived fields,
+like analyzed author fields, are flushed in a special way.
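+
+For instance (a minimal sketch, with \type {short} being a derived field and
+\type {year} an explicit one):
+
+\starttyping
+\btxdetail{short} % the derived short tag
+\btxfield {year}  % the explicit year field
+\btxflush {short} % derived when available, explicit otherwise
+\stoptyping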
+
+You can improve readability by using setups, for instance:
+
+\starttyping
+\btxdoifelse {author} {
+ \btxsetup{btx:apa:author:yes}
+} {
+ \btxsetup{btx:apa:author:nop}
+}
+\stoptyping
+
+Keep in mind that normally you don't need to mess with definitions like this because
+standard rendering styles are provided. These styles use a few helpers that inject symbols
+but also take care of leading and trailing spaces:
+
+\starttabulate[|||]
+\NC \type {\btxspace } \NC before \btxspace after \NC \NR
+\NC \type {\btxperiod } \NC before \btxperiod after \NC \NR
+\NC \type {\btxcomma } \NC before \btxcomma after \NC \NR
+\NC \type {\btxlparent } \NC before \btxlparent after \NC \NR
+\NC \type {\btxrparent } \NC before \btxrparent after \NC \NR
+\NC \type {\btxlbracket} \NC before \btxlbracket after \NC \NR
+\NC \type {\btxrbracket} \NC before \btxrbracket after \NC \NR
+\stoptabulate
+
+So, the previous example setup can be rewritten as:
+
+\starttyping
+\btxdoif {title} {
+ \bold{\btxfield{title}}
+ \btxcomma
+}
+\stoptyping
+
+There is a special command for rendering a (combination) of authors:
+
+\starttyping
+\btxflushauthor{author}
+\btxflushauthor{editor}
+\btxflushauthor[inverted]{editor}
+\stoptyping
+
+Instead of the last one you can also use:
+
+\starttyping
+\btxflushauthorinverted{editor}
+\stoptyping
+
+You can use a (configurable) default or pass directives. Valid directives are:
+
+\starttabulate
+\NC \bf conversion \NC \bf rendering \NC \NR
+\HL
+\NC \type{inverted} \NC the Frog jr, Kermit \NC \NR
+\NC \type{invertedshort} \NC the Frog jr, K \NC \NR
+\NC \type{normal} \NC Kermit, the Frog, jr \NC \NR
+\NC \type{normalshort} \NC K, the Frog, jr \NC \NR
+\stoptabulate
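+
+Putting these pieces together, a simplified (and partly hypothetical) setup for
+the \type {book} category, in the spirit of the definitions in \type
+{publ-imp-apa.mkiv}, could look like this:
+
+\starttyping
+\startsetups btx:apa:book
+    \btxdoif {author} {
+        \btxflushauthor{author}
+        \btxspace
+    }
+    \btxdoif {year} {
+        \btxlparent
+        \btxfield{year}
+        \btxrparent
+    }
+    \btxdoif {title} {
+        \btxcomma
+        \bold{\btxfield{title}}
+    }
+    \btxperiod
+\stopsetups
+\stoptyping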
+
+\stopchapter
+
+\startchapter[title=Citations]
+
+Citations are references to bibliographic entries that normally show up in lists
+someplace in the document: at the end of a chapter, in an appendix, at the end of
+an article, etc. We discussed the rendering of these lists in the previous chapter.
+A citation is normally pretty short as its main purpose is to refer uniquely to a more
+detailed description. But, there are several ways to refer, which is why the citation
+subsystem is configurable and extensible. Just look at the following commands:
+
+\startbuffer
+\cite[author][example::demo-003]
+\cite[authoryear][example::demo-003]
+\cite[authoryears][example::demo-003]
+\cite[author][example::demo-003,demo-004]
+\cite[authoryear][example::demo-003,demo-004]
+\cite[authoryears][example::demo-003,demo-004]
+\cite[author][example::demo-004,demo-003]
+\cite[authoryear][example::demo-004,demo-003]
+\cite[authoryears][example::demo-004,demo-003]
+\stopbuffer
+
+\typebuffer
+
+\startlines \getbuffer \stoplines
+
+The first argument is optional.
+
+\showsetup[cite]
+
+You can tune the way a citation shows up:
+
+\startbuffer
+\setupbtxcitevariant[author] [sorttype=author,color=darkyellow]
+\setupbtxcitevariant[authoryear] [sorttype=author,color=darkyellow]
+\setupbtxcitevariant[authoryears][sorttype=author,color=darkyellow]
+
+\cite[author][example::demo-004,demo-003]
+\cite[authoryear][example::demo-004,demo-003]
+\cite[authoryears][example::demo-004,demo-003]
+\stopbuffer
+
+\typebuffer
+
+Here we sort the authors and color the citation:
+
+\startlines \getbuffer \stoplines
+
+For reasons of backward compatibility the \type {\cite} command is a bit picky
+about spaces between the two arguments, of which the first is optional. The
+\type {\citation} command does not have this restriction:
+
+\starttyping
+\citation[author] [example::demo-004,demo-003]
+\citation[authoryear] [example::demo-004,demo-003]
+\citation[authoryears][example::demo-004,demo-003]
+\stoptyping
+
+There is a whole bunch of cite options and more can be easily defined.
+
+\startluacode
+local variants = publications.tracers.citevariants
+
+context.starttabulate { "|l|p|" }
+ context.NC() context.bold("key")
+ context.NC() context.bold("rendering")
+ context.NC() context.NR() context.FL()
+ for i=1,#variants do
+ local variant = variants[i]
+ context.NC() context.type(variant)
+ context.NC() context.citation( { variant }, { "example::demo-005" })
+ context.NC() context.NR()
+ end
+context.stoptabulate()
+\stopluacode
+
+Because we are dealing with database input and because we generally need to
+manipulate entries, much of the work is delegated to \LUA. This makes it easier
+to maintain and extend the code. Of course \TEX\ still does the rendering. The
+typographic details are controlled by parameters but not all are used in all
+variants. As with most \CONTEXT\ commands, it starts out with a general setup
+command:
+
+\showsetup[setupbtxcitevariant]
+
+On top of that we can define instances that inherit either from a given parent or
+from the topmost setup.
+
+\showsetup[definebtxcitevariant]
+
+% The default values are:
+
+% \showrootvalues[btxcitevariant]
+
+But, specific variants can have them overloaded:
+
+% \showinstancevalues[setupbtxcitevariant][author]
+% \showinstancevalues[setupbtxcitevariant][authornum]
+
+\startluacode
+ local variants = publications.tracers.citevariants
+
+ for i=1,#variants do
+ context.showinstancevalues( { "btxcitevariant" }, { variants[i] })
+ end
+\stopluacode
+
+A citation variant is defined in several steps and if you really want to know
+the dirty details, you should look into the \type {publ-imp-*.mkiv} files. Here
+we stick to the concept.
+
+\starttyping
+\startsetups btx:cite:author
+ \btxcitevariant{author}
+\stopsetups
+\stoptyping
+
+You can overload such setups if needed, but that only makes sense when you cannot
+configure the rendering with parameters. The \type {\btxcitevariant} command is
+one of the built|-|in accessors and it calls out to \LUA\ where more complex
+manipulation takes place if needed. If no manipulation is known, the field with
+the same name (if found) will be flushed. A command like \type {\btxcitevariant}
+assumes that a dataset and a specific tag have been set. This is normally done in
+the wrapper macros, like \type {\cite}. For special purposes you can use these
+commands:
+
+\starttyping
+\setbtxdataset[example]
+\setbtxentry[hh2013]
+\stoptyping
+
+But don't expect too much support for such low level rendering control.
+
+Unless you use \type {criterium=all}, only publications that are cited will end up
+in the lists. You can force a citation into a list using \type {\usecitation}, for
+example:
+
+\starttyping
+\usecitation[example::demo-004,demo-003]
+\stoptyping
+
+This command has two synonyms: \type {\nocite} and \type {\nocitation} so you can
+choose whatever fits you best.
+
+\showsetup[nocite]
+
+\stopchapter
+
+\startchapter[title=The \LUA\ view]
+
+Because we manage data at the \LUA\ end it is tempting to access it there for
+other purposes. This is fine as long as you keep in mind that aspects of the
+implementation may change over time, although this is unlikely once the modules
+become stable.
+
+The entries are collected in datasets and each set has a unique name. In this
+document we have the set named \type {example}. A dataset table has several
+fields, and probably the one of most interest is the \type {luadata} field. Each
+entry in this table describes a publication:
+
+\startluacode
+ context.tocontext(publications.datasets.example.luadata["demo-001"])
+\stopluacode
+
+This is \type {publications.datasets.example.luadata["demo-001"]}. There can be
+a companion entry in the parallel \type {details} table.
+
+\startluacode
+ context.tocontext(publications.datasets.example.details["demo-001"])
+\stopluacode
+
+These details are accessed as \type
+{publications.datasets.example.details["demo-001"]} and by using a separate table
+we can overload fields in the original entry without losing the original.
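+
+A minimal sketch of such a lookup (not executed here, and assuming the \type
+{example} dataset loaded before):
+
+\starttyping
+local dataset = publications.datasets.example
+local entry   = dataset.luadata["demo-001"]         -- the original fields
+local detail  = dataset.details["demo-001"] or { }  -- the derived fields
+
+context.type(entry.title)        -- the raw title field
+context(", ")
+context.type(detail.short or "") -- the derived short tag (when present)
+\stoptyping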
+
+You can loop over the entries using regular \LUA\ code combined with \MKIV\
+helpers:
+
+\startbuffer
+local dataset = publications.datasets.example
+
+context.starttabulate { "|l|l|l|" }
+for tag, entry in table.sortedhash(dataset.luadata) do
+ local detail = dataset.details[tag] or { }
+ context.NC() context.type(tag)
+ context.NC() context(detail.short)
+ context.NC() context(entry.title)
+ context.NC() context.NR()
+end
+context.stoptabulate()
+\stopbuffer
+
+\typebuffer
+
+This results in:
+
+\ctxluabuffer
+
+\stopchapter
+
+\startchapter[title=The \XML\ view]
+
+The \type {luadata} table can be converted into an \XML\ representation. This is
+a follow|-|up on earlier experiments with an \XML|-|only approach. In the end I
+decided to stick to a \LUA\ approach and provide some simple \XML\ support in
+addition.
+
+Once a dataset is accessible as \XML\ tree, you can use the regular \type {\xml...}
+commands. We start with loading a dataset, in this case from just one file.
+
+\startbuffer
+\usebtxdataset[tugboat][tugboat.bib]
+\stopbuffer
+
+\typebuffer \getbuffer
+
+The dataset has to be converted to \XML:
+
+\startbuffer
+\convertbtxdatasettoxml[tugboat]
+\stopbuffer
+
+\typebuffer \getbuffer
+
+The tree is now accessible by its root reference \type {btx:tugboat}. If we want simple
+field access we can use a few setups:
+
+\startbuffer
+\startxmlsetups btx:initialize
+ \xmlsetsetup{#1}{bibtex|entry|field}{btx:*}
+ \xmlmain{#1}
+\stopxmlsetups
+
+\startxmlsetups btx:field
+ \xmlflushcontext{#1}
+\stopxmlsetups
+
+\xmlsetup{btx:tugboat}{btx:initialize}
+\stopbuffer
+
+\typebuffer \getbuffer
+
+The two setups are predefined in the core already, but you might want to change
+them. They are applied, for instance, in:
+
+\startbuffer
+\starttabulate[|||]
+ \NC \type {tag} \NC \xmlfirst {btx:tugboat}
+ {/bibtex/entry[string.find(@tag,'Hagen')]/attribute('tag')}
+ \NC \NR
+ \NC \type {title} \NC \xmlfirst {btx:tugboat}
+ {/bibtex/entry[string.find(@tag,'Hagen')]/field[@name='title']}
+ \NC \NR
+\stoptabulate
+\stopbuffer
+
+\typebuffer \getbuffer
+
+\startbuffer
+\startxmlsetups btx:demo
+ \xmlcommand
+ {#1}
+ {/bibtex/entry[string.find(@tag,'Hagen')][1]}{btx:table}
+\stopxmlsetups
+
+\startxmlsetups btx:table
+\starttabulate[|||]
+ \NC \type {tag} \NC \xmlatt{#1}{tag} \NC \NR
+ \NC \type {title} \NC \xmlfirst{#1}{/field[@name='title']} \NC \NR
+\stoptabulate
+\stopxmlsetups
+
+\xmlsetup{btx:tugboat}{btx:demo}
+\stopbuffer
+
+\typebuffer \getbuffer
+
+Here is another example:
+
+\startbuffer
+\startxmlsetups btx:row
+ \NC \xmlatt{#1}{tag}
+ \NC \xmlfirst{#1}{/field[@name='title']}
+ \NC \NR
+\stopxmlsetups
+
+\startxmlsetups btx:demo
+ \xmlfilter {#1} {
+ /bibtex
+ /entry[@category='article']
+ /field[@name='author' and (find(text(),'Knuth') or find(text(),'DEK'))]
+ /../command(btx:row)
+ }
+\stopxmlsetups
+
+\starttabulate[|||]
+ \xmlsetup{btx:tugboat}{btx:demo}
+\stoptabulate
+\stopbuffer
+
+\typebuffer \getbuffer
+
+A more extensive example is the following. Of course this assumes that you
+know what \XML\ support mechanisms and macros are available.
+
+\startbuffer
+\startxmlsetups btx:getkeys
+ \xmladdsortentry{btx}{#1}{\xmlfilter{#1}{/field[@name='author']/text()}}
+ \xmladdsortentry{btx}{#1}{\xmlfilter{#1}{/field[@name='year' ]/text()}}
+ \xmladdsortentry{btx}{#1}{\xmlatt{#1}{tag}}
+\stopxmlsetups
+
+\startxmlsetups btx:sorter
+ \xmlresetsorter{btx}
+ % \xmlfilter{#1}{entry/command(btx:getkeys)}
+ \xmlfilter{#1}{
+ /bibtex
+ /entry[@category='article']
+ /field[@name='author' and find(text(),'Knuth')]
+ /../command(btx:getkeys)}
+ \xmlsortentries{btx}
+ \starttabulate[||||]
+ \xmlflushsorter{btx}{btx:entry:flush}
+ \stoptabulate
+\stopxmlsetups
+
+\startxmlsetups btx:entry:flush
+ \NC \xmlfilter{#1}{/field[@name='year' ]/context()}
+ \NC \xmlatt{#1}{tag}
+ \NC \xmlfilter{#1}{/field[@name='author']/context()}
+ \NC \NR
+\stopxmlsetups
+
+\xmlsetup{btx:tugboat}{btx:sorter}
+\stopbuffer
+
+\typebuffer \getbuffer
+
+The original data is stored in a \LUA\ table, hashed by tag. Starting with \LUA\ 5.2
+each run of \LUA\ gets a different ordering of such a hash. In older versions, when you
+looped over a hash, the order was undefined, but the same as long as you used the same
+binary. This had the advantage that successive runs, something we often have in
+document processing, gave consistent results. In today's \LUA\ we need to do much
+more sorting of hashes before we loop, especially when we save multi||pass data.
+It is for this reason
+that the \XML\ tree is sorted by hash key by default. That way lookups (especially
+the first of a set) give consistent outcomes.
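+
+A small sketch of the difference (with \type {dataset} as in the previous
+chapter):
+
+\starttyping
+for tag, entry in pairs(dataset.luadata) do
+    -- the order can differ per run (Lua 5.2 and later)
+end
+
+for tag, entry in table.sortedhash(dataset.luadata) do
+    -- the order is stable, so multi-pass data stays consistent
+end
+\stoptyping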
+
+\stopchapter
+
+\startchapter[title=Standards]
+
+The rendering of bibliographic entries is often standardized and prescribed by
+the publisher. If you submit an article to a journal, normally it will be
+reformatted (or even re|-|keyed) and the rendering will happen at the publisher's
+end. In that case it may not matter how entries were rendered when writing the
+publication, because the publisher will do it his or her way. This means that
+most users probably will stick to the standard \APA\ rules and for them we
+provide some configuration. Because we use setups it is easy to overload
+specifics. If you really want to tweak, it is best to look in the files that
+deal with it.
+
+Many standards exist and support for other renderings may be added to the core.
+Interested users are invited to develop and to test alternate standard renderings
+according to their needs.
+
+Todo: maybe a list of categories and fields.
+
+\stopchapter
+
+\startchapter[title=Cleaning up]
+
+Although the \BIBTEX\ format is reasonably well defined, in practice there are
+many ways to organize the data. For instance, one can use predefined string
+constants that get used later on (whether or not combined with other strings). A
+string can be enclosed in curly braces or double quotes. The strings can contain
+\TEX\ commands but these are not standardized. The databases often have somewhat
+complex
+ways to deal with special characters and the use of braces in their definition is also
+not normalized.
+
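+Two entries meaning the same, as one might encounter them in the wild (a
+made|-|up example):
+
+\starttyping
+@string{ maps = "MAPS" }
+
+@article{demo-strings,
+  journal = maps # ", special issue",
+  title   = {Some {TeX}nical title},
+}
+
+@article{demo-quotes,
+  journal = "MAPS, special issue",
+  title   = "Some {TeX}nical title",
+}
+\stoptyping
+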
+The most complex to deal with are the fields that contain names of people. At some point it
+might be needed to split a combination of names into individual ones that then get split into
+title, first name, optional inbetweens, surname(s) and additional: \type {Prof. Dr. Alfred
+B. C. von Kwik Kwak Jr. II and P. Q. Olet} is just one example of this. The convention seems
+to be not to use commas but \type {and} to separate names (often each name will be specified
+as lastname, firstname).
+
+We don't see it as a challenge nor as a duty to support all kinds of messy
+definitions. Of course we try to be somewhat tolerant, but you can be sure to
+get better results if you use nicely set up, consistent databases.
+
+Todo: maybe some examples of bad.
+
+\stopchapter
+
+\startchapter[title=Transition]
+
+In the original bibliography support module usage was as follows (example taken
+from the contextgarden wiki):
+
+\starttyping
+% engine=pdftex
+
+\usemodule[bib]
+\usemodule[bibltx]
+
+\setupbibtex
+ [database=xampl]
+
+\setuppublications
+ [numbering=yes]
+
+\starttext
+ As \cite [article-full] already indicated, bibtex is a \LATEX||centric
+ program.
+
+ \completepublications
+\stoptext
+\stoptyping
+
+For \MKIV\ the modules were partly rewritten and ended up in the core so the two
+\type {\usemodule} commands are not needed there. One advantage of explicitly
+loading a module is that a job that doesn't need references to publications does
+not suffer from the associated overhead. Nowadays this overhead is negligible.
+The first
+setup command in this example is needed to bootstrap the process: it tells what
+database has to be processed by \BIBTEX\ between runs. The second setup command
+is optional. Each citation (tagged with \type {\cite}) ends up in the list of
+publications.
+
+In the new approach the code is again in the \CONTEXT\ kernel, so no modules need
+to be loaded. But, as we no longer use \BIBTEX, we don't need to set up \BIBTEX.
+Instead we define dataset(s). We also no longer set up publications with one
+command, but have split that up into rendering|-|, list|-| and cite|-|variants. The
+basic \type {\cite} command remains.
+
+\starttyping
+\definebtxdataset
+ [document]
+
+\usebtxdataset
+ [document]
+ [mybibfile.bib]
+
+\definebtxrendering
+ [document]
+
+\setupbtxrendering
+ [document]
+ [numbering=yes]
+
+\starttext
+ As \cite [article-full] already indicated, bibtex is a \LATEX||centric
+ program.
+
+ \completebtxrendering[document]
+\stoptext
+\stoptyping
+
+So, we have a few more commands to set up things. If you use just one dataset
+and rendering, the above preamble can be simplified to:
+
+\starttyping
+\usebtxdataset
+ [mybibfile.bib]
+
+\setupbtxrendering
+ [numbering=yes]
+\stoptyping
+
+But keep in mind that, compared to the old \MKII||derived method, we have moved
+some of the setup options to setting up the list and cite variants.
+
+Another difference is the use of lists. When you define a rendering, you also
+define a list. However, all entries are collected in a common list tagged \type
+{btx}. Although you will normally configure a rendering you can still set some
+properties of lists, but in that case you need to prefix the list identifier. In
+the case of the above example this is \type {btx:document}.
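+
+For instance, a minimal sketch using the generic \type {\setuplist} command:
+
+\starttyping
+\setuplist
+  [btx:document]
+  [before=\blank,
+   after=\blank]
+\stoptyping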
+
+\stopchapter
+
+\startchapter[title=\MLBIBTEX]
+
+Todo: how to plug in \MLBIBTEX\ for sorting and other advanced operations.
+
+\stopchapter
+
+\startchapter[title=Extensions]
+
+As \TEX\ and \LUA\ are both open and accessible in \CONTEXT\ it is possible to
+extend the functionality of the bibliography related code. For instance, you can add
+extra loaders.
+
+\starttyping
+function publications.loaders.myformat(dataset,filename)
+ local t = { }
+ -- Load data from 'filename' and convert it to a Lua table 't' with
+ -- the tag as hash key and fields conforming to the luadata table
+ -- format.
+ publications.loaders.lua(dataset,t)
+end
+\stoptyping
+
+This then permits loading a database (into a dataset) with the command:
+
+\starttyping
+\usebtxdataset[standard][myfile.myformat]
+\stoptyping
+
+The \type {myformat} suffix is recognized automatically. If you want to use another
+suffix, you can do this:
+
+\starttyping
+\usebtxdataset[standard][myformat::myfile.txt]
+\stoptyping
+
+\stopchapter
+
+\stopbodymatter
+
+\stoptext
+
+
diff --git a/doc/context/manuals/allkind/publications-en.xml b/doc/context/manuals/allkind/publications-en.xml
new file mode 100644
index 000000000..79b31453a
--- /dev/null
+++ b/doc/context/manuals/allkind/publications-en.xml
@@ -0,0 +1,301 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!-- bibliographies -->
+
+<cd:interface xmlns:cd="http://www.pragma-ade.com/commands" name="publications" language="en" version="2013.12.22">
+
+ <!-- datasets -->
+
+ <cd:command name="setupbtxdataset" file="publ-ini.mkiv" category="publications" hash="btxdataset">
+ <cd:sequence>
+ <cd:string value="setupbtxdataset"/>
+ </cd:sequence>
+ <cd:arguments>
+ <cd:keywords n="1" optional="yes">
+ <cd:constant type="cd:name"/>
+ </cd:keywords>
+ <cd:assignments n="2" optional="yes">
+ <!-- todo -->
+ </cd:assignments>
+ </cd:arguments>
+ </cd:command>
+
+ <cd:command name="definebtxdataset" file="publ-ini.mkiv" category="publications" hash="btxdataset">
+ <cd:sequence>
+ <cd:string value="definebtxdataset"/>
+ </cd:sequence>
+ <cd:arguments>
+ <cd:keywords n="1">
+ <cd:constant type="cd:name"/>
+ </cd:keywords>
+ <cd:assignments n="2" optional="yes">
+ <cd:inherit name="setupbtxdataset" n="2"/>
+ </cd:assignments>
+ </cd:arguments>
+ </cd:command>
+
+ <cd:command name="usebtxdataset" file="publ-ini.mkiv" category="publications" hash="btxdataset">
+ <cd:sequence>
+ <cd:string value="usebtxdataset"/>
+ </cd:sequence>
+ <cd:arguments>
+ <cd:keywords n="1">
+ <cd:constant type="cd:name"/>
+ </cd:keywords>
+ <cd:keywords n="2">
+ <cd:constant type="cd:file"/>
+ </cd:keywords>
+ </cd:arguments>
+ </cd:command>
+
+ <!-- rendering -->
+
+ <cd:command name="setupbtxrendering" file="publ-ini.mkiv" category="publications" hash="btxrendering">
+ <cd:sequence>
+ <cd:string value="setupbtxrendering"/>
+ </cd:sequence>
+ <cd:arguments>
+ <cd:keywords n="1" optional="yes">
+ <cd:constant type="cd:name"/>
+ </cd:keywords>
+ <cd:assignments n="2">
+ <cd:parameter name="alternative">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="dataset">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="setups">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="method">
+ <cd:constant type="local"/>
+ <cd:constant type="global"/>
+ <cd:constant type="none"/>
+ <cd:constant type="force"/>
+ </cd:parameter>
+ <cd:parameter name="sorttype">
+ <cd:constant type="short"/>
+ <cd:constant type="reference"/>
+ <cd:constant type="dataset"/>
+ <cd:constant type="default"/>
+ </cd:parameter>
+ <cd:parameter name="criterium">
+ <cd:constant type="cd:text"/> <!-- todo -->
+ </cd:parameter>
+ <cd:parameter name="refcommand">
+ <cd:constant type="cd:text"/> <!-- todo -->
+ </cd:parameter>
+ <cd:parameter name="numbering">
+ <cd:constant type="yes"/>
+ <cd:constant type="cite"/>
+ </cd:parameter>
+ <cd:parameter name="width">
+ <cd:constant type="cd:dimension"/>
+ <cd:constant type="auto"/>
+ </cd:parameter>
+ <cd:parameter name="distance">
+ <cd:constant type="cd:dimension"/>
+ </cd:parameter>
+ </cd:assignments>
+ </cd:arguments>
+ </cd:command>
+
+ <cd:command name="definebtxrendering" file="publ-ini.mkiv" category="publications" hash="btxrendering">
+ <cd:sequence>
+ <cd:string value="definebtxrendering"/>
+ </cd:sequence>
+ <cd:arguments>
+ <cd:keywords n="1">
+ <cd:constant type="cd:name"/>
+ </cd:keywords>
+ <cd:keywords n="2" optional="yes">
+ <cd:constant type="cd:name"/>
+ </cd:keywords>
+ <cd:assignments n="3" optional="yes">
+ <cd:inherit name="setupbtxrendering" n="2"/>
+ </cd:assignments>
+ </cd:arguments>
+ </cd:command>
+
+ <cd:command name="placebtxrendering" file="publ-ini.mkiv" category="publications" hash="btxrendering">
+ <cd:sequence>
+ <cd:string value="placebtxrendering"/>
+ </cd:sequence>
+ </cd:command>
+
+ <!-- lists -->
+
+ <cd:command name="setupbtxlistvariant" file="publ-ini.mkiv" category="publications" hash="btxlistvariant">
+ <cd:sequence>
+ <cd:string value="setupbtxlistvariant"/>
+ </cd:sequence>
+ <cd:arguments>
+ <cd:keywords n="1" optional="yes">
+ <cd:constant type="cd:name"/>
+ </cd:keywords>
+ <cd:assignments n="2">
+ <cd:parameter name="namesep">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="lastnamesep">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="finalnamesep">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="firstnamesep">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="juniorsep">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="vonsep">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="surnamesep">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="surnamejuniorsep">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="juniorjuniorsep">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="surnamefirstnamesep">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="surnameinitialsep">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="etallimit">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="etaldisplay">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="etaltext">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="monthconversion">
+ <cd:constant type="number"/>
+ <cd:constant type="month"/>
+ <cd:constant type="month:mnem"/>
+ </cd:parameter>
+ <cd:parameter name="authorconversion">
+ <cd:constant type="normal"/>
+ <cd:constant type="inverted"/>
+ <cd:constant type="normalshort"/>
+ <cd:constant type="invertedshort"/>
+ </cd:parameter>
+ </cd:assignments>
+ </cd:arguments>
+ </cd:command>
+
+ <cd:command name="definebtxlistvariant" file="publ-ini.mkiv" category="publications" hash="btxlistvariant">
+ <cd:sequence>
+ <cd:string value="definebtxlistvariant"/>
+ </cd:sequence>
+ <cd:arguments>
+ <cd:keywords n="1">
+ <cd:constant type="cd:name"/>
+ </cd:keywords>
+ </cd:arguments>
+ </cd:command>
+
+ <!-- variants -->
+
+ <cd:command name="setupbtxcitevariant" file="publ-ini.mkiv" category="publications" hash="btxcitevariant">
+ <cd:sequence>
+ <cd:string value="setupbtxcitevariant"/>
+ </cd:sequence>
+ <cd:arguments>
+ <cd:keywords n="1" optional="yes">
+ <cd:constant type="cd:name"/>
+ </cd:keywords>
+ <cd:assignments n="2">
+ <cd:parameter name="alternative">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="setups">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="interaction">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="andtext">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="otherstext">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="compress">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="putsep">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="lastputsep">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="inbetween">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="right">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="middle">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ <cd:parameter name="left">
+ <cd:constant type="cd:text"/>
+ </cd:parameter>
+ </cd:assignments>
+ </cd:arguments>
+ </cd:command>
+
+ <cd:command name="definebtxcitevariant" file="publ-ini.mkiv" category="publications" hash="btxcitevariant">
+ <cd:sequence>
+ <cd:string value="definebtxcitevariant"/>
+ </cd:sequence>
+ <cd:arguments>
+ <cd:keywords n="1">
+ <cd:constant type="cd:name"/>
+ </cd:keywords>
+ <cd:keywords n="2" optional="yes">
+ <cd:constant type="cd:name"/>
+ </cd:keywords>
+ <cd:assignments n="3" optional="yes">
+          <cd:inherit name="setupbtxcitevariant" n="2"/>
+ </cd:assignments>
+ </cd:arguments>
+ </cd:command>
+
+    <!-- referring -->
+
+ <cd:command name="cite" file="publ-ini.mkiv" category="publications">
+ <cd:sequence>
+ <cd:string value="cite"/>
+ </cd:sequence>
+ <cd:arguments>
+ <cd:keywords n="1" optional="yes">
+ <cd:constant type="cd:name"/>
+ </cd:keywords>
+ <cd:keywords n="2">
+ <cd:constant type="cd:name"/>
+ </cd:keywords>
+ </cd:arguments>
+ </cd:command>
+
+ <cd:command name="nocite" file="publ-ini.mkiv" category="publications">
+ <cd:sequence>
+ <cd:string value="nocite"/>
+ </cd:sequence>
+ <cd:arguments>
+ <cd:keywords n="1">
+ <cd:constant type="cd:name"/>
+ </cd:keywords>
+ </cd:arguments>
+ </cd:command>
+
+</cd:interface>
diff --git a/doc/context/scripts/mkiv/mtx-bibtex.html b/doc/context/scripts/mkiv/mtx-bibtex.html
new file mode 100644
index 000000000..ba1591b4b
--- /dev/null
+++ b/doc/context/scripts/mkiv/mtx-bibtex.html
@@ -0,0 +1,53 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+<!-- compare with lmx framework variant -->
+
+<!--
+ filename : context-base.xml
+ comment : companion to mtx-server-ctx-startup.tex
+ author : Hans Hagen, PRAGMA-ADE, Hasselt NL
+ copyright: PRAGMA ADE / ConTeXt Development Team
+ license : see context related readme files
+-->
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
+ <head>
+ <title>bibtex helpers</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <style type="text/css">
+ body { color: #FFFFFF; background-color: #808080; font-family: optima, verdana, futura, "lucida sans", arial, geneva, helvetica, sans; font-size: 12px; line-height: 18px; } a:link, a:active, a:visited { color: #FFFFFF; } a.dir-view:link, a.dir-view:active, a.dir-view:visited { color: #FFFFFF; text-decoration: underline; } .valid { color: #00FF00; } .invalid { color: #FF0000; } button, .commonlink, .smallbutton { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; border-color: #7F7F7F; border-style: solid; border-width: .125ex; background-color: #FFFFFF; padding: .5ex; } .smallbutton { width: 1em; } a.commonlink:link, a.commonlink:active, a.commonlink:visited, a.smalllink:link, a.smalllink:active, a.smalllink:visited { font-weight: bold; font-size: 12px; text-decoration: none; color: #000000; } h1, .title { font-style: normal; font-weight: normal; font-size: 18px; line-height: 18px; margin-bottom: 20px; } h2, .subtitle { font-style: normal; font-weight: normal; font-size: 12px; margin-top: 18px; margin-bottom: 18px; } table { line-height: 18px; font-size: 12px; margin: 0; } th { font-weight: bold; text-align: left; padding-bottom: 6px; } .tc { font-weight: bold; text-align: left; } p, li { max-width: 60em; } .empty-line { margin-top: 4px; } .more-room { margin-right: 1.5em; } .much-more-room { margin-right: 3em; } #main { position: absolute; left: 10%; top: 10%; right: 10%; bottom: 10%; z-index: 2; width: 80%; height: 80%; padding: 0%; margin: 0%; overflow: auto; border-style: none; border-width: 0; background-color: #3F3F3F; } #main-settings { margin: 12px; x_max-width: 60em; line-height: 18px; font-size: 12px; } #left { position: absolute; top : 10%; left: 0%; bottom: 0%; right: 90%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; } #right { position: absolute; top : 0%; left: 90%; bottom: 10%; right: 0%; z-index: 1; width: 10%; height: 90%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #4F6F6F; _margin-left: -15px; } #bottom { position: absolute; left: 10%; right: 0%; top: 90%; bottom: 0%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top { position: absolute; left: 0%; right: 10%; top: 0%; bottom: 90%; z-index: 1; width: 90%; height: 10%; padding: 0%; margin: 0%; font-size: 16px; border-style: none; border-width: 0; background-color: #6F6F8F; } #top-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #top-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: right; vertical-align: middle; } #bottom-one { position: absolute; bottom: 50%; width: 100%; buggedheight: 100%; } #bottom-two { position: relative; margin-bottom: -9px; margin-left: 12px; margin-right: 12px; line-height: 18px; text-align: left; vertical-align: middle; } #left-one { position: absolute; width: 100%; buggedheight: 100%; } #left-two { position: relative; margin-top: 12px; line-height: 18px; text-align: center; vertical-align: top; } #right-one { display: table; height: 100%; width: 100%; } #right-two { display: table-row; height: 100%; width: 100%; } #right-three { display: table-cell; width: 100%; vertical-align: bottom; _position: absolute; _top: 100%; } #right-four { text-align: center; margin-bottom: 2ex; _position: relative; _top: -100%; } #more-top { position: 
absolute; top: 0%; left: 90%; bottom: 90%; right: 0%; z-index: 3; width: 10%; height: 10%; padding: 0%; margin: 0%; border-style: none; border-width: 0; } #more-top-settings { text-align: center; } #more-right-settings { margin-right: 12px; margin-left: 12px; line-height: 18px; font-size: 10px; text-align: center; } #right-safari { _display: table; width: 100%; height: 100%; }
+ </style>
+ <style type="text/css">
+ </style>
+ </head>
+ <body>
+ <div id="top"> <div id="top-one">
+ <div id="top-two">bibtex helpers </div>
+ </div>
+ </div>
+ <div id="bottom"> <div id="bottom-one">
+ <div id="bottom-two">wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl</div>
+ </div>
+ </div>
+ <div id="left"></div>
+ <div id="right"></div>
+ <div id="main">
+ <div id='main-settings'>
+ <h1>Command line options</h1>
+<table>
+ <tr><th style="width: 10em">flag</th><th style="width: 8em">value</th><th>description</th></tr>
+ <tr><th/><td/><td/></tr>
+ <tr><th>--toxml</th><td></td><td>convert bibtex database(s) to xml</td></tr>
+ <tr><th>--tolua</th><td></td><td>convert bibtex database(s) to lua</td></tr>
+ </table>
+<br/>
+<h1>Example</h1>
+<tt>mtxrun --script bibtex --tolua bibl-001.bib</tt>
+<br/><tt>mtxrun --script bibtex --tolua --simple bibl-001.bib</tt>
+<br/><tt>mtxrun --script bibtex --toxml bibl-001.bib bibl-002.bib bibl-003.bib biblio.xml</tt>
+<br/><br/> </div>
+ </div>
+ </body>
+ </html>
diff --git a/doc/context/scripts/mkiv/mtx-bibtex.man b/doc/context/scripts/mkiv/mtx-bibtex.man
new file mode 100644
index 000000000..cedf41b8b
--- /dev/null
+++ b/doc/context/scripts/mkiv/mtx-bibtex.man
@@ -0,0 +1,30 @@
+.TH "mtx-bibtex" "1" "01-01-2014" "version 1.00" "bibtex helpers"
+.SH NAME
+.B mtx-bibtex
+.SH SYNOPSIS
+.B mtxrun --script bibtex [
+.I OPTIONS ...
+.B ] [
+.I FILENAMES
+.B ]
+.SH DESCRIPTION
+.B bibtex helpers
+.SH OPTIONS
+.TP
+.B --toxml
+convert bibtex database(s) to xml
+.TP
+.B --tolua
+convert bibtex database(s) to lua
+.SH AUTHOR
+More information about ConTeXt and the tools that come with it can be found at:
+
+
+.B "maillist:"
+ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+
+.B "webpage:"
+http://www.pragma-ade.nl / http://tex.aanhet.net
+
+.B "wiki:"
+http://contextgarden.net
diff --git a/doc/context/scripts/mkiv/mtx-bibtex.xml b/doc/context/scripts/mkiv/mtx-bibtex.xml
new file mode 100644
index 000000000..b33e1809c
--- /dev/null
+++ b/doc/context/scripts/mkiv/mtx-bibtex.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-bibtex</entry>
+ <entry name="detail">bibtex helpers</entry>
+ <entry name="version">1.00</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="toxml"><short>convert bibtex database(s) to xml</short></flag>
+ <flag name="tolua"><short>convert bibtex database(s) to lua</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <examples>
+ <category>
+ <title>Example</title>
+ <subcategory>
+ <example><command>mtxrun --script bibtex --tolua bibl-001.bib</command></example>
+ <example><command>mtxrun --script bibtex --tolua --simple bibl-001.bib</command></example>
+ <example><command>mtxrun --script bibtex --toxml bibl-001.bib bibl-002.bib bibl-003.bib biblio.xml</command></example>
+ </subcategory>
+ </category>
+ </examples>
+</application>
diff --git a/metapost/context/base/mp-tool.mpiv b/metapost/context/base/mp-tool.mpiv
index 672a051c2..53e163e07 100644
--- a/metapost/context/base/mp-tool.mpiv
+++ b/metapost/context/base/mp-tool.mpiv
@@ -289,10 +289,14 @@ vardef set_outer_boundingbox text q = % obsolete
setbounds q to outerboundingbox q;
enddef;
-%D Some missing functions can be implemented rather
-%D straightforward:
+%D Some missing functions can be implemented rather straightforwardly (thanks to
+%D Taco and others):
-numeric Pi ; Pi := 3.1415926 ;
+pi := 3.14159265358979323846 ; radian := 180/pi ; % 2pi*radian = 360 ;
+
+% let +++ = ++ ;
+
+numeric Pi ; Pi := pi ; % for some old compatibility reasons i guess
vardef sqr primary x = x*x enddef ;
vardef log primary x = if x=0: 0 else: mlog(x)/mlog(10) fi enddef ;
@@ -302,15 +306,6 @@ vardef inv primary x = if x=0: 0 else: x**-1 fi enddef ;
vardef pow (expr x,p) = x**p enddef ;
-vardef asin primary x = x+(x**3)/6+3(x**5)/40 enddef ;
-vardef acos primary x = asin(-x) enddef ;
-vardef atan primary x = x-(x**3)/3+(x**5)/5-(x**7)/7 enddef ;
-vardef tand primary x = sind(x)/cosd(x) enddef ;
-
-%D Here are Taco Hoekwater's alternatives (but vardef'd and primaried).
-
-pi := 3.1415926 ; radian := 180/pi ; % 2pi*radian = 360 ;
-
vardef tand primary x = sind(x)/cosd(x) enddef ;
vardef cotd primary x = cosd(x)/sind(x) enddef ;
@@ -321,9 +316,11 @@ vardef cot primary x = cos(x)/sin(x) enddef ;
vardef asin primary x = angle((1+-+x,x)) enddef ;
vardef acos primary x = angle((x,1+-+x)) enddef ;
+vardef atan primary x = angle(1,x) enddef ;
vardef invsin primary x = (asin(x))/radian enddef ;
vardef invcos primary x = (acos(x))/radian enddef ;
+vardef invtan primary x = (atan(x))/radian enddef ;
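+% Quick sanity check (informal, not part of the library): atan returns degrees and
+% invtan radians, e.g. atan(1) = angle(1,1) = 45 while invtan(1) = 45/radian = pi/4.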
vardef acosh primary x = ln(x+(x+-+1)) enddef ;
vardef asinh primary x = ln(x+(x++1)) enddef ;
diff --git a/scripts/context/lua/mtx-bibtex.lua b/scripts/context/lua/mtx-bibtex.lua
new file mode 100644
index 000000000..c81fd596f
--- /dev/null
+++ b/scripts/context/lua/mtx-bibtex.lua
@@ -0,0 +1,106 @@
+if not modules then modules = { } end modules ['mtx-bibtex'] = {
+ version = 1.002,
+ comment = "this script is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE",
+ license = "see context related readme files"
+}
+
+local helpinfo = [[
+<?xml version="1.0"?>
+<application>
+ <metadata>
+ <entry name="name">mtx-bibtex</entry>
+ <entry name="detail">bibtex helpers</entry>
+ <entry name="version">1.00</entry>
+ </metadata>
+ <flags>
+ <category name="basic">
+ <subcategory>
+ <flag name="toxml"><short>convert bibtex database(s) to xml</short></flag>
+ <flag name="tolua"><short>convert bibtex database(s) to lua</short></flag>
+ </subcategory>
+ </category>
+ </flags>
+ <examples>
+ <category>
+ <title>Example</title>
+ <subcategory>
+ <example><command>mtxrun --script bibtex --tolua bibl-001.bib</command></example>
+ <example><command>mtxrun --script bibtex --tolua --simple bibl-001.bib</command></example>
+ <example><command>mtxrun --script bibtex --toxml bibl-001.bib bibl-002.bib bibl-003.bib biblio.xml</command></example>
+ </subcategory>
+ </category>
+ </examples>
+</application>
+]]
+
+local application = logs.application {
+ name = "mtx-bibtex",
+ banner = "bibtex helpers",
+ helpinfo = helpinfo,
+}
+
+local report = application.report
+
+require("publ-dat")
+
+scripts = scripts or { }
+scripts.bibtex = scripts.bibtex or { }
+
+function scripts.bibtex.toxml(files)
+ local instance = bibtex.new()
+ local target = "mtx-bibtex-output.xml"
+ for i=1,#files do
+ local filename = files[i]
+ local filetype = file.suffix(filename)
+ if filetype == "xml" then
+ target = filename
+ elseif filetype == "bib" then
+ bibtex.load(instance,filename)
+ else
+ -- not supported
+ end
+ end
+ bibtex.converttoxml(instance,true)
+ instance.shortcuts = nil
+ instance.luadata = nil
+ xml.save(instance.xmldata,target)
+end
+
+function scripts.bibtex.tolua(files)
+ local instance = bibtex.new()
+ local target = "mtx-bibtex-output.lua"
+ for i=1,#files do
+ local filename = files[i]
+ local filetype = file.suffix(filename)
+ if filetype == "lua" then
+ target = filename
+ elseif filetype == "bib" then
+ bibtex.load(instance,filename)
+ else
+ -- not supported
+ end
+ end
+ instance.shortcuts = nil
+ instance.xmldata = nil
+ bibtex.analyze(instance)
+ if environment.arguments.simple then
+ table.save(target,instance)
+ else
+ table.save(target,instance.luadata)
+ end
+end
+
+if environment.arguments.toxml then
+ scripts.bibtex.toxml(environment.files)
+elseif environment.arguments.tolua then
+ scripts.bibtex.tolua(environment.files)
+elseif environment.arguments.exporthelp then
+ application.export(environment.arguments.exporthelp,environment.files[1])
+else
+ application.help()
+end
+
+-- scripts.bibtex.toxml { "tugboat.bib" }
+-- scripts.bibtex.tolua { "tugboat.bib" }
diff --git a/tex/context/base/bibl-tra.lua b/tex/context/base/bibl-tra.lua
index 82f8dc2aa..75dc3e86f 100644
--- a/tex/context/base/bibl-tra.lua
+++ b/tex/context/base/bibl-tra.lua
@@ -10,7 +10,7 @@ if not modules then modules = { } end modules ['bibl-tra'] = {
-- temporary hack, needed for transition
-if not punlications then
+if not publications then
local hacks = utilities.storage.allocate()
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 582e3d2bc..ca49730d4 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2014.01.11 23:58}
+\newcontextversion{2014.01.14 15:03}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf
index c54d5b971..4e4621e23 100644
--- a/tex/context/base/context-version.pdf
+++ b/tex/context/base/context-version.pdf
Binary files differ
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index ccb237732..847290b0f 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -28,7 +28,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2014.01.11 23:58}
+\edef\contextversion{2014.01.14 15:03}
\edef\contextkind {beta}
%D For those who want to use this:
@@ -479,14 +479,15 @@
% old bibtex support: (will be m-oldbibtex.mkiv)
-\loadmarkfile{bibl-bib}
-\loadmarkfile{bibl-tra}
+% \loadmarkfile{bibl-bib}
+% \loadmarkfile{bibl-tra}
% new bibtex support:
-% \loadmarkfile{publ-ini}
-% \loadmarkfile{publ-tra}
-% \loadmarkfile{publ-xml}
+\loadmarkfile{publ-ini}
+\loadmarkfile{publ-tra}
+\loadmarkfile{publ-xml}
+\loadmarkfile{publ-old}
%loadmarkfile{x-xtag} % no longer preloaded
diff --git a/tex/context/base/font-map.lua b/tex/context/base/font-map.lua
index ce724b973..f74e13e81 100644
--- a/tex/context/base/font-map.lua
+++ b/tex/context/base/font-map.lua
@@ -66,21 +66,6 @@ local function makenameparser(str)
end
end
--- local parser = makenameparser("Japan1")
--- local parser = makenameparser()
--- local function test(str)
--- local b, a = lpegmatch(parser,str)
--- print((a and table.serialize(b)) or b)
--- end
--- test("a.sc")
--- test("a")
--- test("uni1234")
--- test("uni1234.xx")
--- test("uni12349876")
--- test("u123400987600")
--- test("index1234")
--- test("Japan1.123")
-
local function tounicode16(unicode,name)
if unicode < 0x10000 then
return format("%04X",unicode)
@@ -346,3 +331,18 @@ function mappings.addtounicode(data,filename)
report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns)
end
end
+
+-- local parser = makenameparser("Japan1")
+-- local parser = makenameparser()
+-- local function test(str)
+-- local b, a = lpegmatch(parser,str)
+-- print((a and table.serialize(b)) or b)
+-- end
+-- test("a.sc")
+-- test("a")
+-- test("uni1234")
+-- test("uni1234.xx")
+-- test("uni12349876")
+-- test("u123400987600")
+-- test("index1234")
+-- test("Japan1.123")
diff --git a/tex/context/base/m-oldbibtex.mkiv b/tex/context/base/m-oldbibtex.mkiv
new file mode 100644
index 000000000..08c23e7cc
--- /dev/null
+++ b/tex/context/base/m-oldbibtex.mkiv
@@ -0,0 +1,16 @@
+%D \module
+%D [ file=m-oldbibtex,
+%D version=2013.12.12, % based on bibl-apa.tex and later xml variant
+%D        title=Fallback on old method,
+%D subtitle=Publications,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is therefore copyrighted
+%C by \PRAGMA. See mreadme.pdf for details.
+
+\loadmarkfile{bibl-bib}
+\loadmarkfile{bibl-tra}
+
+\endinput
diff --git a/tex/context/base/mult-fun.lua b/tex/context/base/mult-fun.lua
index 2101b95e9..0f5bd8ace 100644
--- a/tex/context/base/mult-fun.lua
+++ b/tex/context/base/mult-fun.lua
@@ -17,7 +17,7 @@ return {
--
"sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian",
"tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos",
- "invsin", "invcos", "acosh", "asinh", "sinh", "cosh",
+ "invsin", "invcos", "invtan", "acosh", "asinh", "sinh", "cosh",
"paired", "tripled",
"unitcircle", "fulldiamond", "unitdiamond", "fullsquare",
-- "halfcircle", "quartercircle",
diff --git a/tex/context/base/publ-aut.lua b/tex/context/base/publ-aut.lua
new file mode 100644
index 000000000..57abd3f32
--- /dev/null
+++ b/tex/context/base/publ-aut.lua
@@ -0,0 +1,465 @@
+if not modules then modules = { } end modules ['publ-aut'] = {
+ version = 1.001,
+    comment   = "this module is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if not characters then
+ dofile(resolvers.findfile("char-def.lua"))
+ dofile(resolvers.findfile("char-ini.lua"))
+end
+
+local chardata = characters.data
+
+local concat = table.concat
+local lpeg = lpeg
+local utfchar = utf.char
+
+local publications = publications or { }
+
+local datasets = publications.datasets or { }
+publications.datasets = datasets
+
+publications.authors = publications.authors or { }
+local authors = publications.authors
+
+local P, C, V, Cs, Ct, lpegmatch, lpegpatterns = lpeg.P, lpeg.C, lpeg.V, lpeg.Cs, lpeg.Ct, lpeg.match, lpeg.patterns
+
+-- local function makesplitter(separator)
+-- return Ct { "start",
+-- start = (Cs((V("outer") + (1-separator))^1) + separator^1)^1,
+-- start = Cs(V("outer")) + (Cs((V("inner") + (1-separator))^1) + separator^1)^1,
+-- outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^0) * (P("}")/""),
+-- inner = P("{") * ((V("inner") + P(1-P("}")))^0) * P("}"),
+-- }
+-- end
+
+local space = P(" ")
+local comma = P(",")
+local firstcharacter = lpegpatterns.utf8byte
+
+-- local andsplitter = lpeg.tsplitat(space^1 * "and" * space^1)
+-- local commasplitter = lpeg.tsplitat(space^0 * comma * space^0)
+-- local spacesplitter = lpeg.tsplitat(space^1)
+
+local p_and = space^1 * "and" * space^1
+local p_comma = space^0 * comma * space^0
+local p_space = space^1
+
+local andsplitter = Ct { "start",
+ start = (Cs((V("inner") + (1-p_and))^1) + p_and)^1,
+ inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
+}
+
+local commasplitter = Ct { "start",
+ start = Cs(V("outer")) + (Cs((V("inner") + (1-p_comma))^1) + p_comma)^1,
+ outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^1) * (P("}")/""),
+ inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
+}
+
+local spacesplitter = Ct { "start",
+ start = Cs(V("outer")) + (Cs((V("inner") + (1-p_space))^1) + p_space)^1,
+ outer = (P("{")/"") * ((V("inner") + P(1-P("}")))^1) * (P("}")/""),
+ inner = P("{") * ((V("inner") + P(1-P("}")))^1) * P("}"),
+}
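+
+-- A rough illustration of what these splitters return (an informal sketch, not a
+-- test suite); braced groups protect embedded separators:
+--
+-- lpegmatch(andsplitter,  "Hans Hagen and Taco Hoekwater") -- { "Hans Hagen", "Taco Hoekwater" }
+-- lpegmatch(andsplitter,  "{Barnes and Noble} and Others") -- { "{Barnes and Noble}", "Others" }
+-- lpegmatch(commasplitter,"Hoekwater, Taco")               -- { "Hoekwater", "Taco" }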
+
+local function is_upper(str)
+ local first = lpegmatch(firstcharacter,str)
+ local okay = chardata[first]
+ return okay and okay.category == "lu"
+end
+
+local cache = { } -- 33% reuse on tugboat.bib
+local nofhits = 0
+local nofused = 0
+
+local function splitauthorstring(str)
+ if not str then
+ return
+ end
+ nofused = nofused + 1
+ local authors = cache[str]
+ if authors then
+ -- hit 1
+ -- print("hit 1",author,nofhits,nofused,math.round(100*nofhits/nofused))
+ return { authors } -- we assume one author
+ end
+ local authors = lpegmatch(andsplitter,str)
+ for i=1,#authors do
+ local author = authors[i]
+ local detail = cache[author]
+ if detail then
+ -- hit 2
+ -- print("hit 2",author,nofhits,nofused,math.round(100*nofhits/nofused))
+ end
+ if not detail then
+ local firstnames, vons, surnames, initials, juniors, words
+ local split = lpegmatch(commasplitter,author)
+ local n = #split
+ if n == 1 then
+ -- First von Last
+ words = lpegmatch(spacesplitter,author)
+-- inspect(words)
+ firstnames, vons, surnames = { }, { }, { }
+ local i, n = 1, #words
+ while i <= n do
+ local w = words[i]
+ if is_upper(w) then
+ firstnames[#firstnames+1], i = w, i + 1
+ else
+ break
+ end
+ end
+ while i <= n do
+ local w = words[i]
+ if is_upper(w) then
+ break
+ else
+ vons[#vons+1], i = w, i + 1
+ end
+ end
+ if i < n then
+ while i <= n do
+ surnames[#surnames+1], i = words[i], i + 1
+ end
+ elseif #vons == 0 then
+ surnames[1] = firstnames[#firstnames]
+ firstnames[#firstnames] = nil
+ else
+ -- mess
+ end
+ -- safeguard
+ if #surnames == 0 then
+ firstnames = { }
+ vons = { }
+ surnames = { author }
+ end
+ elseif n == 2 then
+ -- von Last, First
+ words = lpegmatch(spacesplitter,split[2])
+ surnames = lpegmatch(spacesplitter,split[1])
+ firstnames, vons = { }, { }
+ local i, n = 1, #words
+ while i <= n do
+ local w = words[i]
+ if is_upper(w) then
+ firstnames[#firstnames+1], i = w, i + 1
+ else
+ break
+ end
+ end
+ while i <= n do
+ vons[#vons+1], i = words[i], i + 1
+ end
+ else
+            -- von Last, Jr, First
+ firstnames = lpegmatch(spacesplitter,split[1])
+ juniors = lpegmatch(spacesplitter,split[2])
+ surnames = lpegmatch(spacesplitter,split[3])
+ if n > 3 then
+ -- error
+ end
+ end
+ if #surnames == 0 then
+ surnames[1] = firstnames[#firstnames]
+ firstnames[#firstnames] = nil
+ end
+ if firstnames then
+ initials = { }
+ for i=1,#firstnames do
+ initials[i] = utfchar(lpegmatch(firstcharacter,firstnames[i]))
+ end
+ end
+ detail = {
+ original = author,
+ firstnames = firstnames,
+ vons = vons,
+ surnames = surnames,
+ initials = initials,
+ juniors = juniors,
+ }
+ cache[author] = detail
+ nofhits = nofhits + 1
+ end
+ authors[i] = detail
+ end
+ return authors
+end
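+
+-- A sketch of the detail tables this produces (the exact fields depend on the
+-- "First von Last" versus "von Last, First" form of the input):
+--
+-- splitauthorstring("Hagen, Hans")
+--
+-- { { original = "Hagen, Hans",
+--     firstnames = { "Hans" }, initials = { "H" },
+--     vons = { }, surnames = { "Hagen" } } }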
+
+-- local function splitauthors(dataset,tag,field)
+-- local entries = datasets[dataset]
+-- local luadata = entries.luadata
+-- if not luadata then
+-- return { }
+-- end
+-- local entry = luadata[tag]
+-- if not entry then
+-- return { }
+-- end
+-- return splitauthorstring(entry[field])
+-- end
+
+local function the_initials(initials,symbol)
+ local t, symbol = { }, symbol or "."
+ for i=1,#initials do
+ t[i] = initials[i] .. symbol
+ end
+ return t
+end
+
+-- authors
+
+local settings = { }
+
+-- local defaultsettings = {
+-- firstnamesep = " ",
+-- vonsep = " ",
+-- surnamesep = " ",
+-- juniorsep = " ",
+-- surnamejuniorsep = ", ",
+-- juniorjuniorsep = ", ",
+-- surnamefirstnamesep = ", ",
+-- surnameinitialsep = ", ",
+-- namesep = ", ",
+-- lastnamesep = " and ",
+-- finalnamesep = " and ",
+-- etallimit = 1000,
+-- etaldisplay = 1000,
+-- etaltext = "",
+-- }
+
+local defaultsettings = {
+ firstnamesep = [[\btxlistvariantparameter{firstnamesep}]],
+ vonsep = [[\btxlistvariantparameter{vonsep}]],
+ surnamesep = [[\btxlistvariantparameter{surnamesep}]],
+ juniorsep = [[\btxlistvariantparameter{juniorsep}]],
+ surnamejuniorsep = [[\btxlistvariantparameter{surnamejuniorsep}]],
+ juniorjuniorsep = [[\btxlistvariantparameter{juniorjuniorsep}]],
+ surnamefirstnamesep = [[\btxlistvariantparameter{surnamefirstnamesep}]],
+ surnameinitialsep = [[\btxlistvariantparameter{surnameinitialsep}]],
+ namesep = [[\btxlistvariantparameter{namesep}]],
+ lastnamesep = [[\btxlistvariantparameter{lastnamesep}]],
+ finalnamesep = [[\btxlistvariantparameter{finalnamesep}]],
+ --
+ etaltext = [[\btxlistvariantparameter{etaltext}]],
+ --
+ etallimit = 1000,
+ etaldisplay = 1000,
+}
+
+function authors.setsettings(s)
+end
+
+authors.splitstring = splitauthorstring
+
+-- [firstnames] [firstnamesep] [vons] [vonsep] [surnames] [juniors] [surnamesep] (Taco, von Hoekwater, jr)
+
+function authors.normal(author,settings)
+ local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors
+ local result, settings = { }, settings or defaultsettings
+ if firstnames and #firstnames > 0 then
+ result[#result+1] = concat(firstnames," ")
+ result[#result+1] = settings.firstnamesep or defaultsettings.firstnamesep
+ end
+ if vons and #vons > 0 then
+ result[#result+1] = concat(vons," ")
+ result[#result+1] = settings.vonsep or defaultsettings.vonsep
+ end
+ if surnames and #surnames > 0 then
+ result[#result+1] = concat(surnames," ")
+ if juniors and #juniors > 0 then
+ result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
+ result[#result+1] = concat(juniors," ")
+ end
+ elseif juniors and #juniors > 0 then
+ result[#result+1] = concat(juniors," ")
+ end
+ return concat(result)
+end
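+
+-- With the plain separators from the commented defaultsettings above (just an
+-- illustration; the active defaults expand \btxlistvariantparameter calls) an
+-- author table renders like this:
+--
+-- { firstnames = { "Taco" }, vons = { "von" },
+--   surnames = { "Hoekwater" }, juniors = { "jr" } } -> "Taco von Hoekwater, jr"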
+
+-- [initials] [initialsep] [vons] [vonsep] [surnames] [juniors] [surnamesep] (T, von Hoekwater, jr)
+
+function authors.normalshort(author,settings)
+ local initials, vons, surnames, juniors = author.initials, author.vons, author.surnames, author.juniors
+ local result, settings = { }, settings or defaultsettings
+ if initials and #initials > 0 then
+ result[#result+1] = concat(initials," ")
+ result[#result+1] = settings.initialsep or defaultsettings.initialsep
+ end
+ if vons and #vons > 0 then
+ result[#result+1] = concat(vons," ")
+ result[#result+1] = settings.vonsep or defaultsettings.vonsep
+ end
+ if surnames and #surnames > 0 then
+ result[#result+1] = concat(surnames," ")
+ if juniors and #juniors > 0 then
+ result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
+ result[#result+1] = concat(juniors," ")
+ end
+ elseif juniors and #juniors > 0 then
+ result[#result+1] = concat(juniors," ")
+ end
+ return concat(result)
+end
+
+-- vons surnames juniors, firstnames
+
+-- [vons] [vonsep] [surnames] [surnamejuniorsep] [juniors] [surnamefirstnamesep] [firstnames] (von Hoekwater jr, Taco)
+
+function authors.inverted(author,settings)
+ local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors
+ local result, settings = { }, settings or defaultsettings
+ if vons and #vons > 0 then
+ result[#result+1] = concat(vons," ")
+ result[#result+1] = settings.vonsep or defaultsettings.vonsep
+ end
+ if surnames and #surnames > 0 then
+ result[#result+1] = concat(surnames," ")
+ if juniors and #juniors > 0 then
+ result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
+ result[#result+1] = concat(juniors," ")
+ end
+ elseif juniors and #juniors > 0 then
+ result[#result+1] = concat(juniors," ")
+ end
+ if firstnames and #firstnames > 0 then
+ result[#result+1] = settings.surnamefirstnamesep or defaultsettings.surnamefirstnamesep
+ result[#result+1] = concat(firstnames," ")
+ end
+ return concat(result)
+end
+
+-- [vons] [vonsep] [surnames] [surnamejuniorsep] [juniors] [surnamefirstnamesep] [initials] (von Hoekwater jr, T)
+
+function authors.invertedshort(author,settings)
+ local vons, surnames, initials, juniors = author.vons, author.surnames, author.initials, author.juniors
+ local result, settings = { }, settings or defaultsettings
+ if vons and #vons > 0 then
+ result[#result+1] = concat(vons," ")
+ result[#result+1] = settings.vonsep or defaultsettings.vonsep
+ end
+ if surnames and #surnames > 0 then
+ result[#result+1] = concat(surnames," ")
+ if juniors and #juniors > 0 then
+ result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep
+ result[#result+1] = concat(juniors," ")
+ end
+ elseif juniors and #juniors > 0 then
+ result[#result+1] = concat(juniors," ")
+ end
+ if initials and #initials > 0 then
+ result[#result+1] = settings.surnameinitialsep or defaultsettings.surnameinitialsep
+ result[#result+1] = concat(the_initials(initials)," ")
+ end
+ return concat(result)
+end
+
+local lastconcatsize = 1
+
+local function concatnames(t,settings)
+ local namesep = settings.namesep
+ local lastnamesep = settings.lastnamesep
+ local finalnamesep = settings.finalnamesep
+ local lastconcatsize = #t
+ if lastconcatsize > 2 then
+ local s = { }
+ for i=1,lastconcatsize-2 do
+ s[i] = t[i] .. namesep
+ end
+ s[lastconcatsize-1], s[lastconcatsize] = t[lastconcatsize-1] .. finalnamesep, t[lastconcatsize]
+ return concat(s)
+ elseif lastconcatsize > 1 then
+ return concat(t,lastnamesep)
+ elseif lastconcatsize > 0 then
+ return t[1]
+ else
+ return ""
+ end
+end
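+
+-- For example, with the plain namesep ", " and final/last namesep " and " from the
+-- commented defaultsettings, concatnames gives:
+--
+-- { "A" } -> "A"   { "A", "B" } -> "A and B"   { "A", "B", "C" } -> "A, B and C"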
+
+function authors.concat(dataset,tag,field,settings)
+ table.setmetatableindex(settings,defaultsettings)
+    local combiner = settings.combiner
+ if not combiner or type(combiner) == "string" then
+ combiner = authors[combiner or "normal"] or authors.normal
+ end
+ local split = datasets[dataset].details[tag][field]
+ local etallimit = settings.etallimit or 1000
+ local etaldisplay = settings.etaldisplay or etallimit
+ local max = split and #split or 0
+ if max == 0 then
+ -- error
+ end
+ if max > etallimit and etaldisplay < max then
+ max = etaldisplay
+ end
+ local combined = { }
+ for i=1,max do
+ combined[i] = combiner(split[i],settings)
+ end
+ local result = concatnames(combined,settings)
+    if #split <= max then
+        return result
+    else
+        return result .. settings.etaltext
+    end
+end
+
+function commands.btxauthor(...)
+ context(authors.concat(...))
+end
+
+function authors.short(author,year)
+ -- todo
+-- local result = { }
+-- if author then
+-- local authors = splitauthors(author)
+-- for a=1,#authors do
+-- local aa = authors[a]
+-- local initials = aa.initials
+-- for i=1,#initials do
+-- result[#result+1] = initials[i]
+-- end
+-- local surnames = aa.surnames
+-- for s=1,#surnames do
+-- result[#result+1] = utfchar(lpegmatch(firstcharacter,surnames[s]))
+-- end
+-- end
+-- end
+-- if year then
+-- result[#result+1] = year
+-- end
+-- return concat(result)
+end
+
+-- We can consider creating a hashtable key -> entry but I wonder if it
+-- pays off.
+
+-- local function test(sample)
+-- local authors = splitauthors(sample)
+-- print(table.serialize(authors))
+-- for i=1,#authors do
+-- local author = authors[i]
+-- print(normalauthor (author,settings))
+-- print(normalshortauthor (author,settings))
+-- print(invertedauthor (author,settings))
+-- print(invertedshortauthor(author,settings))
+-- end
+-- print(concatauthors(sample,settings,normalauthor))
+-- print(concatauthors(sample,settings,normalshortauthor))
+-- print(concatauthors(sample,settings,invertedauthor))
+-- print(concatauthors(sample,settings,invertedshortauthor))
+-- end
+
+-- local sample_a = "Hagen, Hans and Hoekwater, Taco Whoever T. Ex. and Henkel Hut, Hartmut Harald von der"
+-- local sample_b = "Hans Hagen and Taco Whoever T. Ex. Hoekwater and Hartmut Harald von der Henkel Hut"
+
+-- test(sample_a)
+-- test(sample_b)
+
+
diff --git a/tex/context/base/publ-dat.lua b/tex/context/base/publ-dat.lua
new file mode 100644
index 000000000..b1bf34265
--- /dev/null
+++ b/tex/context/base/publ-dat.lua
@@ -0,0 +1,510 @@
+if not modules then modules = { } end modules ['publ-dat'] = {
+ version = 1.001,
+    comment   = "this module is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: strip the @ in the lpeg instead of in do_definition and do_shortcut
+-- todo: store bibroot and bibrootdt
+
+--[[ldx--
+<p>This is a prelude to integrated bibliography support. This file just loads
+bibtex files and converts them to xml so that we can access the content
+in a convenient way. Actual handling of the data takes place elsewhere.</p>
+--ldx]]--
+
+if not characters then
+ dofile(resolvers.findfile("char-def.lua"))
+ dofile(resolvers.findfile("char-ini.lua"))
+ dofile(resolvers.findfile("char-tex.lua"))
+end
+
+local chardata = characters.data
+local lowercase = characters.lower
+
+local lower, gsub, concat = string.lower, string.gsub, table.concat
+local next, type = next, type
+local utfchar = utf.char
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local textoutf = characters and characters.tex.toutf
+local settings_to_hash, settings_to_array = utilities.parsers.settings_to_hash, utilities.parsers.settings_to_array
+local formatters = string.formatters
+local sortedkeys, sortedhash = table.sortedkeys, table.sortedhash
+local xmlcollected, xmltext, xmlconvert = xml.collected, xml.text, xml.convert
+local setmetatableindex = table.setmetatableindex
+
+-- todo: more allocate
+
+local P, R, S, V, C, Cc, Cs, Ct, Carg = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Carg
+
+local trace = false trackers.register("publications", function(v) trace = v end)
+local report = logs.reporter("publications")
+
+publications = publications or { }
+local publications = publications
+
+local datasets = publications.datasets or { }
+publications.datasets = datasets
+
+publications.statistics = publications.statistics or { }
+local publicationsstats = publications.statistics
+
+publicationsstats.nofbytes = 0
+publicationsstats.nofdefinitions = 0
+publicationsstats.nofshortcuts = 0
+publicationsstats.nofdatasets = 0
+
+local xmlplaceholder = "<?xml version='1.0' standalone='yes'?>\n<bibtex></bibtex>"
+
+local defaultshortcuts = {
+ jan = "1",
+ feb = "2",
+ mar = "3",
+ apr = "4",
+ may = "5",
+ jun = "6",
+ jul = "7",
+ aug = "8",
+ sep = "9",
+ oct = "10",
+ nov = "11",
+ dec = "12",
+}
+
+function publications.new(name)
+ publicationsstats.nofdatasets = publicationsstats.nofdatasets + 1
+ local dataset = {
+ name = name or "dataset " .. publicationsstats.nofdatasets,
+ nofentries = 0,
+ shortcuts = { },
+ luadata = { },
+ xmldata = xmlconvert(xmlplaceholder),
+ -- details = { },
+ nofbytes = 0,
+ entries = nil, -- empty == all
+ sources = { },
+ loaded = { },
+ fields = { },
+ userdata = { },
+ used = { },
+ commands = { }, -- for statistical purposes
+ status = {
+ resources = false,
+ userdata = false,
+ },
+ }
+ setmetatableindex(dataset,function(t,k)
+ -- will become a plugin
+ if k == "details" and publications.enhance then
+ dataset.details = { }
+ publications.enhance(dataset.name)
+ return dataset.details
+ end
+ end)
+ return dataset
+end
+
+function publications.markasupdated(name)
+    if type(name) == "string" then
+        datasets[name].details = nil
+    else
+        name.details = nil
+ end
+end
+
+setmetatableindex(datasets,function(t,k)
+ local v = publications.new(k)
+ datasets[k] = v
+ return v
+end)
+
+-- we apply some normalization
+
+----- command = P("\\") * Cc("btxcmd{") * (R("az","AZ")^1) * Cc("}")
+local command = P("\\") * (Carg(1) * C(R("az","AZ")^1) / function(list,c) list[c] = (list[c] or 0) + 1 return "btxcmd{" .. c .. "}" end)
+local somemath = P("$") * ((1-P("$"))^1) * P("$") -- let's not assume nested math
+local any = P(1)
+local done = P(-1)
+local one_l = P("{") / ""
+local one_r = P("}") / ""
+local two_l = P("{{") / ""
+local two_r = P("}}") / ""
+local special = P("#") / "\\letterhash"
+
+local filter_0 = S('\\{}')
+local filter_1 = (1-filter_0)^0 * filter_0
+local filter_2 = Cs(
+-- {{...}} ... {{...}}
+-- two_l * (command + special + any - two_r - done)^0 * two_r * done +
+-- one_l * (command + special + any - one_r - done)^0 * one_r * done +
+ (somemath + command + special + any )^0
+)
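+
+-- A sketch of what this cleanup does to a (hypothetical) field value:
+--
+-- lpegmatch(filter_2,[[The \TeX book, part #1]],1,dataset.commands)
+--
+-- "The btxcmd{TeX} book, part \letterhash1" (and dataset.commands.TeX is incremented)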
+
+-- Currently we expand shortcuts and for large ones (like the acknowledgements
+-- in tugboat.bib) this is not that efficient. However, eventually strings get
+-- hashed again.
+
+local function do_shortcut(tag,key,value,shortcuts)
+ publicationsstats.nofshortcuts = publicationsstats.nofshortcuts + 1
+ tag = lowercase(tag)
+ if tag == "@string" then
+ shortcuts[key] = value
+ end
+end
+
+local function getindex(dataset,luadata,tag)
+ local found = luadata[tag]
+ if found then
+ return found.index or 0
+ else
+ local index = dataset.nofentries + 1
+ dataset.nofentries = index
+ return index
+ end
+end
+
+publications.getindex = getindex
+
+local function do_definition(category,tag,tab,dataset)
+ publicationsstats.nofdefinitions = publicationsstats.nofdefinitions + 1
+ local fields = dataset.fields
+ local luadata = dataset.luadata
+ local found = luadata[tag]
+ local index = getindex(dataset,luadata,tag)
+ local entries = {
+ category = gsub(lower(category),"^@",""),
+ tag = tag,
+ index = index,
+ }
+ for i=1,#tab,2 do
+ local original = tab[i]
+ local normalized = fields[original]
+ if not normalized then
+ normalized = lower(original) -- we assume ascii fields
+ fields[original] = normalized
+ end
+ local value = tab[i+1]
+ value = textoutf(value)
+ if lpegmatch(filter_1,value) then
+ value = lpegmatch(filter_2,value,1,dataset.commands) -- we need to start at 1 for { }
+ end
+ if normalized == "crossref" then
+ local parent = luadata[value]
+ if parent then
+ setmetatableindex(entries,parent)
+ else
+ -- warning
+ end
+ end
+ entries[normalized] = value
+ end
+ luadata[tag] = entries
+end
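+
+-- So a hypothetical entry like
+--
+-- @article{knuth1984, title = "The TeXbook", year = "1984"}
+--
+-- ends up in dataset.luadata roughly as:
+--
+-- luadata["knuth1984"] = { category = "article", tag = "knuth1984", index = 1,
+--                          title = "The TeXbook", year = "1984" }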
+
+local function resolve(s,dataset)
+ return dataset.shortcuts[s] or defaultshortcuts[s] or s -- can be number
+end
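+
+-- Unquoted keywords in a value resolve through these shortcuts, for instance
+-- (hypothetical input):
+--
+-- @STRING{ tb = "TUGboat" }
+-- @ARTICLE{ foo, journal = tb, month = sep } -- journal -> "TUGboat", month -> "9"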
+
+local percent = P("%")
+local start = P("@")
+local comma = P(",")
+local hash = P("#")
+local escape = P("\\")
+local single = P("'")
+local double = P('"')
+local left = P('{')
+local right = P('}')
+local both = left + right
+local lineending = S("\n\r")
+local space = S(" \t\n\r\f") -- / " "
+local spacing = space^0
+local equal = P("=")
+----- collapsed = (space^1)/ " "
+local collapsed = (lpegpatterns.whitespace^1)/ " "
+
+----- balanced = lpegpatterns.balanced
+local balanced = P {
+ [1] = ((escape * (left+right)) + (collapsed + 1 - (left+right)) + V(2))^0,
+ [2] = left * V(1) * right
+}
+
+local keyword = C((R("az","AZ","09") + S("@_:-"))^1) -- C((1-space)^1)
+local s_quoted = ((escape*single) + collapsed + (1-single))^0
+local d_quoted = ((escape*double) + collapsed + (1-double))^0
+
+local b_value = (left /"") * balanced * (right /"")
+local s_value = (single/"") * (b_value + s_quoted) * (single/"")
+local d_value = (double/"") * (b_value + d_quoted) * (double/"")
+local r_value = keyword * Carg(1) /resolve
+
+local somevalue = s_value + d_value + b_value + r_value
+local value = Cs((somevalue * ((spacing * hash * spacing)/"" * somevalue)^0))
+
+local assignment = spacing * keyword * spacing * equal * spacing * value * spacing
+local shortcut = keyword * spacing * left * spacing * (assignment * comma^0)^0 * spacing * right * Carg(1)
+local definition = keyword * spacing * left * spacing * keyword * comma * Ct((assignment * comma^0)^0) * spacing * right * Carg(1)
+local comment = keyword * spacing * left * (1-right)^0 * spacing * right
+local forget = percent^1 * (1-lineending)^0
+
+-- todo \%
+
+local bibtotable = (space + forget + shortcut/do_shortcut + definition/do_definition + comment + 1)^0
+
+-- loadbibdata -> dataset.luadata
+-- loadtexdata -> dataset.luadata
+-- loadluadata -> dataset.luadata
+
+-- converttoxml -> dataset.xmldata from dataset.luadata
+
+function publications.loadbibdata(dataset,content,source,kind)
+ statistics.starttiming(publications)
+ publicationsstats.nofbytes = publicationsstats.nofbytes + #content
+ dataset.nofbytes = dataset.nofbytes + #content
+ if source then
+ table.insert(dataset.sources, { filename = source, checksum = md5.HEX(content) })
+ dataset.loaded[source] = kind or true
+ end
+ dataset.newtags = #dataset.luadata > 0 and { } or dataset.newtags
+ publications.markasupdated(dataset)
+ lpegmatch(bibtotable,content or "",1,dataset)
+ statistics.stoptiming(publications)
+end
+
+-- we could use xmlescape again
+
+local cleaner_0 = S('<>&')
+local cleaner_1 = (1-cleaner_0)^0 * cleaner_0
+local cleaner_2 = Cs ( (
+ P("<") / "&lt;" +
+ P(">") / "&gt;" +
+ P("&") / "&amp;" +
+ P(1)
+)^0)
+
+local compact = false -- can be a directive but then we also need to deal with newlines ... not now
+
+function publications.converttoxml(dataset,nice) -- we have fields !
+ local luadata = dataset and dataset.luadata
+ if luadata then
+ statistics.starttiming(publications)
+ statistics.starttiming(xml)
+ --
+ local result, r = { }, 0
+ --
+ r = r + 1 ; result[r] = "<?xml version='1.0' standalone='yes'?>"
+ r = r + 1 ; result[r] = "<bibtex>"
+ --
+ if nice then
+ local f_entry_start = formatters[" <entry tag='%s' category='%s' index='%s'>"]
+ local f_entry_stop = " </entry>"
+ local f_field = formatters[" <field name='%s'>%s</field>"]
+ for tag, entry in sortedhash(luadata) do
+ r = r + 1 ; result[r] = f_entry_start(tag,entry.category,entry.index)
+ for key, value in sortedhash(entry) do
+ if key ~= "tag" and key ~= "category" and key ~= "index" then
+ if lpegmatch(cleaner_1,value) then
+ value = lpegmatch(cleaner_2,value)
+ end
+ if value ~= "" then
+ r = r + 1 ; result[r] = f_field(key,value)
+ end
+ end
+ end
+ r = r + 1 ; result[r] = f_entry_stop
+ end
+ else
+ local f_entry_start = formatters["<entry tag='%s' category='%s' index='%s'>"]
+ local f_entry_stop = "</entry>"
+ local f_field = formatters["<field name='%s'>%s</field>"]
+ for tag, entry in next, luadata do
+ r = r + 1 ; result[r] = f_entry_start(entry.tag,entry.category,entry.index)
+ for key, value in next, entry do
+ if key ~= "tag" and key ~= "category" and key ~= "index" then
+ if lpegmatch(cleaner_1,value) then
+ value = lpegmatch(cleaner_2,value)
+ end
+ if value ~= "" then
+ r = r + 1 ; result[r] = f_field(key,value)
+ end
+ end
+ end
+ r = r + 1 ; result[r] = f_entry_stop
+ end
+ end
+ --
+ r = r + 1 ; result[r] = "</bibtex>"
+ --
+ result = concat(result,nice and "\n" or nil)
+ --
+ dataset.xmldata = xmlconvert(result, {
+ resolve_entities = true,
+ resolve_predefined_entities = true, -- in case we have escaped entities
+ -- unify_predefined_entities = true, -- &#038; -> &amp;
+ utfize_entities = true,
+ } )
+ --
+ statistics.stoptiming(xml)
+ statistics.stoptiming(publications)
+ if lxml then
+ lxml.register(formatters["btx:%s"](dataset.name),dataset.xmldata)
+ end
+ end
+end
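+
+-- In nice mode the generated tree looks roughly like this (an informal sketch):
+--
+-- <bibtex>
+--   <entry tag='knuth1984' category='article' index='1'>
+--     <field name='year'>1984</field>
+--   </entry>
+-- </bibtex>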
+
+local loaders = publications.loaders or { }
+publications.loaders = loaders
+
+function loaders.bib(dataset,filename,kind)
+ local data = io.loaddata(filename) or ""
+ if data == "" then
+ report("empty file %a, nothing loaded",filename)
+ elseif trace then
+        report("loading file %a",filename)
+ end
+ publications.loadbibdata(dataset,data,filename,kind)
+end
+
+function loaders.lua(dataset,filename) -- if filename is a table we load that one
+ if type(dataset) == "table" then
+ dataset = datasets[dataset]
+ end
+ local data = type(filename) == "table" and filename or table.load(filename)
+ if data then
+ local luadata = dataset.luadata
+ for tag, entry in next, data do
+ if type(entry) == "table" then
+ entry.index = getindex(dataset,luadata,tag)
+ luadata[tag] = entry -- no cleaning yet
+ end
+ end
+ end
+end
+
+function loaders.xml(dataset,filename)
+ local luadata = dataset.luadata
+ local root = xml.load(filename)
+    for xmlentry in xmlcollected(root,"/bibtex/entry") do -- renamed so the lua table below does not shadow the xml element
+        local attributes = xmlentry.at
+        local tag = attributes.tag
+        local entry = {
+            category = attributes.category
+        }
+        for field in xmlcollected(xmlentry,"/field") do
+ -- entry[field.at.name] = xmltext(field)
+ entry[field.at.name] = field.dt[1] -- no cleaning yet
+ end
+ -- local edt = entry.dt
+ -- for i=1,#edt do
+ -- local e = edt[i]
+ -- local a = e.at
+ -- if a and a.name then
+ -- t[a.name] = e.dt[1] -- no cleaning yet
+ -- end
+ -- end
+ entry.index = getindex(dataset,luadata,tag)
+ luadata[tag] = entry
+ end
+end
+
+setmetatableindex(loaders,function(t,filetype)
+ local v = function(dataset,filename)
+ report("no loader for file %a with filetype %a",filename,filetype)
+ end
+    t[filetype] = v
+ return v
+end)
+
+function publications.load(dataset,filename,kind)
+ statistics.starttiming(publications)
+ local files = settings_to_array(filename)
+ for i=1,#files do
+ local filetype, filename = string.splitup(files[i],"::")
+ if not filename then
+ filename = filetype
+ filetype = file.suffix(filename)
+ end
+ local fullname = resolvers.findfile(filename,"bib")
+ if dataset.loaded[fullname] then -- will become better
+ -- skip
+ elseif fullname == "" then
+            report("no file %a",filename)
+ else
+ loaders[filetype](dataset,fullname)
+ end
+ if kind then
+ dataset.loaded[fullname] = kind
+ end
+ end
+ statistics.stoptiming(publications)
+end
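+
+-- Filenames can be a comma separated list and can carry an explicit filetype
+-- prefix, for instance (hypothetical files):
+--
+-- publications.load(datasets["standard"],"tugboat.bib,lua::extra.lua","manual")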
+
+local checked = function(s,d) d[s] = (d[s] or 0) + 1 end
+local checktex = ( (1-P("\\"))^1 + P("\\") * ((C(R("az","AZ")^1) * Carg(1))/checked))^0
+
+function publications.analyze(dataset)
+ local data = dataset.luadata
+ local categories = { }
+ local fields = { }
+ local commands = { }
+ for k, v in next, data do
+ categories[v.category] = (categories[v.category] or 0) + 1
+ for k, v in next, v do
+ fields[k] = (fields[k] or 0) + 1
+ lpegmatch(checktex,v,1,commands)
+ end
+ end
+ dataset.analysis = {
+ categories = categories,
+ fields = fields,
+ commands = commands,
+ }
+end
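+
+-- The resulting dataset.analysis is a set of simple frequency tables, for
+-- instance (made up counts):
+--
+-- { categories = { article = 12, book = 3 },
+--   fields     = { author = 14, title = 15, year = 15 },
+--   commands   = { TeX = 4 } }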
+
+-- str = [[
+-- @COMMENT { CRAP }
+-- @STRING{ hans = "h a n s" }
+-- @STRING{ taco = "t a c o" }
+-- @SOMETHING{ key1, abc = "t a c o" , def = "h a n s" }
+-- @SOMETHING{ key2, abc = hans # taco }
+-- @SOMETHING{ key3, abc = "hans" # taco }
+-- @SOMETHING{ key4, abc = hans # "taco" }
+-- @SOMETHING{ key5, abc = hans # taco # "hans" # "taco"}
+-- @SOMETHING{ key6, abc = {oeps {oeps} oeps} }
+-- ]]
+
+-- local dataset = publications.new()
+-- publications.tolua(dataset,str)
+-- publications.toxml(dataset)
+-- publications.toxml(dataset)
+-- print(dataset.xmldata)
+-- inspect(dataset.luadata)
+-- inspect(dataset.xmldata)
+-- inspect(dataset.shortcuts)
+-- print(dataset.nofbytes,statistics.elapsedtime(publications))
+
+-- local dataset = publications.new()
+-- publications.load(dataset,"IEEEabrv.bib")
+-- publications.load(dataset,"IEEEfull.bib")
+-- publications.load(dataset,"IEEEexample.bib")
+-- publications.toxml(dataset)
+-- print(dataset.nofbytes,statistics.elapsedtime(publications))
+
+-- local dataset = publications.new()
+-- publications.load(dataset,"gut.bib")
+-- publications.load(dataset,"komoedie.bib")
+-- publications.load(dataset,"texbook1.bib")
+-- publications.load(dataset,"texbook2.bib")
+-- publications.load(dataset,"texbook3.bib")
+-- publications.load(dataset,"texgraph.bib")
+-- publications.load(dataset,"texjourn.bib")
+-- publications.load(dataset,"texnique.bib")
+-- publications.load(dataset,"tugboat.bib")
+-- publications.toxml(dataset)
+-- print(dataset.nofbytes,statistics.elapsedtime(publications))
+
+-- print(table.serialize(dataset.luadata))
+-- print(table.serialize(dataset.xmldata))
+-- print(table.serialize(dataset.shortcuts))
+-- print(xml.serialize(dataset.xmldata))
diff --git a/tex/context/base/publ-imp-apa.mkiv b/tex/context/base/publ-imp-apa.mkiv
new file mode 100644
index 000000000..3df33ce63
--- /dev/null
+++ b/tex/context/base/publ-imp-apa.mkiv
@@ -0,0 +1,547 @@
+%D \module
+%D [ file=publ-imp-apa,
+%D version=2013.12.12, % based on bibl-apa.tex and later xml variant
+%D title=APA bibliography style,
+%D subtitle=Publications,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is therefore copyrighted
+%C by \PRAGMA. See mreadme.pdf for details.
+
+% common
+
+\loadbtxdefinitionfile[def]
+
+\startsetups btx:apa:common:wherefrom
+ \btxdoifelse {address} {
+ \getvariable{btx:temp}{left}
+ \btxdoifelse {country} {
+ \btxdoifelse {\getvariable{btx:temp}{label}} {
+ \btxflush{address}\btxcomma\btxflush{country}: \btxflush{\getvariable{btx:temp}{label}}
+ } {
+ \btxflush{address}\btxcomma\btxflush{country}
+ }
+ } {
+ \btxdoifelse {\getvariable{btx:temp}{label}} {
+ \btxflush{address}\btxcomma\btxflush{\getvariable{btx:temp}{label}}
+ } {
+ \btxflush{address}
+ }
+ }
+ \getvariable{btx:temp}{right}
+ } {
+ \btxdoifelse {country} {
+ \getvariable{btx:temp}{left}
+ \btxdoifelse {\getvariable{btx:temp}{label}} {
+ \btxflush{country}: \btxflush{\getvariable{btx:temp}{label}}
+ } {
+ \btxflush{country}
+ }
+ \getvariable{btx:temp}{right}
+ } {
+ \btxdoifelse {\getvariable{btx:temp}{label}} {
+ \getvariable{btx:temp}{left}
+ \btxflush{\getvariable{btx:temp}{label}}
+ \getvariable{btx:temp}{right}
+ } {
+ \getvariable{btx:temp}{otherwise}
+ }
+ }
+ }
+\stopsetups
+
+% \setvariables[btx:temp][label=,left=,right=,otherwise=]
+
+\startsetups btx:apa:common:publisher
+ \begingroup
+ \setvariables[btx:temp][label=publisher]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:organization
+ \begingroup
+ \setvariables[btx:temp][label=organization]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:school
+ \begingroup
+ \setvariables[btx:temp][label=school]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:institution
+ \begingroup
+ \setvariables[btx:temp][label=institution]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:school:subsentence
+ \begingroup
+ \setvariables[btx:temp][label=school,left=\btxcomma,right=\btxperiod,otherwise=\btxperiod]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:institution:subsentence
+ \begingroup
+ \setvariables[btx:temp][label=institution,left=\btxcomma,right=\btxperiod,otherwise=\btxperiod]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:publisher:sentence
+ \begingroup
+ \setvariables[btx:temp][label=publisher,left=\btxspace,right=\btxperiod]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:organization:sentence
+ \begingroup
+ \setvariables[btx:temp][label=organization,left=\btxspace,right=\btxperiod]\relax
+ \btxsetup{btx:apa:common:wherefrom}
+ \endgroup
+\stopsetups
+
+\startsetups btx:apa:common:title-and-series
+ \btxdoif {title} {
+ \btxflush{title}
+ \btxdoif {series} {
+ \btxlparent\btxflush{series}\btxrparent
+ }
+ \btxperiod
+ }
+\stopsetups
+
+\startsetups btx:apa:common:title-it-and-series
+ \btxdoif {title} {
+ \bgroup\it\btxflush{title}\/\egroup
+ \btxdoif {series} {
+ \btxlparent\btxflush{series}\btxrparent
+ }
+ \btxperiod
+ }
+\stopsetups
+
+\disablemode[btx:apa:edited-book]
+
+\startsetups btx:apa:common:author-and-year
+ \btxdoif {author} {
+ \btxflushauthor{author}
+ }
+ \btxdoif {year} {
+ \btxlparent\btxflush{year}\btxrparent
+ }
+ \btxperiod
+\stopsetups
+
+\startsetups btx:apa:common:author-or-key-and-year
+ \btxdoifelse {author} {
+ \btxflushauthor{author}
+ } {
+ \btxdoif {key} {
+ \btxlbracket\btxsetup{btx:format:key}\btxrbracket
+ }
+ }
+ \btxdoif {year} {
+ \btxlparent\btxflush{year}\btxrparent
+ }
+ \btxperiod
+\stopsetups
+
+\startsetups btx:apa:common:author-editors-crossref-year
+    \btxdoifelse {author} {
+ \btxflushauthor{author}
+ } {
+ \btxdoifelse {editor} {
+ \enablemode[btx:apa:edited-book]
+ \btxflushauthor{editor}
+ \btxcomma\btxsingularplural{editor}{editors}
+ } {
+ % weird period
+ \btxdoif {crossref} {
+ \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket\btxperiod
+ }
+ }
+ }
+ \btxdoif {year} {
+ \btxlparent\btxflush{year}\btxrparent
+ }
+ \btxperiod
+\stopsetups
+
+\startsetups btx:apa:common:editor-or-key-and-year
+ \btxdoifelse {editor} {
+ \enablemode[btx:apa:edited-book]
+ \btxflushauthor{editor}
+ \btxcomma\btxsingularplural{editor}{editors}
+ } {
+ \btxdoif {key} {
+ \btxlbracket\btxsetup{btx:format:key}\btxrbracket
+ }
+ }
+ \btxspace
+ \btxdoif {year} {
+ \btxlparent\btxflush{year}\btxrparent
+ }
+ \btxperiod
+\stopsetups
+
+\startsetups btx:apa:common:note
+ \btxdoif {note} {
+ \btxspace\btxflush{note}\btxperiod
+ }
+\stopsetups
+
+\startsetups btx:apa:common:comment
+ \btxdoif {comment} {
+ \btxspace\btxflush{comment}\btxperiod
+ }
+\stopsetups
+
+\startsetups btx:apa:common:pages:p
+ \btxdoif {pages} {
+ \btxspace\btxflush{pages}\btxspace p\btxperiod
+ }
+\stopsetups
+
+\startsetups btx:apa:common:pages:pp
+ \btxdoif {pages} {
+ \btxspace\btxflush{pages}\btxspace pp\btxperiod
+ }
+\stopsetups
+
+\startsetups btx:apa:common:pages:pages
+ \btxdoif {pages} {
+ \btxcomma pages~\btxflush{pages}
+ }
+\stopsetups
+
+\startsetups btx:apa:common:edition:sentence
+ \btxdoif {edition} {
+ \btxspace\btxflush{edition}\btxspace edition\btxperiod
+ }
+\stopsetups
+
+% check when the next is used (no period)
+
+% \startsetups btx:apa:common:edition
+% \btxdoif {edition} {
+% \btxspace\btxflush{edition}\btxspace edition
+% }
+% \stopsetups
+
+% we can share more, todo
+
+% specific
+
+\startsetups btx:apa:article
+ \btxsetup{btx:apa:common:author-or-key-and-year}
+ \btxdoif {title} {
+ \btxflush{title}\btxperiod
+ }
+ \btxdoifelse {journal} {
+ \bgroup\it\btxflush{journal}\/\egroup
+ } {
+ \btxdoif {crossref} {
+ In\btxspace\btxflush{crossref}
+ }
+ }
+ \btxdoifelse {volume} {
+ \btxcomma\bgroup\it\btxflush{volume}\/\egroup
+ \btxdoif {issue} {
+ \btxlparent\btxflush{issue}\btxrparent
+ }
+ \btxdoif {pages} {
+ \btxcomma\btxflush{pages}
+ }
+ \btxperiod
+ } {
+ \btxsetup{btx:apa:common:pages:pp}
+ }
+ \btxsetup{btx:apa:common:note}
+ \btxsetup{btx:apa:common:comment}
+\stopsetups
+
+\startsetups btx:apa:book
+ \btxsetup{btx:apa:common:author-editors-crossref-year}
+ \btxdoif {title} {
+ \bgroup\it\btxflush{title}\/\egroup
+ \doifmodeelse {btx:apa:edited-book} {
+ \btxdoifelse {volume} {
+ \btxspace Number\nonbreakablespace\btxflush{volume}
+ \btxdoifelse {series} {
+ \btxspace in\nonbreakablespace\btxflush{series}\btxperiod
+ } {
+ \btxdoifelse {crossref} {
+ \btxspace in\btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
+ } {
+ \btxperiod
+ }
+ }
+ } {
+ \btxdoif {series} {
+ \btxspace\btxflush{series}
+ }
+ \btxperiod
+ }
+ } {
+ \btxdoifelse {crossref} {
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxperiod
+ \btxdoif {volume} {
+ Volume\nonbreakablespace\btxflush{volume}\btxspace of\nonbreakablespace
+ }
+ } {
+ \btxdoif {volume} {
+ \btxcomma volume\nonbreakablespace\btxflush{volume}
+ \btxdoif {series} {
+ \btxspace of\nonbreakablespace\bgroup\it\btxflush{series}\/\egroup
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxperiod
+ }
+ }
+ }
+ }
+ \btxsetup{btx:apa:common:edition:sentence}
+ \btxsetup{btx:apa:common:publisher:sentence}
+ \btxsetup{btx:apa:common:pages:p}% twice?
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:inbook
+ \btxsetup{btx:apa:common:author-editors-crossref-year}
+ \btxdoifelse {title} {
+ \bgroup\it\btxflush{title}\/\egroup
+ } {
+ \doifmodeelse {btx:apa:edited-book} {
+ \btxdoifelse {volume} {
+ \btxspace number\nonbreakablespace\btxflush{volume}
+ \btxdoifelse {series} {
+ \btxspace in\nonbreakablespace\btxflush{series}\btxperiod
+ } {
+ \btxdoifelse {crossref} {
+ \btxspace in\btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
+ } {
+ \btxperiod
+ }
+ }
+ } {
+ \btxdoif {series} {
+ \btxspace\btxflush{series}\btxperiod
+ }
+ }
+ } {
+ \btxdoifelse {crossref} {
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxdoif {volume} {
+ Volume\nonbreakablespace\btxflush{volume}\btxspace of\nonbreakablespace
+ }
+ \btxdoif {crossref} {
+ \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
+ }
+ } {
+ \btxdoif {volume} {
+ \btxcomma volume\nonbreakablespace\btxflush{volume}
+ \btxdoif {series} {
+ \btxspace of\nonbreakablespace\bgroup\it\btxflush{series}\/\egroup
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxperiod
+ }
+ }
+ }
+ }
+ \btxspace
+ \btxsetup{btx:apa:common:edition:sentence}
+ \btxsetup{btx:apa:common:publisher}
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:booklet
+ \btxsetup{btx:apa:common:author-or-key-and-year}
+ \btxsetup{btx:apa:common:title-it-and-series}
+ \btxsetup{btx:apa:common:edition:sentence}
+ \btxsetup{btx:apa:common:publisher:sentence}
+ \btxsetup{btx:apa:common:pages:p}
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:manual
+ \btxsetup{btx:apa:common:author-or-key-and-year}
+ \btxsetup{btx:apa:common:title-it-and-series}
+ \btxsetup{btx:apa:common:edition:sentence}
+ \btxsetup{btx:apa:common:organization:sentence}
+ \btxsetup{btx:apa:common:pages:p}
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:incollection
+ \btxsetup{btx:apa:common:author-and-year}
+ \btxdoif {arttitle} {
+ \btxflush{arttitle}\btxperiod
+ }
+ In\btxspace
+ \btxdoifelse {title} {
+ \btxflushauthor{editor}\btxcomma
+ \bgroup\it\btxflush{title}\/\egroup
+ \btxdoif {series} {
+ \btxdoif {volume} {
+ \btxcomma number\btxspace\btxflush{volume}\btxspace in
+ }
+ \btxspace\btxflush{series}
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}\btxspace
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxdoif {edition} {
+ \btxspace\btxflush{edition}\btxspace edition
+ }
+ \btxsetup{btx:apa:common:publisher:sentence}
+ } {
+ \btxdoif {crossref} {
+ \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}
+ }
+ \btxspace
+ \btxsetup{btx:apa:common:pages:pages}
+ }
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:inproceedings
+ \btxsetup{btx:apa:common:author-and-year}
+ \btxdoif {arttitle} {
+ \btxflush{arttitle}\btxperiod
+ }
+ In\btxspace
+ \btxdoifelse {title} {
+ \btxdoif {editor} {
+ \btxflushauthor{editor}
+ \btxcomma\btxsingularplural{editor}{editors}\btxcomma
+ }
+ \bgroup\it\btxflush{title}\/\egroup
+ \btxdoif {series} {
+ \btxdoif {volume} {
+ \btxcomma number~\btxflush{volume} in
+ }
+ \btxspace
+ \btxflush{series}
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}\btxspace
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxperiod
+ \btxsetup{btx:apa:common:organization:sentence}
+ } {
+ \btxdoif {crossref} {
+ \btxlbracket\btxsetup{btx:format:crossref}\btxrbracket
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}\btxspace
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxperiod
+ }
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:proceedings
+ \btxsetup{btx:apa:common:editor-or-key-and-year}
+ \btxdoif {title} {
+ \bgroup\it\btxflush{title}\/\egroup
+ \btxdoif {volume} {
+ \btxcomma number\btxspace\btxflush{volume}\btxspace in\btxspace
+ }
+ \btxdoif {chapter} {
+ \btxcomma\btxflush{chapter}\btxspace
+ }
+ \btxsetup{btx:apa:common:pages:pages}
+ \btxperiod
+ \btxsetup{btx:apa:common:organization:sentence}
+ }
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:common:thesis
+ \btxsetup{btx:apa:common:author-and-year}
+ \btxsetup{btx:apa:common:title-it-and-series}
+ \btxdoifelse {type} {
+ \btxflush{type}
+ } {
+ \getvariable{btx:temp}{label}
+ }
+ \btxsetup{btx:apa:common:school:subsentence}
+ \btxsetup{btx:apa:common:pages:p}
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:mastersthesis
+ \setvariables[btx:temp][label=Master's thesis]
+ \btxsetup{btx:apa:common:thesis}
+\stopsetups
+
+\startsetups btx:apa:phdthesis
+ \setvariables[btx:temp][label=PhD thesis]
+ \btxsetup{btx:apa:common:thesis}
+\stopsetups
+
+\startsetups btx:apa:techreport
+ \btxsetup{btx:apa:common:author-and-year}
+ \btxsetup{btx:apa:common:title-and-series}
+ \btxdoifelse {type} {
+ \btxflush{type}
+ \btxdoif {volume} {
+ \btxspace\btxflush{volume}
+ }
+ } {
+ \btxspace Technical Report
+ }
+ \btxsetup{btx:apa:common:institution:subsentence}
+ \btxsetup{btx:apa:common:pages:p}
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:misc
+ \btxsetup{btx:apa:common:author-and-year}
+ \btxsetup{btx:apa:common:title-and-series}
+ \btxsetup{btx:apa:common:publisher:sentence}
+ \btxsetup{btx:apa:common:pages:p}
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\startsetups btx:apa:unpublished
+ \btxsetup{btx:apa:common:author-and-year}
+ \btxsetup{btx:apa:common:title-and-series}
+ \btxsetup{btx:apa:common:pages:p}
+ \btxdoif {type} {
+ \btxlparent\btxflush{type}\btxrparent
+ }
+ \btxsetup{btx:apa:common:note}
+\stopsetups
+
+\endinput
diff --git a/tex/context/base/publ-imp-cite.mkiv b/tex/context/base/publ-imp-cite.mkiv
new file mode 100644
index 000000000..d64c2132c
--- /dev/null
+++ b/tex/context/base/publ-imp-cite.mkiv
@@ -0,0 +1,74 @@
+%D \module
+%D [ file=publ-imp-cite,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Citations,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\startsetups btx:cite:author
+ \btxcitevariant{author}
+\stopsetups
+
+\startsetups btx:cite:authoryear
+ \btxcitevariant{authoryear}
+\stopsetups
+
+\startsetups btx:cite:authoryears
+ \btxcitevariant{authoryears}
+\stopsetups
+
+% \startsetups btx:cite:authornum
+% \btxcitevariant{author}
+% \btxcitevariantparameter\c!inbetween
+% \btxcitevariant{num}
+% \stopsetups
+
+\startsetups btx:cite:authornum
+ \btxcitevariant{authornum}
+\stopsetups
+
+\startsetups btx:cite:year
+ \btxcitevariant{year}
+\stopsetups
+
+\startsetups btx:cite:short
+ \btxcitevariant{short}
+\stopsetups
+
+\startsetups btx:cite:serial
+ \btxcitevariant{serial}
+\stopsetups
+
+\startsetups btx:cite:key
+ \currentbtxtag % \btxcitevariant{tag}
+\stopsetups
+
+\startsetups btx:cite:doi
+ todo: \btxcitevariant{doi}
+\stopsetups
+
+\startsetups btx:cite:url
+ todo: \btxcitevariant{url}
+\stopsetups
+
+\startsetups btx:cite:type
+ \btxcitevariant{category}
+\stopsetups
+
+\startsetups btx:cite:page
+ \btxcitevariant{page}
+\stopsetups
+
+\startsetups btx:cite:none
+ % dummy
+\stopsetups
+
+\startsetups btx:cite:num
+ \btxcitevariant{num}
+\stopsetups
diff --git a/tex/context/base/publ-imp-commands.mkiv b/tex/context/base/publ-imp-commands.mkiv
new file mode 100644
index 000000000..14e2dbae1
--- /dev/null
+++ b/tex/context/base/publ-imp-commands.mkiv
@@ -0,0 +1,15 @@
+\unprotect
+
+% for tugboat
+
+\definebtxcommand\hbox {\hbox}
+\definebtxcommand\vbox {\vbox}
+\definebtxcommand\llap {\llap}
+\definebtxcommand\rlap {\rlap}
+\definebtxcommand\url #1{\hyphenatedurl{#1}}
+\definebtxcommand\acro #1{\dontleavehmode{\smallcaps#1}}
+
+\let\<<
+\let\<>
+
+\protect \endinput
diff --git a/tex/context/base/publ-imp-definitions.mkiv b/tex/context/base/publ-imp-definitions.mkiv
new file mode 100644
index 000000000..2cf2e3e8e
--- /dev/null
+++ b/tex/context/base/publ-imp-definitions.mkiv
@@ -0,0 +1,68 @@
+%D \module
+%D [ file=publ-imp-def,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Definitions,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D Here we collect some helper setups. We assume that checking of a field
+%D happens in the calling setup, if only because that is the place where
+%D fences are also dealt with.
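+%D
+%D A minimal sketch of such a calling setup (the setup name and the use of
+%D the \type {doi} field are only an illustration, not part of this module):
+%D
+%D \starttyping
+%D \startsetups btx:somestyle:common:doi
+%D   \btxdoif {doi} {
+%D     \btxlparent\btxsetup{btx:format:doi}\btxrparent
+%D   }
+%D \stopsetups
+%D \stoptyping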
+
+\unprotect
+
+\startxmlsetups btx:format:crossref
+ \cite[\btxfield{crossref}]
+\stopxmlsetups
+
+\startxmlsetups btx:format:key
+ \btxfield{short}
+\stopxmlsetups
+
+\startxmlsetups btx:format:doi
+ \edef\currentbtxfielddoi{\btxfield{doi}}
+ \ifx\currentbtxfielddoi\empty
+ {\tttf no-doi}
+ \else\ifconditional\btxinteractive
+ \goto{\hyphenatedurl{\currentbtxfielddoi}}[url(http://dx.doi.org/\currentbtxfielddoi)]
+ \else
+ \hyphenatedurl{\currentbtxfielddoi}
+ \fi\fi
+\stopxmlsetups
+
+\startxmlsetups btx:format:url
+ \edef\currentbtxfieldurl{\btxfield{url}}
+ \ifx\currentbtxfieldurl\empty
+ {\tttf no-url}
+ \else\ifconditional\btxinteractive
+ \goto{\hyphenatedurl{\currentbtxfieldurl}}[url(\currentbtxfieldurl)]
+ \else
+ \hyphenatedurl{\currentbtxfieldurl}
+ \fi\fi
+\stopxmlsetups
+
+\startxmlsetups btx:format:month
+ \edef\currentbtxfieldmonth{\btxfield{month}}
+ \ifx\currentbtxfieldmonth\empty
+ {\tttf no-month}
+ \else
+ \edef\p_monthconversion{\btxlistvariantparameter\c!monthconversion}
+ \ifx\p_monthconversion\empty % month month:mnem
+ \currentbtxfieldmonth
+ \else
+ \doifnumberelse \currentbtxfieldmonth {
+ \convertnumber\p_monthconversion\currentbtxfieldmonth
+ } {
+ \currentbtxfieldmonth
+ }
+ \fi
+ \fi
+\stopxmlsetups
+
+\protect
diff --git a/tex/context/base/publ-ini.lua b/tex/context/base/publ-ini.lua
new file mode 100644
index 000000000..a791f4726
--- /dev/null
+++ b/tex/context/base/publ-ini.lua
@@ -0,0 +1,1222 @@
+if not modules then modules = { } end modules ['publ-ini'] = {
+ version = 1.001,
+ comment = "this module is part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- use: for rest in gmatch(reference,"[^, ]+") do
+
+local next, rawget, type = next, rawget, type
+local match, gmatch, format, gsub = string.match, string.gmatch, string.format, string.gsub
+local concat, sort = table.concat, table.sort
+local utfsub = utf.sub
+local formatters = string.formatters
+local allocate = utilities.storage.allocate
+local settings_to_array = utilities.parsers.settings_to_array
+local sortedkeys, sortedhash = table.sortedkeys, table.sortedhash
+local lpegmatch = lpeg.match
+
+local report = logs.reporter("publications")
+local trace = false trackers.register("publications", function(v) trace = v end)
+
+local context = context
+
+local datasets = publications.datasets
+
+local variables = interfaces.variables
+
+local v_local = variables["local"]
+local v_global = variables["global"]
+
+local v_force = variables.force
+local v_standard = variables.standard
+local v_start = variables.start
+local v_none = variables.none
+local v_left = variables.left
+local v_right = variables.right
+local v_middle = variables.middle
+local v_inbetween = variables.inbetween
+
+local v_short = variables.short
+local v_cite = variables.cite
+local v_default = variables.default
+local v_reference = variables.default
+local v_dataset = variables.dataset
+
+local numbertochar = converters.characters
+
+local logsnewline = logs.newline
+local logspushtarget = logs.pushtarget
+local logspoptarget = logs.poptarget
+local csname_id = token.csname_id
+
+statistics.register("publications load time", function()
+ local publicationsstats = publications.statistics
+ local nofbytes = publicationsstats.nofbytes
+ if nofbytes > 0 then
+ return string.format("%s seconds, %s bytes, %s definitions, %s shortcuts",
+ statistics.elapsedtime(publications),nofbytes,publicationsstats.nofdefinitions,publicationsstats.nofshortcuts)
+ else
+ return nil
+ end
+end)
+
+luatex.registerstopactions(function()
+ logspushtarget("logfile")
+ logsnewline()
+ report("start used btx commands")
+ logsnewline()
+ local undefined = csname_id("undefined*crap")
+ for name, dataset in sortedhash(datasets) do
+ for command, n in sortedhash(dataset.commands) do
+ local c = csname_id(command)
+ if c and c ~= undefined then
+ report("%-20s %-20s % 5i %s",name,command,n,"known")
+ else
+ local u = csname_id(utf.upper(command))
+ if u and u ~= undefined then
+ report("%-20s %-20s % 5i %s",name,command,n,"KNOWN")
+ else
+ report("%-20s %-20s % 5i %s",name,command,n,"unknown")
+ end
+ end
+ end
+ end
+ logsnewline()
+ report("stop used btxcommands")
+ logsnewline()
+ logspoptarget()
+end)
+
+-- multipass, we need to sort because hashing is random per run and not per
+-- version (not the nicest change in lua)
+
+local collected = allocate()
+local tobesaved = allocate()
+
+-- we use a dedicated (and efficient, as it knows what it deals with) serializer,
+-- also because we need to ignore the 'details' field
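+--
+-- a sketch of the generated file (tags, fields and values are of course
+-- dataset dependent):
+--
+-- return {
+--  ["some-tag"] = {
+--   category = "article",
+--   year = "2013",
+--  },
+-- }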
+
+local function serialize(t)
+ local f_key_table = formatters[" [%q] = {"]
+ local f_key_string = formatters[" %s = %q,"]
+ local r = { "return {" }
+ local m = 1
+ for tag, entry in sortedhash(t) do
+ m = m + 1
+ r[m] = f_key_table(tag)
+ local s = sortedkeys(entry)
+ for i=1,#s do
+ local k = s[i]
+ -- if k ~= "details" then
+ m = m + 1
+ r[m] = f_key_string(k,entry[k])
+ -- end
+ end
+ m = m + 1
+ r[m] = " },"
+ end
+ r[m] = "}"
+ return concat(r,"\n")
+end
+
+local function finalizer()
+ local prefix = tex.jobname -- or environment.jobname
+ local setnames = sortedkeys(datasets)
+ for i=1,#setnames do
+ local name = setnames[i]
+ local dataset = datasets[name]
+ local userdata = dataset.userdata
+ local checksum = nil
+ local username = file.addsuffix(file.robustname(formatters["%s-btx-%s"](prefix,name)),"lua")
+ if userdata and next(userdata) then
+ if job.passes.first then
+ local newdata = serialize(userdata)
+ checksum = md5.HEX(newdata)
+ io.savedata(username,newdata)
+ end
+ else
+ os.remove(username)
+ username = nil
+ end
+ local loaded = dataset.loaded
+ local sources = dataset.sources
+ local used = { }
+ for i=1,#sources do
+ local source = sources[i]
+ if loaded[source.filename] ~= "previous" then -- or loaded[source.filename] == "current"
+ used[#used+1] = source
+ end
+ end
+ tobesaved[name] = {
+ usersource = {
+ filename = username,
+ checksum = checksum,
+ },
+ datasources = used,
+ }
+ end
+end
+
+local function initializer()
+ statistics.starttiming(publications)
+collected = publications.collected or collected -- for the moment as we load runtime
+ for name, state in next, collected do
+ local dataset = datasets[name]
+ local datasources = state.datasources
+ local usersource = state.usersource
+ if datasources then
+ for i=1,#datasources do
+ local filename = datasources[i].filename
+ publications.load(dataset,filename,"previous")
+ end
+ end
+ if usersource then
+ dataset.userdata = table.load(usersource.filename) or { }
+ end
+ end
+ statistics.stoptiming(publications)
+ function initializer() end -- will go, for now, runtime loaded
+end
+
+job.register('publications.collected',tobesaved,initializer,finalizer)
+
+if not publications.authors then
+ initializer() -- for now, runtime loaded
+end
+
+-- basic access
+
+local function getfield(dataset,tag,name)
+ local d = datasets[dataset].luadata[tag]
+ return d and d[name]
+end
+
+local function getdetail(dataset,tag,name)
+ local d = datasets[dataset].details[tag]
+ return d and d[name]
+end
+
+-- basic loading
+
+function commands.usebtxdataset(name,filename)
+ publications.load(datasets[name],filename,"current")
+end
+
+function commands.convertbtxdatasettoxml(name,nice)
+ publications.converttoxml(datasets[name],nice)
+end
+
+-- enhancing
+
+local splitauthorstring = publications.authors.splitstring
+
+local pagessplitter = lpeg.splitat(lpeg.P("-")^1)
+
+-- maybe not redo when already done
+
+function publications.enhance(dataset) -- for the moment split runs (maybe publications.enhancers)
+ statistics.starttiming(publications)
+ if type(dataset) == "string" then
+ dataset = datasets[dataset]
+ end
+ local luadata = dataset.luadata
+ local details = dataset.details
+ -- author, editor
+ for tag, entry in next, luadata do
+ local author = entry.author
+ local editor = entry.editor
+ details[tag] = {
+ author = author and splitauthorstring(author),
+ editor = editor and splitauthorstring(editor),
+ }
+ end
+ -- short
+ local shorts = { }
+ for tag, entry in next, luadata do
+ local author = details[tag].author
+ if author then
+ -- number depends on sort order
+ local t = { }
+ if #author == 0 then
+ -- what
+ else
+ local n = #author == 1 and 3 or 1
+ for i=1,#author do
+ local surnames = author[i].surnames
+ if not surnames or #surnames == 0 then
+ -- error
+ else
+ t[#t+1] = utfsub(surnames[1],1,n)
+ end
+ end
+ end
+ local year = tonumber(entry.year) or 0
+ local short = formatters["%t%02i"](t,math.mod(year,100))
+ local s = shorts[short]
+ if not s then
+ shorts[short] = tag
+ elseif type(s) == "string" then
+ shorts[short] = { s, tag }
+ else
+ s[#s+1] = tag
+ end
+ else
+ --
+ end
+ end
+ for short, tags in next, shorts do
+ if type(tags) == "table" then
+ table.sort(tags)
+ for i=1,#tags do
+ details[tags[i]].short = short .. numbertochar(i)
+ end
+ else
+ details[tags].short = short
+ end
+ end
+ -- pages
+ for tag, entry in next, luadata do
+ local pages = entry.pages
+ if pages then
+ local first, last = lpegmatch(pagessplitter,pages)
+ details[tag].pages = first and last and { first, last } or pages
+ end
+ end
+ statistics.stoptiming(publications)
+end
+
+function commands.addbtxentry(name,settings,content)
+ local dataset = datasets[name]
+ if dataset then
+ publications.addtexentry(dataset,settings,content)
+ end
+end
+
+function commands.setbtxdataset(name)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ context(name)
+ else
+ report("unknown dataset %a",name)
+ end
+end
+
+function commands.setbtxentry(name,tag)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ if dataset.luadata[tag] then
+ context(tag)
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+end
+
+-- rendering of fields
+
+function commands.btxflush(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local fields = dataset.luadata[tag]
+ if fields then
+ local value = fields[field]
+ if type(value) == "string" then
+ context(value)
+ return
+ end
+ local details = dataset.details[tag]
+ if details then
+ local value = details[field]
+ if type(value) == "string" then
+ context(value)
+ return
+ end
+ end
+ report("unknown field %a of tag %a in dataset %a",field,tag,name)
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+end
+
+function commands.btxdetail(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local details = dataset.details[tag]
+ if details then
+ local value = details[field]
+ if type(value) == "string" then
+ context(value)
+ else
+ report("unknown detail %a of tag %a in dataset %a",field,tag,name)
+ end
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+end
+
+function commands.btxfield(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local fields = dataset.luadata[tag]
+ if fields then
+ local value = fields[field]
+ if type(value) == "string" then
+ context(value)
+ else
+ report("unknown field %a of tag %a in dataset %a",field,tag,name)
+ end
+ else
+ report("unknown tag %a in dataset %a",tag,name)
+ end
+ else
+ report("unknown dataset %a",name)
+ end
+end
+
+-- testing: to be sped up with a test case
+
+function commands.btxdoifelse(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local data = dataset.luadata[tag]
+ local value = data and data[field]
+ if value and value ~= "" then
+ context.firstoftwoarguments()
+ return
+ end
+ end
+ context.secondoftwoarguments()
+end
+
+function commands.btxdoif(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local data = dataset.luadata[tag]
+ local value = data and data[field]
+ if value and value ~= "" then
+ context.firstofoneargument()
+ return
+ end
+ end
+ context.gobbleoneargument()
+end
+
+function commands.btxdoifnot(name,tag,field)
+ local dataset = rawget(datasets,name)
+ if dataset then
+ local data = dataset.luadata[tag]
+ local value = data and data[field]
+ if value and value ~= "" then
+ context.gobbleoneargument()
+ return
+ end
+ end
+ context.firstofoneargument()
+end
+
+-- -- alternative approach: keep data at the tex end
+
+function publications.listconcat(t)
+ local n = #t
+ if n > 0 then
+ context(t[1])
+ if n > 1 then
+ if n > 2 then
+ for i=2,n-1 do
+ context.btxlistparameter("sep")
+ context(t[i])
+ end
+ context.btxlistparameter("finalsep")
+ else
+ context.btxlistparameter("lastsep")
+ end
+ context(t[n])
+ end
+ end
+end
+
+function publications.citeconcat(t)
+ local n = #t
+ if n > 0 then
+ context(t[1])
+ if n > 1 then
+ if n > 2 then
+ for i=2,n-1 do
+ context.btxcitevariantparameter("sep")
+ context(t[i])
+ end
+ context.btxcitevariantparameter("finalsep")
+ else
+ context.btxcitevariantparameter("lastsep")
+ end
+ context(t[n])
+ end
+ end
+end
+
+function publications.singularorplural(singular,plural)
+ if lastconcatsize and lastconcatsize > 1 then
+ context(plural)
+ else
+ context(singular)
+ end
+end
+
+function commands.makebibauthorlist(settings)
+ if not settings then
+ return
+ end
+ local dataset = datasets[settings.dataset]
+ if not dataset or dataset == "" then
+ return
+ end
+ local tag = settings.tag
+ if not tag or tag == "" then
+ return
+ end
+ local asked = settings_to_array(tag)
+ if #asked == 0 then
+ return
+ end
+ local compress = settings.compress
+ local interaction = settings.interaction == v_start
+ local limit = tonumber(settings.limit)
+ local found = { }
+ local hash = { }
+ local total = 0
+ local luadata = dataset.luadata
+ for i=1,#asked do
+ local tag = asked[i]
+ local data = luadata[tag]
+ if data then
+ local author = data.a or "Xxxxxxxxxx"
+ local year = data.y or "0000"
+ if not compress or not hash[author] then
+ local t = {
+ author = author,
+ name = name, -- first
+ year = { [year] = name },
+ }
+ total = total + 1
+ found[total] = t
+ hash[author] = t
+ else
+ hash[author].year[year] = name
+ end
+ end
+ end
+ for i=1,total do
+ local data = found[i]
+ local author = data.author
+ local year = table.keys(data.year)
+ table.sort(year)
+ if interaction then
+ for i=1,#year do
+ year[i] = string.formatters["\\bibmaybeinteractive{%s}{%s}"](data.year[year[i]],year[i])
+ end
+ end
+ context.setvalue("currentbibyear",concat(year,","))
+ if author == "" then
+ context.setvalue("currentbibauthor","")
+ else -- needs checking
+ local authors = settings_to_array(author) -- {{}{}},{{}{}}
+ local nofauthors = #authors
+ if nofauthors == 1 then
+ if interaction then
+ author = string.formatters["\\bibmaybeinteractive{%s}{%s}"](data.name,author)
+ end
+ context.setvalue("currentbibauthor",author)
+ else
+ limit = limit or nofauthors
+ if interaction then
+ for i=1,#authors do
+ authors[i] = string.formatters["\\bibmaybeinteractive{%s}{%s}"](data.name,authors[i])
+ end
+ end
+ if limit == 1 then
+ context.setvalue("currentbibauthor",authors[1] .. "\\bibalternative{otherstext}")
+ elseif limit == 2 and nofauthors == 2 then
+ context.setvalue("currentbibauthor",concat(authors,"\\bibalternative{andtext}"))
+ else
+ for i=1,limit-1 do
+ authors[i] = authors[i] .. "\\bibalternative{namesep}"
+ end
+ if limit < nofauthors then
+ authors[limit+1] = "\\bibalternative{otherstext}"
+ context.setvalue("currentbibauthor",concat(authors,"",1,limit+1))
+ else
+ authors[limit-1] = authors[limit-1] .. "\\bibalternative{andtext}"
+ context.setvalue("currentbibauthor",concat(authors))
+ end
+ end
+ end
+ end
+ -- the following use: currentbibauthor and currentbibyear
+ if i == 1 then
+ context.ixfirstcommand()
+ elseif i == total then
+ context.ixlastcommand()
+ else
+ context.ixsecondcommand()
+ end
+ end
+end
+
+local patterns = { "publ-imp-%s.mkiv", "publ-imp-%s.tex" }
+
+local function failure(name)
+ report("unknown library %a",name)
+end
+
+local function action(name,foundname)
+ context.input(foundname)
+end
+
+function commands.loadbtxdefinitionfile(name) -- a more specific name
+ commands.uselibrary {
+ name = gsub(name,"^publ%-",""),
+ patterns = patterns,
+ action = action,
+ failure = failure,
+ onlyonce = false,
+ }
+end
+
+-- lists:
+
+publications.lists = publications.lists or { }
+local lists = publications.lists
+
+local context = context
+local structures = structures
+
+local references = structures.references
+local sections = structures.sections
+
+-- per rendering
+
+local renderings = { } --- per dataset
+
+table.setmetatableindex(renderings,function(t,k)
+ local v = {
+ list = { },
+ done = { },
+ alldone = { },
+ used = { },
+ registered = { },
+ ordered = { },
+ shorts = { },
+ method = v_none,
+ currentindex = 0,
+ }
+ t[k] = v
+ return v
+end)
+
+-- why shorts vs tags: only for sorting
+
+function lists.register(dataset,tag,short)
+ local r = renderings[dataset]
+ if not short or short == "" then
+ short = tag
+ end
+ if trace then
+ report("registering publication entry %a with shortcut %a",tag,short)
+ end
+ local top = #r.registered + 1
+ -- do we really need these
+ r.registered[top] = tag
+ r.ordered [tag] = top
+ r.shorts [tag] = short
+end
+
+function lists.nofregistered(dataset)
+ return #renderings[dataset].registered
+end
+
+function lists.setmethod(dataset,method)
+ local r = renderings[dataset]
+ r.method = method or v_none
+ r.list = { }
+ r.done = { }
+end
+
+function lists.collectentries(specification)
+ local dataset = specification.btxdataset
+ if not dataset then
+ return
+ end
+ local rendering = renderings[dataset]
+ local method = rendering.method
+ if method == v_none then
+ return
+ end
+-- method=v_local --------------------
+ local result = structures.lists.filter(specification)
+ lists.result = result
+ local section = sections.currentid()
+ local list = rendering.list
+ local done = rendering.done
+ local alldone = rendering.alldone
+ if method == v_local then
+ for listindex=1,#result do
+ local r = result[listindex]
+ local u = r.userdata
+ if u and u.btxset == dataset then
+ local tag = u.btxref
+ if tag and done[tag] ~= section then
+ done[tag] = section
+ alldone[tag] = true
+ list[#list+1] = { tag, listindex }
+ end
+ end
+ end
+ elseif method == v_global then
+ for listindex=1,#result do
+ local r = result[listindex]
+ local u = r.userdata
+ if u and u.btxset == dataset then
+ local tag = u.btxref
+ if tag and not alldone[tag] and done[tag] ~= section then
+ done[tag] = section
+ alldone[tag] = true
+ list[#list+1] = { tag, listindex }
+ end
+ end
+ end
+ elseif method == v_force then
+ -- only for checking, can have duplicates, todo: collapse page numbers, although
+ -- we then also need deferred writes
+ for listindex=1,#result do
+ local r = result[listindex]
+ local u = r.userdata
+ if u and u.btxset == dataset then
+ local tag = u.btxref
+ if tag then
+ list[#list+1] = { tag, listindex }
+ end
+ end
+ end
+ end
+end
+
+lists.sorters = {
+ [v_short] = function(dataset,rendering,list)
+ local shorts = rendering.shorts
+ return function(a,b)
+ local aa, bb = a and a[1], b and b[1]
+ if aa and bb then
+ aa, bb = shorts[aa], shorts[bb]
+ return aa and bb and aa < bb
+ end
+ return false
+ end
+ end,
+ [v_reference] = function(dataset,rendering,list)
+ return function(a,b)
+ local aa, bb = a and a[1], b and b[1]
+ if aa and bb then
+ return aa and bb and aa < bb
+ end
+ return false
+ end
+ end,
+ [v_dataset] = function(dataset,rendering,list)
+ return function(a,b)
+ local aa, bb = a and a[1], b and b[1]
+ if aa and bb then
+ aa, bb = list[aa].index or 0, list[bb].index or 0
+ return aa and bb and aa < bb
+ end
+ return false
+ end
+ end,
+ [v_default] = function(dataset,rendering,list) -- not really needed
+ local ordered = rendering.ordered
+ return function(a,b)
+ local aa, bb = a and a[1], b and b[1]
+ if aa and bb then
+ aa, bb = ordered[aa], ordered[bb]
+ return aa and bb and aa < bb
+ end
+ return false
+ end
+ end,
+}
+
+function lists.flushentries(dataset,sortvariant)
+ local rendering = renderings[dataset]
+ local list = rendering.list
+ local compare = lists.sorters[sortvariant] or lists.sorters[v_default]
+ compare = type(compare) == "function" and compare(dataset,rendering,list)
+ if compare then
+ sort(list,compare)
+ end
+ for i=1,#list do
+ context.setvalue("currentbtxindex",i)
+ context.btxhandlelistentry(list[i][1]) -- we can pass i here too ... more efficient to avoid the setvalue
+ end
+end
+
+function lists.fetchentries(dataset)
+ local list = renderings[dataset].list
+ for i=1,#list do
+ context.setvalue("currentbtxindex",i)
+ context.btxchecklistentry(list[i][1])
+ end
+end
+
+function lists.filterall(dataset)
+ local r = renderings[dataset]
+ local list = r.list
+ local registered = r.registered
+ for i=1,#registered do
+ list[i] = { registered[i], i }
+ end
+end
+
+function lists.registerplaced(dataset,tag)
+ renderings[dataset].used[tag] = true
+end
+
+function lists.doifalreadyplaced(dataset,tag)
+ commands.doifelse(renderings[dataset].used[tag])
+end
+
+-- we ask for <n>:tag but when we can't find it we go back
+-- to look for previous definitions, and when not found again
+-- we look forward
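+--
+-- so, roughly: first the current block ("dataset:<block>:tag"), then the
+-- blocks before it, and finally the wildcard block ("dataset:*:tag")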
+
+local function compare(a,b)
+ local aa, bb = a and a[3], b and b[3]
+ return aa and bb and aa < bb
+end
+
+-- maybe hash subsets
+-- how efficient is this? old leftovers?
+
+-- rendering ?
+
+local f_reference = formatters["r:%s:%s:%s"] -- dataset, instance, tag
+local f_destination = formatters["d:%s:%s:%s"] -- dataset, instance, tag
+
+function lists.resolve(dataset,reference) -- maybe already feed it split
+ -- needs checking (the prefix in relation to components)
+ local subsets = nil
+ local block = tex.count.btxblock
+ local collected = references.collected
+ local prefix = nil -- todo: dataset ?
+ if prefix and prefix ~= "" then
+ subsets = { collected[prefix] or collected[""] }
+ else
+ local components = references.productdata.components
+ local subset = collected[""]
+ if subset then
+ subsets = { subset }
+ else
+ subsets = { }
+ end
+ for i=1,#components do
+ local subset = collected[components[i]]
+ if subset then
+ subsets[#subsets+1] = subset
+ end
+ end
+ end
+ if #subsets > 0 then
+ local result, nofresult, done = { }, 0, { }
+ for i=1,#subsets do
+ local subset = subsets[i]
+ for rest in gmatch(reference,"[^, ]+") do
+ local blk, tag, found = block, nil, nil
+ if block then
+-- tag = blk .. ":" .. rest
+ tag = dataset .. ":" .. blk .. ":" .. rest
+ found = subset[tag]
+ if not found then
+ for i=block-1,1,-1 do
+ tag = i .. ":" .. rest
+ found = subset[tag]
+ if found then
+ blk = i
+ break
+ end
+ end
+ end
+ end
+ if not found then
+ blk = "*"
+ tag = dataset .. ":" .. blk .. ":" .. rest
+ found = subset[tag]
+ end
+ if found then
+ local current = tonumber(found.entries and found.entries.text) -- tonumber needed
+ if current and not done[current] then
+ nofresult = nofresult + 1
+ result[nofresult] = { blk, rest, current }
+ done[current] = true
+ end
+ end
+ end
+ end
+ local first, last, firsti, lasti, firstr, lastr
+ local collected, nofcollected = { }, 0
+ for i=1,nofresult do
+ local r = result[i]
+ local current = r[3]
+ if not first then
+ first, last, firsti, lasti, firstr, lastr = current, current, i, i, r, r
+ elseif current == last + 1 then
+ last, lasti, lastr = current, i, r
+ else
+ if last > first + 1 then
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = { firstr, lastr }
+ else
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = firstr
+ if last > first then
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = lastr
+ end
+ end
+ first, last, firsti, lasti, firstr, lastr = current, current, i, i, r, r
+ end
+ end
+ if first and last then
+ if last > first + 1 then
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = { firstr, lastr }
+ else
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = firstr
+ if last > first then
+ nofcollected = nofcollected + 1
+ collected[nofcollected] = lastr
+ end
+ end
+ end
+ if nofcollected > 0 then
+ for i=1,nofcollected do
+ local c = collected[i]
+ if i == nofcollected then
+ context.btxlistvariantparameter("lastpubsep")
+ elseif i > 1 then
+ context.btxlistvariantparameter("pubsep")
+ end
+ if #c == 3 then -- a single entry { block, tag, number }
+ context.btxdirectlink(f_reference(dataset,c[1],c[2]),c[3])
+ else -- a collapsed range { first, last }
+ local f, l = c[1], c[2]
+ context.btxdirectlink(f_reference(dataset,f[1],f[2]),f[3])
+ context.endash() -- to do
+ context.btxdirectlink(f_reference(dataset,l[1],l[2]),l[3])
+ end
+ end
+ else
+ context("[btx error 1]")
+ end
+ else
+ context("[btx error 2]")
+ end
+end
+
+local done = { }
+
+function commands.btxreference(dataset,block,tag,data)
+ local ref = f_reference(dataset,block,tag)
+ if not done[ref] then
+ done[ref] = true
+ context.dodirectfullreference(ref,data)
+ end
+end
+
+local done = { }
+
+function commands.btxdestination(dataset,block,tag,data)
+ local ref = f_destination(dataset,block,tag)
+ if not done[ref] then
+ done[ref] = true
+ context.dodirectfullreference(ref,data)
+ end
+end
+
+commands.btxsetlistmethod = lists.setmethod
+commands.btxresolvelistreference = lists.resolve
+commands.btxregisterlistentry = lists.registerplaced
+commands.btxaddtolist = lists.addentry
+commands.btxcollectlistentries = lists.collectentries
+commands.btxfetchlistentries = lists.fetchentries
+commands.btxflushlistentries = lists.flushentries
+commands.btxdoifelselistentryplaced = lists.doifalreadyplaced
+
+local citevariants = { }
+publications.citevariants = citevariants
+
+-- helper
+
+local compare = sorters.comparers.basic -- (a,b)
+local strip = sorters.strip
+local splitter = sorters.splitters.utf
+
+local function sortedtags(dataset,list,sorttype)
+ local luadata = datasets[dataset].luadata
+ local valid = { }
+ for i=1,#list do
+ local tag = list[i]
+ local entry = luadata[tag]
+ if entry then
+ local key = entry[sorttype]
+ if key then
+ valid[#valid+1] = {
+ tag = tag,
+ split = splitter(strip(key))
+ }
+ else
+ end
+ end
+ end
+ if #valid == 0 or #valid ~= #list then
+ return list
+ else
+ table.sort(valid,sorters.comparers.basic)
+ for i=1,#valid do
+ valid[i] = valid[i].tag
+ end
+ return valid
+ end
+end
+
+-- todo: standard : current
+
+local splitter = lpeg.splitat("::")
+
+function commands.btxhandlecite(dataset,tag,mark,variant,sorttype,setup) -- variant for tracing
+ local prefix, rest = lpegmatch(splitter,tag)
+ if rest then
+ dataset = prefix
+ else
+ rest = tag
+ end
+ context.setvalue("currentbtxdataset",dataset)
+ local tags = settings_to_array(rest)
+ if #tags > 0 then
+ if sorttype and sorttype ~= "" then
+ tags = sortedtags(dataset,tags,sorttype)
+ end
+ context.btxcitevariantparameter(v_left)
+ for i=1,#tags do
+ local tag = tags[i]
+ context.setvalue("currentbtxtag",tag)
+ if i > 1 then
+ context.btxcitevariantparameter(v_middle)
+ end
+ if mark then
+ context.dobtxmarkcitation(dataset,tag)
+ end
+ context.formatted.directsetup(setup) -- cite can become alternative
+ end
+ context.btxcitevariantparameter(v_right)
+ else
+ -- error
+ end
+end
+
+function commands.btxhandlenocite(dataset,tag)
+ local prefix, rest = lpegmatch(splitter,tag)
+ if rest then
+ dataset = prefix
+ else
+ rest = tag
+ end
+ context.setvalue("currentbtxdataset",dataset)
+ local tags = settings_to_array(rest)
+ for i=1,#tags do
+ context.dobtxmarkcitation(dataset,tags[i])
+ end
+end
+
+function commands.btxcitevariant(dataset,block,tags,variant)
+ local action = citevariants[variant] or citevariants.default
+ if action then
+ action(dataset,tags,variant)
+ end
+end
+
+function citevariants.default(dataset,tags,variant)
+ local content = getfield(dataset,tags,variant)
+ if content then
+ context(content)
+ end
+end
+
+-- todo : sort
+-- todo : choose between publications or commands namespace
+-- todo : use details.author
+-- todo : sort details.author
+
+local function collectauthoryears(dataset,tags)
+ local luadata = datasets[dataset].luadata
+ local list = settings_to_array(tags)
+ local found = { }
+ local result = { }
+ local order = { }
+ for i=1,#list do
+ local tag = list[i]
+ local entry = luadata[tag]
+ if entry then
+ local year = entry.year
+ local author = entry.author
+ if author and year then
+ local a = found[author]
+ if not a then
+ a = { }
+ found[author] = a
+ order[#order+1] = author
+ end
+ local y = a[year]
+ if not y then
+ y = { }
+ a[year] = y
+ end
+ y[#y+1] = tag
+ end
+ end
+ end
+ -- found = { author = { year_1 = { e1, e2, e3 } } }
+ for i=1,#order do
+ local author = order[i]
+ local years = found[author]
+ local yrs = { }
+ for year, entries in next, years do
+ if subyears then
+ -- -- add letters to all entries of an author and if so shouldn't
+ -- -- we tag all years of an author as soon as we do this?
+ -- if #entries > 1 then
+ -- for i=1,#years do
+ -- local entry = years[i]
+ -- -- years[i] = year .. string.char(i + string.byte("0") - 1)
+ -- end
+ -- end
+ else
+ yrs[#yrs+1] = year
+ end
+ end
+ result[i] = { author = author, years = yrs }
+ end
+ return result, order
+end
+
+-- (name, name and name) .. how names? how sorted?
+-- todo: we loop at the tex end .. why not here
+-- \cite[{hh,afo},kvm]
+
+-- maybe we will move this tex anyway
+
+function citevariants.author(dataset,tags)
+ local result, order = collectauthoryears(dataset,tags,method,what) -- we can have a collectauthors
+ publications.citeconcat(order)
+end
+
+local function authorandyear(dataset,tags,formatter)
+ local result, order = collectauthoryears(dataset,tags,method,what) -- we can have a collectauthors
+ for i=1,#result do
+ local r = result[i]
+ order[i] = formatter(r.author,r.years) -- reuse order
+ end
+ publications.citeconcat(order)
+end
+
+function citevariants.authoryear(dataset,tags)
+ authorandyear(dataset,tags,formatters["%s (%, t)"])
+end
+
+function citevariants.authoryears(dataset,tags)
+ authorandyear(dataset,tags,formatters["%s, %, t"])
+end
+
+function citevariants.authornum(dataset,tags)
+ local result, order = collectauthoryears(dataset,tags,method,what) -- we can have a collectauthors
+ publications.citeconcat(order)
+ context.btxcitevariantparameter(v_inbetween)
+ lists.resolve(dataset,tags) -- left/right ?
+end
+
+function citevariants.short(dataset,tags)
+ local short = getdetail(dataset,tags,"short")
+ if short then
+ context(short)
+ end
+end
+
+function citevariants.page(dataset,tags)
+ local pages = getdetail(dataset,tags,"pages")
+ if not pages then
+ -- nothing
+ elseif type(pages) == "table" then
+ context(pages[1])
+ context.btxcitevariantparameter(v_inbetween)
+ context(pages[2])
+ else
+ context(pages)
+ end
+end
+
+function citevariants.num(dataset,tags)
+ lists.resolve(dataset,tags)
+end
+
+function citevariants.serial(dataset,tags) -- the traditional fieldname is "serial" and not "index"
+ local index = getfield(dataset,tags,"index")
+ if index then
+ context(index)
+ end
+end
+
+-- List variants
+
+local listvariants = { }
+publications.listvariants = listvariants
+
+-- function commands.btxhandlelist(dataset,block,tag,variant,setup)
+-- if sorttype and sorttype ~= "" then
+-- tags = sortedtags(dataset,tags,sorttype)
+-- end
+-- context.setvalue("currentbtxtag",tag)
+-- context.btxlistvariantparameter(v_left)
+-- context.formatted.directsetup(setup)
+-- context.btxlistvariantparameter(v_right)
+-- end
+
+function commands.btxlistvariant(dataset,block,tags,variant,listindex)
+ local action = listvariants[variant] or listvariants.default
+ if action then
+ action(dataset,block,tags,variant,tonumber(listindex) or 0)
+ end
+end
+
+function listvariants.default(dataset,block,tags,variant)
+ context("?")
+end
+
+function listvariants.num(dataset,block,tags,variant,listindex)
+ context.btxdirectlink(f_destination(dataset,block,tags),listindex) -- not okay yet
+end
+
+function listvariants.short(dataset,block,tags,variant,listindex)
+ local short = getdetail(dataset,tags,variant,variant)
+ if short then
+ context(short)
+ end
+end
diff --git a/tex/context/base/publ-ini.mkiv b/tex/context/base/publ-ini.mkiv
new file mode 100644
index 000000000..dafb0dca3
--- /dev/null
+++ b/tex/context/base/publ-ini.mkiv
@@ -0,0 +1,849 @@
+%D \module
+%D [ file=publ-ini,
+%D version=2013.05.12,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Initialization,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% todo: we cannot use 'default' as this wipes metadata names (maybe no longer do that)
+% todo: \v!cite => \s!cite
+% todo: interface with (ml)bibtex (export -> call -> import)
+
+% \definecolor[btx:field] [darkred]
+% \definecolor[btx:crossref][darkblue]
+% \definecolor[btx:key] [darkgreen]
+% \definecolor[btx:todo] [darkyellow]
+
+%D We operate on several axes:
+%D
+%D \startitemize[packed]
+%D \startitem we can have several databases (or combinations) \stopitem
+%D \startitem we can add entries to them if needed (coded in tex) \stopitem
+%D \startitem we can have several lists each using one of the databases \stopitem
+%D \startitem we can render each list or citation independently \stopitem
+%D \stopitemize
+%D
+%D We assume that the rendering of a list entry is consistent in a document,
+%D although one can redefine properties if needed. Adding more granularity would
+%D complicate the user interface beyond comprehension.
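+%D
+%D A minimal usage sketch (the bib file name, the rendering name and the
+%D entry tag are placeholders):
+%D
+%D \starttyping
+%D \loadbtxdefinitionfile[apa]
+%D \usebtxdataset[standard][mybibs.bib]
+%D \definebtxrendering[standard][dataset=standard,alternative=apa]
+%D \starttext
+%D     some text \cite[knuth1984]
+%D     \completebtxrendering[standard]
+%D \stoptext
+%D \stoptyping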
+
+\writestatus{loading}{ConTeXt Publication Support / Initialization}
+
+\registerctxluafile{publ-dat}{1.001}
+\registerctxluafile{publ-aut}{1.001}
+\registerctxluafile{publ-usr}{1.001}
+\registerctxluafile{publ-ini}{1.001}
+\registerctxluafile{publ-oth}{1.001} % this could become an option
+
+\unprotect
+
+\installcorenamespace {btxdataset}
+\installcorenamespace {btxlistvariant}
+\installcorenamespace {btxcitevariant}
+\installcorenamespace {btxrendering}
+\installcorenamespace {btxcommand}
+\installcorenamespace {btxnumbering}
+
+\installcommandhandler \??btxdataset {btxdataset} \??btxdataset
+\installcommandhandler \??btxlistvariant {btxlistvariant} \??btxlistvariant
+\installcommandhandler \??btxcitevariant {btxcitevariant} \??btxcitevariant
+\installcommandhandler \??btxrendering {btxrendering} \??btxrendering
+
+\unexpanded\def\usebtxdataset
+ {\dodoubleargument\publ_use_dataset}
+
+\def\publ_use_dataset[#1][#2]%
+ {\ifsecondargument
+ \ctxcommand{usebtxdataset("#1","#2")}%
+ \else
+ \ctxcommand{usebtxdataset("\v!standard","#1")}%
+ \fi}
+
+\definebtxdataset
+ [\v!standard]
+
+% \usebtxdataset
+% [standard]
+% [mybibs.bib]
+
+\unexpanded\def\startpublication
+ {\dodoubleempty\publ_set_publication}
+
+\let\stoppublication\relax
+
+\def\publ_set_publication[#1][#2]%
+ {\begingroup
+ \catcode\commentasciicode\othercatcode
+ \ifsecondargument
+ \expandafter\publ_set_publication_indeed
+ \else\iffirstargument
+ \doubleexpandafter\publ_set_publication_checked
+ \else
+ \doubleexpandafter\publ_set_publication_default
+ \fi\fi{#1}{#2}}
+
+\def\publ_set_publication_default#1#2%
+ {\publ_set_publication_indeed\v!standard{#1}}
+
+\def\publ_set_publication_checked#1#2%
+ {\doifassignmentelse{#1}
+ {\publ_set_publication_indeed\v!standard{#1}}
+ {\publ_set_publication_indeed{#1}{}}}
+
+\def\publ_set_publication_indeed#1#2#3\stoppublication
+ {\ctxcommand{addbtxentry("#1",\!!bs#2\!!es,\!!bs\detokenize{#3}\!!es)}%
+ \endgroup
+ \ignorespaces}
+
+% commands
+
+\unexpanded\def\btxcommand#1%
+ {\ifcsname\??btxcommand#1\endcsname
+ \expandafter\publ_command_yes
+ \else
+ \expandafter\publ_command_nop
+ \fi{#1}}
+
+\let\btxcmd\btxcommand
+
+\def\publ_command_yes#1%
+ {\csname\??btxcommand#1\endcsname}
+
+\def\publ_command_nop#1%
+ {\ifcsname#1\endcsname
+ \writestatus\m!publications{unknown command: #1, using built-in context variant #1}%
+ %\setuxvalue{\??btxcommand#1}{\expandafter\noexpand\csname#1\endcsname}%
+ \global\expandafter\let\csname\??btxcommand#1\expandafter\endcsname\csname#1\endcsname
+ \else\ifcsname\utfupper{#1}\endcsname
+ \writestatus\m!publications{unknown command: #1, using built-in context variant \utfupper{#1}}%
+ %\setuxvalue{\??btxcommand#1}{\expandafter\noexpand\csname\utfupper{#1}\endcsname}%
+ \global\expandafter\let\csname\??btxcommand#1\expandafter\endcsname\csname\utfupper{#1}\endcsname
+ \else
+ \writestatus\m!publications{unknown command: #1}%
+ \setugvalue{\??btxcommand#1}{\underbar{\tttf#1}}%
+ \fi\fi
+ \publ_command_yes{#1}}
+
+\unexpanded\def\definebtxcommand#1% {body} #1..#n{body}
+ {\setuvalue{\??btxcommand\strippedcsname#1}}%
+
+% access
+
+\let\currentbtxtag \empty
+\let\currentbtxdataset\v!standard
+
+\unexpanded\def\setbtxdataset[#1]%
+ {\edef\currentbtxdataset{\ctxcommand{setbtxdataset("#1")}}}
+
+\unexpanded\def\setbtxentry[#1]%
+ {\edef\currentbtxtag{\ctxcommand{setbtxentry("\currentbtxdataset","#1")}}}
+
+% \let\btxsetdataset\setbtxdataset
+% \let\btxsetentry \setbtxentry
+
+\def\btxfield #1{\ctxcommand{btxfield("\currentbtxdataset","\currentbtxtag","#1")}}
+\def\btxdetail #1{\ctxcommand{btxdetail("\currentbtxdataset","\currentbtxtag","#1")}}
+\def\btxflush #1{\ctxcommand{btxflush("\currentbtxdataset","\currentbtxtag","#1")}}
+%def\btxrendering#1{\ctxcommand{btxrendering("\currentbtxdataset","\currentbtxtag","#1","\btxrenderingparameter\c!interaction")}}
+\def\btxdoifelse #1{\ctxcommand{btxdoifelse("\currentbtxdataset","\currentbtxtag","#1")}}
+\def\btxdoif #1{\ctxcommand{btxdoif("\currentbtxdataset","\currentbtxtag","#1")}}
+\def\btxdoifnot #1{\ctxcommand{btxdoifnot("\currentbtxdataset","\currentbtxtag","#1")}}
+
+\let\btxsetup \directsetup
+
+%D How complex will we go? Can we assume that e.g. an apa style will not be mixed
+%D with another one? I think this assumption is okay. For manuals we might want to
+%D mix but we can work around it.
+
+%D Rendering.
+
+\unexpanded\def\btxspace {\removeunwantedspaces\space}
+\unexpanded\def\btxperiod {\removeunwantedspaces.\space}
+\unexpanded\def\btxcomma {\removeunwantedspaces,\space}
+\unexpanded\def\btxlparent {\removeunwantedspaces\space(}
+\unexpanded\def\btxrparent {\removeunwantedspaces)\space}
+\unexpanded\def\btxlbracket{\removeunwantedspaces\space[}
+\unexpanded\def\btxrbracket{\removeunwantedspaces]\space}
+
+%D Rendering lists and citations.
+
+\newconditional\c_btx_trace
+
+\installtextracker
+ {btxrendering}
+ {\settrue \c_btx_trace}
+ {\setfalse\c_btx_trace}
+
+\unexpanded\def\startbtxrendering
+ {\begingroup
+ \dosingleempty\btx_start_rendering}
+
+\def\btx_start_rendering[#1]%
+ {\edef\currentbtxrendering{#1}}
+
+\unexpanded\def\stopbtxrendering
+ {\endgroup}
+
+\unexpanded\def\btxtodo#1%
+ {[#1]}
+
+%D Specific rendering definitions (like apa):
+
+\unexpanded\def\loadbtxdefinitionfile[#1]%
+ {\ctxcommand{loadbtxdefinitionfile("#1")}}
+
+%D Lists:
+
+\newdimen\d_publ_number_width
+\newdimen\d_publ_number_distance
+
+\ifdefined\btxblock \else \newcount\btxblock \fi \btxblock\plusone
+\ifdefined\btxcounter \else \newcount\btxcounter \fi
+
+\newtoks \everysetupbtxlistplacement % name will change
+\newtoks \everysetupbtxciteplacement % name will change
+
+\def\btxlistnumberbox
+ {\hbox \ifzeropt\d_publ_number_width\else to \d_publ_number_width\fi}
+
+% \def\publ_list_processor % bibref -> btx (old method, keep as reference)
+% {\ctxcommand{btxaddtolist("\currentbtxrendering",\currentlistindex,"btxref")}}
+
+\definelist
+ [btx]
+ [\c!before=,
+ %\c!inbetween=,
+ \c!after=]
+
+\appendtoks
+ \definelist
+ [btx:\currentbtxrendering]%
+ [btx]
+ \setuplist
+ [btx:\currentbtxrendering]%
+ [\c!state=\v!start]%
+ % \installstructurelistprocessor
+ % {\currentbtxrendering:userdata}%
+ % {\publ_list_processor}%
+\to \everydefinebtxrendering
+
+\unexpanded\def\btx_entry_inject
+ {\begingroup
+ \edef\currentbtxcategory{\btxfield{category}}%
+ \ignorespaces
+ \directsetup{btx:\currentbtxalternative:\currentbtxcategory}%
+ \removeunwantedspaces
+ \endgroup}
+
+\unexpanded\def\completebtxrendering{\dodoubleempty\publ_place_list_complete}
+\unexpanded\def\placebtxrendering {\dodoubleempty\publ_place_list_standard}
+
+\let\completelistofpublications\completebtxrendering
+\let\placelistofpublications \placebtxrendering
+
+\def\publ_place_list_check_criterium
+ {\edef\p_criterium{\btxrenderingparameter\c!criterium} % \v!cite will become \s!cite
+ \ifx\p_criterium\empty
+ \letlistparameter\c!criterium\v!previous
+ \else\ifx\p_criterium\v_cite
+ \letlistparameter\c!criterium\v!here
+ \else
+ \letlistparameter\c!criterium\v_cite
+ \fi\fi}
+
+\def\publ_place_list_complete[#1][#2]% title might become obsolete, just headtext
+ {\begingroup
+ \edef\currentbtxrendering{#1}%
+ \setupcurrentbtxrendering[#2]%
+ \edef\currentlist{btx:\currentbtxrendering}%
+ \publ_place_list_check_criterium
+ \edef\currentbtxrenderingtitle{\btxrenderingparameter\c!title}%
+ \ifx\currentbtxrenderingtitle\empty
+ \normalexpanded{\startnamedsection[\v!chapter][\c!reference=\currentbtxrendering,\c!title={\headtext{\currentbtxrendering}}]}%
+ \else
+ \normalexpanded{\startnamedsection[\v!chapter][\c!reference=\currentbtxrendering,\c!title={\currentbtxrenderingtitle}]}%
+ \fi
+ \publ_place_list_indeed
+ \stopnamedsection
+ \endgroup}
+
+\def\publ_place_list_standard[#1][#2]%
+ {\begingroup
+ \edef\currentbtxrendering{#1}%
+ \setupcurrentbtxrendering[#2]%
+ \edef\currentlist{btx:\currentbtxrendering}%
+ \publ_place_list_check_criterium
+ \publ_place_list_indeed
+ \endgroup}
+
+\newconditional\c_publ_place_all
+\newconditional\c_publ_place_register % to be interfaced
+\newconditional\c_publ_place_check % to be interfaced
+
+\appendtoks
+ \ifx\currentbtxcriterium\v!all % move this check to lua ... easier to test there anyway
+ \settrue\c_publ_place_all
+ \else
+ \setfalse\c_publ_place_all
+ \fi
+\to \everysetupbtxlistplacement
+
+\def\publ_place_list_indeed
+ {\startbtxrendering[\currentbtxrendering]%
+ \directsetup{\btxrenderingparameter\c!setups}%
+ % \determinelistcharacteristics[\currentbtxrendering]%
+ \edef\currentbtxalternative{\btxrenderingparameter\c!alternative}%
+ \edef\currentbtxdataset{\btxrenderingparameter\c!dataset}%
+ \edef\currentlist{btx:\currentbtxrendering}%
+ \the\everysetupbtxlistplacement
+ \forgetall
+ \ctxcommand{btxsetlistmethod("\currentbtxdataset","\btxrenderingparameter\c!method")}%
+ \startpacked[\v!blank]%
+ % here we just collect items
+ % \strc_lists_analyze
+ % {btx}%
+ % {\currentbtxcriterium}%
+ % {\namedlistparameter\currentbtxrendering\c!number}%
+ \ctxcommand{btxcollectlistentries {
+ names = "btx",
+ criterium = "\currentbtxcriterium",
+ number = "\namedlistparameter\currentbtxrendering\c!number",
+ btxdataset = "\currentbtxdataset",
+ }}%
+ % next we analyze the width
+ \ifx\btx_reference_inject_indeed\relax
+ \d_publ_number_width \zeropoint
+ \d_publ_number_distance\zeropoint
+ \else
+ \edef\p_width{\btxrenderingparameter\c!width}%
+ \ifx\p_width\v!auto
+ \scratchcounter\btxcounter
+ \setbox\scratchbox\vbox{\ctxcommand{btxfetchlistentries("\currentbtxdataset")}}%
+ \d_publ_number_width\wd\scratchbox
+ \global\btxcounter\scratchcounter
+ \else
+ \d_publ_number_width\p_width
+ \fi
+ \d_publ_number_distance\btxrenderingparameter\c!distance
+ \fi
+ \letlistparameter\c!width \d_publ_number_width
+ \letlistparameter\c!distance\d_publ_number_distance
+ % this actually typesets them
+ \letlistparameter\c!interaction\v!none
+ \ctxcommand{btxflushlistentries("\currentbtxdataset","\btxrenderingparameter\c!sorttype")}%
+ \stoppacked
+ \stopbtxrendering
+ \global\advance\btxblock\plusone}
+
+\def\currentbtxblock{\number\btxblock}
+
+\def\publ_place_list_entry_checked
+ {\ctxcommand{btxdoifelselistentryplaced("\currentbtxdataset","\currentbtxtag")}\donothing\publ_place_list_entry}
+
+\def\publ_place_list_entry_register
+ {\ctxcommand{btxregisterlistentry("\currentbtxdataset","\currentbtxtag")}}
+
+\unexpanded\def\btxhandlelistentry#1% called at the lua end
+ {\begingroup
+ \edef\currentbtxtag{#1}%
+ \ifconditional\c_publ_place_all
+ \publ_place_list_entry
+ \else\ifconditional\c_publ_place_check
+ \publ_place_list_entry_checked
+ \else
+ \publ_place_list_entry
+ \fi\fi
+ \endgroup}
+
+\unexpanded\def\publ_place_list_entry
+ {%\dontleavehmode
+ %\begstrut
+ \global\advance\btxcounter\plusone
+ \ifconditional\c_publ_place_register
+ \publ_place_list_entry_register
+ \fi
+ \edef\currentlist {btx:\currentbtxrendering}%
+ \let\currentlistentrynumber \btx_reference_inject
+ \let\currentlistentrytitle \btx_entry_inject
+ \let\currentlistentrypagenumber\empty
+ \strc_lists_apply_renderingsetup
+ }%\endstrut}
+
+\unexpanded\def\btxchecklistentry#1% called at the lua end
+ {\begingroup
+ \edef\currentbtxtag{#1}%
+ \ifx\currentbtxcriterium\v!all % move this check to lua ... easier to test there anyway
+ \publ_check_list_entry
+ \else
+ \ctxcommand{btxdoifelselistentryplaced("\currentbtxdataset","\currentbtxtag")}\donothing\publ_check_list_entry
+ \fi
+ \endgroup}
+
+\unexpanded\def\publ_check_list_entry
+ {\global\advance\btxcounter\plusone
+ % todo, switch to font
+ \hbox{\btx_reference_inject}%
+ \par}
+
+\unexpanded\def\btx_reference_inject % we can use a faster \reference
+ {\btxlistnumberbox\bgroup
+% \btxlistvariantparameter\c!left
+% {\tttf d>\currentbtxdataset:\currentbtxblock:\currentbtxtag}%
+ \ctxcommand{btxdestination("\currentbtxdataset","\currentbtxblock","\currentbtxtag","\number\btxcounter")}%
+ \btx_reference_inject_indeed
+% \btxlistvariantparameter\c!right
+ \egroup}
+
+\setuvalue{\??btxnumbering\v!short }{\btxlistvariant{short}} % these will be setups
+\setuvalue{\??btxnumbering\v!bib }{\btxlistvariant{num}} % these will be setups
+\setuvalue{\??btxnumbering\s!unknown}{\btxlistvariant{num}} % these will be setups
+\setuvalue{\??btxnumbering\v!yes }{\btxlistvariant{num}} % these will be setups
+
+\appendtoks
+ \edef\p_btx_numbering{\btxrenderingparameter\c!numbering}%
+ \letlistparameter\c!numbercommand\firstofoneargument % for the moment, no doubling needed
+ \ifx\p_btx_numbering\v!no
+ \letlistparameter\c!textcommand\outdented % needed? we can use titlealign
+ \letlistparameter\c!symbol \v!none
+ \letlistparameter\c!aligntitle \v!yes
+ \let\btx_reference_inject_indeed\relax
+ \else
+ \ifcsname\??btxnumbering\p_btx_numbering\endcsname \else
+ \let\p_btx_numbering\s!unknown
+ \fi
+ \letlistparameter\c!headnumber\v!always
+ \expandafter\let\expandafter\btx_reference_inject_indeed\csname\??btxnumbering\p_btx_numbering\endcsname
+ \fi
+\to \everysetupbtxlistplacement
+
+\appendtoks
+ \edef\currentbtxcriterium{\btxrenderingparameter\c!criterium}%
+\to \everysetupbtxlistplacement
+
+\unexpanded\def\btxflushauthor
+ {\doifnextoptionalelse\btx_flush_author_yes\btx_flush_author_nop}
+
+\def\btx_flush_author_yes[#1]{\btx_flush_author{#1}}
+\def\btx_flush_author_nop {\btx_flush_author{\btxlistvariantparameter\c!author}}
+
+\unexpanded\def\btx_flush_author#1#2%
+ {\edef\currentbtxfield{#2}%
+ \let\currentbtxlistvariant\currentbtxfield
+ \ctxcommand{btxauthor("\currentbtxdataset","\currentbtxtag","\currentbtxfield",{
+ combiner = "#1",
+ etallimit = \number\btxlistvariantparameter\c!etallimit,
+ etaldisplay = \number\btxlistvariantparameter\c!etaldisplay,
+ })}}
+
+\unexpanded\def\btxflushauthornormal {\btx_flush_author{normal}} % #1
+\unexpanded\def\btxflushauthornormalshort {\btx_flush_author{normalshort}} % #1
+\unexpanded\def\btxflushauthorinverted {\btx_flush_author{inverted}} % #1
+\unexpanded\def\btxflushauthorinvertedshort{\btx_flush_author{invertedshort}} % #1
+
+% \btxflushauthor{author}
+% \btxflushauthor{artauthor}
+% \btxflushauthor{editor}
+%
+% \btxflushauthor[normal]{author}
+% \btxflushauthor[normalshort]{author}
+% \btxflushauthor[inverted]{author}
+% \btxflushauthor[invertedshort]{author}
+
+% Interaction
+
+\newconditional\btxinteractive
+
+\unexpanded\def\btxdoifelseinteraction
+ {\iflocation
+ \edef\p_interaction{\btxcitevariantparameter\c!interaction}%
+ \ifx\p_interaction\v!stop
+ \doubleexpandafter\secondoftwoarguments
+ \else
+ \doubleexpandafter\firstoftwoarguments
+ \fi
+ \else
+ \expandafter\secondoftwoarguments
+ \fi}
+
+\appendtoks
+ \iflocation
+ \edef\p_interaction{\btxlistvariantparameter\c!interaction}%
+ \ifx\p_interaction\v!stop
+ \let\doifelsebtxinteractionelse\secondoftwoarguments
+ \setfalse\btxinteractive
+ \else
+ \let\doifelsebtxinteractionelse\firstoftwoarguments
+ \settrue\btxinteractive
+ \fi
+ \else
+ \let\doifelsebtxinteractionelse\secondoftwoarguments
+ \setfalse\btxinteractive
+ \fi
+\to \everysetupbtxlistplacement
+
+% bib -> btx
+
+\unexpanded\def\btxgotolink#1[#2]{\doifreferencefoundelse{\bibrefprefix#2}{\goto{#1}[\bibrefprefix#2]}{#1}}
+\unexpanded\def\btxatlink [#1]{\doifreferencefoundelse{\bibrefprefix#1}{\at [\bibrefprefix#1]}{#1}}
+\unexpanded\def\btxinlink [#1]{\doifreferencefoundelse{\bibrefprefix#1}{\expanded{\goto{\currentreferencetext}}[\bibrefprefix#1]}{#1}}
+
+%\unexpanded\def\btxdirectlink#1#2{\goto{#2 {\tttf[#1]}}[#1]} % tracing variant, superseded by the next definition
+\unexpanded\def\btxdirectlink#1#2{\goto{#2}[#1]}
+
+\let\gotobiblink\btxgotolink
+\let\atbiblink \btxatlink
+\let\inbiblink \btxinlink
+
+\unexpanded\def\btxnumberedreference[#1]% \bibtexnumref (replaced by \cite[num])
+ {\dontleavehmode
+ \begingroup
+ \btxcitevariantparameter\v!left
+ \penalty\plustenthousand % todo
+ \ctxcommand{btxresolvelistreference("\currentbtxdataset","#1")}% todo: split dataset from #1, so another call
+ \btxcitevariantparameter\v!right
+ \endgroup}
+
+% \def\btxnumberedplaceholder[#1]% \nobibtexnumref
+% {[#1]}
+
+\appendtoks
+ % for old times' sake, for a while at least
+ \let\maybeyear\gobbleoneargument
+ \let\noopsort \gobbleoneargument
+\to \everysetupbtxlistplacement
+
+\appendtoks
+ % for old times' sake, for a while at least
+ \let\maybeyear\gobbleoneargument
+ \let\noopsort \gobbleoneargument
+\to \everysetupbtxciteplacement
+
+\appendtoks
+ \doifnot{\btxlistvariantparameter\c!option}\v!continue
+ {\global\btxcounter\zerocount}%
+\to \everysetupbtxlistplacement
+
+%D When a publication is cited, we need to signal that somehow. This is done with the
+%D following (non-user) command. We could tag without injecting a node, but this way
+%D we also store the location, which makes it possible to ask for local lists.
+
+\newconditional\c_publ_cite_write
+
+% kept for reference, but the split is now done at the lua end
+%
+% \def\publ_cite_write#1% not used
+% {\splitstring#1\at::\to\askedbtxrendering\and\askedbtxtag
+% \ifx\askedbtxtag\empty
+% \let\currentbtxtag \askedbtxrendering
+% \else
+% \let\currentbtxtag \askedbtxtag
+% \let\currentbtxrendering\askedbtxrendering
+% \fi
+% \iftrialtypesetting \else
+% \processcommacommand[\currentbtxtag]{\publ_cite_indeed\currentbtxrendering}%
+% \fi}
+
+\def\publ_cite_indeed#1#2%
+ {\expanded{\writedatatolist[btx][btxset=#1,btxref=#2]}}
+
+\def\dobtxmarkcitation#1#2% called from the lua end
+ {\iftrialtypesetting \else
+ \writedatatolist[btx][btxset=#1,btxref=#2]% \c!location=\v!here
+ \fi}
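+
+% A hedged illustration of the signal that such a citation writes to the list
+% data; the dataset name and tag below are hypothetical:
+%
+% \writedatatolist[btx][btxset=standard,btxref=knuth1984]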
+
+%D \macros{cite,nocite,citation,nocitation,usecitation}
+%D
+%D The inline \type {\cite} command creates an (often short) reference to a publication
+%D and for historical reasons uses a strict test for brackets. This means, at least
+%D in the default case, that spaces are ignored in the argument scanner. The \type
+%D {\citation} command is more liberal but also gobbles following spaces. Both
+%D commands insert a reference as well as a visual clue.
+%D
+%D The \type {no} commands all do the same (they are synonyms): they make sure that
+%D a reference is injected but show nothing. However, they do create a node, so it is
+%D best to attach them to some text in order to avoid spacing interference. A slightly
+%D less efficient alternative is \type {\cite[none][tag]}.
+
+% [tags]
+% [settings|variant][tags]
+% [base::tags]
+% [settings|variant][base::tags]
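+
+% A hedged usage sketch; the tags (knuth1984, hh2010) and the dataset name are
+% hypothetical and only illustrate the bracket forms listed above:
+%
+% \cite[knuth1984] % default variant
+% \cite[authoryear][knuth1984] % explicit variant
+% \cite[alternative=num,compress=yes][knuth1984,hh2010] % settings plus tags
+% \citation[num][standard::hh2010] % dataset prefix in the tag
+% \nocite[knuth1984] % registers the entry, shows nothing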
+
+\unexpanded\def\btxcite
+ {\dontleavehmode
+ \begingroup
+ \strictdoifnextoptionalelse\publ_cite_tags_options\publ_cite_tags_indeed}
+
+\unexpanded\def\publ_cite_tags_indeed#1%
+ {\letinteractionparameter\c!style\empty
+ \edef\currentbtxcitevariant{\btxcitevariantparameter\c!alternative}%
+ \edef\currentbtxcitetag{#1}%
+ \publ_cite_variant
+ \endgroup}
+
+\let\publ_citation_tags_indeed\publ_cite_tags_indeed
+
+\unexpanded\def\publ_cite_tags_options[#1]%
+ {\strictdoifnextoptionalelse{\publ_cite_tags_options_indeed{#1}}{\publ_cite_tags_indeed{#1}}}
+
+\unexpanded\def\publ_cite_tags_options_indeed#1[#2]%
+ {\edef\currentbtxcitetag{#2}%
+ \doifassignmentelse{#1}
+ {\publ_cite_tags_settings_indeed{#1}}
+ {\publ_cite_tags_variants_indeed{#1}}}
+
+\def\publ_cite_tags_settings_indeed#1%
+ {\letinteractionparameter\c!style\empty
+ %\letinteractionparameter\c!color\empty
+ \getdummyparameters[\c!alternative=,\c!extras=,#1]%
+ \edef\p_alternative{\dummyparameter\c!alternative}%
+ \ifx\p_alternative\empty \else
+ \let\currentbtxcitevariant\p_alternative
+ \fi
+ \setupcurrentbtxcitevariantparameters[#1]%
+ \edef\p_extras{\dummyparameter\c!extras}%
+ \ifx\p_extras\empty \else
+ \edef\p_right{\btxcitevariantparameter\c!right}%
+ \ifx\p_right\empty \else
+ \setexpandedbtxcitevariantparameter\c!right{\p_extras\p_right}%
+ \fi
+ \fi
+ \publ_cite_variant
+ \endgroup}
+
+\def\publ_cite_tags_variants_indeed#1%
+ {\letinteractionparameter\c!style\empty
+ \edef\currentbtxcitevariant{#1}%
+ \publ_cite_variant
+ \endgroup}
+
+\newconditional\btxcitecompress
+
+\def\publ_cite_variant
+ {\edef\p_compress{\btxcitevariantparameter\c!compress}%
+ % \ifx\p_compress\v!no
+ % \setfalse\btxcitecompress
+ % \else
+ % \settrue\btxcitecompress
+ % \fi
+ \begingroup
+ \settrue\c_publ_cite_write
+ \publ_cite_handle_variant_indeed[\currentbtxcitetag]}
+
+\unexpanded\def\publ_cite_handle_variant#1%
+ {\begingroup
+ \the\everysetupbtxciteplacement
+ \edef\currentbtxcitevariant{#1}%
+ \dosingleargument\publ_cite_handle_variant_indeed}
+
+\def\publ_cite_handle_variant_indeed[#1]%
+ {\usebtxcitevariantstyleandcolor\c!style\c!color
+ \letbtxcitevariantparameter\c!alternative\currentbtxcitevariant
+ \ctxcommand{btxhandlecite(%
+ "\currentbtxdataset",%
+ "#1",%
+ \iftrialtypesetting false\else true\fi,%
+ "\currentbtxcitevariant",%
+ "\btxcitevariantparameter\c!sorttype",%
+ "\btxcitevariantparameter\c!setups"%
+ )}%
+ \endgroup}
+
+\unexpanded\def\btxcitation
+ {\dontleavehmode
+ \begingroup
+ \dodoubleempty\publ_citation}
+
+\def\publ_citation[#1][#2]% could be made more efficient but not now
+ {\ifsecondargument
+ \publ_cite_tags_options_indeed{#1}[#2]%
+ \else
+ \publ_cite_tags_indeed{#1}%
+ \fi}
+
+\unexpanded\def\btxnocite
+ {\dosingleempty\publ_cite_no}
+
+\unexpanded\def\publ_cite_no[#1]%
+ {\iftrialtypesetting \else
+ \ctxcommand{btxhandlenocite("\currentbtxdataset","#1")}%
+ \fi}
+
+%D Compatibility:
+
+\let\cite \btxcite
+\let\citation \btxcitation
+\let\nocite \btxnocite
+\let\nocitation \btxnocite
+\let\usepublication\btxnocite
+
+%D Cite: helpers
+
+\unexpanded\def\btxcitevariant#1%
+ {\ctxcommand{btxcitevariant("\currentbtxdataset","\currentbtxblock","\currentbtxtag","#1")}}
+
+%D List: helpers
+
+\def\currentbtxindex{0}
+
+\unexpanded\def\btxlistvariant#1%
+ {\ctxcommand{btxlistvariant("\currentbtxdataset","\currentbtxblock","\currentbtxtag","#1","\currentbtxindex")}} % some can go
+
+%D Loading variants:
+
+\appendtoks
+ \loadbtxdefinitionfile[\btxrenderingparameter\c!alternative]
+\to \everysetupbtxrendering
+
+%D Defaults:
+
+\setupbtxrendering
+ [\c!dataset=\v!standard,
+ \c!method=\v!global,
+ \c!setups=btx:rendering:\btxrenderingparameter\c!alternative,
+ \c!alternative=apa,
+ \c!sorttype=,
+ \c!criterium=,
+ \c!refcommand=authoryears, % todo
+ \c!numbering=\v!yes,
+% \c!autohang=\v!no,
+ \c!width=\v!auto,
+ \c!distance=1.5\emwidth]
+
+\definebtxrendering
+ [\v!standard]
+
+\setupbtxcitevariant
+ [\c!interaction=\v!start,
+ \c!setups=btx:cite:\btxcitevariantparameter\c!alternative,
+ \c!alternative=num,
+ \c!andtext={ and },
+ \c!otherstext={ et al.},
+ \c!pubsep={, },
+ \c!lastpubsep={ and },
+ \c!compress=\v!no,
+ \c!inbetween={ },
+ \c!left=,
+ \c!right=]
+
+\definebtxcitevariant
+ [author]
+ [%c!sorttype=,
+ \c!left={(},
+ \c!middle={, },
+ \c!right={)}]
+
+\definebtxcitevariant
+ [authoryear]
+ [\c!compress=\v!yes,
+ \c!inbetween={, },
+ \c!left={(},
+ \c!middle={, },
+ \c!right={)}]
+
+\definebtxcitevariant
+ [authoryears]
+ [authoryear]
+
+\definebtxcitevariant
+ [authornum]
+ [author]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [year]
+ [\c!left={(},
+ \c!right={)}]
+
+\definebtxcitevariant
+ [key]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [serial]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant % note: this [page] variant is redefined further down
+ [page]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [short]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [type]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [doi]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [url]
+ [\c!left={[},
+ \c!right={]}]
+
+\definebtxcitevariant
+ [page]
+ [\c!left=,
+ \c!right=,
+ \c!inbetween=\endash]
+
+\definebtxcitevariant
+ [num]
+ [\c!compress=\v!yes,
+ \c!inbetween={--},
+ \c!left={[},
+ \c!right={]}]
+
+% \c!artauthor=invertedshort % todo
+% \c!editor =invertedshort % todo
+% \c!author =invertedshort % todo
+
+\setupbtxlistvariant
+ [\c!namesep={, },
+ \c!lastnamesep={ and },
+ \c!finalnamesep={ and },
+ \c!firstnamesep={ },
+ \c!juniorsep={ },
+ \c!vonsep={ },
+ \c!surnamesep={, },
+ \c!etallimit=5,
+ \c!etaldisplay=5,
+ \c!etaltext={ et al.},
+ \c!monthconversion=\v!number,
+ \c!authorconversion=\v!normal]
+
+\definebtxlistvariant
+ [author]
+
+\definebtxlistvariant
+ [editor]
+ [author]
+
+\definebtxlistvariant
+ [artauthor]
+ [author]
+
+% Do we want these in the format? Loading them in a delayed way is somewhat messy.
+
+\loadbtxdefinitionfile[apa]
+\loadbtxdefinitionfile[cite]
+\loadbtxdefinitionfile[commands]
+\loadbtxdefinitionfile[definitions]
+
+\protect
diff --git a/tex/context/base/publ-oth.lua b/tex/context/base/publ-oth.lua
new file mode 100644
index 000000000..14da19f9c
--- /dev/null
+++ b/tex/context/base/publ-oth.lua
@@ -0,0 +1,146 @@
+if not modules then modules = { } end modules ['publ-oth'] = {
+ version = 1.001,
+ comment = "this module part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local P, S, C, Ct, Cf, Cg, Cmt, Carg = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.Cmt, lpeg.Carg
+local lpegmatch = lpeg.match
+
+local p_endofline = lpeg.patterns.newline
+
+local loaders = publications.loaders
+local getindex = publications.getindex
+
+local function addfield(t,k,v,fields)
+ k = fields[k]
+ if k then
+ local tk = t[k]
+ if tk then
+ t[k] = tk .. " and " .. v
+ else
+ t[k] = v
+ end
+ end
+ return t
+end
+
+local function checkfield(_,_,t,categories,all)
+ local tag = t.tag
+ if tag then
+ local category = t.category
+ t.tag = nil
+ t.category = categories[category] or category
+ all[tag] = t
+ end
+ return true
+end
+
+-- endnotes --
+
+local fields = {
+ ["@"] = "tag",
+ ["0"] = "category",
+ ["A"] = "author",
+ ["E"] = "editor",
+ ["T"] = "title",
+ ["D"] = "year",
+ ["I"] = "publisher",
+}
+
+local categories = {
+ ["Journal Article"] = "article",
+}
+
+local entry = P("%") * Cg(C(1) * (S(" \t")^1) * C((1-p_endofline)^0) * Carg(1)) * p_endofline
+local record = Cf(Ct("") * (entry^1), addfield)
+local records = (Cmt(record * Carg(2) * Carg(3), checkfield) * P(1))^1
+
+function publications.endnotes_to_btx(data)
+ local all = { }
+ lpegmatch(records,data,1,fields,categories,all)
+ return all
+end
+
+function loaders.endnote(dataset,filename)
+ -- we could fold the next step into checkfield, but let's not make the code too messy
+ loaders.lua(dataset,publications.endnotes_to_btx(io.loaddata(filename) or ""))
+end
+
+-- refman --
+
+local entry = Cg(C((1-lpeg.S(" \t")-p_endofline)^1) * (S(" \t-")^1) * C((1-p_endofline)^0) * Carg(1)) * p_endofline
+local record = Cf(Ct("") * (entry^1), addfield)
+local records = (Cmt(record * Carg(2) * Carg(3), checkfield) * P(1))^1
+
+local fields = {
+ ["SN"] = "tag",
+ ["TY"] = "category",
+ ["A1"] = "author",
+ ["E1"] = "editor",
+ ["T1"] = "title",
+ ["Y1"] = "year",
+ ["PB"] = "publisher",
+}
+
+local categories = {
+ ["JOUR"] = "article",
+}
+
+function publications.refman_to_btx(data)
+ local all = { }
+ lpegmatch(records,data,1,fields,categories,all)
+ return all
+end
+
+function loaders.refman(dataset,filename)
+ -- we could fold the next step into checkfield, but let's not make the code too messy
+ loaders.lua(dataset,publications.refman_to_btx(io.loaddata(filename) or ""))
+end
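+
+-- A hedged sketch of calling these loaders directly (the dataset and filenames
+-- are hypothetical; the dataset argument is passed on to loaders.lua as-is):
+--
+-- loaders.endnote(dataset, "references.enw")
+-- loaders.refman (dataset, "references.ris")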
+
+-- test --
+
+-- local endnote = [[
+-- %0 Journal Article
+-- %T Scientific Visualization, Overviews, Methodologies, and Techniques
+-- %A Nielson, Gregory M
+-- %A Hagen, Hans
+-- %A Müller, Heinrich
+-- %@ 0818677776
+-- %D 1994
+-- %I IEEE Computer Society
+--
+-- %0 Journal Article
+-- %T Scientific Visualization, Overviews, Methodologies, and Techniques
+-- %A Nielson, Gregory M
+-- %A Hagen, Hans
+-- %A Müller, Heinrich
+-- %@ 0818677775
+-- %D 1994
+-- %I IEEE Computer Society
+-- ]]
+--
+-- local refman = [[
+-- TY - JOUR
+-- T1 - Scientific Visualization, Overviews, Methodologies, and Techniques
+-- A1 - Nielson, Gregory M
+-- A1 - Hagen, Hans
+-- A1 - Müller, Heinrich
+-- SN - 0818677776
+-- Y1 - 1994
+-- PB - IEEE Computer Society
+--
+-- TY - JOUR
+-- T1 - Scientific Visualization, Overviews, Methodologies, and Techniques
+-- A1 - Nielson, Gregory M
+-- A1 - Hagen, Hans
+-- A1 - Müller, Heinrich
+-- SN - 0818677775
+-- Y1 - 1994
+-- PB - IEEE Computer Society
+-- ]]
+--
+-- inspect(publications.endnotes_to_btx(endnote))
+-- inspect(publications.refman_to_btx(refman))
diff --git a/tex/context/base/publ-tra.lua b/tex/context/base/publ-tra.lua
new file mode 100644
index 000000000..708795727
--- /dev/null
+++ b/tex/context/base/publ-tra.lua
@@ -0,0 +1,130 @@
+if not modules then modules = { } end modules ['publ-tra'] = {
+ version = 1.001,
+ comment = "this module part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local sortedhash = table.sortedhash
+
+local tracers = { }
+publications.tracers = tracers
+
+local NC, NR, bold = context.NC, context.NR, context.bold
+
+publications.tracers.fields = table.sorted {
+ "abstract",
+ "address",
+ "annotate",
+ "author",
+ "booktitle",
+ "chapter",
+ "comment",
+ "country",
+ "doi",
+ "edition",
+ "editor",
+ "eprint",
+ "howpublished",
+ "institution",
+ "isbn",
+ "issn",
+ "journal",
+ "key",
+ "keyword",
+ "keywords",
+ "language",
+ "lastchecked",
+ "month",
+ "names",
+ "note",
+ "notes",
+ "number",
+ "organization",
+ "pages",
+ "publisher",
+ "school",
+ "series",
+ "size",
+ "title",
+ "type",
+ "url",
+ "volume",
+ "year",
+ "nationality",
+ "assignee",
+ "bibnumber",
+ "day",
+ "dayfiled",
+ "monthfiled",
+ "yearfiled",
+ "revision",
+}
+
+publications.tracers.citevariants = table.sorted {
+ "author",
+ "authoryear",
+ "authoryears",
+ "authornum",
+ "year",
+ "short",
+ "serial",
+ "key",
+ "doi",
+ "url",
+ "type",
+ "page",
+ "none",
+ "num",
+}
+
+publications.tracers.listvariants = table.sorted {
+ "author",
+ "editor",
+ "artauthor",
+}
+
+publications.tracers.categories = table.sorted {
+ "article",
+ "book",
+ "booklet",
+ "conference",
+ "inbook",
+ "incollection",
+ "inproceedings",
+ "manual",
+ "mastersthesis",
+ "misc",
+ "phdthesis",
+ "proceedings",
+ "techreport",
+ "unpublished",
+}
+
+function tracers.showdatasetfields(name)
+ if name and name ~= "" then
+ local luadata = publications.datasets[name].luadata
+ if next(luadata) then
+ context.starttabulate { "|lT|lT|pT|" }
+ NC() bold("tag")
+ NC() bold("category")
+ NC() bold("fields")
+ NC() NR() context.FL() -- HL()
+ for k, v in sortedhash(luadata) do
+ NC() context(k)
+ NC() context(v.category)
+ NC()
+ for k, v in sortedhash(v) do
+ if k ~= "details" and k ~= "tag" and k ~= "category" then
+ context("%s ",k)
+ end
+ end
+ NC() NR()
+ end
+ context.stoptabulate()
+ end
+ end
+end
+
+commands.showbtxdatasetfields = tracers.showdatasetfields
diff --git a/tex/context/base/publ-tra.mkiv b/tex/context/base/publ-tra.mkiv
new file mode 100644
index 000000000..70db634fe
--- /dev/null
+++ b/tex/context/base/publ-tra.mkiv
@@ -0,0 +1,26 @@
+%D \module
+%D [ file=publ-tra,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=Tracing,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Publication Support / Tracing}
+
+\registerctxluafile{publ-tra}{1.001}
+
+\unprotect
+
+\unexpanded\def\showbtxdatasetfields
+ {\dosingleempty\publ_dataset_show_fields}
+
+\def\publ_dataset_show_fields[#1]%
+ {\ctxcommand{showbtxdatasetfields("\iffirstargument#1\else\currentbtxdataset\fi")}}
+
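+% A usage sketch; the dataset name below is hypothetical, and the command falls
+% back to the current dataset when the optional argument is left out:
+%
+% \showbtxdatasetfields
+% \showbtxdatasetfields[standard]
+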
+\protect \endinput
diff --git a/tex/context/base/publ-usr.lua b/tex/context/base/publ-usr.lua
new file mode 100644
index 000000000..6bb93ebee
--- /dev/null
+++ b/tex/context/base/publ-usr.lua
@@ -0,0 +1,91 @@
+if not modules then modules = { } end modules ['publ-usr'] = {
+ version = 1.001,
+ comment = "this module part of publication support",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- local chardata = characters.data
+
+-- local str = [[
+-- \startpublication[k=Berdnikov:TB21-2-129,t=article,a={{Berdnikov},{}},y=2000,n=2257,s=BHHJ00]
+-- \artauthor[]{Alexander}[A.]{}{Berdnikov}
+-- \artauthor[]{Hans}[H.]{}{Hagen}
+-- \artauthor[]{Taco}[T.]{}{Hoekwater}
+-- \artauthor[]{Bogus{\l}aw}[B.]{}{Jackowski}
+-- \pubyear{2000}
+-- \arttitle{{Even more MetaFun with \MP: A request for permission}}
+-- \journal{TUGboat}
+-- \issn{0896-3207}
+-- \volume{21}
+-- \issue{2}
+-- \pages{129--130}
+-- \month{6}
+-- \stoppublication
+-- ]]
+
+local remapped = {
+ artauthor = "author",
+ arttitle = "title",
+}
+
+local P, Cs, R, Cc, Carg = lpeg.P, lpeg.Cs, lpeg.R, lpeg.Cc, lpeg.Carg
+
+local function register(target,key,a,b,c,d,e)
+ key = remapped[key] or key
+ if b and d and e then
+ local s = nil
+ if b ~= "" and b then
+ s = s and s .. " " .. b or b
+ end
+ if d ~= "" and d then
+ s = s and s .. " " .. d or d
+ end
+ if e ~= "" and e then
+ s = s and s .. " " .. e or e
+ end
+ if a ~= "" and a then
+ s = s and s .. " " .. a or a
+ end
+ local value = target[key]
+ if s then
+ if value then
+ target[key] = value .. " and " .. s
+ else
+ target[key] = s
+ end
+ else
+ if not value then
+ target[key] = s
+ end
+ end
+ else
+ target[key] = b
+ end
+end
+
+local leftbrace = P("{")
+local rightbrace = P("}")
+local leftbracket = P("[")
+local rightbracket = P("]")
+
+local key = P("\\") * Cs(R("az","AZ")^1) * lpeg.patterns.space^0
+local mandate = leftbrace * Cs(lpeg.patterns.balanced) * rightbrace + Cc(false)
+local optional = leftbracket * Cs((1-rightbracket)^0) * rightbracket + Cc(false)
+local value = optional^-1 * mandate^-1 * optional^-1 * mandate^-2
+
+local pattern = ((Carg(1) * key * value) / register + P(1))^0
+
+function publications.addtexentry(dataset,settings,content)
+ settings = utilities.parsers.settings_to_hash(settings)
+ local data = {
+ tag = settings.tag or settings.k or "no tag",
+ category = settings.category or settings.t or "article",
+ }
+ lpeg.match(pattern,content,1,data) -- can set tag too
+ dataset.userdata[data.tag] = data
+ dataset.luadata[data.tag] = data
+ publications.markasupdated(dataset)
+ return data
+end
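+
+-- A hedged usage sketch (dataset, settings and content are hypothetical; the
+-- real call normally comes from a \startpublication ... \stoppublication block
+-- at the TeX end):
+--
+-- local dataset = publications.datasets["standard"]
+-- publications.addtexentry(dataset,
+--     "k=Berdnikov:TB21-2-129,t=article",
+--     [[\artauthor[]{Alexander}[A.]{}{Berdnikov}\pubyear{2000}]])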
diff --git a/tex/context/base/publ-usr.mkiv b/tex/context/base/publ-usr.mkiv
new file mode 100644
index 000000000..cb078f424
--- /dev/null
+++ b/tex/context/base/publ-usr.mkiv
@@ -0,0 +1,2 @@
+% todo
+
diff --git a/tex/context/base/publ-xml.mkiv b/tex/context/base/publ-xml.mkiv
new file mode 100644
index 000000000..007f9bb27
--- /dev/null
+++ b/tex/context/base/publ-xml.mkiv
@@ -0,0 +1,114 @@
+%D \module
+%D [ file=publ-xml,
+%D version=2013.12.24,
+%D title=\CONTEXT\ Publication Support,
+%D subtitle=XML,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Publication Support / XML}
+
+\unprotect
+
+\unexpanded\def\convertbtxdatasettoxml
+ {\dosingleempty\publ_convert_to_xml}
+
+\def\publ_convert_to_xml[#1]%
+ {\ctxcommand{convertbtxdatasettoxml("\iffirstargument#1\else\v!standard\fi",true)}} % or current when not empty
+
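+% A usage sketch; the dataset name below is hypothetical, and the command falls
+% back to the standard dataset when the optional argument is left out:
+%
+% \convertbtxdatasettoxml
+% \convertbtxdatasettoxml[standard]
+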
+% \startxmlsetups btx:initialize
+% \xmlregistereddocumentsetups{#1}{}
+% \xmlsetsetup{#1}{bibtex|entry|field}{btx:*}
+% \xmlmain{#1}
+% \stopxmlsetups
+
+\startxmlsetups btx:initialize
+ \xmlsetsetup{#1}{bibtex|entry|field}{btx:*}
+ \xmlmain{#1}
+\stopxmlsetups
+
+% \startxmlsetups btx:entry
+% \xmlflush{#1}
+% \stopxmlsetups
+
+\startxmlsetups btx:field
+ \xmlflushcontext{#1}
+\stopxmlsetups
+
+\protect \endinput
+
+% \startxmlsetups bibtex:entry:getkeys
+% \xmladdsortentry{bibtex}{#1}{\xmlfilter{#1}{/field[@name='author']/text()}}
+% \xmladdsortentry{bibtex}{#1}{\xmlfilter{#1}{/field[@name='year' ]/text()}}
+% \xmladdsortentry{bibtex}{#1}{\xmlatt{#1}{tag}}
+% \stopxmlsetups
+
+% \startbuffer
+% \startxmlsetups xml:bibtex:sorter
+% \xmlresetsorter{bibtex}
+% % \xmlfilter{#1}{entry/command(bibtex:entry:getkeys)}
+% \xmlfilter{#1}{
+% bibtex
+% /entry[@category='article']
+% /field[@name='author' and find(text(),'Knuth')]
+% /../command(bibtex:entry:getkeys)}
+% \xmlsortentries{bibtex}
+% \xmlflushsorter{bibtex}{bibtex:entry:flush}
+% \stopxmlsetups
+% \stopbuffer
+
+% \bgroup
+% \setups[bibtex-commands]
+% \getbuffer
+% \egroup
+
+% \startxmlsetups bibtex:entry:flush
+% \xmlfilter{#1}{/field[@name='author']/context()} / %
+% \xmlfilter{#1}{/field[@name='year' ]/context()} / %
+% \xmlatt{#1}{tag}\par
+% \stopxmlsetups
+
+% \startpacked
+% \getbuffer
+% \stoppacked
+
+
+% \unexpanded\def\btx_xml_list_handle_entry
+% {\begingroup
+% \ignorespaces
+% \xmlfilter{btx:\currentbtxrendering}{/bibtex/entry[@tag='\currentbtxtag']/command(btx:format)}%
+% \removeunwantedspaces
+% \endgroup}
+
+% \startxmlsetups btx:format
+% \btxlistparameter\c!before\relax % prevents lookahead
+% \edef\currentbibxmlnode {#1}
+% \edef\currentbibxmltag {\xmlatt{#1}{tag}}
+% \edef\currentbtxcategory{\xmlatt{#1}{category}}
+% \ignorespaces
+% \xmlcommand{#1}{.}{btx:\currentbtxformat:\currentbibxmlcategory}
+% \removeunwantedspaces
+% \btxlistparameter\c!after\relax % prevents lookahead
+% \stopxmlsetups
+
+% \startxmlsetups btx:list
+% \xmlfilter{#1}{/bibtex/entry/command(bibtex:format)}
+% \stopxmlsetups
+
+% \startxmlsetups btx:btx
+% \xmlfilter{#1}{/entry/command(btx:format)}
+% \stopxmlsetups
+
+% \unexpanded\def\btx_xml_doifelse#1{\xmldoifelse\currentbibxmlnode{/field[@name='#1']}}
+% \unexpanded\def\btx_xml_doif #1{\xmldoif \currentbibxmlnode{/field[@name='#1']}}
+% \unexpanded\def\btx_xml_doifnot #1{\xmldoifnot \currentbibxmlnode{/field[@name='#1']}}
+% \def\btx_xml_flush #1{\xmlcontext \currentbibxmlnode{/field[@name='#1']}}
+% \def\btx_xml_setup {\xmlsetup \currentbibxmlnode} % {#1}
+% \unexpanded\def\btx_xml_todo #1{[#1]}
+
+% \xmlfilter{#1}{/field[@name='\currentbtxfield']/btxconcat('\currentbtxfield')}
diff --git a/tex/context/base/s-abr-01.tex b/tex/context/base/s-abr-01.tex
index 386f2ba57..e9ea6393b 100644
--- a/tex/context/base/s-abr-01.tex
+++ b/tex/context/base/s-abr-01.tex
@@ -48,6 +48,7 @@
\logo [ASCIITEX] {ascii\TeX}
\logo [BACHOTEX] {Bacho\TeX}
\logo [BIBTEX] {bib\TeX}
+\logo [MLBIBTEX] {MLbib\TeX}
\logo [BLUESKY] {BlueSky}
\logo [BMP] {bmp}
\logo [BSD] {bsd}
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index 7061e0103..28b0d63f7 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index 054b088ad..1e51302f7 100644
--- a/tex/context/base/status-lua.pdf
+++ b/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index aea0c2e69..c6d88290e 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 01/11/14 23:58:35
+-- merge date : 01/14/14 15:03:49
do -- begin closure to overcome local limits and interference