author    Context Git Mirror Bot <phg42.2a@gmail.com>  2014-11-17 01:15:05 +0100
committer Context Git Mirror Bot <phg42.2a@gmail.com>  2014-11-17 01:15:05 +0100
commit    4270545e13cb1bdf4cc0b3305a62ac9d680e296a (patch)
tree      9eaaa8d7564bb2fd517a72387ff7c9e4cfb3dde3
parent    1de6c31af257171be8ba0d5c7e28896612214dca (diff)
download  context-4270545e13cb1bdf4cc0b3305a62ac9d680e296a.tar.gz
2014-11-17 00:34:00
-rw-r--r--  tex/context/base/cont-new.mkiv                       2
-rw-r--r--  tex/context/base/context-version.pdf                 bin 4387 -> 4386 bytes
-rw-r--r--  tex/context/base/context.mkiv                        2
-rw-r--r--  tex/context/base/core-two.lua                        8
-rw-r--r--  tex/context/base/core-uti.lua                        2
-rw-r--r--  tex/context/base/grph-inc.lua                        8
-rw-r--r--  tex/context/base/lang-hyp.lua                        17
-rw-r--r--  tex/context/base/lxml-ini.mkiv                       7
-rw-r--r--  tex/context/base/publ-aut.lua                        124
-rw-r--r--  tex/context/base/publ-dat.lua                        12
-rw-r--r--  tex/context/base/publ-ini.lua                        267
-rw-r--r--  tex/context/base/publ-ini.mkiv                       2
-rw-r--r--  tex/context/base/publ-reg.lua                        66
-rw-r--r--  tex/context/base/spac-ver.lua                        88
-rw-r--r--  tex/context/base/spac-ver.mkiv                       9
-rw-r--r--  tex/context/base/status-files.pdf                    bin 24695 -> 24693 bytes
-rw-r--r--  tex/context/base/status-lua.pdf                      bin 342642 -> 342594 bytes
-rw-r--r--  tex/context/base/strc-itm.lua                        64
-rw-r--r--  tex/context/base/strc-itm.mkvi                       34
-rw-r--r--  tex/context/base/x-asciimath.mkiv                    2
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua   2
21 files changed, 376 insertions, 340 deletions
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 887899070..b12bf169a 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2014.11.12 21:46}
+\newcontextversion{2014.11.17 00:32}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf
index 902b45c8c..df6137533 100644
--- a/tex/context/base/context-version.pdf
+++ b/tex/context/base/context-version.pdf
Binary files differ
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index 38ebbad4b..61ab2188e 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -28,7 +28,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2014.11.12 21:46}
+\edef\contextversion{2014.11.17 00:32}
\edef\contextkind {beta}
%D For those who want to use this:
diff --git a/tex/context/base/core-two.lua b/tex/context/base/core-two.lua
index 1f2bc7c6c..9773eaf6f 100644
--- a/tex/context/base/core-two.lua
+++ b/tex/context/base/core-two.lua
@@ -42,9 +42,13 @@ end
jobpasses.define = allocate
-function jobpasses.save(id,str)
+function jobpasses.save(id,str,index)
local jti = allocate(id)
- jti[#jti+1] = str
+ if index then
+ jti[index] = str
+ else
+ jti[#jti+1] = str
+ end
end
function jobpasses.savetagged(id,tag,str)
diff --git a/tex/context/base/core-uti.lua b/tex/context/base/core-uti.lua
index ecb90d24b..27ef93608 100644
--- a/tex/context/base/core-uti.lua
+++ b/tex/context/base/core-uti.lua
@@ -36,7 +36,7 @@ local report_passes = logs.reporter("job","passes")
job = job or { }
local job = job
-job.version = 1.27
+job.version = 1.28
job.packversion = 1.02
-- some day we will implement loading of other jobs and then we need
diff --git a/tex/context/base/grph-inc.lua b/tex/context/base/grph-inc.lua
index 2d3cd252f..f191ed7d9 100644
--- a/tex/context/base/grph-inc.lua
+++ b/tex/context/base/grph-inc.lua
@@ -643,11 +643,19 @@ local function register(askedname,specification)
local newbase = oldbase
--
local fc = specification.cache or figures.cachepaths.path
+--
+-- -- todo:
+--
+-- if fc == "auto" then
+-- newpath = newpath .. "/cache"
+-- dir.mkdir(newpath)
+-- else
if fc and fc ~= "" and fc ~= "." then
newpath = fc
else
newbase = defaultprefix .. newbase
end
+-- end
if not file.is_writable(newpath) then
if trace_conversion then
report_inclusion("path %a is not writable, forcing conversion path %a",newpath,".")
diff --git a/tex/context/base/lang-hyp.lua b/tex/context/base/lang-hyp.lua
index 87216cc8b..205baccce 100644
--- a/tex/context/base/lang-hyp.lua
+++ b/tex/context/base/lang-hyp.lua
@@ -312,7 +312,13 @@ if context then
lpegmatch(word,extra,1,dictionary.patterns,dictionary.specials)
end
end
- local usedchars = lpegmatch(split,patterns.characters)
+ local permitted = patterns.characters
+-- local additional = "[]()"
+-- local additional = specification.additional
+-- if additional then
+-- permitted = permitted .. additional -- has to be attribute driven
+-- end
+ local usedchars = lpegmatch(split,permitted)
local characters = { }
local unicodes = { }
for i=1,#usedchars do
@@ -636,8 +642,9 @@ if context then
else
- -- traditional.loadpatterns("nl","lang-nl")
- -- traditional.loadpatterns("de","lang-de")
+-- traditional.loadpatterns("nl","lang-nl")
+-- traditional.loadpatterns("de","lang-de")
+-- traditional.loadpatterns("us","lang-us")
-- traditional.registerpattern("nl","e1ë", { start = 1, length = 2, before = "e", after = "e" } )
-- traditional.registerpattern("nl","oo1ë", { start = 2, length = 3, before = "o", after = "e" } )
@@ -659,5 +666,9 @@ else
-- print( "qqqxcxkqqq", traditional.injecthyphens(dictionaries.de, "qqqxcxkqqq", specification),"")
-- print("qqqqxcxkqqqq",traditional.injecthyphens(dictionaries.de,"qqqqxcxkqqqq",specification),"")
+-- print("kunstmatig", traditional.injecthyphens(dictionaries.nl,"kunstmatig", specification),"")
+-- print("kunststofmatig", traditional.injecthyphens(dictionaries.nl,"kunststofmatig", specification),"")
+-- print("kunst[stof]matig", traditional.injecthyphens(dictionaries.nl,"kunst[stof]matig", specification),"")
+
end
diff --git a/tex/context/base/lxml-ini.mkiv b/tex/context/base/lxml-ini.mkiv
index 13230eac8..0092ea4f4 100644
--- a/tex/context/base/lxml-ini.mkiv
+++ b/tex/context/base/lxml-ini.mkiv
@@ -235,11 +235,6 @@
% \xmlsetfunction{main}{verbatim}{lxml.displayverbatim}
% \xmlsetfunction{main}{verb} {lxml.inlineverbatim}
-% \unexpanded\def\startxmldisplayverbatim[#1]{}
-% \unexpanded\def\stopxmldisplayverbatim {}
-% \unexpanded\def\startxmlinlineverbatim [#1]{}
-% \unexpanded\def\stopxmlinlineverbatim {}
-
% we use an xml: namespace so one has to define a suitable verbatim, say
%
% \definetyping[xml:verbatim][typing]
@@ -258,7 +253,7 @@
\unexpanded\def\startxmlinlineverbatim[#1]%
{\begingroup
\edef\currenttype{xml:#1}%
- \let\stopxmldisplayverbatim\endgroup
+ \let\stopxmlinlineverbatim\endgroup
\doinitializeverbatim}
% will move but is developed for xml
diff --git a/tex/context/base/publ-aut.lua b/tex/context/base/publ-aut.lua
index a449e25b4..95378df4a 100644
--- a/tex/context/base/publ-aut.lua
+++ b/tex/context/base/publ-aut.lua
@@ -25,10 +25,7 @@ local commands = commands
local publications = publications
local datasets = publications.datasets
-local writers = publications.writers
-local authors = publications.authors
-local detailed = publications.detailed
-local casters = publications.casters
+local getcasted = publications.getcasted
local chardata = characters.data
@@ -211,9 +208,6 @@ local function splitauthorstring(str)
return authors
end
-authors.splitstring = splitauthorstring
-casters.author = splitauthorstring
-
local function the_initials(initials,symbol,connector)
if not symbol then
symbol = "."
@@ -310,23 +304,8 @@ function commands.btxauthorfield(i,field)
end
function commands.btxauthor(dataset,tag,field,settings)
- local current = datasets[dataset]
- if not current then
- return f_invalid("dataset",dataset)
- end
- local entry = current.luadata[tag]
- if not entry then
- return f_invalid("entry",tag)
- end
- local value = entry[field]
- if not value then
- return f_invalid("field",field)
- end
- local split = detailed.author[value]
- if type(split) ~= "table" then
- return f_invalid("cast",value)
- end
- local max = split and #split or 0
+ local split = getcasted(dataset,tag,field)
+ local max = split and #split or 0
if max == 0 then
return
-- error
@@ -408,9 +387,6 @@ local splitter = sorters.splitters.utf
-- authors(s) | year | journal | title | pages
-local pubsorters = { }
-authors.sorters = pubsorters
-
local function components(snippet,short)
local vons = snippet.vons
local surnames = snippet.surnames
@@ -462,41 +438,6 @@ local function writer(key,snippets)
return concat(snippets," ",1,s)
end
-writers.author = writer
-
-local default = { "author" }
-
-function authors.getauthor(dataset,tag,categories)
- local current = datasets[dataset]
- local luadata = current.luadata
- local entry = luadata and luadata[tag]
- if entry then
- local category = entry.category
- local list
- if categories then
- local c = categories[category]
- if c then
- local sets = c.sets
- list = sets and sets.author and sets.authors or default
- else
- list = default
- end
- else
- list = default
- end
- for i=1,#list do
- local l = list[i]
- local v = entry[l]
- if v then
- return detailed.author[v], l
- end
- end
- end
-end
-
-publications.serializeauthor = function(a) return writer { a } end
-publications.authorcomponents = components
-
local function newsplitter(splitter)
return table.setmetatableindex({},function(t,k) -- could be done in the sorter but seldom that many shared
local v = splitter(k,true) -- in other cases
@@ -510,29 +451,21 @@ end
-- first : key author editor publisher title journal volume number pages
-- second: year suffix title month day journal volume number
-local function directget(dataset,entry,field)
- local value = entry[field]
- if value then
- return detailed.author[value]
- end
-end
-
-local function byauthor(dataset,list,method)
+local function indexer(dataset,list,method)
local current = datasets[dataset]
local luadata = current.luadata
local result = { }
local splitted = newsplitter(splitter) -- saves mem
local snippets = { } -- saves mem
- local get = publications.directget or directget
local field = "author" -- todo
for i=1,#list do
-- either { tag, tag, ... } or { { tag, index }, { tag, index } }
- local li = list[i]
- local tag = type(li) == "string" and li or li[1]
- local index = tostring(i)
- local entry = luadata[tag]
+ local li = list[i]
+ local tag = type(li) == "string" and li or li[1]
+ local index = tostring(i)
+ local entry = luadata[tag]
if entry then
- local value = get(current,entry,field) or ""
+ local value = getcasted(current,entry,field) or ""
local mainkey = writer(value,snippets)
result[i] = {
index = i,
@@ -555,18 +488,18 @@ local function byauthor(dataset,list,method)
result[i] = {
index = i,
split = {
- splitted[""], -- key
- splitted[""], -- mainkey
- splitted["9999"], -- year
- splitted[" "], -- suffix
- splitted["14"], -- month
- splitted["33"], -- day
- splitted[""], -- journal
- splitted[""], -- volume
- splitted[""], -- number
- splitted[""], -- title
- splitted[""], -- pages
- splitted[index], -- index
+ splitted[""], -- key
+ splitted[""], -- mainkey
+ splitted["9999"], -- year
+ splitted[" "], -- suffix
+ splitted["14"], -- month
+ splitted["33"], -- day
+ splitted[""], -- journal
+ splitted[""], -- volume
+ splitted[""], -- number
+ splitted[""], -- title
+ splitted[""], -- pages
+ splitted[index], -- index
},
}
end
@@ -574,11 +507,8 @@ local function byauthor(dataset,list,method)
return result
end
-authors.sorters.writer = writer
-authors.sorters.author = byauthor
-
-function authors.sorted(dataset,list,sorttype) -- experimental
- local valid = byauthor(dataset,list,sorttype)
+local function sorted(dataset,list,sorttype) -- experimental
+ local valid = indexer(dataset,list,sorttype)
if #valid == 0 or #valid ~= #list then
return list
else
@@ -589,3 +519,11 @@ function authors.sorted(dataset,list,sorttype) -- experimental
return valid
end
end
+
+-- made public
+
+publications.indexers .author = indexer
+publications.writers .author = writer
+publications.sorters .author = sorted
+publications.casters .author = splitauthorstring
+publications.components.author = components
diff --git a/tex/context/base/publ-dat.lua b/tex/context/base/publ-dat.lua
index b11ddf215..2ab948c9d 100644
--- a/tex/context/base/publ-dat.lua
+++ b/tex/context/base/publ-dat.lua
@@ -60,9 +60,6 @@ publications.datasets = datasets
local writers = publications.writers or { }
publications.writers = writers
-local authors = publications.authors or { }
-publications.authors = authors
-
local tables = publications.tables or { }
publications.tables = tables
@@ -75,6 +72,15 @@ publications.loaders = loaders
local casters = { }
publications.casters = casters
+local sorters = { }
+publications.sorters = sorters
+
+local indexers = { }
+publications.indexers = indexers
+
+local components = { }
+publications.components = components -- register components
+
local enhancers = publications.enhancers or { }
publications.enhancers = enhancers
diff --git a/tex/context/base/publ-ini.lua b/tex/context/base/publ-ini.lua
index c4cca2047..1e00ca736 100644
--- a/tex/context/base/publ-ini.lua
+++ b/tex/context/base/publ-ini.lua
@@ -637,16 +637,87 @@ end
-- basic access
-local function getfield(dataset,tag,name)
+local function getfield(dataset,tag,name) -- for the moment quick and dirty
local d = datasets[dataset].luadata[tag]
return d and d[name]
end
-local function getdetail(dataset,tag,name)
+local function getdetail(dataset,tag,name) -- for the moment quick and dirty
local d = datasets[dataset].details[tag]
return d and d[name]
end
+local function getcasted(dataset,tag,field,specification)
+ local current = datasets[dataset]
+ if current then
+ local data = current.luadata[tag]
+ if data then
+ local category = data.category
+ if not specification then
+ specification = currentspecification
+ end
+ local catspec = specification.categories[category]
+ if not catspec then
+ return false
+ end
+ local fields = catspec.fields
+ if fields then
+ local sets = catspec.sets
+ if sets then
+ local set = sets[field]
+ if set then
+ for i=1,#set do
+ local field = set[i]
+ local value = fields[field] and data[field] -- redundant check
+ if value then
+ local kind = specification.types[field]
+ return detailed[kind][value], field, kind
+ end
+ end
+ end
+ end
+ local value = fields[field] and data[field] -- redundant check
+ if value then
+ local kind = specification.types[field]
+ return detailed[kind][value], field, kind
+ end
+ end
+ local data = current.details[tag]
+ if data then
+ local kind = specification.types[field]
+ return data[field], field, kind -- no check
+ end
+ end
+ end
+end
+
+local function getdirect(dataset,data,field,catspec) -- no field check, no dataset check
+ local catspec = (catspec or currentspecification).categories[data.category]
+ if not catspec then
+ return false
+ end
+ local fields = catspec.fields
+ if fields then
+ local sets = catspec.sets
+ if sets then
+ local set = sets[field]
+ if set then
+ for i=1,#set do
+ local field = set[i]
+ local value = fields[field] and data[field] -- redundant check
+ if value then
+ return value
+ end
+ end
+ end
+ end
+ return fields[field] and data[field] or nil -- redundant check
+ end
+end
+
+publications.getcasted = getcasted
+publications.getdirect = getdirect
+
function commands.btxsingularorplural(dataset,tag,name)
local d = datasets[dataset].details[tag]
if d then
@@ -719,27 +790,22 @@ do
-- of the source and or style).
function publications.enhancers.suffixes(dataset)
-
if not dataset then
- -- bad news
- return
+ return -- bad news
else
report("analyzing previous publication run for %a",dataset.name)
end
local used = usedentries[dataset.name]
if not used then
- -- probably a first run
- return
+ return -- probably a first run
end
- local luadata = dataset.luadata
- local details = dataset.details
- local ordered = dataset.ordered
- local caster = casters.author
- local getter = publications.directget
- local shorts = { }
- if not luadata or not detailr or not ordered then
- return
- -- also bad news
+ local luadata = dataset.luadata
+ local details = dataset.details
+ local ordered = dataset.ordered
+ local field = "author" -- currently only author
+ local shorts = { }
+ if not luadata or not details or not ordered then
+ return -- also bad news
end
for i=1,#ordered do
local entry = ordered[i]
@@ -754,9 +820,8 @@ do
local userdata = listentry.userdata
local btxspc = userdata and userdata.btxspc
if btxspc then
- local author = getter(dataset,entry,"author",specifications[btxspc])
- if author then
- author = caster(author)
+ local author = getcasted(dataset,tag,field,specifications[btxspc])
+ if type(author) == "table" then
-- number depends on sort order
local t = { }
if #author > 0 then
@@ -784,6 +849,8 @@ do
else
s[#s+1] = { tag, year, u, i }
end
+ else
+ report("author typecast expected for fiel %a",field)
end
else
--- no spec so let's forget about it
@@ -957,7 +1024,7 @@ do
end
end
- local function get(dataset,tag,field,what,check,catspec)
+ local function get(dataset,tag,field,what,check,catspec) -- somewhat more extensive
local current = rawget(datasets,dataset)
if current then
local data = current.luadata[tag]
@@ -1002,31 +1069,9 @@ do
return ""
end
- publications.get = get
-
- function publications.directget(dataset,data,field,catspec)
- local catspec = (catspec or currentspecification).categories[data.category]
- if not catspec then
- return false
- end
- local fields = catspec.fields
- if fields then
- local sets = catspec.sets
- if sets then
- local set = sets[field]
- if set then
- for i=1,#set do
- local field = set[i]
- local value = fields[field] and data[field] -- redundant check
- if value then
- return value
- end
- end
- end
- end
- return fields[field] and data[field] or nil -- redundant check
- end
- end
+ publications.permitted = permitted
+ publications.found = found
+ publications.get = get
function commands.btxfieldname(name,tag,field) context(get(name,tag,field,false,false)) end
function commands.btxfieldtype(name,tag,field) context(get(name,tag,field,true, false)) end
@@ -1379,7 +1424,6 @@ do
lists.result = result
structures.lists.result = result
rendering.pages = pages -- or list.pages
- -- inspect(pages)
end
methods[v_global] = methods[v_local]
@@ -1552,7 +1596,7 @@ do
end
end,
[v_author] = function(dataset,rendering,list)
- local valid = publications.authors.sorters.author(dataset,list)
+ local valid = publications.indexers.author(dataset,list)
if #valid == 0 or #valid ~= #list then
-- nothing to sort
else
@@ -2107,6 +2151,8 @@ do
end
end
+ --
+
local function simplegetter(first,last,field)
local value = first[field]
if value then
@@ -2141,15 +2187,6 @@ do
return v
end)
- -- todo: just a sort key and then fetch normal by fieldname
-
- -- setmetatableindex(citevariants,function(t,k)
- -- local p = registeredcitevariants[k]
- -- local v = p and p ~= k and rawget(t,p) or defaultvariant
- -- t[k] = v
- -- return v
- -- end)
-
setmetatableindex(citevariants,function(t,k)
local p = registeredcitevariants[k]
local v = nil
@@ -2173,6 +2210,69 @@ do
})
end
+ --
+
+ -- -- what to do with sort .. todo: sorters by type
+
+ -- function citevariants.handler(key)
+ -- local function setter(dataset,tag,entry,internal)
+ -- return {
+ -- dataset = dataset,
+ -- tag = tag,
+ -- internal = internal,
+ -- category = getfield(dataset,tag,key),
+ -- }
+ -- end
+ -- local function getter(first,last)
+ -- return simplegetter(first,last,key)
+ -- end
+ -- return function(presets)
+ -- processcite(presets,{
+ -- setter = setter,
+ -- getter = getter,
+ -- })
+ -- end
+ -- end
+ --
+ -- citevariants.category = citevariants.handler("category")
+ -- citevariants.type = citevariants.handler("type")
+
+ -- category | type
+
+ do
+
+ local function setter(dataset,tag,entry,internal)
+ return {
+ dataset = dataset,
+ tag = tag,
+ internal = internal,
+ category = getfield(dataset,tag,"category"),
+ }
+ end
+
+ local function getter(first,last)
+ return simplegetter(first,last,"category")
+ end
+
+ function citevariants.category(presets)
+ processcite(presets,{
+ -- variant = presets.variant or "serial",
+ setter = setter,
+ getter = getter,
+ })
+ end
+
+ function citevariants.type(presets)
+ processcite(presets,{
+ -- variant = presets.variant or "type",
+ setter = setter,
+ getter = getter,
+ })
+ end
+
+ end
+
+
-- entry
do
@@ -2245,7 +2345,7 @@ do
dataset = dataset,
tag = tag,
internal = internal,
- pages = getdetail(dataset,tag,"pages"),
+ pages = getcasted(dataset,tag,"pages"),
}
end
@@ -2367,41 +2467,6 @@ do
end
- -- category | type
-
- do
-
- local function setter(dataset,tag,entry,internal)
- return {
- dataset = dataset,
- tag = tag,
- internal = internal,
- category = getfield(dataset,tag,"category"),
- }
- end
-
- local function getter(first,last)
- return simplegetter(first,last,"category")
- end
-
- function citevariants.category(presets)
- processcite(presets,{
- -- variant = presets.variant or "serial",
- setter = setter,
- getter = getter,
- })
- end
-
- function citevariants.type(presets)
- processcite(presets,{
- -- variant = presets.variant or "type",
- setter = setter,
- getter = getter,
- })
- end
-
- end
-
-- key | tag
do
@@ -2437,20 +2502,10 @@ do
end
- -- todo : sort
- -- todo : choose between publications or commands namespace
- -- todo : use details.author
- -- todo : sort details.author
- -- (name, name and name) .. how names? how sorted?
- -- todo: we loop at the tex end .. why not here
- -- \cite[{hh,afo},kvm]
-
- -- common
+ -- authors
do
- local getauthor = publications.authors.getauthor
-
local currentbtxciteauthor = function()
context.currentbtxciteauthor()
return true -- needed?
@@ -2580,7 +2635,7 @@ do
dataset = dataset,
tag = tag,
internal = internal,
- author = getauthor(dataset,tag,currentspecification.categories), -- todo: list
+ author = getcasted(dataset,tag,"author"),
}
end
@@ -2607,7 +2662,7 @@ do
dataset = dataset,
tag = tag,
internal = internal,
- author = getauthor(dataset,tag,currentspecification.categories), -- todo: list
+ author = getcasted(dataset,tag,"author"),
num = text,
sortkey = text and lpegmatch(numberonly,text),
}
@@ -2634,7 +2689,7 @@ do
dataset = dataset,
tag = tag,
internal = internal,
- author = getauthor(dataset,tag,currentspecification.categories), -- todo: list
+ author = getcasted(dataset,tag,"author"),
year = getfield(dataset,tag,"year"),
suffix = getdetail(dataset,tag,"suffix"),
sortkey = getdetail(dataset,tag,"suffixedyear"),
diff --git a/tex/context/base/publ-ini.mkiv b/tex/context/base/publ-ini.mkiv
index 2c81f294e..a3fb68186 100644
--- a/tex/context/base/publ-ini.mkiv
+++ b/tex/context/base/publ-ini.mkiv
@@ -39,9 +39,9 @@
\writestatus{loading}{ConTeXt Publication Support / Initialization}
\registerctxluafile{publ-dat}{1.001}
+\registerctxluafile{publ-ini}{1.001}
\registerctxluafile{publ-aut}{1.001}
\registerctxluafile{publ-usr}{1.001}
-\registerctxluafile{publ-ini}{1.001}
\registerctxluafile{publ-oth}{1.001} % this could become an option
\registerctxluafile{publ-fnd}{1.001} % new method (for the moment only local)
\registerctxluafile{publ-jrn}{1.001}
diff --git a/tex/context/base/publ-reg.lua b/tex/context/base/publ-reg.lua
index ef4ed06a3..ccee57ecd 100644
--- a/tex/context/base/publ-reg.lua
+++ b/tex/context/base/publ-reg.lua
@@ -24,7 +24,7 @@ local v_all = variables.all
local publications = publications
local datasets = publications.datasets
local specifications = publications.specifications
-local detailed = publications.detailed
+local writers = publications.writers
local registrations = { }
local sequence = { }
@@ -81,38 +81,6 @@ function commands.setbtxregister(specification)
end
end
------ getter = publications.directget
-
-local function getter(current,tag,step) -- todo: detail
- local data = current.luadata[tag]
- if data then
- local catspec = specifications[step.specification].categories[data.category]
- if catspec then
- local fields = catspec.fields
- if fields then
- local field = step.field
- local sets = catspec.sets
- if sets then
- local set = sets[field]
- if set then
- for i=1,#set do
- local field = set[i]
- local value = fields[field] and data[field] -- redundant check
- if value then
- return field, value, catspec.types[field] or "string"
- end
- end
- end
- end
- local value = fields[field] and data[field]
- if value then
- return field, value, catspec.types[field] or "string"
- end
- end
- end
- end
-end
-
function commands.btxtoregister(dataset,tag)
local current = datasets[dataset]
for i=1,#sequence do
@@ -121,10 +89,9 @@ function commands.btxtoregister(dataset,tag)
if dset == v_all or dset == dataset then
local done = step.done
if not done[tag] then
- local field, value, kind = getter(current,tag,step)
+ local value, field, kind = getcasted(current,tag,step.field,specifications[step.specification])
if value then
- local cast = detailed[kind][value] or value
- flushers[kind](step,field,value,cast)
+ flushers[kind](step,field,value)
end
done[tag] = true
end
@@ -143,13 +110,12 @@ end
local ctx_dosetfastregisterentry = context.dosetfastregisterentry -- register entry key
local p_keywords = lpeg.tsplitat(lpeg.patterns.whitespace^0 * lpeg.P(";") * lpeg.patterns.whitespace^0)
-local serialize = publications.serializeauthor
-local components = publications.authorcomponents
+local components = publications.components.author
local f_author = formatters[ [[\btxindexedauthor{%s}{%s}{%s}{%s}{%s}{%s}]] ]
-function flushers.string(step,field,value,cast)
- if value and value ~= "" then
- ctx_dosetfastregisterentry(step.register,type(cast) == "string" and cast or value,"",step.processor or "","")
+function flushers.string(step,field,value)
+ if type(value) == "string" and value ~= "" then
+ ctx_dosetfastregisterentry(step.register,value or "","",step.processor or "","")
end
end
@@ -160,26 +126,26 @@ local shorts = {
invertedshort = "invertedshort",
}
-function flushers.author(step,field,value,cast)
- if cast and #cast > 0 then
+function flushers.author(step,field,value)
+ if type(value) == "table" and #value > 0 then
local register = step.register
local processor = step.processor
local alternative = shorts[step.alternative or "invertedshort"] or "invertedshort"
- for i=1,#cast do
- local a = cast[i]
- local k = serialize(a)
+ for i=1,#value do
+ local a = value[i]
+ local k = writers[field] { a }
local e = f_author(alternative,components(a,short))
ctx_dosetfastregisterentry(register,e,k,processor or "","")
end
end
end
-function flushers.keywords(step,field,value,cast)
- if cast and #cast > 0 then
+function flushers.keywords(step,field,value)
+ if type(value) == "table" and #value > 0 then
local register = step.register
local processor = step.processor
- for i=1,#cast do
- ctx_dosetfastregisterentry(register,cast[i],"",processor or "","")
+ for i=1,#value do
+ ctx_dosetfastregisterentry(register,value[i],"",processor or "","")
end
end
end
diff --git a/tex/context/base/spac-ver.lua b/tex/context/base/spac-ver.lua
index ce51a05e8..3afddc79a 100644
--- a/tex/context/base/spac-ver.lua
+++ b/tex/context/base/spac-ver.lua
@@ -8,11 +8,16 @@ if not modules then modules = { } end modules ['spac-ver'] = {
-- we also need to call the spacer for inserts!
+-- somehow lists still don't always have proper prev nodes so i need to
+-- check all of the luatex code some day .. maybe i should replace the
+-- whole mvl handler by lua code .. why not
+
-- todo: use lua nodes with lua data (>0.79)
-- see ** can go when 0.79
-- this code dates from the beginning and is kind of experimental; it
--- will be optimized and improved soon
+-- will be optimized and improved soon .. it's way too complex now but
+-- dates from less possibilities
--
-- the collapser will be redone with user nodes; also, we might get make
-- parskip into an attribute and appy it explicitly thereby getting rid
@@ -992,30 +997,24 @@ specialmethods[1] = function(pagehead,pagetail,start,penalty)
end
-- specialmethods[2] : always put something before and use that as to-be-changed
+--
+-- we could inject a vadjust to force a recalculation .. a mess
+--
+-- so, the next is far from robust and okay but for the moment this overlaying
+-- has to do
local function check_experimental_overlay(head,current) -- todo
local p = nil
local c = current
local n = nil
-setfield(head,"prev",nil) -- till we have 0.79 **
+ -- setfield(head,"prev",nil) -- till we have 0.79 **
- local function overlay(p, n, s, mvl)
- local c = getprev(n)
- while c and c ~= p do
- local p = getprev(c)
- free_node(c)
- c = p
- end
- setfield(n,"prev",nil)
- if not mvl then
- setfield(p,"next",n)
- end
- local p_ht = getfield(p,"height")
- local p_dp = getfield(p,"depth")
- local n_ht = getfield(n,"height")
+ local function overlay(p,n,s,mvl)
+ local p_ht = getfield(p,"height")
+ local p_dp = getfield(p,"depth")
+ local n_ht = getfield(n,"height")
local delta = n_ht + s + p_dp
- local k = new_kern(-delta)
if trace_vspacing then
report_vspacing("overlaying, prev height: %p, prev depth: %p, next height: %p, skips: %p, move up: %p",p_ht,p_dp,n_ht,s,delta)
end
@@ -1023,9 +1022,31 @@ setfield(head,"prev",nil) -- till we have 0.79 **
-- we should adapt pagetotal ! (need a hook for that)
setfield(p,"height",n_ht)
end
- return k
+ -- make kern
+ local k = new_kern(-delta)
+ if head == current then
+ head = k -- as it will get appended, else we lose the kern
+ end
+ -- remove rubbish
+ local c = getnext(p)
+ while c and c ~= n do
+ local nc = getnext(c)
+ if c == head then
+ head = nc
+ end
+ free_node(c)
+ c = nc
+ end
+ -- insert kern .. brr the kern is somehow not seen unless we also inject a penalty
+ setfield(p,"next",k)
+ setfield(k,"prev",p)
+ setfield(k,"next",n)
+ setfield(n,"prev",k)
+ -- done
+ return head, n
end
+ -- goto next line
while c do
local id = getid(c)
if id == glue_code or id == penalty_code or id == kern_code then
@@ -1039,7 +1060,7 @@ setfield(head,"prev",nil) -- till we have 0.79 **
end
end
if n then
- -- we have a next line
+ -- we have a next line, goto prev line
c = current
while c do
local id = getid(c)
@@ -1081,21 +1102,17 @@ setfield(head,"prev",nil) -- till we have 0.79 **
c = getnext(c)
end
if p and p ~= n then
- local k = overlay(p,n,s,true)
- insert_node_before(n,n,k)
- return k, getnext(n)
+ return overlay(p,n,s,true)
end
end
elseif p ~= n then
- local k = overlay(p,n,0,false )
- insert_node_after(p,p,k)
- return head, getnext(n)
+ return overlay(p,n,0,false)
end
end
return remove_node(head, current, true)
end
--- This will be replaces after 0.79 when we have a more robust look-back and
+-- This will be replaced after 0.80+ when we have a more robust look-back and
-- can look at the bigger picture.
-- todo: look back and when a special is there before a list is seen penalty keep ut
@@ -1193,6 +1210,8 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
end
if trace then trace_info("start analyzing",where,what) end
+-- local headprev = getprev(head)
+
while current do
local id = getid(current)
if id == hlist_code or id == vlist_code then
@@ -1360,6 +1379,8 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
elseif sc == overlay then
-- todo (overlay following line over previous
if trace then trace_skip("overlay",sc,so,sp,current) end
+ -- beware: head can actually be after the affected nodes as
+ -- we look back ... some day head will be the real head
head, current = check_experimental_overlay(head,current,a_snapmethod)
elseif ignore_following then
if trace then trace_skip("disabled",sc,so,sp,current) end
@@ -1557,9 +1578,9 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
local p = new_penalty(penalty_data)
if trace then trace_done("result",p) end
head, tail = insert_node_after(head,tail,p)
--- if penalty_data > special_penalty_min and penalty_data < special_penalty_max then
- properties[p] = { special_penalty = special_penalty or penalty_data }
--- end
+ -- if penalty_data > special_penalty_min and penalty_data < special_penalty_max then
+ properties[p] = { special_penalty = special_penalty or penalty_data }
+ -- end
end
if glue_data then
if not tail then tail = find_node_tail(head) end
@@ -1571,7 +1592,7 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
else
head, tail = insert_node_after(head,tail,glue_data)
end
-texnest[texnest.ptr].prevdepth = 0 -- appending to the list bypasses tex's prevdepth handler
+ texnest[texnest.ptr].prevdepth = 0 -- appending to the list bypasses tex's prevdepth handler
end
if trace then
if glue_data or penalty_data then
@@ -1582,6 +1603,13 @@ texnest[texnest.ptr].prevdepth = 0 -- appending to the list bypasses tex's prevd
trace_info("head has been changed from %a to %a",nodecodes[getid(oldhead)],nodecodes[getid(head)])
end
end
+
+-- if headprev then
+-- setprev(head,headprev)
+-- setnext(headprev,head)
+-- end
+-- print("C HEAD",tonode(head))
+
return head, true
end
diff --git a/tex/context/base/spac-ver.mkiv b/tex/context/base/spac-ver.mkiv
index 96fc70ff1..0289a0419 100644
--- a/tex/context/base/spac-ver.mkiv
+++ b/tex/context/base/spac-ver.mkiv
@@ -1915,6 +1915,13 @@
\fi\fi
\relax}
+% \startitemize[n]
+% \item \input zapf
+% \item \startitemize[a]
+% \item \input knuth
+% \stopitemize
+% \stopitemize
+
% \strut \hfill first line \blank[overlay] second line \hfill \strut
%
% \ruledvbox {
@@ -1932,7 +1939,7 @@
\definevspacing[\v!back] [category:7]
% together [category:8]
\definevspacing[\v!overlay] [category:9]
-\definevspacing[\v!always] [category:0]
+\definevspacing[\v!always] [category:0] % hm, internally it's discard
\definevspacing[\v!weak] [order:0]
\definevspacing[\v!strong] [order:100]
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index 77f7dbed8..4332c3048 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index 88d2dd01f..307792e1c 100644
--- a/tex/context/base/status-lua.pdf
+++ b/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/strc-itm.lua b/tex/context/base/strc-itm.lua
index 4945c282f..675917d59 100644
--- a/tex/context/base/strc-itm.lua
+++ b/tex/context/base/strc-itm.lua
@@ -6,33 +6,43 @@ if not modules then modules = { } end modules ['strc-itm'] = {
license = "see context related readme files"
}
-local structures = structures
-local itemgroups = structures.itemgroups
-local jobpasses = job.passes
-
-local setvariable = jobpasses.save
-local getvariable = jobpasses.getfield
-
-function itemgroups.register(name,nofitems,maxwidth)
- setvariable("itemgroup", { nofitems, maxwidth })
-end
-
-function itemgroups.nofitems(name,index)
- return getvariable("itemgroup", index, 1, 0)
-end
-
-function itemgroups.maxwidth(name,index)
- return getvariable("itemgroup", index, 2, 0)
-end
-
--- interface (might become counter/dimension)
-
-commands.registeritemgroup = itemgroups.register
-
-function commands.nofitems(name,index)
- context(getvariable("itemgroup", index, 1, 0))
+local structures = structures
+local itemgroups = structures.itemgroups
+local jobpasses = job.passes
+
+local setvariable = jobpasses.save
+local getvariable = jobpasses.getfield
+
+local texsetcount = tex.setcount
+local texsetdimen = tex.setdimen
+local texgetcount = tex.getcount
+
+local f_stamp = string.formatters["itemgroup:%s:%s"]
+local counts = table.setmetatableindex("number")
+
+-- We keep the counter at the Lua end so we can group the items within
+-- an itemgroup which in turn makes for less passes when one itemgroup
+-- entry is added or removed.
+
+function commands.analyzeitemgroup(name,level)
+ local n = counts[name]
+ if level == 1 then
+ n = n + 1
+ counts[name] = n
+ end
+ local stamp = f_stamp(name,n)
+ local n = getvariable(stamp,level,1,0)
+ local w = getvariable(stamp,level,2,0)
+ texsetcount("local","c_strc_itemgroups_max_items",n)
+ texsetdimen("local","d_strc_itemgroups_max_width",w)
end
-function commands.maxitemwidth(name,index)
- context(getvariable("itemgroup", index, 2, 0))
+function commands.registeritemgroup(name,level,nofitems,maxwidth)
+ local n = counts[name]
+ if texgetcount("@@trialtypesetting") == 0 then
+ -- no trialtypesetting
+ setvariable(f_stamp(name,n), { nofitems, maxwidth }, level)
+ elseif level == 1 then
+ counts[name] = n - 1
+ end
end
diff --git a/tex/context/base/strc-itm.mkvi b/tex/context/base/strc-itm.mkvi
index fd612b339..af03d13e1 100644
--- a/tex/context/base/strc-itm.mkvi
+++ b/tex/context/base/strc-itm.mkvi
@@ -15,6 +15,9 @@
\registerctxluafile{strc-itm}{1.001}
+%D As we analyze/register widths and such we could as well push and pop the
+%D numbers at the \LUA\ end (which saves a few calls).
+
%D Cleaning up this module happened around the time when Kate Bush came up
%D with the nicest numbered list of words: 50 Words For Snow. It's therefore
%D no surprise that I had that cd running several times when updating this
@@ -203,7 +206,6 @@
\newdimen \d_strc_itemgroups_max_width % multipass
\newcount \c_strc_itemgroups_max_items % multipass
-\newcount \c_strc_itemgroups_n_of_lists
\newcount \c_strc_itemgroups_n_of_items
\newcount \c_strc_itemgroups_nesting
\newcount \c_strc_itemgroups_column_depth
@@ -227,13 +229,10 @@
\let \currentitemgroupsegments \empty
\def\strc_itemgroups_register_status
- {\iftrialtypesetting \else
- \ctxcommand{registeritemgroup("\currentitemgroup",\number\c_strc_itemgroups_n_of_items,"\itemgroupparameter\c!maxwidth")}%
- \fi}
+ {\ctxcommand{registeritemgroup("\currentparentitemgroup",\number\c_strc_itemgroups_nesting,\number\c_strc_itemgroups_n_of_items,\number\dimexpr\itemgroupparameter\c!maxwidth)}}
\def\strc_itemgroups_check_n_of_items % we could do this at the lua end and save a call (i.e. will be dimen and counter)
- {\c_strc_itemgroups_max_items\ctxcommand{nofitems("\currentitemgroup",\number\c_strc_itemgroups_n_of_lists)}\relax
- \d_strc_itemgroups_max_width\ctxcommand{maxitemwidth("\currentitemgroup",\number\c_strc_itemgroups_n_of_lists)}\scaledpoint
+ {\ctxcommand{analyzeitemgroup("\currentparentitemgroup",\number\c_strc_itemgroups_nesting)}\relax
\edef\currentnofitems{\the\c_strc_itemgroups_max_items}}
% todo: \dodosetreference -> \strc_counters_register_component (to be checked)
@@ -467,7 +466,7 @@
\let\strc_itemgroups_margin_symbol\empty
\let\strc_itemgroups_extra_symbol\empty
%
- \global\letitemgroupparameter\c!maxwidth\!!zerocount
+ \global\letitemgroupparameter\c!maxwidth\!!zeropoint
}
\setvalue{\??itemgroupfirst\v!intro }{\settrue\c_strc_itemgroups_intro }
@@ -745,7 +744,6 @@
\iftrialtypesetting
\strc_counters_save\v_strc_itemgroups_counter
\fi
- \global\advance\c_strc_itemgroups_n_of_lists\plusone
\c_strc_itemgroups_n_of_items\zerocount
\strc_itemgroups_check_n_of_items
\ifx\itemgroupoptions\empty
@@ -941,8 +939,7 @@
\fi\fi
% new test, needed in sidefloats (surfaced in volker's proceedings)
\iftrialtypesetting
- \strc_counters_restore\v_strc_itemgroups_counter
- \global\advance\c_strc_itemgroups_n_of_lists\minusone
+ \strc_counters_restore\v_strc_itemgroups_counter % could happen in LUA
\fi
\global\advance\c_strc_itemgroups_nesting\minusone
\xdef\currentitemlevel{\number\c_strc_itemgroups_nesting}%
@@ -1039,11 +1036,22 @@
\strc_itemgroups_start_item_next
\fi
\ifconditional\c_strc_itemgroups_concat
+ % % not good enough:
+ %
% \vskip-\lastskip % we cannot use a \dimexpr here because
% \vskip-\lineheight % then we loose the stretch and shrink
% \nobreak
%
- \blank[\v!overlay]% new per 2014-03-27
+ % % new per 2014-11-16
+ %
+ % \blank[\v!overlay]% new per 2014-03-27
+ %
+ % % changed per 2014-11-16 as somehow we need a penalty to prevent the kern from disappearing
+ % % .. .kind of fight with default tex append-to-mvl behaviour .. so still not good enough
+ %
+ %\blank[\v!back]%
+ \nobreak
+ \blank[\v!overlay]%
%
\setfalse\c_strc_itemgroups_concat
\fi
@@ -1285,8 +1293,8 @@
\setfalse\c_strc_itemgroups_symbol}
\def\strc_itemgroups_make_fitting_box
- {\ifdim\wd\b_strc_itemgroups>\itemgroupparameter\c!maxwidth\scaledpoint\relax % brr, sp
- \normalexpanded{\global\setitemgroupparameter{\c!maxwidth}{\number\wd\b_strc_itemgroups}}%
+ {\ifdim\wd\b_strc_itemgroups>\itemgroupparameter\c!maxwidth\relax
+ \normalexpanded{\global\setitemgroupparameter{\c!maxwidth}{\the\wd\b_strc_itemgroups}}%
\fi
\ifdim\d_strc_itemgroups_max_width>\zeropoint
\setbox\b_strc_itemgroups\simplealignedbox
diff --git a/tex/context/base/x-asciimath.mkiv b/tex/context/base/x-asciimath.mkiv
index b9d7de416..c24377275 100644
--- a/tex/context/base/x-asciimath.mkiv
+++ b/tex/context/base/x-asciimath.mkiv
@@ -1,4 +1,4 @@
-D \module
+%D \module
%D [ file=x-asciimath,
%D version=2014.06.01, % 2006.04.24, % 1999.11.06,
%D title=\CONTEXT\ Modules,
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index b1cf6044c..79a0f9030 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 11/12/14 21:46:32
+-- merge date : 11/17/14 00:32:09
do -- begin closure to overcome local limits and interference